text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestStartContainer(t *testing.T) {
dm, err := getDockerManager()
if err != nil {
t.Errorf("test new docker manager failed. %v", err)
}
config := cri.ContainerConfig{
Name: "qcjtest",
Config: &container.Config{
Hostname: "127.0.0.1",
Image: "registry.southchina.huaweicloud.com/dgh/edge-demo-app:latest",
},
}
id, err := dm.CreateContainer(&config)
if err != nil {
t.Errorf("test create container failed, err [%v]", err)
}
err = dm.StartContainer(id)
if err != nil {
t.Errorf("test start container failed, err [%v]", err)
}
err = dm.StartContainer("123456")
if err == nil {
t.Errorf("test start container failed, err [%v]", err)
}
err = dm.StopContainer(id, 30)
if err != nil {
t.Errorf("test stop container failed, err [%v]", err)
}
err = dm.DeleteContainer(kubecontainer.ContainerID{ID: id})
if err != nil {
t.Errorf("test remove container failed, err [%v]", err)
}
} | explode_data.jsonl/24913 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 377
} | [
2830,
3393,
3479,
4502,
1155,
353,
8840,
836,
8,
341,
2698,
76,
11,
1848,
1669,
633,
35,
13659,
2043,
741,
743,
1848,
961,
2092,
341,
197,
3244,
13080,
445,
1944,
501,
26588,
6645,
4641,
13,
1018,
85,
497,
1848,
340,
197,
532,
25873... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestField_EmptyInterface(t *testing.T) {
type (
Rel struct {
Int int
}
Src struct {
Rel *Rel
}
SrcForce struct {
Rel *Rel `deepcopier:"force"`
}
Dst struct {
Rel interface{}
}
DstForce struct {
Rel interface{} `deepcopier:"force"`
}
)
var (
rel = &Rel{Int: 1}
src = &Src{Rel: rel}
srcForce = &SrcForce{Rel: rel}
)
//
// Without force
//
dst := &Dst{}
assert.Nil(t, Copy(src).To(dst))
assert.Nil(t, dst.Rel)
dst = &Dst{}
assert.Nil(t, Copy(dst).From(src))
assert.Nil(t, dst.Rel)
//
// With force
//
dstForce := &DstForce{}
assert.Nil(t, Copy(src).To(dstForce))
assert.Equal(t, src.Rel, dstForce.Rel)
dstForce = &DstForce{}
assert.Nil(t, Copy(dstForce).From(srcForce))
assert.Equal(t, srcForce.Rel, dstForce.Rel)
} | explode_data.jsonl/13665 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 392
} | [
2830,
3393,
1877,
76060,
1595,
5051,
1155,
353,
8840,
836,
8,
341,
13158,
2399,
197,
197,
6740,
2036,
341,
298,
57152,
526,
198,
197,
197,
630,
197,
7568,
1287,
2036,
341,
298,
197,
6740,
353,
6740,
198,
197,
197,
630,
197,
7568,
12... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRuleIdxPrefix(t *testing.T) {
common.Log.Debug("Entering function: %s", common.GetFunctionName())
sqls := [][]string{
{
"CREATE TABLE tbl (a int, unique key `xx_a` (`a`));",
"CREATE TABLE tbl (a int, key `xx_a` (`a`));",
`ALTER TABLE tbl ADD INDEX xx_a (a)`,
`ALTER TABLE tbl ADD UNIQUE INDEX xx_a (a)`,
},
{
`ALTER TABLE tbl ADD INDEX idx_a (a)`,
`ALTER TABLE tbl ADD UNIQUE INDEX uk_a (a)`,
},
}
for _, sql := range sqls[0] {
q, err := NewQuery4Audit(sql)
if err == nil {
rule := q.RuleIdxPrefix()
if rule.Item != "STA.003" {
t.Error("Rule not match:", rule.Item, "Expect : STA.003")
}
} else {
t.Error("sqlparser.Parse Error:", err)
}
}
for _, sql := range sqls[1] {
q, err := NewQuery4Audit(sql)
if err == nil {
rule := q.RuleIdxPrefix()
if rule.Item != "OK" {
t.Error("Rule not match:", rule.Item, "Expect : OK")
}
} else {
t.Error("sqlparser.Parse Error:", err)
}
}
common.Log.Debug("Exiting function: %s", common.GetFunctionName())
} | explode_data.jsonl/76846 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 482
} | [
2830,
3393,
11337,
11420,
14335,
1155,
353,
8840,
836,
8,
341,
83825,
5247,
20345,
445,
82867,
729,
25,
1018,
82,
497,
4185,
2234,
5152,
675,
2398,
30633,
82,
1669,
52931,
917,
515,
197,
197,
515,
298,
197,
1,
22599,
14363,
21173,
320... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestLookupDeclByNameNullable(t *testing.T) {
decl, ok := testSchema(t).lookupDeclByName("ExampleStruct", true)
if !ok {
t.Fatalf("lookupDeclByName failed")
}
checkStruct(t, decl.(*StructDecl), "ExampleStruct", true)
} | explode_data.jsonl/21385 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 85
} | [
2830,
3393,
34247,
21629,
16898,
15703,
1155,
353,
8840,
836,
8,
341,
197,
10005,
11,
5394,
1669,
1273,
8632,
1155,
568,
21020,
21629,
16898,
445,
13314,
9422,
497,
830,
340,
743,
753,
562,
341,
197,
3244,
30762,
445,
21020,
21629,
1689... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestQueryGetBlockByHash(t *testing.T) {
chainid := "mytestchainid4"
path := tempDir(t, "test4")
defer os.RemoveAll(path)
stub, err := setupTestLedger(chainid, path)
if err != nil {
t.Fatalf(err.Error())
}
args := [][]byte{[]byte(GetBlockByHash), []byte(chainid), []byte("0")}
prop := resetProvider(resources.Qscc_GetBlockByHash, chainid, &peer2.SignedProposal{}, nil)
res := stub.MockInvokeWithSignedProposal("1", args, prop)
assert.Equal(t, int32(shim.ERROR), res.Status, "GetBlockByHash should have failed with invalid hash: 0")
args = [][]byte{[]byte(GetBlockByHash), []byte(chainid), []byte(nil)}
res = stub.MockInvoke("2", args)
assert.Equal(t, int32(shim.ERROR), res.Status, "GetBlockByHash should have failed with nil hash")
} | explode_data.jsonl/18831 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 287
} | [
2830,
3393,
2859,
1949,
4713,
1359,
6370,
1155,
353,
8840,
836,
8,
341,
197,
8819,
307,
1669,
330,
2408,
1944,
8819,
307,
19,
698,
26781,
1669,
2730,
6184,
1155,
11,
330,
1944,
19,
1138,
16867,
2643,
84427,
5581,
692,
18388,
392,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestRepository_Commits(t *testing.T) {
t.Parallel()
ctx := actor.WithActor(context.Background(), &actor.Actor{
UID: 1,
})
// TODO(sqs): test CommitsOptions.Base
gitCommands := []string{
"GIT_COMMITTER_NAME=a GIT_COMMITTER_EMAIL=a@a.com GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit --allow-empty -m foo --author='a <a@a.com>' --date 2006-01-02T15:04:05Z",
"GIT_COMMITTER_NAME=c GIT_COMMITTER_EMAIL=c@c.com GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit --allow-empty -m bar --author='a <a@a.com>' --date 2006-01-02T15:04:06Z",
}
wantGitCommits := []*gitdomain.Commit{
{
ID: "b266c7e3ca00b1a17ad0b1449825d0854225c007",
Author: gitdomain.Signature{Name: "a", Email: "a@a.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:06Z")},
Committer: &gitdomain.Signature{Name: "c", Email: "c@c.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:07Z")},
Message: "bar",
Parents: []api.CommitID{"ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8"},
},
{
ID: "ea167fe3d76b1e5fd3ed8ca44cbd2fe3897684f8",
Author: gitdomain.Signature{Name: "a", Email: "a@a.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")},
Committer: &gitdomain.Signature{Name: "a", Email: "a@a.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")},
Message: "foo",
Parents: nil,
},
}
tests := map[string]struct {
repo api.RepoName
id api.CommitID
wantCommits []*gitdomain.Commit
wantTotal uint
}{
"git cmd": {
repo: MakeGitRepository(t, gitCommands...),
id: "b266c7e3ca00b1a17ad0b1449825d0854225c007",
wantCommits: wantGitCommits,
wantTotal: 2,
},
}
runCommitsTests := func(checker authz.SubRepoPermissionChecker) {
for label, test := range tests {
t.Run(label, func(t *testing.T) {
testCommits(ctx, label, test.repo, CommitsOptions{Range: string(test.id)}, checker, test.wantTotal, test.wantCommits, t)
// Test that trying to get a nonexistent commit returns RevisionNotFoundError.
if _, err := Commits(ctx, database.NewMockDB(), test.repo, CommitsOptions{Range: string(NonExistentCommitID)}, nil); !errors.HasType(err, &gitdomain.RevisionNotFoundError{}) {
t.Errorf("%s: for nonexistent commit: got err %v, want RevisionNotFoundError", label, err)
}
})
}
}
runCommitsTests(nil)
checker := getTestSubRepoPermsChecker()
runCommitsTests(checker)
} | explode_data.jsonl/8517 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1118
} | [
2830,
3393,
4624,
16946,
44703,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
20985,
1669,
12089,
26124,
18870,
5378,
19047,
1507,
609,
5621,
76094,
515,
197,
197,
6463,
25,
220,
16,
345,
197,
8824,
197,
322,
5343,
1141,
26358,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestAssembleRowEvent(t *testing.T) {
testCases := []struct {
regionID uint64
entry *cdcpb.Event_Row
expected model.RegionFeedEvent
err string
}{{
regionID: 1,
entry: &cdcpb.Event_Row{
StartTs: 1,
CommitTs: 2,
Key: []byte("k1"),
Value: []byte("v1"),
OpType: cdcpb.Event_Row_PUT,
},
expected: model.RegionFeedEvent{
RegionID: 1,
Val: &model.RawKVEntry{
OpType: model.OpTypePut,
StartTs: 1,
CRTs: 2,
Key: []byte("k1"),
Value: []byte("v1"),
RegionID: 1,
},
},
}, {
regionID: 2,
entry: &cdcpb.Event_Row{
StartTs: 1,
CommitTs: 2,
Key: []byte("k2"),
Value: []byte("v2"),
OpType: cdcpb.Event_Row_DELETE,
},
expected: model.RegionFeedEvent{
RegionID: 2,
Val: &model.RawKVEntry{
OpType: model.OpTypeDelete,
StartTs: 1,
CRTs: 2,
Key: []byte("k2"),
Value: []byte("v2"),
RegionID: 2,
},
},
}, {
regionID: 4,
entry: &cdcpb.Event_Row{
StartTs: 1,
CommitTs: 2,
Key: []byte("k3"),
Value: []byte("v3"),
OldValue: []byte("ov3"),
OpType: cdcpb.Event_Row_PUT,
},
expected: model.RegionFeedEvent{
RegionID: 4,
Val: &model.RawKVEntry{
OpType: model.OpTypePut,
StartTs: 1,
CRTs: 2,
Key: []byte("k3"),
Value: []byte("v3"),
OldValue: []byte("ov3"),
RegionID: 4,
},
},
}, {
regionID: 2,
entry: &cdcpb.Event_Row{
StartTs: 1,
CommitTs: 2,
Key: []byte("k2"),
Value: []byte("v2"),
OpType: cdcpb.Event_Row_UNKNOWN,
},
err: "[CDC:ErrUnknownKVEventType]unknown kv optype: UNKNOWN, entry: start_ts:1 " +
"commit_ts:2 key:\"k2\" value:\"v2\" ",
}}
for _, tc := range testCases {
event, err := assembleRowEvent(tc.regionID, tc.entry)
require.Equal(t, tc.expected, event)
if err != nil {
require.Equal(t, tc.err, err.Error())
}
}
} | explode_data.jsonl/32860 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1029
} | [
2830,
3393,
2121,
15790,
3102,
1556,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
197,
3943,
915,
2622,
21,
19,
198,
197,
48344,
262,
353,
4385,
4672,
65,
6904,
66932,
198,
197,
42400,
1614,
65642,
28916,
15... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestLibrato_IndefiniteBackoff(t *testing.T) {
handler := &GrumpyHandler{ResponseCodes: []int{500}}
server := httptest.NewServer(handler)
defer server.Close()
config := GetConfig()
config.LibratoUrl, _ = url.Parse(server.URL)
config.LibratoUser = "user"
config.LibratoToken = "token"
measurements := make(chan Measurement, 10)
librato := NewLibratoOutputter(measurements, config)
if librato.sendWithBackoff([]byte(`{}`)) {
t.Errorf("Retry should have given up. This is an especially grumpy handler")
}
} | explode_data.jsonl/51436 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 191
} | [
2830,
3393,
9194,
81,
4330,
62,
1425,
823,
15856,
3707,
1847,
1155,
353,
8840,
836,
8,
341,
53326,
1669,
609,
6464,
6752,
3050,
90,
2582,
20871,
25,
3056,
396,
90,
20,
15,
15,
11248,
41057,
1669,
54320,
70334,
7121,
5475,
36514,
340,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestConcurrentTraceArrival(t *testing.T) {
traceIds, batches := generateIdsAndBatches(128)
var wg sync.WaitGroup
cfg := Config{
DecisionWait: defaultTestDecisionWait,
NumTraces: uint64(2 * len(traceIds)),
ExpectedNewTracesPerSec: 64,
PolicyCfgs: testPolicy,
}
sp, _ := newTraceProcessor(zap.NewNop(), consumertest.NewNop(), cfg)
tsp := sp.(*tailSamplingSpanProcessor)
for _, batch := range batches {
// Add the same traceId twice.
wg.Add(2)
go func(td pdata.Traces) {
tsp.ConsumeTraces(context.Background(), td)
wg.Done()
}(batch)
go func(td pdata.Traces) {
tsp.ConsumeTraces(context.Background(), td)
wg.Done()
}(batch)
}
wg.Wait()
for i := range traceIds {
d, ok := tsp.idToTrace.Load(traceIds[i])
require.True(t, ok, "Missing expected traceId")
v := d.(*sampling.TraceData)
require.Equal(t, int64(i+1)*2, v.SpanCount, "Incorrect number of spans for entry %d", i)
}
} | explode_data.jsonl/31670 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 434
} | [
2830,
3393,
1109,
3231,
6550,
8838,
3936,
1155,
353,
8840,
836,
8,
341,
65058,
12701,
11,
44792,
1669,
6923,
12701,
3036,
33,
9118,
7,
16,
17,
23,
692,
2405,
63581,
12811,
28384,
2808,
198,
50286,
1669,
5532,
515,
197,
197,
74846,
141... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGammasGobable(t *testing.T) {
p, err := NewPolynomial([]*big.Int{
big.NewInt(0),
big.NewInt(10),
big.NewInt(20),
})
assert.NilError(t, err)
gammas := p.Gammas()
deserialized := new(Gammas)
shtest.EnsureGobable(t, gammas, deserialized)
} | explode_data.jsonl/60072 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 120
} | [
2830,
3393,
78884,
6899,
38,
674,
480,
1155,
353,
8840,
836,
8,
341,
3223,
11,
1848,
1669,
1532,
14658,
25358,
85288,
16154,
7371,
515,
197,
2233,
343,
7121,
1072,
7,
15,
1326,
197,
2233,
343,
7121,
1072,
7,
16,
15,
1326,
197,
2233,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAuth(t *testing.T) {
mockError := errors.New("error")
mockUsername := "mockUsername"
mockPassword := "mockPassword"
encoded := base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", mockUsername, mockPassword)))
testCases := map[string]struct {
mockECRClient func(m *mocks.Mockapi)
wantedUsername string
wantedPassword string
wantErr error
}{
"should return wrapped error given error returned from GetAuthorizationToken": {
mockECRClient: func(m *mocks.Mockapi) {
m.EXPECT().GetAuthorizationToken(gomock.Any()).Return(nil, mockError)
},
wantErr: fmt.Errorf("get ECR auth: %w", mockError),
},
"should return Auth data": {
mockECRClient: func(m *mocks.Mockapi) {
m.EXPECT().GetAuthorizationToken(gomock.Any()).Return(&ecr.GetAuthorizationTokenOutput{
AuthorizationData: []*ecr.AuthorizationData{
&ecr.AuthorizationData{
AuthorizationToken: aws.String(encoded),
},
},
}, nil)
},
wantedUsername: mockUsername,
wantedPassword: mockPassword,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
// GIVEN
ctrl := gomock.NewController(t)
defer ctrl.Finish()
mockECRAPI := mocks.NewMockapi(ctrl)
tc.mockECRClient(mockECRAPI)
client := ECR{
mockECRAPI,
}
gotUsername, gotPassword, gotErr := client.Auth()
require.Equal(t, tc.wantedUsername, gotUsername)
require.Equal(t, tc.wantedPassword, gotPassword)
require.Equal(t, tc.wantErr, gotErr)
})
}
} | explode_data.jsonl/31311 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 640
} | [
2830,
3393,
5087,
1155,
353,
8840,
836,
8,
341,
77333,
1454,
1669,
5975,
7121,
445,
841,
5130,
77333,
11115,
1669,
330,
16712,
11115,
698,
77333,
4876,
1669,
330,
16712,
4876,
1837,
197,
19329,
1669,
2331,
21,
19,
36086,
14690,
50217,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBlobberGRPCService_GetObjectPath_Success(t *testing.T) {
req := &blobbergrpc.GetObjectPathRequest{
Context: &blobbergrpc.RequestContext{
Client: "owner",
ClientKey: "",
Allocation: "",
},
Allocation: "",
Path: "path",
BlockNum: "120",
}
mockStorageHandler := &storageHandlerI{}
mockReferencePackage := &mocks.PackageHandler{}
mockStorageHandler.On("verifyAllocation", mock.Anything, req.Allocation, false).Return(&allocation.Allocation{
ID: "allocationId",
Tx: req.Allocation,
OwnerID: "owner",
}, nil)
mockReferencePackage.On("GetObjectPathGRPC", mock.Anything, mock.Anything, mock.Anything).Return(&blobbergrpc.ObjectPath{
RootHash: "hash",
FileBlockNum: 120,
}, nil)
svc := newGRPCBlobberService(mockStorageHandler, mockReferencePackage)
resp, err := svc.GetObjectPath(context.Background(), req)
if err != nil {
t.Fatal("unexpected error")
}
assert.Equal(t, resp.ObjectPath.RootHash, "hash")
assert.Equal(t, resp.ObjectPath.FileBlockNum, int64(120))
} | explode_data.jsonl/66832 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 404
} | [
2830,
3393,
37985,
652,
8626,
4872,
1860,
13614,
1190,
1820,
87161,
1155,
353,
8840,
836,
8,
341,
24395,
1669,
609,
35112,
652,
56585,
25618,
1820,
1900,
515,
197,
70871,
25,
609,
35112,
652,
56585,
9659,
1972,
515,
298,
71724,
25,
257,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSample1(t *testing.T) {
N, K := 2, 2
E := [][]int{
{0, 1},
{1, 2},
{2, 0},
}
A := []int{1, 2}
var expect int64 = 4
runSample(t, N, K, E, A, expect)
} | explode_data.jsonl/59614 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
17571,
16,
1155,
353,
8840,
836,
8,
341,
18317,
11,
730,
1669,
220,
17,
11,
220,
17,
198,
22784,
1669,
52931,
396,
515,
197,
197,
90,
15,
11,
220,
16,
1583,
197,
197,
90,
16,
11,
220,
17,
1583,
197,
197,
90,
17,
11... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestVerifyPeerCertificate(t *testing.T) {
issuer, err := x509.ParseCertificate(testRSACertificateIssuer)
if err != nil {
panic(err)
}
rootCAs := x509.NewCertPool()
rootCAs.AddCert(issuer)
now := func() time.Time { return time.Unix(1476984729, 0) }
sentinelErr := errors.New("TestVerifyPeerCertificate")
verifyCallback := func(called *bool, rawCerts [][]byte, validatedChains [][]*x509.Certificate) error {
if l := len(rawCerts); l != 1 {
return fmt.Errorf("got len(rawCerts) = %d, wanted 1", l)
}
if len(validatedChains) == 0 {
return errors.New("got len(validatedChains) = 0, wanted non-zero")
}
*called = true
return nil
}
tests := []struct {
configureServer func(*Config, *bool)
configureClient func(*Config, *bool)
validate func(t *testing.T, testNo int, clientCalled, serverCalled bool, clientErr, serverErr error)
}{
{
configureServer: func(config *Config, called *bool) {
config.InsecureSkipVerify = false
config.VerifyPeerCertificate = func(rawCerts [][]byte, validatedChains [][]*x509.Certificate) error {
return verifyCallback(called, rawCerts, validatedChains)
}
},
configureClient: func(config *Config, called *bool) {
config.InsecureSkipVerify = false
config.VerifyPeerCertificate = func(rawCerts [][]byte, validatedChains [][]*x509.Certificate) error {
return verifyCallback(called, rawCerts, validatedChains)
}
},
validate: func(t *testing.T, testNo int, clientCalled, serverCalled bool, clientErr, serverErr error) {
if clientErr != nil {
t.Errorf("test[%d]: client handshake failed: %v", testNo, clientErr)
}
if serverErr != nil {
t.Errorf("test[%d]: server handshake failed: %v", testNo, serverErr)
}
if !clientCalled {
t.Errorf("test[%d]: client did not call callback", testNo)
}
if !serverCalled {
t.Errorf("test[%d]: server did not call callback", testNo)
}
},
},
{
configureServer: func(config *Config, called *bool) {
config.InsecureSkipVerify = false
config.VerifyPeerCertificate = func(rawCerts [][]byte, validatedChains [][]*x509.Certificate) error {
return sentinelErr
}
},
configureClient: func(config *Config, called *bool) {
config.VerifyPeerCertificate = nil
},
validate: func(t *testing.T, testNo int, clientCalled, serverCalled bool, clientErr, serverErr error) {
if serverErr != sentinelErr {
t.Errorf("#%d: got server error %v, wanted sentinelErr", testNo, serverErr)
}
},
},
{
configureServer: func(config *Config, called *bool) {
config.InsecureSkipVerify = false
},
configureClient: func(config *Config, called *bool) {
config.VerifyPeerCertificate = func(rawCerts [][]byte, validatedChains [][]*x509.Certificate) error {
return sentinelErr
}
},
validate: func(t *testing.T, testNo int, clientCalled, serverCalled bool, clientErr, serverErr error) {
if clientErr != sentinelErr {
t.Errorf("#%d: got client error %v, wanted sentinelErr", testNo, clientErr)
}
},
},
{
configureServer: func(config *Config, called *bool) {
config.InsecureSkipVerify = false
},
configureClient: func(config *Config, called *bool) {
config.InsecureSkipVerify = true
config.VerifyPeerCertificate = func(rawCerts [][]byte, validatedChains [][]*x509.Certificate) error {
if l := len(rawCerts); l != 1 {
return fmt.Errorf("got len(rawCerts) = %d, wanted 1", l)
}
// With InsecureSkipVerify set, this
// callback should still be called but
// validatedChains must be empty.
if l := len(validatedChains); l != 0 {
return fmt.Errorf("got len(validatedChains) = %d, wanted zero", l)
}
*called = true
return nil
}
},
validate: func(t *testing.T, testNo int, clientCalled, serverCalled bool, clientErr, serverErr error) {
if clientErr != nil {
t.Errorf("test[%d]: client handshake failed: %v", testNo, clientErr)
}
if serverErr != nil {
t.Errorf("test[%d]: server handshake failed: %v", testNo, serverErr)
}
if !clientCalled {
t.Errorf("test[%d]: client did not call callback", testNo)
}
},
},
}
for i, test := range tests {
c, s := net.Pipe()
done := make(chan error)
var clientCalled, serverCalled bool
go func() {
config := testConfig.Clone()
config.ServerName = "example.golang"
config.ClientAuth = RequireAndVerifyClientCert
config.ClientCAs = rootCAs
config.Time = now
test.configureServer(config, &serverCalled)
err = Server(s, config).Handshake()
s.Close()
done <- err
}()
config := testConfig.Clone()
config.ServerName = "example.golang"
config.RootCAs = rootCAs
config.Time = now
test.configureClient(config, &clientCalled)
clientErr := Client(c, config).Handshake()
c.Close()
serverErr := <-done
test.validate(t, i, clientCalled, serverCalled, clientErr, serverErr)
}
} | explode_data.jsonl/71373 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2022
} | [
2830,
3393,
32627,
30888,
33202,
1155,
353,
8840,
836,
8,
341,
197,
66817,
11,
1848,
1669,
856,
20,
15,
24,
8937,
33202,
8623,
11451,
1706,
20962,
98902,
340,
743,
1848,
961,
2092,
341,
197,
30764,
3964,
340,
197,
630,
33698,
34,
2121... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSortDependencies(t *testing.T) {
testInstanceA := createTestInstance("example-a")
testInstanceB := createTestInstance("example-b")
testInstanceC := createTestInstance("example-c", testInstanceB)
testInstanceD := createTestInstance("example-d", testInstanceC)
testInstanceE := createTestInstance("example-e")
testInstanceF := createTestInstance("example-f")
testInstanceG := createTestInstance("example-g", testInstanceF)
if !peerDepends(testInstanceC, testInstanceB) {
t.Errorf("Expected test instance c to depend on test instance b")
}
if !peerDepends(testInstanceD, testInstanceC) {
t.Errorf("Expected test instance d to depend on test instance c")
}
if !peerDepends(testInstanceG, testInstanceF) {
t.Errorf("Expected test instance g to depend on test instance f")
}
permutations := generatePermutations([]*ModuleInstance{
testInstanceA,
testInstanceB,
testInstanceC,
testInstanceD,
testInstanceE,
testInstanceF,
testInstanceG,
})
for _, classList := range permutations {
sortedList, err := sortDependencyList("client", classList)
if err != nil {
t.Errorf("Unexpected error when sorting peer list: %s", err)
}
if !isBefore(sortedList, "example-b", "example-c") {
t.Errorf("Expected example-b to be before example-c in sorted list")
}
if !isBefore(sortedList, "example-c", "example-d") {
t.Errorf("Expected example-c to be before example-d in sorted list")
}
if !isBefore(sortedList, "example-f", "example-g") {
t.Errorf("Expected example-f to be before example-g in sorted list")
}
}
} | explode_data.jsonl/34589 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 536
} | [
2830,
3393,
10231,
48303,
1155,
353,
8840,
836,
8,
341,
18185,
2523,
32,
1669,
1855,
2271,
2523,
445,
8687,
7409,
1138,
18185,
2523,
33,
1669,
1855,
2271,
2523,
445,
8687,
1455,
1138,
18185,
2523,
34,
1669,
1855,
2271,
2523,
445,
8687,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestObjectFilter(t *testing.T) {
cases := []struct {
note string
object string
filter string
expected string
}{
{
note: "base",
object: `{"a": {"b": {"c": 7, "d": 8}}, "e": 9}`,
filter: `{"a": {"b": {"c": null}}}`,
expected: `{"a": {"b": {"c": 7}}}`,
},
{
note: "multiple roots",
object: `{"a": {"b": {"c": 7, "d": 8}}, "e": 9}`,
filter: `{"a": {"b": {"c": null}}, "e": null}`,
expected: `{"a": {"b": {"c": 7}}, "e": 9}`,
},
{
note: "shared roots",
object: `{"a": {"b": {"c": 7, "d": 8}, "e": 9}}`,
filter: `{"a": {"b": {"c": null}, "e": null}}`,
expected: `{"a": {"b": {"c": 7}, "e": 9}}`,
},
{
note: "empty filter",
object: `{"a": 7}`,
filter: `{}`,
expected: `{}`,
},
{
note: "empty object",
object: `{}`,
filter: `{"a": {"b": null}}`,
expected: `{}`,
},
{
note: "arrays",
object: `{"a": [{"b": 7, "c": 8}, {"d": 9}]}`,
filter: `{"a": {"0": {"b": null}, "1": null}}`,
expected: `{"a": [{"b": 7}, {"d": 9}]}`,
},
{
note: "object with number keys",
object: `{"a": [{"1":["b", "c", "d"]}, {"x": "y"}]}`,
filter: `{"a": {"0": {"1": {"2": null}}}}`,
expected: `{"a": [{"1": ["d"]}]}`,
},
{
note: "sets",
object: `{"a": {"b", "c", "d"}, "x": {"y"}}`,
filter: `{"a": {"b": null, "d": null}, "x": null}`,
expected: `{"a": {"b", "d"}, "x": {"y"}}`,
},
}
for _, tc := range cases {
t.Run(tc.note, func(t *testing.T) {
obj := MustParseTerm(tc.object).Value.(Object)
filterObj := MustParseTerm(tc.filter).Value.(Object)
expected := MustParseTerm(tc.expected).Value.(Object)
actual, err := obj.Filter(filterObj)
if err != nil {
t.Errorf("unexpected error: %s", err)
}
if actual.Compare(expected) != 0 {
t.Errorf("Expected:\n\n\t%s\n\nGot:\n\n\t%s\n\n", expected, actual)
}
})
}
} | explode_data.jsonl/2908 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 987
} | [
2830,
3393,
1190,
5632,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
9038,
1272,
257,
914,
198,
197,
35798,
256,
914,
198,
197,
50108,
256,
914,
198,
197,
42400,
914,
198,
197,
59403,
197,
197,
515,
298,
903... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestDeletedControlPlaneIsAlwaysAllowed(t *testing.T) {
controlPlane := newControlPlaneWithVersion("my-smcp", "istio-system", versions.V2_2.String())
controlPlane.DeletionTimestamp = now()
validator, _, _ := createControlPlaneValidatorTestFixture()
response := validator.Handle(ctx, createCreateRequest(controlPlane))
assert.True(response.Allowed, "Expected validator to allow deleted ServiceMeshControlPlane", t)
} | explode_data.jsonl/10230 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 133
} | [
2830,
3393,
26039,
3273,
34570,
3872,
37095,
35382,
1155,
353,
8840,
836,
8,
341,
82786,
34570,
1669,
501,
3273,
34570,
2354,
5637,
445,
2408,
4668,
4672,
497,
330,
380,
815,
36648,
497,
10795,
5058,
17,
62,
17,
6431,
2398,
82786,
34570... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEmpty(t *testing.T) {
key := makeKey("aaa")
l := NewSkiplist(&testStorage{}, 0)
it := l.NewIter()
require.False(t, it.Valid())
it.First()
require.False(t, it.Valid())
it.Last()
require.False(t, it.Valid())
require.False(t, it.SeekGE(key))
require.False(t, it.Valid())
} | explode_data.jsonl/26098 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 123
} | [
2830,
3393,
3522,
1155,
353,
8840,
836,
8,
341,
23634,
1669,
1281,
1592,
445,
32646,
1138,
8810,
1669,
1532,
50,
6642,
39934,
2099,
1944,
5793,
22655,
220,
15,
340,
23374,
1669,
326,
7121,
8537,
2822,
17957,
50757,
1155,
11,
432,
47156,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFileDescForType(t *testing.T) {
for _, test := range []struct {
st reflect.Type
wantFd *dpb.FileDescriptorProto
}{
{reflect.TypeOf(pb.SearchResponse_Result{}), fdTest},
{reflect.TypeOf(pb.ToBeExtended{}), fdProto2},
} {
fd, err := s.fileDescForType(test.st)
if err != nil || !proto.Equal(fd, test.wantFd) {
t.Errorf("fileDescForType(%q) = %q, %v, want %q, <nil>", test.st, fd, err, test.wantFd)
}
}
} | explode_data.jsonl/79209 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 203
} | [
2830,
3393,
1703,
11065,
2461,
929,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1669,
2088,
3056,
1235,
341,
197,
18388,
257,
8708,
10184,
198,
197,
50780,
74476,
353,
9796,
65,
8576,
11709,
31549,
198,
197,
59403,
197,
197,
90,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestBucketBlockSet_labelMatchers(t *testing.T) {
set := newBucketBlockSet(labels.FromStrings("a", "b", "c", "d"))
cases := []struct {
in []labels.Matcher
res []labels.Matcher
match bool
}{
{
in: []labels.Matcher{},
res: []labels.Matcher{},
match: true,
},
{
in: []labels.Matcher{
labels.NewEqualMatcher("a", "b"),
labels.NewEqualMatcher("c", "d"),
},
res: []labels.Matcher{},
match: true,
},
{
in: []labels.Matcher{
labels.NewEqualMatcher("a", "b"),
labels.NewEqualMatcher("c", "b"),
},
match: false,
},
{
in: []labels.Matcher{
labels.NewEqualMatcher("a", "b"),
labels.NewEqualMatcher("e", "f"),
},
res: []labels.Matcher{
labels.NewEqualMatcher("e", "f"),
},
match: true,
},
}
for _, c := range cases {
res, ok := set.labelMatchers(c.in...)
testutil.Equals(t, c.match, ok)
testutil.Equals(t, c.res, res)
}
} | explode_data.jsonl/39690 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 473
} | [
2830,
3393,
36018,
4713,
1649,
6106,
37862,
1155,
353,
8840,
836,
8,
341,
8196,
1669,
501,
36018,
4713,
1649,
36915,
11439,
20859,
445,
64,
497,
330,
65,
497,
330,
66,
497,
330,
67,
28075,
1444,
2264,
1669,
3056,
1235,
341,
197,
17430... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestImageFill(t *testing.T) {
w, h := 10, 10
img, err := NewImage(w, h, FilterNearest)
if err != nil {
t.Fatal(err)
return
}
clr := &mutableRGBA{0x80, 0x80, 0x80, 0x80}
if err := img.Fill(clr); err != nil {
t.Fatal(err)
return
}
clr.r = 0
for j := 0; j < h; j++ {
for i := 0; i < w; i++ {
got := img.At(i, j)
want := color.RGBA{0x80, 0x80, 0x80, 0x80}
if got != want {
t.Errorf("img At(%d, %d): got %#v; want %#v", i, j, got, want)
}
}
}
} | explode_data.jsonl/10897 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 260
} | [
2830,
3393,
1906,
14449,
1155,
353,
8840,
836,
8,
341,
6692,
11,
305,
1669,
220,
16,
15,
11,
220,
16,
15,
198,
39162,
11,
1848,
1669,
1532,
1906,
3622,
11,
305,
11,
12339,
8813,
15432,
340,
743,
1848,
961,
2092,
341,
197,
3244,
26... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestCacheHit(t *testing.T) {
c := cache.New()
c.SetWithExpiration("test-key", "test-value", maxTime)
v, ok := c.Get("test-key")
if !ok {
t.Error("cache miss")
}
if diff := cmp.Diff("test-value", v.(string)); diff != "" {
t.Errorf("value differs.\n%v", diff)
}
} | explode_data.jsonl/70364 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 119
} | [
2830,
3393,
8233,
19498,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
6500,
7121,
2822,
1444,
4202,
2354,
66301,
445,
1944,
16173,
497,
330,
1944,
19083,
497,
1932,
1462,
692,
5195,
11,
5394,
1669,
272,
2234,
445,
1944,
16173,
1138,
743,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// Test_docs_index__54fb35cd129733591dab1e37165b7232 is generated example
// code for the documentation build: the tag:/end: comment markers delimit
// the snippet extracted into the docs, and the trailing "// SKIP" comments
// mark lines excluded from that snippet. Do not edit the markers by hand.
func Test_docs_index__54fb35cd129733591dab1e37165b7232(t *testing.T) {
	es, _ := elasticsearch.NewDefaultClient()
	// tag:54fb35cd129733591dab1e37165b7232[]
	res, err := es.Index(
		"twitter",
		strings.NewReader(`{
	  "message": "elasticsearch now has versioning support, double cool!"
	}`),
		es.Index.WithDocumentID("1"),
		es.Index.WithVersion(2),
		es.Index.WithVersionType("external"),
		es.Index.WithPretty(),
	)
	fmt.Println(res, err)
	if err != nil { // SKIP
		t.Fatalf("Error getting the response: %s", err) // SKIP
	} // SKIP
	defer res.Body.Close() // SKIP
	// end:54fb35cd129733591dab1e37165b7232[]
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 258
} | [
2830,
3393,
49692,
3560,
563,
20,
19,
10798,
18,
20,
4385,
16,
17,
24,
22,
18,
18,
20,
24,
16,
67,
370,
16,
68,
18,
22,
16,
21,
20,
65,
22,
17,
18,
17,
1155,
353,
8840,
836,
8,
341,
78966,
11,
716,
1669,
655,
27791,
7121,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestValidateContextTags runs transactions with boundary-pushing tag keys
// and values through server-side validation (validateTransaction): an
// over-long key, an over-long value, and a key containing a reserved
// character.
func TestValidateContextTags(t *testing.T) {
	t.Run("long_key", func(t *testing.T) {
		// NOTE(axw) this should probably fail, but does not. See:
		// https://github.com/elastic/apm-server/issues/910
		validateTransaction(t, func(tx *apm.Transaction) {
			// 1025 chars — presumably just over a 1024-char limit; confirm
			// against the server-side schema.
			tx.Context.SetTag(strings.Repeat("x", 1025), "x")
		})
	})
	t.Run("long_value", func(t *testing.T) {
		validateTransaction(t, func(tx *apm.Transaction) {
			tx.Context.SetTag("x", strings.Repeat("x", 1025))
		})
	})
	t.Run("reserved_key_chars", func(t *testing.T) {
		validateTransaction(t, func(tx *apm.Transaction) {
			// "." in the key — presumably a reserved character; verify.
			tx.Context.SetTag("x.y", "z")
		})
	})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 259
} | [
2830,
3393,
17926,
1972,
15930,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
4825,
3097,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
197,
322,
16743,
41922,
86,
8,
419,
1265,
4658,
3690,
11,
714,
1558,
537,
13,
3496,
510,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRootDir(t *testing.T) {
var dirs = []struct {
input, dirname, result string
}{
{"/tmp/foo/bar", "content", "/tmp/foo/content"},
{"/", "content", "/content"},
{"/tmp/", "content", "/tmp/content"},
}
for _, tt := range dirs {
if got, want := rootDir(tt.input, tt.dirname), tt.result; got != want {
t.Errorf("rootDir(): got %v want %v", got, want)
}
}
} | explode_data.jsonl/66393 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 160
} | [
2830,
3393,
8439,
6184,
1155,
353,
8840,
836,
8,
341,
2405,
42248,
284,
3056,
1235,
341,
197,
22427,
11,
29908,
11,
1102,
914,
198,
197,
59403,
197,
197,
90,
3115,
5173,
60555,
49513,
497,
330,
1796,
497,
3521,
5173,
60555,
27917,
711... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// Test_logSelectorExpr_String parses a set of LogQL log selector
// expressions and checks two properties: whether the parsed expression
// yields a non-nil line filter (empty "" and match-all ".*" filters must
// collapse to nil), and that String() round-trips the selector modulo
// whitespace.
func Test_logSelectorExpr_String(t *testing.T) {
	t.Parallel()
	tests := []struct {
		selector     string
		expectFilter bool
	}{
		{`{foo!~"bar"}`, false},
		{`{foo="bar", bar!="baz"}`, false},
		{`{foo="bar", bar!="baz"} != "bip" !~ ".+bop"`, true},
		{`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, true},
		{`{foo="bar", bar!="baz"} |= ""`, false},
		{`{foo="bar", bar!="baz"} |~ ""`, false},
		{`{foo="bar", bar!="baz"} |~ ".*"`, false},
		{`{foo="bar", bar!="baz"} |= "" |= ""`, false},
		{`{foo="bar", bar!="baz"} |~ "" |= "" |~ ".*"`, false},
	}
	for _, tt := range tests {
		// Shadow the loop variable: subtests run in parallel and must not
		// share one iteration variable (pre-Go 1.22 semantics).
		tt := tt
		t.Run(tt.selector, func(t *testing.T) {
			t.Parallel()
			expr, err := ParseLogSelector(tt.selector)
			if err != nil {
				t.Fatalf("failed to parse log selector: %s", err)
			}
			f, err := expr.Filter()
			if err != nil {
				t.Fatalf("failed to get filter: %s", err)
			}
			require.Equal(t, tt.expectFilter, f != nil)
			// String() is expected to equal the input with spaces stripped.
			if expr.String() != strings.Replace(tt.selector, " ", "", -1) {
				t.Fatalf("error expected: %s got: %s", tt.selector, expr.String())
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 521
} | [
2830,
3393,
5224,
5877,
16041,
31777,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
78216,
1669,
3056,
1235,
341,
197,
197,
8925,
257,
914,
198,
197,
24952,
5632,
1807,
198,
197,
59403,
197,
197,
90,
63,
90,
7975,
0,
93,
1,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestServerAggregate is an onpar suite for server.Aggregate. It checks
// that valid requests return the calculator's numeric results (skipping
// unparsable keys/values), that requests missing a source ID, aggregation,
// or bucket width are rejected, that the route/algorithm/meta passed to
// the calculator match the request, and that a calculator error is
// propagated to the caller.
func TestServerAggregate(t *testing.T) {
	t.Parallel()
	o := onpar.New()
	defer o.Run(t)
	o.BeforeEach(func(t *testing.T) TS {
		mockCalc := newMockCalculator()
		return TS{
			T:        t,
			mockCalc: mockCalc,
			s:        server.New(mockCalc),
		}
	})
	o.Group("when the calculator does not return an error", func() {
		o.BeforeEach(func(t TS) TS {
			// Closing Err makes the mock report success; the result map
			// contains two valid entries plus an unparsable value ("2")
			// and an unparsable key ("invalid") that must be dropped.
			close(t.mockCalc.CalculateOutput.Err)
			t.mockCalc.CalculateOutput.FinalResult <- map[string][]byte{
				"0":       marshalFloat64(99),
				"1":       marshalFloat64(101),
				"2":       []byte("invalid"),
				"invalid": marshalFloat64(103),
			}
			return t
		})
		o.Spec("it uses the calculator and returns the results", func(t TS) {
			resp, err := t.s.Aggregate(context.Background(), &v1.AggregateInfo{
				Query: &v1.QueryInfo{
					Filter: &v1.AnalystFilter{
						SourceId: "some-id",
						Envelopes: &v1.AnalystFilter_Counter{
							Counter: &v1.CounterFilter{Name: "some-name"},
						},
					},
				},
				BucketWidthNs: 2,
			})
			Expect(t, err == nil).To(BeTrue())
			// Only the two parsable entries survive.
			Expect(t, resp.Results).To(HaveLen(2))
			Expect(t, resp.Results[0]).To(Equal(float64(99)))
			Expect(t, resp.Results[1]).To(Equal(float64(101)))
		})
		o.Spec("it returns an error if an ID is not given", func(t TS) {
			// Identical to the valid request but SourceId is omitted.
			_, err := t.s.Aggregate(context.Background(), &v1.AggregateInfo{
				Query: &v1.QueryInfo{
					Filter: &v1.AnalystFilter{
						Envelopes: &v1.AnalystFilter_Counter{
							Counter: &v1.CounterFilter{Name: "some-name"},
						},
					},
				},
				BucketWidthNs: 2,
			})
			Expect(t, err == nil).To(BeFalse())
		})
		o.Spec("it returns an error if an aggregation is not given", func(t TS) {
			// No Envelopes/Counter aggregation in the filter.
			_, err := t.s.Aggregate(context.Background(), &v1.AggregateInfo{
				BucketWidthNs: 2,
				Query: &v1.QueryInfo{
					Filter: &v1.AnalystFilter{
						SourceId: "some-id",
					},
				},
			})
			Expect(t, err == nil).To(BeFalse())
		})
		o.Spec("it returns an error if an bucket widtch is not given", func(t TS) {
			// BucketWidthNs omitted (zero value).
			_, err := t.s.Aggregate(context.Background(), &v1.AggregateInfo{
				Query: &v1.QueryInfo{
					Filter: &v1.AnalystFilter{
						SourceId: "some-id",
						Envelopes: &v1.AnalystFilter_Counter{
							Counter: &v1.CounterFilter{Name: "some-name"},
						},
					},
				},
			})
			Expect(t, err == nil).To(BeFalse())
		})
		o.Spec("it uses the expected info for the calculator", func(t TS) {
			t.s.Aggregate(context.Background(), &v1.AggregateInfo{Query: &v1.QueryInfo{
				Filter: &v1.AnalystFilter{
					SourceId: "id",
					TimeRange: &v1.TimeRange{
						Start: 99,
						End:   101,
					},
					Envelopes: &v1.AnalystFilter_Counter{
						Counter: &v1.CounterFilter{Name: "some-name"},
					},
				},
			},
				BucketWidthNs: 2,
			})
			// The mock records what Aggregate handed to the calculator.
			Expect(t, t.mockCalc.CalculateInput.Route).To(
				Chain(Receive(), Equal("id")),
			)
			Expect(t, t.mockCalc.CalculateInput.AlgName).To(
				Chain(Receive(), Equal("aggregation")),
			)
		})
		o.Spec("it includes the request in the meta", func(t TS) {
			info := &v1.AggregateInfo{Query: &v1.QueryInfo{
				Filter: &v1.AnalystFilter{
					SourceId: "id",
					TimeRange: &v1.TimeRange{
						Start: 99,
						End:   101,
					},
					Envelopes: &v1.AnalystFilter_Counter{
						Counter: &v1.CounterFilter{Name: "some-name"},
					},
				},
			},
				BucketWidthNs: 2,
			}
			t.s.Aggregate(context.Background(), info)
			// Meta must carry the proto-marshalled request verbatim.
			marshelled, err := proto.Marshal(info)
			Expect(t, err == nil).To(BeTrue())
			Expect(t, t.mockCalc.CalculateInput.Meta).To(
				Chain(Receive(), Equal(marshelled)),
			)
		})
	})
	o.Group("when the calculator returns an error", func() {
		o.BeforeEach(func(t TS) TS {
			// Make the mock fail; FinalResult is closed so no result arrives.
			t.mockCalc.CalculateOutput.Err <- fmt.Errorf("some-error")
			close(t.mockCalc.CalculateOutput.FinalResult)
			return t
		})
		o.Spec("it returns an error", func(t TS) {
			_, err := t.s.Aggregate(context.Background(), &v1.AggregateInfo{Query: &v1.QueryInfo{
				Filter: &v1.AnalystFilter{
					SourceId: "id",
					TimeRange: &v1.TimeRange{
						Start: 99,
						End:   101,
					},
					Envelopes: &v1.AnalystFilter_Counter{
						Counter: &v1.CounterFilter{Name: "some-name"},
					},
				},
			},
				BucketWidthNs: 2,
			})
			Expect(t, err == nil).To(BeFalse())
		})
	})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2030
} | [
2830,
3393,
5475,
64580,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
22229,
1669,
389,
1732,
7121,
741,
16867,
297,
16708,
1155,
692,
22229,
31153,
4854,
18552,
1155,
353,
8840,
836,
8,
22965,
341,
197,
77333,
47168,
1669,
501... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestInterpolate checks statement parameter interpolation: each "?"
// placeholder in the command is replaced by the SQL-literal rendering of
// the corresponding NamedValue (NULL for nil, quoted/escaped strings,
// timestamps, numbers), while "?" inside string literals is left alone.
func TestInterpolate(t *testing.T) {
	var testCases = []struct {
		name     string
		command  string
		expected string
		args     []driver.NamedValue
	}{
		{
			name:     "no parameters",
			command:  "select * from something",
			expected: "select * from something",
			args:     []driver.NamedValue{},
		},
		{
			name:     "nil value",
			command:  "select * from something where value = ?",
			expected: "select * from something where value = NULL",
			args:     []driver.NamedValue{{Value: nil}},
		},
		{
			name:     "time value",
			command:  "select * from something where value = ?",
			expected: "select * from something where value = '2000-02-16 10:12:30.000456789'",
			args:     []driver.NamedValue{{Value: time.Date(2000, 2, 16, 10, 12, 30, 456789, time.UTC)}},
		},
		{
			name:     "simple string",
			command:  "select * from something where value = ?",
			expected: "select * from something where value = 'taco'",
			args:     []driver.NamedValue{{Value: "taco"}},
		},
		{
			name:     "multiple values",
			command:  "select * from something where value = ? and otherVal = ?",
			expected: "select * from something where value = 'taco' and otherVal = 15.5",
			args:     []driver.NamedValue{{Value: "taco"}, {Value: 15.5}},
		},
		{
			// Single quotes are doubled per SQL escaping rules.
			name:     "strings with quotes",
			command:  "select * from something where value = ?",
			expected: "select * from something where value = 'it''s other''s'",
			args:     []driver.NamedValue{{Value: "it's other's"}},
		},
		{
			// Already-doubled quotes must not be escaped twice.
			name:     "strings with already escaped quotes",
			command:  "select * from something where value = ?",
			expected: "select * from something where value = 'it''s other''s'",
			args:     []driver.NamedValue{{Value: "it''s other''s"}},
		},
		{
			// A "?" inside a string literal is not a placeholder.
			name:     "with a param looking rune in a string",
			command:  "select * from something where value = ? and test = '?bad'",
			expected: "select * from something where value = 'replace' and test = '?bad'",
			args:     []driver.NamedValue{{Value: "replace"}},
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			stmt := testStatement(tc.command)
			result, err := stmt.interpolate(tc.args)
			if result != tc.expected {
				t.Errorf("Expected query to be %s but got %s", tc.expected, result)
			}
			if err != nil {
				t.Errorf("Received error from interpolate: %v", err)
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 939
} | [
2830,
3393,
3306,
45429,
1155,
353,
8840,
836,
8,
341,
2405,
1273,
37302,
284,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
45566,
220,
914,
198,
197,
42400,
914,
198,
197,
31215,
257,
3056,
12521,
57872,
1130,
198,
197,
59403,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestTracesReceiverStartConsume verifies that consumeLoop exits with
// context.Canceled once the receiver has been shut down: Shutdown invokes
// cancelConsumeLoop, which cancels the ctx the loop runs under.
func TestTracesReceiverStartConsume(t *testing.T) {
	c := kafkaTracesConsumer{
		nextConsumer:  consumertest.NewNop(),
		logger:        zap.NewNop(),
		consumerGroup: &testConsumerGroup{},
	}
	ctx, cancelFunc := context.WithCancel(context.Background())
	// Wire the loop's cancel func so Shutdown can stop it.
	c.cancelConsumeLoop = cancelFunc
	require.NoError(t, c.Shutdown(context.Background()))
	// ctx is already cancelled here, so the loop must return immediately.
	err := c.consumeLoop(ctx, &tracesConsumerGroupHandler{
		ready: make(chan bool),
	})
	assert.EqualError(t, err, context.Canceled.Error())
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 182
} | [
2830,
3393,
1282,
2434,
25436,
3479,
1109,
31323,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
67852,
1282,
2434,
29968,
515,
197,
28144,
29968,
25,
220,
4662,
83386,
7121,
45,
453,
3148,
197,
17060,
25,
286,
32978,
7121,
45,
453,
3148,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestEditorDeleteWord drives Editor.deleteWord over a table of cases:
// Text is the initial content, Start the caret offset, Selection the
// selected span relative to the caret, Delete the signed word count to
// remove (negative = backwards), Want the expected caret offset
// afterwards, and Result the expected remaining text. Several cases
// intentionally document behavior the author considers suboptimal (see
// inline comments).
func TestEditorDeleteWord(t *testing.T) {
	type Test struct {
		Text      string
		Start     int
		Selection int
		Delete    int
		Want      int
		Result    string
	}
	tests := []Test{
		// No text selected
		{"", 0, 0, 0, 0, ""},
		{"", 0, 0, -1, 0, ""},
		{"", 0, 0, 1, 0, ""},
		{"", 0, 0, -2, 0, ""},
		{"", 0, 0, 2, 0, ""},
		{"hello", 0, 0, -1, 0, "hello"},
		{"hello", 0, 0, 1, 0, ""},
		// Document (imho) incorrect behavior w.r.t. deleting spaces following
		// words.
		{"hello world", 0, 0, 1, 0, " world"}, // Should be "world", if you ask me.
		{"hello world", 0, 0, 2, 0, "world"}, // Should be "".
		{"hello ", 0, 0, 1, 0, " "}, // Should be "".
		{"hello world", 11, 0, -1, 6, "hello "}, // Should be "hello".
		{"hello world", 11, 0, -2, 5, "hello"}, // Should be "".
		{"hello ", 6, 0, -1, 0, ""}, // Correct result.
		{"hello world", 3, 0, 1, 3, "hel world"},
		{"hello world", 3, 0, -1, 0, "lo world"},
		{"hello world", 8, 0, -1, 6, "hello rld"},
		{"hello world", 8, 0, 1, 8, "hello wo"},
		{"hello world", 3, 0, 1, 3, "hel world"},
		{"hello world", 3, 0, 2, 3, "helworld"},
		{"hello world", 8, 0, 1, 8, "hello "},
		{"hello world", 8, 0, -1, 5, "hello world"},
		{"hello brave new world", 0, 0, 3, 0, " new world"},
		{"helléèçàô world", 3, 0, 1, 3, "hel world"}, // unicode char with length > 1 in deleted part
		// Add selected text.
		//
		// Several permutations must be tested:
		// - select from the left or right
		// - Delete + or -
		// - abs(Delete) == 1 or > 1
		//
		// "brave |" selected; caret at |
		{"hello there brave new world", 12, 6, 1, 12, "hello there new world"}, // #16
		{"hello there brave new world", 12, 6, 2, 12, "hello there world"}, // The two spaces after "there" are actually suboptimal, if you ask me. See also above cases.
		{"hello there brave new world", 12, 6, -1, 12, "hello there new world"},
		{"hello there brave new world", 12, 6, -2, 6, "hello new world"},
		{"hello there b®âve new world", 12, 6, 1, 12, "hello there new world"}, // unicode chars with length > 1 in selection
		{"hello there b®âve new world", 12, 6, 2, 12, "hello there world"}, // ditto
		{"hello there b®âve new world", 12, 6, -1, 12, "hello there new world"}, // ditto
		{"hello there b®âve new world", 12, 6, -2, 6, "hello new world"}, // ditto
		// "|brave " selected
		{"hello there brave new world", 18, -6, 1, 12, "hello there new world"}, // #20
		{"hello there brave new world", 18, -6, 2, 12, "hello there world"}, // ditto
		{"hello there brave new world", 18, -6, -1, 12, "hello there new world"},
		{"hello there brave new world", 18, -6, -2, 6, "hello new world"},
		{"hello there b®âve new world", 18, -6, 1, 12, "hello there new world"}, // unicode chars with length > 1 in selection
		// Random edge cases
		{"hello there brave new world", 12, 6, 99, 12, "hello there "},
		{"hello there brave new world", 18, -6, -99, 0, "new world"},
	}
	// setup builds a laid-out Editor containing t so caret movement and
	// word deletion operate on real line/rune positions.
	setup := func(t string) *Editor {
		e := new(Editor)
		gtx := layout.Context{
			Ops: new(op.Ops),
			Constraints: layout.Exact(image.Pt(100, 100)),
		}
		cache := text.NewCache(gofont.Collection())
		fontSize := unit.Px(10)
		font := text.Font{}
		e.SetText(t)
		e.Layout(gtx, cache, font, fontSize, nil)
		return e
	}
	for ii, tt := range tests {
		e := setup(tt.Text)
		e.MoveCaret(tt.Start, tt.Start)
		e.MoveCaret(0, tt.Selection)
		e.deleteWord(tt.Delete)
		if e.caret.start.ofs != tt.Want {
			t.Fatalf("[%d] deleteWord: bad caret position: got %d, want %d", ii, e.caret.start.ofs, tt.Want)
		}
		if e.Text() != tt.Result {
			t.Fatalf("[%d] deleteWord: invalid result: got %q, want %q", ii, e.Text(), tt.Result)
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1532
} | [
2830,
3393,
9410,
6435,
10879,
1155,
353,
8840,
836,
8,
341,
13158,
3393,
2036,
341,
197,
49635,
414,
914,
198,
197,
65999,
257,
526,
198,
197,
197,
11177,
526,
198,
197,
96672,
262,
526,
271,
197,
17300,
517,
256,
526,
198,
197,
56... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestPrintIngressClass checks the table-printer output for IngressClass
// objects: name, controller, the rendered parameters reference ("<none>"
// when absent, "kind/name" or "kind.apigroup/name" when present), and the
// human-readable age derived from the creation timestamp.
func TestPrintIngressClass(t *testing.T) {
	testCases := []struct {
		name         string
		ingressClass *networking.IngressClass
		expected     []metav1.TableRow
	}{{
		name: "example with params",
		ingressClass: &networking.IngressClass{
			ObjectMeta: metav1.ObjectMeta{
				Name:              "test1",
				CreationTimestamp: metav1.Time{Time: time.Now().AddDate(-10, 0, 0)},
			},
			Spec: networking.IngressClassSpec{
				Controller: "example.com/controller",
				Parameters: &networking.IngressClassParametersReference{Kind: "customgroup", Name: "example"},
			},
		},
		expected: []metav1.TableRow{{Cells: []interface{}{"test1", "example.com/controller", "customgroup/example", "10y"}}},
	}, {
		name: "example with params + API Group",
		ingressClass: &networking.IngressClass{
			ObjectMeta: metav1.ObjectMeta{
				Name:              "test1",
				CreationTimestamp: metav1.Time{Time: time.Now().AddDate(-10, 0, 0)},
			},
			Spec: networking.IngressClassSpec{
				Controller: "example.com/controller",
				Parameters: &networking.IngressClassParametersReference{
					APIGroup: utilpointer.StringPtr("example.com"),
					Kind:     "customgroup",
					Name:     "example",
				},
			},
		},
		expected: []metav1.TableRow{{Cells: []interface{}{"test1", "example.com/controller", "customgroup.example.com/example", "10y"}}},
	}, {
		name: "example without params",
		ingressClass: &networking.IngressClass{
			ObjectMeta: metav1.ObjectMeta{
				Name:              "test2",
				CreationTimestamp: metav1.Time{Time: time.Now().AddDate(-11, 0, 0)},
			},
			Spec: networking.IngressClassSpec{
				Controller: "example.com/controller2",
			},
		},
		expected: []metav1.TableRow{{Cells: []interface{}{"test2", "example.com/controller2", "<none>", "11y"}}},
	}}
	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			rows, err := printIngressClass(testCase.ingressClass, printers.GenerateOptions{})
			if err != nil {
				t.Fatalf("Error generating table rows for Ingress: %#v", err)
			}
			// Drop the embedded runtime object so DeepEqual compares cells only.
			for i := range rows {
				rows[i].Object.Object = nil
			}
			if !reflect.DeepEqual(testCase.expected, rows) {
				t.Errorf("mismatch: %s", diff.ObjectReflectDiff(testCase.expected, rows))
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 918
} | [
2830,
3393,
8994,
641,
2483,
1957,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
260,
914,
198,
197,
197,
287,
673,
1957,
353,
17511,
287,
5337,
2483,
1957,
198,
197,
42400,
257,
3056,
4059,
402,
16,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestNewInputDone(t *testing.T) {
config := common.MapStr{
"connection_string": "Endpoint=sb://something",
"eventhub": "insights-operational-logs",
"storage_account": "someaccount",
"storage_account_key": "secret",
}
inputtest.AssertNotStartedInputCanBeDone(t, NewInput, &config)
} | explode_data.jsonl/53936 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 125
} | [
2830,
3393,
3564,
2505,
17453,
1155,
353,
8840,
836,
8,
341,
25873,
1669,
4185,
10104,
2580,
515,
197,
197,
1,
7742,
3904,
788,
256,
330,
27380,
14149,
65,
1110,
33331,
756,
197,
197,
1,
3087,
26682,
788,
310,
330,
1330,
2796,
58555,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestScan(t *testing.T) {
table := map[int64]string{
8005001000: "8.5.1",
4007000000: "4.7.0",
23000000: "0.23.0",
14000000100: "14.0.0.100",
0: "0.0.0",
1001: "0.0.1.1",
1000000: "0.1.0",
1000000000: "1.0.0",
14000000000100: "14000.0.0.100",
}
var version Version
for input, expected := range table {
err := version.Scan(input)
if err != nil {
t.Errorf("failed to scan %d: %v", input, err)
}
got := version.String()
if expected != got {
t.Errorf("failed to scan %d: expected %q, got %q", input, expected, got)
}
}
} | explode_data.jsonl/74329 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 318
} | [
2830,
3393,
26570,
1155,
353,
8840,
836,
8,
341,
26481,
1669,
2415,
18640,
21,
19,
30953,
515,
197,
197,
23,
15,
15,
20,
15,
15,
16,
15,
15,
15,
25,
257,
330,
23,
13,
20,
13,
16,
756,
197,
197,
19,
15,
15,
22,
15,
15,
15,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestCustomMenuGetsLogOutAdded checks that enableLogOut appends a
// "Log Out" entry to a user-supplied UI menu when the Jaeger spec uses
// OAuth-proxy ingress security, while leaving the existing custom entry
// first and intact.
func TestCustomMenuGetsLogOutAdded(t *testing.T) {
	// prepare: one pre-existing custom menu entry and an OAuth-proxy spec.
	internalLink := map[string]interface{}{
		"label": "Some internal links",
		"items": []interface{}{
			map[string]interface{}{
				"label": "The internal link",
				"url":   "http://example.com/internal",
			},
		},
	}
	uiOpts := map[string]interface{}{
		"menu": []interface{}{internalLink},
	}
	spec := &v1.JaegerSpec{
		Ingress: v1.JaegerIngressSpec{
			Security: v1.IngressSecurityOAuthProxy,
		},
	}
	// test
	enableLogOut(uiOpts, spec)
	// verify: original entry preserved, Log Out appended last.
	expected := []interface{}{
		internalLink,
		map[string]interface{}{
			"label":        "Log Out",
			"url":          "/oauth/sign_in",
			"anchorTarget": "_self",
		},
	}
	assert.Len(t, uiOpts["menu"], 2)
	assert.Equal(t, expected, uiOpts["menu"])
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 333
} | [
2830,
3393,
10268,
3514,
49358,
2201,
2662,
19337,
1155,
353,
8840,
836,
8,
341,
197,
322,
10549,
198,
33343,
3939,
1669,
2415,
14032,
31344,
67066,
197,
197,
92667,
788,
330,
8373,
5306,
7746,
756,
197,
197,
1,
3615,
788,
3056,
4970,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCollectBool(t *testing.T) {
v := &Value{data: []bool{bool(true), bool(true), bool(true), bool(true), bool(true), bool(true)}}
collected := v.CollectBool(func(index int, val bool) interface{} {
return index
})
collectedArr := collected.MustInterSlice()
if assert.Equal(t, 6, len(collectedArr)) {
assert.Equal(t, collectedArr[0], 0)
assert.Equal(t, collectedArr[1], 1)
assert.Equal(t, collectedArr[2], 2)
assert.Equal(t, collectedArr[3], 3)
assert.Equal(t, collectedArr[4], 4)
assert.Equal(t, collectedArr[5], 5)
}
} | explode_data.jsonl/23407 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 229
} | [
2830,
3393,
47504,
11233,
1155,
353,
8840,
836,
8,
1476,
5195,
1669,
609,
1130,
90,
691,
25,
3056,
2641,
90,
2641,
3715,
701,
1807,
3715,
701,
1807,
3715,
701,
1807,
3715,
701,
1807,
3715,
701,
1807,
3715,
9139,
630,
46640,
2209,
1669... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestUnitChannelHandlerWriteLateOnChannel checks that a measurement
// delivered when nobody is reading the handler's channel is counted in
// lateWrites instead of blocking the writer forever.
func TestUnitChannelHandlerWriteLateOnChannel(t *testing.T) {
	handler := &channelHandler{
		ch: make(chan modelx.Measurement),
	}
	var waitgroup sync.WaitGroup
	waitgroup.Add(1)
	go func() {
		// Nothing ever reads handler.ch; after the delay this write is
		// treated as late by OnMeasurement.
		time.Sleep(1 * time.Second)
		handler.OnMeasurement(modelx.Measurement{})
		waitgroup.Done()
	}()
	waitgroup.Wait()
	if handler.lateWrites != 1 {
		t.Fatal("unexpected lateWrites value")
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 145
} | [
2830,
3393,
4562,
9629,
3050,
7985,
61457,
1925,
9629,
1155,
353,
8840,
836,
8,
341,
53326,
1669,
609,
10119,
3050,
515,
197,
23049,
25,
1281,
35190,
1614,
87,
53447,
24359,
1326,
197,
532,
2405,
3783,
4074,
12811,
28384,
2808,
198,
487... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestCandles_FromLocalDB reads candlestick (kline) data from a local
// sqlite backend ("backend.db" in the working directory), seeding the DB
// via constructLocalBackendDB when the coarsest (1440-minute) table is
// empty, then checks that every kline frequency up to one day yields data
// and that REST conversion respects the row limit.
func TestCandles_FromLocalDB(t *testing.T) {
	dbDir, err := os.Getwd()
	require.Nil(t, err)
	orm, err := NewSqlite3ORM(false, dbDir, "backend.db", nil)
	require.Nil(t, err)
	product := types.TestTokenPair
	limit := 10
	maxKlines, err := types.NewKlinesFactory("kline_m1440")
	require.Nil(t, err)
	err = orm.GetLatestKlinesByProduct(product, limit, time.Now().Unix(), maxKlines)
	require.Nil(t, err)
	maxIklines := types.ToIKlinesArray(maxKlines, time.Now().Unix(), true)
	// No day-level klines means the DB is empty: seed it first.
	if len(maxIklines) == 0 {
		err := constructLocalBackendDB(orm)
		require.Nil(t, err)
	}
	m := types.GetAllKlineMap()
	for freq, tname := range m {
		// Skip frequencies coarser than one day (1440 minutes).
		if freq > 1440*60 {
			continue
		}
		klines, _ := types.NewKlinesFactory(tname)
		e := orm.GetLatestKlinesByProduct(product, limit, time.Now().Unix(), klines)
		assert.True(t, e == nil)
		iklines := types.ToIKlinesArray(klines, time.Now().Unix(), true)
		assert.True(t, len(iklines) > 0)
		//for _, k := range iklines {
		//	fmt.Printf("%+v\n", k.PrettyTimeString())
		//}
		restDatas := types.ToRestfulData(&iklines, limit)
		assert.True(t, len(restDatas) <= limit)
	}
	maxTS := orm.getDealsMaxTimestamp()
	assert.True(t, maxTS > 0)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 487
} | [
2830,
3393,
34,
20125,
53157,
7319,
3506,
1155,
353,
8840,
836,
8,
341,
20939,
6184,
11,
1848,
1669,
2643,
2234,
6377,
741,
17957,
59678,
1155,
11,
1848,
340,
197,
493,
11,
1848,
1669,
1532,
8269,
632,
18,
4365,
3576,
11,
2927,
6184,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestDeepDependencyOutputWithMock is an integration test for terragrunt
// mock outputs in nested (deep) dependencies; see the inline note for the
// regression it guards against.
func TestDeepDependencyOutputWithMock(t *testing.T) {
	// Test that the terraform command flows through for mock output retrieval to deeper dependencies. Previously the
	// terraform command was being overwritten, so by the time the deep dependency retrieval runs, it was replaced with
	// "output" instead of the original one.
	t.Parallel()
	cleanupTerraformFolder(t, TEST_FIXTURE_GET_OUTPUT)
	// Work on a throwaway copy of the fixture tree.
	tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_GET_OUTPUT)
	rootPath := filepath.Join(tmpEnvPath, TEST_FIXTURE_GET_OUTPUT, "nested-mocks", "live")
	// Since we haven't applied anything, this should only succeed if mock outputs are used.
	runTerragrunt(t, fmt.Sprintf("terragrunt validate --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 237
} | [
2830,
3393,
33464,
36387,
5097,
2354,
11571,
1155,
353,
8840,
836,
8,
341,
197,
322,
3393,
429,
279,
59561,
627,
3210,
27455,
1526,
369,
7860,
2550,
56370,
311,
19117,
19543,
13,
58687,
279,
198,
197,
322,
59561,
627,
3210,
572,
1660,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUpdateMapLastUpdateTime(t *testing.T) {
ctx := context.Background()
c, srv := newMock(t)
wantReq := &pb.CommitRequest{
Database: "projects/projectID/databases/(default)",
Writes: []*pb.Write{{
Operation: &pb.Write_Update{
Update: &pb.Document{
Name: "projects/projectID/databases/(default)/documents/C/d",
Fields: map[string]*pb.Value{"a": intval(1)},
}},
UpdateMask: &pb.DocumentMask{FieldPaths: []string{"a"}},
CurrentDocument: &pb.Precondition{
ConditionType: &pb.Precondition_UpdateTime{aTimestamp2},
},
}},
}
srv.addRPC(wantReq, commitResponseForSet)
wr, err := c.Collection("C").Doc("d").UpdateMap(ctx, map[string]interface{}{"a": 1}, LastUpdateTime(aTime2))
if err != nil {
t.Fatal(err)
}
if !testEqual(wr, writeResultForSet) {
t.Errorf("got %v, want %v", wr, writeResultForSet)
}
} | explode_data.jsonl/15816 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 357
} | [
2830,
3393,
4289,
2227,
5842,
64299,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
2266,
19047,
741,
1444,
11,
43578,
1669,
501,
11571,
1155,
692,
50780,
27234,
1669,
609,
16650,
53036,
1900,
515,
197,
197,
5988,
25,
330,
17161,
40118,
915... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestWatchEtcdState(t *testing.T) {
codec := latest.Codec
type T struct {
Type watch.EventType
Endpoints []string
}
testCases := map[string]struct {
Initial map[string]EtcdResponseWithError
Responses []*etcd.Response
From uint64
Expected []*T
}{
"from not found": {
Initial: map[string]EtcdResponseWithError{},
Responses: []*etcd.Response{
{
Action: "create",
Node: &etcd.Node{
Value: string(runtime.EncodeOrDie(codec, &api.Endpoints{JSONBase: api.JSONBase{ID: "foo"}, Endpoints: []string{}})),
},
},
},
From: 1,
Expected: []*T{
{watch.Added, nil},
},
},
"from version 1": {
Responses: []*etcd.Response{
{
Action: "compareAndSwap",
Node: &etcd.Node{
Value: string(runtime.EncodeOrDie(codec, &api.Endpoints{JSONBase: api.JSONBase{ID: "foo"}, Endpoints: []string{"127.0.0.1:9000"}})),
CreatedIndex: 1,
ModifiedIndex: 2,
},
PrevNode: &etcd.Node{
Value: string(runtime.EncodeOrDie(codec, &api.Endpoints{JSONBase: api.JSONBase{ID: "foo"}, Endpoints: []string{}})),
CreatedIndex: 1,
ModifiedIndex: 1,
},
},
},
From: 1,
Expected: []*T{
{watch.Modified, []string{"127.0.0.1:9000"}},
},
},
"from initial state": {
Initial: map[string]EtcdResponseWithError{
"/somekey/foo": {
R: &etcd.Response{
Action: "get",
Node: &etcd.Node{
Value: string(runtime.EncodeOrDie(codec, &api.Endpoints{JSONBase: api.JSONBase{ID: "foo"}, Endpoints: []string{}})),
CreatedIndex: 1,
ModifiedIndex: 1,
},
EtcdIndex: 1,
},
},
},
Responses: []*etcd.Response{
nil,
{
Action: "compareAndSwap",
Node: &etcd.Node{
Value: string(runtime.EncodeOrDie(codec, &api.Endpoints{JSONBase: api.JSONBase{ID: "foo"}, Endpoints: []string{"127.0.0.1:9000"}})),
CreatedIndex: 1,
ModifiedIndex: 2,
},
PrevNode: &etcd.Node{
Value: string(runtime.EncodeOrDie(codec, &api.Endpoints{JSONBase: api.JSONBase{ID: "foo"}, Endpoints: []string{}})),
CreatedIndex: 1,
ModifiedIndex: 1,
},
},
},
Expected: []*T{
{watch.Added, nil},
{watch.Modified, []string{"127.0.0.1:9000"}},
},
},
}
for k, testCase := range testCases {
fakeClient := NewFakeEtcdClient(t)
for key, value := range testCase.Initial {
fakeClient.Data[key] = value
}
h := EtcdHelper{fakeClient, codec, versioner}
watching := h.Watch("/somekey/foo", testCase.From)
fakeClient.WaitForWatchCompletion()
t.Logf("Testing %v", k)
for i := range testCase.Responses {
if testCase.Responses[i] != nil {
fakeClient.WatchResponse <- testCase.Responses[i]
}
event := <-watching.ResultChan()
if e, a := testCase.Expected[i].Type, event.Type; e != a {
t.Errorf("%s: expected type %v, got %v", k, e, a)
break
}
if e, a := testCase.Expected[i].Endpoints, event.Object.(*api.Endpoints).Endpoints; !reflect.DeepEqual(e, a) {
t.Errorf("%s: expected type %v, got %v", k, e, a)
break
}
}
watching.Stop()
}
} | explode_data.jsonl/40979 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1517
} | [
2830,
3393,
14247,
31860,
4385,
1397,
1155,
353,
8840,
836,
8,
341,
43343,
66,
1669,
5535,
20274,
66,
198,
13158,
350,
2036,
341,
197,
27725,
414,
3736,
89879,
198,
197,
38407,
7706,
3056,
917,
198,
197,
532,
18185,
37302,
1669,
2415,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestConfig_SliceComment(t *testing.T) {
t.Skipf("Skipping until #3642 is resolved")
c := NewConfig()
require.NoError(t, c.LoadConfig("./testdata/slice_comment.toml"))
require.Len(t, c.Outputs, 1)
output, ok := c.Outputs[0].Output.(*MockupOuputPlugin)
require.True(t, ok)
require.Equal(t, []string{"test"}, output.Scopes)
} | explode_data.jsonl/72174 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 135
} | [
2830,
3393,
2648,
1098,
4754,
10677,
1155,
353,
8840,
836,
8,
341,
3244,
57776,
69,
445,
85945,
3080,
671,
18,
21,
19,
17,
374,
19673,
5130,
1444,
1669,
1532,
2648,
741,
17957,
35699,
1155,
11,
272,
13969,
2648,
13988,
92425,
2687,
47... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAchievements(t *testing.T) {
_, err := duoClient.GetAchievements(1234, "de", "ru", true, false, false, false)
if err != nil {
t.Error(err)
}
} | explode_data.jsonl/72540 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 65
} | [
2830,
3393,
71585,
12477,
1155,
353,
8840,
836,
8,
341,
197,
6878,
1848,
1669,
33721,
2959,
2234,
71585,
12477,
7,
16,
17,
18,
19,
11,
330,
450,
497,
330,
2672,
497,
830,
11,
895,
11,
895,
11,
895,
340,
743,
1848,
961,
2092,
341,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestRouteStatusPropagation(t *testing.T) {
svc := &Service{}
svc.Status.PropagateRouteStatus(RouteStatus{
Domain: "example.com",
Traffic: []TrafficTarget{{
Percent: 100,
RevisionName: "newstuff",
}, {
Percent: 0,
RevisionName: "oldstuff",
}},
})
want := ServiceStatus{
Domain: "example.com",
Traffic: []TrafficTarget{{
Percent: 100,
RevisionName: "newstuff",
}, {
Percent: 0,
RevisionName: "oldstuff",
}},
}
if diff := cmp.Diff(want, svc.Status); diff != "" {
t.Errorf("unexpected ServiceStatus (-want +got): %s", diff)
}
} | explode_data.jsonl/17374 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 262
} | [
2830,
3393,
4899,
2522,
35172,
1155,
353,
8840,
836,
8,
341,
1903,
7362,
1669,
609,
1860,
16094,
1903,
7362,
10538,
42483,
46836,
4899,
2522,
94945,
2522,
515,
197,
10957,
3121,
25,
330,
8687,
905,
756,
197,
10261,
956,
20615,
25,
3056,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestTransformResolveImportedTypes(t *testing.T) {
schema := []byte(`
syntax = "proto3";
package test;
import "google/protobuf/timestamp.proto";
message ErrorStatus {
google.protobuf.Timestamp time = 1;
}
`)
input := new(bytes.Buffer)
input.Write(schema)
output := new(bytes.Buffer)
transformer := proto2gql.NewTransformer(output)
transformer.Import("google/protobuf/timestamp.proto", "https://raw.githubusercontent.com/google/protobuf/master/src/google/protobuf/timestamp.proto")
if err := transformer.Transform(input); err != nil {
t.Fatal(err)
}
expected := `
type TestErrorStatus {
time: GoogleProtobufTimestamp
}
type GoogleProtobufTimestamp {
seconds: Int
nanos: Int
}
`
expected = strings.TrimSpace(expected)
actual := strings.TrimSpace(output.String())
if expected != actual {
t.Fatalf("Expected %s to equal to %s", expected, actual)
}
} | explode_data.jsonl/2074 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 329
} | [
2830,
3393,
8963,
56808,
11511,
291,
4173,
1155,
353,
8840,
836,
8,
341,
1903,
3416,
1669,
3056,
3782,
61528,
197,
1903,
13662,
284,
330,
15110,
18,
876,
197,
197,
1722,
1273,
401,
197,
21918,
330,
17485,
14,
32921,
5523,
4702,
57322,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestLogSamplingRules(t *testing.T) {
assert := assert.New(t)
tp := new(testLogger)
os.Setenv("DD_TRACE_SAMPLING_RULES", `[{"service": "some.service", "sample_rate": 0.234}, {"service": "other.service"}, {"service": "last.service", "sample_rate": 0.56}, {"odd": "pairs"}, {"sample_rate": 9.10}]`)
defer os.Unsetenv("DD_TRACE_SAMPLING_RULES")
_, _, _, stop := startTestTracer(t, WithLogger(tp))
defer stop()
assert.Len(tp.Lines(), 2)
assert.Contains(tp.Lines()[0], "WARN: at index 4: ignoring rule {Service: Name: Rate:9.10}: rate is out of [0.0, 1.0] range")
assert.Regexp(`Datadog Tracer v[0-9]+\.[0-9]+\.[0-9]+ WARN: DIAGNOSTICS Error\(s\) parsing DD_TRACE_SAMPLING_RULES: found errors:\n\tat index 1: rate not provided\n\tat index 3: rate not provided$`, tp.Lines()[1])
} | explode_data.jsonl/64650 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 327
} | [
2830,
3393,
2201,
98622,
26008,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
340,
73423,
1669,
501,
8623,
7395,
340,
25078,
4202,
3160,
445,
4103,
24238,
85144,
1718,
50495,
50,
497,
77644,
4913,
7936,
788,
330,
14689,
57... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestStorage_Store(t *testing.T) {
s := NewStorage("/tmp/chunk/1.chunk", "/tmp/index")
err := s.Open()
defer s.Close()
if err != nil {
t.Fatal(err)
}
bts, err := ioutil.ReadFile("testdata/test.txt")
if err != nil {
t.Fatal(err)
}
err = s.Store("test.txt", bts, FdNullFlags)
if err != nil {
t.Fatal(err)
}
} | explode_data.jsonl/77474 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 155
} | [
2830,
3393,
5793,
92684,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
1532,
5793,
4283,
5173,
21284,
3122,
14,
16,
47806,
497,
3521,
5173,
9022,
1138,
9859,
1669,
274,
12953,
741,
16867,
274,
10421,
741,
743,
1848,
961,
2092,
341,
197,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestDefaultFactory_NewPolicyChecker(t *testing.T) {
// Ensure defaultFactory implements policy.PolicyCheckerFactory
var pcf policy.PolicyCheckerFactory
pcf = &defaultFactory{}
var pc policy.PolicyChecker
// Check we can obtain a new policyChecker
pc = pcf.NewPolicyChecker()
assert.NotNil(t, pc)
} | explode_data.jsonl/44683 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 98
} | [
2830,
3393,
3675,
4153,
39582,
13825,
35188,
1155,
353,
8840,
836,
8,
341,
197,
322,
29279,
1638,
4153,
5169,
4842,
1069,
8018,
35188,
4153,
198,
2405,
281,
9792,
4842,
1069,
8018,
35188,
4153,
198,
3223,
9792,
284,
609,
2258,
4153,
160... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBasicEmbedded(t *testing.T) {
type Foo struct {
A int
}
type Bar struct {
Foo // `db:""` is implied for an embedded struct
B int
C int `db:"-"`
}
type Baz struct {
A int
Bar `db:"Bar"`
}
m := NewMapperFunc("db", func(s string) string { return s })
z := Baz{}
z.A = 1
z.B = 2
z.C = 4
z.Bar.Foo.A = 3
zv := reflect.ValueOf(z)
fields := m.TypeMap(reflect.TypeOf(z))
if len(fields.Index) != 5 {
t.Errorf("Expecting 5 fields")
}
// for _, fi := range fields.Index {
// log.Println(fi)
// }
v := m.FieldByName(zv, "A")
if ival(v) != z.A {
t.Errorf("Expecting %d, got %d", z.A, ival(v))
}
v = m.FieldByName(zv, "Bar.B")
if ival(v) != z.Bar.B {
t.Errorf("Expecting %d, got %d", z.Bar.B, ival(v))
}
v = m.FieldByName(zv, "Bar.A")
if ival(v) != z.Bar.Foo.A {
t.Errorf("Expecting %d, got %d", z.Bar.Foo.A, ival(v))
}
v = m.FieldByName(zv, "Bar.C")
if _, ok := v.Interface().(int); ok {
t.Errorf("Expecting Bar.C to not exist")
}
fi := fields.GetByPath("Bar.C")
if fi != nil {
t.Errorf("Bar.C should not exist")
}
} | explode_data.jsonl/59094 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 532
} | [
2830,
3393,
15944,
83466,
1155,
353,
8840,
836,
8,
341,
13158,
33428,
2036,
341,
197,
22985,
526,
198,
197,
630,
13158,
4716,
2036,
341,
197,
12727,
2624,
442,
1565,
1999,
2974,
39917,
374,
6131,
369,
458,
22864,
2036,
198,
197,
12791,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestStdlib(t *testing.T) {
testenv.MustHaveGoBuild(t)
start = time.Now()
walkDirs(t, filepath.Join(runtime.GOROOT(), "src"))
if testing.Verbose() {
fmt.Println(pkgCount, "packages typechecked in", time.Since(start))
}
} | explode_data.jsonl/53275 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
22748,
2740,
1155,
353,
8840,
836,
8,
341,
18185,
3160,
50463,
12116,
10850,
11066,
1155,
692,
21375,
284,
882,
13244,
741,
6692,
1692,
97384,
1155,
11,
26054,
22363,
89467,
1224,
868,
53837,
1507,
330,
3548,
5455,
743,
7497,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestWSUpgradeConnDeadline(t *testing.T) {
opts := testWSOptions()
opts.Websocket.HandshakeTimeout = time.Second
s := &Server{opts: opts}
rw := &testResponseWriter{}
req := testWSCreateValidReq()
res, err := s.wsUpgrade(rw, req)
if res == nil || err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if rw.conn.isClosed {
t.Fatal("Connection should NOT have been closed")
}
if !rw.conn.deadlineCleared {
t.Fatal("Connection deadline should have been cleared after handshake")
}
} | explode_data.jsonl/42707 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 188
} | [
2830,
3393,
7433,
43861,
9701,
83593,
1155,
353,
8840,
836,
8,
341,
64734,
1669,
1273,
7433,
3798,
741,
64734,
6473,
9556,
35308,
29661,
7636,
284,
882,
32435,
198,
1903,
1669,
609,
5475,
90,
10518,
25,
12185,
532,
7000,
86,
1669,
609,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestPtrToGC(t *testing.T) {
type T *uintptr
tt := TypeOf(T(nil))
pt := PtrTo(tt)
const n = 100
var x []interface{}
for i := 0; i < n; i++ {
v := New(pt)
p := new(*uintptr)
*p = new(uintptr)
**p = uintptr(i)
v.Elem().Set(ValueOf(p).Convert(pt))
x = append(x, v.Interface())
}
runtime.GC()
for i, xi := range x {
k := ValueOf(xi).Elem().Elem().Elem().Interface().(uintptr)
if k != uintptr(i) {
t.Errorf("lost x[%d] = %d, want %d", i, k, i)
}
}
} | explode_data.jsonl/29572 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 238
} | [
2830,
3393,
5348,
1249,
22863,
1155,
353,
8840,
836,
8,
341,
13158,
350,
353,
51380,
198,
3244,
83,
1669,
3990,
2124,
4140,
27907,
1171,
60796,
1669,
46409,
1249,
47152,
340,
4777,
308,
284,
220,
16,
15,
15,
198,
2405,
856,
3056,
4970... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestMenuParams(t *testing.T) {
b := newTestSitesBuilder(t).WithConfigFile("toml", `
[[menus.main]]
identifier = "contact"
title = "Contact Us"
url = "mailto:noreply@example.com"
weight = 300
[menus.main.params]
foo = "foo_config"
key2 = "key2_config"
camelCase = "camelCase_config"
`)
b.WithTemplatesAdded("index.html", `
Main: {{ len .Site.Menus.main }}
{{ range .Site.Menus.main }}
foo: {{ .Params.foo }}
key2: {{ .Params.KEy2 }}
camelCase: {{ .Params.camelcase }}
{{ end }}
`)
b.WithContent("_index.md", `
---
title: "Home"
menu:
main:
weight: 10
params:
foo: "foo_content"
key2: "key2_content"
camelCase: "camelCase_content"
---
`)
b.Build(BuildCfg{})
b.AssertFileContent("public/index.html", `
Main: 2
foo: foo_content
key2: key2_content
camelCase: camelCase_content
foo: foo_config
key2: key2_config
camelCase: camelCase_config
`)
} | explode_data.jsonl/51846 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 374
} | [
2830,
3393,
3514,
4870,
1155,
353,
8840,
836,
8,
341,
2233,
1669,
501,
2271,
93690,
3297,
1155,
568,
2354,
2648,
1703,
445,
37401,
75,
497,
22074,
15505,
57627,
8880,
14288,
15909,
284,
330,
6287,
698,
2102,
284,
330,
8732,
3985,
698,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGC_TrackDeletedLayers_DoesNothingIfTriggerDisabled(t *testing.T) {
require.NoError(t, testutil.TruncateAllTables(suite.db))
enable, err := testutil.GCTrackDeletedLayersTrigger.Disable(suite.db)
require.NoError(t, err)
defer enable()
// disable other triggers that also insert on gc_blob_review_queue so that they don't interfere with this test
enable, err = testutil.GCTrackBlobUploadsTrigger.Disable(suite.db)
require.NoError(t, err)
defer enable()
// create repo
r := randomRepository(t)
rs := datastore.NewRepositoryStore(suite.db)
r, err = rs.CreateByPath(suite.ctx, r.Path)
require.NoError(t, err)
// create layer blob
bs := datastore.NewBlobStore(suite.db)
b := randomBlob(t)
err = bs.Create(suite.ctx, b)
require.NoError(t, err)
err = rs.LinkBlob(suite.ctx, r, b.Digest)
require.NoError(t, err)
// create manifest
ms := datastore.NewManifestStore(suite.db)
m := randomManifest(t, r, nil)
err = ms.Create(suite.ctx, m)
require.NoError(t, err)
// associate layer with manifest
err = ms.AssociateLayerBlob(suite.ctx, m, b)
require.NoError(t, err)
// dissociate layer blob
err = ms.DissociateLayerBlob(suite.ctx, m, b)
require.NoError(t, err)
// check that no review records were created
brs := datastore.NewGCBlobTaskStore(suite.db)
count, err := brs.Count(suite.ctx)
require.NoError(t, err)
require.Zero(t, count)
} | explode_data.jsonl/48569 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 529
} | [
2830,
3393,
22863,
21038,
473,
26039,
40235,
1557,
7072,
23780,
2679,
17939,
25907,
1155,
353,
8840,
836,
8,
341,
17957,
35699,
1155,
11,
1273,
1314,
8240,
26900,
2403,
21670,
89516,
7076,
4390,
197,
12552,
11,
1848,
1669,
1273,
1314,
122... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetPod(t *testing.T) {
ns := api.NamespaceDefault
c := &testClient{
Request: testRequest{Method: "GET", Path: testapi.Default.ResourcePath("pods", ns, "foo"), Query: buildQueryValues(nil)},
Response: Response{
StatusCode: http.StatusOK,
Body: &api.Pod{
Status: api.PodStatus{
Phase: api.PodRunning,
},
ObjectMeta: api.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
"name": "baz",
},
},
},
},
}
receivedPod, err := c.Setup(t).Pods(ns).Get("foo")
c.Validate(t, receivedPod, err)
} | explode_data.jsonl/36093 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 249
} | [
2830,
3393,
1949,
23527,
1155,
353,
8840,
836,
8,
341,
84041,
1669,
6330,
46011,
3675,
198,
1444,
1669,
609,
1944,
2959,
515,
197,
73806,
25,
1273,
1900,
90,
3523,
25,
330,
3806,
497,
7933,
25,
1273,
2068,
13275,
20766,
1820,
445,
79,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestManifestMetadata(t *testing.T) {
files := [][2]string{
{"/.manifest", `{
"metadata": {
"foo": {
"version": "1.0.0"
}
}
}`},
}
buf := archive.MustWriteTarGz(files)
bundle, err := NewReader(buf).Read()
if err != nil {
t.Fatal(err)
}
if bundle.Manifest.Metadata["foo"] == nil {
t.Fatal("Unexpected nil metadata key")
}
data, ok := bundle.Manifest.Metadata["foo"].(map[string]interface{})
if !ok {
t.Fatal("Unexpected structure in metadata")
}
if data["version"] != "1.0.0" {
t.Fatalf("Unexpected metadata value: %v", data["version"])
}
} | explode_data.jsonl/55367 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 259
} | [
2830,
3393,
38495,
14610,
1155,
353,
8840,
836,
8,
341,
74075,
1669,
508,
1457,
17,
30953,
515,
197,
197,
90,
3115,
13,
42315,
497,
1565,
515,
298,
197,
1,
17637,
788,
341,
571,
197,
1,
7975,
788,
341,
464,
197,
1,
4366,
788,
330,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestTrigger_SkipCI(t *testing.T) {
triggerer := New(
nil,
nil,
nil,
nil,
nil,
nil,
nil,
nil,
nil,
nil,
nil,
)
dummyHookSkip := *dummyHook
dummyHookSkip.Message = "foo [CI SKIP] bar"
triggerer.Trigger(noContext, dummyRepo, &dummyHookSkip)
} | explode_data.jsonl/26992 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 138
} | [
2830,
3393,
17939,
1098,
13389,
11237,
1155,
353,
8840,
836,
8,
341,
83228,
261,
1669,
1532,
1006,
197,
84131,
345,
197,
84131,
345,
197,
84131,
345,
197,
84131,
345,
197,
84131,
345,
197,
84131,
345,
197,
84131,
345,
197,
84131,
345,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCreateOCIConfig(t *testing.T) {
assert := assert.New(t)
tmpdir, err := ioutil.TempDir(testDir, "")
assert.NoError(err)
defer os.RemoveAll(tmpdir)
bundleDir := filepath.Join(tmpdir, "bundle")
err = createOCIConfig(bundleDir)
// ENOENT
assert.Error(err)
err = os.MkdirAll(bundleDir, testDirMode)
assert.NoError(err)
err = createOCIConfig(bundleDir)
assert.NoError(err)
specFile := filepath.Join(bundleDir, specConfig)
assert.True(katautils.FileExists(specFile))
} | explode_data.jsonl/52189 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 198
} | [
2830,
3393,
4021,
63983,
2648,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
692,
20082,
3741,
11,
1848,
1669,
43144,
65009,
6184,
8623,
6184,
11,
14676,
6948,
35699,
3964,
340,
16867,
2643,
84427,
10368,
3741,
692,
2233,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServerFlavorList(t *testing.T) {
setup()
defer teardown()
mux.HandleFunc(testlib.CloudServerURL(flavorPath), func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, http.MethodGet, r.Method)
resp := `
[
{
"_id": "5d7f58903c4c0127da9896ae",
"name": "1c_1g"
},
{
"_id": "5d7f58903c4c0127da9896b5",
"name": "2c_4g"
}
]
`
_, _ = fmt.Fprint(w, resp)
})
flavors, err := client.Server.ListFlavors(ctx)
require.NoError(t, err)
assert.Equal(t, "1c_1g", flavors[0].Name)
} | explode_data.jsonl/35480 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 278
} | [
2830,
3393,
5475,
3882,
3292,
852,
1155,
353,
8840,
836,
8,
341,
84571,
741,
16867,
49304,
741,
2109,
2200,
63623,
8623,
2740,
94492,
5475,
3144,
49747,
3292,
1820,
701,
2915,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
341,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestProductDb_Get(t *testing.T) {
setUp()
defer Db.Close()
productDb := db.NewProductDb(Db)
product, err := productDb.Get("abc")
require.Nil(t, err)
require.Equal(t, "Product Test", product.GetName())
require.Equal(t, 0.0, product.GetPrice())
require.Equal(t, "disabled", product.GetStatus())
} | explode_data.jsonl/58345 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 121
} | [
2830,
3393,
4816,
7994,
13614,
1155,
353,
8840,
836,
8,
341,
8196,
2324,
741,
16867,
11988,
10421,
741,
69288,
7994,
1669,
2927,
7121,
4816,
7994,
52169,
340,
69288,
11,
1848,
1669,
1985,
7994,
2234,
445,
13683,
5130,
17957,
59678,
1155,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParseBucketResource(t *testing.T) {
cases := map[string]struct {
ARN arn.ARN
ExpectErr string
ExpectBucketName string
}{
"resource-id empty": {
ARN: arn.ARN{
Partition: "aws",
Service: "s3",
Region: "us-west-2",
AccountID: "012345678901",
Resource: "bucket:",
},
ExpectErr: "bucket resource-id not set",
},
"resource not supported": {
ARN: arn.ARN{
Partition: "aws",
Service: "s3",
Region: "us-west-2",
AccountID: "012345678901",
Resource: "bucket/mybucket/object/key",
},
ExpectErr: "sub resource not supported",
},
"valid resource-id": {
ARN: arn.ARN{
Partition: "aws",
Service: "s3",
Region: "us-west-2",
AccountID: "012345678901",
Resource: "bucket/mybucket",
},
ExpectBucketName: "mybucket",
},
}
for name, c := range cases {
t.Run(name, func(t *testing.T) {
resParts := SplitResource(c.ARN.Resource)
a, err := parseBucketResource(c.ARN, resParts[1:])
if len(c.ExpectErr) == 0 && err != nil {
t.Fatalf("expect no error but got %v", err)
} else if len(c.ExpectErr) != 0 && err == nil {
t.Fatalf("expect error %q, but got nil", c.ExpectErr)
} else if len(c.ExpectErr) != 0 && err != nil {
if e, a := c.ExpectErr, err.Error(); !strings.Contains(a, e) {
t.Fatalf("expect error %q, got %q", e, a)
}
return
}
if e, a := c.ExpectBucketName, a; !reflect.DeepEqual(e, a) {
t.Errorf("expect %v, got %v", e, a)
}
})
}
} | explode_data.jsonl/62372 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 767
} | [
2830,
3393,
14463,
36018,
4783,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
2415,
14032,
60,
1235,
341,
197,
197,
9051,
1060,
796,
77,
875,
50195,
198,
197,
35911,
7747,
286,
914,
198,
197,
35911,
36018,
675,
914,
198,
197,
59403,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestImgTest_InitAdd_OverwriteBucketAndURL_ProperLinks(t *testing.T) {
unittest.MediumTest(t)
workDir := t.TempDir()
setupAuthWithGSUtil(t, workDir)
td := testutils.TestDataDir(t)
mh := mockRPCResponses("https://my-custom-gold-url.example.com").Build()
// Call imgtest init with the following flags. We expect it to load the baseline expectations
// and the known hashes (both empty).
ctx, output, exit := testContext(nil, mh, nil, nil)
env := imgTest{
bucketOverride: "my-custom-bucket",
gitHash: "1234567890123456789012345678901234567890",
corpus: "my_corpus",
instanceID: "my-instance",
passFailStep: true,
failureFile: filepath.Join(workDir, "failures.txt"),
workDir: workDir,
testKeysStrings: []string{"os:Android"},
urlOverride: "https://my-custom-gold-url.example.com",
}
runUntilExit(t, func() {
env.Init(ctx)
})
exit.AssertWasCalledWithCode(t, 0, output.String())
mg := &mocks.GCSUploader{}
resultsMatcher := mock.MatchedBy(func(results jsonio.GoldResults) bool {
assert.Equal(t, jsonio.GoldResults{
GitHash: "1234567890123456789012345678901234567890",
Key: map[string]string{
"os": "Android",
"source_type": "my_corpus",
},
Results: []jsonio.Result{{
Key: map[string]string{"name": "pixel-tests", "device": "angler"},
Options: map[string]string{"some_option": "is optional", "ext": "png"},
Digest: blankDigest,
}},
}, results)
return true
})
mg.On("UploadJSON", testutils.AnyContext, resultsMatcher, mock.Anything,
`my-custom-bucket/dm-json-v1/2021/01/23/22/1234567890123456789012345678901234567890/waterfall/dm-1611440480000000019.json`).Return(nil)
bytesMatcher := mock.MatchedBy(func(b []byte) bool {
assert.Len(t, b, 78) // spot check length
return true
})
mg.On("UploadBytes", testutils.AnyContext, bytesMatcher, mock.Anything,
`gs://my-custom-bucket/dm-images-v1/00000000000000000000000000000000.png`).Return(nil)
// Now call imgtest add with the following flags. This is simulating a test uploading a single
// result for a test called pixel-tests.
ctx, output, exit = testContext(mg, nil, nil, &timeOne)
env = imgTest{
workDir: workDir,
testName: "pixel-tests",
pngFile: filepath.Join(td, "00000000000000000000000000000000.png"),
pngDigest: blankDigest,
testKeysStrings: []string{"device:angler"},
testOptionalKeysStrings: []string{"some_option:is optional"},
}
runUntilExit(t, func() {
env.Add(ctx)
})
logs := output.String()
exit.AssertWasCalledWithCode(t, 1, logs)
mg.AssertExpectations(t)
assert.Contains(t, logs, `Untriaged or negative image: https://my-custom-gold-url.example.com/detail?test=pixel-tests&digest=00000000000000000000000000000000`)
assert.Contains(t, logs, `Test: pixel-tests FAIL`)
fb, err := ioutil.ReadFile(filepath.Join(workDir, "failures.txt"))
require.NoError(t, err)
assert.Contains(t, string(fb), "https://my-custom-gold-url.example.com/detail?test=pixel-tests&digest=00000000000000000000000000000000")
} | explode_data.jsonl/69526 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1247
} | [
2830,
3393,
13033,
2271,
15644,
2212,
62,
1918,
4934,
36018,
3036,
3144,
16670,
712,
24089,
1155,
353,
8840,
836,
8,
341,
20479,
14267,
1321,
23090,
2271,
1155,
692,
97038,
6184,
1669,
259,
65009,
6184,
741,
84571,
5087,
2354,
16522,
2742... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestValidateLoginTemplate(t *testing.T) {
testCases := map[string]struct {
Template string
TemplateValid bool
}{
"default login template": {
Template: defaultLoginTemplateString,
TemplateValid: true,
},
"login template example": {
Template: LoginTemplateExample,
TemplateValid: true,
},
"original login template example": {
Template: originalLoginTemplateExample,
TemplateValid: true,
},
"template with missing parameter": {
Template: invalidLoginTemplate,
TemplateValid: false,
},
}
for k, testCase := range testCases {
allErrs := ValidateLoginTemplate([]byte(testCase.Template))
if testCase.TemplateValid {
for _, err := range allErrs {
t.Errorf("%s: template validation failed when it should have succeeded: %v", k, err)
}
} else if len(allErrs) == 0 {
t.Errorf("%s: template validation succeeded when it should have failed", k)
}
}
} | explode_data.jsonl/43988 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 338
} | [
2830,
3393,
17926,
6231,
7275,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
2415,
14032,
60,
1235,
341,
197,
197,
7275,
414,
914,
198,
197,
197,
7275,
4088,
1807,
198,
197,
59403,
197,
197,
86191,
5858,
3811,
788,
341,
298,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestEntry_OnCut_Password(t *testing.T) {
e := widget.NewPasswordEntry()
e.SetText("Testing")
typeKeys(e, keyShiftLeftDown, fyne.KeyRight, fyne.KeyRight, fyne.KeyRight)
clipboard := test.NewClipboard()
shortcut := &fyne.ShortcutCut{Clipboard: clipboard}
e.TypedShortcut(shortcut)
assert.Equal(t, "", clipboard.Content())
assert.Equal(t, "Testing", e.Text)
} | explode_data.jsonl/12334 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 141
} | [
2830,
3393,
5874,
35482,
37666,
93302,
1155,
353,
8840,
836,
8,
341,
7727,
1669,
9086,
7121,
4876,
5874,
741,
7727,
92259,
445,
16451,
1138,
13158,
8850,
2026,
11,
1376,
24841,
5415,
4454,
11,
51941,
811,
9610,
5979,
11,
51941,
811,
961... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCacheBinariesForBootstrapper(t *testing.T) {
download.DownloadMock = download.CreateDstDownloadMock
oldMinikubeHome := os.Getenv("MINIKUBE_HOME")
defer os.Setenv("MINIKUBE_HOME", oldMinikubeHome)
minikubeHome := t.TempDir()
var tc = []struct {
version, clusterBootstrapper string
minikubeHome string
err bool
}{
{
version: "v1.16.0",
clusterBootstrapper: bootstrapper.Kubeadm,
err: false,
minikubeHome: minikubeHome,
},
{
version: "invalid version",
clusterBootstrapper: bootstrapper.Kubeadm,
err: true,
minikubeHome: minikubeHome,
},
}
for _, test := range tc {
t.Run(test.version, func(t *testing.T) {
os.Setenv("MINIKUBE_HOME", test.minikubeHome)
err := CacheBinariesForBootstrapper(test.version, test.clusterBootstrapper, nil, "")
if err != nil && !test.err {
t.Fatalf("Got unexpected error %v", err)
}
if err == nil && test.err {
t.Fatalf("Expected error but got %v", err)
}
})
}
} | explode_data.jsonl/73739 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 527
} | [
2830,
3393,
8233,
28794,
5431,
2461,
17919,
495,
3106,
1155,
353,
8840,
836,
8,
341,
2698,
37702,
61204,
11571,
284,
4139,
7251,
54600,
11377,
11571,
271,
61828,
6217,
1579,
3760,
7623,
1669,
2643,
64883,
445,
16413,
28561,
42389,
28466,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestFacadeErrorf(t *testing.T) {
st, ok := status.FromError(facade.Errorf(codes.Internal, "%s", "ohno"))
require.True(t, ok, "error is not a gRPC status")
assert.Equal(t, codes.Internal, st.Code())
assert.Equal(t, "type(name): ohno", st.Message())
} | explode_data.jsonl/30821 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 102
} | [
2830,
3393,
55331,
1454,
69,
1155,
353,
8840,
836,
8,
341,
18388,
11,
5394,
1669,
2639,
11439,
1454,
955,
580,
1021,
13080,
1337,
2539,
32579,
11,
5962,
82,
497,
330,
2267,
2152,
5455,
17957,
32443,
1155,
11,
5394,
11,
330,
841,
374,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMessage_Format(t *testing.T) {
for i, test := range messageTests {
fields := test.message.Format()
got, err := formatFields(fields)
if err != nil {
t.Error(err)
continue
}
expected, _ := formatFields(test.fields)
if got != expected {
t.Errorf("Invalid message fields for #%v: got \n%v\n but expected \n%v", i, got, expected)
}
}
} | explode_data.jsonl/43042 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 145
} | [
2830,
3393,
2052,
72999,
1155,
353,
8840,
836,
8,
341,
2023,
600,
11,
1273,
1669,
2088,
1943,
18200,
341,
197,
55276,
1669,
1273,
6698,
9978,
2822,
197,
3174,
354,
11,
1848,
1669,
3561,
8941,
37701,
340,
197,
743,
1848,
961,
2092,
341... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestEdgeInfo(t *testing.T) {
edgeInfo := getTestEdgeInfo(t, "account")
testEdgeInfo(t, edgeInfo, 4)
edgeInfo = getTestEdgeInfo(t, "todo")
testEdgeInfo(t, edgeInfo, 0)
edgeInfo = getTestEdgeInfo(t, "folder")
testEdgeInfo(t, edgeInfo, 1)
} | explode_data.jsonl/73722 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 99
} | [
2830,
3393,
11656,
1731,
1155,
353,
8840,
836,
8,
341,
197,
7186,
1731,
1669,
633,
2271,
11656,
1731,
1155,
11,
330,
4608,
5130,
18185,
11656,
1731,
1155,
11,
6821,
1731,
11,
220,
19,
692,
197,
7186,
1731,
284,
633,
2271,
11656,
1731,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUnsetSocialChannelFromWorkspace(t *testing.T) {
initMongoConn()
defer Close()
rand.Seed(time.Now().UnixNano())
w, err := createWorkspace()
if err != nil {
t.Fatalf(err.Error())
}
// first fetch it
w2, err := GetWorkspaceByChannelId(w.ChannelId)
if err != nil {
t.Errorf(err.Error())
}
if w2 == nil {
t.Errorf("couldnt fetch workspace by channel id got nil, expected: %+v", w)
}
if w2.ObjectId.Hex() != w.ObjectId.Hex() {
t.Errorf("workspaces are not same: expected: %+v, got: ", w)
}
err = UnsetSocialChannelFromWorkspace(w.ObjectId)
if err != nil {
t.Errorf("we should be able to unset social channel id")
}
_, err = GetWorkspaceByChannelId(w.ChannelId)
if err == nil {
t.Errorf("we should not be able to find the WS")
}
} | explode_data.jsonl/70374 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 305
} | [
2830,
3393,
1806,
746,
26317,
9629,
3830,
45981,
1155,
353,
8840,
836,
8,
341,
28248,
54998,
9701,
741,
16867,
13032,
741,
7000,
437,
5732,
291,
9730,
13244,
1005,
55832,
83819,
12367,
6692,
11,
1848,
1669,
1855,
45981,
741,
743,
1848,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestParseFileIncludeIgnore(t *testing.T) {
p := NewParser(nil)
err := p.ParseFile("testdata/include_ignore.ini")
if err != nil {
t.Errorf("unexpected error: %v", err)
}
expected := map[string]map[string]string{
"valid00": {
"valid00 L0": "valid00 V0",
"valid00 L1": "valid00 V1",
},
}
actual := p.Config.Map()
if !reflect.DeepEqual(expected, actual) {
t.Errorf("\nexpected: %q\nactual: %q", expected, actual)
}
} | explode_data.jsonl/49360 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 187
} | [
2830,
3393,
14463,
1703,
22283,
12497,
1155,
353,
8840,
836,
8,
341,
3223,
1669,
1532,
6570,
27907,
340,
9859,
1669,
281,
8937,
1703,
445,
92425,
26393,
58493,
34958,
1138,
743,
1848,
961,
2092,
341,
197,
3244,
13080,
445,
53859,
1465,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestValidateMatch(t *testing.T) {
tests := []struct {
match v1.Match
upstreamNames sets.String
msg string
}{
{
match: v1.Match{
Conditions: []v1.Condition{
{
Cookie: "version",
Value: "v1",
},
},
Action: &v1.Action{
Pass: "test",
},
},
upstreamNames: map[string]sets.Empty{
"test": {},
},
msg: "valid match with action",
},
{
match: v1.Match{
Conditions: []v1.Condition{
{
Cookie: "version",
Value: "v1",
},
},
Splits: []v1.Split{
{
Weight: 90,
Action: &v1.Action{
Pass: "test-1",
},
},
{
Weight: 10,
Action: &v1.Action{
Pass: "test-2",
},
},
},
},
upstreamNames: map[string]sets.Empty{
"test-1": {},
"test-2": {},
},
msg: "valid match with splits",
},
}
for _, test := range tests {
allErrs := validateMatch(test.match, field.NewPath("match"), test.upstreamNames, "")
if len(allErrs) > 0 {
t.Errorf("validateMatch() returned errors %v for valid input for the case of %s", allErrs, test.msg)
}
}
} | explode_data.jsonl/65851 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 608
} | [
2830,
3393,
17926,
8331,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
47706,
260,
348,
16,
36062,
198,
197,
59810,
4027,
7980,
7289,
6431,
198,
197,
21169,
1843,
914,
198,
197,
59403,
197,
197,
515,
298,
47706,
25,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestWorkerStartOk(t *testing.T) {
worker := createDefaultWorker()
worker.Start()
rc := make(chan *ActionResult)
action := new(MockResultAction)
action.On("Run", nil, mock.AnythingOfType("map[string]interface {}"), mock.AnythingOfType("*runner.AsyncResultHandler")).Return(nil)
actionData := &ActionData{arc: rc, action: action}
// Create some work
okWorkRequest := ActionWorkRequest{ReqType: RtRun, actionData: actionData}
// Send some work
worker.Work <- okWorkRequest
// Check work result
result := <-actionData.arc
assert.Nil(t, result.err)
assert.NotNil(t, result)
assert.Equal(t, 200, result.results["code"])
assert.Equal(t, "mock", result.results["data"])
} | explode_data.jsonl/16732 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 234
} | [
2830,
3393,
21936,
3479,
11578,
1155,
353,
8840,
836,
8,
341,
197,
21462,
1669,
1855,
3675,
21936,
741,
197,
21462,
12101,
2822,
30295,
1669,
1281,
35190,
353,
17301,
692,
38933,
1669,
501,
66436,
2077,
2512,
340,
38933,
8071,
445,
6727,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestArrayScanner(t *testing.T) {
var s sql.Scanner = Array(&[]bool{})
if _, ok := s.(*BoolArray); !ok {
t.Errorf("Expected *BoolArray, got %T", s)
}
s = Array(&[]float64{})
if _, ok := s.(*Float64Array); !ok {
t.Errorf("Expected *Float64Array, got %T", s)
}
s = Array(&[]int64{})
if _, ok := s.(*Int64Array); !ok {
t.Errorf("Expected *Int64Array, got %T", s)
}
s = Array(&[]string{})
if _, ok := s.(*StringArray); !ok {
t.Errorf("Expected *StringArray, got %T", s)
}
for _, tt := range []interface{}{
&[]sql.Scanner{},
&[][]bool{},
&[][]float64{},
&[][]int64{},
&[][]string{},
} {
s = Array(tt)
if _, ok := s.(GenericArray); !ok {
t.Errorf("Expected GenericArray for %T, got %T", tt, s)
}
}
} | explode_data.jsonl/5302 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 352
} | [
2830,
3393,
1857,
31002,
1155,
353,
8840,
836,
8,
341,
2405,
274,
5704,
32098,
284,
2910,
2099,
1294,
2641,
37790,
743,
8358,
5394,
1669,
274,
41399,
11233,
1857,
1215,
753,
562,
341,
197,
3244,
13080,
445,
18896,
353,
11233,
1857,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestLaunchTestRequest(t *testing.T) {
runTest(t, "increment", func(client *daptest.Client, fixture protest.Fixture) {
runDebugSession(t, client, "launch", func() {
// We reuse the harness that builds, but ignore the built binary,
// only relying on the source to be built in response to LaunchRequest.
fixtures := protest.FindFixturesDir()
testdir, _ := filepath.Abs(filepath.Join(fixtures, "buildtest"))
client.LaunchRequestWithArgs(map[string]interface{}{
"mode": "test", "program": testdir, "output": "__mytestdir"})
}, fixture.Source)
})
} | explode_data.jsonl/17349 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 193
} | [
2830,
3393,
32067,
2271,
1900,
1155,
353,
8840,
836,
8,
341,
56742,
2271,
1155,
11,
330,
35744,
497,
2915,
12805,
353,
91294,
1944,
11716,
11,
12507,
8665,
991,
12735,
8,
341,
197,
56742,
7939,
5283,
1155,
11,
2943,
11,
330,
33499,
49... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSokuonS(t *testing.T) {
const want = "ssasshissussesso"
for _, v := range []string{"っさっしっすっせっそ", "ッサッシッスッセッソ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
} | explode_data.jsonl/11345 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 111
} | [
2830,
3393,
50,
16493,
263,
50,
1155,
353,
8840,
836,
8,
341,
4777,
1366,
284,
330,
778,
395,
71,
1038,
1854,
9823,
1837,
2023,
8358,
348,
1669,
2088,
3056,
917,
4913,
41791,
29713,
41791,
127441,
17219,
41791,
71242,
41791,
26831,
497,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestProcessNodesWithRetriesOnErrors(t *testing.T) {
cancel, controller := newController()
defer cancel()
assert.NotNil(t, controller)
wf := unmarshalWF(helloWorldWf)
assert.NotNil(t, wf)
woc := newWorkflowOperationCtx(wf, controller)
assert.NotNil(t, woc)
// Verify that there are no nodes in the wf status.
assert.Zero(t, len(woc.wf.Status.Nodes))
// Add the parent node for retries.
nodeName := "test-node"
nodeID := woc.wf.NodeID(nodeName)
node := woc.initializeNode(nodeName, wfv1.NodeTypeRetry, "", &wfv1.WorkflowStep{}, "", wfv1.NodeRunning)
retries := wfv1.RetryStrategy{}
retries.Limit = intstrutil.ParsePtr("2")
retries.RetryPolicy = wfv1.RetryPolicyAlways
woc.wf.Status.Nodes[nodeID] = *node
assert.Equal(t, node.Phase, wfv1.NodeRunning)
// Ensure there are no child nodes yet.
lastChild := getChildNodeIndex(node, woc.wf.Status.Nodes, -1)
assert.Nil(t, lastChild)
// Add child nodes.
for i := 0; i < 2; i++ {
childNode := fmt.Sprintf("child-node-%d", i)
woc.initializeNode(childNode, wfv1.NodeTypePod, "", &wfv1.WorkflowStep{}, "", wfv1.NodeRunning)
woc.addChildNode(nodeName, childNode)
}
n := woc.wf.GetNodeByName(nodeName)
lastChild = getChildNodeIndex(n, woc.wf.Status.Nodes, -1)
assert.NotNil(t, lastChild)
// Last child is still running. processNodesWithRetries() should return false since
// there should be no retries at this point.
n, _, err := woc.processNodeRetries(n, retries, &executeTemplateOpts{})
assert.Nil(t, err)
assert.Equal(t, n.Phase, wfv1.NodeRunning)
// Mark lastChild as successful.
woc.markNodePhase(lastChild.Name, wfv1.NodeSucceeded)
n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{})
assert.Nil(t, err)
// The parent node also gets marked as Succeeded.
assert.Equal(t, n.Phase, wfv1.NodeSucceeded)
// Mark the parent node as running again and the lastChild as errored.
n = woc.markNodePhase(n.Name, wfv1.NodeRunning)
woc.markNodePhase(lastChild.Name, wfv1.NodeError)
_, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{})
assert.NoError(t, err)
n = woc.wf.GetNodeByName(nodeName)
assert.Equal(t, n.Phase, wfv1.NodeRunning)
// Add a third node that has errored.
childNode := "child-node-3"
woc.initializeNode(childNode, wfv1.NodeTypePod, "", &wfv1.WorkflowStep{}, "", wfv1.NodeError)
woc.addChildNode(nodeName, childNode)
n = woc.wf.GetNodeByName(nodeName)
n, _, err = woc.processNodeRetries(n, retries, &executeTemplateOpts{})
assert.Nil(t, err)
assert.Equal(t, n.Phase, wfv1.NodeError)
} | explode_data.jsonl/70951 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1036
} | [
2830,
3393,
7423,
12288,
2354,
12020,
4019,
1925,
13877,
1155,
353,
8840,
836,
8,
341,
84441,
11,
6461,
1669,
501,
2051,
741,
16867,
9121,
741,
6948,
93882,
1155,
11,
6461,
340,
6692,
69,
1669,
650,
27121,
32131,
3203,
4791,
10134,
54,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestOuterLinkV2WithMetadataContainerEncode(t *testing.T) {
var o outerLinkV2WithMetadataContainer
_, err := MsgpackEncode(o)
requireErrorHasSuffix(t, errCodecEncodeSelf, err)
} | explode_data.jsonl/72244 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 69
} | [
2830,
3393,
51322,
3939,
53,
17,
2354,
14610,
4502,
32535,
1155,
353,
8840,
836,
8,
341,
2405,
297,
15955,
3939,
53,
17,
2354,
14610,
4502,
198,
197,
6878,
1848,
1669,
24205,
4748,
32535,
10108,
340,
17957,
1454,
10281,
40177,
1155,
11,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestListObjectsPagination(t *testing.T) {
svc := &mockS3Client{}
objects := []s3.ListObjectsOutput{
{
Contents: []s3.Object{
{
Key: aws.String("1"),
},
},
NextMarker: aws.String("marker"),
IsTruncated: aws.Bool(true),
},
{
Contents: []s3.Object{
{
Key: aws.String("2"),
},
},
NextMarker: aws.String("marker"),
IsTruncated: aws.Bool(true),
},
{
Contents: []s3.Object{
{
Key: aws.String("3"),
},
},
IsTruncated: aws.Bool(false),
},
{
Contents: []s3.Object{
{
Key: aws.String("2"),
},
},
NextMarker: aws.String("marker"),
IsTruncated: aws.Bool(true),
},
}
svc.Client = s3.New(defaults.Config())
svc.objects = objects
keys := getKeys(svc, "foo")
expected := []string{"1", "2", "3"}
if e, a := 3, len(keys); e != a {
t.Errorf("expected %d, but received %d", e, a)
}
for i := 0; i < 3; i++ {
if keys[i] != expected[i] {
t.Errorf("expected %q, but received %q", expected[i], keys[i])
}
}
} | explode_data.jsonl/7295 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 529
} | [
2830,
3393,
852,
11543,
44265,
1155,
353,
8840,
836,
8,
341,
1903,
7362,
1669,
609,
16712,
50,
18,
2959,
16094,
197,
19210,
1669,
3056,
82,
18,
5814,
11543,
5097,
515,
197,
197,
515,
298,
197,
14803,
25,
3056,
82,
18,
8348,
515,
571... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestIPPoolExists(t *testing.T) {
tests := []struct {
name string
ipPools []*antreacrds.ExternalIPPool
ipPoolToCheck string
expectedExists bool
}{
{
name: "check for existing IPPool",
ipPools: []*antreacrds.ExternalIPPool{
newExternalIPPool("eip1", "", "10.10.10.2", "10.10.10.3"),
},
ipPoolToCheck: "eip1",
expectedExists: true,
},
{
name: "check for non-existing IPPool",
ipPools: []*antreacrds.ExternalIPPool{
newExternalIPPool("eip1", "", "10.10.10.2", "10.10.10.3"),
},
ipPoolToCheck: "eip2",
expectedExists: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
stopCh := make(chan struct{})
defer close(stopCh)
var fakeCRDObjects []runtime.Object
for _, p := range tt.ipPools {
fakeCRDObjects = append(fakeCRDObjects, p)
}
controller := newController(fakeCRDObjects)
controller.crdInformerFactory.Start(stopCh)
controller.crdInformerFactory.WaitForCacheSync(stopCh)
go controller.Run(stopCh)
require.True(t, cache.WaitForCacheSync(stopCh, controller.HasSynced))
exists := controller.IPPoolExists(tt.ipPoolToCheck)
assert.Equal(t, tt.expectedExists, exists)
})
}
} | explode_data.jsonl/10263 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 546
} | [
2830,
3393,
3298,
10551,
15575,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
1843,
914,
198,
197,
46531,
47,
6178,
286,
29838,
517,
265,
64748,
5356,
5121,
15342,
3298,
10551,
198,
197,
46531,
10551,
1249,
397... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestExists(t *testing.T) {
type incrementalTest struct {
// incremental flag was passed
incremental bool
// previous image existence
previousImage bool
// script installed
scriptInstalled bool
// expected result
expected bool
}
tests := []incrementalTest{
// 0-1: incremental, no image, no matter what with scripts
{true, false, false, false},
{true, false, true, false},
// 2: incremental, previous image, no scripts
{true, true, false, false},
// 3: incremental, previous image, scripts installed
{true, true, true, true},
// 4-7: no incremental build - should always return false no matter what other flags are
{false, false, false, false},
{false, false, true, false},
{false, true, false, false},
{false, true, true, false},
}
for i, ti := range tests {
bh := testBuildHandler()
bh.config.WorkingDir = "/working-dir"
bh.config.Incremental = ti.incremental
bh.config.BuilderPullPolicy = api.PullAlways
bh.installedScripts = map[string]bool{api.SaveArtifacts: ti.scriptInstalled}
bh.incrementalDocker.(*docker.FakeDocker).PullResult = ti.previousImage
bh.config.DockerConfig = &api.DockerConfig{Endpoint: "http://localhost:4243"}
incremental := bh.Exists(bh.config)
if incremental != ti.expected {
t.Errorf("(%d) Unexpected incremental result: %v. Expected: %v",
i, incremental, ti.expected)
}
if ti.incremental && ti.previousImage && ti.scriptInstalled {
if len(bh.fs.(*test.FakeFileSystem).ExistsFile) == 0 {
continue
}
scriptChecked := bh.fs.(*test.FakeFileSystem).ExistsFile[0]
expectedScript := "/working-dir/upload/scripts/save-artifacts"
if scriptChecked != expectedScript {
t.Errorf("(%d) Unexpected script checked. Actual: %s. Expected: %s",
i, scriptChecked, expectedScript)
}
}
}
} | explode_data.jsonl/59439 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 636
} | [
2830,
3393,
15575,
1155,
353,
8840,
836,
8,
341,
13158,
52299,
2271,
2036,
341,
197,
197,
322,
52299,
5181,
572,
5823,
198,
197,
17430,
13477,
278,
1807,
198,
197,
197,
322,
3681,
2168,
13885,
198,
197,
197,
19702,
1906,
1807,
198,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func Test_lessStrings(t *testing.T) {
tests := []struct {
a []string
b []string
want bool
}{
{[]string{"a", "b", "c"}, []string{"a", "b", "c"}, false},
{[]string{"a", "b", "c"}, []string{"a", "b"}, false},
{[]string{"a", "b", "c"}, []string{"a", "c"}, true},
{[]string{"a", "b", "c"}, []string{"b"}, true},
{[]string{"a", "b", "c"}, []string{}, true},
{[]string{"a", "b"}, []string{"a", "b", "c"}, true},
{[]string{"a", "b"}, []string{"a", "a"}, false},
{[]string{"a", "b"}, []string{"a", "c"}, true},
{[]string{"a", "b"}, []string{"b"}, true},
{[]string{"a", "a"}, []string{"a", "b", "c"}, true},
{[]string{"a", "a"}, []string{"a", "b"}, true},
{[]string{"a", "a"}, []string{"a", "a"}, false},
{[]string{"a", "a"}, []string{"a", "c"}, true},
{[]string{"a", "a"}, []string{"b"}, true},
{[]string{"a", "a"}, []string{"b", "a"}, true},
{[]string{"a", "a"}, []string{"c"}, true},
{[]string{"a", "c"}, []string{"a", "b", "c"}, false},
{[]string{"a", "c"}, []string{"a", "c"}, false},
{[]string{"a", "c"}, []string{"b"}, true},
{[]string{"a", "c"}, []string{"c"}, true},
{[]string{"b"}, []string{"a", "b", "c"}, false},
{[]string{"b"}, []string{"a", "c"}, false},
{[]string{"b"}, []string{"b"}, false},
{[]string{"b"}, []string{"b", "a"}, true},
{[]string{"b"}, []string{"c"}, true},
{[]string{"b"}, []string{}, true},
{[]string{"b", "a"}, []string{"a", "b", "c"}, false},
{[]string{"b", "a"}, []string{"b"}, false},
{[]string{"b", "a"}, []string{"b", "a"}, false},
{[]string{"b", "a"}, []string{"c"}, true},
{[]string{"c"}, []string{"a", "b", "c"}, false},
{[]string{"c"}, []string{"b"}, false},
{[]string{}, []string{"a", "b", "c"}, false},
{[]string{}, []string{"c"}, false},
{[]string{}, []string{}, false},
}
for i, tt := range tests {
t.Run(fmt.Sprintf("Case#%d", i+1), func(t *testing.T) {
if got := lessStrings(tt.a, tt.b); got != tt.want {
t.Errorf("lessStrings() %v < %v got = %v, want %v", tt.a, tt.b, got, tt.want)
}
})
}
} | explode_data.jsonl/24871 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 903
} | [
2830,
3393,
50747,
20859,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11323,
262,
3056,
917,
198,
197,
2233,
262,
3056,
917,
198,
197,
50780,
1807,
198,
197,
59403,
197,
197,
90,
1294,
917,
4913,
64,
497,
330,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSchema(t *testing.T) {
colColl := NewColCollection(allCols...)
schFromCols, err := SchemaFromCols(colColl)
require.NoError(t, err)
testSchema("SchemaFromCols", schFromCols, t)
testKeyColColl := NewColCollection(pkCols...)
testNonKeyColsColl := NewColCollection(nonPkCols...)
schFromPKAndNonPKCols, _ := SchemaFromPKAndNonPKCols(testKeyColColl, testNonKeyColsColl)
testSchema("SchemaFromPKAndNonPKCols", schFromPKAndNonPKCols, t)
eq := SchemasAreEqual(schFromCols, schFromPKAndNonPKCols)
assert.True(t, eq, "schemas should be equal")
} | explode_data.jsonl/5966 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 215
} | [
2830,
3393,
8632,
1155,
353,
8840,
836,
8,
341,
46640,
15265,
1669,
1532,
6127,
6482,
20388,
37567,
31218,
1903,
331,
3830,
37567,
11,
1848,
1669,
12539,
3830,
37567,
19611,
15265,
340,
17957,
35699,
1155,
11,
1848,
692,
18185,
8632,
445,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTranslateIngressV1(t *testing.T) {
prefix := networkingv1.PathTypePrefix
// no backend.
ing := &networkingv1.Ingress{
ObjectMeta: metav1.ObjectMeta{
Name: "test",
Namespace: "default",
Annotations: map[string]string{
"k8s.apisix.apache.org/use-regex": "true",
path.Join(annotations.AnnotationsPrefix, "enable-cors"): "true",
path.Join(annotations.AnnotationsPrefix, "allowlist-source-range"): "127.0.0.1",
},
},
Spec: networkingv1.IngressSpec{
Rules: []networkingv1.IngressRule{
{
Host: "apisix.apache.org",
IngressRuleValue: networkingv1.IngressRuleValue{
HTTP: &networkingv1.HTTPIngressRuleValue{
Paths: []networkingv1.HTTPIngressPath{
{
Path: "/foo",
PathType: &prefix,
Backend: networkingv1.IngressBackend{
Service: &networkingv1.IngressServiceBackend{
Name: "test-service",
Port: networkingv1.ServiceBackendPort{
Name: "port1",
},
},
},
},
{
Path: "/bar",
Backend: networkingv1.IngressBackend{
Service: &networkingv1.IngressServiceBackend{
Name: "test-service",
Port: networkingv1.ServiceBackendPort{
Number: 443,
},
},
},
},
},
},
},
},
},
},
}
client := fake.NewSimpleClientset()
informersFactory := informers.NewSharedInformerFactory(client, 0)
svcInformer := informersFactory.Core().V1().Services().Informer()
svcLister := informersFactory.Core().V1().Services().Lister()
epLister, epInformer := kube.NewEndpointListerAndInformer(informersFactory, false)
apisixClient := fakeapisix.NewSimpleClientset()
apisixInformersFactory := apisixinformers.NewSharedInformerFactory(apisixClient, 0)
processCh := make(chan struct{})
svcInformer.AddEventHandler(cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
processCh <- struct{}{}
},
})
epInformer.AddEventHandler(cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
processCh <- struct{}{}
},
})
stopCh := make(chan struct{})
defer close(stopCh)
go svcInformer.Run(stopCh)
go epInformer.Run(stopCh)
cache.WaitForCacheSync(stopCh, svcInformer.HasSynced)
_, err := client.CoreV1().Services("default").Create(context.Background(), _testSvc, metav1.CreateOptions{})
assert.Nil(t, err)
_, err = client.CoreV1().Endpoints("default").Create(context.Background(), _testEp, metav1.CreateOptions{})
assert.Nil(t, err)
tr := &translator{
TranslatorOptions: &TranslatorOptions{
ServiceLister: svcLister,
EndpointLister: epLister,
ApisixUpstreamLister: apisixInformersFactory.Apisix().V2beta3().ApisixUpstreams().Lister(),
},
}
<-processCh
<-processCh
ctx, err := tr.translateIngressV1(ing)
assert.Nil(t, err)
assert.Len(t, ctx.Routes, 2)
assert.Len(t, ctx.Upstreams, 2)
assert.Len(t, ctx.PluginConfigs, 2)
assert.Equal(t, []string{"/foo", "/foo/*"}, ctx.Routes[0].Uris)
assert.Equal(t, ctx.Upstreams[0].ID, ctx.Routes[0].UpstreamId)
assert.Equal(t, ctx.PluginConfigs[0].ID, ctx.Routes[0].PluginConfigId)
assert.Equal(t, "apisix.apache.org", ctx.Routes[0].Host)
assert.Equal(t, []string{"/bar"}, ctx.Routes[1].Uris)
assert.Equal(t, ctx.Upstreams[1].ID, ctx.Routes[1].UpstreamId)
assert.Equal(t, ctx.PluginConfigs[1].ID, ctx.Routes[1].PluginConfigId)
assert.Equal(t, "apisix.apache.org", ctx.Routes[1].Host)
assert.Equal(t, "roundrobin", ctx.Upstreams[0].Type)
assert.Equal(t, "http", ctx.Upstreams[0].Scheme)
assert.Len(t, ctx.Upstreams[0].Nodes, 2)
assert.Equal(t, 9080, ctx.Upstreams[0].Nodes[0].Port)
assert.Equal(t, "192.168.1.1", ctx.Upstreams[0].Nodes[0].Host)
assert.Equal(t, 9080, ctx.Upstreams[0].Nodes[1].Port)
assert.Equal(t, "192.168.1.2", ctx.Upstreams[0].Nodes[1].Host)
assert.Equal(t, "roundrobin", ctx.Upstreams[1].Type)
assert.Equal(t, "http", ctx.Upstreams[1].Scheme)
assert.Len(t, ctx.Upstreams[1].Nodes, 2)
assert.Equal(t, 9443, ctx.Upstreams[1].Nodes[0].Port)
assert.Equal(t, "192.168.1.1", ctx.Upstreams[1].Nodes[0].Host)
assert.Equal(t, 9443, ctx.Upstreams[1].Nodes[1].Port)
assert.Equal(t, "192.168.1.2", ctx.Upstreams[1].Nodes[1].Host)
assert.Len(t, ctx.PluginConfigs[0].Plugins, 2)
assert.Len(t, ctx.PluginConfigs[1].Plugins, 2)
} | explode_data.jsonl/6700 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2074
} | [
2830,
3393,
27473,
641,
2483,
53,
16,
1155,
353,
8840,
836,
8,
341,
3223,
5060,
1669,
28030,
85,
16,
17474,
929,
14335,
198,
197,
322,
902,
19163,
624,
197,
287,
1669,
609,
17511,
287,
85,
16,
5337,
2483,
515,
197,
23816,
12175,
25,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetContainerEnv(t *testing.T) {
assert := asrt.New(t)
container, err := FindContainerByLabels(map[string]string{"com.ddev.site-name": testContainerName})
assert.NoError(err)
require.NotEmpty(t, container)
env := GetContainerEnv("HOTDOG", *container)
assert.Equal("superior-to-corndog", env)
env = GetContainerEnv("POTATO", *container)
assert.Equal("future-fry", env)
env = GetContainerEnv("NONEXISTENT", *container)
assert.Equal("", env)
} | explode_data.jsonl/41379 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 172
} | [
2830,
3393,
1949,
4502,
14359,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
438,
3342,
7121,
1155,
692,
53290,
11,
1848,
1669,
7379,
4502,
1359,
23674,
9147,
14032,
30953,
4913,
874,
950,
3583,
22115,
11494,
788,
1273,
4502,
675,
3518,
694... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEvict(t *testing.T) {
m := NewMap(nil)
if 0 != len(m.Items()) {
t.Error()
}
for _, e := range list {
m.Set(e.k, &MapItem{Value: e}, e.f)
}
if len(list) != len(m.Items()) {
t.Error()
}
m.Expire(func(list, item *MapItem) bool {
if 10 > item.Value.(testItem).v {
m.Delete(item.Value.(testItem).k)
return true
}
return false
})
if 3 != len(m.Items()) {
t.Error()
}
} | explode_data.jsonl/74885 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 196
} | [
2830,
3393,
34112,
849,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
1532,
2227,
27907,
340,
743,
220,
15,
961,
2422,
1255,
12054,
2140,
341,
197,
3244,
6141,
741,
197,
630,
2023,
8358,
384,
1669,
2088,
1140,
341,
197,
2109,
4202,
2026,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestModCheckExistenceOfManifestExists(t *testing.T) {
mod := Mod{}
mod.GoSumPath = testGoSumName
exists := mod.CheckExistenceOfManifest()
if !exists {
t.Errorf("Expected existence of %s", testGoSumName)
}
} | explode_data.jsonl/46383 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 84
} | [
2830,
3393,
4459,
3973,
25613,
763,
2124,
38495,
15575,
1155,
353,
8840,
836,
8,
341,
42228,
1669,
5650,
16094,
42228,
67131,
9190,
1820,
284,
1273,
10850,
9190,
675,
198,
8122,
1671,
1669,
1463,
10600,
25613,
763,
2124,
38495,
2822,
743,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestDeleteExecute(t *testing.T) {
config.WAITFREQUENCY = 0
client := new(CFClient)
stackname := "ToDeleteStack"
client.Client = &fakeDeleteCFClient{err: nil, stackname: stackname}
opts := &commander.CommandHelper{}
d := Delete{
client: client,
}
d.Execute(opts)
} | explode_data.jsonl/29824 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 107
} | [
2830,
3393,
6435,
17174,
1155,
353,
8840,
836,
8,
341,
25873,
1175,
18587,
37,
787,
74113,
284,
220,
15,
198,
25291,
1669,
501,
3025,
37,
2959,
340,
48227,
606,
1669,
330,
64105,
4336,
698,
25291,
11716,
284,
609,
30570,
6435,
9650,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestProhibitedVendorSync(t *testing.T) {
vendorListData := MarshalVendorList(buildVendorList34())
perms := allPurposesEnabledPermissions()
perms.cfg.HostVendorID = 10
perms.vendorIDs = map[openrtb_ext.BidderName]uint16{
openrtb_ext.BidderAppnexus: 2,
openrtb_ext.BidderPubmatic: 6,
openrtb_ext.BidderRubicon: 8,
openrtb_ext.BidderOpenx: 10,
}
perms.fetchVendorList = map[uint8]func(ctx context.Context, id uint16) (vendorlist.VendorList, error){
tcf2SpecVersion: listFetcher(map[uint16]vendorlist.VendorList{
34: parseVendorListDataV2(t, vendorListData),
}),
}
// COzTVhaOzTVhaGvAAAENAiCIAP_AAH_AAAAAAEEUACCKAAA : full consents to purposes for vendors 2, 6, 8
allowSync, err := perms.HostCookiesAllowed(context.Background(), SignalYes, "COzTVhaOzTVhaGvAAAENAiCIAP_AAH_AAAAAAEEUACCKAAA")
assert.NoErrorf(t, err, "Error processing HostCookiesAllowed")
assert.EqualValuesf(t, false, allowSync, "HostCookiesAllowed failure")
// Permission disallowed due to consent string not including vendor 10.
allowSync, err = perms.BidderSyncAllowed(context.Background(), openrtb_ext.BidderOpenx, SignalYes, "COzTVhaOzTVhaGvAAAENAiCIAP_AAH_AAAAAAEEUACCKAAA")
assert.NoErrorf(t, err, "Error processing BidderSyncAllowed")
assert.EqualValuesf(t, false, allowSync, "BidderSyncAllowed failure")
} | explode_data.jsonl/31099 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 517
} | [
2830,
3393,
1336,
92517,
44691,
12154,
1155,
353,
8840,
836,
8,
341,
5195,
8029,
852,
1043,
1669,
35667,
44691,
852,
43333,
44691,
852,
18,
19,
12367,
197,
87772,
1669,
678,
47,
324,
8285,
5462,
23851,
741,
197,
87772,
30481,
29840,
446... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServerSSL(t *testing.T) {
tests := []struct {
label string
domain string
passphrase string
expectedMsgs []string
insecure bool
statusCode int
overrideProtocol bool
}{
{
label: "unknown CA", domain: "127.0.0.1", expectedMsgs: []string{"x509: certificate signed by unknown authority"},
},
{
label: "skip verification", domain: "127.0.0.1", insecure: true, statusCode: http.StatusAccepted,
},
{
label: "bad domain",
domain: "ELASTIC", expectedMsgs: []string{
"x509: certificate signed by unknown authority",
"x509: cannot validate certificate for 127.0.0.1",
},
},
{
label: "bad IP",
domain: "192.168.10.11", expectedMsgs: []string{
"x509: certificate signed by unknown authority",
"x509: certificate is valid for 192.168.10.11, not 127.0.0.1",
},
},
{
label: "bad schema", domain: "localhost", expectedMsgs: []string{
"malformed HTTP response",
"transport connection broken"},
overrideProtocol: true,
},
{
label: "with passphrase", domain: "localhost", statusCode: http.StatusAccepted, insecure: true, passphrase: "foobar",
},
}
var teardown = func() {}
defer teardown() // in case test crashes. calling teardown twice is ok
for idx, test := range tests {
var apm *beater
var err error
apm, teardown, err = setupServer(t, withSSL(t, test.domain, test.passphrase), nil)
require.NoError(t, err)
baseUrl, client := apm.client(test.insecure)
if test.overrideProtocol {
baseUrl = strings.Replace(baseUrl, "https", "http", 1)
}
req := makeTransactionRequest(t, baseUrl)
req.Header.Add("Content-Type", "application/json")
res, err := client.Do(req)
if len(test.expectedMsgs) > 0 {
var containsErrMsg bool
for _, msg := range test.expectedMsgs {
containsErrMsg = containsErrMsg || strings.Contains(err.Error(), msg)
}
assert.True(t, containsErrMsg,
fmt.Sprintf("expected %v at idx %d (%s)", err, idx, test.label))
}
if test.statusCode != 0 {
assert.Equal(t, res.StatusCode, test.statusCode,
fmt.Sprintf("wrong code at idx %d (%s)", idx, test.label))
}
teardown()
}
} | explode_data.jsonl/4947 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 879
} | [
2830,
3393,
5475,
22594,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
29277,
310,
914,
198,
197,
2698,
3121,
1843,
914,
198,
197,
41431,
27710,
981,
914,
198,
197,
42400,
6611,
82,
257,
3056,
917,
198,
197,
17430,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTypeOfTypeOf(t *testing.T) {
// Check that all the type constructors return concrete *rtype implementations.
// It's difficult to test directly because the reflect package is only at arm's length.
// The easiest thing to do is just call a function that crashes if it doesn't get an *rtype.
check := func(name string, typ Type) {
if underlying := TypeOf(typ).String(); underlying != "*reflect.rtype" {
t.Errorf("%v returned %v, not *reflect.rtype", name, underlying)
}
}
type T struct{ int }
check("TypeOf", TypeOf(T{}))
check("ArrayOf", ArrayOf(10, TypeOf(T{})))
check("ChanOf", ChanOf(BothDir, TypeOf(T{})))
check("FuncOf", FuncOf([]Type{TypeOf(T{})}, nil, false))
check("MapOf", MapOf(TypeOf(T{}), TypeOf(T{})))
check("PtrTo", PtrTo(TypeOf(T{})))
check("SliceOf", SliceOf(TypeOf(T{})))
} | explode_data.jsonl/29632 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 295
} | [
2830,
3393,
929,
34696,
2124,
1155,
353,
8840,
836,
8,
341,
197,
322,
4248,
429,
678,
279,
943,
54717,
470,
14175,
353,
32513,
38337,
624,
197,
322,
1084,
594,
5000,
311,
1273,
5961,
1576,
279,
8708,
6328,
374,
1172,
518,
6773,
594,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestACheck(t *testing.T) {
id := "eyJ6b25lIjoibmEwIiwicXVldWUiOiJTSVNZUEhVUy1KT0JTLVYzIiwicGFydF9pZCI6OSwib2Zmc2V0Ijo1NTEzMTU3fQ=="
result, err := test.RunCmdWithError("acheck", test.Bucket, id)
if len(err) > 0 && !strings.Contains(err, "incorrect zone") && len(result) == 0 {
t.Fail()
}
} | explode_data.jsonl/54473 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 157
} | [
2830,
3393,
1706,
1227,
1155,
353,
8840,
836,
8,
341,
15710,
1669,
330,
84609,
21,
65,
17,
20,
75,
40,
7305,
579,
76,
36,
86,
40,
37081,
292,
55,
53,
507,
54,
23710,
81096,
41,
9951,
53,
69930,
2230,
71,
53,
52,
88,
16,
33539,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestGeometryCollection(t *testing.T) {
for i, tc := range []struct {
geoms []T
expected *expectedGeometryCollection
}{
{
expected: &expectedGeometryCollection{
layout: NoLayout,
stride: 0,
bounds: NewBounds(NoLayout),
empty: true,
},
},
{
geoms: []T{
NewPoint(XY),
},
expected: &expectedGeometryCollection{
layout: XY,
stride: 2,
bounds: NewBounds(XY).SetCoords(Coord{0, 0}, Coord{0, 0}),
empty: false,
},
},
{
geoms: []T{
NewPoint(XY),
NewLineString(XY),
},
expected: &expectedGeometryCollection{
layout: XY,
stride: 2,
bounds: NewBounds(XY).SetCoords(Coord{0, 0}, Coord{0, 0}),
empty: false,
},
},
{
geoms: []T{
NewLineString(XY),
NewPolygon(XY),
},
expected: &expectedGeometryCollection{
layout: XY,
stride: 2,
bounds: NewBounds(XY),
empty: true,
},
},
{
geoms: []T{
NewPoint(XY).MustSetCoords(Coord{1, 2}),
NewPoint(XY).MustSetCoords(Coord{3, 4}),
},
expected: &expectedGeometryCollection{
layout: XY,
stride: 2,
bounds: NewBounds(XY).SetCoords(Coord{1, 2}, Coord{3, 4}),
empty: false,
},
},
{
geoms: []T{
NewPoint(XY).MustSetCoords(Coord{1, 2}),
NewPoint(XYZ).MustSetCoords(Coord{3, 4, 5}),
},
expected: &expectedGeometryCollection{
layout: XYZ,
stride: 3,
bounds: NewBounds(XYZ).SetCoords(Coord{1, 2, 5}, Coord{3, 4, 5}),
empty: false,
},
},
{
geoms: []T{
NewPoint(XY).MustSetCoords(Coord{1, 2}),
NewPoint(XYM).MustSetCoords(Coord{3, 4, 5}),
},
expected: &expectedGeometryCollection{
layout: XYM,
stride: 3,
bounds: NewBounds(XYM).SetCoords(Coord{1, 2, 5}, Coord{3, 4, 5}),
empty: false,
},
},
{
geoms: []T{
NewPoint(XYZ).MustSetCoords(Coord{1, 2, 3}),
NewPoint(XYM).MustSetCoords(Coord{4, 5, 6}),
},
expected: &expectedGeometryCollection{
layout: XYZM,
stride: 4,
bounds: NewBounds(XYZM).SetCoords(Coord{1, 2, 3, 6}, Coord{4, 5, 3, 6}),
empty: false,
},
},
{
geoms: []T{
NewPoint(XYM).MustSetCoords(Coord{1, 2, 3}),
NewPoint(XYZ).MustSetCoords(Coord{4, 5, 6}),
},
expected: &expectedGeometryCollection{
layout: XYZM,
stride: 4,
bounds: NewBounds(XYZM).SetCoords(Coord{1, 2, 6, 3}, Coord{4, 5, 6, 3}),
empty: false,
},
},
} {
t.Run(strconv.Itoa(i), func(t *testing.T) {
NewGeometryCollection().MustPush(tc.geoms...).assertEqual(t, tc.expected, tc.geoms)
})
}
} | explode_data.jsonl/74715 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1313
} | [
2830,
3393,
20787,
6482,
1155,
353,
8840,
836,
8,
341,
2023,
600,
11,
17130,
1669,
2088,
3056,
1235,
341,
197,
197,
709,
6940,
262,
3056,
51,
198,
197,
42400,
353,
7325,
20787,
6482,
198,
197,
59403,
197,
197,
515,
298,
42400,
25,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRetry404(t *testing.T) {
tc := &testTime{now: time.Now()}
ts := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if tc.slept == 0 {
http.Error(w, "404 Not Found", http.StatusNotFound)
}
}))
defer ts.Close()
c := getClient(ts.URL)
c.time = tc
resp, err := c.requestRetry(http.MethodGet, "/", "", nil)
if err != nil {
t.Errorf("Error from request: %v", err)
} else if resp.StatusCode != 200 {
t.Errorf("Expected status code 200, got %d", resp.StatusCode)
}
} | explode_data.jsonl/6245 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 215
} | [
2830,
3393,
51560,
19,
15,
19,
1155,
353,
8840,
836,
8,
341,
78255,
1669,
609,
1944,
1462,
90,
3328,
25,
882,
13244,
23509,
57441,
1669,
54320,
70334,
7121,
13470,
1220,
2836,
19886,
89164,
18552,
3622,
1758,
37508,
11,
435,
353,
1254,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestConfigureBadAlgorithm(t *testing.T) {
for _, algorithm := range []string{
"MD2WithRSA",
"DSAWithSHA1",
"DSAWithSHA256",
"Unknown",
} {
t.Run(algorithm, func(t *testing.T) {
inspector, exists := inspectors.Get("signaturealgorithm")
require.True(t, exists, "inspectors.Get(\"signaturealgorithm\") to exist")
inspector, err := inspector.Configure(algorithm)
assert.EqualErrorf(t, err, fmt.Sprintf("unsupported SignatureAlgorithm %s", algorithm), algorithm)
})
}
} | explode_data.jsonl/54445 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 192
} | [
2830,
3393,
28560,
17082,
27847,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
12111,
1669,
2088,
3056,
917,
515,
197,
197,
1,
6076,
17,
2354,
73564,
756,
197,
197,
1,
72638,
2354,
33145,
16,
756,
197,
197,
1,
72638,
2354,
33145,
17,
20... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAddTxGroup(t *testing.T) {
q, mem := initEnv(0)
cfg := q.GetConfig()
defer q.Close()
defer mem.Close()
toAddr := "1PjMi9yGTjA9bbqUZa1Sj7dAUKyLA8KqE1"
//copytx
crouptx1 := types.Transaction{Execer: []byte("coins"), Payload: types.Encode(transfer), Fee: 460000000, Expire: 0, To: toAddr}
crouptx2 := types.Transaction{Execer: []byte("coins"), Payload: types.Encode(transfer), Fee: 100, Expire: 0, To: toAddr}
crouptx3 := types.Transaction{Execer: []byte("coins"), Payload: types.Encode(transfer), Fee: 100000000, Expire: 0, To: toAddr}
crouptx4 := types.Transaction{Execer: []byte("user.write"), Payload: types.Encode(transfer), Fee: 100000000, Expire: 0, To: toAddr}
txGroup, _ := types.CreateTxGroup([]*types.Transaction{&crouptx1, &crouptx2, &crouptx3, &crouptx4}, cfg.GetMinTxFeeRate())
for i := range txGroup.Txs {
err := txGroup.SignN(i, types.SECP256K1, mainPriv)
if err != nil {
t.Error("TestAddTxGroup SignNfailed ", err.Error())
}
}
tx := txGroup.Tx()
msg := mem.client.NewMessage("mempool", types.EventTx, tx)
mem.client.Send(msg, true)
resp, err := mem.client.Wait(msg)
if err != nil {
t.Error("TestAddTxGroup failed", err.Error())
}
reply := resp.GetData().(*types.Reply)
if !reply.GetIsOk() {
t.Error("TestAddTxGroup failed", string(reply.GetMsg()))
}
} | explode_data.jsonl/16837 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 539
} | [
2830,
3393,
2212,
31584,
2808,
1155,
353,
8840,
836,
8,
341,
18534,
11,
1833,
1669,
2930,
14359,
7,
15,
340,
50286,
1669,
2804,
2234,
2648,
741,
16867,
2804,
10421,
741,
16867,
1833,
10421,
741,
31709,
13986,
1669,
330,
16,
47,
73,
41... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestInterpretStringEncodeHex(t *testing.T) {
t.Parallel()
inter := parseCheckAndInterpret(t, `
fun test(): String {
return String.encodeHex([1, 2, 3, 0xCA, 0xDE])
}
`)
result, err := inter.Invoke("test")
require.NoError(t, err)
RequireValuesEqual(
t,
inter,
interpreter.NewStringValue("010203cade"),
result,
)
} | explode_data.jsonl/73417 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 160
} | [
2830,
3393,
3306,
8043,
703,
32535,
20335,
1155,
353,
8840,
836,
8,
1476,
3244,
41288,
7957,
2822,
58915,
1669,
4715,
3973,
3036,
3306,
8043,
1155,
11,
22074,
414,
2464,
1273,
4555,
923,
341,
688,
470,
923,
17313,
20335,
2561,
16,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPresentationMarshalMinimalJSON(t *testing.T) {
var p Presentation
assert.NoError(t, json.Unmarshal([]byte(`{}`), &p))
assert.Equal(t, &p, NewPresentation(), "new Presentation should be equal to empty JSON object")
} | explode_data.jsonl/79743 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 80
} | [
2830,
3393,
55239,
55438,
88328,
5370,
1155,
353,
8840,
836,
8,
341,
2405,
281,
50868,
198,
6948,
35699,
1155,
11,
2951,
38097,
10556,
3782,
5809,
90,
5541,
701,
609,
79,
1171,
6948,
12808,
1155,
11,
609,
79,
11,
1532,
55239,
1507,
33... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestNotFound(t *testing.T) {
router := MakeRouter()
router.GET("/zello/yes", printHello)
router.GET("/hello", printHello)
router.GET("/activity/:user", writeData)
// completely wrong path
RunRequest(router, "GET", "/notFound", 404, "Not Found", t)
// partial match
RunRequest(router, "GET", "/zello/random", 404, "Not Found", t)
RunRequest(router, "GET", "/activity", 404, "Not Found", t)
RunRequest(router, "GET", "/activity/", 404, "Not Found", t)
//test method with no handlers
RunRequest(router, "PUT", "/activity/123", 405, "Method Not Allowed", t)
RunRequest(router, "PATCH", "/activity/123", 405, "Method Not Allowed", t)
RunRequest(router, "DELETE", "/activity/123", 405, "Method Not Allowed", t)
} | explode_data.jsonl/15100 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 261
} | [
2830,
3393,
10372,
1155,
353,
8840,
836,
8,
1476,
67009,
1669,
7405,
9523,
741,
67009,
17410,
4283,
89,
4791,
14,
9693,
497,
1173,
9707,
340,
67009,
17410,
4283,
14990,
497,
1173,
9707,
340,
67009,
17410,
4283,
7175,
11315,
872,
497,
32... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestVerifyDir(t *testing.T) {
// Create temporary directory with a file "target" in it.
dir := tempDir(t)
target := filepath.Join(dir, "target")
err := ioutil.WriteFile(target, []byte{}, 0666)
require.NoError(t, err)
cmd, err := CreateCommand(
Config{
User: "test-user",
Flags: Flags{
Source: true,
Target: []string{target},
},
},
)
require.NoError(t, err)
// Run command with -d flag (directory mode). Since the target is a file,
// it should fail.
err = runSCP(cmd, "-t", "-d", target)
require.Regexp(t, ".*Not a directory", err)
} | explode_data.jsonl/74626 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 223
} | [
2830,
3393,
32627,
6184,
1155,
353,
8840,
836,
8,
341,
197,
322,
4230,
13340,
6220,
448,
264,
1034,
330,
5657,
1,
304,
432,
624,
48532,
1669,
2730,
6184,
1155,
340,
28861,
1669,
26054,
22363,
14161,
11,
330,
5657,
1138,
9859,
1669,
43... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMySQLBinlogRowImageChecker(t *testing.T) {
db, mock, err := sqlmock.New()
require.Nil(t, err)
ctx := context.Background()
cases := []struct {
version string
state State
needCheck bool
rowImage string
}{
// mysql < 5.6.2 don't need check
{
version: "5.6.1-log",
state: StateSuccess,
needCheck: false,
rowImage: "",
},
// mysql >= 5.6.2 need check - success
{
version: "5.6.2-log",
state: StateSuccess,
needCheck: true,
rowImage: "full",
},
// mysql >= 5.6.2 need check - failed
{
version: "5.6.2-log",
state: StateFailure,
needCheck: true,
rowImage: "NOBLOB",
},
// mariadb < 10.1.6 don't need check
{
version: "10.1.5-MariaDB-1~wheezy",
state: StateSuccess,
needCheck: false,
rowImage: "",
},
// mariadb >= 10.1.6 need check - success
{
version: "10.1.6-MariaDB-1~wheezy",
state: StateSuccess,
needCheck: true,
rowImage: "full",
},
// mariadb >= 10.1.6 need check - failed
{
version: "10.1.6-MariaDB-1~wheezy",
state: StateFailure,
needCheck: true,
rowImage: "NOBLOB",
},
}
for _, cs := range cases {
binlogDBChecker := NewMySQLBinlogRowImageChecker(db, &dbutil.DBConfig{})
versionRow := sqlmock.NewRows([]string{"Variable_name", "Value"}).AddRow("version", cs.version)
mock.ExpectQuery("SHOW GLOBAL VARIABLES LIKE 'version'").WillReturnRows(versionRow)
if cs.needCheck {
binlogRowImageRow := sqlmock.NewRows([]string{"Variable_name", "Value"}).AddRow("binlog_row_image", cs.rowImage)
mock.ExpectQuery("SHOW GLOBAL VARIABLES LIKE 'binlog_row_image'").WillReturnRows(binlogRowImageRow)
}
r := binlogDBChecker.Check(ctx)
require.Nil(t, mock.ExpectationsWereMet())
require.Equal(t, cs.state, r.State)
}
} | explode_data.jsonl/46931 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 835
} | [
2830,
3393,
59224,
28794,
839,
3102,
1906,
35188,
1155,
353,
8840,
836,
8,
341,
20939,
11,
7860,
11,
1848,
1669,
5704,
16712,
7121,
741,
17957,
59678,
1155,
11,
1848,
340,
20985,
1669,
2266,
19047,
2822,
1444,
2264,
1669,
3056,
1235,
34... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_buildMetricsHTTPConnectionManagerFilter(t *testing.T) {
cacheDir, _ := os.UserCacheDir()
certFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-crt-354e49305a5a39414a545530374e58454e48334148524c4e324258463837364355564c4e4532464b54355139495547514a38.pem")
keyFileName := filepath.Join(cacheDir, "pomerium", "envoy", "files", "tls-key-3350415a38414e4e4a4655424e55393430474147324651433949384e485341334b5157364f424b4c5856365a545937383735.pem")
srv, _ := NewServer("TEST", nil)
li, err := srv.buildMetricsListener(&config.Config{
Options: &config.Options{
MetricsAddr: "127.0.0.1:9902",
MetricsCertificate: aExampleComCert,
MetricsCertificateKey: aExampleComKey,
},
})
require.NoError(t, err)
testutil.AssertProtoJSONEqual(t, `
{
"name": "metrics-ingress",
"address": {
"socketAddress": {
"address": "127.0.0.1",
"ipv4Compat": true,
"portValue": 9902
}
},
"filterChains": [{
"filters": [{
"name": "envoy.filters.network.http_connection_manager",
"typedConfig": {
"@type": "type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager",
"httpFilters": [{
"name": "envoy.filters.http.router"
}],
"routeConfig": {
"name": "metrics",
"validateClusters": false,
"virtualHosts": [{
"name": "metrics",
"domains": ["*"],
"routes": [{
"name": "metrics",
"match": {
"prefix": "/"
},
"route": {
"cluster": "pomerium-control-plane-http"
}
}]
}]
},
"statPrefix": "metrics"
}
}],
"transportSocket": {
"name": "tls",
"typedConfig": {
"@type": "type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext",
"commonTlsContext": {
"tlsParams": {
"cipherSuites": [
"ECDHE-ECDSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES256-GCM-SHA384",
"ECDHE-ECDSA-AES128-GCM-SHA256",
"ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-ECDSA-CHACHA20-POLY1305",
"ECDHE-RSA-CHACHA20-POLY1305"
],
"tlsMinimumProtocolVersion": "TLSv1_2"
},
"alpnProtocols": ["h2", "http/1.1"],
"tlsCertificates": [
{
"certificateChain": {
"filename": "`+certFileName+`"
},
"privateKey": {
"filename": "`+keyFileName+`"
}
}
]
}
}
}
}]
}`, li)
} | explode_data.jsonl/36948 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1259
} | [
2830,
3393,
20801,
27328,
9230,
4526,
2043,
5632,
1155,
353,
8840,
836,
8,
341,
52680,
6184,
11,
716,
1669,
2643,
7344,
8233,
6184,
741,
1444,
529,
10903,
1669,
26054,
22363,
31933,
6184,
11,
330,
79,
25359,
2356,
497,
330,
3160,
2253,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.