text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestReceiver_RegisterHandler_InstancePing(t *testing.T) {
receiver_grpc.TestReceiver(new(Receiver), func(t *testing.T, sequence int, conn *grpc.ClientConn, ctx context.Context) string {
client := management.NewManagementServiceClient(conn)
instancePing := &management.InstancePingPkg{
Service: fmt.Sprintf("service_%d", sequence),
ServiceInstance: fmt.Sprintf("instance_%d", sequence),
}
commands, err := client.KeepAlive(ctx, instancePing)
if err != nil {
t.Fatalf("cannot send the data to the server: %v", err)
}
if commands == nil {
t.Fatalf("instance ping result is nil")
}
return instancePing.String()
}, func(data *v1.SniffData) string {
return data.GetInstancePing().String()
}, t)
} | explode_data.jsonl/64048 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 264
} | [
2830,
3393,
25436,
73124,
3050,
70849,
69883,
1155,
353,
8840,
836,
8,
341,
17200,
12862,
15682,
3992,
8787,
25436,
1755,
7,
25436,
701,
2915,
1155,
353,
8840,
836,
11,
8500,
526,
11,
4534,
353,
56585,
11716,
9701,
11,
5635,
2266,
9328,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestSetPDClientDynamicOption(t *testing.T) {
store, clean := realtikvtest.CreateMockStoreAndSetup(t)
defer clean()
tk := testkit.NewTestKit(t, store)
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("0"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = 0.5;")
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("0.5"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = 1;")
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("1"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = 1.5;")
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("1.5"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = 10;")
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("10"))
require.Error(t, tk.ExecToErr("set tidb_tso_client_batch_max_wait_time = 0;"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = -1;")
tk.MustQuery("show warnings").Check(testkit.RowsWithSep("|", "Warning|1292|Truncated incorrect tidb_tso_client_batch_max_wait_time value: '-1'"))
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("0"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = -0.1;")
tk.MustQuery("show warnings").Check(testkit.RowsWithSep("|", "Warning|1292|Truncated incorrect tidb_tso_client_batch_max_wait_time value: '-0.1'"))
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("0"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = 10.1;")
tk.MustQuery("show warnings").Check(testkit.RowsWithSep("|", "Warning|1292|Truncated incorrect tidb_tso_client_batch_max_wait_time value: '10.1'"))
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("10"))
tk.MustExec("set global tidb_tso_client_batch_max_wait_time = 11;")
tk.MustQuery("show warnings").Check(testkit.RowsWithSep("|", "Warning|1292|Truncated incorrect tidb_tso_client_batch_max_wait_time value: '11'"))
tk.MustQuery("select @@tidb_tso_client_batch_max_wait_time;").Check(testkit.Rows("10"))
tk.MustQuery("select @@tidb_enable_tso_follower_proxy;").Check(testkit.Rows("0"))
tk.MustExec("set global tidb_enable_tso_follower_proxy = on;")
tk.MustQuery("select @@tidb_enable_tso_follower_proxy;").Check(testkit.Rows("1"))
tk.MustExec("set global tidb_enable_tso_follower_proxy = off;")
tk.MustQuery("select @@tidb_enable_tso_follower_proxy;").Check(testkit.Rows("0"))
require.Error(t, tk.ExecToErr("set tidb_tso_client_batch_max_wait_time = 0;"))
} | explode_data.jsonl/5713 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1072
} | [
2830,
3393,
1649,
23025,
2959,
21752,
5341,
1155,
353,
8840,
836,
8,
341,
57279,
11,
4240,
1669,
1931,
83,
1579,
85,
1944,
7251,
11571,
6093,
3036,
21821,
1155,
340,
16867,
4240,
2822,
3244,
74,
1669,
1273,
8226,
7121,
2271,
7695,
1155,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBlockSigsAddWithTx(t *testing.T) {
db, closeDB := testutil.PrepareDB(t)
defer closeDB()
_, s := cipher.GenerateKeyPair()
h := testutil.RandSHA256(t)
sig := cipher.SignHash(h, s)
sigs, err := newBlockSigs(db)
require.NoError(t, err)
db.Update(func(tx *bolt.Tx) error {
return sigs.AddWithTx(tx, h, sig)
})
// check the db
db.View(func(tx *bolt.Tx) error {
bkt := tx.Bucket(blockSigsBkt)
v := bkt.Get(h[:])
require.NotNil(t, v)
var s cipher.Sig
err := encoder.DeserializeRaw(v, &s)
require.NoError(t, err)
require.Equal(t, sig, s)
return nil
})
} | explode_data.jsonl/800 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 270
} | [
2830,
3393,
4713,
50,
14462,
2212,
2354,
31584,
1155,
353,
8840,
836,
8,
341,
20939,
11,
3265,
3506,
1669,
1273,
1314,
28770,
3380,
3506,
1155,
340,
16867,
3265,
3506,
2822,
197,
6878,
274,
1669,
31088,
57582,
1592,
12443,
741,
9598,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNoDigestIfAlarmIsCleared(t *testing.T) {
require.Len(t, runLocalShardTest(t,
change(user1, alarm1, wire.StatusCritical, time1),
change(user1, alarm1, wire.StatusCleared, time2),
send(user1),
), 0)
} | explode_data.jsonl/3256 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 89
} | [
2830,
3393,
2753,
45217,
2679,
43444,
3872,
34,
92333,
1155,
353,
8840,
836,
8,
341,
17957,
65819,
1155,
11,
1598,
7319,
2016,
567,
2271,
1155,
345,
197,
68380,
4277,
16,
11,
16624,
16,
11,
9067,
10538,
42008,
11,
882,
16,
1326,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRequestOrdersNamespaceInPath(t *testing.T) {
r := (&Request{
baseURL: &url.URL{},
pathPrefix: "/test/",
}).Name("bar").Resource("baz").Namespace("foo")
if s := r.URL().String(); s != "/test/namespaces/foo/baz/bar" {
t.Errorf("namespace should be in order in path: %s", s)
}
} | explode_data.jsonl/13252 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 122
} | [
2830,
3393,
1900,
24898,
22699,
641,
1820,
1155,
353,
8840,
836,
8,
341,
7000,
1669,
15899,
1900,
515,
197,
24195,
3144,
25,
262,
609,
1085,
20893,
38837,
197,
26781,
14335,
25,
3521,
1944,
35075,
197,
16630,
675,
445,
2257,
1827,
4783,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestIoBufferAppend(t *testing.T) {
bi := newIoBuffer(1)
b := bi.(*ioBuffer)
n := randN(64)
for i := 0; i < n; i++ {
s := randString(i + 16)
err := b.Append([]byte(s))
if err != nil {
t.Fatal(err)
}
if !bytes.Equal(b.Peek(len(s)), []byte(s)) {
t.Errorf("Expect peek %s but got %s", s, string(b.Peek(len(s))))
}
b.Drain(len(s))
}
} | explode_data.jsonl/24051 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 187
} | [
2830,
3393,
42799,
4095,
23877,
1155,
353,
8840,
836,
8,
341,
2233,
72,
1669,
501,
42799,
4095,
7,
16,
340,
2233,
1669,
6032,
41399,
815,
4095,
340,
9038,
1669,
10382,
45,
7,
21,
19,
340,
2023,
600,
1669,
220,
15,
26,
600,
366,
30... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestMixedcaseAccount_Address(t *testing.T) {
// https://github.com/matrix/EIPs/blob/master/EIPS/eip-55.md
// Note: 0X{checksum_addr} is not valid according to spec above
var res []struct {
A MixedcaseAddress
Valid bool
}
if err := json.Unmarshal([]byte(`[
{"A" : "0xae967917c465db8578ca9024c205720b1a3651A9", "Valid": false},
{"A" : "0xAe967917c465db8578ca9024c205720b1a3651A9", "Valid": true},
{"A" : "0XAe967917c465db8578ca9024c205720b1a3651A9", "Valid": false},
{"A" : "0x1111111111111111111112222222222223333323", "Valid": true}
]`), &res); err != nil {
t.Fatal(err)
}
for _, r := range res {
if got := r.A.ValidChecksum(); got != r.Valid {
t.Errorf("Expected checksum %v, got checksum %v, input %v", r.Valid, got, r.A.String())
}
}
//These should throw exceptions:
var r2 []MixedcaseAddress
for _, r := range []string{
`["0x11111111111111111111122222222222233333"]`, // Too short
`["0x111111111111111111111222222222222333332"]`, // Too short
`["0x11111111111111111111122222222222233333234"]`, // Too long
`["0x111111111111111111111222222222222333332344"]`, // Too long
`["1111111111111111111112222222222223333323"]`, // Missing 0x
`["x1111111111111111111112222222222223333323"]`, // Missing 0
`["0xG111111111111111111112222222222223333323"]`, //Non-hex
} {
if err := json.Unmarshal([]byte(r), &r2); err == nil {
t.Errorf("Expected failure, input %v", r)
}
}
} | explode_data.jsonl/35498 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 647
} | [
2830,
3393,
86433,
5638,
7365,
64899,
1155,
353,
8840,
836,
8,
1476,
197,
322,
3703,
1110,
5204,
905,
3183,
2555,
25646,
3298,
82,
34827,
23303,
25646,
25944,
16546,
573,
12,
20,
20,
21324,
198,
197,
322,
7036,
25,
220,
15,
55,
90,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestResources_Validate(t *testing.T) {
tests := []struct {
name string
resources *v1beta1.TaskRunResources
}{{
name: "no resources is valid",
}, {
name: "inputs only",
resources: &v1beta1.TaskRunResources{
Inputs: []v1beta1.TaskResourceBinding{{
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource",
},
Name: "workspace",
},
}},
},
}, {
name: "multiple inputs only",
resources: &v1beta1.TaskRunResources{
Inputs: []v1beta1.TaskResourceBinding{{
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource1",
},
Name: "workspace1",
},
}, {
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource2",
},
Name: "workspace2",
},
}},
},
}, {
name: "outputs only",
resources: &v1beta1.TaskRunResources{
Outputs: []v1beta1.TaskResourceBinding{{
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource",
},
Name: "workspace",
},
}},
},
}, {
name: "multiple outputs only",
resources: &v1beta1.TaskRunResources{
Outputs: []v1beta1.TaskResourceBinding{{
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource1",
},
Name: "workspace1",
},
}, {
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource2",
},
Name: "workspace2",
},
}},
},
}, {
name: "inputs and outputs",
resources: &v1beta1.TaskRunResources{
Inputs: []v1beta1.TaskResourceBinding{{
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource",
},
Name: "workspace",
},
}},
Outputs: []v1beta1.TaskResourceBinding{{
PipelineResourceBinding: v1beta1.PipelineResourceBinding{
ResourceRef: &v1beta1.PipelineResourceRef{
Name: "testresource",
},
Name: "workspace",
},
}},
},
}}
for _, ts := range tests {
t.Run(ts.name, func(t *testing.T) {
if err := ts.resources.Validate(context.Background()); err != nil {
t.Errorf("TaskRunInputs.Validate() error = %v", err)
}
})
}
} | explode_data.jsonl/82031 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1148
} | [
2830,
3393,
11277,
62,
17926,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
414,
914,
198,
197,
10202,
2360,
353,
85,
16,
19127,
16,
28258,
6727,
11277,
198,
197,
15170,
515,
197,
11609,
25,
330,
2152,
4963,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestRequestExecute(t *testing.T) {
http.HandleFunc("/foo/bar", func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(200)
fmt.Fprint(w, `{"foo":"bar"}`)
})
ts := httptest.NewServer(http.DefaultServeMux)
defer ts.Close()
Addr = ts.URL[len("http://"):]
for i, tt := range requestExecuteTests {
err := tt.r.Execute(tt.method, tt.path)
if !reflect.DeepEqual(err, tt.err) {
t.Errorf("#%d: err got: \"%v\"\nwant: \"%v\"", i, err, tt.err)
}
}
} | explode_data.jsonl/773 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 237
} | [
2830,
3393,
1900,
17174,
1155,
353,
8840,
836,
8,
341,
28080,
63623,
4283,
7975,
49513,
497,
2915,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
341,
197,
6692,
15753,
1005,
1649,
445,
2762,
10804,
497,
330,
5132,
8931,
1138,
197,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCatchUpClient_PullBlocks(t *testing.T) {
lg, err := logger.New(&logger.Config{
Level: "info",
OutputPath: []string{"stdout"},
ErrOutputPath: []string{"stderr"},
Encoding: "console",
})
require.NoError(t, err)
localConfigs, sharedConfig := newTestSetup(t, 3)
tr1, err := startTransportWithLedger(t, lg, localConfigs, sharedConfig, 0, 5)
require.NoError(t, err)
defer tr1.Close()
tr2, err := startTransportWithLedger(t, lg, localConfigs, sharedConfig, 1, 10)
require.NoError(t, err)
defer tr2.Close()
cc := comm.NewCatchUpClient(lg, nil)
require.NotNil(t, cc)
err = cc.UpdateMembers(sharedConfig.ConsensusConfig.Members)
require.NoError(t, err)
//get all from the leader hint
blocks, err := cc.PullBlocks(context.Background(), 1, 3, 1)
require.NoError(t, err)
require.Equal(t, 3, len(blocks))
//get some from the leader hint
blocks, err = cc.PullBlocks(context.Background(), 1, 8, 1)
require.NoError(t, err)
require.Equal(t, 5, len(blocks))
//get all from member 2, wrong leader hint
blocks, err = cc.PullBlocks(context.Background(), 6, 9, 1)
require.NoError(t, err)
require.Equal(t, 4, len(blocks))
//get all from one of 1/2, no hint
blocks, err = cc.PullBlocks(context.Background(), 1, 3, 0)
require.NoError(t, err)
require.Equal(t, 3, len(blocks))
//get all from member 2, no hint
blocks, err = cc.PullBlocks(context.Background(), 6, 9, 0)
require.NoError(t, err)
require.Equal(t, 4, len(blocks))
} | explode_data.jsonl/70495 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 574
} | [
2830,
3393,
57760,
2324,
2959,
1088,
617,
29804,
1155,
353,
8840,
836,
8,
341,
8810,
70,
11,
1848,
1669,
5925,
7121,
2099,
9786,
10753,
515,
197,
197,
4449,
25,
260,
330,
2733,
756,
197,
80487,
1820,
25,
262,
3056,
917,
4913,
36358,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestWaitForTask(t *testing.T) {
ctx := context.Background()
srv := newAgentEndpointServiceTestServer()
tc, err := newTestClient(ctx, srv)
if err != nil {
t.Fatal(err)
}
defer tc.close()
td, err := ioutil.TempDir(os.TempDir(), "")
if err != nil {
t.Fatalf("error creating temp dir: %v", err)
}
defer os.RemoveAll(td)
taskStateFile = filepath.Join(td, "testState")
// Stream recieve.
srv.streamSend <- struct{}{}
if err := tc.client.waitForTask(ctx); err != nil {
t.Errorf("did not expect error from a closed stream: %v", err)
}
if !srv.taskStart {
t.Error("expected ReportTaskStart to have been called")
}
if !srv.execTaskStart {
t.Error("expected ReportTaskProgress for TaskType_EXEC_STEP_TASK to have been called")
}
if !srv.execTaskComplete {
t.Error("expected ReportTaskComplete for TaskType_EXEC_STEP_TASK to have been called")
}
if !srv.patchTaskStart {
t.Error("expected ReportTaskProgress for TaskType_APPLY_PATCHES to have been called")
}
if !srv.patchTaskComplete {
t.Error("expected ReportTaskComplete for TaskType_APPLY_PATCHES to have been called")
}
} | explode_data.jsonl/74265 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 413
} | [
2830,
3393,
14190,
2461,
6262,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
2266,
19047,
741,
1903,
10553,
1669,
501,
16810,
27380,
1860,
2271,
5475,
741,
78255,
11,
1848,
1669,
501,
2271,
2959,
7502,
11,
43578,
340,
743,
1848,
961,
2092,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestValidQueryResponse(t *testing.T) {
msg := netbootMessage{
Header: netbootHeader{
Magic: 1,
Cookie: 1,
Cmd: cmdQuery,
Arg: 0,
},
}
res := netbootMessage{
Header: netbootHeader{
Magic: 1,
Cookie: 1,
Cmd: cmdAck,
Arg: 0,
},
}
want := "somenode"
copy(res.Data[:], want)
q := &netbootQuery{
message: msg,
}
var buf bytes.Buffer
if err := binary.Write(&buf, binary.LittleEndian, res); err != nil {
t.Fatalf("failed to write struct: %v", err)
}
got, err := q.parse(buf.Bytes())
if err != nil {
t.Errorf("Expecting no error, but received: %v", err)
}
if got != want {
t.Errorf("Data parsed, want %q, got %q", want, got)
}
} | explode_data.jsonl/78497 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 315
} | [
2830,
3393,
4088,
2859,
2582,
1155,
353,
8840,
836,
8,
341,
21169,
1669,
4179,
4619,
2052,
515,
197,
197,
4047,
25,
4179,
4619,
4047,
515,
298,
9209,
12780,
25,
220,
220,
16,
345,
298,
6258,
9619,
25,
220,
16,
345,
298,
6258,
2277,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestTemplate_GetVolumeMounts(t *testing.T) {
t.Run("Default", func(t *testing.T) {
x := &Template{}
assert.Empty(t, x.GetVolumeMounts())
})
t.Run("Container", func(t *testing.T) {
x := &Template{Container: &corev1.Container{VolumeMounts: []corev1.VolumeMount{{}}}}
assert.NotEmpty(t, x.GetVolumeMounts())
})
t.Run("ContainerSet", func(t *testing.T) {
x := &Template{ContainerSet: &ContainerSetTemplate{VolumeMounts: []corev1.VolumeMount{{}}}}
assert.NotEmpty(t, x.GetVolumeMounts())
})
t.Run("Script", func(t *testing.T) {
x := &Template{Script: &ScriptTemplate{Container: corev1.Container{VolumeMounts: []corev1.VolumeMount{{}}}}}
assert.NotEmpty(t, x.GetVolumeMounts())
})
} | explode_data.jsonl/26055 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 272
} | [
2830,
3393,
7275,
13614,
18902,
16284,
82,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
3675,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
10225,
1669,
609,
7275,
16094,
197,
6948,
11180,
1155,
11,
856,
2234,
18902,
16284,
82,
239... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCheckCylic(t *testing.T) {
g := TxGraph{}
g["tx3"] = []string{"tx1", "tx2"}
g["tx2"] = []string{"tx1", "tx0"}
g["tx1"] = []string{"tx0", "tx2"}
output, cylic, _ := TopSortDFS(g)
if output != nil {
t.Fatal("sort fail1")
}
t.Log(cylic)
//if len(cylic) != 2 {
if cylic == false {
t.Fatal("sort fail2")
}
} | explode_data.jsonl/77673 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 160
} | [
2830,
3393,
3973,
56715,
415,
1155,
353,
8840,
836,
8,
341,
3174,
1669,
39850,
11212,
16094,
3174,
1183,
3998,
18,
1341,
284,
3056,
917,
4913,
3998,
16,
497,
330,
3998,
17,
16707,
3174,
1183,
3998,
17,
1341,
284,
3056,
917,
4913,
3998... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestConfig_LoadManifest_BundleDependencyNotInstalled(t *testing.T) {
c := NewTestConfig(t)
c.TestContext.AddTestFile("testdata/missingdep.porter.yaml", Name)
err := c.LoadManifest()
require.Errorf(t, err, "bundle missingdep not installed in PORTER_HOME")
} | explode_data.jsonl/10945 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
2648,
19553,
38495,
1668,
4206,
36387,
2623,
60800,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
1532,
2271,
2648,
1155,
692,
1444,
8787,
1972,
1904,
2271,
1703,
445,
92425,
14,
30616,
14891,
14598,
261,
33406,
497,
3988,
692,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEc2Instance_LoadDetails_ErrInstanceDetails(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
ec2Instance, mockEC2ApiHelper := getMockInstance(ctrl)
mockEC2ApiHelper.EXPECT().GetInstanceDetails(&instanceID).Return(nil, mockError)
err := ec2Instance.LoadDetails(mockEC2ApiHelper)
assert.Error(t, mockError, err)
} | explode_data.jsonl/756 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 124
} | [
2830,
3393,
50730,
17,
2523,
19553,
7799,
93623,
2523,
7799,
1155,
353,
8840,
836,
8,
341,
84381,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
23743,
991,
18176,
2822,
197,
757,
17,
2523,
11,
7860,
7498,
17,
6563,
5511,
1669,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReconcileEndpointSlicesUpdating(t *testing.T) {
client := newClientset()
namespace := "test"
svc, _ := newServiceAndendpointMeta("foo", namespace)
// start with 250 pods
pods := []*corev1.Pod{}
for i := 0; i < 250; i++ {
ready := !(i%3 == 0)
pods = append(pods, newPod(i, namespace, ready, 1))
}
r := newReconciler(client, []*corev1.Node{{ObjectMeta: metav1.ObjectMeta{Name: "node-1"}}}, defaultMaxEndpointsPerSlice)
reconcileHelper(t, r, &svc, pods, []*discovery.EndpointSlice{}, time.Now())
numActionsExpected := 3
assert.Len(t, client.Actions(), numActionsExpected, "Expected 3 additional clientset actions")
slices := fetchEndpointSlices(t, client, namespace)
numActionsExpected++
expectUnorderedSlicesWithLengths(t, slices, []int{100, 100, 50})
svc.Spec.Ports[0].TargetPort.IntVal = 81
reconcileHelper(t, r, &svc, pods, []*discovery.EndpointSlice{&slices[0], &slices[1], &slices[2]}, time.Now())
numActionsExpected += 3
assert.Len(t, client.Actions(), numActionsExpected, "Expected 3 additional clientset actions")
expectActions(t, client.Actions(), 3, "update", "endpointslices")
expectUnorderedSlicesWithLengths(t, fetchEndpointSlices(t, client, namespace), []int{100, 100, 50})
} | explode_data.jsonl/76057 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 473
} | [
2830,
3393,
693,
40446,
457,
27380,
50,
37414,
46910,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
501,
2959,
746,
741,
56623,
1669,
330,
1944,
698,
1903,
7362,
11,
716,
1669,
501,
1860,
3036,
32540,
12175,
445,
7975,
497,
4473,
692,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestKitTimerToLogFullNativeBuild(t *testing.T) {
doKitFullBuild(t, "timer-to-log", "4Gi", "15m0s", 2*TestTimeoutLong, kitOptions{
dependencies: []string{
"camel:timer", "camel:log",
},
traits: []string{
"quarkus.package-type=native",
},
})
} | explode_data.jsonl/61812 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 114
} | [
2830,
3393,
7695,
10105,
1249,
2201,
9432,
20800,
11066,
1155,
353,
8840,
836,
8,
341,
19935,
7695,
9432,
11066,
1155,
11,
330,
19278,
4686,
46332,
497,
330,
19,
46841,
497,
330,
16,
20,
76,
15,
82,
497,
220,
17,
9,
2271,
7636,
6583... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRecordMetrics(t *testing.T) {
cases := []struct {
name string
language []string
includePatterns []string
want string
}{
{
name: "Empty values",
language: nil,
includePatterns: []string{},
want: ".generic",
},
{
name: "Include patterns no extension",
language: nil,
includePatterns: []string{"foo", "bar.go"},
want: ".generic",
},
{
name: "Include patterns first extension",
language: nil,
includePatterns: []string{"foo.c", "bar.go"},
want: ".c",
},
{
name: "Non-empty language",
language: []string{"xml"},
includePatterns: []string{"foo.c", "bar.go"},
want: ".xml",
},
}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
var extensionHint string
if len(tt.includePatterns) > 0 {
filename := tt.includePatterns[0]
extensionHint = filepath.Ext(filename)
}
got := toMatcher(tt.language, extensionHint)
if diff := cmp.Diff(tt.want, got); diff != "" {
t.Fatal(diff)
}
})
}
} | explode_data.jsonl/5444 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 576
} | [
2830,
3393,
6471,
27328,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
11609,
310,
914,
198,
197,
8810,
2616,
286,
3056,
917,
198,
197,
24699,
57656,
3056,
917,
198,
197,
50780,
310,
914,
198,
197,
59403,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestUseUnsuppliedParam(t *testing.T) {
const SCRIPT = `
function getMessage(message) {
if (message === undefined) {
message = '';
}
message += " 123 456";
return message;
}
getMessage();
`
testScript1(SCRIPT, asciiString(" 123 456"), t)
} | explode_data.jsonl/75290 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 102
} | [
2830,
3393,
10253,
1806,
12776,
3440,
2001,
1155,
353,
8840,
836,
8,
341,
4777,
53679,
284,
22074,
7527,
57753,
7333,
8,
341,
197,
743,
320,
1994,
2049,
5614,
8,
341,
298,
24753,
284,
7547,
197,
197,
532,
197,
24753,
1421,
330,
220,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAltKeyring_ConstructorSupportedAlgos(t *testing.T) {
keyring, err := New(t.Name(), BackendTest, t.TempDir(), nil)
require.NoError(t, err)
// should fail when using unsupported signing algorythm.
_, _, err = keyring.NewMnemonic("test", English, sdk.FullFundraiserPath, notSupportedAlgo{})
require.EqualError(t, err, "unsupported signing algo")
// but works with default signing algo.
_, _, err = keyring.NewMnemonic("test", English, sdk.FullFundraiserPath, hd.Secp256k1)
require.NoError(t, err)
// but we can create a new keybase with our provided algos.
keyring2, err := New(t.Name(), BackendTest, t.TempDir(), nil, func(options *Options) {
options.SupportedAlgos = SigningAlgoList{
notSupportedAlgo{},
}
})
require.NoError(t, err)
// now this new keyring does not fail when signing with provided algo
_, _, err = keyring2.NewMnemonic("test", English, sdk.FullFundraiserPath, notSupportedAlgo{})
require.NoError(t, err)
} | explode_data.jsonl/73467 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 344
} | [
2830,
3393,
26017,
1592,
12640,
15100,
3086,
34636,
2101,
34073,
1155,
353,
8840,
836,
8,
341,
23634,
12640,
11,
1848,
1669,
1532,
1155,
2967,
1507,
55260,
2271,
11,
259,
65009,
6184,
1507,
2092,
340,
17957,
35699,
1155,
11,
1848,
692,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_Mock_On_WithFuncArg(t *testing.T) {
// make a test impl object
var mockedService = new(TestExampleImplementation)
c := mockedService.
On("TheExampleMethodFunc", AnythingOfType("func(string) error")).
Return(nil)
assert.Equal(t, []*Call{c}, mockedService.ExpectedCalls)
assert.Equal(t, "TheExampleMethodFunc", c.Method)
assert.Equal(t, 1, len(c.Arguments))
assert.Equal(t, AnythingOfType("func(string) error"), c.Arguments[0])
fn := func(string) error { return nil }
assert.NotPanics(t, func() {
mockedService.TheExampleMethodFunc(fn)
})
} | explode_data.jsonl/8569 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 234
} | [
2830,
3393,
1245,
1176,
35482,
62,
2354,
9626,
2735,
1155,
353,
8840,
836,
8,
8022,
197,
322,
1281,
264,
1273,
11605,
1633,
319,
2405,
46149,
1860,
284,
501,
31159,
13314,
36850,
7229,
1444,
1669,
46149,
1860,
3224,
197,
86391,
445,
785... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRunListTokens(t *testing.T) {
var err error
var bufOut, bufErr bytes.Buffer
tmpDir, err := ioutil.TempDir("", "kubeadm-token-test")
if err != nil {
t.Errorf("Unable to create temporary directory: %v", err)
}
defer os.RemoveAll(tmpDir)
fullPath := filepath.Join(tmpDir, "test-config-file")
f, err := os.Create(fullPath)
if err != nil {
t.Errorf("Unable to create test file %q: %v", fullPath, err)
}
defer f.Close()
// test config without secrets; should fail
if _, err = f.WriteString(testConfigToken); err != nil {
t.Errorf("Unable to write test file %q: %v", fullPath, err)
}
client, err := getClientset(fullPath, true)
if err != nil {
t.Errorf("Unable to run getClientset() for test file %q: %v", fullPath, err)
}
if err = RunListTokens(&bufOut, &bufErr, client); err == nil {
t.Errorf("RunListTokens() did not fail for a config without secrets: %v", err)
}
// test config without secrets but use a dummy API server that returns secrets
portString := "9008"
http.HandleFunc("/", httpHandler)
httpServer := &http.Server{Addr: "localhost:" + portString}
go func() {
err := httpServer.ListenAndServe()
if err != nil {
t.Errorf("Failed to start dummy API server: localhost:%s", portString)
}
}()
fmt.Printf("dummy API server listening on localhost:%s\n", portString)
testConfigTokenOpenPort := strings.Replace(testConfigToken, "server: localhost:8000", "server: localhost:"+portString, -1)
if _, err = f.WriteString(testConfigTokenOpenPort); err != nil {
t.Errorf("Unable to write test file %q: %v", fullPath, err)
}
client, err = getClientset(fullPath, true)
if err != nil {
t.Errorf("Unable to run getClientset() for test file %q: %v", fullPath, err)
}
// the order of these tests should match the case check
// for httpTestItr in httpHandler
testCases := []struct {
name string
expectedError bool
}{
{
name: "token-id not defined",
expectedError: true,
},
{
name: "secret name not formatted correctly",
expectedError: true,
},
{
name: "token-secret not defined",
expectedError: true,
},
{
name: "token expiration not formatted correctly",
expectedError: true,
},
{
name: "token expiration formatted correctly",
expectedError: false,
},
{
name: "token usage constant not true",
expectedError: false,
},
{
name: "token usage constant set to true",
expectedError: false,
},
}
for _, tc := range testCases {
bufErr.Reset()
atomic.StoreUint32(&httpSentResponse, 0)
fmt.Printf("Running HTTP test case (%d) %q\n", atomic.LoadUint32(&httpTestItr), tc.name)
// should always return nil here if a valid list of secrets if fetched
err := RunListTokens(&bufOut, &bufErr, client)
if err != nil {
t.Errorf("HTTP test case %d: Was unable to fetch a list of secrets", atomic.LoadUint32(&httpTestItr))
}
// wait for a response from the dummy HTTP server
timeSpent := 0 * time.Millisecond
timeToSleep := 50 * time.Millisecond
timeMax := 2000 * time.Millisecond
for {
if atomic.LoadUint32(&httpSentResponse) == 1 {
break
}
if timeSpent >= timeMax {
t.Errorf("HTTP test case %d: The server did not respond within %d ms", atomic.LoadUint32(&httpTestItr), timeMax)
}
timeSpent += timeToSleep
time.Sleep(timeToSleep)
}
// check if an error is written in the error buffer
hasError := bufErr.Len() != 0
if hasError != tc.expectedError {
t.Errorf("HTTP test case %d: RunListTokens expected error: %v, saw: %v; %v", atomic.LoadUint32(&httpTestItr), tc.expectedError, hasError, bufErr.String())
}
}
} | explode_data.jsonl/11816 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1402
} | [
2830,
3393,
6727,
852,
29300,
1155,
353,
8840,
836,
8,
341,
2405,
1848,
1465,
198,
2405,
6607,
2662,
11,
6607,
7747,
5820,
22622,
271,
20082,
6184,
11,
1848,
1669,
43144,
65009,
6184,
19814,
330,
74,
392,
3149,
76,
34841,
16839,
1138,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestMarshal(t *testing.T) {
testCases := []string{
// TODO: these values will change with each CLDR update. This issue
// will be solved if we decide to fix the indexes.
"und",
"ca-ES-valencia",
"ca-ES-valencia-u-va-posix",
"ca-ES-valencia-u-co-phonebk",
"ca-ES-valencia-u-co-phonebk-va-posix",
"x-klingon",
"en-US",
"en-US-u-va-posix",
"en",
"en-u-co-phonebk",
"en-001",
"sh",
"en-GB-u-rg-uszzzz",
"en-GB-u-rg-uszzzz-va-posix",
"en-GB-u-co-phonebk-rg-uszzzz",
// Invalid tags should also roundtrip.
"en-GB-u-co-phonebk-rg-uszz",
}
for _, tc := range testCases {
var tag Tag
err := tag.UnmarshalText([]byte(tc))
if err != nil {
t.Errorf("UnmarshalText(%q): unexpected error: %v", tc, err)
}
b, err := tag.MarshalText()
if err != nil {
t.Errorf("MarshalText(%q): unexpected error: %v", tc, err)
}
if got := string(b); got != tc {
t.Errorf("%s: got %q; want %q", tc, got, tc)
}
}
} | explode_data.jsonl/15831 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 470
} | [
2830,
3393,
55438,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
917,
515,
197,
197,
322,
5343,
25,
1493,
2750,
686,
2297,
448,
1817,
6976,
7687,
2647,
13,
1096,
4265,
198,
197,
197,
322,
686,
387,
27956,
421,
582,
10279,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestH12_HeadContentLengthSmallBody(t *testing.T) {
h12Compare{
ReqFunc: (*Client).Head,
Handler: func(w ResponseWriter, r *Request) {
io.WriteString(w, "small")
},
}.run(t)
} | explode_data.jsonl/4737 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 80
} | [
2830,
3393,
39,
16,
17,
62,
12346,
2762,
4373,
25307,
5444,
1155,
353,
8840,
836,
8,
341,
9598,
16,
17,
27374,
515,
197,
197,
27234,
9626,
25,
4609,
2959,
568,
12346,
345,
197,
197,
3050,
25,
2915,
3622,
5949,
6492,
11,
435,
353,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestGetOrdererEndpointFail(t *testing.T) {
signer, err := common.GetDefaultSigner()
assert.NoError(t, err)
mockchain := "mockchain"
factory.InitFactories(nil)
mockResponse := &pb.ProposalResponse{
Response: &pb.Response{Status: 404, Payload: []byte{}},
Endorsement: &pb.Endorsement{},
}
mockEndorserClient := common.GetMockEndorserClient(mockResponse, nil)
cryptoProvider, err := sw.NewDefaultSecurityLevelWithKeystore(sw.NewDummyKeyStore())
assert.NoError(t, err)
_, err = common.GetOrdererEndpointOfChain(mockchain, signer, mockEndorserClient, cryptoProvider)
assert.Error(t, err, "GetOrdererEndpointOfChain from invalid response")
} | explode_data.jsonl/46373 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 237
} | [
2830,
3393,
1949,
4431,
261,
27380,
19524,
1155,
353,
8840,
836,
8,
341,
69054,
261,
11,
1848,
1669,
4185,
2234,
3675,
7264,
261,
741,
6948,
35699,
1155,
11,
1848,
692,
77333,
8819,
1669,
330,
16712,
8819,
698,
1166,
2919,
26849,
17417,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLimit(t *testing.T) {
var tree = RWTree{Limit: 5}
assert.Equal(t, 2, tree.GetIndex("_faster", "key")) // 1 2
assert.Equal(t, 4, tree.GetIndex("http", "GET /robots.txt")) // 3 4
assert.Equal(t, 5, tree.GetIndex("http", "GET /favicon.ico")) // 3 5(overflow)
assert.Equal(t, 5, tree.GetIndex("_faster", "key", "foobar")) // 1 2 5(overflow)
assert.Equal(t, 5, tree.GetIndex("https")) // 5(overflow)
assert.Contains(t, tree.root.children, "_faster")
assert.Contains(t, tree.root.children, "http")
assert.Contains(t, tree.root.children, "_overflow")
assert.Equal(t, 3, len(tree.root.children))
assert.True(t, tree.Exists("_faster", "key"))
assert.False(t, tree.Exists("_faster", "key", "foobar"))
assert.True(t, tree.Exists("http", "GET /robots.txt"))
assert.False(t, tree.Exists("http", "GET /favicon.ico"))
assert.False(t, tree.Exists("https"))
} | explode_data.jsonl/36812 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 384
} | [
2830,
3393,
16527,
1155,
353,
8840,
836,
8,
341,
2405,
4916,
284,
46206,
6533,
90,
16527,
25,
220,
20,
630,
6948,
12808,
1155,
11,
220,
17,
11,
4916,
2234,
1552,
16975,
69,
2300,
497,
330,
792,
2761,
1843,
442,
220,
16,
220,
17,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRecipeUnmarshall(t *testing.T) {
body := []byte(`{"id":1,"title":"","description":"","ingredients":[{"amount":10},{"amount":20}],"instructions":""}`)
expected := Recipe{
ID: 1,
Ingredients: jsonb([]byte(`[{"amount":10},{"amount":20}]`)),
}
actual := Recipe{}
err := json.Unmarshal(body, &actual)
if err != nil {
t.Errorf("Unexpected error when marshalling, %s", err)
}
if !reflect.DeepEqual(actual, expected) {
t.Errorf("Unexpected unmarshalled result, actual: %+v", actual)
}
} | explode_data.jsonl/36813 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 188
} | [
2830,
3393,
28780,
1806,
84161,
1155,
353,
8840,
836,
8,
341,
35402,
1669,
3056,
3782,
5809,
4913,
307,
788,
16,
1335,
2102,
60767,
4684,
60767,
38120,
66582,
6045,
788,
16,
15,
36828,
6045,
788,
17,
15,
92,
28503,
62295,
62366,
27085,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestOcNodeResourceToInternal(t *testing.T) {
resource := ocNodeResourceToInternal(nil, nil)
assert.EqualValues(t, data.NewResource(), resource)
ocNode := &occommon.Node{}
ocResource := &ocresource.Resource{}
resource = ocNodeResourceToInternal(ocNode, ocResource)
assert.EqualValues(t, data.NewResource(), resource)
ts, err := ptypes.TimestampProto(time.Date(2020, 2, 11, 20, 26, 0, 0, time.UTC))
assert.NoError(t, err)
ocNode = &occommon.Node{
Identifier: &occommon.ProcessIdentifier{
HostName: "host1",
Pid: 123,
StartTimestamp: ts,
},
LibraryInfo: &occommon.LibraryInfo{
Language: occommon.LibraryInfo_CPP,
ExporterVersion: "v1.2.0",
CoreLibraryVersion: "v2.0.1",
},
ServiceInfo: &occommon.ServiceInfo{
Name: "svcA",
},
Attributes: map[string]string{
"node-attr": "val1",
},
}
ocResource = &ocresource.Resource{
Type: "good-resource",
Labels: map[string]string{
"resource-attr": "val2",
},
}
resource = ocNodeResourceToInternal(ocNode, ocResource)
expectedAttrs := data.AttributesMap{
conventions.AttributeHostHostname: data.NewAttributeValueString("host1"),
conventions.OCAttributeProcessID: data.NewAttributeValueInt(123),
conventions.OCAttributeProcessStartTime: data.NewAttributeValueString("2020-02-11T20:26:00Z"),
conventions.AttributeLibraryLanguage: data.NewAttributeValueString("CPP"),
conventions.OCAttributeExporterVersion: data.NewAttributeValueString("v1.2.0"),
conventions.AttributeLibraryVersion: data.NewAttributeValueString("v2.0.1"),
conventions.AttributeServiceName: data.NewAttributeValueString("svcA"),
"node-attr": data.NewAttributeValueString("val1"),
conventions.OCAttributeResourceType: data.NewAttributeValueString("good-resource"),
"resource-attr": data.NewAttributeValueString("val2"),
}
assert.EqualValues(t, expectedAttrs, resource.Attributes())
// Make sure hard-coded fields override same-name values in Attributes.
// To do that add Attributes with same-name.
for k := range expectedAttrs {
// Set all except "attr1" which is not a hard-coded field to some bogus values.
if strings.Index(k, "-attr") < 0 {
ocNode.Attributes[k] = "this will be overridden 1"
}
}
ocResource.Labels[conventions.OCAttributeResourceType] = "this will be overridden 2"
// Convert again.
resource = ocNodeResourceToInternal(ocNode, ocResource)
// And verify that same-name attributes were ignored.
assert.EqualValues(t, expectedAttrs, resource.Attributes())
} | explode_data.jsonl/8735 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 965
} | [
2830,
3393,
46,
66,
1955,
4783,
1249,
11569,
1155,
353,
8840,
836,
8,
341,
50346,
1669,
17796,
1955,
4783,
1249,
11569,
27907,
11,
2092,
340,
6948,
12808,
6227,
1155,
11,
821,
7121,
4783,
1507,
5101,
692,
197,
509,
1955,
1669,
609,
50... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_FindReferences(t *testing.T) {
g := NewGomegaWithT(t)
ctx := context.Background()
test, err := makeTestResolver()
g.Expect(err).ToNot(HaveOccurred())
rg := createResourceGroup()
g.Expect(test.client.Create(ctx, rg)).To(Succeed())
account := createDummyResource()
ref := genruntime.ResourceReference{ARMID: "test"}
account.Spec.KeyVaultReference = &batch.KeyVaultReference{
Reference: ref,
}
g.Expect(test.client.Create(ctx, account)).To(Succeed())
refs, err := reflecthelpers.FindResourceReferences(&account.Spec)
g.Expect(err).ToNot(HaveOccurred())
g.Expect(refs).To(HaveLen(1))
g.Expect(refs).To(HaveKey(ref))
} | explode_data.jsonl/45826 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 240
} | [
2830,
3393,
95245,
31712,
1155,
353,
8840,
836,
8,
341,
3174,
1669,
1532,
38,
32696,
2354,
51,
1155,
340,
20985,
1669,
2266,
19047,
2822,
18185,
11,
1848,
1669,
1281,
2271,
18190,
741,
3174,
81893,
3964,
568,
1249,
2623,
83844,
12367,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestImgTest_Init_NoChangeListNorCommitHash_NonzeroExitCode(t *testing.T) {
unittest.MediumTest(t)
workDir := t.TempDir()
setupAuthWithGSUtil(t, workDir)
keysFile := filepath.Join(workDir, "keys.json")
require.NoError(t, ioutil.WriteFile(keysFile, []byte(`{"os": "Android"}`), 0644))
// Call imgtest init with the following flags. We expect it to fail because we need to provide
// a commit or CL info
ctx, output, exit := testContext(nil, nil, nil, nil)
env := imgTest{
corpus: "my_corpus",
instanceID: "my-instance",
keysFile: keysFile,
passFailStep: true,
workDir: workDir,
}
runUntilExit(t, func() {
env.Init(ctx)
})
outStr := output.String()
exit.AssertWasCalledWithCode(t, 1, outStr)
assert.Contains(t, outStr, `invalid configuration: field "gitHash", "commit_id", or "change_list_id" must be set`)
} | explode_data.jsonl/69524 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 337
} | [
2830,
3393,
13033,
2271,
15644,
36989,
4072,
852,
32663,
33441,
6370,
1604,
263,
14154,
15339,
2078,
1155,
353,
8840,
836,
8,
341,
20479,
14267,
1321,
23090,
2271,
1155,
692,
97038,
6184,
1669,
259,
65009,
6184,
741,
84571,
5087,
2354,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMapToSummary(t *testing.T) {
// given
executions := getExecutions()
// when
result := MapToSummary(executions)
// then - test mappings
for i := 0; i < len(executions); i++ {
assert.Equal(t, result[i].Id, executions[i].Id)
assert.Equal(t, result[i].Name, executions[i].Name)
assert.Equal(t, result[i].TestName, executions[i].TestName)
assert.Equal(t, result[i].TestType, executions[i].TestType)
assert.Equal(t, result[i].Status, executions[i].ExecutionResult.Status)
assert.Equal(t, result[i].StartTime, executions[i].StartTime)
assert.Equal(t, result[i].EndTime, executions[i].EndTime)
}
} | explode_data.jsonl/73395 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 237
} | [
2830,
3393,
2227,
1249,
19237,
1155,
353,
8840,
836,
8,
341,
197,
322,
2661,
198,
67328,
3977,
1669,
633,
10216,
3977,
2822,
197,
322,
979,
198,
9559,
1669,
5027,
1249,
19237,
46896,
3977,
692,
197,
322,
1221,
481,
1273,
44817,
198,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGenerateOmitTags(t *testing.T) {
o := &Options{OmitTagsList: []string{"test1"}}
o.generateOmitTags()
if _, ok := o.OmitTags["test1"]; !ok {
t.Error("expected map entry")
}
} | explode_data.jsonl/63660 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 80
} | [
2830,
3393,
31115,
46,
1763,
15930,
1155,
353,
8840,
836,
8,
1476,
22229,
1669,
609,
3798,
90,
46,
1763,
15930,
852,
25,
3056,
917,
4913,
1944,
16,
95642,
22229,
22019,
46,
1763,
15930,
741,
743,
8358,
5394,
1669,
297,
8382,
1763,
159... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestFetchMultipleKeyspaces(t *testing.T) {
service := compose.EnsureUp(t, "redis")
expectedKeyspaces := map[string]uint{
"foo": 0,
"bar": 1,
"baz": 2,
}
expectedEvents := len(expectedKeyspaces)
for name, keyspace := range expectedKeyspaces {
addEntry(t, service.Host(), name, keyspace)
}
config := getConfig(service.Host())
config["key.patterns"] = []map[string]interface{}{
{
"pattern": "foo",
"keyspace": 0,
},
{
"pattern": "bar",
// keyspace set to 1 in the host url
},
{
"pattern": "baz",
"keyspace": 2,
},
}
ms := mbtest.NewFetcher(t, config)
events, err := ms.FetchEvents()
assert.Len(t, err, 0)
assert.Len(t, events, expectedEvents)
for _, event := range events {
name := event.MetricSetFields["name"].(string)
expectedKeyspace, found := expectedKeyspaces[name]
if !assert.True(t, found, name+" not expected") {
continue
}
id := event.MetricSetFields["id"].(string)
assert.Equal(t, fmt.Sprintf("%d:%s", expectedKeyspace, name), id)
keyspace := event.ModuleFields["keyspace"].(mapstr.M)
keyspaceID := keyspace["id"].(string)
assert.Equal(t, fmt.Sprintf("db%d", expectedKeyspace), keyspaceID)
}
} | explode_data.jsonl/53888 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 483
} | [
2830,
3393,
20714,
32089,
8850,
27338,
1155,
353,
8840,
836,
8,
341,
52934,
1669,
30335,
22834,
19098,
2324,
1155,
11,
330,
21748,
5130,
42400,
8850,
27338,
1669,
2415,
14032,
60,
2496,
515,
197,
197,
1,
7975,
788,
220,
15,
345,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestHandshakeClientAES256GCMSHA384(t *testing.T) {
test := &clientTest{
name: "ECDHE-ECDSA-AES256-GCM-SHA384",
args: []string{"-cipher", "ECDHE-ECDSA-AES256-GCM-SHA384"},
cert: testECDSACertificate,
key: testECDSAPrivateKey,
}
runClientTestTLS12(t, test)
} | explode_data.jsonl/27696 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 123
} | [
2830,
3393,
2314,
29661,
2959,
69168,
17,
20,
21,
22863,
4826,
17020,
18,
23,
19,
1155,
353,
8840,
836,
8,
341,
18185,
1669,
609,
2972,
2271,
515,
197,
11609,
25,
330,
36,
6484,
1799,
12,
7498,
72638,
6691,
1570,
17,
20,
21,
12010,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRuleOffsetLimit(t *testing.T) {
common.Log.Debug("Entering function: %s", common.GetFunctionName())
sqls := []string{
"select c1,c2 from tbl where name=xx order by number limit 1 offset 2000",
"select c1,c2 from tbl where name=xx order by number limit 2000,1",
}
for _, sql := range sqls {
q, err := NewQuery4Audit(sql)
if err == nil {
rule := q.RuleOffsetLimit()
if rule.Item != "CLA.003" {
t.Error("Rule not match:", rule.Item, "Expect : CLA.003")
}
} else {
t.Error("sqlparser.Parse Error:", err)
}
}
common.Log.Debug("Exiting function: %s", common.GetFunctionName())
} | explode_data.jsonl/76747 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 241
} | [
2830,
3393,
11337,
6446,
16527,
1155,
353,
8840,
836,
8,
341,
83825,
5247,
20345,
445,
82867,
729,
25,
1018,
82,
497,
4185,
2234,
5152,
675,
2398,
30633,
82,
1669,
3056,
917,
515,
197,
197,
1,
1742,
272,
16,
10109,
17,
504,
21173,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestGatherClusterImageRegistry(t *testing.T) {
tests := []struct {
name string
inputObj *imageregistryv1.Config
evalOutput func(t *testing.T, obj *imageregistryv1.Config)
}{
{
name: "httpSecret",
inputObj: &imageregistryv1.Config{
ObjectMeta: metav1.ObjectMeta{
Name: "cluster",
},
Spec: imageregistryv1.ImageRegistrySpec{
HTTPSecret: "secret",
},
},
evalOutput: func(t *testing.T, obj *imageregistryv1.Config) {
if obj.Spec.HTTPSecret != "xxxxxx" {
t.Errorf("expected HTTPSecret anonymized, got %q", obj.Spec.HTTPSecret)
}
},
},
{
name: "s3",
inputObj: &imageregistryv1.Config{
ObjectMeta: metav1.ObjectMeta{
Name: "cluster",
},
Spec: imageregistryv1.ImageRegistrySpec{
Storage: imageregistryv1.ImageRegistryConfigStorage{
S3: &imageregistryv1.ImageRegistryConfigStorageS3{
Bucket: "foo",
Region: "bar",
RegionEndpoint: "point",
KeyID: "key",
},
},
},
},
evalOutput: func(t *testing.T, obj *imageregistryv1.Config) {
if obj.Spec.Storage.S3.Bucket != "xxx" {
t.Errorf("expected s3 bucket anonymized, got %q", obj.Spec.Storage.S3.Bucket)
}
if obj.Spec.Storage.S3.Region != "xxx" {
t.Errorf("expected s3 region anonymized, got %q", obj.Spec.Storage.S3.Region)
}
if obj.Spec.Storage.S3.RegionEndpoint != "xxxxx" {
t.Errorf("expected s3 region endpoint anonymized, got %q", obj.Spec.Storage.S3.RegionEndpoint)
}
if obj.Spec.Storage.S3.KeyID != "xxx" {
t.Errorf("expected s3 keyID anonymized, got %q", obj.Spec.Storage.S3.KeyID)
}
},
},
{
name: "azure",
inputObj: &imageregistryv1.Config{
ObjectMeta: metav1.ObjectMeta{
Name: "cluster",
},
Spec: imageregistryv1.ImageRegistrySpec{
Storage: imageregistryv1.ImageRegistryConfigStorage{
Azure: &imageregistryv1.ImageRegistryConfigStorageAzure{
AccountName: "account",
Container: "container",
},
},
},
},
evalOutput: func(t *testing.T, obj *imageregistryv1.Config) {
if obj.Spec.Storage.Azure.AccountName != "xxxxxxx" {
t.Errorf("expected azure account name anonymized, got %q", obj.Spec.Storage.Azure.AccountName)
}
if obj.Spec.Storage.Azure.Container == "xxxxxxx" {
t.Errorf("expected azure container anonymized, got %q", obj.Spec.Storage.Azure.Container)
}
},
},
{
name: "gcs",
inputObj: &imageregistryv1.Config{
ObjectMeta: metav1.ObjectMeta{
Name: "cluster",
},
Spec: imageregistryv1.ImageRegistrySpec{
Storage: imageregistryv1.ImageRegistryConfigStorage{
GCS: &imageregistryv1.ImageRegistryConfigStorageGCS{
Bucket: "bucket",
Region: "region",
ProjectID: "foo",
KeyID: "bar",
},
},
},
},
evalOutput: func(t *testing.T, obj *imageregistryv1.Config) {
if obj.Spec.Storage.GCS.Bucket != "xxxxxx" {
t.Errorf("expected gcs bucket anonymized, got %q", obj.Spec.Storage.GCS.Bucket)
}
if obj.Spec.Storage.GCS.ProjectID != "xxx" {
t.Errorf("expected gcs projectID endpoint anonymized, got %q", obj.Spec.Storage.GCS.ProjectID)
}
if obj.Spec.Storage.GCS.KeyID != "xxx" {
t.Errorf("expected gcs keyID anonymized, got %q", obj.Spec.Storage.GCS.KeyID)
}
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
client := imageregistryfake.NewSimpleClientset(test.inputObj)
gatherer := &Gatherer{registryClient: client.ImageregistryV1()}
records, errs := GatherClusterImageRegistry(gatherer)()
if len(errs) > 0 {
t.Errorf("unexpected errors: %#v", errs)
return
}
if numRecords := len(records); numRecords != 1 {
t.Errorf("expected one record, got %d", numRecords)
return
}
if expectedRecordName := "config/imageregistry"; records[0].Name != expectedRecordName {
t.Errorf("expected %q record name, got %q", expectedRecordName, records[0].Name)
return
}
item := records[0].Item
itemBytes, err := item.Marshal(context.TODO())
if err != nil {
t.Fatalf("unable to marshal config: %v", err)
}
var output imageregistryv1.Config
obj, _, err := registrySerializer.LegacyCodec(imageregistryv1.SchemeGroupVersion).Decode(itemBytes, nil, &output)
if err != nil {
t.Fatalf("failed to decode object: %v", err)
}
test.evalOutput(t, obj.(*imageregistryv1.Config))
})
}
} | explode_data.jsonl/32587 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2030
} | [
2830,
3393,
38,
1856,
28678,
1906,
15603,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
981,
914,
198,
197,
22427,
5261,
256,
353,
28016,
52633,
4944,
85,
16,
10753,
198,
197,
93413,
5097,
2915,
1155,
353,
88... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestComponentNameFromURL(t *testing.T) {
tests := []struct {
rawurl string
expectedName string
}{
{
rawurl: "/hello",
expectedName: "hello",
},
{
rawurl: "/hello?int=42",
expectedName: "hello",
},
{
rawurl: "/hello/world",
expectedName: "hello",
},
{
rawurl: "hello",
expectedName: "hello",
},
{
rawurl: "test://hello",
},
{
rawurl: "compo://",
},
{
rawurl: "http://www.github.com",
},
}
for _, test := range tests {
if name := ComponentNameFromURLString(test.rawurl); name != test.expectedName {
t.Errorf(`name is not "%s": "%s"`, test.expectedName, name)
}
}
} | explode_data.jsonl/36073 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 321
} | [
2830,
3393,
2189,
675,
3830,
3144,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
76559,
1085,
981,
914,
198,
197,
42400,
675,
914,
198,
197,
59403,
197,
197,
515,
298,
76559,
1085,
25,
981,
3521,
14990,
756,
298,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAuthenticatedHeaders(t *testing.T) {
p := &gophercloud.ProviderClient{
TokenID: "1234",
}
expected := map[string]string{"X-Auth-Token": "1234"}
actual := p.AuthenticatedHeaders()
th.CheckDeepEquals(t, expected, actual)
} | explode_data.jsonl/5887 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 87
} | [
2830,
3393,
26712,
10574,
1155,
353,
8840,
836,
8,
341,
3223,
1669,
609,
70,
16940,
12361,
36208,
2959,
515,
197,
33299,
915,
25,
330,
16,
17,
18,
19,
756,
197,
532,
42400,
1669,
2415,
14032,
30953,
4913,
55,
12,
5087,
89022,
788,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPushExporterServicer_Submit_InvalidLabel(t *testing.T) {
// Submitting a metric with invalid labelnames should not include that metric
srv, exp := makeTestCustomPushExporter(t)
mf := tests.MakeTestMetricFamily(prometheus_models.MetricType_GAUGE, 5, sampleLabels)
extraMetric := tests.MakePromoGauge(10)
mf.Metric[2] = &extraMetric
mf.Metric[2].Label = append(mf.Metric[2].Label, &prometheus_models.LabelPair{Name: tests.MakeStrPtr("1"), Value: tests.MakeStrPtr("badLabelName")})
mc := exporters.MetricAndContext{
Family: mf,
Context: sampleGatewayContext,
}
err := exp.Submit([]exporters.MetricAndContext{mc})
assert.NoError(t, err)
assert.Equal(t, 1, len(srv.FamiliesByName))
for _, fam := range srv.FamiliesByName {
assert.Equal(t, 4, len(fam.Metric))
}
// If all metrics are invalid, the family should not be submitted
srv, exp = makeTestCustomPushExporter(t)
mf = tests.MakeTestMetricFamily(prometheus_models.MetricType_GAUGE, 1, sampleLabels)
badMetric := tests.MakePromoGauge(10)
mf.Metric[0] = &badMetric
mf.Metric[0].Label = append(mf.Metric[0].Label, &prometheus_models.LabelPair{Name: tests.MakeStrPtr("1"), Value: tests.MakeStrPtr("badLabelName")})
mc = exporters.MetricAndContext{
Family: mf,
Context: sampleGatewayContext,
}
err = exp.Submit([]exporters.MetricAndContext{mc})
assert.NoError(t, err)
assert.Equal(t, 0, len(srv.FamiliesByName))
} | explode_data.jsonl/61839 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 538
} | [
2830,
3393,
16644,
88025,
39159,
12999,
36359,
1763,
62,
7928,
2476,
1155,
353,
8840,
836,
8,
341,
197,
322,
29170,
1280,
264,
18266,
448,
8318,
2383,
11400,
1265,
537,
2924,
429,
18266,
198,
1903,
10553,
11,
1343,
1669,
1281,
2271,
102... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestAppendSuccessful(t *testing.T) {
m := NewMeasurement("google.de", 443, 5, 3, 1)
m.appendSuccessful(&TCPResponse{
Host: m.Host,
Port: m.Port,
Latency: 21.50,
Timeout: m.Timeout,
Sequence: 0,
})
if m.count != 1 {
t.Fatalf("want: %d, got %d", 1, m.count)
}
if m.successful != 1 {
t.Fatalf("want: %d, got %d", 1, m.successful)
}
if m.failed != 0 {
t.Fatalf("want: %d, got %d", 0, m.failed)
}
if len(m.data) != 1 {
t.Fatalf("want: %d, got %d", 1, len(m.data))
}
} | explode_data.jsonl/2469 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 252
} | [
2830,
3393,
23877,
36374,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
1532,
76548,
445,
17485,
2285,
497,
220,
19,
19,
18,
11,
220,
20,
11,
220,
18,
11,
220,
16,
692,
2109,
2057,
36374,
2099,
49896,
2582,
515,
197,
197,
9296,
25,
25... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestMapProxy_ManyPutGet(t *testing.T) {
for i := 0; i < 100; i++ {
testKey := "testingKey" + strconv.Itoa(i)
testValue := "testingValue" + strconv.Itoa(i)
mp.Put(testKey, testValue)
res, err := mp.Get(testKey)
AssertEqualf(t, err, res, testValue, "get returned a wrong value")
}
mp.Clear()
} | explode_data.jsonl/56964 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 130
} | [
2830,
3393,
2227,
16219,
1245,
3767,
19103,
1949,
1155,
353,
8840,
836,
8,
341,
2023,
600,
1669,
220,
15,
26,
600,
366,
220,
16,
15,
15,
26,
600,
1027,
341,
197,
18185,
1592,
1669,
330,
8840,
1592,
1,
488,
33317,
64109,
1956,
340,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestAuthPostBadPassword(t *testing.T) {
t.Parallel()
setupMore := func(h *testHarness) *testHarness {
h.bodyReader.Return = mocks.Values{
PID: "test@test.com",
Password: "world hello",
}
h.storer.Users["test@test.com"] = &mocks.User{
Email: "test@test.com",
Password: "$2a$10$IlfnqVyDZ6c1L.kaA/q3bu1nkAC6KukNUsizvlzay1pZPXnX2C9Ji", // hello world
}
return h
}
t.Run("normal", func(t *testing.T) {
t.Parallel()
h := setupMore(testSetup())
r := mocks.Request("POST")
resp := httptest.NewRecorder()
w := h.ab.NewResponse(resp)
var afterCalled bool
h.ab.Events.After(authboss.EventAuthFail, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) {
afterCalled = true
return false, nil
})
if err := h.auth.LoginPost(w, r); err != nil {
t.Error(err)
}
if resp.Code != 200 {
t.Error("wanted a 200:", resp.Code)
}
if h.responder.Data[authboss.DataErr] != "Invalid Credentials" {
t.Error("wrong error:", h.responder.Data)
}
if _, ok := h.session.ClientValues[authboss.SessionKey]; ok {
t.Error("user should not be logged in")
}
if !afterCalled {
t.Error("after should have been called")
}
})
t.Run("handledAfter", func(t *testing.T) {
t.Parallel()
h := setupMore(testSetup())
r := mocks.Request("POST")
resp := httptest.NewRecorder()
w := h.ab.NewResponse(resp)
var afterCalled bool
h.ab.Events.After(authboss.EventAuthFail, func(w http.ResponseWriter, r *http.Request, handled bool) (bool, error) {
w.WriteHeader(http.StatusTeapot)
afterCalled = true
return true, nil
})
if err := h.auth.LoginPost(w, r); err != nil {
t.Error(err)
}
if h.responder.Status != 0 {
t.Error("responder should not have been called to give a status")
}
if _, ok := h.session.ClientValues[authboss.SessionKey]; ok {
t.Error("user should not be logged in")
}
if !afterCalled {
t.Error("after should have been called")
}
if resp.Code != http.StatusTeapot {
t.Error("should have left the response alone once teapot was sent")
}
})
} | explode_data.jsonl/3467 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 879
} | [
2830,
3393,
5087,
4133,
17082,
4876,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
84571,
7661,
1669,
2915,
3203,
353,
1944,
74248,
8,
353,
1944,
74248,
341,
197,
9598,
5079,
5062,
46350,
284,
68909,
35145,
515,
298,
10025,
915... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDeleteDeploymentConfig(t *testing.T) {
mockRegistry := test.NewDeploymentConfigRegistry()
storage := REST{registry: mockRegistry}
channel, err := storage.Delete(kapi.NewDefaultContext(), "foo")
if channel == nil {
t.Error("Unexpected nil channel")
}
if err != nil {
t.Errorf("Unexpected non-nil error: %#v", err)
}
select {
case result := <-channel:
status, ok := result.Object.(*kapi.Status)
if !ok {
t.Errorf("Expected status type, got: %#v", result)
}
if status.Status != kapi.StatusSuccess {
t.Errorf("Expected status=success, got: %#v", status)
}
case <-time.After(50 * time.Millisecond):
t.Errorf("Timed out waiting for result")
}
} | explode_data.jsonl/66993 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 255
} | [
2830,
3393,
6435,
75286,
2648,
1155,
353,
8840,
836,
8,
341,
77333,
15603,
1669,
1273,
7121,
75286,
2648,
15603,
741,
197,
16172,
1669,
25414,
90,
29172,
25,
7860,
15603,
532,
71550,
11,
1848,
1669,
5819,
18872,
5969,
2068,
7121,
3675,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestWebServiceDescriber_URI(t *testing.T) {
const (
testApp = "phonetool"
testEnv = "test"
testSvc = "jobs"
testEnvSubdomain = "test.phonetool.com"
testEnvLBDNSName = "http://abc.us-west-1.elb.amazonaws.com"
testSvcPath = "*"
)
mockErr := errors.New("some error")
testCases := map[string]struct {
setupMocks func(mocks webSvcDescriberMocks)
wantedURI string
wantedError error
}{
"fail to get output of environment stack": {
setupMocks: func(m webSvcDescriberMocks) {
gomock.InOrder(
m.svcDescriber.EXPECT().EnvOutputs().Return(nil, mockErr),
)
},
wantedError: fmt.Errorf("get output for environment test: some error"),
},
"fail to get parameters of service stack": {
setupMocks: func(m webSvcDescriberMocks) {
gomock.InOrder(
m.svcDescriber.EXPECT().EnvOutputs().Return(map[string]string{
stack.EnvOutputPublicLoadBalancerDNSName: testEnvLBDNSName,
stack.EnvOutputSubdomain: testEnvSubdomain,
}, nil),
m.svcDescriber.EXPECT().Params().Return(nil, mockErr),
)
},
wantedError: fmt.Errorf("get parameters for service jobs: some error"),
},
"https web service": {
setupMocks: func(m webSvcDescriberMocks) {
gomock.InOrder(
m.svcDescriber.EXPECT().EnvOutputs().Return(map[string]string{
stack.EnvOutputPublicLoadBalancerDNSName: testEnvLBDNSName,
stack.EnvOutputSubdomain: testEnvSubdomain,
}, nil),
m.svcDescriber.EXPECT().Params().Return(map[string]string{
stack.LBWebServiceRulePathParamKey: testSvcPath,
}, nil),
)
},
wantedURI: "https://jobs.test.phonetool.com",
},
"http web service": {
setupMocks: func(m webSvcDescriberMocks) {
gomock.InOrder(
m.svcDescriber.EXPECT().EnvOutputs().Return(map[string]string{
stack.EnvOutputPublicLoadBalancerDNSName: testEnvLBDNSName,
}, nil),
m.svcDescriber.EXPECT().Params().Return(map[string]string{
stack.LBWebServiceRulePathParamKey: testSvcPath,
}, nil),
)
},
wantedURI: "http://http://abc.us-west-1.elb.amazonaws.com/*",
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
// GIVEN
ctrl := gomock.NewController(t)
defer ctrl.Finish()
mockSvcDescriber := mocks.NewMocksvcDescriber(ctrl)
mocks := webSvcDescriberMocks{
svcDescriber: mockSvcDescriber,
}
tc.setupMocks(mocks)
d := &WebServiceDescriber{
service: &config.Service{
App: testApp,
Name: testSvc,
},
svcDescriber: mockSvcDescriber,
initServiceDescriber: func(string) error { return nil },
}
// WHEN
actual, err := d.URI(testEnv)
// THEN
if tc.wantedError != nil {
require.EqualError(t, err, tc.wantedError.Error())
} else {
require.Nil(t, err)
require.Equal(t, tc.wantedURI, actual)
}
})
}
} | explode_data.jsonl/42399 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1335
} | [
2830,
3393,
94388,
62664,
652,
23116,
1155,
353,
8840,
836,
8,
341,
4777,
2399,
197,
18185,
2164,
688,
284,
330,
51701,
295,
1749,
698,
197,
18185,
14359,
688,
284,
330,
1944,
698,
197,
18185,
92766,
688,
284,
330,
31105,
698,
197,
18... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestShutDown(t *testing.T) {
store, clean := testkit.CreateMockStore(t)
defer clean()
cc := &clientConn{}
se, err := session.CreateSession4Test(store)
require.NoError(t, err)
tc := &TiDBContext{Session: se}
cc.setCtx(tc)
// set killed flag
cc.status = connStatusShutdown
// assert ErrQueryInterrupted
err = cc.handleQuery(context.Background(), "select 1")
require.Equal(t, executor.ErrQueryInterrupted, err)
} | explode_data.jsonl/73153 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 157
} | [
2830,
3393,
2016,
332,
4454,
1155,
353,
8840,
836,
8,
341,
57279,
11,
4240,
1669,
1273,
8226,
7251,
11571,
6093,
1155,
340,
16867,
4240,
2822,
63517,
1669,
609,
2972,
9701,
16094,
84686,
11,
1848,
1669,
3797,
7251,
5283,
19,
2271,
31200... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReceiversBuilder_BuildCustom(t *testing.T) {
factories := createExampleFactories()
tests := []struct {
dataType string
shouldFail bool
}{
{
dataType: "logs",
shouldFail: false,
},
{
dataType: "nosuchdatatype",
shouldFail: true,
},
}
for _, test := range tests {
t.Run(test.dataType, func(t *testing.T) {
dataType := test.dataType
cfg := createExampleConfig(dataType)
// Build the pipeline
allExporters, err := NewExportersBuilder(zap.NewNop(), componenttest.TestApplicationStartInfo(), cfg, factories.Exporters).Build()
if test.shouldFail {
assert.Error(t, err)
return
}
assert.NoError(t, err)
pipelineProcessors, err := NewPipelinesBuilder(zap.NewNop(), componenttest.TestApplicationStartInfo(), cfg, allExporters, factories.Processors).Build()
assert.NoError(t, err)
receivers, err := NewReceiversBuilder(zap.NewNop(), componenttest.TestApplicationStartInfo(), cfg, pipelineProcessors, factories.Receivers).Build()
assert.NoError(t, err)
require.NotNil(t, receivers)
receiver := receivers[cfg.Receivers["examplereceiver"]]
// Ensure receiver has its fields correctly populated.
require.NotNil(t, receiver)
assert.NotNil(t, receiver.receiver)
// Compose the list of created exporters.
exporterNames := []string{"exampleexporter"}
var exporters []*builtExporter
for _, name := range exporterNames {
// Ensure exporter is created.
exp := allExporters[cfg.Exporters[name]]
require.NotNil(t, exp)
exporters = append(exporters, exp)
}
// Send Data via receiver and verify that all exporters of the pipeline receive it.
// First check that there are no traces in the exporters yet.
for _, exporter := range exporters {
consumer := exporter.le.(*componenttest.ExampleExporterConsumer)
require.Equal(t, len(consumer.Logs), 0)
}
// Send one data.
log := pdata.Logs{}
producer := receiver.receiver.(*componenttest.ExampleReceiverProducer)
producer.LogConsumer.ConsumeLogs(context.Background(), log)
// Now verify received data.
for _, name := range exporterNames {
// Check that the data is received by exporter.
exporter := allExporters[cfg.Exporters[name]]
// Validate exported data.
consumer := exporter.le.(*componenttest.ExampleExporterConsumer)
require.Equal(t, 1, len(consumer.Logs))
assert.EqualValues(t, log, consumer.Logs[0])
}
})
}
} | explode_data.jsonl/50903 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 907
} | [
2830,
3393,
693,
346,
1945,
3297,
96686,
10268,
1155,
353,
8840,
836,
8,
341,
1166,
52893,
1669,
1855,
13314,
17417,
2433,
2822,
78216,
1669,
3056,
1235,
341,
197,
59254,
256,
914,
198,
197,
197,
5445,
19524,
1807,
198,
197,
59403,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestGetMetricsListEmpty(t *testing.T) {
cache := cache.New()
path, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(path)
carbonserver := CarbonserverListener{
whisperData: path,
cacheGet: cache.Get,
metrics: &metricStruct{},
}
metrics, err := carbonserver.getMetricsList()
if err != errMetricsListEmpty {
t.Errorf("err: '%v', expected: '%v'", err, errMetricsListEmpty)
}
if metrics != nil {
t.Errorf("metrics: '%v', expected: 'nil'", err)
}
} | explode_data.jsonl/44293 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 217
} | [
2830,
3393,
1949,
27328,
852,
3522,
1155,
353,
8840,
836,
8,
341,
52680,
1669,
6500,
7121,
741,
26781,
11,
1848,
1669,
43144,
65009,
6184,
19814,
14676,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
532,
16867,
2643,
8442... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestConvertFromMap(t *testing.T) {
// Using the same inputs from TestConvertToMap, test the reverse mapping.
for _, test := range converterMapTestInputs {
if test.expected != nil {
testConvertFromMap(t, test)
}
}
} | explode_data.jsonl/20447 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 80
} | [
2830,
3393,
12012,
3830,
2227,
1155,
353,
8840,
836,
8,
341,
197,
322,
12091,
279,
1852,
11127,
504,
3393,
12012,
1249,
2227,
11,
1273,
279,
9931,
12731,
624,
2023,
8358,
1273,
1669,
2088,
27058,
2227,
2271,
31946,
341,
197,
743,
1273,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 3 |
func TestAccAWSDBInstance_kmsKey(t *testing.T) {
var v rds.DBInstance
keyRegex := regexp.MustCompile("^arn:aws:kms:")
ri := rand.New(rand.NewSource(time.Now().UnixNano())).Int()
config := fmt.Sprintf(testAccAWSDBInstanceConfigKmsKeyId, ri)
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSDBInstanceDestroy,
Steps: []resource.TestStep{
{
Config: config,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSDBInstanceExists("aws_db_instance.bar", &v),
testAccCheckAWSDBInstanceAttributes(&v),
resource.TestMatchResourceAttr(
"aws_db_instance.bar", "kms_key_id", keyRegex),
),
},
},
})
} | explode_data.jsonl/33918 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 309
} | [
2830,
3393,
14603,
36136,
3506,
2523,
4698,
1011,
1592,
1155,
353,
8840,
836,
8,
341,
2405,
348,
435,
5356,
22537,
2523,
198,
23634,
32464,
1669,
41877,
98626,
48654,
1885,
25,
8635,
57071,
1011,
2974,
692,
197,
461,
1669,
10382,
7121,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParseTag(t *testing.T) {
// Disable strict mode for the duration of this test so that we can test
// some malformed tags
Strict = false
defer func() {
Strict = true
}()
testCases := []struct {
tag string
wantName string
wantOptions *formOptions
}{
{"id", "id", nil},
{"id,empty", "id", &formOptions{Empty: true}},
{"id,indexed", "id", &formOptions{IndexedArray: true}},
{"id,zero", "id", &formOptions{Zero: true}},
// invalid invocations
{"id,", "id", nil},
{"id,,", "id", nil},
{"id,foo", "id", nil},
{"id,foo=bar", "id", nil},
}
for _, tc := range testCases {
t.Run(tc.tag, func(t *testing.T) {
name, options := parseTag(tc.tag)
assert.Equal(t, tc.wantName, name)
assert.Equal(t, tc.wantOptions, options)
})
}
} | explode_data.jsonl/30327 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 332
} | [
2830,
3393,
14463,
5668,
1155,
353,
8840,
836,
8,
341,
197,
322,
28027,
7304,
3856,
369,
279,
8090,
315,
419,
1273,
773,
429,
582,
646,
1273,
198,
197,
322,
1045,
79250,
9492,
198,
197,
41857,
284,
895,
198,
16867,
2915,
368,
341,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestKeyValueStringReturnsFormattedString(t *testing.T) {
var s KeyValueString = map[string]string{"KEY1": "AB", "KEY2": "CD"}
output := s.String()
// Both strings can be valid output, as order is not maintained in map.
validOutput1 := "KEY1=AB,KEY2=CD"
validOutput2 := "KEY2=CD,KEY1=AB"
expected := []string{validOutput1, validOutput2}
assert.Contains(t, expected, output)
} | explode_data.jsonl/64729 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 134
} | [
2830,
3393,
72082,
703,
16446,
43500,
703,
1155,
353,
8840,
836,
8,
341,
2405,
274,
98620,
703,
284,
2415,
14032,
30953,
4913,
4784,
16,
788,
330,
1867,
497,
330,
4784,
17,
788,
330,
6484,
16707,
21170,
1669,
274,
6431,
741,
197,
322,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCache(t *testing.T) {
now, _ := time.Parse(time.UnixDate, "Fri Apr 21 10:51:21 BST 2017")
utc := now.UTC()
c, crr := newTestCache(maxTTL)
for _, tc := range cacheTestCases {
m := tc.in.Msg()
m = cacheMsg(m, tc)
state := request.Request{W: nil, Req: m}
mt, _ := response.Typify(m, utc)
valid, k := key(state.Name(), m, mt, state.Do())
if valid {
crr.set(m, k, mt, c.pttl)
}
i, _ := c.get(time.Now().UTC(), state, "dns://:53")
ok := i != nil
if ok != tc.shouldCache {
t.Errorf("Cached message that should not have been cached: %s", state.Name())
continue
}
if ok {
resp := i.toMsg(m, time.Now().UTC())
if err := test.Header(tc.Case, resp); err != nil {
t.Error(err)
continue
}
if err := test.Section(tc.Case, test.Answer, resp.Answer); err != nil {
t.Error(err)
}
if err := test.Section(tc.Case, test.Ns, resp.Ns); err != nil {
t.Error(err)
}
if err := test.Section(tc.Case, test.Extra, resp.Extra); err != nil {
t.Error(err)
}
}
}
} | explode_data.jsonl/39108 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 471
} | [
2830,
3393,
8233,
1155,
353,
8840,
836,
8,
341,
80922,
11,
716,
1669,
882,
8937,
9730,
10616,
941,
1916,
11,
330,
53884,
5076,
220,
17,
16,
220,
16,
15,
25,
20,
16,
25,
17,
16,
43541,
220,
17,
15,
16,
22,
1138,
197,
28355,
1669,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func Test_parseEntryPointsConfiguration(t *testing.T) {
testCases := []struct {
name string
value string
expectedResult map[string]string
}{
{
name: "all parameters",
value: "Name:foo " +
"Address::8000 " +
"TLS:goo,gii " +
"TLS " +
"CA:car " +
"CA.Optional:true " +
"Redirect.EntryPoint:https " +
"Redirect.Regex:http://localhost/(.*) " +
"Redirect.Replacement:http://mydomain/$1 " +
"Redirect.Permanent:true " +
"Compress:true " +
"WhiteListSourceRange:10.42.0.0/16,152.89.1.33/32,afed:be44::/16 " +
"ProxyProtocol.TrustedIPs:192.168.0.1 " +
"ForwardedHeaders.TrustedIPs:10.0.0.3/24,20.0.0.3/24 " +
"Auth.Basic.Users:test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/,test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0 " +
"Auth.Digest.Users:test:traefik:a2688e031edb4be6a3797f3882655c05,test2:traefik:518845800f9e2bfb1f1f740ec24f074e " +
"Auth.HeaderField:X-WebAuth-User " +
"Auth.Forward.Address:https://authserver.com/auth " +
"Auth.Forward.TrustForwardHeader:true " +
"Auth.Forward.TLS.CA:path/to/local.crt " +
"Auth.Forward.TLS.CAOptional:true " +
"Auth.Forward.TLS.Cert:path/to/foo.cert " +
"Auth.Forward.TLS.Key:path/to/foo.key " +
"Auth.Forward.TLS.InsecureSkipVerify:true ",
expectedResult: map[string]string{
"address": ":8000",
"auth_basic_users": "test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/,test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0",
"auth_digest_users": "test:traefik:a2688e031edb4be6a3797f3882655c05,test2:traefik:518845800f9e2bfb1f1f740ec24f074e",
"auth_forward_address": "https://authserver.com/auth",
"auth_forward_tls_ca": "path/to/local.crt",
"auth_forward_tls_caoptional": "true",
"auth_forward_tls_cert": "path/to/foo.cert",
"auth_forward_tls_insecureskipverify": "true",
"auth_forward_tls_key": "path/to/foo.key",
"auth_forward_trustforwardheader": "true",
"auth_headerfield": "X-WebAuth-User",
"ca": "car",
"ca_optional": "true",
"compress": "true",
"forwardedheaders_trustedips": "10.0.0.3/24,20.0.0.3/24",
"name": "foo",
"proxyprotocol_trustedips": "192.168.0.1",
"redirect_entrypoint": "https",
"redirect_permanent": "true",
"redirect_regex": "http://localhost/(.*)",
"redirect_replacement": "http://mydomain/$1",
"tls": "goo,gii",
"tls_acme": "TLS",
"whitelistsourcerange": "10.42.0.0/16,152.89.1.33/32,afed:be44::/16",
},
},
{
name: "compress on",
value: "name:foo Compress:on",
expectedResult: map[string]string{
"name": "foo",
"compress": "on",
},
},
{
name: "TLS",
value: "Name:foo TLS:goo TLS",
expectedResult: map[string]string{
"name": "foo",
"tls": "goo",
"tls_acme": "TLS",
},
},
}
for _, test := range testCases {
test := test
t.Run(test.name, func(t *testing.T) {
t.Parallel()
conf := parseEntryPointsConfiguration(test.value)
assert.Len(t, conf, len(test.expectedResult))
assert.Equal(t, test.expectedResult, conf)
})
}
} | explode_data.jsonl/49638 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1847
} | [
2830,
3393,
21039,
5874,
11411,
7688,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
1843,
914,
198,
197,
16309,
688,
914,
198,
197,
42400,
2077,
2415,
14032,
30953,
198,
197,
59403,
197,
197,
515,
298,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDeprecatedProperties(t *testing.T) {
dict, err := ParseYAML([]byte(`
version: "3"
services:
web:
image: web
container_name: web
db:
image: db
container_name: db
expose: ["5434"]
`))
assert.NoError(t, err)
configDetails := buildConfigDetails(dict)
_, err = Load(configDetails)
assert.NoError(t, err)
deprecated := GetDeprecatedProperties(configDetails)
assert.Equal(t, 2, len(deprecated))
assert.Contains(t, deprecated, "container_name")
assert.Contains(t, deprecated, "expose")
} | explode_data.jsonl/16393 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 199
} | [
2830,
3393,
51344,
7903,
1155,
353,
8840,
836,
8,
341,
2698,
849,
11,
1848,
1669,
14775,
56,
31102,
10556,
3782,
61528,
4366,
25,
330,
18,
698,
12779,
510,
220,
3482,
510,
262,
2168,
25,
3482,
198,
262,
5476,
1269,
25,
3482,
198,
22... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_existingVariable_nonExistent(t *testing.T) {
t.Run("Variable does not exist", func(t *testing.T) {
expectedBool := false
key := "place_holder"
_, exists := os.LookupEnv(key)
if exists != expectedBool {
t.Errorf("Should be:`%v` got:`%v`", expectedBool, exists)
}
})
} | explode_data.jsonl/11982 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 119
} | [
2830,
3393,
62630,
7827,
21637,
840,
18128,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
7827,
1558,
537,
3000,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
42400,
11233,
1669,
895,
198,
197,
23634,
1669,
330,
2007,
50788,
698,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestReconcileWithConditionChecks(t *testing.T) {
names.TestingSeed()
prName := "test-pipeline-run"
conditions := []*v1alpha1.Condition{
tb.Condition("cond-1", "foo", tb.ConditionSpec(
tb.ConditionSpecCheck("", "foo", tb.Args("bar")),
)),
tb.Condition("cond-2", "foo", tb.ConditionSpec(
tb.ConditionSpecCheck("", "foo", tb.Args("bar")),
)),
}
ps := []*v1alpha1.Pipeline{tb.Pipeline("test-pipeline", "foo", tb.PipelineSpec(
tb.PipelineTask("hello-world-1", "hello-world",
tb.PipelineTaskCondition("cond-1"),
tb.PipelineTaskCondition("cond-2")),
))}
prs := []*v1alpha1.PipelineRun{tb.PipelineRun(prName, "foo",
tb.PipelineRunAnnotation("PipelineRunAnnotation", "PipelineRunValue"),
tb.PipelineRunSpec("test-pipeline",
tb.PipelineRunServiceAccount("test-sa"),
),
)}
ts := []*v1alpha1.Task{tb.Task("hello-world", "foo")}
d := test.Data{
PipelineRuns: prs,
Pipelines: ps,
Tasks: ts,
Conditions: conditions,
}
testAssets, cancel := getPipelineRunController(t, d)
defer cancel()
c := testAssets.Controller
clients := testAssets.Clients
err := c.Reconciler.Reconcile(context.Background(), "foo/"+prName)
if err != nil {
t.Errorf("Did not expect to see error when reconciling completed PipelineRun but saw %s", err)
}
// Check that the PipelineRun was reconciled correctly
_, err = clients.Pipeline.Tekton().PipelineRuns("foo").Get(prName, metav1.GetOptions{})
if err != nil {
t.Fatalf("Somehow had error getting completed reconciled run out of fake client: %s", err)
}
ccNameBase := prName + "-hello-world-1-9l9zj"
expectedConditionChecks := []*v1alpha1.TaskRun{
makeExpectedTr("cond-1", ccNameBase+"-cond-1-mz4c7"),
makeExpectedTr("cond-2", ccNameBase+"-cond-2-mssqb"),
}
// Check that the expected TaskRun was created
condCheck0 := clients.Pipeline.Actions()[0].(ktesting.CreateAction).GetObject().(*v1alpha1.TaskRun)
condCheck1 := clients.Pipeline.Actions()[1].(ktesting.CreateAction).GetObject().(*v1alpha1.TaskRun)
if condCheck0 == nil || condCheck1 == nil {
t.Errorf("Expected two ConditionCheck TaskRuns to be created, but it wasn't.")
}
actual := []*v1alpha1.TaskRun{condCheck0, condCheck1}
if d := cmp.Diff(actual, expectedConditionChecks); d != "" {
t.Errorf("expected to see 2 ConditionCheck TaskRuns created. Diff %s", d)
}
} | explode_data.jsonl/81298 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 931
} | [
2830,
3393,
693,
40446,
457,
2354,
10547,
49820,
1155,
353,
8840,
836,
8,
341,
93940,
8787,
287,
41471,
741,
25653,
675,
1669,
330,
1944,
2268,
8790,
22973,
698,
197,
16495,
1669,
29838,
85,
16,
7141,
16,
75134,
515,
197,
62842,
75134,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestCase11(t *testing.T) {
golden := `v=0
o=- 3331435948 1116907222000 IN IP4 192.168.2.109
s=Session
c=IN IP4 192.168.2.109
t=0 0
a=range:npt=now-
a=control:*
m=video 0 RTP/AVP 96
a=control:trackID=0
a=rtpmap:96 H264/90000
a=fmtp:96 ;packetization-mode=1;sprop-parameter-sets=Z00AKpY1QPAET8s3AQEBQAABwgAAV+Qh,aO4xsg==
b=AS:5000
`
golden = strings.ReplaceAll(golden, "\n", "\r\n")
ctx, err := ParseSdp2LogicContext([]byte(golden))
assert.Equal(t, nil, err)
_ = ctx
} | explode_data.jsonl/55607 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 243
} | [
2830,
30573,
16,
16,
1155,
353,
8840,
836,
8,
341,
3174,
813,
268,
1669,
1565,
85,
28,
15,
198,
78,
10829,
220,
18,
18,
18,
16,
19,
18,
20,
24,
19,
23,
220,
16,
16,
16,
21,
24,
15,
22,
17,
17,
17,
15,
15,
15,
1964,
6790,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIsSignatureContractBadWrongPush(t *testing.T) {
prog := make([]byte, 35)
prog[0] = byte(PUSHBYTES32)
prog[33] = byte(NOP)
prog[34] = byte(CHECKSIG)
assert.Equal(t, false, IsSignatureContract(prog))
} | explode_data.jsonl/40584 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
3872,
25088,
14067,
17082,
29185,
16644,
1155,
353,
8840,
836,
8,
341,
197,
32992,
1669,
1281,
10556,
3782,
11,
220,
18,
20,
340,
197,
32992,
58,
15,
60,
284,
4922,
5304,
19518,
97849,
18,
17,
340,
197,
32992,
58,
18,
18... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBlockProfile(t *testing.T) {
type TestCase struct {
name string
f func()
re string
}
tests := [...]TestCase{
{"chan recv", blockChanRecv, `
[0-9]+ [0-9]+ @( 0x[[:xdigit:]]+)+
# 0x[0-9a-f]+ runtime\.chanrecv1\+0x[0-9a-f]+ .*/src/runtime/chan.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.blockChanRecv\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.TestBlockProfile\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
`},
{"chan send", blockChanSend, `
[0-9]+ [0-9]+ @( 0x[[:xdigit:]]+)+
# 0x[0-9a-f]+ runtime\.chansend1\+0x[0-9a-f]+ .*/src/runtime/chan.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.blockChanSend\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.TestBlockProfile\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
`},
{"chan close", blockChanClose, `
[0-9]+ [0-9]+ @( 0x[[:xdigit:]]+)+
# 0x[0-9a-f]+ runtime\.chanrecv1\+0x[0-9a-f]+ .*/src/runtime/chan.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.blockChanClose\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.TestBlockProfile\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
`},
{"select recv async", blockSelectRecvAsync, `
[0-9]+ [0-9]+ @( 0x[[:xdigit:]]+)+
# 0x[0-9a-f]+ runtime\.selectgo\+0x[0-9a-f]+ .*/src/runtime/select.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.blockSelectRecvAsync\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.TestBlockProfile\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
`},
{"select send sync", blockSelectSendSync, `
[0-9]+ [0-9]+ @( 0x[[:xdigit:]]+)+
# 0x[0-9a-f]+ runtime\.selectgo\+0x[0-9a-f]+ .*/src/runtime/select.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.blockSelectSendSync\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.TestBlockProfile\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
`},
{"mutex", blockMutex, `
[0-9]+ [0-9]+ @( 0x[[:xdigit:]]+)+
# 0x[0-9a-f]+ sync\.\(\*Mutex\)\.Lock\+0x[0-9a-f]+ .*/src/sync/mutex\.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.blockMutex\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.TestBlockProfile\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
`},
{"cond", blockCond, `
[0-9]+ [0-9]+ @( 0x[[:xdigit:]]+)+
# 0x[0-9a-f]+ sync\.\(\*Cond\)\.Wait\+0x[0-9a-f]+ .*/src/sync/cond\.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.blockCond\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
# 0x[0-9a-f]+ runtime/pprof\.TestBlockProfile\+0x[0-9a-f]+ .*/src/runtime/pprof/pprof_test.go:[0-9]+
`},
}
runtime.SetBlockProfileRate(1)
defer runtime.SetBlockProfileRate(0)
for _, test := range tests {
test.f()
}
var w bytes.Buffer
Lookup("block").WriteTo(&w, 1)
prof := w.String()
if !strings.HasPrefix(prof, "--- contention:\ncycles/second=") {
t.Fatalf("Bad profile header:\n%v", prof)
}
if strings.HasSuffix(prof, "#\t0x0\n\n") {
t.Errorf("Useless 0 suffix:\n%v", prof)
}
for _, test := range tests {
if !regexp.MustCompile(strings.Replace(test.re, "\t", "\t+", -1)).MatchString(prof) {
t.Fatalf("Bad %v entry, expect:\n%v\ngot:\n%v", test.name, test.re, prof)
}
}
} | explode_data.jsonl/13653 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1731
} | [
2830,
3393,
4713,
8526,
1155,
353,
8840,
836,
8,
341,
13158,
30573,
2036,
341,
197,
11609,
914,
198,
197,
1166,
262,
2915,
741,
197,
17200,
256,
914,
198,
197,
532,
78216,
1669,
48179,
16458,
515,
197,
197,
4913,
5658,
27006,
497,
250... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestConfigureSecurityGroupPermissionsInvalidOpenPorts(t *testing.T) {
driver := NewTestDriver()
driver.OpenPorts = []string{"2222/tcp", "abc1"}
perms, err := driver.configureSecurityGroupPermissions(securityGroupNoIpPermissions)
assert.Error(t, err)
assert.Nil(t, perms)
} | explode_data.jsonl/7410 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 97
} | [
2830,
3393,
28560,
15352,
2808,
23851,
7928,
5002,
68273,
1155,
353,
8840,
836,
8,
341,
33652,
1669,
1532,
2271,
11349,
741,
33652,
12953,
68273,
284,
3056,
917,
4913,
17,
17,
17,
17,
95958,
497,
330,
13683,
16,
16707,
197,
87772,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_Text_SearchOnline(t *testing.T) {
config := ReadConfiguration(t)
client, ctx := PrepareTest(t, config)
localFile := "DocumentElements/Text/SampleWordDocument.docx"
requestDocument := OpenFile(t, localFile)
options := map[string]interface{}{
}
request := &models.SearchOnlineRequest{
Document: requestDocument,
Pattern: ToStringPointer("aspose"),
Optionals: options,
}
_, _, err := client.WordsApi.SearchOnline(ctx, request)
if err != nil {
t.Error(err)
}
} | explode_data.jsonl/65693 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 212
} | [
2830,
3393,
20550,
65913,
19598,
1155,
353,
8840,
836,
8,
341,
262,
2193,
1669,
4457,
7688,
1155,
340,
262,
2943,
11,
5635,
1669,
31166,
2271,
1155,
11,
2193,
340,
262,
2205,
1703,
1669,
330,
7524,
11868,
14,
1178,
11374,
1516,
10879,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestItalianHolidays(t *testing.T) {
c := NewCalendar()
c.Observed = ObservedExact
AddItalianHolidays(c)
tests := []testStruct{
{time.Date(2020, 1, 1, 12, 0, 0, 0, time.UTC), true, "ITCapodanno"},
{time.Date(2020, 1, 6, 12, 0, 0, 0, time.UTC), true, "ITEpifania"},
{time.Date(2020, 4, 13, 12, 0, 0, 0, time.UTC), true, "ITPasquetta"},
{time.Date(2020, 4, 25, 12, 0, 0, 0, time.UTC), true, "ITFestaDellaLiberazione"},
{time.Date(2020, 5, 1, 12, 0, 0, 0, time.UTC), true, "ITFestaDelLavoro"},
{time.Date(2020, 6, 2, 12, 0, 0, 0, time.UTC), true, "ITFestaDellaRepubblica"},
{time.Date(2020, 8, 15, 12, 0, 0, 0, time.UTC), true, "ITFerragosto"},
{time.Date(2020, 11, 1, 12, 0, 0, 0, time.UTC), true, "ITTuttiISanti"},
{time.Date(2020, 12, 8, 12, 0, 0, 0, time.UTC), true, "ITImmacolata"},
{time.Date(2020, 12, 25, 12, 0, 0, 0, time.UTC), true, "ITNatale"},
{time.Date(2020, 12, 26, 12, 0, 0, 0, time.UTC), true, "ITSantoStefano"},
}
for _, test := range tests {
got := c.IsHoliday(test.t)
if got != test.want {
t.Errorf("got: %t for %s; want: %t (%s)", got, test.name, test.want, test.t)
}
}
} | explode_data.jsonl/49707 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 528
} | [
2830,
3393,
69111,
39,
19891,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
1532,
19882,
741,
1444,
8382,
1279,
2771,
284,
50441,
2771,
57954,
198,
37972,
69111,
39,
19891,
1337,
692,
78216,
1669,
3056,
1944,
9422,
515,
197,
197,
90,
1678,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestCollectionReplica_removePartition(t *testing.T) {
node := newQueryNodeMock()
collectionID := UniqueID(0)
initTestMeta(t, node, collectionID, 0)
partitionIDs := []UniqueID{1, 2, 3}
for _, id := range partitionIDs {
err := node.historical.replica.addPartition(collectionID, id)
assert.NoError(t, err)
partition, err := node.historical.replica.getPartitionByID(id)
assert.NoError(t, err)
assert.Equal(t, partition.ID(), id)
err = node.historical.replica.removePartition(id)
assert.NoError(t, err)
}
err := node.Stop()
assert.NoError(t, err)
} | explode_data.jsonl/11484 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 220
} | [
2830,
3393,
6482,
18327,
15317,
18193,
49978,
1155,
353,
8840,
836,
8,
341,
20831,
1669,
501,
2859,
1955,
11571,
741,
1444,
1908,
915,
1669,
28650,
915,
7,
15,
340,
28248,
2271,
12175,
1155,
11,
2436,
11,
4426,
915,
11,
220,
15,
692,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestHmhdBox_MaxPDUSize(t *testing.T) {
hb := HmhdBox{
maxPDUSize: 11,
}
if hb.MaxPDUSize() != 11 {
t.Fatalf("MaxPDUSize() not correct.")
}
} | explode_data.jsonl/11009 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 77
} | [
2830,
3393,
39,
76,
15990,
1611,
58843,
47,
21547,
1695,
1155,
353,
8840,
836,
8,
341,
9598,
65,
1669,
472,
76,
15990,
1611,
515,
197,
22543,
47,
21547,
1695,
25,
220,
16,
16,
345,
197,
630,
743,
45135,
14535,
47,
21547,
1695,
368,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
// TestTopDownFunctionErrors asserts the evaluation errors produced by
// user-defined functions: conflicting outputs for a single call (test1),
// a call whose arguments match no definition (test2, expected to yield no
// error string), and conflicting outputs across multiple matching
// definitions (test3).
func TestTopDownFunctionErrors(t *testing.T) {
	compiler := compileModules([]string{
		`
		package test1
		p(x) = y {
			y = x[_]
		}
		r = y {
			p([1, 2, 3], y)
		}`,
		`
		package test2
		p(1, x) = y {
			y = x
		}
		p(2, x) = y {
			y = x+1
		}
		r = y {
			p(3, 0, y)
		}`,
		`
		package test3
		p(1, x) = y {
			y = x
		}
		p(2, x) = y {
			y = x+1
		}
		p(x, y) = z {
			z = x
		}
		r = y {
			p(1, 0, y)
		}`,
	})
	// In-memory store seeded with the shared small test dataset; the
	// transaction is aborted (not committed) when the test finishes.
	store := inmem.NewFromObject(loadSmallTestData())
	ctx := context.Background()
	txn := storage.NewTransactionOrDie(ctx, store)
	defer store.Abort(ctx, txn)
	assertTopDownWithPath(t, compiler, store, "function output conflict single", []string{"test1", "r"}, "", functionConflictErr(nil))
	assertTopDownWithPath(t, compiler, store, "function input no match", []string{"test2", "r"}, "", "")
	assertTopDownWithPath(t, compiler, store, "function output conflict multiple", []string{"test3", "r"}, "", completeDocConflictErr(nil))
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 444
} | [
2830,
3393,
5366,
4454,
5152,
13877,
1155,
353,
8840,
836,
8,
341,
197,
33620,
1669,
19192,
28201,
10556,
917,
515,
197,
197,
3989,
197,
197,
1722,
1273,
16,
271,
197,
3223,
2075,
8,
284,
379,
341,
298,
14522,
284,
856,
13496,
921,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMatch_Int(t *testing.T) {
var testData = []struct {
rawYql string
data map[string]interface{}
out bool
}{
{
rawYql: `a=10`,
data: map[string]interface{}{
"a": 9,
},
out: false,
},
{
rawYql: `a=10`,
data: map[string]interface{}{
"a": 10,
},
out: true,
},
{
rawYql: `a>10`,
data: map[string]interface{}{
"a": 10,
},
out: false,
},
{
rawYql: `a>10`,
data: map[string]interface{}{
"a": 11,
},
out: true,
},
{
rawYql: `a>=10`,
data: map[string]interface{}{
"a": 10,
},
out: true,
},
{
rawYql: `a>=10`,
data: map[string]interface{}{
"a": 11,
},
out: true,
},
{
rawYql: `a>=10`,
data: map[string]interface{}{
"a": 1,
},
out: false,
},
{
rawYql: `a<10`,
data: map[string]interface{}{
"a": 1,
},
out: true,
},
{
rawYql: `a<10`,
data: map[string]interface{}{
"a": 10,
},
out: false,
},
{
rawYql: `a<10`,
data: map[string]interface{}{
"a": 11,
},
out: false,
},
}
ass := assert.New(t)
for _, tc := range testData {
ok, err := Match(tc.rawYql, tc.data)
ass.NoError(err)
ass.Equal(tc.out, ok, "rawYql=%s||data=%+v", tc.rawYql, tc.data)
}
} | explode_data.jsonl/65935 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 722
} | [
2830,
3393,
8331,
32054,
1155,
353,
8840,
836,
8,
341,
2405,
67348,
284,
3056,
1235,
341,
197,
76559,
56,
1470,
914,
198,
197,
8924,
256,
2415,
14032,
31344,
16094,
197,
13967,
262,
1807,
198,
197,
59403,
197,
197,
515,
298,
76559,
56... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGetFields(t *testing.T) {
testData := "<call:4>W1AW<STATION_CALL:6>KF4MDV"
expected := [2]string{"call", "station_call"}
record, err := ParseADIFRecord([]byte(testData))
if err != nil {
t.Fatal(err)
}
fieldNames := record.GetFields()
if len(fieldNames) != len(expected) {
t.Fatalf("Expected %d fields but got %d", len(expected), len(fieldNames))
}
OUTER:
for _, exp := range expected {
for _, field := range fieldNames {
if exp == field {
continue OUTER
}
}
t.Fatalf("Expected field %v wasn't in the actual fields", exp)
}
} | explode_data.jsonl/61734 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 230
} | [
2830,
3393,
1949,
8941,
1155,
353,
8840,
836,
8,
341,
18185,
1043,
1669,
4055,
6659,
25,
19,
29,
54,
16,
14419,
27,
784,
3495,
14935,
25,
21,
53300,
37,
19,
6076,
53,
698,
42400,
1669,
508,
17,
30953,
4913,
6659,
497,
330,
20155,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestAddLearnerOrPeer_fromLearner(t *testing.T) {
raftService := newTestRaftService(t, 3, []uint64{2}, []uint64{3})
_, err := raftService.raftProtocolManager.ProposeNewPeer(TEST_URL, true)
if err == nil {
t.Errorf("learner should not be allowed to add learner or peer")
}
if err != nil && !strings.Contains(err.Error(), "learner node can't add peer or learner") {
t.Errorf("expect error message: propose new peer failed, got: %v\n", err)
}
_, err = raftService.raftProtocolManager.ProposeNewPeer(TEST_URL, false)
if err == nil {
t.Errorf("learner should not be allowed to add learner or peer")
}
if err != nil && !strings.Contains(err.Error(), "learner node can't add peer or learner") {
t.Errorf("expect error message: propose new peer failed, got: %v\n", err)
}
} | explode_data.jsonl/13618 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 286
} | [
2830,
3393,
2212,
43,
682,
1194,
2195,
30888,
5673,
43,
682,
1194,
1155,
353,
8840,
836,
8,
1476,
197,
2944,
1860,
1669,
501,
2271,
55535,
723,
1860,
1155,
11,
220,
18,
11,
3056,
2496,
21,
19,
90,
17,
2137,
3056,
2496,
21,
19,
90,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestFB_FizzBuzz(t *testing.T) {
end := 100
step := 50
f, err := FizzBuzz(1, end, step)
if err != nil {
t.FailNow()
}
for i := 1; i < end; i++ {
if val, ok := f.m[i]; ok {
fmt.Printf("%v\t%v\n", i, val)
//do something here
}
}
} | explode_data.jsonl/19479 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 130
} | [
2830,
3393,
16208,
1400,
8759,
59473,
1155,
353,
8840,
836,
8,
1476,
6246,
1669,
220,
16,
15,
15,
198,
77093,
1669,
220,
20,
15,
271,
1166,
11,
1848,
1669,
434,
8759,
59473,
7,
16,
11,
835,
11,
3019,
340,
743,
1848,
961,
2092,
341... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestFindTopK(t *testing.T) {
num1 := []int{1, 34, 67, 444}
num2 := []int{45, 67, 88, 888}
k := findTopK(num1, 0, num2, 0, 7)
t.Log(k)
} | explode_data.jsonl/82564 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 76
} | [
2830,
3393,
9885,
5366,
42,
1155,
353,
8840,
836,
8,
341,
22431,
16,
1669,
3056,
396,
90,
16,
11,
220,
18,
19,
11,
220,
21,
22,
11,
220,
19,
19,
19,
532,
22431,
17,
1669,
3056,
396,
90,
19,
20,
11,
220,
21,
22,
11,
220,
23,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReadFromDisk(t *testing.T) {
dir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal("Failed to create temporary directory")
}
// clean up tmp dir
defer os.RemoveAll(dir)
assert.Equal(t, "", GetParameter(dir, "testParamNotExistingYet"))
} | explode_data.jsonl/52027 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 98
} | [
2830,
3393,
4418,
3830,
47583,
1155,
353,
8840,
836,
8,
341,
48532,
11,
1848,
1669,
43144,
65009,
6184,
19814,
14676,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
445,
9408,
311,
1855,
13340,
6220,
1138,
197,
630,
197,
322,
4240,
705,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestHandler_CreateSystemBuckets exercises bucket.Handler.CreateSystemBuckets
// against a mocked MinIO client: fresh creation of both system buckets,
// tolerating buckets that already exist, retrying transient errors until
// success, and surfacing the error after repeated failures.
func TestHandler_CreateSystemBuckets(t *testing.T) {
	// Neither bucket exists yet: both are created and the public one gets a
	// bucket policy installed.
	t.Run("Success", func(t *testing.T) {
		// Given
		g := gomega.NewGomegaWithT(t)
		privatePrefix := "private"
		publicPrefix := "public"
		region := "region"
		cfg := bucket.Config{
			PrivatePrefix: privatePrefix,
			PublicPrefix: publicPrefix,
			Region: region,
		}
		minioCli := &automock.BucketClient{}
		handler := bucket.NewHandler(minioCli, cfg)
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(publicPrefix))).Return(false, nil).Once()
		minioCli.On("MakeBucket", mock.MatchedBy(testBucketNameFn(publicPrefix)), region).Return(nil).Once()
		minioCli.On("SetBucketPolicy", mock.MatchedBy(testBucketNameFn(publicPrefix)), mock.MatchedBy(func(policy string) bool { return true })).Return(nil).Once()
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(privatePrefix))).Return(false, nil).Once()
		minioCli.On("MakeBucket", mock.MatchedBy(testBucketNameFn(privatePrefix)), region).Return(nil).Once()
		defer minioCli.AssertExpectations(t)
		// When
		buckets, err := handler.CreateSystemBuckets()
		// Then
		g.Expect(buckets.Private).To(gomega.HavePrefix(privatePrefix))
		g.Expect(buckets.Public).To(gomega.HavePrefix(publicPrefix))
		g.Expect(err).NotTo(gomega.HaveOccurred())
	})
	// Both buckets already exist: no MakeBucket calls, the public policy is
	// still (re)applied, and no error is returned.
	t.Run("Exists", func(t *testing.T) {
		// Given
		g := gomega.NewGomegaWithT(t)
		privatePrefix := "private"
		publicPrefix := "public"
		region := "region"
		cfg := bucket.Config{
			PrivatePrefix: privatePrefix,
			PublicPrefix: publicPrefix,
			Region: region,
		}
		minioCli := &automock.BucketClient{}
		handler := bucket.NewHandler(minioCli, cfg)
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(publicPrefix))).Return(true, nil).Once()
		minioCli.On("SetBucketPolicy", mock.MatchedBy(testBucketNameFn(publicPrefix)), mock.MatchedBy(func(policy string) bool { return true })).Return(nil).Once()
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(privatePrefix))).Return(true, nil).Once()
		defer minioCli.AssertExpectations(t)
		// When
		buckets, err := handler.CreateSystemBuckets()
		// Then
		g.Expect(buckets.Private).To(gomega.HavePrefix(privatePrefix))
		g.Expect(buckets.Public).To(gomega.HavePrefix(publicPrefix))
		g.Expect(err).NotTo(gomega.HaveOccurred())
	})
	// Transient failures: the first BucketExists / MakeBucket attempts fail,
	// the retries succeed, so the overall call still returns no error.
	t.Run("Temporary Errors", func(t *testing.T) {
		// Given
		g := gomega.NewGomegaWithT(t)
		privatePrefix := "private"
		publicPrefix := "public"
		region := "region"
		cfg := bucket.Config{
			PrivatePrefix: privatePrefix,
			PublicPrefix: publicPrefix,
			Region: region,
		}
		testErr := errors.New("Test err")
		minioCli := &automock.BucketClient{}
		handler := bucket.NewHandler(minioCli, cfg)
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(publicPrefix))).Return(false, testErr).Once()
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(publicPrefix))).Return(false, nil).Once()
		minioCli.On("MakeBucket", mock.MatchedBy(testBucketNameFn(publicPrefix)), region).Return(nil).Once()
		minioCli.On("SetBucketPolicy", mock.MatchedBy(testBucketNameFn(publicPrefix)), mock.MatchedBy(func(policy string) bool { return true })).Return(nil).Once()
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(privatePrefix))).Return(false, nil).Twice()
		minioCli.On("MakeBucket", mock.MatchedBy(testBucketNameFn(privatePrefix)), region).Return(testErr).Once()
		minioCli.On("MakeBucket", mock.MatchedBy(testBucketNameFn(privatePrefix)), region).Return(nil).Once()
		defer minioCli.AssertExpectations(t)
		// When
		buckets, err := handler.CreateSystemBuckets()
		// Then
		g.Expect(buckets.Private).To(gomega.HavePrefix(privatePrefix))
		g.Expect(buckets.Public).To(gomega.HavePrefix(publicPrefix))
		g.Expect(err).NotTo(gomega.HaveOccurred())
	})
	// Persistent failures: MakeBucket keeps erroring; after the retry budget
	// (5 attempts here) is exhausted the error must be propagated.
	t.Run("Fatal Errors", func(t *testing.T) {
		// Given
		g := gomega.NewGomegaWithT(t)
		privatePrefix := "private"
		publicPrefix := "public"
		region := "region"
		cfg := bucket.Config{
			PrivatePrefix: privatePrefix,
			PublicPrefix: publicPrefix,
			Region: region,
		}
		testErr := errors.New("Test err")
		minioCli := &automock.BucketClient{}
		handler := bucket.NewHandler(minioCli, cfg)
		times := 5
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(publicPrefix))).Return(false, nil).Maybe()
		minioCli.On("MakeBucket", mock.MatchedBy(testBucketNameFn(publicPrefix)), region).Return(testErr).Maybe()
		minioCli.On("BucketExists", mock.MatchedBy(testBucketNameFn(privatePrefix))).Return(false, nil).Times(times)
		minioCli.On("MakeBucket", mock.MatchedBy(testBucketNameFn(privatePrefix)), region).Return(testErr).Times(times)
		defer minioCli.AssertExpectations(t)
		// When
		_, err := handler.CreateSystemBuckets()
		// Then
		g.Expect(err).To(gomega.HaveOccurred())
		g.Expect(err.Error()).To(gomega.ContainSubstring(testErr.Error()))
	})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1969
} | [
2830,
3393,
3050,
34325,
2320,
33,
38551,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
7188,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
197,
322,
16246,
198,
197,
3174,
1669,
342,
32696,
7121,
38,
32696,
2354,
51,
1155,
692,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestInt64DataPoint_CopyTo(t *testing.T) {
ms := NewInt64DataPoint()
NewInt64DataPoint().CopyTo(ms)
assert.True(t, ms.IsNil())
generateTestInt64DataPoint().CopyTo(ms)
assert.EqualValues(t, generateTestInt64DataPoint(), ms)
} | explode_data.jsonl/19528 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 90
} | [
2830,
3393,
1072,
21,
19,
1043,
2609,
77637,
1249,
1155,
353,
8840,
836,
8,
341,
47691,
1669,
1532,
1072,
21,
19,
1043,
2609,
741,
197,
3564,
1072,
21,
19,
1043,
2609,
1005,
12106,
1249,
35680,
340,
6948,
32443,
1155,
11,
9829,
4506,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestOKExFuture_FutureCancelOrder fires a cancel request for a hard-coded
// EOS/USD quarterly-contract order ID and logs whatever the call returns.
// NOTE(review): presumably talks to the live OKEx API, so it needs network
// access and valid credentials — confirm before running in CI.
func TestOKExFuture_FutureCancelOrder(t *testing.T) {
	t.Log(okex.OKExFuture.FutureCancelOrder(goex.EOS_USD, goex.QUARTER_CONTRACT, "e88bd3361de94512b8acaf9aa154f95a"))
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 78
} | [
2830,
3393,
3925,
840,
24206,
1400,
2976,
9269,
4431,
1155,
353,
8840,
836,
8,
341,
3244,
5247,
60207,
327,
15480,
840,
24206,
76356,
9269,
4431,
47415,
327,
5142,
3126,
13467,
35,
11,
728,
327,
13,
5757,
2992,
640,
4307,
41105,
11,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_SymlinkLoopWithGlobsShouldResolve(t *testing.T) {
test := func(t *testing.T) {
resolver, err := newDirectoryResolver("./test-fixtures/symlinks-loop")
require.NoError(t, err)
locations, err := resolver.FilesByGlob("**/file.target")
require.NoError(t, err)
// Note: I'm not certain that this behavior is correct, but it is not an infinite loop (which is the point of the test)
// - block/loop0/file.target
// - devices/loop0/file.target
// - devices/loop0/subsystem/loop0/file.target
assert.Len(t, locations, 3)
}
testWithTimeout(t, 5*time.Second, test)
} | explode_data.jsonl/50130 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 216
} | [
2830,
3393,
1098,
88,
44243,
14620,
2354,
38,
68164,
14996,
56808,
1155,
353,
8840,
836,
8,
341,
18185,
1669,
2915,
1155,
353,
8840,
836,
8,
341,
197,
10202,
7921,
11,
1848,
1669,
501,
9310,
18190,
13988,
1944,
70913,
18513,
2687,
88,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNewNamespace(t *testing.T) {
testCases := []struct {
words []string
namespace string
}{
{
[]string{"thread", "info_scheme", "query update"},
"thread_info_scheme_query_update",
},
{
[]string{"thread", "info_scheme", "query_update"},
"thread_info_scheme_query_update",
},
{
[]string{"thread", "info", "scheme", "query", "update"},
"thread_info_scheme_query_update",
},
}
for _, cases := range testCases {
if got := newNamespace(cases.words...); got != cases.namespace {
t.Errorf("want %s, got %s", cases.namespace, got)
}
}
} | explode_data.jsonl/71242 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 244
} | [
2830,
3393,
3564,
22699,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
197,
5761,
257,
3056,
917,
198,
197,
56623,
914,
198,
197,
59403,
197,
197,
515,
298,
197,
1294,
917,
4913,
4528,
497,
330,
2733,
53293,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestSetBit(t *testing.T) {
var b Bits64
t.Logf("%v", b)
(&b).SetBitAt(5)
t.Logf("SetBitAt(5) %v", b)
(&b).SetBitsByUint64(0x0f0f0f)
t.Logf("SetBitsByUint64(0x0f0f0f) %v", b)
(&b).ClearBitAt(2)
t.Logf("ClearBitAt(2) %v", b)
(&b).ClearBitsByUint64(0x0f)
t.Logf("ClearBitsByUint64(0x0f) %v", b)
b.SetBitsByUint64(0)
t.Logf("%v", b)
(&b).NegBitAt(5)
t.Logf("NegBitAt(5) %v", b)
(&b).NegBitAt(5)
t.Logf("NegBitAt(5) %v", b)
(&b).NegBitsByUint64(0xf0f0f0f)
t.Logf("NegBitsByUint64(0xf0f0f0f) %v", b)
b.SetBitsByUint64(0)
t.Logf("TestBitAt(7) %v", (&b).TestBitAt(7))
t.Logf("TestBitsByUint64(0x704) %v", (&b).TestBitsByUint64(0x704))
} | explode_data.jsonl/34956 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 395
} | [
2830,
3393,
1649,
8344,
1155,
353,
8840,
836,
8,
341,
2405,
293,
49457,
21,
19,
198,
3244,
98954,
4430,
85,
497,
293,
692,
197,
2099,
65,
568,
1649,
8344,
1655,
7,
20,
340,
3244,
98954,
445,
1649,
8344,
1655,
7,
20,
8,
1018,
85,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestAgentConnectCARoots_list exercises the /v1/agent/connect/ca/roots
// endpoint: the active root reflects the most recently configured CA,
// signing secrets are never exposed, the first response is a cache MISS, a
// repeated request is a cache HIT, and a CA rotation is picked up by the
// background cache refresh (observed via retry until the cached response
// reflects the new active root).
func TestAgentConnectCARoots_list(t *testing.T) {
	t.Parallel()
	assert := assert.New(t)
	require := require.New(t)
	a := NewTestAgent(t.Name(), "")
	defer a.Shutdown()
	// Set some CAs. Note that NewTestAgent already bootstraps one CA so this just
	// adds a second and makes it active.
	ca2 := connect.TestCAConfigSet(t, a, nil)
	// List
	req, _ := http.NewRequest("GET", "/v1/agent/connect/ca/roots", nil)
	resp := httptest.NewRecorder()
	obj, err := a.srv.AgentConnectCARoots(resp, req)
	require.NoError(err)
	value := obj.(structs.IndexedCARoots)
	assert.Equal(value.ActiveRootID, ca2.ID)
	// Would like to assert that it's the same as the TestAgent domain but the
	// only way to access that state via this package is by RPC to the server
	// implementation running in TestAgent which is more or less a tautology.
	assert.NotEmpty(value.TrustDomain)
	assert.Len(value.Roots, 2)
	// We should never have the secret information
	for _, r := range value.Roots {
		assert.Equal("", r.SigningCert)
		assert.Equal("", r.SigningKey)
	}
	assert.Equal("MISS", resp.Header().Get("X-Cache"))
	// Test caching
	{
		// List it again
		resp2 := httptest.NewRecorder()
		obj2, err := a.srv.AgentConnectCARoots(resp2, req)
		require.NoError(err)
		assert.Equal(obj, obj2)
		// Should cache hit this time and not make request
		assert.Equal("HIT", resp2.Header().Get("X-Cache"))
	}
	// Test that caching is updated in the background
	{
		// Set a new CA
		ca := connect.TestCAConfigSet(t, a, nil)
		retry.Run(t, func(r *retry.R) {
			// List it again
			resp := httptest.NewRecorder()
			obj, err := a.srv.AgentConnectCARoots(resp, req)
			r.Check(err)
			value := obj.(structs.IndexedCARoots)
			if ca.ID != value.ActiveRootID {
				r.Fatalf("%s != %s", ca.ID, value.ActiveRootID)
			}
			// There are now 3 CAs because we didn't complete rotation on the original
			// 2
			if len(value.Roots) != 3 {
				r.Fatalf("bad len: %d", len(value.Roots))
			}
			// Should be a cache hit! The data should've updated in the cache
			// in the background so this should've been fetched directly from
			// the cache.
			if resp.Header().Get("X-Cache") != "HIT" {
				r.Fatalf("should be a cache hit")
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 834
} | [
2830,
3393,
16810,
14611,
36390,
1905,
82,
2019,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
6948,
1669,
2060,
7121,
1155,
340,
17957,
1669,
1373,
7121,
1155,
340,
11323,
1669,
1532,
2271,
16810,
1155,
2967,
1507,
14676,
16867,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestListNextResultsMultiPagesWithListResponderError drives listNextResults
// through two failure modes: a 404 body that fails the list responder
// (prepare succeeds, send succeeds), and a transport-level send error. In
// both cases an error must be returned; only in the responder-error case is
// the partially-decoded result expected to match.
func TestListNextResultsMultiPagesWithListResponderError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	tests := []struct {
		name string
		prepareErr error
		sendErr *retry.Error
	}{
		{
			name: "testListResponderError",
			prepareErr: nil,
			sendErr: nil,
		},
		{
			name: "testSendError",
			sendErr: &retry.Error{RawError: fmt.Errorf("error")},
		},
	}
	// lastResult carries a NextLink so listNextResults issues a follow-up page request.
	lastResult := compute.VirtualMachineListResult{
		NextLink: to.StringPtr("next"),
	}
	for _, test := range tests {
		armClient := mockarmclient.NewMockInterface(ctrl)
		req := &http.Request{
			Method: "GET",
		}
		armClient.EXPECT().PrepareGetRequest(gomock.Any(), gomock.Any()).Return(req, test.prepareErr)
		// Send/CloseResponse are only reached when request preparation succeeded.
		if test.prepareErr == nil {
			armClient.EXPECT().Send(gomock.Any(), req).Return(&http.Response{
				StatusCode: http.StatusNotFound,
				Body: ioutil.NopCloser(bytes.NewReader([]byte(`{"foo":"bar"}`))),
			}, test.sendErr)
			armClient.EXPECT().CloseResponse(gomock.Any(), gomock.Any())
		}
		response := &http.Response{
			StatusCode: http.StatusNotFound,
			Body: ioutil.NopCloser(bytes.NewBuffer([]byte(`{"foo":"bar"}`))),
		}
		expected := compute.VirtualMachineListResult{}
		expected.Response = autorest.Response{Response: response}
		vmssClient := getTestVMClient(armClient)
		result, err := vmssClient.listNextResults(context.TODO(), lastResult)
		assert.Error(t, err)
		if test.sendErr != nil {
			assert.NotEqual(t, expected, result)
		} else {
			assert.Equal(t, expected, result)
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 633
} | [
2830,
3393,
852,
5847,
9801,
20358,
17713,
2354,
852,
30884,
1454,
1155,
353,
8840,
836,
8,
341,
84381,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
23743,
991,
18176,
2822,
78216,
1669,
3056,
1235,
341,
197,
11609,
981,
914,
198... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestJob_createCallout checks that Job.createCallout POSTs the bulk-ingest
// job creation request to <instance>/jobs/ingest and decodes the returned
// job description into a Response. The mocked HTTP client returns a 500 for
// any unexpected URL, which makes the decode fail and the test report it.
func TestJob_createCallout(t *testing.T) {
	type fields struct {
		session session.ServiceFormatter
		info Response
	}
	type args struct {
		options Options
	}
	tests := []struct {
		name string
		fields fields
		args args
		want Response
		wantErr bool
	}{
		{
			name: "Passing",
			fields: fields{
				session: &mockSessionFormatter{
					url: "https://test.salesforce.com",
					client: mockHTTPClient(func(req *http.Request) *http.Response {
						// Any URL other than the ingest endpoint is a test failure,
						// surfaced as a 500 whose body echoes the offending URL.
						if req.URL.String() != "https://test.salesforce.com/jobs/ingest" {
							return &http.Response{
								StatusCode: 500,
								Status: "Invalid URL",
								Body: ioutil.NopCloser(strings.NewReader(req.URL.String())),
								Header: make(http.Header),
							}
						}
						resp := `{
							"apiVersion": 44.0,
							"columnDelimiter": "COMMA",
							"concurrencyMode": "Parallel",
							"contentType": "CSV",
							"contentUrl": "services/v44.0/jobs",
							"createdById": "1234",
							"createdDate": "1/1/1970",
							"externalIdFieldName": "namename",
							"id": "9876",
							"jobType": "V2Ingest",
							"lineEnding": "LF",
							"object": "Account",
							"operation": "Insert",
							"state": "Open",
							"systemModstamp": "1/1/1980"
						}`
						return &http.Response{
							StatusCode: http.StatusOK,
							Status: "Good",
							Body: ioutil.NopCloser(strings.NewReader(resp)),
							Header: make(http.Header),
						}
					}),
				},
			},
			args: args{
				options: Options{
					ColumnDelimiter: Comma,
					ContentType: CSV,
					ExternalIDFieldName: "Some External Field",
					LineEnding: Linefeed,
					Object: "Account",
					Operation: Insert,
				},
			},
			// want mirrors the JSON fixture above, field for field.
			want: Response{
				APIVersion: 44.0,
				ColumnDelimiter: "COMMA",
				ConcurrencyMode: "Parallel",
				ContentType: "CSV",
				ContentURL: "services/v44.0/jobs",
				CreatedByID: "1234",
				CreatedDate: "1/1/1970",
				ExternalIDFieldName: "namename",
				ID: "9876",
				JobType: "V2Ingest",
				LineEnding: "LF",
				Object: "Account",
				Operation: "Insert",
				State: "Open",
				SystemModstamp: "1/1/1980",
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			j := &Job{
				session: tt.fields.session,
				info: tt.fields.info,
			}
			got, err := j.createCallout(tt.args.options)
			if (err != nil) != tt.wantErr {
				t.Errorf("Job.createCallout() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("Job.createCallout() = %v, want %v", got, tt.want)
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1467
} | [
2830,
3393,
12245,
8657,
7220,
411,
1155,
353,
8840,
836,
8,
341,
13158,
5043,
2036,
341,
197,
25054,
3797,
13860,
14183,
198,
197,
27043,
262,
5949,
198,
197,
532,
13158,
2827,
2036,
341,
197,
35500,
14566,
198,
197,
532,
78216,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestRuleNot(t *testing.T) {
common.Log.Debug("Entering function: %s", common.GetFunctionName())
sqls := [][]string{
{
`select id from t where num not in(1,2,3);`,
`select id from t where num not like "a%"`,
},
{
`select id from t where num in(1,2,3);`,
`select id from t where num like "a%"`,
},
}
for _, sql := range sqls[0] {
q, err := NewQuery4Audit(sql)
if err == nil {
rule := q.RuleNot()
if rule.Item != "ARG.011" {
t.Error("Rule not match:", rule.Item, "Expect : ARG.011")
}
} else {
t.Error("sqlparser.Parse Error:", err)
}
}
for _, sql := range sqls[1] {
q, err := NewQuery4Audit(sql)
if err == nil {
rule := q.RuleNot()
if rule.Item != "OK" {
t.Error("Rule not match:", rule.Item, "Expect : OK")
}
} else {
t.Error("sqlparser.Parse Error:", err)
}
}
common.Log.Debug("Exiting function: %s", common.GetFunctionName())
} | explode_data.jsonl/76809 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 407
} | [
2830,
3393,
11337,
2623,
1155,
353,
8840,
836,
8,
341,
83825,
5247,
20345,
445,
82867,
729,
25,
1018,
82,
497,
4185,
2234,
5152,
675,
2398,
30633,
82,
1669,
52931,
917,
515,
197,
197,
515,
298,
197,
63,
1742,
877,
504,
259,
1380,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestValueAtQuantile_Quantile(t *testing.T) {
ms := NewValueAtQuantile()
assert.EqualValues(t, float64(0.0), ms.Quantile())
testValQuantile := float64(17.13)
ms.SetQuantile(testValQuantile)
assert.EqualValues(t, testValQuantile, ms.Quantile())
} | explode_data.jsonl/32760 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 98
} | [
2830,
3393,
1130,
1655,
44220,
457,
62,
44220,
457,
1155,
353,
8840,
836,
8,
341,
47691,
1669,
1532,
1130,
1655,
44220,
457,
741,
6948,
12808,
6227,
1155,
11,
2224,
21,
19,
7,
15,
13,
15,
701,
9829,
33907,
517,
457,
2398,
18185,
220... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestHOTP_Hex(t *testing.T) {
secret, err := DecodeBase32("KZOSZD7X6RG7HWZUQI2KBJULFU")
assert.NoError(t, err)
otpHex, err := NewHOTP(secret, WithLength(8), FormatHex())
assert.NoError(t, err)
otp, err := otpHex.At(0)
assert.NoError(t, err, "OTP generation failed")
assert.Equal(t, "07a45595", otp)
} | explode_data.jsonl/41256 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 149
} | [
2830,
3393,
39,
90146,
2039,
327,
1155,
353,
8840,
836,
8,
341,
197,
20474,
11,
1848,
1669,
50194,
3978,
18,
17,
445,
42,
57,
3126,
57,
35,
22,
55,
21,
32360,
22,
38252,
57,
52,
48,
40,
17,
42,
14978,
1094,
81213,
1138,
6948,
35... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBlobberGRPCService_GetFileStats_Success(t *testing.T) {
req := &blobbergrpc.GetFileStatsRequest{
Context: &blobbergrpc.RequestContext{
Client: "owner",
ClientKey: "",
Allocation: "",
},
Path: "path",
PathHash: "path_hash",
Allocation: "",
}
mockStorageHandler := &storageHandlerI{}
mockReferencePackage := &mocks.PackageHandler{}
mockStorageHandler.On("verifyAllocation", mock.Anything, req.Allocation, true).Return(&allocation.Allocation{
ID: "allocationId",
Tx: req.Allocation,
OwnerID: "owner",
}, nil)
mockReferencePackage.On("GetReferenceFromLookupHash", mock.Anything, mock.Anything, mock.Anything).Return(&reference.Ref{
ID: 123,
Name: "test",
Type: reference.FILE,
}, nil)
mockReferencePackage.On("GetFileStats", mock.Anything, int64(123)).Return(&stats.FileStats{
NumBlockDownloads: 10,
}, nil)
mockReferencePackage.On("GetWriteMarkerEntity", mock.Anything, mock.Anything).Return(nil, nil)
svc := newGRPCBlobberService(mockStorageHandler, mockReferencePackage)
resp, err := svc.GetFileStats(context.Background(), req)
if err != nil {
t.Fatal("unexpected error")
}
assert.Equal(t, resp.MetaData.FileMetaData.Name, "test")
assert.Equal(t, resp.Stats.NumBlockDownloads, int64(10))
} | explode_data.jsonl/66828 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 481
} | [
2830,
3393,
37985,
652,
8626,
4872,
1860,
13614,
1703,
16635,
87161,
1155,
353,
8840,
836,
8,
341,
24395,
1669,
609,
35112,
652,
56585,
2234,
1703,
16635,
1900,
515,
197,
70871,
25,
609,
35112,
652,
56585,
9659,
1972,
515,
298,
71724,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestPathExists(t *testing.T) {
tempFile := filepath.Join(os.TempDir(), "testdir"+uuid.New().String()+"_"+t.Name())
_, err := os.Create(tempFile)
require.NoError(t, err, "couldn't create temp path")
assert.True(t, PathExists(tempFile), "expecting existence of path")
os.RemoveAll(tempFile)
} | explode_data.jsonl/54587 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 115
} | [
2830,
3393,
1820,
15575,
1155,
353,
8840,
836,
8,
341,
16280,
1703,
1669,
26054,
22363,
9638,
65009,
6184,
1507,
330,
1944,
3741,
5572,
17128,
7121,
1005,
703,
25589,
33415,
83,
2967,
2398,
197,
6878,
1848,
1669,
2643,
7251,
9758,
1703,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAsanaUsers(t *testing.T) {
s := createAsanaService()
users, err := s.Users()
if err != nil {
t.Error("error calling users(), err:", err)
}
if len(users) == 0 {
t.Error("should get some users")
}
} | explode_data.jsonl/78894 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 85
} | [
2830,
3393,
2121,
3362,
7137,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
1855,
2121,
3362,
1860,
2822,
90896,
11,
1848,
1669,
274,
36782,
741,
743,
1848,
961,
2092,
341,
197,
3244,
6141,
445,
841,
8098,
3847,
1507,
1848,
12147,
1848,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestLoggingServiceV2WriteLogEntries(t *testing.T) {
var expectedResponse *loggingpb.WriteLogEntriesResponse = &loggingpb.WriteLogEntriesResponse{}
mockLogging.err = nil
mockLogging.reqs = nil
mockLogging.resps = append(mockLogging.resps[:0], expectedResponse)
var entries []*loggingpb.LogEntry = nil
var request = &loggingpb.WriteLogEntriesRequest{
Entries: entries,
}
c, err := NewClient(context.Background(), clientOpt)
if err != nil {
t.Fatal(err)
}
resp, err := c.WriteLogEntries(context.Background(), request)
if err != nil {
t.Fatal(err)
}
if want, got := request, mockLogging.reqs[0]; !proto.Equal(want, got) {
t.Errorf("wrong request %q, want %q", got, want)
}
if want, got := expectedResponse, resp; !proto.Equal(want, got) {
t.Errorf("wrong response %q, want %q)", got, want)
}
} | explode_data.jsonl/77759 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 314
} | [
2830,
3393,
34575,
1860,
53,
17,
7985,
2201,
24533,
1155,
353,
8840,
836,
8,
341,
2405,
3601,
2582,
353,
25263,
16650,
4073,
2201,
24533,
2582,
284,
609,
25263,
16650,
4073,
2201,
24533,
2582,
31483,
77333,
34575,
18441,
284,
2092,
198,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestMultiStoreQuery(t *testing.T) {
db := dbm.NewMemDB()
multi := newMultiStoreWithMounts(db, types.PruneNothing)
err := multi.LoadLatestVersion()
require.Nil(t, err)
k, v := []byte("wind"), []byte("blows")
k2, v2 := []byte("water"), []byte("flows")
// v3 := []byte("is cold")
cid := multi.Commit()
// Make sure we can get by name.
garbage := multi.getStoreByName("bad-name")
require.Nil(t, garbage)
// Set and commit data in one store.
store1 := multi.getStoreByName("store1").(types.KVStore)
store1.Set(k, v)
// ... and another.
store2 := multi.getStoreByName("store2").(types.KVStore)
store2.Set(k2, v2)
// Commit the multistore.
cid = multi.Commit()
ver := cid.Version
// Reload multistore from database
multi = newMultiStoreWithMounts(db, types.PruneNothing)
err = multi.LoadLatestVersion()
require.Nil(t, err)
// Test bad path.
query := abci.RequestQuery{Path: "/key", Data: k, Height: ver}
qres := multi.Query(query)
require.EqualValues(t, sdkerrors.ErrUnknownRequest.ABCICode(), qres.Code)
require.EqualValues(t, sdkerrors.ErrUnknownRequest.Codespace(), qres.Codespace)
query.Path = "h897fy32890rf63296r92"
qres = multi.Query(query)
require.EqualValues(t, sdkerrors.ErrUnknownRequest.ABCICode(), qres.Code)
require.EqualValues(t, sdkerrors.ErrUnknownRequest.Codespace(), qres.Codespace)
// Test invalid store name.
query.Path = "/garbage/key"
qres = multi.Query(query)
require.EqualValues(t, sdkerrors.ErrUnknownRequest.ABCICode(), qres.Code)
require.EqualValues(t, sdkerrors.ErrUnknownRequest.Codespace(), qres.Codespace)
// Test valid query with data.
query.Path = "/store1/key"
qres = multi.Query(query)
require.EqualValues(t, 0, qres.Code)
require.Equal(t, v, qres.Value)
// Test valid but empty query.
query.Path = "/store2/key"
query.Prove = true
qres = multi.Query(query)
require.EqualValues(t, 0, qres.Code)
require.Nil(t, qres.Value)
// Test store2 data.
query.Data = k2
qres = multi.Query(query)
require.EqualValues(t, 0, qres.Code)
require.Equal(t, v2, qres.Value)
} | explode_data.jsonl/21887 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 796
} | [
2830,
3393,
20358,
6093,
2859,
1155,
353,
8840,
836,
8,
341,
20939,
1669,
2927,
76,
7121,
18816,
3506,
741,
2109,
7068,
1669,
501,
20358,
6093,
2354,
16284,
82,
9791,
11,
4494,
17947,
2886,
23780,
340,
9859,
1669,
7299,
13969,
31992,
56... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMutexLockSessionExpired(t *testing.T) {
cli, err := integration2.NewClient(t, clientv3.Config{Endpoints: exampleEndpoints()})
if err != nil {
t.Fatal(err)
}
defer cli.Close()
// create two separate sessions for lock competition
s1, err := concurrency.NewSession(cli)
if err != nil {
t.Fatal(err)
}
defer s1.Close()
m1 := concurrency.NewMutex(s1, "/my-lock/")
s2, err := concurrency.NewSession(cli)
if err != nil {
t.Fatal(err)
}
m2 := concurrency.NewMutex(s2, "/my-lock/")
// acquire lock for s1
if err := m1.Lock(context.TODO()); err != nil {
t.Fatal(err)
}
fmt.Println("acquired lock for s1")
m2Locked := make(chan struct{})
var err2 error
go func() {
defer close(m2Locked)
// m2 blocks since m1 already acquired lock /my-lock/
if err2 = m2.Lock(context.TODO()); err2 == nil {
t.Error("expect session expired error")
}
}()
// NOTE: 下面注释主要是为了调试锁被持有后,接着申请锁的流程
// // revoke the session of m2 before unlock m1
// err = s2.Close()
// if err != nil {
// t.Fatal(err)
// }
// if err := m1.Unlock(context.TODO()); err != nil {
// t.Fatal(err)
// }
// fmt.Println("released lock for s1")
<-m2Locked
fmt.Println("acquired lock for s2")
} | explode_data.jsonl/81667 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 549
} | [
2830,
3393,
38099,
11989,
5283,
54349,
1155,
353,
8840,
836,
8,
341,
86448,
11,
1848,
1669,
17590,
17,
7121,
2959,
1155,
11,
2943,
85,
18,
10753,
90,
80786,
25,
3110,
80786,
96503,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
34... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestRequestCtxFormValue(t *testing.T) {
var ctx RequestCtx
var req Request
req.SetRequestURI("/foo/bar?baz=123&aaa=bbb")
req.SetBodyString("qqq=port&mmm=sddd")
req.Header.SetContentType("application/x-www-form-urlencoded")
ctx.Init(&req, nil, nil)
v := ctx.FormValue("baz")
if string(v) != "123" {
t.Fatalf("unexpected value %q. Expecting %q", v, "123")
}
v = ctx.FormValue("mmm")
if string(v) != "sddd" {
t.Fatalf("unexpected value %q. Expecting %q", v, "sddd")
}
v = ctx.FormValue("aaaasdfsdf")
if len(v) > 0 {
t.Fatalf("unexpected value for unknown key %q", v)
}
} | explode_data.jsonl/73284 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 261
} | [
2830,
3393,
1900,
23684,
1838,
1130,
1155,
353,
8840,
836,
8,
341,
2405,
5635,
6145,
23684,
198,
2405,
4232,
6145,
198,
24395,
4202,
1900,
10301,
4283,
7975,
49513,
30,
42573,
28,
16,
17,
18,
5,
32646,
28,
53151,
1138,
24395,
4202,
54... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestRetryFailed(t *testing.T) {
assert := internal.NewAssert(t, "TestRetryFailed")
var number int
increaseNumber := func() error {
number++
return errors.New("error occurs")
}
err := Retry(increaseNumber, RetryDuration(time.Microsecond*50))
assert.IsNotNil(err)
assert.Equal(DefaultRetryTimes, number)
} | explode_data.jsonl/30738 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 116
} | [
2830,
3393,
51560,
9408,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
5306,
7121,
8534,
1155,
11,
330,
2271,
51560,
9408,
5130,
2405,
1372,
526,
198,
17430,
19947,
2833,
1669,
2915,
368,
1465,
341,
197,
57135,
22940,
197,
853,
5975,
7121,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestS(t *testing.T) {
const want = "sashisuseso"
for _, v := range [2]string{"さしすせそ", "サシスセソ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
} | explode_data.jsonl/11298 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 98
} | [
2830,
3393,
50,
1155,
353,
8840,
836,
8,
341,
4777,
1366,
284,
330,
82,
988,
285,
4776,
78,
1837,
2023,
8358,
348,
1669,
2088,
508,
17,
30953,
4913,
29713,
14682,
17219,
71242,
26831,
497,
330,
59768,
56107,
21660,
63710,
124867,
9207,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestFlagSet_Register_Length(t *testing.T) {
resetFlagSet()
Register(&StringFlag{
Name: "config",
Usage: "--config",
EnvVar: constant.EgoConfigPath,
Default: ConfigDefaultToml,
Action: func(name string, fs *FlagSet) {},
})
assert.Equal(t, 1, len(flagset.flags))
} | explode_data.jsonl/50976 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 117
} | [
2830,
3393,
12135,
1649,
73124,
81620,
1155,
353,
8840,
836,
8,
341,
70343,
12135,
1649,
741,
79096,
2099,
703,
12135,
515,
197,
21297,
25,
262,
330,
1676,
756,
197,
197,
14783,
25,
256,
14482,
1676,
756,
197,
197,
14359,
3962,
25,
22... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestExecReport(t *testing.T) {
e := newTestExec()
s, _, _ := stressClient.NewTestStressTest()
rep := e.Report(s)
if rep != "" {
t.Fail()
}
} | explode_data.jsonl/64299 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 66
} | [
2830,
3393,
10216,
10361,
1155,
353,
8840,
836,
8,
341,
7727,
1669,
501,
2271,
10216,
741,
1903,
11,
8358,
716,
1669,
8464,
2959,
7121,
2271,
623,
673,
2271,
741,
73731,
1669,
384,
25702,
1141,
340,
743,
2064,
961,
1591,
341,
197,
324... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestEnQueue(t *testing.T) {
queue := NewQueue()
queue.EnQueue(1)
queue.EnQueue(2)
queue.EnQueue(3)
queue.EnQueue(4)
len1 := queue.Len()
if len1 != 4 {
t.Fatal("Push queue error!")
}
} | explode_data.jsonl/55227 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 90
} | [
2830,
3393,
1702,
7554,
1155,
353,
8840,
836,
8,
341,
46993,
1669,
1532,
7554,
2822,
46993,
22834,
7554,
7,
16,
340,
46993,
22834,
7554,
7,
17,
340,
46993,
22834,
7554,
7,
18,
340,
46993,
22834,
7554,
7,
19,
340,
33111,
16,
1669,
71... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestConvertAnyToKind(t *testing.T) {
t.Parallel()
resource.Require(t, resource.UnitTest)
_, err := ConvertAnyToKind(1234)
assert.NotNil(t, err)
} | explode_data.jsonl/66586 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 64
} | [
2830,
3393,
12012,
8610,
1249,
10629,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
50346,
81288,
1155,
11,
5101,
25159,
2271,
692,
197,
6878,
1848,
1669,
7169,
8610,
1249,
10629,
7,
16,
17,
18,
19,
340,
6948,
93882,
1155,
11,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func Test_PermissionOnWorkflowInferiorOfProject(t *testing.T) {
api, db, router, end := newTestAPI(t, bootstrap.InitiliazeDB)
defer end()
proj := assets.InsertTestProject(t, db, api.Cache, sdk.RandomString(10), sdk.RandomString(10), nil)
u, pass := assets.InsertLambdaUser(api.mustDB(), &proj.ProjectGroups[0].Group)
// Add a new group on project to let us update the previous group permission to READ (because we must have at least one RW permission on project)
newGr := assets.InsertTestGroup(t, db, sdk.RandomString(10))
test.NoError(t, group.InsertGroupInProject(db, proj.ID, newGr.ID, permission.PermissionReadWriteExecute))
test.NoError(t, group.InsertUserInGroup(db, newGr.ID, u.ID, true))
test.NoError(t, group.UpdateGroupRoleInProject(db, proj.ID, proj.ProjectGroups[0].Group.ID, permission.PermissionRead))
//First pipeline
pip := sdk.Pipeline{
ProjectID: proj.ID,
ProjectKey: proj.Key,
Name: "pip1",
}
test.NoError(t, pipeline.InsertPipeline(api.mustDB(), api.Cache, proj, &pip, u))
newWf := sdk.Workflow{
Name: sdk.RandomString(10),
WorkflowData: &sdk.WorkflowData{
Node: sdk.Node{
Name: "root",
Type: sdk.NodeTypePipeline,
Context: &sdk.NodeContext{
PipelineID: pip.ID,
},
},
},
ProjectID: proj.ID,
ProjectKey: proj.Key,
}
(&newWf).RetroMigrate()
//Prepare request to create workflow
vars := map[string]string{
"permProjectKey": proj.Key,
}
uri := router.GetRoute("POST", api.postWorkflowHandler, vars)
test.NotEmpty(t, uri)
req := assets.NewAuthentifiedRequest(t, u, pass, "POST", uri, &newWf)
//Do the request
w := httptest.NewRecorder()
router.Mux.ServeHTTP(w, req)
assert.Equal(t, 201, w.Code)
test.NoError(t, json.Unmarshal(w.Body.Bytes(), &newWf))
assert.NotEqual(t, 0, newWf.ID)
// Update workflow group to change READ to RWX and get permission on project in READ and permission on workflow in RWX to test edition and run
vars = map[string]string{
"key": proj.Key,
"permWorkflowName": newWf.Name,
"groupName": proj.ProjectGroups[0].Group.Name,
}
uri = router.GetRoute("PUT", api.putWorkflowGroupHandler, vars)
test.NotEmpty(t, uri)
newGp := sdk.GroupPermission{
Group: proj.ProjectGroups[0].Group,
Permission: permission.PermissionReadWriteExecute,
}
req = assets.NewAuthentifiedRequest(t, u, pass, "PUT", uri, &newGp)
//Do the request
w = httptest.NewRecorder()
router.Mux.ServeHTTP(w, req)
assert.Equal(t, 200, w.Code)
test.NoError(t, group.DeleteUserFromGroup(db, proj.ProjectGroups[0].Group.ID, u.ID))
proj2, errP := project.Load(api.mustDB(), api.Cache, proj.Key, u, project.LoadOptions.WithPipelines, project.LoadOptions.WithGroups)
test.NoError(t, errP)
wfLoaded, errL := workflow.Load(context.Background(), db, api.Cache, proj2, newWf.Name, u, workflow.LoadOptions{DeepPipeline: true})
test.NoError(t, errL)
assert.Equal(t, 2, len(wfLoaded.Groups))
// Try to update workflow
vars = map[string]string{
"key": proj.Key,
"permWorkflowName": wfLoaded.Name,
}
uri = router.GetRoute("PUT", api.putWorkflowHandler, vars)
test.NotEmpty(t, uri)
wfLoaded.HistoryLength = 300
req = assets.NewAuthentifiedRequest(t, u, pass, "PUT", uri, &wfLoaded)
//Do the request
w = httptest.NewRecorder()
router.Mux.ServeHTTP(w, req)
assert.Equal(t, 200, w.Code)
wfLoaded, errL = workflow.Load(context.Background(), db, api.Cache, proj2, newWf.Name, u, workflow.LoadOptions{})
test.NoError(t, errL)
assert.Equal(t, 2, len(wfLoaded.Groups))
assert.Equal(t, int64(300), wfLoaded.HistoryLength)
// Try to run workflow
vars = map[string]string{
"key": proj.Key,
"permWorkflowName": wfLoaded.Name,
}
uri = router.GetRoute("POST", api.postWorkflowRunHandler, vars)
test.NotEmpty(t, uri)
opts := sdk.WorkflowRunPostHandlerOption{FromNodeIDs: []int64{wfLoaded.WorkflowData.Node.ID}, Manual: &sdk.WorkflowNodeRunManual{User: *u}}
req = assets.NewAuthentifiedRequest(t, u, pass, "POST", uri, &opts)
//Do the request
w = httptest.NewRecorder()
router.Mux.ServeHTTP(w, req)
assert.Equal(t, 202, w.Code)
// Update permission group on workflow to switch RWX to RO
vars = map[string]string{
"key": proj.Key,
"permWorkflowName": newWf.Name,
"groupName": proj.ProjectGroups[0].Group.Name,
}
uri = router.GetRoute("PUT", api.putWorkflowGroupHandler, vars)
test.NotEmpty(t, uri)
newGp = sdk.GroupPermission{
Group: proj.ProjectGroups[0].Group,
Permission: permission.PermissionRead,
}
req = assets.NewAuthentifiedRequest(t, u, pass, "PUT", uri, &newGp)
//Do the request
w = httptest.NewRecorder()
router.Mux.ServeHTTP(w, req)
assert.Equal(t, 200, w.Code)
// try to run the workflow with user in read only
vars = map[string]string{
"key": proj.Key,
"permWorkflowName": wfLoaded.Name,
}
uri = router.GetRoute("POST", api.postWorkflowRunHandler, vars)
test.NotEmpty(t, uri)
// create user in read only
userRo, passRo := assets.InsertLambdaUser(api.mustDB(), &proj.ProjectGroups[0].Group)
req = assets.NewAuthentifiedRequest(t, userRo, passRo, "POST", uri, &opts)
//Do the request
w = httptest.NewRecorder()
router.Mux.ServeHTTP(w, req)
assert.Equal(t, 403, w.Code)
} | explode_data.jsonl/64788 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2157
} | [
2830,
3393,
53918,
2728,
1925,
62768,
641,
802,
2462,
2124,
7849,
1155,
353,
8840,
836,
8,
341,
54299,
11,
2927,
11,
9273,
11,
835,
1669,
501,
2271,
7082,
1155,
11,
26925,
26849,
24078,
2986,
3506,
340,
16867,
835,
741,
197,
30386,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAddingWorkflowDefaultValueIfValueNotExist(t *testing.T) {
ans := true
t.Run("WithoutDefaults", func(t *testing.T) {
cancel, controller := newController()
defer cancel()
workflow := unmarshalWF(helloWorldWf)
err := controller.setWorkflowDefaults(workflow)
assert.NoError(t, err)
assert.Equal(t, workflow, unmarshalWF(helloWorldWf))
})
t.Run("WithDefaults", func(t *testing.T) {
cancel, controller := newControllerWithDefaults()
defer cancel()
defaultWorkflowSpec := unmarshalWF(helloWorldWf)
err := controller.setWorkflowDefaults(defaultWorkflowSpec)
assert.NoError(t, err)
assert.Equal(t, defaultWorkflowSpec.Spec.HostNetwork, &ans)
assert.NotEqual(t, defaultWorkflowSpec, unmarshalWF(helloWorldWf))
assert.Equal(t, *defaultWorkflowSpec.Spec.HostNetwork, true)
})
} | explode_data.jsonl/2861 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 296
} | [
2830,
3393,
32308,
62768,
41533,
2679,
1130,
45535,
1155,
353,
8840,
836,
8,
341,
43579,
1669,
830,
198,
3244,
16708,
445,
26040,
16273,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
84441,
11,
6461,
1669,
501,
2051,
741,
197,
16867,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestService_UpdateOrganization(t *testing.T) {
type fields struct {
OrganizationsStore chronograf.OrganizationsStore
Logger chronograf.Logger
}
type args struct {
w *httptest.ResponseRecorder
r *http.Request
org *organizationRequest
}
tests := []struct {
name string
fields fields
args args
id string
wantStatus int
wantContentType string
wantBody string
}{
{
name: "Update Organization name",
args: args{
w: httptest.NewRecorder(),
r: httptest.NewRequest(
"GET",
"http://any.url", // can be any valid URL as we are bypassing mux
nil,
),
org: &organizationRequest{
Name: "The Bad Place",
},
},
fields: fields{
Logger: log.New(log.DebugLevel),
OrganizationsStore: &mocks.OrganizationsStore{
UpdateF: func(ctx context.Context, o *chronograf.Organization) error {
return nil
},
GetF: func(ctx context.Context, q chronograf.OrganizationQuery) (*chronograf.Organization, error) {
return &chronograf.Organization{
ID: "1337",
Name: "The Good Place",
DefaultRole: roles.ViewerRoleName,
}, nil
},
},
},
id: "1337",
wantStatus: http.StatusOK,
wantContentType: "application/json",
wantBody: `{"id":"1337","name":"The Bad Place","defaultRole":"viewer","links":{"self":"/chronograf/v1/organizations/1337"}}`,
},
{
name: "Update Organization - nothing to update",
args: args{
w: httptest.NewRecorder(),
r: httptest.NewRequest(
"GET",
"http://any.url", // can be any valid URL as we are bypassing mux
nil,
),
org: &organizationRequest{},
},
fields: fields{
Logger: log.New(log.DebugLevel),
OrganizationsStore: &mocks.OrganizationsStore{
UpdateF: func(ctx context.Context, o *chronograf.Organization) error {
return nil
},
GetF: func(ctx context.Context, q chronograf.OrganizationQuery) (*chronograf.Organization, error) {
return &chronograf.Organization{
ID: "1337",
Name: "The Good Place",
DefaultRole: roles.ViewerRoleName,
}, nil
},
},
},
id: "1337",
wantStatus: http.StatusUnprocessableEntity,
wantContentType: "application/json",
wantBody: `{"code":422,"message":"No fields to update"}`,
},
{
name: "Update Organization default role",
args: args{
w: httptest.NewRecorder(),
r: httptest.NewRequest(
"GET",
"http://any.url", // can be any valid URL as we are bypassing mux
nil,
),
org: &organizationRequest{
DefaultRole: roles.ViewerRoleName,
},
},
fields: fields{
Logger: log.New(log.DebugLevel),
OrganizationsStore: &mocks.OrganizationsStore{
UpdateF: func(ctx context.Context, o *chronograf.Organization) error {
return nil
},
GetF: func(ctx context.Context, q chronograf.OrganizationQuery) (*chronograf.Organization, error) {
return &chronograf.Organization{
ID: "1337",
Name: "The Good Place",
DefaultRole: roles.MemberRoleName,
}, nil
},
},
},
id: "1337",
wantStatus: http.StatusOK,
wantContentType: "application/json",
wantBody: `{"links":{"self":"/chronograf/v1/organizations/1337"},"id":"1337","name":"The Good Place","defaultRole":"viewer"}`,
},
{
name: "Update Organization - invalid update",
args: args{
w: httptest.NewRecorder(),
r: httptest.NewRequest(
"GET",
"http://any.url", // can be any valid URL as we are bypassing mux
nil,
),
org: &organizationRequest{},
},
fields: fields{
Logger: log.New(log.DebugLevel),
OrganizationsStore: &mocks.OrganizationsStore{
UpdateF: func(ctx context.Context, o *chronograf.Organization) error {
return nil
},
GetF: func(ctx context.Context, q chronograf.OrganizationQuery) (*chronograf.Organization, error) {
return nil, nil
},
},
},
id: "1337",
wantStatus: http.StatusUnprocessableEntity,
wantContentType: "application/json",
wantBody: `{"code":422,"message":"No fields to update"}`,
},
{
name: "Update Organization - invalid role",
args: args{
w: httptest.NewRecorder(),
r: httptest.NewRequest(
"GET",
"http://any.url", // can be any valid URL as we are bypassing mux
nil,
),
org: &organizationRequest{
DefaultRole: "sillyrole",
},
},
fields: fields{
Logger: log.New(log.DebugLevel),
OrganizationsStore: &mocks.OrganizationsStore{
UpdateF: func(ctx context.Context, o *chronograf.Organization) error {
return nil
},
GetF: func(ctx context.Context, q chronograf.OrganizationQuery) (*chronograf.Organization, error) {
return nil, nil
},
},
},
id: "1337",
wantStatus: http.StatusUnprocessableEntity,
wantContentType: "application/json",
wantBody: `{"code":422,"message":"default role must be member, viewer, editor, or admin"}`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := &Service{
Store: &mocks.Store{
OrganizationsStore: tt.fields.OrganizationsStore,
},
Logger: tt.fields.Logger,
}
tt.args.r = tt.args.r.WithContext(httprouter.WithParams(context.Background(),
httprouter.Params{
{
Key: "oid",
Value: tt.id,
},
}))
buf, _ := json.Marshal(tt.args.org)
tt.args.r.Body = ioutil.NopCloser(bytes.NewReader(buf))
s.UpdateOrganization(tt.args.w, tt.args.r)
resp := tt.args.w.Result()
content := resp.Header.Get("Content-Type")
body, _ := ioutil.ReadAll(resp.Body)
if resp.StatusCode != tt.wantStatus {
t.Errorf("%q. NewOrganization() = %v, want %v", tt.name, resp.StatusCode, tt.wantStatus)
}
if tt.wantContentType != "" && content != tt.wantContentType {
t.Errorf("%q. NewOrganization() = %v, want %v", tt.name, content, tt.wantContentType)
}
if eq, _ := jsonEqual(string(body), tt.wantBody); tt.wantBody != "" && !eq {
t.Errorf("%q. NewOrganization() = \n***%v***\n,\nwant\n***%v***", tt.name, string(body), tt.wantBody)
}
})
}
} | explode_data.jsonl/13412 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2880
} | [
2830,
3393,
1860,
47393,
41574,
1155,
353,
8840,
836,
8,
341,
13158,
5043,
2036,
341,
197,
197,
23227,
8040,
6093,
25986,
25058,
8382,
8443,
8040,
6093,
198,
197,
55861,
1797,
25986,
25058,
12750,
198,
197,
532,
13158,
2827,
2036,
341,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.