text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestIrisRedirect(t *testing.T) {
e := irisTester(t)
things := e.POST("/redirect").
Expect().
Status(http.StatusOK).JSON().Array()
things.Length().Equal(2)
things.Element(0).Object().ValueEqual("name", "foo")
things.Element(1).Object().ValueEqual("name", "bar")
} | explode_data.jsonl/66289 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 108
} | [
2830,
3393,
40,
5963,
17725,
1155,
353,
8840,
836,
8,
341,
7727,
1669,
63942,
58699,
1155,
692,
197,
57074,
1669,
384,
14721,
4283,
8117,
38609,
197,
35911,
25829,
197,
58321,
19886,
52989,
568,
5370,
1005,
1857,
2822,
197,
57074,
6833,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_makset(t *testing.T) {
test := func(s, expected string) {
t.Helper()
assert.T(t).This(string(Set(s))).Is(expected)
}
test("", "")
test("foo", "foo")
test("^foo", "^foo")
test("-foo", "-foo")
test("foo-", "foo-")
test("m-p", "mnop")
test("-0-9-", "-0123456789-")
} | explode_data.jsonl/56953 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 138
} | [
2830,
3393,
717,
585,
746,
1155,
353,
8840,
836,
8,
341,
18185,
1669,
2915,
1141,
11,
3601,
914,
8,
341,
197,
3244,
69282,
741,
197,
6948,
836,
1155,
568,
1986,
3609,
52474,
1141,
36334,
3872,
15253,
340,
197,
532,
18185,
19814,
14676... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_CheckMovieExists_True(t *testing.T) {
if testMovie == nil {
t.Skip("Skipping due to previous failure")
}
if ok, err := conn.CheckMovieExists(testMovie.Name); err != nil {
t.Fatal(err)
} else if !ok {
t.Fatal("CheckMovieExists() failed")
}
} | explode_data.jsonl/22006 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 104
} | [
2830,
3393,
28188,
19668,
15575,
93577,
1155,
353,
8840,
836,
8,
341,
743,
1273,
19668,
621,
2092,
341,
197,
3244,
57776,
445,
85945,
4152,
311,
3681,
7901,
1138,
197,
630,
743,
5394,
11,
1848,
1669,
4534,
10600,
19668,
15575,
8623,
196... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestSpheroDriverSetDataStreaming(t *testing.T) {
d := initTestSpheroDriver()
d.SetDataStreaming(DefaultDataStreamingConfig())
data := <-d.packetChannel
buf := new(bytes.Buffer)
binary.Write(buf, binary.BigEndian, DefaultDataStreamingConfig())
gobottest.Assert(t, data.body, buf.Bytes())
ret := d.Command("SetDataStreaming")(
map[string]interface{}{
"N": 100.0,
"M": 200.0,
"Mask": 300.0,
"Pcnt": 255.0,
"Mask2": 400.0,
},
)
gobottest.Assert(t, ret, nil)
data = <-d.packetChannel
dconfig := DataStreamingConfig{N: 100, M: 200, Mask: 300, Pcnt: 255, Mask2: 400}
buf = new(bytes.Buffer)
binary.Write(buf, binary.BigEndian, dconfig)
gobottest.Assert(t, data.body, buf.Bytes())
} | explode_data.jsonl/7291 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 303
} | [
2830,
3393,
50,
759,
2328,
11349,
1649,
1043,
76509,
1155,
353,
8840,
836,
8,
341,
2698,
1669,
2930,
2271,
50,
759,
2328,
11349,
741,
2698,
4202,
1043,
76509,
87874,
1043,
76509,
2648,
12367,
8924,
1669,
9119,
67,
67139,
9629,
271,
2639... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRunner_ScenarioMap(t *testing.T) {
runner, err := NewRunner(WithScenarios("testdata"))
if err != nil {
t.Fatal(err)
}
for _, file := range runner.ScenarioFiles() {
m, err := runner.ScenarioMap(context.FromT(t), file)
if err != nil {
t.Fatal(err)
}
if len(m) == 0 {
t.Fatal("failed to get scenarios")
}
for _, steps := range m {
if len(steps) == 0 {
t.Fatal("failed to get steps from scenario map")
}
}
}
} | explode_data.jsonl/56031 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 196
} | [
2830,
3393,
19486,
1098,
66,
20413,
2227,
1155,
353,
8840,
836,
8,
341,
197,
41736,
11,
1848,
1669,
1532,
19486,
7,
2354,
3326,
60494,
445,
92425,
5455,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
532,
2023,
8358,
103... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestValidateIgnorePreflightErrors(t *testing.T) {
var tests = []struct {
ignorePreflightErrors []string
expectedLen int
expectedError bool
}{
{[]string{}, 0, false}, // empty list
{[]string{"check1", "check2"}, 2, false}, // non-duplicate
{[]string{"check1", "check2", "check1"}, 2, false}, // duplicates
{[]string{"check1", "check2", "all"}, 3, true}, // non-duplicate, but 'all' present together wth individual checks
{[]string{"all"}, 1, false}, // skip all checks by using new flag
{[]string{"all"}, 1, false}, // skip all checks by using both old and new flags at the same time
}
for _, rt := range tests {
result, err := ValidateIgnorePreflightErrors(rt.ignorePreflightErrors)
switch {
case err != nil && !rt.expectedError:
t.Errorf("ValidateIgnorePreflightErrors: unexpected error for input (%s), error: %v", rt.ignorePreflightErrors, err)
case err == nil && rt.expectedError:
t.Errorf("ValidateIgnorePreflightErrors: expected error for input (%s) but got: %v", rt.ignorePreflightErrors, result)
case result.Len() != rt.expectedLen:
t.Errorf("ValidateIgnorePreflightErrors: expected Len = %d for input (%s) but got: %v, %v", rt.expectedLen, rt.ignorePreflightErrors, result.Len(), result)
}
}
} | explode_data.jsonl/39231 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 544
} | [
2830,
3393,
17926,
12497,
29978,
4145,
13877,
1155,
353,
8840,
836,
8,
341,
2405,
7032,
284,
3056,
1235,
341,
197,
197,
13130,
29978,
4145,
13877,
3056,
917,
198,
197,
42400,
11271,
1843,
526,
198,
197,
42400,
1454,
260,
1807,
198,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestGetLoadBalancingRuleName(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
az := GetTestCloud(ctrl)
az.PrimaryAvailabilitySetName = "primary"
svc := &v1.Service{
ObjectMeta: meta.ObjectMeta{
Annotations: map[string]string{},
UID: "257b9655-5137-4ad2-b091-ef3f07043ad3",
},
}
cases := []struct {
description string
subnetName string
isInternal bool
useStandardLB bool
protocol v1.Protocol
port int32
expected string
}{
{
description: "internal lb should have subnet name on the rule name",
subnetName: "shortsubnet",
isInternal: true,
useStandardLB: true,
protocol: v1.ProtocolTCP,
port: 9000,
expected: "a257b965551374ad2b091ef3f07043ad-shortsubnet-TCP-9000",
},
{
description: "internal standard lb should have subnet name on the rule name but truncated to 80 characters",
subnetName: "averylonnnngggnnnnnnnnnnnnnnnnnnnnnngggggggggggggggggggggggggggggggggggggsubet",
isInternal: true,
useStandardLB: true,
protocol: v1.ProtocolTCP,
port: 9000,
expected: "a257b965551374ad2b091ef3f07043ad-averylonnnngggnnnnnnnnnnnnnnnnnnnnnngg-TCP-9000",
},
{
description: "internal basic lb should have subnet name on the rule name but truncated to 80 characters",
subnetName: "averylonnnngggnnnnnnnnnnnnnnnnnnnnnngggggggggggggggggggggggggggggggggggggsubet",
isInternal: true,
useStandardLB: false,
protocol: v1.ProtocolTCP,
port: 9000,
expected: "a257b965551374ad2b091ef3f07043ad-averylonnnngggnnnnnnnnnnnnnnnnnnnnnngg-TCP-9000",
},
{
description: "external standard lb should not have subnet name on the rule name",
subnetName: "shortsubnet",
isInternal: false,
useStandardLB: true,
protocol: v1.ProtocolTCP,
port: 9000,
expected: "a257b965551374ad2b091ef3f07043ad-TCP-9000",
},
{
description: "external basic lb should not have subnet name on the rule name",
subnetName: "shortsubnet",
isInternal: false,
useStandardLB: false,
protocol: v1.ProtocolTCP,
port: 9000,
expected: "a257b965551374ad2b091ef3f07043ad-TCP-9000",
},
}
for _, c := range cases {
if c.useStandardLB {
az.Config.LoadBalancerSku = loadBalancerSkuStandard
} else {
az.Config.LoadBalancerSku = loadBalancerSkuBasic
}
svc.Annotations[ServiceAnnotationLoadBalancerInternalSubnet] = c.subnetName
svc.Annotations[ServiceAnnotationLoadBalancerInternal] = strconv.FormatBool(c.isInternal)
loadbalancerRuleName := az.getLoadBalancerRuleName(svc, c.protocol, c.port)
assert.Equal(t, c.expected, loadbalancerRuleName, c.description)
}
} | explode_data.jsonl/7455 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1249
} | [
2830,
3393,
1949,
5879,
37889,
8974,
11337,
675,
1155,
353,
8840,
836,
8,
341,
84381,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
23743,
991,
18176,
741,
197,
1370,
1669,
2126,
2271,
16055,
62100,
340,
197,
1370,
34884,
51703,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestFocusedMovesWithCustomKeyMap(t *testing.T) {
cols := []Column{
NewColumn("id", "ID", 3),
}
customKeys := KeyMap{
RowUp: key.NewBinding(key.WithKeys("ctrl+a")),
RowDown: key.NewBinding(key.WithKeys("ctrl+b")),
RowSelectToggle: key.NewBinding(key.WithKeys("ctrl+c")),
}
model := New(cols).WithRows([]Row{
NewRow(RowData{
"id": "first",
}),
NewRow(RowData{
"id": "second",
}),
NewRow(RowData{
"id": "third",
}),
}).Focused(true).WithKeyMap(customKeys)
keyUp := tea.KeyMsg{Type: tea.KeyUp}
keyDown := tea.KeyMsg{Type: tea.KeyDown}
keyCtrlA := tea.KeyMsg{Type: tea.KeyCtrlA}
keyCtrlB := tea.KeyMsg{Type: tea.KeyCtrlB}
assert.Equal(t, "ctrl+a", keyCtrlA.String(), "Test sanity check failed for ctrl+a")
assert.Equal(t, "ctrl+b", keyCtrlB.String(), "Test sanity check failed for ctrl+b")
curID := func() string {
str, ok := model.HighlightedRow().Data["id"].(string)
assert.True(t, ok, "Failed to convert to string")
return str
}
assert.Equal(t, "first", curID(), "Should start on first row")
model, _ = model.Update(keyDown)
assert.Equal(t, "first", curID(), "Down arrow should do nothing")
model, _ = model.Update(keyCtrlB)
assert.Equal(t, "second", curID(), "Custom key map for down failed")
model, _ = model.Update(keyUp)
assert.Equal(t, "second", curID(), "Up arrow should do nothing")
model, _ = model.Update(keyCtrlA)
assert.Equal(t, "first", curID(), "Custom key map for up failed")
} | explode_data.jsonl/45270 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 587
} | [
2830,
3393,
56906,
45789,
2354,
10268,
1592,
2227,
1155,
353,
8840,
836,
8,
341,
1444,
3069,
1669,
3056,
2933,
515,
197,
197,
3564,
2933,
445,
307,
497,
330,
915,
497,
220,
18,
1326,
197,
630,
1444,
1450,
8850,
1669,
5309,
2227,
515,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestValidateJobUpdateStatus(t *testing.T) {
type testcase struct {
old batch.Job
update batch.Job
}
successCases := []testcase{
{
old: batch.Job{
ObjectMeta: api.ObjectMeta{Name: "abc", Namespace: api.NamespaceDefault},
Status: batch.JobStatus{
Active: 1,
Succeeded: 2,
Failed: 3,
},
},
update: batch.Job{
ObjectMeta: api.ObjectMeta{Name: "abc", Namespace: api.NamespaceDefault},
Status: batch.JobStatus{
Active: 1,
Succeeded: 1,
Failed: 3,
},
},
},
}
for _, successCase := range successCases {
successCase.old.ObjectMeta.ResourceVersion = "1"
successCase.update.ObjectMeta.ResourceVersion = "1"
if errs := ValidateJobUpdateStatus(&successCase.update, &successCase.old); len(errs) != 0 {
t.Errorf("expected success: %v", errs)
}
}
errorCases := map[string]testcase{
"[status.active: Invalid value: -1: must be greater than or equal to 0, status.succeeded: Invalid value: -2: must be greater than or equal to 0]": {
old: batch.Job{
ObjectMeta: api.ObjectMeta{
Name: "abc",
Namespace: api.NamespaceDefault,
ResourceVersion: "10",
},
Status: batch.JobStatus{
Active: 1,
Succeeded: 2,
Failed: 3,
},
},
update: batch.Job{
ObjectMeta: api.ObjectMeta{
Name: "abc",
Namespace: api.NamespaceDefault,
ResourceVersion: "10",
},
Status: batch.JobStatus{
Active: -1,
Succeeded: -2,
Failed: 3,
},
},
},
}
for testName, errorCase := range errorCases {
errs := ValidateJobUpdateStatus(&errorCase.update, &errorCase.old)
if len(errs) == 0 {
t.Errorf("expected failure: %s", testName)
continue
}
if errs.ToAggregate().Error() != testName {
t.Errorf("expected '%s' got '%s'", errs.ToAggregate().Error(), testName)
}
}
} | explode_data.jsonl/23661 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 864
} | [
2830,
3393,
17926,
12245,
4289,
2522,
1155,
353,
8840,
836,
8,
341,
13158,
70080,
2036,
341,
197,
61828,
262,
7162,
45293,
198,
197,
27175,
7162,
45293,
198,
197,
630,
30553,
37302,
1669,
3056,
1944,
5638,
515,
197,
197,
515,
298,
61828... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func Test_Distinct_String(t *testing.T) {
expected := []string{"a", "b", "c"}
input := []string{"a", "a", "b", "b", "c", "c"}
actual := slices.Distinct(input)
assert.ElementsMatch(t, expected, actual)
} | explode_data.jsonl/12256 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 81
} | [
2830,
3393,
1557,
380,
7800,
31777,
1155,
353,
8840,
836,
8,
341,
42400,
1669,
3056,
917,
4913,
64,
497,
330,
65,
497,
330,
66,
16707,
22427,
1669,
3056,
917,
4913,
64,
497,
330,
64,
497,
330,
65,
497,
330,
65,
497,
330,
66,
497,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUniqueValidation(t *testing.T) {
tests := []struct {
param interface{}
expected bool
}{
// Arrays
{[2]string{"a", "b"}, true},
{[2]int{1, 2}, true},
{[2]float64{1, 2}, true},
{[2]interface{}{"a", "b"}, true},
{[2]interface{}{"a", 1}, true},
{[2]float64{1, 1}, false},
{[2]int{1, 1}, false},
{[2]string{"a", "a"}, false},
{[2]interface{}{"a", "a"}, false},
{[4]interface{}{"a", 1, "b", 1}, false},
// Slices
{[]string{"a", "b"}, true},
{[]int{1, 2}, true},
{[]float64{1, 2}, true},
{[]interface{}{"a", "b"}, true},
{[]interface{}{"a", 1}, true},
{[]float64{1, 1}, false},
{[]int{1, 1}, false},
{[]string{"a", "a"}, false},
{[]interface{}{"a", "a"}, false},
{[]interface{}{"a", 1, "b", 1}, false},
// Maps
{map[string]string{"one": "a", "two": "b"}, true},
{map[string]int{"one": 1, "two": 2}, true},
{map[string]float64{"one": 1, "two": 2}, true},
{map[string]interface{}{"one": "a", "two": "b"}, true},
{map[string]interface{}{"one": "a", "two": 1}, true},
{map[string]float64{"one": 1, "two": 1}, false},
{map[string]int{"one": 1, "two": 1}, false},
{map[string]string{"one": "a", "two": "a"}, false},
{map[string]interface{}{"one": "a", "two": "a"}, false},
{map[string]interface{}{"one": "a", "two": 1, "three": "b", "four": 1}, false},
}
validate := New()
for i, test := range tests {
errs := validate.Var(test.param, "unique")
if test.expected {
if !IsEqual(errs, nil) {
t.Fatalf("Index: %d unique failed Error: %v", i, errs)
}
} else {
if IsEqual(errs, nil) {
t.Fatalf("Index: %d unique failed Error: %v", i, errs)
} else {
val := getError(errs, "", "")
if val.Tag() != "unique" {
t.Fatalf("Index: %d unique failed Error: %v", i, errs)
}
}
}
}
PanicMatches(t, func() { _ = validate.Var(1.0, "unique") }, "Bad field type float64")
} | explode_data.jsonl/77356 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 873
} | [
2830,
3393,
22811,
13799,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
36037,
262,
3749,
16094,
197,
42400,
1807,
198,
197,
59403,
197,
197,
322,
22966,
198,
197,
197,
90,
58,
17,
30953,
4913,
64,
497,
330,
65,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUpdateActiveBotsSideEffect(t *testing.T) {
th := Setup(t).InitBasic()
defer th.TearDown()
bot, err := th.App.CreateBot(&model.Bot{
Username: "username",
Description: "a bot",
OwnerId: th.BasicUser.Id,
})
require.Nil(t, err)
defer th.App.PermanentDeleteBot(bot.UserId)
// Automatic deactivation disabled
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ServiceSettings.DisableBotsWhenOwnerIsDeactivated = false
})
th.App.UpdateActive(th.BasicUser, false)
retbot1, err := th.App.GetBot(bot.UserId, true)
require.Nil(t, err)
require.Zero(t, retbot1.DeleteAt)
user1, err := th.App.GetUser(bot.UserId)
require.Nil(t, err)
require.Zero(t, user1.DeleteAt)
th.App.UpdateActive(th.BasicUser, true)
// Automatic deactivation enabled
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ServiceSettings.DisableBotsWhenOwnerIsDeactivated = true
})
th.App.UpdateActive(th.BasicUser, false)
retbot2, err := th.App.GetBot(bot.UserId, true)
require.Nil(t, err)
require.NotZero(t, retbot2.DeleteAt)
user2, err := th.App.GetUser(bot.UserId)
require.Nil(t, err)
require.NotZero(t, user2.DeleteAt)
th.App.UpdateActive(th.BasicUser, true)
} | explode_data.jsonl/31415 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 470
} | [
2830,
3393,
4289,
5728,
33,
2412,
16384,
7738,
1155,
353,
8840,
836,
8,
341,
70479,
1669,
18626,
1155,
568,
3803,
15944,
741,
16867,
270,
836,
682,
4454,
2822,
2233,
354,
11,
1848,
1669,
270,
5105,
7251,
23502,
2099,
2528,
82775,
515,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSpanFinishWithError(t *testing.T) {
assert := assert.New(t)
err := errors.New("test error")
span := newBasicSpan("web.request")
span.Finish(WithError(err))
assert.Equal(int32(1), span.Error)
assert.Equal("test error", span.Meta[ext.ErrorMsg])
assert.Equal("*errors.errorString", span.Meta[ext.ErrorType])
assert.NotEmpty(span.Meta[ext.ErrorStack])
} | explode_data.jsonl/42840 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 135
} | [
2830,
3393,
12485,
25664,
66102,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
692,
9859,
1669,
5975,
7121,
445,
1944,
1465,
1138,
197,
1480,
1669,
501,
15944,
12485,
445,
2911,
8223,
1138,
197,
1480,
991,
18176,
7,
66102,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetAllActors(t *testing.T) {
tf.UnitTest(t)
ctx := context.Background()
bs := bstore.NewBlockstore(repo.NewInMemoryRepo().Datastore())
cst := cborutil.NewIpldStore(bs)
tree := NewState(cst)
addr := vmaddr.NewForTestGetter()()
actor := actor.Actor{Code: e.NewCid(builtin.AccountActorCodeID), CallSeqNum: 1234, Balance: abi.NewTokenAmount(123)}
err := tree.SetActor(ctx, addr, &actor)
assert.NoError(t, err)
_, err = tree.Commit(ctx)
require.NoError(t, err)
results := tree.GetAllActors(ctx)
for result := range results {
assert.Equal(t, addr, result.Key)
assert.Equal(t, actor.Code, result.Actor.Code)
assert.Equal(t, actor.CallSeqNum, result.Actor.CallSeqNum)
assert.Equal(t, actor.Balance, result.Actor.Balance)
}
} | explode_data.jsonl/9602 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 298
} | [
2830,
3393,
1949,
2403,
2414,
1087,
1155,
353,
8840,
836,
8,
341,
3244,
69,
25159,
2271,
1155,
692,
20985,
1669,
2266,
19047,
741,
93801,
1669,
293,
4314,
7121,
4713,
4314,
50608,
7121,
641,
10642,
25243,
1005,
1043,
4314,
2398,
1444,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestNewClient(t *testing.T) {
secret := "testsecret"
token := "testtoken"
wantURL, _ := url.Parse(APIEndpointBase)
wantDataURL, _ := url.Parse(APIEndpointBaseData)
client, err := New(secret, token)
if err != nil {
t.Fatal(err)
}
if client.channelSecret != secret {
t.Errorf("channelSecret %s; want %s", client.channelSecret, secret)
}
if client.channelToken != token {
t.Errorf("channelToken %s; want %s", client.channelToken, token)
}
if !reflect.DeepEqual(client.endpointBase, wantURL) {
t.Errorf("endpointBase %v; want %v", client.endpointBase, wantURL)
}
if !reflect.DeepEqual(client.endpointBaseData, wantDataURL) {
t.Errorf("endpointBase %v; want %v", client.endpointBaseData, wantDataURL)
}
if client.httpClient != http.DefaultClient {
t.Errorf("httpClient %p; want %p", client.httpClient, http.DefaultClient)
}
} | explode_data.jsonl/69894 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 321
} | [
2830,
3393,
3564,
2959,
1155,
353,
8840,
836,
8,
341,
197,
20474,
1669,
330,
1944,
20474,
698,
43947,
1669,
330,
1944,
5839,
698,
50780,
3144,
11,
716,
1669,
2515,
8937,
48953,
27380,
3978,
340,
50780,
75262,
11,
716,
1669,
2515,
8937,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestMergeMethodCheckerAndPRMergeMethod(t *testing.T) {
squashLabel := "tide/squash"
mergeLabel := "tide/merge"
rebaseLabel := "tide/rebase"
tideConfig := config.Tide{
SquashLabel: squashLabel,
MergeLabel: mergeLabel,
RebaseLabel: rebaseLabel,
MergeType: map[string]github.PullRequestMergeType{
"o/configured-rebase": github.MergeRebase, // GH client allows merge, rebase
"o/configured-squash-allow-rebase": github.MergeSquash, // GH client allows merge, squash, rebase
"o/configure-re-base": github.MergeRebase, // GH client allows merge
},
}
cfg := func() *config.Config { return &config.Config{ProwConfig: config.ProwConfig{Tide: tideConfig}} }
mmc := newMergeChecker(cfg, &fgc{})
testcases := []struct {
name string
repo string
labels []string
conflict bool
expectedMethod github.PullRequestMergeType
expectErr bool
expectConflictErr bool
}{
{
name: "default method without PR label override",
repo: "foo",
expectedMethod: github.MergeMerge,
},
{
name: "irrelevant PR labels ignored",
repo: "foo",
labels: []string{"unrelated"},
expectedMethod: github.MergeMerge,
},
{
name: "default method overridden by a PR label",
repo: "allow-squash-nomerge",
labels: []string{"tide/squash"},
expectedMethod: github.MergeSquash,
},
{
name: "use method configured for repo in tide config",
repo: "configured-squash-allow-rebase",
labels: []string{"unrelated"},
expectedMethod: github.MergeSquash,
},
{
name: "tide config method overridden by a PR label",
repo: "configured-squash-allow-rebase",
labels: []string{"unrelated", "tide/rebase"},
expectedMethod: github.MergeRebase,
},
{
name: "multiple merge method PR labels should not merge",
repo: "foo",
labels: []string{"tide/squash", "tide/rebase"},
expectErr: true,
},
{
name: "merge conflict",
repo: "foo",
labels: []string{"unrelated"},
conflict: true,
expectedMethod: github.MergeMerge,
expectErr: false,
expectConflictErr: true,
},
{
name: "squash label conflicts with merge only GH settings",
repo: "foo",
labels: []string{"tide/squash"},
expectedMethod: github.MergeSquash,
expectErr: false,
expectConflictErr: true,
},
{
name: "rebase method tide config conflicts with merge only GH settings",
repo: "configure-re-base",
labels: []string{"unrelated"},
expectedMethod: github.MergeRebase,
expectErr: false,
expectConflictErr: true,
},
{
name: "default method conflicts with squash only GH settings",
repo: "squash-nomerge",
labels: []string{"unrelated"},
expectedMethod: github.MergeMerge,
expectErr: false,
expectConflictErr: true,
},
}
for _, tc := range testcases {
t.Run(tc.name, func(t *testing.T) {
pr := &PullRequest{
Repository: struct {
Name githubql.String
NameWithOwner githubql.String
Owner struct {
Login githubql.String
}
}{
Name: githubql.String(tc.repo),
Owner: struct {
Login githubql.String
}{
Login: githubql.String("o"),
},
},
Labels: struct {
Nodes []struct{ Name githubql.String }
}{
Nodes: []struct{ Name githubql.String }{},
},
}
for _, label := range tc.labels {
labelNode := struct{ Name githubql.String }{Name: githubql.String(label)}
pr.Labels.Nodes = append(pr.Labels.Nodes, labelNode)
}
if tc.conflict {
pr.Mergeable = githubql.MergeableStateConflicting
}
actual, err := prMergeMethod(tideConfig, pr)
if err != nil {
if !tc.expectErr {
t.Errorf("unexpected error: %v", err)
}
return
} else if tc.expectErr {
t.Errorf("missing expected error")
return
}
if tc.expectedMethod != actual {
t.Errorf("wanted: %q, got: %q", tc.expectedMethod, actual)
}
reason, err := mmc.isAllowed(pr)
if err != nil {
t.Errorf("unexpected processing error: %v", err)
} else if reason != "" {
if !tc.expectConflictErr {
t.Errorf("unexpected merge method conflict error: %v", err)
}
return
} else if tc.expectConflictErr {
t.Errorf("missing expected merge method conflict error")
return
}
})
}
} | explode_data.jsonl/42783 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2212
} | [
2830,
3393,
52096,
3523,
35188,
3036,
6480,
52096,
3523,
1155,
353,
8840,
836,
8,
341,
1903,
446,
988,
2476,
1669,
330,
83,
577,
2687,
446,
988,
698,
197,
19052,
2476,
1669,
330,
83,
577,
14,
19052,
698,
17200,
3152,
2476,
1669,
330,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestStream_GetZoneID(t *testing.T) {
t.Run("test", func(t *testing.T) {
assert := base.NewAssert(t)
for i := 0; i < 1000; i++ {
v := NewStream()
id := uint16(i)
v.SetZoneID(id)
assert(v.GetZoneID()).Equals(id)
v.Release()
}
})
} | explode_data.jsonl/21168 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 127
} | [
2830,
3393,
3027,
13614,
15363,
915,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
1944,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
6948,
1669,
2331,
7121,
8534,
1155,
340,
197,
2023,
600,
1669,
220,
15,
26,
600,
366,
220,
16,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestNilDirectMsg(t *testing.T) {
t.Parallel()
mc := &mockCommitter{Mock: &mock.Mock{}}
mc.On("LedgerHeight", mock.Anything).Return(uint64(1), nil)
g := &mocks.GossipMock{}
g.On("Accept", mock.Anything, false).Return(make(<-chan *proto.GossipMessage), nil)
g.On("Accept", mock.Anything, true).Return(nil, make(chan proto.ReceivedMessage))
p := newPeerNodeWithGossip(0, mc, noopPeerIdentityAcceptor, g)
defer p.shutdown()
p.s.handleStateRequest(nil)
p.s.directMessage(nil)
sMsg, _ := p.s.stateRequestMessage(uint64(10), uint64(8)).NoopSign()
req := &comm.ReceivedMessageImpl{
SignedGossipMessage: sMsg,
}
p.s.directMessage(req)
} | explode_data.jsonl/5633 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 265
} | [
2830,
3393,
19064,
16027,
6611,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
97662,
1669,
609,
16712,
1092,
16126,
90,
11571,
25,
609,
16712,
24664,
6257,
532,
97662,
8071,
445,
60850,
1389,
3640,
497,
7860,
13311,
1596,
568,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTableDeleteDest(t *testing.T) {
peerT := TableCreatePeer()
pathT := TableCreatePath(peerT)
ipv4t := NewTable(bgp.RF_IPv4_UC)
for _, path := range pathT {
dest := NewDestination(path.GetNlri(), 0)
ipv4t.setDestination(dest)
}
dest := NewDestination(pathT[0].GetNlri(), 0)
ipv4t.setDestination(dest)
ipv4t.deleteDest(dest)
gdest := ipv4t.GetDestination(pathT[0].GetNlri())
assert.Nil(t, gdest)
} | explode_data.jsonl/6674 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 185
} | [
2830,
3393,
2556,
6435,
34830,
1155,
353,
8840,
836,
8,
341,
197,
16537,
51,
1669,
6633,
4021,
30888,
741,
26781,
51,
1669,
6633,
4021,
1820,
63372,
51,
340,
197,
42676,
19,
83,
1669,
1532,
2556,
1883,
21888,
2013,
37,
16607,
85,
19,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestEncode(t *testing.T) {
for _, tc := range testCases {
t.Logf("Testing %d\n", tc.n)
if v := Encode(tc.n); v != tc.b62 {
t.Fatalf("%d encoded to %s (should be %s)", tc.n, v, tc.b62)
}
}
} | explode_data.jsonl/164 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 103
} | [
2830,
3393,
32535,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17130,
1669,
2088,
1273,
37302,
341,
197,
3244,
98954,
445,
16451,
1018,
67,
1699,
497,
17130,
1253,
340,
197,
743,
348,
1669,
56562,
44415,
1253,
1215,
348,
961,
17130,
948,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestTunnel(t *testing.T) {
// create a new tunnel client
tunA := NewTunnel(
Address("127.0.0.1:9096"),
Nodes("127.0.0.1:9097"),
)
// create a new tunnel server
tunB := NewTunnel(
Address("127.0.0.1:9097"),
)
// start tunB
err := tunB.Connect()
if err != nil {
t.Fatal(err)
}
defer tunB.Close()
time.Sleep(time.Millisecond * 50)
// start tunA
err = tunA.Connect()
if err != nil {
t.Fatal(err)
}
defer tunA.Close()
time.Sleep(time.Millisecond * 50)
var wg sync.WaitGroup
// start accepting connections
// on tunnel A
wg.Add(1)
go testAccept(t, tunA, &wg)
time.Sleep(time.Millisecond * 50)
// dial and send via B
testSend(t, tunB)
// wait until done
wg.Wait()
} | explode_data.jsonl/67573 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 312
} | [
2830,
3393,
51,
40292,
1155,
353,
8840,
836,
8,
341,
197,
322,
1855,
264,
501,
25629,
2943,
198,
3244,
359,
32,
1669,
1532,
51,
40292,
1006,
197,
98090,
445,
16,
17,
22,
13,
15,
13,
15,
13,
16,
25,
24,
15,
24,
21,
4461,
197,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestSession_CallMethod(t *testing.T) {
s, err := NewSessionRaw()
if err != nil {
t.Fatal(err)
}
s.Model(&Account{})
_ = s.DropTable()
_ = s.CreateTable()
_, _ = s.Insert(&Account{1, "123456"}, &Account{2, "qwerty"})
u := &Account{}
err = s.First(u)
if err != nil || u.ID != 1001 || u.Password != "******" {
t.Log(err)
t.Fatal("Failed to call hooks after query, got", u)
}
} | explode_data.jsonl/16328 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 175
} | [
2830,
3393,
5283,
76028,
3523,
1155,
353,
8840,
836,
8,
341,
1903,
11,
1848,
1669,
1532,
5283,
20015,
741,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
630,
1903,
5659,
2099,
7365,
37790,
197,
62,
284,
274,
58626,
741,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestActionModDataLink(t *testing.T) {
var tests = []struct {
desc string
a Action
out string
invalid bool
}{
{
desc: "destination too short",
a: ModDataLinkDestination(net.HardwareAddr{0xde}),
invalid: true,
},
{
desc: "destination too long",
a: ModDataLinkDestination(net.HardwareAddr{0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe}),
invalid: true,
},
{
desc: "source too short",
a: ModDataLinkSource(net.HardwareAddr{0xde}),
invalid: true,
},
{
desc: "source too long",
a: ModDataLinkSource(net.HardwareAddr{0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe}),
invalid: true,
},
{
desc: "destination OK",
a: ModDataLinkDestination(net.HardwareAddr{0xde, 0xad, 0xbe, 0xef, 0xde, 0xad}),
out: "mod_dl_dst:de:ad:be:ef:de:ad",
},
{
desc: "source OK",
a: ModDataLinkSource(net.HardwareAddr{0xde, 0xad, 0xbe, 0xef, 0xde, 0xad}),
out: "mod_dl_src:de:ad:be:ef:de:ad",
},
}
for _, tt := range tests {
t.Run(tt.desc, func(t *testing.T) {
action, err := tt.a.MarshalText()
if err != nil && !tt.invalid {
t.Fatalf("unexpected error: %v", err)
}
if want, got := tt.out, string(action); want != got {
t.Fatalf("unexpected Action:\n- want: %q\n- got: %q",
want, got)
}
})
}
} | explode_data.jsonl/49509 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 693
} | [
2830,
3393,
2512,
4459,
1043,
3939,
1155,
353,
8840,
836,
8,
341,
2405,
7032,
284,
3056,
1235,
341,
197,
41653,
262,
914,
198,
197,
11323,
981,
5586,
198,
197,
13967,
257,
914,
198,
197,
197,
11808,
1807,
198,
197,
59403,
197,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestDaoTagCounts(t *testing.T) {
var (
c = context.TODO()
tids = []int64{1, 2, 3}
)
convey.Convey("TagCounts", t, func(ctx convey.C) {
res, err := d.TagCounts(c, tids)
ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
} | explode_data.jsonl/36695 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 164
} | [
2830,
3393,
12197,
5668,
63731,
1155,
353,
8840,
836,
8,
341,
2405,
2399,
197,
1444,
262,
284,
2266,
90988,
741,
197,
3244,
3365,
284,
3056,
396,
21,
19,
90,
16,
11,
220,
17,
11,
220,
18,
532,
197,
340,
37203,
5617,
4801,
5617,
44... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServer_Rejects_HeadersEnd_Then_Continuation(t *testing.T) {
testServerRejectsConn(t, func(st *serverTester) {
st.writeHeaders(HeadersFrameParam{
StreamID: 1,
BlockFragment: st.encodeHeader(),
EndStream: true,
EndHeaders: true,
})
st.wantHeaders()
if err := st.fr.WriteContinuation(1, true, encodeHeaderNoImplicit(t, "foo", "bar")); err != nil {
t.Fatal(err)
}
})
} | explode_data.jsonl/71653 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 182
} | [
2830,
3393,
5475,
50693,
583,
82,
62,
10574,
3727,
62,
12209,
62,
36139,
4002,
1155,
353,
8840,
836,
8,
341,
18185,
5475,
78413,
82,
9701,
1155,
11,
2915,
5895,
353,
4030,
58699,
8,
341,
197,
18388,
3836,
10574,
7,
10574,
4369,
2001,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSendActiveMetric(t *testing.T) {
zabbixHost := "127.0.0.1:10051"
// Simulate a Zabbix server to get the data sent
listener, lerr := net.Listen("tcp", zabbixHost)
if lerr != nil {
t.Fatal(lerr)
}
defer listener.Close()
errs := make(chan error, 1)
go func(chan error) {
conn, err := listener.Accept()
if err != nil {
errs <- err
}
// Obtain request from the mock zabbix server
// Read protocol header and version
header := make([]byte, 5)
_, err = conn.Read(header)
if err != nil {
errs <- err
}
// Read data length
dataLengthRaw := make([]byte, 8)
_, err = conn.Read(dataLengthRaw)
if err != nil {
errs <- err
}
dataLength := binary.LittleEndian.Uint64(dataLengthRaw)
// Read data content
content := make([]byte, dataLength)
_, err = conn.Read(content)
if err != nil {
errs <- err
}
// The zabbix output checks that there are not errors
// Zabbix header length not used, set to 1
resp := []byte("ZBXD\x01\x00\x00\x00\x00\x00\x00\x00\x00{\"response\":\"success\",\"info\":\"processed: 1; failed: 0; total: 1; seconds spent: 0.000030\"}")
_, err = conn.Write(resp)
if err != nil {
errs <- err
}
// Close connection after reading the client data
conn.Close()
// Strip zabbix header and get JSON request
var request ZabbixRequest
err = json.Unmarshal(content, &request)
if err != nil {
errs <- err
}
expectedRequest := "agent data"
if expectedRequest != request.Request {
errs <- fmt.Errorf("Incorrect request field received, expected '%s'", expectedRequest)
}
// End zabbix fake backend
errs <- nil
}(errs)
m := NewMetric("zabbixAgent1", "ping", "13", true)
s := NewSender(zabbixHost)
resActive, errActive, resTrapper, errTrapper := s.SendMetrics([]*Metric{m})
if errActive != nil {
t.Fatalf("error sending active metric: %v", errActive)
}
if errTrapper != nil {
t.Fatalf("trapper error should be nil, we are not sending trapper metrics: %v", errTrapper)
}
raInfo, err := resActive.GetInfo()
if err != nil {
t.Fatalf("error in response Trapper: %v", err)
}
if raInfo.Failed != 0 {
t.Errorf("Failed error expected 0 got %d", raInfo.Failed)
}
if raInfo.Processed != 1 {
t.Errorf("Processed error expected 1 got %d", raInfo.Processed)
}
if raInfo.Total != 1 {
t.Errorf("Total error expected 1 got %d", raInfo.Total)
}
_, err = resTrapper.GetInfo()
if err == nil {
t.Fatalf("No response trapper expected: %v", err)
}
// Wait for zabbix server emulator to finish
err = <-errs
if err != nil {
t.Fatalf("Fake zabbix backend should not produce any errors: %v", err)
}
} | explode_data.jsonl/26571 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1019
} | [
2830,
3393,
11505,
5728,
54310,
1155,
353,
8840,
836,
8,
341,
20832,
95442,
9296,
1669,
330,
16,
17,
22,
13,
15,
13,
15,
13,
16,
25,
16,
15,
15,
20,
16,
1837,
197,
322,
4467,
6334,
264,
1863,
95442,
3538,
311,
633,
279,
821,
320... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func TestRecipients(t *testing.T) {
m := NewMessage()
m.SetHeaders(map[string][]string{
"From": {"from@example.com"},
"To": {"to@example.com"},
"Cc": {"cc@example.com"},
"Bcc": {"bcc1@example.com", "bcc2@example.com"},
"Subject": {"Hello!"},
})
m.SetBody("text/plain", "Test message")
want := &message{
from: "from@example.com",
to: []string{"to@example.com", "cc@example.com", "bcc1@example.com", "bcc2@example.com"},
content: "From: from@example.com\r\n" +
"To: to@example.com\r\n" +
"Cc: cc@example.com\r\n" +
"Subject: Hello!\r\n" +
"Content-Type: text/plain; charset=UTF-8\r\n" +
"Content-Transfer-Encoding: quoted-printable\r\n" +
"\r\n" +
"Test message",
}
testMessage(t, m, 0, want)
} | explode_data.jsonl/31572 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 360
} | [
2830,
3393,
3820,
47647,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
1532,
2052,
741,
2109,
4202,
10574,
9147,
14032,
45725,
917,
515,
197,
197,
1,
3830,
788,
262,
5212,
1499,
35487,
905,
7115,
197,
197,
65120,
788,
414,
5212,
983,
3548... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_NewMessageDispatcher_ErrorPosting(t *testing.T) {
asserter := assert.New(t)
inputCtx := testutil.NewTestContext()
inputConnectionID := "weeee"
inputMessage := api.Message{
Type: "foo",
Body: "bar",
}
expectedMessageContent, err := ioutil.ReadFile("fixture/errorMessage.json")
if !asserter.NoError(err) {
return
}
expectedError := "stuff went wrong"
poster := &MockConnectionPoster{}
poster.On("PostToConnectionWithContext", inputCtx, &apigatewaymanagementapi.PostToConnectionInput{
ConnectionId: aws.String(inputConnectionID),
Data: expectedMessageContent,
}, emptyOpts).Return(nil, errors.New(expectedError))
err = api.NewMessageDispatcher(poster)(inputCtx, inputConnectionID, inputMessage)
asserter.EqualError(err, expectedError)
} | explode_data.jsonl/19892 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 270
} | [
2830,
3393,
39582,
2052,
21839,
28651,
81652,
1155,
353,
8840,
836,
8,
341,
197,
33758,
465,
1669,
2060,
7121,
1155,
692,
22427,
23684,
1669,
1273,
1314,
7121,
2271,
1972,
741,
22427,
4526,
915,
1669,
330,
896,
34063,
698,
22427,
2052,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestMapProxy_ExecuteOnKeys(t *testing.T) {
config := hazelcast.NewHazelcastConfig()
expectedValue := "newValue"
processor := newSimpleEntryProcessor(expectedValue)
config.SerializationConfig().AddDataSerializableFactory(processor.identifiedFactory.factoryId, processor.identifiedFactory)
client, _ := hazelcast.NewHazelcastClientWithConfig(config)
mp2, _ := client.GetMap("testMap2")
for i := 0; i < 10; i++ {
testKey := "testingKey" + strconv.Itoa(i)
testValue := "testingValue" + strconv.Itoa(i)
mp2.Put(testKey, testValue)
}
keys := make([]interface{}, 2)
keys[0] = "testingKey1"
keys[1] = "testingKey2"
result, err := mp2.ExecuteOnKeys(keys, processor)
AssertEqualf(t, err, len(result), 2, "ExecuteOnKeys failed.")
newValue, err := mp2.Get("testingKey1")
AssertEqualf(t, err, newValue, expectedValue, "ExecuteOnKeys failed")
newValue, err = mp2.Get("testingKey2")
AssertEqualf(t, err, newValue, expectedValue, "ExecuteOnKeys failed")
mp2.Clear()
client.Shutdown()
} | explode_data.jsonl/57038 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 360
} | [
2830,
3393,
2227,
16219,
83453,
1925,
8850,
1155,
353,
8840,
836,
8,
1476,
25873,
1669,
20144,
301,
3829,
7121,
39,
68326,
3829,
2648,
741,
42400,
1130,
1669,
330,
52830,
698,
197,
29474,
1669,
501,
16374,
5874,
22946,
15253,
1130,
340,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSetResources(t *testing.T) {
for _, tc := range []struct {
name string
callConfigParse bool
args []string
tiltfileResources []model.ManifestName
expectedResources []model.ManifestName
}{
{"neither", false, nil, nil, []model.ManifestName{"a", "b"}},
{"neither, with config.parse", true, nil, nil, []model.ManifestName{"a", "b"}},
{"args only", false, []string{"a"}, nil, []model.ManifestName{"a"}},
{"args only, with config.parse", true, []string{"a"}, nil, []model.ManifestName{"a", "b"}},
{"tiltfile only", false, nil, []model.ManifestName{"b"}, []model.ManifestName{"b"}},
{"tiltfile only, with config.parse", true, nil, []model.ManifestName{"b"}, []model.ManifestName{"b"}},
{"both", false, []string{"a"}, []model.ManifestName{"b"}, []model.ManifestName{"b"}},
{"both, with config.parse", true, []string{"a"}, []model.ManifestName{"b"}, []model.ManifestName{"b"}},
} {
t.Run(tc.name, func(t *testing.T) {
f := NewFixture(t, model.NewUserConfigState(tc.args))
defer f.TearDown()
setResources := ""
if len(tc.tiltfileResources) > 0 {
var rs []string
for _, mn := range tc.tiltfileResources {
rs = append(rs, fmt.Sprintf("'%s'", mn))
}
setResources = fmt.Sprintf("config.set_enabled_resources([%s])", strings.Join(rs, ", "))
}
configParse := ""
if tc.callConfigParse {
configParse = `
config.define_string_list('resources', args=True)
config.parse()`
}
tiltfile := fmt.Sprintf("%s\n%s\n", setResources, configParse)
f.File("Tiltfile", tiltfile)
result, err := f.ExecFile("Tiltfile")
require.NoError(t, err)
manifests := []model.Manifest{{Name: "a"}, {Name: "b"}}
actual, err := MustState(result).EnabledResources(manifests)
require.NoError(t, err)
expectedResourcesByName := make(map[model.ManifestName]bool)
for _, er := range tc.expectedResources {
expectedResourcesByName[er] = true
}
var expected []model.Manifest
for _, m := range manifests {
if expectedResourcesByName[m.Name] {
expected = append(expected, m)
}
}
require.Equal(t, expected, actual)
})
}
} | explode_data.jsonl/65224 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 867
} | [
2830,
3393,
1649,
11277,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17130,
1669,
2088,
3056,
1235,
341,
197,
11609,
1060,
914,
198,
197,
67288,
2648,
14463,
256,
1807,
198,
197,
31215,
1060,
3056,
917,
198,
197,
197,
1646,
83,
1192,
11... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestTrafficClassV6(t *testing.T) {
c := context.New(t, defaultMTU)
defer c.Cleanup()
c.CreateV6Endpoint(false)
const tos = 0xC0
if err := c.EP.SetSockOptInt(tcpip.IPv6TrafficClassOption, tos); err != nil {
t.Errorf("SetSockOpInt(IPv6TrafficClassOption, %d) failed: %s", tos, err)
}
v, err := c.EP.GetSockOptInt(tcpip.IPv6TrafficClassOption)
if err != nil {
t.Fatalf("GetSockoptInt(IPv6TrafficClassOption) failed: %s", err)
}
if v != tos {
t.Errorf("got GetSockOptInt(IPv6TrafficClassOption) = %d, want = %d", v, tos)
}
// Test the connection request.
testV6Connect(t, c, checker.TOS(tos, 0))
data := []byte{1, 2, 3}
var r bytes.Reader
r.Reset(data)
if _, err := c.EP.Write(&r, tcpip.WriteOptions{}); err != nil {
t.Fatalf("Write failed: %s", err)
}
// Check that data is received.
b := c.GetV6Packet()
iss := seqnum.Value(context.TestInitialSequenceNumber).Add(1)
checker.IPv6(t, b,
checker.PayloadLen(len(data)+header.TCPMinimumSize),
checker.TCP(
checker.DstPort(context.TestPort),
checker.TCPSeqNum(uint32(c.IRS)+1),
checker.TCPAckNum(uint32(iss)),
checker.TCPFlagsMatch(header.TCPFlagAck, ^header.TCPFlagPsh),
),
checker.TOS(tos, 0),
)
if p := b[header.IPv6MinimumSize+header.TCPMinimumSize:]; !bytes.Equal(data, p) {
t.Errorf("got data = %x, want = %x", p, data)
}
} | explode_data.jsonl/75945 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 605
} | [
2830,
3393,
87229,
1957,
53,
21,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
2266,
7121,
1155,
11,
1638,
8505,
52,
340,
16867,
272,
727,
60639,
2822,
1444,
7251,
53,
21,
27380,
3576,
692,
4777,
311,
82,
284,
220,
15,
12125,
15,
198,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestInstallHgHooks(t *testing.T) {
assert := assert.New(t)
repo, client := helpers.CreateHgRepo(t, "hg-repo")
defer helpers.CleanupHgRepo(t, client)
repo.InstallHooks("/tmp/config.json", false)
hgrc, err := ini.Load(filepath.Join(repo.Path, ".hg", "hgrc"))
assert.Nil(err)
exePath, err := filepath.Abs(os.Args[0])
assert.Nil(err)
assert.Equal(
fmt.Sprintf("%s --config /tmp/config.json trigger-webhooks hg-repo push", exePath),
hgrc.Section("hooks").Key("changegroup.rbgateway").String(),
)
} | explode_data.jsonl/57194 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 222
} | [
2830,
3393,
24690,
39,
70,
67769,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
692,
17200,
5368,
11,
2943,
1669,
30187,
7251,
39,
70,
25243,
1155,
11,
330,
66602,
5504,
5368,
1138,
16867,
30187,
727,
60639,
39,
70,
2524... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRedcWithMULX(t *testing.T) {
defer resetCpuFeatures()
if !HasBMI2 {
t.Skip("MULX not supported by the platform")
}
testRedc(t, kUse_MULX, kUse_MUL)
} | explode_data.jsonl/20919 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 76
} | [
2830,
3393,
6033,
66,
2354,
44,
1094,
55,
1155,
353,
8840,
836,
8,
341,
16867,
7585,
34,
5584,
21336,
741,
743,
753,
10281,
95526,
17,
341,
197,
3244,
57776,
445,
44,
1094,
55,
537,
7248,
553,
279,
5339,
1138,
197,
532,
18185,
6033,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestBlobberGRPCService_GetAllocation_Success(t *testing.T) {
req := &blobbergrpc.GetAllocationRequest{
Context: &blobbergrpc.RequestContext{},
Id: "something",
}
mockStorageHandler := &storageHandlerI{}
mockReferencePackage := &mocks.PackageHandler{}
mockStorageHandler.On("verifyAllocation", mock.Anything, req.Id, false).Return(&allocation.Allocation{
Tx: req.Id,
}, nil)
svc := newGRPCBlobberService(mockStorageHandler, mockReferencePackage)
allocation, err := svc.GetAllocation(context.Background(), req)
assert.NoError(t, err)
assert.Equal(t, allocation.Allocation.Tx, req.Id)
} | explode_data.jsonl/66824 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 219
} | [
2830,
3393,
37985,
652,
8626,
4872,
1860,
13614,
78316,
87161,
1155,
353,
8840,
836,
8,
341,
24395,
1669,
609,
35112,
652,
56585,
2234,
78316,
1900,
515,
197,
70871,
25,
609,
35112,
652,
56585,
9659,
1972,
38837,
197,
67211,
25,
414,
33... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServiceDiscoveryDelete(t *testing.T) {
n, c := makeDiscovery(RoleService, NamespaceDiscovery{}, makeService())
k8sDiscoveryTest{
discovery: n,
afterStart: func() {
obj := makeService()
c.CoreV1().Services(obj.Namespace).Delete(obj.Name, &metav1.DeleteOptions{})
},
expectedMaxItems: 2,
expectedRes: map[string]*targetgroup.Group{
"svc/default/testservice": {
Source: "svc/default/testservice",
},
},
}.Run(t)
} | explode_data.jsonl/21242 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 180
} | [
2830,
3393,
1860,
67400,
6435,
1155,
353,
8840,
836,
8,
341,
9038,
11,
272,
1669,
1281,
67400,
73443,
1860,
11,
41962,
67400,
22655,
1281,
1860,
12367,
16463,
23,
82,
67400,
2271,
515,
197,
34597,
7449,
25,
308,
345,
197,
197,
10694,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestYesFilter_SetMatcher(t *testing.T) {
tests := []struct {
name string
matcher Matcher
wantErr bool
}{
{"nil", nil, false},
{"non nil", &yesMatcher{}, true},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f := &YesFilter{}
if err := f.SetMatcher(tt.matcher); (err != nil) != tt.wantErr {
t.Errorf("SetMatcher() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
} | explode_data.jsonl/25901 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 197
} | [
2830,
3393,
9454,
5632,
14812,
37554,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
262,
914,
198,
197,
2109,
28058,
60632,
198,
197,
50780,
7747,
1807,
198,
197,
59403,
197,
197,
4913,
8385,
497,
2092,
11,
8... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestIPv4FragmentationReceive(t *testing.T) {
o := testObject{t: t, v4: true}
proto := ipv4.NewProtocol()
ep, err := proto.NewEndpoint(1, "\x0a\x00\x00\x01", nil, &o, nil)
if err != nil {
t.Fatalf("NewEndpoint failed: %v", err)
}
totalLen := header.IPv4MinimumSize + 24
frag1 := buffer.NewView(totalLen)
ip1 := header.IPv4(frag1)
ip1.Encode(&header.IPv4Fields{
IHL: header.IPv4MinimumSize,
TotalLength: uint16(totalLen),
TTL: 20,
Protocol: 10,
FragmentOffset: 0,
Flags: header.IPv4FlagMoreFragments,
SrcAddr: "\x0a\x00\x00\x02",
DstAddr: "\x0a\x00\x00\x01",
})
// Make payload be non-zero.
for i := header.IPv4MinimumSize; i < totalLen; i++ {
frag1[i] = uint8(i)
}
frag2 := buffer.NewView(totalLen)
ip2 := header.IPv4(frag2)
ip2.Encode(&header.IPv4Fields{
IHL: header.IPv4MinimumSize,
TotalLength: uint16(totalLen),
TTL: 20,
Protocol: 10,
FragmentOffset: 24,
SrcAddr: "\x0a\x00\x00\x02",
DstAddr: "\x0a\x00\x00\x01",
})
// Make payload be non-zero.
for i := header.IPv4MinimumSize; i < totalLen; i++ {
frag2[i] = uint8(i)
}
// Give packet to ipv4 endpoint, dispatcher will validate that it's ok.
o.protocol = 10
o.srcAddr = "\x0a\x00\x00\x02"
o.dstAddr = "\x0a\x00\x00\x01"
o.contents = append(frag1[header.IPv4MinimumSize:totalLen], frag2[header.IPv4MinimumSize:totalLen]...)
r := stack.Route{
LocalAddress: o.dstAddr,
RemoteAddress: o.srcAddr,
}
// Send first segment.
var views1 [1]buffer.View
vv1 := frag1.ToVectorisedView(views1)
ep.HandlePacket(&r, &vv1)
if o.dataCalls != 0 {
t.Fatalf("Bad number of data calls: got %x, want 0", o.dataCalls)
}
// Send second segment.
var views2 [1]buffer.View
vv2 := frag2.ToVectorisedView(views2)
ep.HandlePacket(&r, &vv2)
if o.dataCalls != 1 {
t.Fatalf("Bad number of data calls: got %x, want 1", o.dataCalls)
}
} | explode_data.jsonl/53349 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 949
} | [
2830,
3393,
58056,
19,
9488,
367,
14742,
1155,
353,
8840,
836,
8,
341,
22229,
1669,
1273,
1190,
90,
83,
25,
259,
11,
348,
19,
25,
830,
532,
197,
15110,
1669,
45475,
19,
7121,
20689,
741,
96626,
11,
1848,
1669,
18433,
7121,
27380,
7,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestLen(t *testing.T) {
m := AtomicMap()
var wg sync.WaitGroup
for i := 1; i <= 100000; i++ {
wg.Add(1)
go func(a int) {
m.Store(a, "a")
wg.Done()
}(i)
}
m.Range(func(k, v interface{}) bool {
m.Delete(k)
return true
})
wg.Wait()
m.Range(func(k, v interface{}) bool {
m.Delete(k)
return true
})
if a := m.Len(); a != 0 {
t.Fatalf("Len: expect: 0, but have: %d", a)
}
m.Clear()
if m.Len() != 0 {
t.Fatalf("after clear len: %d", m.Len())
}
} | explode_data.jsonl/60935 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 245
} | [
2830,
3393,
11271,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
30316,
2227,
741,
2405,
63581,
12811,
28384,
2808,
198,
2023,
600,
1669,
220,
16,
26,
600,
2651,
220,
16,
15,
15,
15,
15,
15,
26,
600,
1027,
341,
197,
72079,
1904,
7,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRefInsert(t *testing.T) {
ref := MustParseRef("test.ex")
cases := []struct {
pos int
term *Term
expected string
}{
{0, VarTerm("foo"), `foo[test].ex`},
{1, StringTerm("foo"), `test.foo.ex`},
{2, StringTerm("foo"), `test.ex.foo`},
}
for i := range cases {
result := ref.Insert(cases[i].term, cases[i].pos)
expected := MustParseRef(cases[i].expected)
if !expected.Equal(result) {
t.Fatalf("Expected %v (len: %d) but got: %v (len: %d)", expected, len(expected), result, len(result))
}
}
} | explode_data.jsonl/2920 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 229
} | [
2830,
3393,
3945,
13780,
1155,
353,
8840,
836,
8,
341,
59504,
1669,
15465,
14463,
3945,
445,
1944,
2223,
1138,
1444,
2264,
1669,
3056,
1235,
341,
197,
28164,
414,
526,
198,
197,
197,
4991,
257,
353,
17249,
198,
197,
42400,
914,
198,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestMap(t *testing.T) {
members := Members{
{PKIid: common.PKIidType("p0"), Endpoint: "p0"},
{PKIid: common.PKIidType("p1"), Endpoint: "p1"},
}
expectedMembers := Members{
{PKIid: common.PKIidType("p0"), Endpoint: "p0", Properties: &proto.Properties{LedgerHeight: 2}},
{PKIid: common.PKIidType("p1"), Endpoint: "p1", Properties: &proto.Properties{LedgerHeight: 2}},
}
addProperty := func(member NetworkMember) NetworkMember {
member.Properties = &proto.Properties{
LedgerHeight: 2,
}
return member
}
assert.Equal(t, expectedMembers, members.Map(addProperty))
// Ensure original members didn't change
assert.Nil(t, members[0].Properties)
assert.Nil(t, members[1].Properties)
} | explode_data.jsonl/62278 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 275
} | [
2830,
3393,
2227,
1155,
353,
8840,
836,
8,
341,
2109,
7062,
1669,
16954,
515,
197,
197,
90,
22242,
40,
307,
25,
4185,
1069,
80971,
307,
929,
445,
79,
15,
3975,
47269,
25,
330,
79,
15,
7115,
197,
197,
90,
22242,
40,
307,
25,
4185,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNoBodyOnChunked304Response(t *testing.T) {
defer afterTest(t)
cst := newClientServerTest(t, h1Mode, HandlerFunc(func(w ResponseWriter, r *Request) {
conn, buf, _ := w.(Hijacker).Hijack()
buf.Write([]byte("HTTP/1.1 304 NOT MODIFIED\r\nTransfer-Encoding: chunked\r\n\r\n0\r\n\r\n"))
buf.Flush()
conn.Close()
}))
defer cst.close()
// Our test server above is sending back bogus data after the
// response (the "0\r\n\r\n" part), which causes the Transport
// code to log spam. Disable keep-alives so we never even try
// to reuse the connection.
cst.tr.DisableKeepAlives = true
res, err := cst.c.Get(cst.ts.URL)
if err != nil {
t.Fatal(err)
}
if res.Body != NoBody {
t.Errorf("Unexpected body on 304 response")
}
} | explode_data.jsonl/14174 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 299
} | [
2830,
3393,
2753,
5444,
1925,
28304,
291,
18,
15,
19,
2582,
1155,
353,
8840,
836,
8,
341,
16867,
1283,
2271,
1155,
340,
1444,
267,
1669,
501,
2959,
5475,
2271,
1155,
11,
305,
16,
3636,
11,
19954,
9626,
18552,
3622,
5949,
6492,
11,
4... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestExample2(t *testing.T) {
var product = 894797020974016837
//707829217
var first, second int
start := time.Now()
for i := 3; float64(i) < math.Sqrt(float64(product)); i = i + 2 {
if IsPrimeFast(i, nil) {
if product%i == 0 {
second = i
first = product / i
//if !IsPrimeFast(first,nil) {
// t.Fatal(first, second, "not prime")
//}
fmt.Println(first, second)
fmt.Println(time.Now().Sub(start))
return
}
}
}
t.Fatal("failed")
} | explode_data.jsonl/20894 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 226
} | [
2830,
3393,
13314,
17,
1155,
353,
8840,
836,
8,
1476,
2405,
1985,
284,
220,
23,
24,
19,
22,
24,
22,
15,
17,
15,
24,
22,
19,
15,
16,
21,
23,
18,
22,
198,
197,
322,
22,
15,
22,
23,
17,
24,
17,
16,
22,
198,
2405,
1156,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestBaseApp_EndBlock(t *testing.T) {
db := dbm.NewMemDB()
name := t.Name()
logger := defaultLogger()
cp := &tmproto.ConsensusParams{
Block: &tmproto.BlockParams{
MaxGas: 5000000,
},
}
app := baseapp.NewBaseApp(name, logger, db)
app.SetParamStore(¶mStore{db: dbm.NewMemDB()})
app.InitChain(abci.RequestInitChain{
ConsensusParams: cp,
})
app.SetEndBlocker(func(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock {
return abci.ResponseEndBlock{
ValidatorUpdates: []abci.ValidatorUpdate{
{Power: 100},
},
}
})
app.Seal()
res := app.EndBlock(abci.RequestEndBlock{})
require.Len(t, res.GetValidatorUpdates(), 1)
require.Equal(t, int64(100), res.GetValidatorUpdates()[0].Power)
require.Equal(t, cp.Block.MaxGas, res.ConsensusParamUpdates.Block.MaxGas)
} | explode_data.jsonl/30055 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 335
} | [
2830,
3393,
3978,
2164,
49953,
4713,
1155,
353,
8840,
836,
8,
341,
20939,
1669,
2927,
76,
7121,
18816,
3506,
741,
11609,
1669,
259,
2967,
741,
17060,
1669,
1638,
7395,
2822,
52018,
1669,
609,
13730,
15110,
94594,
13626,
4870,
515,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTaskQueueDo_Serial_CalledInOrder(t *testing.T) {
v := validator{t: t}
tq := NewTaskQueue(5)
v.Add(3)
tq.Do(func() { v.Done("a") })
tq.Do(func() { v.Done("b") })
tq.Do(func() { v.Done("c") })
v.Validate("abc")
} | explode_data.jsonl/22721 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 112
} | [
2830,
3393,
6262,
7554,
5404,
1098,
2848,
920,
4736,
641,
4431,
1155,
353,
8840,
836,
8,
341,
5195,
1669,
22935,
90,
83,
25,
259,
532,
3244,
80,
1669,
1532,
6262,
7554,
7,
20,
340,
5195,
1904,
7,
18,
340,
3244,
80,
33596,
18552,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestGetLeavesByRangeFromLog drives the shared get-leaves-by-range harness
// against the LogTree fixture. Cases cover plain in-range reads, a read
// truncated at the tree size (start 10, count 7 returns only 10-13), and a
// set of invalid ranges whose reasons are noted inline.
func TestGetLeavesByRangeFromLog(t *testing.T) {
	tests := []getLeavesByRangeTest{
		{start: 0, count: 1, want: []int64{0}},
		{start: 0, count: 2, want: []int64{0, 1}},
		{start: 1, count: 3, want: []int64{1, 2, 3}},
		{start: 10, count: 7, want: []int64{10, 11, 12, 13}},
		{start: 13, count: 1, want: []int64{13}},
		{start: 14, count: 4, wantErr: true}, // Starts right after tree size.
		{start: 19, count: 2, wantErr: true}, // Starts further away.
		{start: 3, count: 5, wantErr: true},  // Hits non-contiguous leaves.
		{start: 5, count: 5, wantErr: true},  // Starts from a missing leaf.
		{start: 1, count: 0, wantErr: true},  // Empty range.
		{start: -1, count: 1, wantErr: true}, // Negative start.
		{start: 1, count: -1, wantErr: true}, // Negative count.
		{start: 100, count: 30, wantErr: true}, // Starts after all stored leaves.
	}
	testGetLeavesByRangeImpl(t, testonly.LogTree, tests)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 380
} | [
2830,
3393,
1949,
2304,
4693,
1359,
6046,
3830,
2201,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
455,
2304,
4693,
1359,
6046,
2271,
515,
197,
197,
90,
2468,
25,
220,
15,
11,
1760,
25,
220,
16,
11,
1366,
25,
3056,
396,
21,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestAccAzureRMLoadBalancerNatRule_disappears is the standard "disappears"
// acceptance test: it provisions the resource, deletes it out-of-band, and
// expects Terraform to detect the deletion on the next plan.
func TestAccAzureRMLoadBalancerNatRule_disappears(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test")
	r := LoadBalancerNatRule{}
	data.ResourceTest(t, r, []acceptance.TestStep{
		data.DisappearsStep(acceptance.DisappearsStepData{
			// Config renders the minimal NAT-rule configuration on the
			// "Basic" SKU variant.
			Config: func(data acceptance.TestData) string {
				return r.basic(data, "Basic")
			},
			TestResource: r,
		}),
	})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 153
} | [
2830,
3393,
14603,
78107,
49,
2668,
2731,
93825,
65214,
11337,
9932,
33431,
1561,
1155,
353,
8840,
836,
8,
341,
8924,
1669,
25505,
25212,
83920,
1155,
11,
330,
1370,
324,
4195,
63601,
38169,
21124,
497,
330,
1944,
1138,
7000,
1669,
8893,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestArray_String(t *testing.T) {
t.Run("return the string of an empty array", func(t *testing.T) {
got := Array{}.String()
assertEquals(t, got, "[]")
})
t.Run("return the string of an array that contains a single element", func(t *testing.T) {
got := Array{Int(1)}.String()
assertEquals(t, got, "[1]")
})
t.Run("return the string of an array that contains multiple elements", func(t *testing.T) {
got := Array{Int(1), Int(2)}.String()
assertEquals(t, got, "[1,2]")
})
} | explode_data.jsonl/4130 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 188
} | [
2830,
3393,
1857,
31777,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
689,
279,
914,
315,
458,
4287,
1334,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
3174,
354,
1669,
2910,
46391,
703,
741,
197,
21729,
1155,
11,
2684,
11,
330,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAirport(t *testing.T) {
airport, err := GetAirportFromCode("EGGW")
if err != nil {
fmt.Println("ExtractDestinationFromJSON errored with")
fmt.Println(err)
t.Fail()
} else if airport.Code != "EGGW" {
fmt.Printf("%v", airport)
fmt.Println("Failed to extract correct airport code: " + airport.Code)
t.Fail()
}
} | explode_data.jsonl/70163 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 135
} | [
2830,
3393,
83113,
1155,
353,
8840,
836,
8,
341,
197,
86615,
11,
1848,
1669,
2126,
83113,
3830,
2078,
445,
9376,
62565,
1138,
743,
1848,
961,
2092,
341,
197,
11009,
12419,
445,
28959,
33605,
3830,
5370,
36310,
1151,
448,
1138,
197,
1100... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestJavaSeqTest(t *testing.T) {
runTest(t, []string{
"github.com/sagernet/gomobile/bind/testdata/testpkg",
"github.com/sagernet/gomobile/bind/testdata/testpkg/secondpkg",
"github.com/sagernet/gomobile/bind/testdata/testpkg/simplepkg",
}, "", "SeqTest")
} | explode_data.jsonl/16769 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 114
} | [
2830,
3393,
15041,
20183,
2271,
1155,
353,
8840,
836,
8,
341,
56742,
2271,
1155,
11,
3056,
917,
515,
197,
197,
9749,
905,
2687,
351,
13845,
4846,
316,
3372,
84480,
12697,
691,
12697,
30069,
756,
197,
197,
9749,
905,
2687,
351,
13845,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestLateInitializeSpecs checks LateInitializeSpec against a parameter set
// that is already fully populated: late-initialization must be a no-op, so
// the params after the call must be deep-equal to a fresh params() value.
func TestLateInitializeSpecs(t *testing.T) {
	type args struct {
		instance *iamagv2.GroupMembersList
		params   *v1alpha1.GroupMembershipParameters
	}
	type want struct {
		params *v1alpha1.GroupMembershipParameters
	}
	cases := map[string]struct {
		args args
		want want
	}{
		// Every field already set; nothing should be overwritten.
		"AllFilledAlready": {
			args: args{
				params:   params(),
				instance: instanceList(),
			},
			want: want{
				params: params()},
		},
	}
	for name, tc := range cases {
		t.Run(name, func(t *testing.T) {
			// LateInitializeSpec mutates tc.args.params in place.
			LateInitializeSpec(tc.args.params, tc.args.instance)
			if diff := cmp.Diff(tc.want.params, tc.args.params); diff != "" {
				t.Errorf("LateInitializeSpec(...): -want, +got:\n%s", diff)
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 288
} | [
2830,
3393,
61457,
9928,
8327,
82,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
56256,
353,
4932,
351,
85,
17,
5407,
24371,
852,
198,
197,
25856,
256,
353,
85,
16,
7141,
16,
5407,
80904,
9706,
198,
197,
532,
13158,
136... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestGetBytesRequestPacket is a golden test pinning the exact byte encoding
// produced by OracleRequestPacketData.GetBytes: a type/value envelope with
// sorted keys and stringified numbers. The expected literal should only
// change together with a deliberate encoding change.
func TestGetBytesRequestPacket(t *testing.T) {
	req := OracleRequestPacketData{
		ClientID:       "test",
		OracleScriptID: 1,
		// Hex-encoded calldata; appears base64-encoded ("AwAAAEJUQ2Q...")
		// in the expected JSON below.
		Calldata: mustDecodeString("030000004254436400000000000000"),
		AskCount: 1,
		MinCount: 1,
	}
	require.Equal(t, []byte(`{"type":"oracle/OracleRequestPacketData","value":{"ask_count":"1","calldata":"AwAAAEJUQ2QAAAAAAAAA","client_id":"test","min_count":"1","oracle_script_id":"1"}}`), req.GetBytes())
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 185
} | [
2830,
3393,
1949,
7078,
1900,
16679,
1155,
353,
8840,
836,
8,
341,
24395,
1669,
21422,
1900,
16679,
1043,
515,
197,
71724,
915,
25,
981,
330,
1944,
756,
197,
197,
48663,
5910,
915,
25,
220,
16,
345,
197,
6258,
278,
52681,
25,
981,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestFetchGraphToBlocks builds a four-node IPLD DAG (block1 links, via its
// "nested" map, to block2, which links to block3 and block4), seeds all four
// blocks on one virtual-network peer, then fetches the full graph from a
// second peer and asserts every block arrives in traversal order.
func TestFetchGraphToBlocks(t *testing.T) {
	// Leaf nodes first, so their links can be embedded in block2.
	block3, node3, link3 := testutil.EncodeBlock(fluent.MustBuildMap(basicnode.Prototype__Map{}, 1, func(na fluent.MapAssembler) {
		na.AssembleEntry("three").AssignBool(true)
	}))
	block4, node4, link4 := testutil.EncodeBlock(fluent.MustBuildMap(basicnode.Prototype__Map{}, 1, func(na fluent.MapAssembler) {
		na.AssembleEntry("four").AssignBool(true)
	}))
	// Interior node pointing at both leaves.
	block2, node2, link2 := testutil.EncodeBlock(fluent.MustBuildMap(basicnode.Prototype__Map{}, 2, func(na fluent.MapAssembler) {
		na.AssembleEntry("link3").AssignLink(link3)
		na.AssembleEntry("link4").AssignLink(link4)
	}))
	// Root: mixes plain values with a nested map holding the link to block2.
	block1, node1, _ := testutil.EncodeBlock(fluent.MustBuildMap(basicnode.Prototype__Map{}, 3, func(na fluent.MapAssembler) {
		na.AssembleEntry("foo").AssignBool(true)
		na.AssembleEntry("bar").AssignBool(false)
		na.AssembleEntry("nested").CreateMap(2, func(na fluent.MapAssembler) {
			na.AssembleEntry("link2").AssignLink(link2)
			na.AssembleEntry("nonlink").AssignString("zoo")
		})
	}))
	// Two in-process peers connected over a zero-latency virtual network.
	net := tn.VirtualNetwork(mockrouting.NewServer(), delay.Fixed(0*time.Millisecond))
	ig := testinstance.NewTestInstanceGenerator(net, nil, nil)
	defer ig.Close()
	peers := ig.Instances(2)
	hasBlock := peers[0]
	defer hasBlock.Exchange.Close()
	err := hasBlock.Exchange.HasBlock(block1)
	require.NoError(t, err)
	err = hasBlock.Exchange.HasBlock(block2)
	require.NoError(t, err)
	err = hasBlock.Exchange.HasBlock(block3)
	require.NoError(t, err)
	err = hasBlock.Exchange.HasBlock(block4)
	require.NoError(t, err)
	// The second peer fetches through a block service backed by the exchange.
	wantsBlock := peers[1]
	defer wantsBlock.Exchange.Close()
	wantsGetter := blockservice.New(wantsBlock.Blockstore(), wantsBlock.Exchange)
	fetcherConfig := bsfetcher.NewFetcherConfig(wantsGetter)
	session := fetcherConfig.NewSession(context.Background())
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	results := []helpers.BlockResult{}
	// BlockAll walks the whole DAG from the root CID, collecting each block.
	err = helpers.BlockAll(ctx, session, cidlink.Link{Cid: block1.Cid()}, helpers.OnBlocks(func(res helpers.BlockResult) error {
		results = append(results, res)
		return nil
	}))
	require.NoError(t, err)
	// Expected order: root, interior, then the two leaves.
	assertBlocksInOrder(t, results, 4, map[int]ipld.Node{0: node1, 1: node2, 2: node3, 3: node4})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 839
} | [
2830,
3393,
20714,
11212,
1249,
29804,
1155,
353,
8840,
836,
8,
341,
47996,
18,
11,
2436,
18,
11,
2656,
18,
1669,
1273,
1314,
50217,
4713,
955,
56067,
50463,
11066,
2227,
1883,
5971,
3509,
42825,
4156,
563,
2227,
22655,
220,
16,
11,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestRemoveRecordByIdWorks deletes note "1" from a freshly seeded data file
// and then re-reads the file to confirm the deletion was actually persisted,
// i.e. no "notes" child with id 1 remains.
func TestRemoveRecordByIdWorks(t *testing.T) {
	setupDataFileForTest()
	removeById("notes", "1")
	// see if it persisted
	results, _ := getData()
	children, _ := results.S("notes").Children()
	for _, child := range children {
		// JSON numbers decode as float64, hence the float comparison.
		if child.S("id").Data().(float64) == 1 {
			t.Errorf("New record still found in the json - failed")
			break
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 125
} | [
2830,
3393,
13021,
6471,
2720,
37683,
1155,
353,
8840,
836,
8,
1476,
84571,
1043,
1703,
2461,
2271,
2822,
47233,
2720,
445,
18286,
497,
330,
16,
5130,
197,
322,
1490,
421,
432,
52995,
198,
55497,
11,
716,
1669,
27616,
741,
82470,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestExecInEnvironment starts a container with a long-lived first process
// (cat blocked on stdin), execs a second process ("env") into it with a
// custom environment, and asserts the exec'd process sees exactly that
// environment — including that a later duplicate ("DEBUG=false") wins over
// an earlier one ("DEBUG=true"). Skipped under -short.
func TestExecInEnvironment(t *testing.T) {
	if testing.Short() {
		return
	}
	rootfs, err := newRootfs()
	ok(t, err)
	defer remove(rootfs)
	config := newTemplateConfig(rootfs)
	container, err := newContainer(config)
	ok(t, err)
	defer container.Destroy()
	// Execute a first process in the container
	stdinR, stdinW, err := os.Pipe()
	ok(t, err)
	process := &libcontainer.Process{
		Cwd:   "/",
		Args:  []string{"cat"},
		Env:   standardEnvironment,
		Stdin: stdinR,
	}
	err = container.Run(process)
	// Parent side keeps the write end; close the read end we handed over.
	stdinR.Close()
	defer stdinW.Close()
	ok(t, err)
	buffers := newStdBuffers()
	// Second process execs into the already-running container.
	process2 := &libcontainer.Process{
		Cwd:  "/",
		Args: []string{"env"},
		Env: []string{
			"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
			"DEBUG=true",
			"DEBUG=false",
			"ENV=test",
		},
		Stdin:  buffers.Stdin,
		Stdout: buffers.Stdout,
		Stderr: buffers.Stderr,
	}
	err = container.Run(process2)
	ok(t, err)
	waitProcess(process2, t)
	// Closing stdin unblocks "cat" so the first process can exit too.
	stdinW.Close()
	waitProcess(process, t)
	out := buffers.Stdout.String()
	// check execin's process environment
	if !strings.Contains(out, "DEBUG=false") ||
		!strings.Contains(out, "ENV=test") ||
		!strings.Contains(out, "HOME=/root") ||
		!strings.Contains(out, "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin") ||
		strings.Contains(out, "DEBUG=true") {
		t.Fatalf("unexpected running process, output %q", out)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 579
} | [
2830,
3393,
10216,
641,
12723,
1155,
353,
8840,
836,
8,
341,
743,
7497,
55958,
368,
341,
197,
853,
198,
197,
532,
33698,
3848,
11,
1848,
1669,
501,
8439,
3848,
741,
59268,
1155,
11,
1848,
340,
16867,
4057,
9206,
3848,
340,
25873,
1669... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func Test_toBool(t *testing.T) {
testCases := []struct {
name string
value string
key string
expectedBool bool
}{
{
name: "on",
value: "on",
key: "foo",
expectedBool: true,
},
{
name: "true",
value: "true",
key: "foo",
expectedBool: true,
},
{
name: "enable",
value: "enable",
key: "foo",
expectedBool: true,
},
{
name: "arbitrary string",
value: "bar",
key: "foo",
expectedBool: false,
},
{
name: "no existing entry",
value: "bar",
key: "fii",
expectedBool: false,
},
}
for _, test := range testCases {
test := test
t.Run(test.name, func(t *testing.T) {
t.Parallel()
conf := map[string]string{
"foo": test.value,
}
result := toBool(conf, test.key)
assert.Equal(t, test.expectedBool, result)
})
}
} | explode_data.jsonl/49639 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 546
} | [
2830,
3393,
2346,
11233,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
260,
914,
198,
197,
16309,
286,
914,
198,
197,
23634,
688,
914,
198,
197,
42400,
11233,
1807,
198,
197,
59403,
197,
197,
515,
298,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestServerStreamingRpc invokes a server-streaming RPC dynamically (via a
// method descriptor rather than generated stubs), requests three responses,
// and checks that each streamed message echoes the request payload and that
// the stream then terminates cleanly with io.EOF.
func TestServerStreamingRpc(t *testing.T) {
	ss, err := stub.InvokeRpcServerStream(context.Background(), "", serverStreamingMd, &grpc_testing.StreamingOutputCallRequest{
		Payload: payload,
		ResponseParameters: []*grpc_testing.ResponseParameters{
			{}, {}, {}, // three entries means we'll get back three responses
		},
	})
	testutil.Ok(t, err, "Failed to invoke server-streaming RPC")
	for i := 0; i < 3; i++ {
		resp, err := ss.RecvMsg()
		testutil.Ok(t, err, "Failed to receive response message")
		// Responses arrive as dynamic messages; dig out the "payload" field
		// by descriptor to compare against what was sent.
		dm := resp.(*dynamic.Message)
		fd := dm.GetMessageDescriptor().FindFieldByName("payload")
		p := dm.GetField(fd)
		testutil.Require(t, dynamic.MessagesEqual(p.(proto.Message), payload), "Incorrect payload returned from RPC: %v != %v", p, payload)
	}
	// A fourth receive must report end-of-stream, not a fourth message.
	_, err = ss.RecvMsg()
	testutil.Eq(t, io.EOF, err, "Incorrect number of messages in response")
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 310
} | [
2830,
3393,
5475,
76509,
60248,
1155,
353,
8840,
836,
8,
341,
34472,
11,
1848,
1669,
13633,
32784,
60248,
5475,
3027,
5378,
19047,
1507,
7342,
3538,
76509,
72529,
11,
609,
56585,
70962,
33308,
287,
5097,
7220,
1900,
515,
197,
10025,
6989,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestHandlerThrowsNotFound(t *testing.T) {
req := events.APIGatewayProxyRequest{
Path: "/test",
HTTPMethod: http.MethodPost,
}
r := NewRouter()
_, err := r.Handle(req)
assert.NotNil(t, err)
assert.Equal(t, "Not found", err.Error())
} | explode_data.jsonl/4491 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 110
} | [
2830,
3393,
3050,
44737,
10372,
1155,
353,
8840,
836,
8,
341,
24395,
1669,
4357,
24922,
40709,
16219,
1900,
515,
197,
69640,
25,
981,
3521,
1944,
756,
197,
197,
9230,
3523,
25,
1758,
20798,
4133,
345,
197,
630,
7000,
1669,
1532,
9523,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestFiles_Search stubs the /v2/files/search/ endpoint with a canned
// three-entry response and verifies that Files.Search decodes it, plus that
// client-side validation rejects a page number < 1 and an empty query.
func TestFiles_Search(t *testing.T) {
	setup()
	defer teardown()
	// Canned server response: one video file and two folders.
	fixture := `
{
  "files": [
    {
      "content_type": "video/x-msvideo",
      "crc32": "812ed74d",
      "created_at": "2013-04-30T21:40:04",
      "extension": "avi",
      "file_type": "VIDEO",
      "first_accessed_at": "2013-12-24T09:18:58",
      "folder_type": "REGULAR",
      "icon": "https://some-valid-screenhost-url.com",
      "id": 79905833,
      "is_hidden": false,
      "is_mp4_available": true,
      "is_shared": false,
      "name": "some-file.mkv",
      "opensubtitles_hash": "fb5414fd9b9e1e38",
      "parent_id": 79905827,
      "screenshot": "https://some-valid-screenhost-url.com",
      "sender_name": "hafifuyku",
      "size": 738705408,
      "start_from": 0
    },
    {
      "content_type": "application/x-directory",
      "crc32": null,
      "created_at": "2013-04-30T21:40:03",
      "extension": null,
      "file_type": "FOLDER",
      "first_accessed_at": null,
      "folder_type": "REGULAR",
      "icon": "https://some-valid-screenhost-url.com",
      "id": 79905827,
      "is_hidden": false,
      "is_mp4_available": false,
      "is_shared": false,
      "name": "Movie 43",
      "opensubtitles_hash": null,
      "parent_id": 2197,
      "screenshot": null,
      "sender_name": "hafifuyku",
      "size": 738831202
    },
    {
      "content_type": "application/x-directory",
      "crc32": null,
      "created_at": "2010-05-19T22:24:21",
      "extension": null,
      "file_type": "FOLDER",
      "first_accessed_at": null,
      "folder_type": "REGULAR",
      "icon": "https://some-valid-screenhost-url.com",
      "id": 5659875,
      "is_hidden": false,
      "is_mp4_available": false,
      "is_shared": false,
      "name": "MOVIE",
      "opensubtitles_hash": null,
      "parent_id": 0,
      "screenshot": null,
      "sender_name": "emsel",
      "size": 0
    }
  ],
  "next": null,
  "status": "OK",
  "total": 3
}
`
	mux.HandleFunc("/v2/files/search/", func(w http.ResponseWriter, r *http.Request) {
		testMethod(t, r, "GET")
		fmt.Fprintln(w, fixture)
	})
	ctx := context.Background()
	s, err := client.Files.Search(ctx, "naber", 1)
	if err != nil {
		t.Error(err)
	}
	if len(s.Files) != 3 {
		t.Errorf("got: %v, want: 3", len(s.Files))
	}
	if s.Files[0].Name != "some-file.mkv" {
		t.Errorf("got: %v, want: some-file.mkv", s.Files[0].Name)
	}
	// invalid page number
	_, err = client.Files.Search(ctx, "naber", 0)
	if err == nil {
		t.Errorf("invalid page number accepted")
	}
	// empty query
	_, err = client.Files.Search(ctx, "", 1)
	if err == nil {
		t.Errorf("empty query accepted")
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1107
} | [
2830,
3393,
10809,
65913,
1155,
353,
8840,
836,
8,
341,
84571,
741,
16867,
49304,
2822,
1166,
12735,
1669,
22074,
515,
1,
7198,
788,
2278,
197,
515,
197,
197,
1,
1796,
1819,
788,
330,
9986,
10776,
64089,
9986,
756,
197,
197,
1,
66083,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestString(t *testing.T) {
tests := []string{
"sin(-x) * pow(1.5,-r)",
"pow(2, sin(y)) * pow(2,sin(x)) / 12",
"sin(x * y / 10)/10",
}
for _, test := range tests {
expr, err := eval.Parse(test)
if err != nil {
t.Error(err)
continue
}
stringified := expr.String()
reexpr, err := eval.Parse(stringified)
if err != nil {
t.Fatalf("parsing %s: %v", stringified, err)
}
env := eval.Env{"x": 0.1, "y": 0.1, "r": 0.3}
if expr.Eval(env) != reexpr.Eval(env) {
t.Fatalf("%s.Eval(env) != %s.Eval(env)", test, stringified)
}
}
} | explode_data.jsonl/69270 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 281
} | [
2830,
3393,
703,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
917,
515,
197,
197,
1,
15940,
4080,
87,
8,
353,
6875,
7,
16,
13,
20,
4999,
81,
15752,
197,
197,
1,
21743,
7,
17,
11,
7437,
7021,
593,
353,
6875,
7,
17,
10671,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
// TestConstraintCombine checks Constraint.Combine: each case merges the spans
// of constraint `a` with those of a suffix-column constraint `b` and compares
// the combined constraint's printed form against `e`. The final case expects
// the combination to collapse to a contradiction.
func TestConstraintCombine(t *testing.T) {
	st := cluster.MakeTestingClusterSettings()
	evalCtx := tree.MakeTestingEvalContext(st)
	testData := []struct {
		// a: base constraint; b: constraint over trailing column(s);
		// e: expected String() of a after a.Combine(b).
		a, b, e string
	}{
		{
			a: "/1/2: [ - /2] [/4 - /4] [/5/30 - /7] [/9 - /9/20]",
			b: "/2: [/10 - /10] [/20 - /20] [/30 - /30] [/40 - /40]",
			e: "/1/2: [ - /2/40] [/4/10 - /4/10] [/4/20 - /4/20] [/4/30 - /4/30] [/4/40 - /4/40] " +
				"[/5/30 - /7/40] [/9/10 - /9/20]",
		},
		{
			a: "/1/2/3: [ - /1/10] [/2 - /3/20] [/4/30 - /5] [/6/10 - /6/10]",
			b: "/3: [/50 - /50] [/60 - /70]",
			e: "/1/2/3: [ - /1/10/70] [/2 - /3/20/70] [/4/30/50 - /5] [/6/10/50 - /6/10/50] " +
				"[/6/10/60 - /6/10/70]",
		},
		{
			a: "/1/2/3/4: [ - /10] [/15 - /15] [/20 - /20/10] [/30 - /40) [/80 - ]",
			b: "/2/3/4: [/20 - /20/10] [/30 - /30] [/40 - /40]",
			e: "/1/2/3/4: [ - /10/40] [/15/20 - /15/20/10] [/15/30 - /15/30] [/15/40 - /15/40] " +
				"[/30/20 - /40) [/80/20 - ]",
		},
		{
			a: "/1/2/3/4: [ - /10/40] [/15/20 - /15/20/10] [/15/30 - /15/30] [/15/40 - /15/40] " +
				"[/30/20 - /40) [/80/20 - ]",
			b: "/4: [/20/10 - /30] [/40 - /40]",
			e: "/1/2/3/4: [ - /10/40] [/15/20 - /15/20/10/40] [/15/30 - /15/30] [/15/40 - /15/40] " +
				"[/30/20 - /40) [/80/20 - ]",
		},
		{
			// Disjoint spans: combining must produce a contradiction.
			a: "/1/2: [/1 - /1/6]",
			b: "/2: [/8 - /8]",
			e: "/1/2: contradiction",
		},
	}
	for i, tc := range testData {
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			a := ParseConstraint(&evalCtx, tc.a)
			b := ParseConstraint(&evalCtx, tc.b)
			// Combine mutates a in place.
			a.Combine(&evalCtx, &b)
			if res := a.String(); res != tc.e {
				t.Errorf("expected\n %s; got\n %s", tc.e, res)
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 954
} | [
2830,
3393,
17890,
81114,
1155,
353,
8840,
836,
8,
341,
18388,
1669,
10652,
50133,
16451,
28678,
6086,
741,
93413,
23684,
1669,
4916,
50133,
16451,
54469,
1972,
5895,
692,
18185,
1043,
1669,
3056,
1235,
341,
197,
11323,
11,
293,
11,
384,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestUnspecifiedPrimaryKey verifies that a changefeed on a table created
// without an explicit primary key uses the implicit rowid column: emitted
// payloads are keyed by rowid and include it in the "after" value. Run under
// both sinkless and enterprise changefeed harnesses.
func TestUnspecifiedPrimaryKey(t *testing.T) {
	defer leaktest.AfterTest(t)()
	testFn := func(t *testing.T, db *gosql.DB, f cdctest.TestFeedFactory) {
		sqlDB := sqlutils.MakeSQLRunner(db)
		sqlDB.Exec(t, `CREATE TABLE foo (a INT)`) // no PRIMARY KEY on purpose
		// One row inserted before the feed starts, one after.
		var id0 int
		sqlDB.QueryRow(t, `INSERT INTO foo VALUES (0) RETURNING rowid`).Scan(&id0)
		foo := feed(t, f, `CREATE CHANGEFEED FOR foo`)
		defer closeFeed(t, foo)
		var id1 int
		sqlDB.QueryRow(t, `INSERT INTO foo VALUES (1) RETURNING rowid`).Scan(&id1)
		assertPayloads(t, foo, []string{
			fmt.Sprintf(`foo: [%d]->{"after": {"a": 0, "rowid": %d}}`, id0, id0),
			fmt.Sprintf(`foo: [%d]->{"after": {"a": 1, "rowid": %d}}`, id1, id1),
		})
	}
	t.Run(`sinkless`, sinklessTest(testFn))
	t.Run(`enterprise`, enterpriseTest(testFn))
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 348
} | [
2830,
3393,
1806,
53434,
25981,
1155,
353,
8840,
836,
8,
341,
16867,
23352,
1944,
36892,
2271,
1155,
8,
2822,
18185,
24911,
1669,
2915,
1155,
353,
8840,
836,
11,
2927,
353,
34073,
1470,
22537,
11,
282,
15307,
67880,
8787,
28916,
4153,
8... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestLoginUser drives the /user/login form end-to-end against a test server:
// it first fetches the login page to obtain a valid CSRF token, then posts a
// matrix of credential/token combinations, checking both the HTTP status and
// (where applicable) an error message in the response body.
func TestLoginUser(t *testing.T) {
	app := newTestApp(t)
	// start up a https test server
	ts := newTestServer(t, app.routes())
	defer ts.Close()
	// make a GET /user/login request
	_, _, body := ts.get(t, "/user/login")
	// extract the CSRF token from the response body (html signup form)
	csrfToken := extractCSRFToken(t, body)
	tests := []struct {
		name         string
		userEmail    string
		userPassword string
		csrfToken    string
		wantCode     int    // expected HTTP status of the POST
		wantBody     []byte // substring expected in the response body; nil = skip
	}{
		{"Valid Submission", "alice@example.com", "validPa$$word", csrfToken, http.StatusSeeOther, nil},
		{"Empty Email", "", "validPa$$word", csrfToken, http.StatusOK, []byte("Email or Password is incorrect")},
		{"Empty Password", "alice@example.com", "", csrfToken, http.StatusOK, []byte("Email or Password is incorrect")},
		{"Invalid Password", "alice@example.com", "FooBarBaz", csrfToken, http.StatusOK, []byte("Email or Password is incorrect")},
		{"Invalid CSRF Token", "", "", "wrongToken", http.StatusBadRequest, nil},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			form := url.Values{}
			form.Add("email", tt.userEmail)
			form.Add("password", tt.userPassword)
			form.Add("csrf_token", tt.csrfToken)
			code, _, body := ts.postForm(t, "/user/login", form)
			if code != tt.wantCode {
				t.Errorf("want %d; got %d", tt.wantCode, code)
			}
			// bytes.Contains(body, nil) is always true, so nil wantBody passes.
			if !bytes.Contains(body, tt.wantBody) {
				t.Errorf("want body %s to contain %q", body, tt.wantBody)
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 587
} | [
2830,
3393,
6231,
1474,
1155,
353,
8840,
836,
8,
341,
28236,
1669,
501,
2271,
2164,
1155,
692,
197,
322,
1191,
705,
264,
3703,
1273,
3538,
198,
57441,
1669,
501,
2271,
5475,
1155,
11,
906,
39306,
2398,
16867,
10591,
10421,
2822,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestClientCategoryList(t *testing.T) {
auth, _ := base.TenancyWithLoginTester(t)
defer base.BaseLogOut(auth)
url := "v1/merchant/productCategory/getProductCategoryList"
base.GetList(auth, url, http.StatusOK, "获取成功")
} | explode_data.jsonl/57460 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 90
} | [
2830,
3393,
2959,
6746,
852,
1155,
353,
8840,
836,
8,
341,
78011,
11,
716,
1669,
2331,
836,
268,
6572,
2354,
6231,
58699,
1155,
340,
16867,
2331,
13018,
2201,
2662,
27435,
692,
19320,
1669,
330,
85,
16,
14,
39011,
24788,
6746,
23302,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestStackVal_MarshalJSON_GotTrue(t *testing.T) {
var sampleInp struct {
Val BoolString `json:"val"`
}
sampleInp.Val.Flag = true
var sampleOut = []byte(`{"val":true}`)
data, _ := json.Marshal(sampleInp)
if bytes.Compare(data, sampleOut) != 0 {
t.Errorf("should be %s but got %s", sampleOut, data)
}
} | explode_data.jsonl/31037 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 128
} | [
2830,
3393,
4336,
2208,
1245,
28423,
5370,
2646,
354,
2514,
1155,
353,
8840,
836,
8,
341,
2405,
6077,
641,
79,
2036,
341,
197,
197,
2208,
12608,
703,
1565,
2236,
2974,
831,
8805,
197,
532,
1903,
1516,
641,
79,
77819,
80911,
284,
830,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestBot_GetChatMember exercises Bot.GetChatMember against a mock API client
// wired to answer the "getChatMember" method, asserting the decoded result
// matches the map the mock returns and that no error is reported.
func TestBot_GetChatMember(t *testing.T) {
	type fields struct {
		apiClient apiClient
	}
	tests := []struct {
		name       string
		fields     fields
		wantResult axon.O
		wantErr    bool
	}{
		{
			name: "test1",
			fields: fields{
				apiClient: &mockAPIClient{
					// Mock expects this exact method name from the Bot.
					method: "getChatMember",
					interfaceMethod: func() interface{} {
						return map[string]interface{}{
							"chat_id": 123456,
						}
					},
					bytesMethod: func() []byte {
						return []byte("true")
					},
				},
			},
			wantResult: map[string]interface{}{
				"chat_id": 123456,
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			b := &Bot{
				apiClient: tt.fields.apiClient,
			}
			gotResult, err := b.GetChatMember(axon.O{})
			if (err != nil) != tt.wantErr {
				t.Errorf("Bot.GetChatMember() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(gotResult, tt.wantResult) {
				t.Errorf("Bot.GetChatMember() = %v, want %v", gotResult, tt.wantResult)
			}
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 502
} | [
2830,
3393,
23502,
13614,
15672,
9366,
1155,
353,
8840,
836,
8,
341,
13158,
5043,
2036,
341,
197,
54299,
2959,
6330,
2959,
198,
197,
532,
78216,
1669,
3056,
1235,
341,
197,
11609,
981,
914,
198,
197,
55276,
257,
5043,
198,
197,
50780,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestRunMigratorMigrationErrors checks the migrator's error-recording path:
// when a migration's Up step fails ("uh-oh"), exactly one AddError call is
// made on the store, carrying the migration ID and the failure message.
func TestRunMigratorMigrationErrors(t *testing.T) {
	store := NewMockStoreIface()
	ticker := glock.NewMockTicker(time.Second)
	migrator := NewMockMigrator()
	// Progress is mid-way (0.5) so the runner drives Up, which always fails.
	migrator.ProgressFunc.SetDefaultReturn(0.5, nil)
	migrator.UpFunc.SetDefaultReturn(errors.New("uh-oh"))
	runMigratorWrapped(store, migrator, ticker, func(migrations chan<- Migration) {
		migrations <- Migration{ID: 1, Progress: 0.5}
		// One tick gives the runner exactly one chance to attempt Up.
		tickN(ticker, 1)
	})
	if calls := store.AddErrorFunc.history; len(calls) != 1 {
		t.Fatalf("unexpected number of calls to AddError. want=%d have=%d", 1, len(calls))
	} else {
		if calls[0].Arg1 != 1 {
			t.Errorf("unexpected migrationId. want=%d have=%d", 1, calls[0].Arg1)
		}
		if calls[0].Arg2 != "uh-oh" {
			t.Errorf("unexpected error message. want=%s have=%s", "uh-oh", calls[0].Arg2)
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 334
} | [
2830,
3393,
6727,
44,
5233,
850,
20168,
13877,
1155,
353,
8840,
836,
8,
341,
57279,
1669,
1532,
11571,
6093,
40,
1564,
741,
3244,
5215,
1669,
342,
1023,
7121,
11571,
87278,
9730,
32435,
692,
2109,
5233,
850,
1669,
1532,
11571,
44,
5233,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_ksToPrivateECDSAKey_Failure(t *testing.T) {
recipientKey, err := keyset.NewHandle(ecdh.NISTP256ECDHKWKeyTemplate())
require.NoError(t, err)
recipientKeyPub, err := recipientKey.Public()
require.NoError(t, err)
_, err = ksToPrivateECDSAKey(recipientKeyPub)
require.EqualError(t, err, "ksToPrivateECDSAKey: failed to extract sender key: extractPrivKey: "+
"can't extract unsupported private key 'type.hyperledger.org/hyperledger.aries.crypto.tink"+
".NistPEcdhKwPublicKey'")
} | explode_data.jsonl/81256 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 181
} | [
2830,
3393,
62,
2787,
1249,
16787,
7498,
72638,
1592,
1400,
9373,
1155,
353,
8840,
836,
8,
341,
197,
42610,
1592,
11,
1848,
1669,
1376,
746,
7121,
6999,
62540,
30621,
2067,
3846,
47,
17,
20,
21,
36,
6484,
57447,
54,
1592,
7275,
2398,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestJobSpecsController_Create_CaseInsensitiveTypes verifies that a job spec
// whose task and initiator type names use non-canonical casing (see the
// fixture file name) is accepted and resolves to the expected adapters and
// initiator types after creation.
func TestJobSpecsController_Create_CaseInsensitiveTypes(t *testing.T) {
	t.Parallel()
	// Mocked Ethereum clients let the application start without a real node.
	rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t)
	defer assertMocksCalled()
	app, cleanup := cltest.NewApplication(t,
		eth.NewClientWith(rpcClient, gethClient),
	)
	defer cleanup()
	require.NoError(t, app.Start())
	// The fixture declares its task/initiator types with mixed casing.
	j := cltest.FixtureCreateJobViaWeb(t, app, "testdata/caseinsensitive_hello_world_job.json")
	// Task 0 must resolve to the HTTPGet adapter with the fixture's URL.
	adapter1, _ := adapters.For(j.Tasks[0], app.Store.Config, app.Store.ORM)
	httpGet := adapter1.BaseAdapter.(*adapters.HTTPGet)
	assert.Equal(t, httpGet.GetURL(), "https://bitstamp.net/api/ticker/")
	// Task 1 must resolve to the JSONParse adapter with the expected path.
	adapter2, _ := adapters.For(j.Tasks[1], app.Store.Config, app.Store.ORM)
	jsonParse := adapter2.BaseAdapter.(*adapters.JSONParse)
	assert.Equal(t, []string(jsonParse.Path), []string{"last"})
	// Task 2's stored type string is normalized to lower case.
	assert.Equal(t, "ethbytes32", j.Tasks[2].Type.String())
	// Task 3 must resolve to the EthTx adapter, preserving address/selector.
	adapter4, _ := adapters.For(j.Tasks[3], app.Store.Config, app.Store.ORM)
	signTx := adapter4.BaseAdapter.(*adapters.EthTx)
	assert.Equal(t, "0x356a04bCe728ba4c62A30294A55E6A8600a320B3", signTx.ToAddress.String())
	assert.Equal(t, "0x609ff1bd", signTx.FunctionSelector.String())
	// Initiators resolve to the canonical web and runat initiator types.
	assert.Equal(t, models.InitiatorWeb, j.Initiators[0].Type)
	assert.Equal(t, models.InitiatorRunAt, j.Initiators[1].Type)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 517
} | [
2830,
3393,
12245,
8327,
82,
2051,
34325,
920,
519,
75293,
4173,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
7000,
3992,
2959,
11,
633,
71,
2959,
11,
8358,
2060,
72577,
20960,
1669,
1185,
1944,
7121,
65390,
11571,
16056,
39076... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEtcdListServicesNotFound(t *testing.T) {
fakeClient := tools.NewFakeEtcdClient(t)
ctx := api.NewDefaultContext()
key := makeServiceListKey(ctx)
fakeClient.Data[key] = tools.EtcdResponseWithError{
R: &etcd.Response{},
E: tools.EtcdErrorNotFound,
}
registry := NewTestEtcdRegistry(fakeClient)
services, err := registry.ListServices(ctx)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
if len(services.Items) != 0 {
t.Errorf("Unexpected controller list: %#v", services)
}
} | explode_data.jsonl/8155 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 194
} | [
2830,
3393,
31860,
4385,
852,
11025,
10372,
1155,
353,
8840,
836,
8,
341,
1166,
726,
2959,
1669,
7375,
7121,
52317,
31860,
4385,
2959,
1155,
340,
20985,
1669,
6330,
7121,
3675,
1972,
741,
23634,
1669,
1281,
1860,
852,
1592,
7502,
340,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestUpdateStatusCondition(t *testing.T) {
nowish := metav1.Now()
beforeish := metav1.Time{Time: nowish.Add(-10 * time.Second)}
afterish := metav1.Time{Time: nowish.Add(10 * time.Second)}
cases := []struct {
name string
startingConditions []metav1.Condition
newCondition metav1.Condition
expectedUpdated bool
expectedConditions []metav1.Condition
}{
{
name: "add to empty",
startingConditions: []metav1.Condition{},
newCondition: newCondition("test", "True", "my-reason", "my-message", nil),
expectedUpdated: true,
expectedConditions: []metav1.Condition{newCondition("test", "True", "my-reason", "my-message", nil)},
},
{
name: "add to non-conflicting",
startingConditions: []metav1.Condition{
newCondition("two", "True", "my-reason", "my-message", nil),
},
newCondition: newCondition("one", "True", "my-reason", "my-message", nil),
expectedUpdated: true,
expectedConditions: []metav1.Condition{
newCondition("two", "True", "my-reason", "my-message", nil),
newCondition("one", "True", "my-reason", "my-message", nil),
},
},
{
name: "change existing status",
startingConditions: []metav1.Condition{
newCondition("two", "True", "my-reason", "my-message", nil),
newCondition("one", "True", "my-reason", "my-message", nil),
},
newCondition: newCondition("one", "False", "my-different-reason", "my-othermessage", nil),
expectedUpdated: true,
expectedConditions: []metav1.Condition{
newCondition("two", "True", "my-reason", "my-message", nil),
newCondition("one", "False", "my-different-reason", "my-othermessage", nil),
},
},
{
name: "leave existing transition time",
startingConditions: []metav1.Condition{
newCondition("two", "True", "my-reason", "my-message", nil),
newCondition("one", "True", "my-reason", "my-message", &beforeish),
},
newCondition: newCondition("one", "True", "my-reason", "my-message", &afterish),
expectedUpdated: false,
expectedConditions: []metav1.Condition{
newCondition("two", "True", "my-reason", "my-message", nil),
newCondition("one", "True", "my-reason", "my-message", &beforeish),
},
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
manifestWork := &workapiv1.ManifestWork{
ObjectMeta: metav1.ObjectMeta{Name: "work1", Namespace: "cluster1"},
Status: workapiv1.ManifestWorkStatus{
Conditions: c.startingConditions,
},
}
fakeWorkClient := fakeworkclient.NewSimpleClientset(manifestWork)
status, updated, err := UpdateManifestWorkStatus(
context.TODO(),
fakeWorkClient.WorkV1().ManifestWorks("cluster1"),
manifestWork,
updateSpokeClusterConditionFn(c.newCondition),
)
if err != nil {
t.Errorf("unexpected err: %v", err)
}
if updated != c.expectedUpdated {
t.Errorf("expected %t, but %t", c.expectedUpdated, updated)
}
for i := range c.expectedConditions {
expected := c.expectedConditions[i]
actual := status.Conditions[i]
if expected.LastTransitionTime == (metav1.Time{}) {
actual.LastTransitionTime = metav1.Time{}
}
if !equality.Semantic.DeepEqual(expected, actual) {
t.Errorf(diff.ObjectDiff(expected, actual))
}
}
})
}
} | explode_data.jsonl/32201 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1319
} | [
2830,
3393,
4289,
2522,
10547,
1155,
353,
8840,
836,
8,
341,
80922,
812,
1669,
77520,
16,
13244,
741,
63234,
812,
1669,
77520,
16,
16299,
90,
1462,
25,
1431,
812,
1904,
4080,
16,
15,
353,
882,
32435,
10569,
197,
10694,
812,
1669,
7752... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
// TestValidateUpstreams covers the happy path of validateUpstreams: valid
// upstream lists produce no validation errors, and the returned name set
// matches exactly the declared upstream names.
func TestValidateUpstreams(t *testing.T) {
	tests := []struct {
		upstreams             []v1.Upstream
		expectedUpstreamNames sets.String
		msg                   string
	}{
		{
			upstreams:             []v1.Upstream{},
			expectedUpstreamNames: sets.String{},
			msg:                   "no upstreams",
		},
		{
			upstreams: []v1.Upstream{
				{
					Name:                     "upstream1",
					Service:                  "test-1",
					Port:                     80,
					ProxyNextUpstream:        "error timeout",
					ProxyNextUpstreamTimeout: "10s",
					ProxyNextUpstreamTries:   5,
					MaxConns:                 createPointerFromInt(16),
				},
				{
					Name:                     "upstream2",
					Subselector:              map[string]string{"version": "test"},
					Service:                  "test-2",
					Port:                     80,
					ProxyNextUpstream:        "error timeout",
					ProxyNextUpstreamTimeout: "10s",
					ProxyNextUpstreamTries:   5,
				},
			},
			expectedUpstreamNames: map[string]sets.Empty{
				"upstream1": {},
				"upstream2": {},
			},
			msg: "2 valid upstreams",
		},
	}
	// Validation runs in open-source (non-Plus) mode for this test.
	isPlus := false
	for _, test := range tests {
		allErrs, resultUpstreamNames := validateUpstreams(test.upstreams, field.NewPath("upstreams"), isPlus)
		if len(allErrs) > 0 {
			t.Errorf("validateUpstreams() returned errors %v for valid input for the case of %s", allErrs, test.msg)
		}
		if !resultUpstreamNames.Equal(test.expectedUpstreamNames) {
			t.Errorf("validateUpstreams() returned %v expected %v for the case of %s", resultUpstreamNames, test.expectedUpstreamNames, test.msg)
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 817
} | [
2830,
3393,
17926,
2324,
60975,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
59810,
60975,
1797,
3056,
85,
16,
60828,
4027,
198,
197,
42400,
2324,
4027,
7980,
7289,
6431,
198,
197,
21169,
4293,
914,
198,
197,
59403,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// Test_DirectInsert round-trips 100 rows through the clickhouse driver using
// the low-level driver.Value API: it (re)creates an in-memory table covering
// the scalar column types, then inserts and commits in one transaction.
// Requires a ClickHouse server listening on 127.0.0.1:9000.
func Test_DirectInsert(t *testing.T) {
	const (
		// ddl defines a throwaway Memory-engine table with one column per
		// scalar type under test (ints, floats, strings, dates, enums).
		ddl = `
			CREATE TABLE clickhouse_test_direct_insert (
				int8 Int8,
				int16 Int16,
				int32 Int32,
				int64 Int64,
				uint8 UInt8,
				uint16 UInt16,
				uint32 UInt32,
				uint64 UInt64,
				float32 Float32,
				float64 Float64,
				string String,
				fString FixedString(2),
				date Date,
				datetime DateTime,
				enum8 Enum8 ('a' = 1, 'b' = 2),
				enum16 Enum16('c' = 1, 'd' = 2)
			) Engine=Memory
		`
		// dml inserts one placeholder per column, in DDL order.
		dml = `
			INSERT INTO clickhouse_test_direct_insert (
				int8,
				int16,
				int32,
				int64,
				uint8,
				uint16,
				uint32,
				uint64,
				float32,
				float64,
				string,
				fString,
				date,
				datetime,
				enum8,
				enum16
			) VALUES (
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?,
				?
			)
		`
	)
	if connect, err := clickhouse.Open("tcp://127.0.0.1:9000?debug=true"); assert.NoError(t, err) {
		{
			// Best-effort drop of any leftover table; errors are deliberately
			// ignored since the table may not exist on a fresh server.
			var (
				tx, _   = connect.Begin()
				stmt, _ = connect.Prepare("DROP TABLE clickhouse_test_direct_insert")
			)
			stmt.Exec([]driver.Value{})
			tx.Commit()
		}
		{
			// Create the table inside its own committed transaction.
			if tx, err := connect.Begin(); assert.NoError(t, err) {
				if stmt, err := connect.Prepare(ddl); assert.NoError(t, err) {
					if _, err := stmt.Exec([]driver.Value{}); assert.NoError(t, err) {
						assert.NoError(t, tx.Commit())
					}
				}
			}
		}
		{
			// Insert 100 rows through the prepared statement, binding every
			// column positionally, then commit the batch.
			if tx, err := connect.Begin(); assert.NoError(t, err) {
				if stmt, err := connect.Prepare(dml); assert.NoError(t, err) {
					for i := 0; i < 100; i++ {
						_, err := stmt.Exec([]driver.Value{
							int8(i),
							int16(i),
							int32(i),
							int64(i),
							uint8(i),
							uint16(i),
							uint32(i),
							uint64(i),
							float32(i),
							float64(i),
							"string",
							"CH",
							time.Now(),
							time.Now(),
							"a",
							"d",
						})
						// Bail out on the first failed row to avoid 100
						// cascading assertion failures.
						if !assert.NoError(t, err) {
							return
						}
					}
					assert.NoError(t, tx.Commit())
				}
			}
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1155
} | [
2830,
3393,
1557,
1226,
13780,
1155,
353,
8840,
836,
8,
341,
4777,
2399,
197,
197,
78372,
284,
22074,
298,
6258,
15489,
14363,
4205,
7675,
4452,
32871,
17678,
2399,
571,
2084,
23,
220,
1333,
23,
345,
571,
2084,
16,
21,
1333,
16,
21,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestMigrateWithIndexComment(t *testing.T) {
if DB.Dialector.Name() != "mysql" {
t.Skip()
}
type UserWithIndexComment struct {
gorm.Model
Name string `gorm:"size:111;index:,comment:这是一个index"`
}
if err := DB.Migrator().DropTable(&UserWithIndexComment{}); err != nil {
t.Fatalf("Failed to drop table, got error %v", err)
}
if err := DB.AutoMigrate(&UserWithIndexComment{}); err != nil {
t.Fatalf("Failed to auto migrate, but got error %v", err)
}
} | explode_data.jsonl/6496 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 182
} | [
2830,
3393,
44,
34479,
2354,
1552,
10677,
1155,
353,
8840,
836,
8,
341,
743,
5952,
909,
685,
27669,
2967,
368,
961,
330,
12272,
1,
341,
197,
3244,
57776,
741,
197,
630,
13158,
2657,
2354,
1552,
10677,
2036,
341,
197,
3174,
493,
5659,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestDecrypt(t *testing.T) {
testCases := []struct {
gotCode []int
gotK int
want []int
}{
{
gotCode: []int{5, 7, 1, 4},
gotK: 3,
want: []int{12, 10, 16, 13},
},
{
gotCode: []int{1, 2, 3, 4},
gotK: 0,
want: []int{0, 0, 0, 0},
},
{
gotCode: []int{2, 4, 9, 3},
gotK: -2,
want: []int{12, 5, 6, 13},
},
}
for _, testCase := range testCases {
actual := decrypt(testCase.gotCode, testCase.gotK)
assert.Check(t, is.DeepEqual(actual, testCase.want))
}
} | explode_data.jsonl/70810 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 286
} | [
2830,
3393,
89660,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
3174,
354,
2078,
3056,
396,
198,
197,
3174,
354,
42,
262,
526,
198,
197,
50780,
262,
3056,
396,
198,
197,
59403,
197,
197,
515,
298,
3174,
35... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestCallContext_Call drives nested sync/async contract-handler calls
// through a test call context and checks the recorded execution "trail"
// string (one character appended per handler step) matches expectations.
func TestCallContext_Call(t *testing.T) {
	tcc := &testCallContext{CallContext: newCallContext(), trail: ""}
	// ah/sh are leaf handlers (no further calls): async and sync variants.
	ah := newHandlerWithNoCall(false, tcc)
	sh := newHandlerWithNoCall(true, tcc)
	tests := []struct {
		name   string
		call   ContractHandler
		result string // expected trail; "error" means a panic is expected
	}{
		{
			name:   "Async(OnCall(Async))",
			call:   newHandler(false, false, ah, tcc),
			result: "aaa",
		},
		{
			name:   "Async(OnCall(Sync))",
			call:   newHandler(false, false, sh, tcc),
			result: "asa",
		},
		{
			name:   "Sync(Call(Async))",
			call:   newHandler(true, true, ah, tcc),
			result: "sas",
		},
		{
			name:   "Sync(Call(Sync))",
			call:   newHandler(true, true, sh, tcc),
			result: "sss",
		},
		{
			name:   "Async(Call(Async))",
			call:   newHandler(false, true, ah, tcc),
			result: "aaa",
		},
		// {
		// 	name:   "Sync(OnCall(Sync))",
		// 	call:   newHandler(true, false, sh, tcc),
		// 	result: "error",
		// },
		{
			name:   "Async(OnCall(Sync(Call(Async))))",
			call:   newHandler(false, false, newHandler(true, true, ah, tcc), tcc),
			result: "asasa",
		},
	}
	// One WaitGroup slot per case; each subtest's deferred check calls Done.
	var wg sync.WaitGroup
	wg.Add(len(tests))
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			// The trail check runs in a defer so it also fires after an
			// expected panic (the "error" cases recover here).
			defer func() {
				if test.result == "error" {
					err := recover()
					if err == nil {
						t.Errorf("It must be failed")
					}
					wg.Done()
					return
				}
				if test.result != tcc.trail {
					t.Errorf("trail(must:%s,cur:%s)\n", test.result, tcc.trail)
				}
				wg.Done()
			}()
			// Reset the shared trail, then run the handler chain to completion.
			tcc.Reset()
			tcc.Call(test.call, nil)
		})
	}
	wg.Wait()
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 753
} | [
2830,
3393,
7220,
1972,
76028,
1155,
353,
8840,
836,
8,
341,
3244,
638,
1669,
609,
1944,
7220,
1972,
90,
7220,
1972,
25,
501,
7220,
1972,
1507,
8849,
25,
93196,
197,
1466,
1669,
501,
3050,
2354,
2753,
7220,
3576,
11,
259,
638,
340,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestStartStopUnpulledImage verifies the full task lifecycle for an image
// that is not present locally: the engine must pull it, run the container,
// and emit RUNNING then STOPPED events for both container and task, in order.
func TestStartStopUnpulledImage(t *testing.T) {
	taskEngine, done, _ := setupWithDefaultConfig(t)
	defer done()
	// Ensure this image isn't pulled by deleting it
	baseImg := os.Getenv("BASE_IMAGE_NAME")
	removeImage(t, baseImg)
	testTask := createTestTask("testStartUnpulled")
	testTask.Containers[0].Image = baseImg
	stateChangeEvents := taskEngine.StateChangeEvents()
	// AddTask runs asynchronously; progress is observed via the event stream.
	go taskEngine.AddTask(testTask)
	// Events arrive in a fixed order: container RUNNING, task RUNNING,
	// container STOPPED, task STOPPED.
	event := <-stateChangeEvents
	assert.Equal(t, event.(api.ContainerStateChange).Status, apicontainerstatus.ContainerRunning, "Expected container to be RUNNING")
	event = <-stateChangeEvents
	assert.Equal(t, event.(api.TaskStateChange).Status, apitaskstatus.TaskRunning, "Expected task to be RUNNING")
	event = <-stateChangeEvents
	assert.Equal(t, event.(api.ContainerStateChange).Status, apicontainerstatus.ContainerStopped, "Expected container to be STOPPED")
	event = <-stateChangeEvents
	assert.Equal(t, event.(api.TaskStateChange).Status, apitaskstatus.TaskStopped, "Expected task to be STOPPED")
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 319
} | [
2830,
3393,
3479,
10674,
1806,
79,
91022,
1906,
1155,
353,
8840,
836,
8,
341,
49115,
4571,
11,
2814,
11,
716,
1669,
6505,
2354,
3675,
2648,
1155,
340,
16867,
2814,
741,
197,
322,
29279,
419,
2168,
4436,
944,
13238,
553,
33011,
432,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_humanize(t *testing.T) {
cases := []struct {
n int64
e string
}{
{n: 1, e: "¥1"},
{n: 123, e: "¥123"},
{n: 1234, e: "¥1,234"},
{n: 123456, e: "¥123,456"},
{n: 1234567, e: "¥1,234,567"},
}
for _, c := range cases {
t.Run(fmt.Sprint(c.n), func(t *testing.T) {
a := humanize(c.n)
if c.e != a {
t.Errorf("expected %s, but %s", c.e, a)
}
})
}
} | explode_data.jsonl/5110 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 223
} | [
2830,
3393,
86247,
551,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
9038,
526,
21,
19,
198,
197,
7727,
914,
198,
197,
59403,
197,
197,
91362,
25,
220,
16,
11,
384,
25,
330,
81093,
16,
7115,
197,
197,
9136... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestKeyValueDecoder_decodeok(t *testing.T) {
verifyParses := func(kvLine string, key string, value string) func(t *testing.T) {
return func(t *testing.T) {
d := keyValueDecoder{}
kv, err := d.decode(kvLine)
require.NoError(t, err)
require.Equal(t, kv.Key, key)
require.Equal(t, kv.Value, value)
}
}
t.Run("case=simple", verifyParses("akey: bob", "akey", "bob"))
t.Run("case=emptyvalue", verifyParses("akey: ", "akey", ""))
t.Run("case=spaces", verifyParses("akey: bob", "akey", "bob"))
t.Run("case=strangechars", verifyParses("a--sdfds@#$%$34,>,: bob", "a--sdfds@#$%$34,>,", "bob"))
} | explode_data.jsonl/39868 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 294
} | [
2830,
3393,
72082,
20732,
15227,
562,
1155,
353,
8840,
836,
8,
341,
93587,
47,
1561,
288,
1669,
2915,
90265,
2460,
914,
11,
1376,
914,
11,
897,
914,
8,
2915,
1155,
353,
8840,
836,
8,
341,
197,
853,
2915,
1155,
353,
8840,
836,
8,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDecode(t *testing.T) {
ex := `version https://git-lfs.github.com/spec/v1
oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393
size 12345`
p, err := DecodePointer(bytes.NewBufferString(ex))
assertEqualWithExample(t, ex, nil, err)
assertEqualWithExample(t, ex, latest, p.Version)
assertEqualWithExample(t, ex, "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393", p.Oid)
assertEqualWithExample(t, ex, "sha256", p.OidType)
assertEqualWithExample(t, ex, int64(12345), p.Size)
} | explode_data.jsonl/50436 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 244
} | [
2830,
3393,
32564,
1155,
353,
8840,
836,
8,
341,
8122,
1669,
1565,
4366,
3703,
1110,
12882,
70162,
11021,
905,
45389,
5457,
16,
198,
588,
15870,
17,
20,
21,
25,
19,
67,
22,
64,
17,
16,
19,
21,
16,
19,
370,
17,
24,
18,
20,
66,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestExternalLoadBalancer(t *testing.T) {
runTestAWS(t, "externallb.example.com", "externallb", "v1alpha2", false, 1, true, false, nil)
runTestCloudformation(t, "externallb.example.com", "externallb", "v1alpha2", false, nil)
} | explode_data.jsonl/17508 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 92
} | [
2830,
3393,
25913,
5879,
93825,
1155,
353,
8840,
836,
8,
341,
56742,
2271,
36136,
1155,
11,
330,
4301,
541,
65,
7724,
905,
497,
330,
4301,
541,
65,
497,
330,
85,
16,
7141,
17,
497,
895,
11,
220,
16,
11,
830,
11,
895,
11,
2092,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestAccept verifies that a comm instance can register multiple Accept
// filters simultaneously and that messages are routed to the matching
// channel: one subscriber receives even nonces, the other odd nonces.
func TestAccept(t *testing.T) {
	comm1, port1 := newCommInstance(t, naiveSec)
	comm2, _ := newCommInstance(t, naiveSec)
	// Two complementary filters keyed on message nonce parity.
	evenNONCESelector := func(m interface{}) bool {
		return m.(protoext.ReceivedMessage).GetGossipMessage().Nonce%2 == 0
	}
	oddNONCESelector := func(m interface{}) bool {
		return m.(protoext.ReceivedMessage).GetGossipMessage().Nonce%2 != 0
	}
	evenNONCES := comm1.Accept(evenNONCESelector)
	oddNONCES := comm1.Accept(oddNONCESelector)
	var evenResults []uint64
	var oddResults []uint64
	out := make(chan uint64)
	// sem signals that a reader goroutine has drained its channel and exited.
	sem := make(chan struct{})
	readIntoSlice := func(a *[]uint64, ch <-chan protoext.ReceivedMessage) {
		for m := range ch {
			*a = append(*a, m.GetGossipMessage().Nonce)
			select {
			case out <- m.GetGossipMessage().Nonce:
			default: // avoid blocking when we stop reading from out
			}
		}
		sem <- struct{}{}
	}
	go readIntoSlice(&evenResults, evenNONCES)
	go readIntoSlice(&oddResults, oddNONCES)
	// Flood comm1 from comm2 until enough messages have been observed.
	stopSend := make(chan struct{})
	go func() {
		for {
			select {
			case <-stopSend:
				return
			default:
				comm2.Send(createGossipMsg(), remotePeer(port1))
			}
		}
	}()
	waitForMessages(t, out, (DefSendBuffSize+DefRecvBuffSize)*2, "Didn't receive all messages sent")
	close(stopSend)
	// Stopping the comm instances closes the Accept channels, letting the
	// reader goroutines finish; wait for both via sem.
	comm1.Stop()
	comm2.Stop()
	<-sem
	<-sem
	t.Logf("%d even nonces received", len(evenResults))
	t.Logf("%d odd nonces received", len(oddResults))
	require.NotEmpty(t, evenResults)
	require.NotEmpty(t, oddResults)
	// Every nonce routed to a channel must match that channel's parity.
	remainderPredicate := func(a []uint64, rem uint64) {
		for _, n := range a {
			require.Equal(t, n%2, rem)
		}
	}
	remainderPredicate(evenResults, 0)
	remainderPredicate(oddResults, 1)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 661
} | [
2830,
3393,
16646,
1155,
353,
8840,
836,
8,
341,
197,
3621,
16,
11,
2635,
16,
1669,
501,
17977,
2523,
1155,
11,
49665,
8430,
340,
197,
3621,
17,
11,
716,
1669,
501,
17977,
2523,
1155,
11,
49665,
8430,
692,
7727,
1037,
29563,
2104,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUploadOrderMultiFailureOnComplete(t *testing.T) {
s, ops, _ := loggingSvc(emptyList)
s.Handlers.Send.PushBack(func(r *request.Request) {
switch r.Data.(type) {
case *s3.CompleteMultipartUploadOutput:
r.HTTPResponse.StatusCode = 400
}
})
mgr := s3manager.NewUploaderWithClient(s, func(u *s3manager.Uploader) {
u.Concurrency = 1
})
_, err := mgr.Upload(&s3manager.UploadInput{
Bucket: aws.String("Bucket"),
Key: aws.String("Key"),
Body: bytes.NewReader(buf12MB),
})
if err == nil {
t.Error("Expected error, but receievd nil")
}
if e, a := []string{"CreateMultipartUpload", "UploadPart", "UploadPart",
"UploadPart", "CompleteMultipartUpload", "AbortMultipartUpload"}, *ops; !reflect.DeepEqual(e, a) {
t.Errorf("Expected %v, but received %v", e, a)
}
} | explode_data.jsonl/55639 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 323
} | [
2830,
3393,
13844,
4431,
20358,
17507,
1925,
12548,
1155,
353,
8840,
836,
8,
341,
1903,
11,
27132,
11,
716,
1669,
8392,
92766,
24216,
852,
340,
1903,
35308,
9254,
20176,
34981,
3707,
18552,
2601,
353,
2035,
9659,
8,
341,
197,
8961,
435,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestConcurrency(t *testing.T) {
f := NewFile()
wg := new(sync.WaitGroup)
for i := 1; i <= 5; i++ {
wg.Add(1)
go func(val int) {
f.SetCellValue("Sheet1", fmt.Sprintf("A%d", val), val)
f.SetCellValue("Sheet1", fmt.Sprintf("B%d", val), strconv.Itoa(val))
f.GetCellValue("Sheet1", fmt.Sprintf("A%d", val))
wg.Done()
}(i)
}
wg.Wait()
val, err := f.GetCellValue("Sheet1", "A1")
if err != nil {
t.Error(err)
}
assert.Equal(t, "1", val)
} | explode_data.jsonl/945 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 231
} | [
2830,
3393,
79611,
1155,
353,
8840,
836,
8,
341,
1166,
1669,
1532,
1703,
741,
72079,
1669,
501,
97233,
28384,
2808,
340,
2023,
600,
1669,
220,
16,
26,
600,
2651,
220,
20,
26,
600,
1027,
341,
197,
72079,
1904,
7,
16,
340,
197,
30680,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestUpdateUserPassword covers the password-update endpoint: successful
// change with the correct current password, rejection of invalid inputs,
// authentication/authorization failures, account lockout after repeated
// wrong-password attempts, and the system-admin override path.
func TestUpdateUserPassword(t *testing.T) {
	th := Setup().InitBasic().InitSystemAdmin()
	defer th.TearDown()
	Client := th.Client
	password := "newpassword1"
	// Happy path: correct current password updates successfully.
	pass, resp := Client.UpdateUserPassword(th.BasicUser.Id, th.BasicUser.Password, password)
	CheckNoError(t, resp)
	if !pass {
		t.Fatal("should have returned true")
	}
	// Invalid new passwords (empty / too short) are rejected.
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, password, "")
	CheckBadRequestStatus(t, resp)
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, password, "junk")
	CheckBadRequestStatus(t, resp)
	// Malformed user id is rejected.
	_, resp = Client.UpdateUserPassword("junk", password, password)
	CheckBadRequestStatus(t, resp)
	// Wrong or missing current password is rejected.
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, "", password)
	CheckBadRequestStatus(t, resp)
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, "junk", password)
	CheckBadRequestStatus(t, resp)
	// Restore the original password for the following steps.
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, password, th.BasicUser.Password)
	CheckNoError(t, resp)
	// Unauthenticated requests are rejected.
	Client.Logout()
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, password, password)
	CheckUnauthorizedStatus(t, resp)
	// A different regular user may not change someone else's password.
	th.LoginBasic2()
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, password, password)
	CheckForbiddenStatus(t, resp)
	th.LoginBasic()
	// Test lockout
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.MaximumLoginAttempts = 2 })
	// Fail twice
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, "badpwd", "newpwd")
	CheckBadRequestStatus(t, resp)
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, "badpwd", "newpwd")
	CheckBadRequestStatus(t, resp)
	// Should fail because account is locked out
	_, resp = Client.UpdateUserPassword(th.BasicUser.Id, th.BasicUser.Password, "newpwd")
	CheckErrorMessage(t, resp, "api.user.check_user_login_attempts.too_many.app_error")
	CheckUnauthorizedStatus(t, resp)
	// System admin can update another user's password
	adminSetPassword := "pwdsetbyadmin"
	pass, resp = th.SystemAdminClient.UpdateUserPassword(th.BasicUser.Id, "", adminSetPassword)
	CheckNoError(t, resp)
	if !pass {
		t.Fatal("should have returned true")
	}
	// The admin-set password must actually work for login.
	_, resp = Client.Login(th.BasicUser.Email, adminSetPassword)
	CheckNoError(t, resp)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 737
} | [
2830,
3393,
4289,
1474,
4876,
1155,
353,
8840,
836,
8,
341,
70479,
1669,
18626,
1005,
3803,
15944,
1005,
3803,
2320,
7210,
741,
16867,
270,
836,
682,
4454,
741,
71724,
1669,
270,
11716,
271,
58199,
1669,
330,
931,
3833,
16,
698,
41431,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_splitAndCall_NoMessages(t *testing.T) {
ctx := context.CLIContext{}
txBldr := createFakeTxBuilder()
err := splitAndApply(nil, ctx, txBldr, nil, 10)
assert.NoError(t, err, "")
} | explode_data.jsonl/72080 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 79
} | [
2830,
3393,
17052,
3036,
7220,
36989,
15820,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
2266,
727,
18537,
1972,
16094,
3244,
14377,
72377,
1669,
1855,
52317,
31584,
3297,
2822,
9859,
1669,
6718,
3036,
28497,
27907,
11,
5635,
11,
9854,
33,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestApp_Run_DoesNotOverwriteErrorFromBefore(t *testing.T) {
app := &App{
Action: func(c *Context) error { return nil },
Before: func(c *Context) error { return fmt.Errorf("before error") },
After: func(c *Context) error { return fmt.Errorf("after error") },
}
err := app.Run([]string{"foo"})
if err == nil {
t.Fatalf("expected to receive error from Run, got none")
}
if !strings.Contains(err.Error(), "before error") {
t.Errorf("expected text of error from Before method, but got none in \"%v\"", err)
}
if !strings.Contains(err.Error(), "after error") {
t.Errorf("expected text of error from After method, but got none in \"%v\"", err)
}
} | explode_data.jsonl/52592 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 234
} | [
2830,
3393,
2164,
84158,
1557,
7072,
2623,
1918,
4934,
1454,
3830,
10227,
1155,
353,
8840,
836,
8,
341,
28236,
1669,
609,
2164,
515,
197,
67607,
25,
2915,
1337,
353,
1972,
8,
1465,
314,
470,
2092,
1153,
197,
197,
10227,
25,
2915,
1337... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestMapProxy_ExecuteOnKeysWithNonSerializableProcessor checks that running
// an entry processor over map keys fails with an error when the processor
// cannot be serialized (student{} — presumably unregistered for
// serialization, per the assertion message; confirm against its definition).
func TestMapProxy_ExecuteOnKeysWithNonSerializableProcessor(t *testing.T) {
	_, err := mp.ExecuteOnKeys(nil, student{})
	AssertErrorNotNil(t, err, "executeOnKeys did not return an error for nonserializable processor")
	// Clear the shared map proxy so later tests start from a clean state.
	mp.Clear()
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 77
} | [
2830,
3393,
2227,
16219,
83453,
1925,
8850,
2354,
8121,
29268,
22946,
1155,
353,
8840,
836,
8,
1476,
197,
6878,
1848,
1669,
10490,
13827,
1925,
8850,
27907,
11,
5458,
37790,
18017,
1454,
96144,
1155,
11,
1848,
11,
330,
10257,
1925,
8850,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestRaceFlushVersusParsePacket(t *testing.T) {
port, err := getAvailableUDPPort()
require.NoError(t, err)
config.Datadog.SetDefault("dogstatsd_port", port)
opts := aggregator.DefaultDemultiplexerOptions(nil)
opts.FlushInterval = 10 * time.Millisecond
opts.DontStartForwarders = true
demux := aggregator.InitAndStartServerlessDemultiplexer(nil, "serverless", time.Second*1000)
s, err := dogstatsd.NewServer(demux)
require.NoError(t, err, "cannot start DSD")
defer s.Stop()
url := fmt.Sprintf("127.0.0.1:%d", config.Datadog.GetInt("dogstatsd_port"))
conn, err := net.Dial("udp", url)
require.NoError(t, err, "cannot connect to DSD socket")
defer conn.Close()
finish := &sync.WaitGroup{}
finish.Add(2)
go func(wg *sync.WaitGroup) {
for i := 0; i < 1000; i++ {
conn.Write([]byte("daemon:666|g|#sometag1:somevalue1,sometag2:somevalue2"))
time.Sleep(10 * time.Millisecond)
}
finish.Done()
}(finish)
go func(wg *sync.WaitGroup) {
for i := 0; i < 1000; i++ {
s.ServerlessFlush()
}
finish.Done()
}(finish)
finish.Wait()
} | explode_data.jsonl/50627 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 449
} | [
2830,
3393,
55991,
46874,
83956,
355,
14463,
16679,
1155,
353,
8840,
836,
8,
341,
52257,
11,
1848,
1669,
633,
16485,
4656,
4406,
371,
741,
17957,
35699,
1155,
11,
1848,
340,
25873,
909,
266,
329,
538,
4202,
3675,
445,
18457,
16260,
67,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestMapNanGrowIterator verifies that a map iterator running while the map
// grows still yields every entry exactly once. NaN keys are used because
// NaN != NaN, so each insert of nan creates a distinct entry whose unique
// value makes duplicates and omissions detectable.
func TestMapNanGrowIterator(t *testing.T) {
	m := make(map[float64]int)
	nan := math.NaN()
	const nBuckets = 16
	// To fill nBuckets buckets takes LOAD * nBuckets keys.
	nKeys := int(nBuckets * runtime.HashLoad)
	// Get map to full point with nan keys.
	for i := 0; i < nKeys; i++ {
		m[nan] = i
	}
	// Trigger grow
	m[1.0] = 1
	delete(m, 1.0)
	// Run iterator
	found := make(map[int]struct{})
	for _, v := range m {
		// All inserted values are >= 0; v != -1 is always true here and
		// presumably guards against a sentinel — confirm if -1 is special.
		if v != -1 {
			if _, repeat := found[v]; repeat {
				t.Fatalf("repeat of value %d", v)
			}
			found[v] = struct{}{}
		}
		if len(found) == nKeys/2 {
			// Halfway through iteration, finish grow.
			// NOTE(review): each delete of the (already absent) 1.0 key
			// presumably performs a chunk of incremental evacuation work, so
			// nBuckets deletes should complete the grow — confirm against
			// the runtime's incremental map-growth implementation.
			for i := 0; i < nBuckets; i++ {
				delete(m, 1.0)
			}
		}
	}
	if len(found) != nKeys {
		t.Fatalf("missing value")
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 338
} | [
2830,
3393,
2227,
45,
276,
56788,
11951,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
1281,
9147,
95381,
21,
19,
63025,
340,
9038,
276,
1669,
6888,
78706,
741,
4777,
308,
33,
38551,
284,
220,
16,
21,
198,
197,
322,
2014,
5155,
308,
33,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
// TestGenerateTrainStmt parses a SQLFlow TO TRAIN statement that exercises
// every supported feature-column constructor and verifies that
// GenerateTrainStmt extracts the estimator, the select/validation queries,
// the WITH attributes, each COLUMN feature column (by position), the LABEL
// column, and the INTO model name.
func TestGenerateTrainStmt(t *testing.T) {
	a := assert.New(t)
	normal := `SELECT c1, c2, c3, c4 FROM my_table
TO TRAIN DNNClassifier
WITH
	model.n_classes=2,
	train.optimizer="adam",
	model.hidden_units=[128,64],
	validation.select="SELECT c1, c2, c3, c4 FROM my_table LIMIT 10"
COLUMN c1,DENSE(c2, [128, 32]),CATEGORY_ID(c3, 512),
	SEQ_CATEGORY_ID(c3, 512),
	CROSS([c1,c2], 64),
	BUCKET(DENSE(c1, [100]), 100),
	EMBEDDING(CATEGORY_ID(c3, 512), 128, mean),
	DENSE(c1, 64, COMMA),
	CATEGORY_ID(SPARSE(c2, 10000, COMMA), 128),
	SEQ_CATEGORY_ID(SPARSE(c2, 10000, COMMA), 128),
	EMBEDDING(c1, 128, sum),
	EMBEDDING(SPARSE(c2, 10000, COMMA, "int"), 128, sum),
	INDICATOR(CATEGORY_ID(c3, 512)),
	INDICATOR(c1),
	INDICATOR(SPARSE(c2, 10000, COMMA, "int"))
LABEL c4
INTO mymodel;
`
	r, e := parser.ParseStatement("mysql", normal)
	a.NoError(e)
	trainStmt, err := GenerateTrainStmt(r.SQLFlowSelectStmt)
	a.NoError(err)
	a.Equal("DNNClassifier", trainStmt.Estimator)
	a.Equal(`SELECT c1, c2, c3, c4 FROM my_table
`, trainStmt.Select)
	a.Equal("SELECT c1, c2, c3, c4 FROM my_table LIMIT 10", trainStmt.ValidationSelect)
	// WITH attributes: each key must round-trip with the expected Go type.
	// NOTE(review): "model.stddev" is checked here but never set in the
	// statement above — presumably a leftover branch; confirm.
	for key, attr := range trainStmt.Attributes {
		if key == "model.n_classes" {
			a.Equal(2, attr.(int))
		} else if key == "train.optimizer" {
			a.Equal("adam", attr.(string))
		} else if key == "model.stddev" {
			a.Equal(float32(0.001), attr.(float32))
		} else if key == "model.hidden_units" {
			l, ok := attr.([]interface{})
			a.True(ok)
			a.Equal(128, l[0].(int))
			a.Equal(64, l[1].(int))
		} else if key != "validation.select" {
			a.Failf("error key", key)
		}
	}
	// [0] bare c1 -> NumericColumn with default shape [1].
	nc, ok := trainStmt.Features["feature_columns"][0].(*NumericColumn)
	a.True(ok)
	a.Equal([]int{1}, nc.FieldDesc.Shape)
	// [1] DENSE(c2, [128, 32])
	nc, ok = trainStmt.Features["feature_columns"][1].(*NumericColumn)
	a.True(ok)
	a.Equal("c2", nc.FieldDesc.Name)
	a.Equal([]int{128, 32}, nc.FieldDesc.Shape)
	// [2] CATEGORY_ID(c3, 512)
	cc, ok := trainStmt.Features["feature_columns"][2].(*CategoryIDColumn)
	a.True(ok)
	a.Equal("c3", cc.FieldDesc.Name)
	a.Equal(int64(512), cc.BucketSize)
	// [3] SEQ_CATEGORY_ID(c3, 512)
	seqcc, ok := trainStmt.Features["feature_columns"][3].(*SeqCategoryIDColumn)
	a.True(ok)
	a.Equal("c3", seqcc.FieldDesc.Name)
	// [4] CROSS([c1,c2], 64)
	cross, ok := trainStmt.Features["feature_columns"][4].(*CrossColumn)
	a.True(ok)
	a.Equal("c1", cross.Keys[0].(string))
	a.Equal("c2", cross.Keys[1].(string))
	a.Equal(int64(64), cross.HashBucketSize)
	// [5] BUCKET(DENSE(c1, [100]), 100)
	bucket, ok := trainStmt.Features["feature_columns"][5].(*BucketColumn)
	a.True(ok)
	a.Equal(100, bucket.Boundaries[0])
	a.Equal("c1", bucket.SourceColumn.FieldDesc.Name)
	// [6] EMBEDDING(CATEGORY_ID(c3, 512), 128, mean)
	emb, ok := trainStmt.Features["feature_columns"][6].(*EmbeddingColumn)
	a.True(ok)
	a.Equal("mean", emb.Combiner)
	a.Equal(128, emb.Dimension)
	embInner, ok := emb.CategoryColumn.(*CategoryIDColumn)
	a.True(ok)
	a.Equal("c3", embInner.FieldDesc.Name)
	a.Equal(int64(512), embInner.BucketSize)
	// DENSE(c1, [64], COMMA), [128]
	nc, ok = trainStmt.Features["feature_columns"][7].(*NumericColumn)
	a.True(ok)
	a.Equal(64, nc.FieldDesc.Shape[0])
	a.Equal(",", nc.FieldDesc.Delimiter)
	a.False(nc.FieldDesc.IsSparse)
	// CATEGORY_ID(SPARSE(c2, 10000, COMMA), 128),
	cc, ok = trainStmt.Features["feature_columns"][8].(*CategoryIDColumn)
	a.True(ok)
	a.True(cc.FieldDesc.IsSparse)
	a.Equal("c2", cc.FieldDesc.Name)
	a.Equal(10000, cc.FieldDesc.Shape[0])
	a.Equal(",", cc.FieldDesc.Delimiter)
	a.Equal(int64(128), cc.BucketSize)
	// SEQ_CATEGORY_ID(SPARSE(c2, 10000, COMMA), 128)
	scc, ok := trainStmt.Features["feature_columns"][9].(*SeqCategoryIDColumn)
	a.True(ok)
	a.True(scc.FieldDesc.IsSparse)
	a.Equal("c2", scc.FieldDesc.Name)
	a.Equal(10000, scc.FieldDesc.Shape[0])
	// EMBEDDING(c1, 128)
	emb, ok = trainStmt.Features["feature_columns"][10].(*EmbeddingColumn)
	a.True(ok)
	a.Equal(nil, emb.CategoryColumn)
	a.Equal(128, emb.Dimension)
	// EMBEDDING(SPARSE(c2, 10000, COMMA, "int"), 128)
	emb, ok = trainStmt.Features["feature_columns"][11].(*EmbeddingColumn)
	a.True(ok)
	catCol, ok := emb.CategoryColumn.(*CategoryIDColumn)
	a.True(ok)
	a.True(catCol.FieldDesc.IsSparse)
	a.Equal("c2", catCol.FieldDesc.Name)
	a.Equal(10000, catCol.FieldDesc.Shape[0])
	a.Equal(",", catCol.FieldDesc.Delimiter)
	// INDICATOR(CATEGORY_ID(c3, 512)),
	ic, ok := trainStmt.Features["feature_columns"][12].(*IndicatorColumn)
	a.True(ok)
	catCol, ok = ic.CategoryColumn.(*CategoryIDColumn)
	a.True(ok)
	a.Equal("c3", catCol.FieldDesc.Name)
	a.Equal(int64(512), catCol.BucketSize)
	// INDICATOR(c1)
	ic, ok = trainStmt.Features["feature_columns"][13].(*IndicatorColumn)
	a.True(ok)
	a.Equal(nil, ic.CategoryColumn)
	a.Equal("c1", ic.Name)
	// INDICATOR(SPARSE(c2, 10000, COMMA, "int"))
	ic, ok = trainStmt.Features["feature_columns"][14].(*IndicatorColumn)
	a.True(ok)
	catCol, ok = ic.CategoryColumn.(*CategoryIDColumn)
	a.True(ok)
	a.True(catCol.FieldDesc.IsSparse)
	a.Equal("c2", catCol.FieldDesc.Name)
	a.Equal(10000, catCol.FieldDesc.Shape[0])
	// LABEL c4 parses as a numeric column; INTO names the saved model.
	l, ok := trainStmt.Label.(*NumericColumn)
	a.True(ok)
	a.Equal("c4", l.FieldDesc.Name)
	a.Equal("mymodel", trainStmt.Into)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2286
} | [
2830,
3393,
31115,
34613,
31063,
1155,
353,
8840,
836,
8,
341,
11323,
1669,
2060,
7121,
1155,
340,
197,
8252,
1669,
1565,
4858,
272,
16,
11,
272,
17,
11,
272,
18,
11,
272,
19,
4295,
847,
5237,
198,
197,
5207,
67509,
422,
9745,
33895... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
// TestAgent_Self starts an agent with a custom node_meta entry, queries the
// /v1/agent/self endpoint, and verifies that the reported Serf LAN port,
// debug config, network coordinates, and node metadata all match the
// running agent's configuration.
func TestAgent_Self(t *testing.T) {
	t.Parallel()
	a := NewTestAgent(t.Name(), `
	node_meta {
		somekey = "somevalue"
	}
	`)
	defer a.Shutdown()

	req, _ := http.NewRequest("GET", "/v1/agent/self", nil)
	obj, err := a.srv.AgentSelf(nil, req)
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	val := obj.(Self)
	// The member port exposed over the API must be the Serf LAN port.
	if int(val.Member.Port) != a.Config.SerfPortLAN {
		t.Fatalf("incorrect port: %v", obj)
	}
	// The same port must also appear in the debug config dump.
	if val.DebugConfig["SerfPortLAN"].(int) != a.Config.SerfPortLAN {
		t.Fatalf("incorrect port: %v", obj)
	}
	cs, err := a.GetLANCoordinate()
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	// Coordinates are keyed by segment; compare the agent's own segment.
	if c := cs[a.config.SegmentName]; !reflect.DeepEqual(c, val.Coord) {
		t.Fatalf("coordinates are not equal: %v != %v", c, val.Coord)
	}
	// The endpoint injects a segment key into Meta; strip it before
	// comparing with the metadata that was configured above.
	delete(val.Meta, structs.MetaSegmentKey) // Added later, not in config.
	if !reflect.DeepEqual(a.config.NodeMeta, val.Meta) {
		t.Fatalf("meta fields are not equal: %v != %v", a.config.NodeMeta, val.Meta)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 410
} | [
2830,
3393,
16810,
1098,
490,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
11323,
1669,
1532,
2271,
16810,
1155,
2967,
1507,
22074,
197,
20831,
13381,
341,
298,
1903,
635,
792,
284,
330,
14689,
957,
698,
197,
197,
532,
197,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
// TestStreamerClient registers a mock stream provider, subscribes a test
// listener to a streamer client, and starts streaming. The select accepts
// whichever channel fires first: a plain listener error (must be nil) or an
// update error, in which case the provider's extra payload is unmarshalled
// and compared against the expected update. A 10s timeout guards against a
// hung stream.
func TestStreamerClient(t *testing.T) {
	streamer_test_init.StartTestService(t)
	streamerClient := streamer_client.NewStreamerClient(mockedCloudRegistry{})
	mockProvider := &mockStreamProvider{name: testStreamName, retVal: expected}
	providers.RegisterStreamProvider(mockProvider)
	l := testListener{}
	l.err = make(chan error)
	l.updateErr = make(chan error)
	assert.NoError(t, streamerClient.AddListener(l))
	// Stream runs in its own goroutine; outcomes arrive on the channels.
	go streamerClient.Stream(l)
	select {
	case e := <-l.err:
		assert.NoError(t, e)
	case e := <-l.updateErr:
		assert.NoError(t, e)
		// The provider attached the update as an Any; decode and compare
		// via the canonical marshalled form.
		var extra protos.DataUpdate
		err := ptypes.UnmarshalAny(mockProvider.extra, &extra)
		assert.NoError(t, err)
		assert.Equal(t, protos.TestMarshal(expected[0]), protos.TestMarshal(&extra))
	case <-time.After(10 * time.Second):
		assert.Fail(t, "Test Timeout")
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 304
} | [
2830,
3393,
80063,
2959,
1155,
353,
8840,
836,
8,
341,
44440,
261,
4452,
6137,
12101,
2271,
1860,
1155,
692,
44440,
261,
2959,
1669,
4269,
261,
8179,
7121,
80063,
2959,
30389,
291,
16055,
15603,
37790,
77333,
5179,
1669,
609,
16712,
3027,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestBlockPoolTimeout(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
logger := log.TestingLogger()
start := int64(42)
peers := makePeers(10, start+1, 1000)
errorsCh := make(chan peerError, 1000)
requestsCh := make(chan BlockRequest, 1000)
pool := NewBlockPool(logger, start, requestsCh, errorsCh)
err := pool.Start(ctx)
if err != nil {
t.Error(err)
}
t.Cleanup(func() { cancel(); pool.Wait() })
// Introduce each peer.
go func() {
for _, peer := range peers {
pool.SetPeerRange(peer.id, peer.base, peer.height)
}
}()
// Start a goroutine to pull blocks
go func() {
for {
if !pool.IsRunning() {
return
}
first, second := pool.PeekTwoBlocks()
if first != nil && second != nil {
pool.PopRequest()
} else {
time.Sleep(1 * time.Second)
}
}
}()
// Pull from channels
counter := 0
timedOut := map[types.NodeID]struct{}{}
for {
select {
case err := <-errorsCh:
// consider error to be always timeout here
if _, ok := timedOut[err.peerID]; !ok {
counter++
if counter == len(peers) {
return // Done!
}
}
case request := <-requestsCh:
logger.Debug("received request",
"counter", counter,
"request", request)
}
}
} | explode_data.jsonl/73040 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 510
} | [
2830,
3393,
4713,
10551,
7636,
1155,
353,
8840,
836,
8,
341,
20985,
11,
9121,
1669,
2266,
26124,
9269,
5378,
19047,
2398,
16867,
9121,
2822,
17060,
1669,
1487,
8787,
287,
7395,
2822,
21375,
1669,
526,
21,
19,
7,
19,
17,
340,
197,
375,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMemCache_Set(t *testing.T) {
assert := assert.New(t)
err := cMem.Set("test", "aaa", time.Second)
assert.Nil(err)
v, err := cMem.Get("test")
assert.Nil(err)
assert.Equal("aaa", v)
} | explode_data.jsonl/74753 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 86
} | [
2830,
3393,
18816,
8233,
14812,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
340,
9859,
1669,
272,
18816,
4202,
445,
1944,
497,
330,
32646,
497,
882,
32435,
340,
6948,
59678,
3964,
340,
5195,
11,
1848,
1669,
272,
18816,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestReadASN1OptionalInteger(t *testing.T) {
for _, test := range optionalIntTestData {
t.Run(test.name, func(t *testing.T) {
in := String(test.in)
var out int
ok := in.ReadOptionalASN1Integer(&out, test.tag, defaultInt)
if ok != test.ok || ok && out != test.out.(int) {
t.Errorf("in.ReadOptionalASN1Integer() = %v, want %v; out = %v, want %v", ok, test.ok, out, test.out)
}
})
}
} | explode_data.jsonl/16723 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 176
} | [
2830,
3393,
4418,
68134,
16,
15309,
3486,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1669,
2088,
10101,
1072,
83920,
341,
197,
3244,
16708,
8623,
2644,
11,
2915,
1155,
353,
8840,
836,
8,
341,
298,
17430,
1669,
923,
8623,
1858,
34... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestReadCheckpoint writes series and samples into a WAL, checkpoints it,
// truncates the WAL, and then verifies that a Watcher started afterwards
// recovers every series from the checkpoint alone — no new data is written
// once the watcher is running. The scenario runs with and without segment
// compression.
func TestReadCheckpoint(t *testing.T) {
	pageSize := 32 * 1024
	const seriesCount = 10
	const samplesCount = 250
	for _, compress := range []bool{false, true} {
		t.Run(fmt.Sprintf("compress=%t", compress), func(t *testing.T) {
			dir, err := ioutil.TempDir("", "readCheckpoint")
			require.NoError(t, err)
			defer func() {
				require.NoError(t, os.RemoveAll(dir))
			}()
			wdir := path.Join(dir, "wal")
			err = os.Mkdir(wdir, 0777)
			require.NoError(t, err)
			// Pre-create segment 30 so the Checkpoint call below has a
			// segment boundary to refer to.
			os.Create(SegmentName(wdir, 30))
			enc := record.Encoder{}
			w, err := NewSize(nil, nil, wdir, 128*pageSize, compress)
			require.NoError(t, err)
			defer func() {
				require.NoError(t, w.Close())
			}()
			// Write to the initial segment then checkpoint.
			for i := 0; i < seriesCount; i++ {
				ref := i + 100
				series := enc.Series([]record.RefSeries{
					{
						Ref:    uint64(ref),
						Labels: labels.Labels{labels.Label{Name: "__name__", Value: fmt.Sprintf("metric_%d", i)}},
					},
				}, nil)
				require.NoError(t, w.Log(series))
				// Samples reference random existing refs to mimic real data.
				for j := 0; j < samplesCount; j++ {
					inner := rand.Intn(ref + 1)
					sample := enc.Samples([]record.RefSample{
						{
							Ref: uint64(inner),
							T:   int64(i),
							V:   float64(i),
						},
					}, nil)
					require.NoError(t, w.Log(sample))
				}
			}
			// Checkpoint everything up to segment 31 (keep-all predicate),
			// then truncate so only the checkpoint holds the data.
			Checkpoint(log.NewNopLogger(), w, 30, 31, func(x uint64) bool { return true }, 0)
			w.Truncate(32)
			// Start read after checkpoint, no more data written.
			_, _, err = Segments(w.Dir())
			require.NoError(t, err)
			wt := newWriteToMock()
			watcher := NewWatcher(wMetrics, nil, nil, "", wt, dir, false)
			go watcher.Start()
			expectedSeries := seriesCount
			// Poll until the watcher has replayed every series from the
			// checkpoint, then stop it and assert the final count.
			retry(t, defaultRetryInterval, defaultRetries, func() bool {
				return wt.checkNumLabels() >= expectedSeries
			})
			watcher.Stop()
			require.Equal(t, expectedSeries, wt.checkNumLabels())
		})
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 844
} | [
2830,
3393,
4418,
92688,
1155,
353,
8840,
836,
8,
341,
35272,
1695,
1669,
220,
18,
17,
353,
220,
16,
15,
17,
19,
198,
4777,
4013,
2507,
284,
220,
16,
15,
198,
4777,
10469,
2507,
284,
220,
17,
20,
15,
271,
2023,
8358,
24611,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_mapper_MapRules(t *testing.T) {
l := log.NewLogfmtLogger(os.Stdout)
l = level.NewFilter(l, level.AllowInfo())
setupRuleSets()
m := &mapper{
Path: "/rules",
FS: afero.NewMemMapFs(),
logger: l,
}
t.Run("basic rulegroup", func(t *testing.T) {
updated, files, err := m.MapRules(testUser, initialRuleSet)
require.True(t, updated)
require.Len(t, files, 1)
require.Equal(t, fileOnePath, files[0])
require.NoError(t, err)
exists, err := afero.Exists(m.FS, fileOnePath)
require.True(t, exists)
require.NoError(t, err)
})
t.Run("identical rulegroup", func(t *testing.T) {
updated, files, err := m.MapRules(testUser, initialRuleSet)
require.False(t, updated)
require.Len(t, files, 1)
require.NoError(t, err)
exists, err := afero.Exists(m.FS, fileOnePath)
require.True(t, exists)
require.NoError(t, err)
})
t.Run("out of order identical rulegroup", func(t *testing.T) {
updated, files, err := m.MapRules(testUser, outOfOrderRuleSet)
require.False(t, updated)
require.Len(t, files, 1)
require.NoError(t, err)
exists, err := afero.Exists(m.FS, fileOnePath)
require.True(t, exists)
require.NoError(t, err)
})
t.Run("updated rulegroup", func(t *testing.T) {
updated, files, err := m.MapRules(testUser, updatedRuleSet)
require.True(t, updated)
require.Len(t, files, 1)
require.Equal(t, fileOnePath, files[0])
require.NoError(t, err)
exists, err := afero.Exists(m.FS, fileOnePath)
require.True(t, exists)
require.NoError(t, err)
})
} | explode_data.jsonl/34363 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 653
} | [
2830,
3393,
76664,
56992,
26008,
1155,
353,
8840,
836,
8,
341,
8810,
1669,
1487,
7121,
2201,
12501,
7395,
9638,
83225,
340,
8810,
284,
2188,
7121,
5632,
2333,
11,
2188,
29081,
1731,
2398,
84571,
11337,
30175,
741,
2109,
1669,
609,
38076,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestEnvAddOnManagedNodeGroupsInvalidInstanceType builds an EKS tester
// config from environment variables whose managed node group requests
// "m3.xlarge" instances, and expects ValidateAndSetDefaults to reject the
// older-generation instance type.
func TestEnvAddOnManagedNodeGroupsInvalidInstanceType(t *testing.T) {
	cfg := NewDefault()
	// Validation writes output files as a side effect; clean them up.
	defer func() {
		os.RemoveAll(cfg.ConfigPath)
		os.RemoveAll(cfg.KubectlCommandsOutputPath)
		os.RemoveAll(cfg.RemoteAccessCommandsOutputPath)
	}()
	// Enable managed node groups and point remote access at a dummy key.
	os.Setenv("AWS_K8S_TESTER_EKS_ADD_ON_MANAGED_NODE_GROUPS_ENABLE", `true`)
	defer os.Unsetenv("AWS_K8S_TESTER_EKS_ADD_ON_MANAGED_NODE_GROUPS_ENABLE")
	os.Setenv("AWS_K8S_TESTER_EKS_REMOTE_ACCESS_PRIVATE_KEY_PATH", `a`)
	defer os.Unsetenv("AWS_K8S_TESTER_EKS_REMOTE_ACCESS_PRIVATE_KEY_PATH")
	// "m3.xlarge" is the deliberately invalid (older) instance type.
	os.Setenv("AWS_K8S_TESTER_EKS_ADD_ON_MANAGED_NODE_GROUPS_MNGS", `{"test-mng-for-cni":{"name":"test-mng-for-cni","tags":{"group":"amazon-vpc-cni-k8s"},"ami-type":"AL2_x86_64","asg-min-size":3,"asg-max-size":3,"asg-desired-capacity":3,"instance-types":["m3.xlarge"]}}`)
	defer os.Unsetenv("AWS_K8S_TESTER_EKS_ADD_ON_MANAGED_NODE_GROUPS_MNGS")
	os.Setenv("AWS_K8S_TESTER_EKS_ADD_ON_NLB_HELLO_WORLD_ENABLE", `true`)
	defer os.Unsetenv("AWS_K8S_TESTER_EKS_ADD_ON_NLB_HELLO_WORLD_ENABLE")
	if err := cfg.UpdateFromEnvs(); err != nil {
		t.Fatal(err)
	}
	err := cfg.ValidateAndSetDefaults()
	if err == nil {
		t.Fatal("expected error")
	}
	// Validation must fail specifically on the instance type.
	if !strings.Contains(err.Error(), "older instance type InstanceTypes") {
		t.Fatalf("unexpected error %v", err)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 618
} | [
2830,
3393,
14359,
2212,
1925,
27192,
1955,
22173,
7928,
2523,
929,
1155,
353,
8840,
836,
8,
341,
50286,
1669,
1532,
3675,
741,
16867,
2915,
368,
341,
197,
25078,
84427,
28272,
10753,
1820,
340,
197,
25078,
84427,
28272,
11352,
53380,
304... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestSensitiveValidate drives the sensitive-word filter through a table of
// texts: each case validates the text, deletes some words, validates again,
// and asserts both the pass/fail outcome and the first matched word for
// each pass.
func TestSensitiveValidate(t *testing.T) {
	filter := New()
	// NOTE(review): every word below is added twice — presumably to verify
	// that re-adding an existing word is harmless; confirm this is intended.
	filter.AddWord("有一个东西")
	filter.AddWord("一个东西")
	filter.AddWord("一个")
	filter.AddWord("东西")
	filter.AddWord("个东")
	filter.AddWord("有一个东西")
	filter.AddWord("一个东西")
	filter.AddWord("一个")
	filter.AddWord("东西")
	testcases := []struct {
		Text            string   // input text to validate
		ExpectPass      bool     // expected result before any deletion
		ExpectFirst     string   // first word matched before any deletion
		DelWords        []string // words removed before the second pass
		ThenExpectPass  bool     // expected result after deletion
		ThenExpectFirst string   // first word matched after deletion
	}{
		// The first case includes punctuation between characters, so the
		// filter presumably skips non-word runes when matching — confirm.
		{"我有一@ |个东东西", false, "一个", []string{"一个"}, false, "个东"},
		{"我有一个东东西", false, "一个", []string{"一个"}, false, "个东"},
		{"我有一个东西", false, "有一个东西", []string{"有一个东西", "一个"}, false, "一个东西"},
		{"一个东西", false, "一个", []string{"个东", "一个"}, false, "一个东西"},
		{"两个东西", false, "个东", []string{"个东", "东西"}, true, ""},
		{"一样东西", false, "东西", []string{"东西"}, true, ""},
	}
	for _, tc := range testcases {
		if pass, first := filter.Validate(tc.Text); pass != tc.ExpectPass || first != tc.ExpectFirst {
			t.Errorf("validate %s, got %v, %s, expect %v, %s", tc.Text, pass, first, tc.ExpectPass, tc.ExpectFirst)
		}
		filter.DelWord(tc.DelWords...)
		if pass, first := filter.Validate(tc.Text); pass != tc.ThenExpectPass || first != tc.ThenExpectFirst {
			t.Errorf("after del, validate %s, got %v, %s, expect %v, %s", tc.Text, pass, first, tc.ThenExpectPass, tc.ThenExpectFirst)
		}
		// Restore the deleted words so later cases run against the full set.
		filter.AddWord(tc.DelWords...)
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 718
} | [
2830,
3393,
63316,
17926,
1155,
353,
8840,
836,
8,
341,
50108,
1669,
1532,
741,
50108,
1904,
10879,
445,
104133,
100413,
1138,
50108,
1904,
10879,
445,
46944,
100413,
1138,
50108,
1904,
10879,
445,
46944,
1138,
50108,
1904,
10879,
445,
1004... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
// TestBuild runs `docker app build` against the single.dockerapp fixture in
// a dind+swarm+registry environment, then verifies that the image ID written
// to the iidfile resolves to a stored bundle, that every image in the bundle
// is inspectable in the daemon, and that images not built locally were
// pinned to a sha256 digest.
func TestBuild(t *testing.T) {
	runWithDindSwarmAndRegistry(t, func(info dindSwarmAndRegistryInfo) {
		cmd := info.configuredCmd
		tmp := fs.NewDir(t, "TestBuild")
		testDir := path.Join("testdata", "build")
		iidfile := tmp.Join("iidfile")
		cmd.Command = dockerCli.Command("app", "build", "--tag", "single:1.0.0", "--iidfile", iidfile, "-f", path.Join(testDir, "single.dockerapp"), testDir)
		icmd.RunCmd(cmd).Assert(t, icmd.Success)
		// The build must have written the image ID ("<algo>:<digest>").
		_, err := os.Stat(iidfile)
		assert.NilError(t, err)
		bytes, err := ioutil.ReadFile(iidfile)
		assert.NilError(t, err)
		iid := string(bytes)
		// Locate the stored bundle under
		// <config>/app/bundles/contents/<algo>/<digest>.
		cfg := getDockerConfigDir(t, cmd)
		s := strings.Split(iid, ":")
		f := path.Join(cfg, "app", "bundles", "contents", s[0], s[1], image.BundleFilename)
		bndl, err := image.FromFile(f)
		assert.NilError(t, err)
		// Every image referenced by the bundle must exist in the daemon.
		built := []string{bndl.InvocationImages[0].Digest, bndl.Images["web"].Digest, bndl.Images["worker"].Digest}
		for _, ref := range built {
			cmd.Command = dockerCli.Command("inspect", ref)
			icmd.RunCmd(cmd).Assert(t, icmd.Success)
		}
		for _, img := range bndl.Images {
			// Check all image not being built locally get a fixed reference
			assert.Assert(t, img.Image == "" || strings.Contains(img.Image, "@sha256:"))
		}
		// The app image store must resolve the bundle back to the same ID.
		actualID, err := store.FromAppImage(bndl)
		assert.NilError(t, err)
		assert.Equal(t, iid, fmt.Sprintf("sha256:%s", actualID.String()))
	})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 574
} | [
2830,
3393,
11066,
1155,
353,
8840,
836,
8,
341,
56742,
2354,
35,
484,
13218,
2178,
3036,
15603,
1155,
11,
2915,
14208,
294,
484,
13218,
2178,
3036,
15603,
1731,
8,
341,
197,
25920,
1669,
3546,
5423,
3073,
15613,
198,
197,
20082,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.