text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestTriggerResolver_CreateTrigger(t *testing.T) {
triggerName := "TestName"
for testName, testData := range map[string]struct {
trigger gqlschema.TriggerCreateInput
ownerRef []*gqlschema.OwnerReference
triggerMatcher types.GomegaMatcher
errorMatcher types.GomegaMatcher
//Mocks
toTrigger *v1alpha1.Trigger
toTriggerError error
createTrigger *v1alpha1.Trigger
createTriggerError error
toGQL *gqlschema.Trigger
toGQLError error
}{
"Success": {
trigger: gqlschema.TriggerCreateInput{
Name: &triggerName,
},
ownerRef: []*gqlschema.OwnerReference{},
toTrigger: &v1alpha1.Trigger{},
toTriggerError: nil,
createTrigger: &v1alpha1.Trigger{},
createTriggerError: nil,
toGQL: &gqlschema.Trigger{},
toGQLError: nil,
triggerMatcher: gomega.Not(gomega.BeNil()),
errorMatcher: gomega.BeNil(),
},
"ToTrigger error": {
trigger: gqlschema.TriggerCreateInput{
Name: &triggerName,
},
ownerRef: []*gqlschema.OwnerReference{},
toTrigger: &v1alpha1.Trigger{},
toTriggerError: errors.New(""),
createTrigger: &v1alpha1.Trigger{},
createTriggerError: nil,
toGQL: &gqlschema.Trigger{},
toGQLError: nil,
triggerMatcher: gomega.BeNil(),
errorMatcher: gomega.HaveOccurred(),
},
"List error": {
trigger: gqlschema.TriggerCreateInput{
Name: &triggerName,
},
ownerRef: []*gqlschema.OwnerReference{},
toTrigger: &v1alpha1.Trigger{},
toTriggerError: nil,
createTrigger: &v1alpha1.Trigger{},
createTriggerError: errors.New(""),
toGQL: &gqlschema.Trigger{},
toGQLError: nil,
triggerMatcher: gomega.BeNil(),
errorMatcher: gomega.HaveOccurred(),
},
"ToGQL error": {
trigger: gqlschema.TriggerCreateInput{
Name: &triggerName,
},
ownerRef: []*gqlschema.OwnerReference{},
toTrigger: &v1alpha1.Trigger{},
toTriggerError: nil,
createTrigger: &v1alpha1.Trigger{},
createTriggerError: nil,
toGQL: &gqlschema.Trigger{},
toGQLError: errors.New(""),
triggerMatcher: gomega.BeNil(),
errorMatcher: gomega.HaveOccurred(),
},
} {
t.Run(testName, func(t *testing.T) {
//given
g := gomega.NewWithT(t)
ctx, cancel := context.WithTimeout(context.Background(), -24*time.Hour)
cancel()
service := &automock.Service{}
converter := &automock.GQLConverter{}
extractor := extractor.TriggerUnstructuredExtractor{}
converter.On(
"ToTrigger", &testData.trigger, testData.ownerRef,
).Return(testData.toTrigger, testData.toTriggerError)
service.On(
"Create", testData.toTrigger,
).Return(testData.createTrigger, testData.createTriggerError)
converter.On(
"ToGQL", testData.createTrigger,
).Return(testData.toGQL, testData.toGQLError)
//when
res := newTriggerResolver(service, converter, extractor, name.Generate)
trigger, err := res.CreateTrigger(ctx, "a", testData.trigger, testData.ownerRef)
//then
g.Expect(err).To(testData.errorMatcher)
g.Expect(trigger).To(testData.triggerMatcher)
})
}
} | explode_data.jsonl/35526 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1555
} | [
2830,
3393,
17939,
18190,
34325,
17939,
1155,
353,
8840,
836,
8,
341,
83228,
675,
1669,
330,
2271,
675,
698,
2023,
94396,
11,
67348,
1669,
2088,
2415,
14032,
60,
1235,
341,
197,
83228,
286,
53045,
17349,
8240,
4500,
4021,
2505,
198,
197... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParseWeirdDateFormat(t *testing.T) {
dates := []string{
"Sun, 17 Dec 2017 1:55 PM EST",
"9 Dec 2016 12:00 GMT",
"Friday, December 22, 2017 - 3:09pm",
"Friday, December 8, 2017 - 3:07pm",
"Thu, 25 Feb 2016 00:00:00 Europe/Brussels",
"Mon, 09 Apr 2018, 16:04",
"Di, 23 Jan 2018 00:00:00 +0100",
"Do, 29 Mär 2018 00:00:00 +0200",
"mer, 9 avr 2018 00:00:00 +0200",
"1520932969",
}
for _, date := range dates {
if _, err := Parse(date); err != nil {
t.Fatalf(`Unable to parse date: %q`, date)
}
}
} | explode_data.jsonl/21383 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 240
} | [
2830,
3393,
14463,
1654,
2603,
16611,
1155,
353,
8840,
836,
8,
341,
2698,
973,
1669,
3056,
917,
515,
197,
197,
1,
30092,
11,
220,
16,
22,
3714,
220,
17,
15,
16,
22,
220,
16,
25,
20,
20,
5851,
25877,
756,
197,
197,
1,
24,
3714,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestBindPFlagsStringSlice(t *testing.T) {
for _, testValue := range []struct {
Expected []string
Value string
}{
{[]string{}, ""},
{[]string{"jeden"}, "jeden"},
{[]string{"dwa", "trzy"}, "dwa,trzy"},
{[]string{"cztery", "piec , szesc"}, "cztery,\"piec , szesc\""}} {
for _, changed := range []bool{true, false} {
v := New() // create independent Viper object
flagSet := pflag.NewFlagSet("test", pflag.ContinueOnError)
flagSet.StringSlice("stringslice", testValue.Expected, "test")
flagSet.Visit(func(f *pflag.Flag) {
if len(testValue.Value) > 0 {
f.Value.Set(testValue.Value)
f.Changed = changed
}
})
err := v.BindPFlags(flagSet)
if err != nil {
t.Fatalf("error binding flag set, %v", err)
}
type TestStr struct {
StringSlice []string
}
val := &TestStr{}
if err := v.Unmarshal(val); err != nil {
t.Fatalf("%+#v cannot unmarshal: %s", testValue.Value, err)
}
assert.Equal(t, testValue.Expected, val.StringSlice)
}
}
} | explode_data.jsonl/5571 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 445
} | [
2830,
3393,
9950,
47,
9195,
703,
33236,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1130,
1669,
2088,
3056,
1235,
341,
197,
197,
18896,
3056,
917,
198,
197,
47399,
262,
914,
198,
197,
59403,
197,
197,
90,
1294,
917,
22655,
77496,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestBSTree_IsSubTree(t *testing.T) {
assert := internal.NewAssert(t, "TestBSTree_IsSubTree")
superTree := NewBSTree(8, &intComparator{})
superTree.InsertNode(4)
superTree.InsertNode(5)
superTree.InsertNode(6)
superTree.InsertNode(9)
superTree.InsertNode(4)
superTree.Print()
subTree := NewBSTree(5, &intComparator{})
subTree.InsertNode(4)
subTree.InsertNode(6)
subTree.Print()
assert.Equal(true, superTree.HasSubTree(subTree))
assert.Equal(false, subTree.HasSubTree(superTree))
} | explode_data.jsonl/41666 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 199
} | [
2830,
3393,
61006,
765,
31879,
3136,
6533,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
5306,
7121,
8534,
1155,
11,
330,
2271,
61006,
765,
31879,
3136,
6533,
5130,
12468,
6533,
1669,
1532,
61006,
765,
7,
23,
11,
609,
396,
38658,
37790,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestVersionFlag(t *testing.T) {
c := StartCapture()
exit = func(r int) {}
os.Args = []string{"dosa", "--version"}
main()
output := c.stop(false)
assert.Contains(t, output, "Version:")
assert.Contains(t, output, "Git Commit:")
assert.Contains(t, output, "UTC Build Time:")
} | explode_data.jsonl/16646 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 110
} | [
2830,
3393,
5637,
12135,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
5145,
27429,
741,
14519,
284,
2915,
2601,
526,
8,
5613,
25078,
51015,
284,
3056,
917,
4913,
67,
11983,
497,
14482,
4366,
16707,
36641,
741,
21170,
1669,
272,
13227,
3576... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAttrsUpdateLD(t *testing.T) {
c := setupTest([]string{"update", "attrs", "--host", "orion-ld", "--id", "urn:ngsi-ld:Product:010", "--data", "{\"specialOffer\":{\"value\": true}}", "--context", "[\"http://context\"]"})
reqRes := helper.MockHTTPReqRes{}
reqRes.Res.StatusCode = http.StatusNoContent
reqRes.ReqData = []byte(`{"@context":["http://context"],"specialOffer":{"value":true}}`)
reqRes.Path = "/ngsi-ld/v1/entities/urn:ngsi-ld:Product:010/attrs"
helper.SetClientHTTP(c, reqRes)
err := attrsUpdate(c, c.Ngsi, c.Client)
assert.NoError(t, err)
} | explode_data.jsonl/33073 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 227
} | [
2830,
3393,
53671,
4289,
12335,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
6505,
2271,
10556,
917,
4913,
2386,
497,
330,
20468,
497,
14482,
3790,
497,
330,
269,
290,
12,
507,
497,
14482,
307,
497,
330,
399,
25,
968,
6321,
12,
507,
25... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBatchDelete(t *testing.T) {
cases := []struct {
objects []BatchDeleteObject
size int
expected int
}{
{ // 0
[]BatchDeleteObject{
{
Object: &s3.DeleteObjectInput{
Key: aws.String("1"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("2"),
Bucket: aws.String("bucket2"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("3"),
Bucket: aws.String("bucket3"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("4"),
Bucket: aws.String("bucket4"),
},
},
},
1,
4,
},
{ // 1
[]BatchDeleteObject{
{
Object: &s3.DeleteObjectInput{
Key: aws.String("1"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("2"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("3"),
Bucket: aws.String("bucket3"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("4"),
Bucket: aws.String("bucket3"),
},
},
},
1,
4,
},
{ // 2
[]BatchDeleteObject{
{
Object: &s3.DeleteObjectInput{
Key: aws.String("1"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("2"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("3"),
Bucket: aws.String("bucket3"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("4"),
Bucket: aws.String("bucket3"),
},
},
},
4,
2,
},
{ // 3
[]BatchDeleteObject{
{
Object: &s3.DeleteObjectInput{
Key: aws.String("1"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("2"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("3"),
Bucket: aws.String("bucket3"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("4"),
Bucket: aws.String("bucket3"),
},
},
},
10,
2,
},
{ // 4
[]BatchDeleteObject{
{
Object: &s3.DeleteObjectInput{
Key: aws.String("1"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("2"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("3"),
Bucket: aws.String("bucket1"),
},
},
{
Object: &s3.DeleteObjectInput{
Key: aws.String("4"),
Bucket: aws.String("bucket3"),
},
},
},
2,
3,
},
}
count := 0
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
count++
}))
svc := &mockS3Client{S3: buildS3SvcClient(server.URL)}
for i, c := range cases {
batcher := BatchDelete{
Client: svc,
BatchSize: c.size,
}
if err := batcher.Delete(aws.BackgroundContext(), &DeleteObjectsIterator{Objects: c.objects}); err != nil {
t.Errorf("expected no error, but received %v", err)
}
if count != c.expected {
t.Errorf("Case %d: expected %d, but received %d", i, c.expected, count)
}
count = 0
}
} | explode_data.jsonl/62307 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1941
} | [
2830,
3393,
21074,
6435,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
197,
19210,
220,
3056,
21074,
6435,
1190,
198,
197,
13832,
257,
526,
198,
197,
42400,
526,
198,
197,
59403,
197,
197,
90,
442,
220,
15,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMessagerCacheFull(t *testing.T) {
mc := newCache(2)
if !mc.Add(&MessageRow{
TimeNext: 1,
Epoch: 0,
Row: []sqltypes.Value{sqltypes.NewVarBinary("row01")},
}) {
t.Fatal("Add returned false")
}
if !mc.Add(&MessageRow{
TimeNext: 2,
Epoch: 0,
Row: []sqltypes.Value{sqltypes.NewVarBinary("row02")},
}) {
t.Fatal("Add returned false")
}
if mc.Add(&MessageRow{
TimeNext: 2,
Epoch: 1,
Row: []sqltypes.Value{sqltypes.NewVarBinary("row12")},
}) {
t.Error("Add(full): returned true, want false")
}
} | explode_data.jsonl/24951 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 260
} | [
2830,
3393,
84423,
1409,
8233,
9432,
1155,
353,
8840,
836,
8,
341,
97662,
1669,
501,
8233,
7,
17,
340,
743,
753,
12887,
1904,
2099,
2052,
3102,
515,
197,
67567,
5847,
25,
220,
16,
345,
197,
197,
44338,
25,
262,
220,
15,
345,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestIsUpToDate(t *testing.T) {
type args struct {
params *v1alpha1.GroupMembershipParameters
instanceList *iamagv2.GroupMembersList
}
type want struct {
upToDate bool
isErr bool
}
cases := map[string]struct {
args args
want want
}{
"IsUpToDate": {
args: args{
params: params(),
instanceList: instanceList(),
},
want: want{upToDate: true, isErr: false},
},
"NeedsUpdate": {
args: args{
params: params(func(crp *v1alpha1.GroupMembershipParameters) {
crp.Members = []v1alpha1.AddGroupMembersRequestMembersItem{
{
IamID: memberIamID1,
Type: MemberTypeUser,
},
}
}),
instanceList: instanceList(func(i *iamagv2.GroupMembersList) {
}),
},
want: want{upToDate: false, isErr: false},
},
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
r, err := IsUpToDate(tc.args.params, tc.args.instanceList, logging.NewNopLogger())
if err != nil && !tc.want.isErr {
t.Error("IsUpToDate(...) unexpected error")
}
if diff := cmp.Diff(tc.want.upToDate, r); diff != "" {
t.Errorf("IsUpToDate(...): -want, +got:\n%s", diff)
}
})
}
} | explode_data.jsonl/49397 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 544
} | [
2830,
3393,
3872,
2324,
76054,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
25856,
981,
353,
85,
16,
7141,
16,
5407,
80904,
9706,
198,
197,
56256,
852,
353,
4932,
351,
85,
17,
5407,
24371,
852,
198,
197,
532,
13158,
13... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSinglyLinkedList(t *testing.T) {
list := New()
_, err := list.GetFirstValue()
assert.NotNil(t, err)
_, err = list.GetLastValue()
assert.NotNil(t, err)
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{}))
assert.Equal(t, list.IsEmpty(), true)
assert.Equal(t, list.Size(), 0)
err = list.InsertAt(3, "hello")
assert.NotNil(t, err)
list.Add("hello")
value, err := list.GetFirstValue()
assert.Nil(t, err)
assert.Equal(t, value, "hello")
value, err = list.GetLastValue()
assert.Nil(t, err)
assert.Equal(t, value, "hello")
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"hello"}))
assert.Equal(t, list.GetIndexOf("hello"), 0)
assert.Equal(t, list.IsEmpty(), false)
assert.Equal(t, list.Size(), 1)
list.InsertAt(0, "cool")
value, err = list.GetFirstValue()
assert.Nil(t, err)
assert.Equal(t, value, "cool")
value, err = list.GetLastValue()
assert.Nil(t, err)
assert.Equal(t, value, "hello")
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"cool", "hello"}))
assert.Equal(t, list.GetIndexOf("cool"), 0)
assert.Equal(t, list.GetIndexOf("hello"), 1)
assert.Equal(t, list.IsEmpty(), false)
assert.Equal(t, list.Size(), 2)
list.InsertAt(1, "awesome")
value, err = list.GetFirstValue()
assert.Nil(t, err)
assert.Equal(t, value, "cool")
value, err = list.GetLastValue()
assert.Nil(t, err)
assert.Equal(t, value, "hello")
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"cool", "awesome", "hello"}))
assert.Equal(t, list.GetIndexOf("cool"), 0)
assert.Equal(t, list.GetIndexOf("awesome"), 1)
assert.Equal(t, list.GetIndexOf("hello"), 2)
assert.Equal(t, list.IsEmpty(), false)
assert.Equal(t, list.Size(), 3)
list.InsertAt(2, "fantastic")
value, err = list.GetFirstValue()
assert.Nil(t, err)
assert.Equal(t, value, "cool")
value, err = list.GetLastValue()
assert.Nil(t, err)
assert.Equal(t, value, "hello")
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"cool", "awesome", "fantastic", "hello"}))
assert.Equal(t, list.GetIndexOf("cool"), 0)
assert.Equal(t, list.GetIndexOf("awesome"), 1)
assert.Equal(t, list.GetIndexOf("fantastic"), 2)
assert.Equal(t, list.GetIndexOf("hello"), 3)
assert.Equal(t, list.IsEmpty(), false)
assert.Equal(t, list.Size(), 4)
value, err = list.RemoveAt(3)
assert.Equal(t, value, "hello")
assert.Nil(t, err)
assert.Equal(t, list.GetIndexOf("hello"), -1)
_, err = list.RemoveAt(3)
assert.NotNil(t, err)
list.Clear()
assert.Equal(t, list.GetIndexOf("hello"), -1)
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{}))
assert.Equal(t, list.IsEmpty(), true)
assert.Equal(t, list.Size(), 0)
list.Add("1", "2", "3", "4")
assert.Equal(t, list.GetIndexOf("1"), 0)
assert.Equal(t, list.GetIndexOf("2"), 1)
assert.Equal(t, list.GetIndexOf("3"), 2)
assert.Equal(t, list.GetIndexOf("4"), 3)
value, err = list.GetFirstValue()
assert.Nil(t, err)
assert.Equal(t, value, "1")
value, err = list.GetLastValue()
assert.Nil(t, err)
assert.Equal(t, value, "4")
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"1", "2", "3", "4"}))
assert.Equal(t, list.IsEmpty(), false)
assert.Equal(t, list.Size(), 4)
value, err = list.RemoveAt(2)
assert.Equal(t, value, "3")
assert.Nil(t, err)
assert.Equal(t, list.GetIndexOf("1"), 0)
assert.Equal(t, list.GetIndexOf("2"), 1)
assert.Equal(t, list.GetIndexOf("4"), 2)
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"1", "2", "4"}))
assert.Equal(t, list.IsEmpty(), false)
assert.Equal(t, list.Size(), 3)
list.Clear()
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{}))
assert.Equal(t, list.IsEmpty(), true)
assert.Equal(t, list.Size(), 0)
list.Add("a", "b", "c", "d")
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"a", "b", "c", "d"}))
value, err = list.RemoveAt(3)
assert.Equal(t, value, "d")
assert.Nil(t, err)
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{"a", "b", "c"}))
assert.Equal(t, list.IsEmpty(), false)
assert.Equal(t, list.Size(), 3)
for !list.IsEmpty() {
list.RemoveAt(0)
}
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{}))
assert.Equal(t, list.IsEmpty(), true)
assert.Equal(t, list.Size(), 0)
list.Clear()
assert.True(t, reflect.DeepEqual(list.GetValues(), []interface{}{}))
assert.Equal(t, list.IsEmpty(), true)
assert.Equal(t, list.Size(), 0)
_, err = list.RemoveAt(0)
assert.NotNil(t, err)
} | explode_data.jsonl/45619 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1885
} | [
2830,
3393,
50,
11307,
33915,
1155,
353,
8840,
836,
8,
341,
14440,
1669,
1532,
2822,
197,
6878,
1848,
1669,
1140,
2234,
5338,
1130,
741,
6948,
93882,
1155,
11,
1848,
692,
197,
6878,
1848,
284,
1140,
2234,
5842,
1130,
741,
6948,
93882,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestModuleChangesOnDisk(t *testing.T) {
testenv.NeedsGo1Point(t, 14)
const mod = `
-- go.mod --
module mod.com
go 1.12
require example.com v1.2.3
-- main.go --
package main
func main() {
fmt.Println(blah.Name)
`
runModfileTest(t, mod, proxy, func(t *testing.T, env *Env) {
env.Await(env.DiagnosticAtRegexp("go.mod", "require"))
env.RunGoCommand("mod", "tidy")
env.Await(
EmptyDiagnostics("go.mod"),
)
})
} | explode_data.jsonl/3743 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 191
} | [
2830,
3393,
3332,
11317,
1925,
47583,
1155,
353,
8840,
836,
8,
341,
18185,
3160,
2067,
68,
6767,
10850,
16,
2609,
1155,
11,
220,
16,
19,
692,
4777,
1463,
284,
22074,
313,
728,
10929,
39514,
4352,
1463,
905,
271,
3346,
220,
16,
13,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClient_CreateAggregator(t *testing.T) {
ctx := context.Background()
asUser := userClient(t)
withToken := withToken(t, asUser)
got, err := withToken.CreateAggregator(ctx, types.CreateAggregator{
Name: "test-aggregator",
Version: types.DefaultAggregatorVersion,
AddHealthCheckPipeline: true,
})
wantEqual(t, err, nil)
wantEqual(t, got.Version, types.DefaultAggregatorVersion)
wantEqual(t, got.Name, "test-aggregator")
wantNoEqual(t, got.Token, "")
wantNoEqual(t, got.PrivateRSAKey, "")
wantNoEqual(t, got.PublicRSAKey, "")
wantNoTimeZero(t, got.CreatedAt)
wantNoEqual(t, got.HealthCheckPipeline, nil)
wantEqual(t, len(got.ResourceProfiles), 3)
t.Run("name exists", func(t *testing.T) {
_, err := withToken.CreateAggregator(ctx, types.CreateAggregator{
Name: "duplicate",
})
wantEqual(t, err, nil)
_, err = withToken.CreateAggregator(ctx, types.CreateAggregator{
Name: "duplicate",
})
wantErrMsg(t, err, "aggregator name already exists")
})
} | explode_data.jsonl/30416 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 430
} | [
2830,
3393,
2959,
34325,
9042,
58131,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
2266,
19047,
741,
60451,
1474,
1669,
1196,
2959,
1155,
340,
46948,
3323,
1669,
448,
3323,
1155,
11,
438,
1474,
692,
3174,
354,
11,
1848,
1669,
448,
3323,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestResourceRecordSetsAdditionVisible(t *testing.T) {
zone := firstZone(t)
sets := rrs(t, zone)
rrset := getExampleRrs(zone)
addRrsetOrFail(t, sets, rrset)
defer sets.StartChangeset().Remove(rrset).Apply()
t.Logf("Successfully added resource record set: %v", rrset)
record := getRrOrFail(t, sets, rrset.Name())
if record == nil {
t.Errorf("Failed to find added resource record set %s", rrset.Name())
}
} | explode_data.jsonl/75412 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 162
} | [
2830,
3393,
4783,
6471,
30175,
2212,
680,
5715,
1155,
353,
8840,
836,
8,
341,
197,
8684,
1669,
1156,
15363,
1155,
340,
197,
4917,
1669,
435,
5428,
1155,
11,
10143,
340,
197,
634,
746,
1669,
633,
13314,
49,
5428,
74228,
340,
12718,
49,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestReconnect(t *testing.T) {
// Scenario: node 1 and node 2 are connected,
// and node 2 is taken offline.
// Node 1 tries to send a message to node 2 but fails,
// and afterwards node 2 is brought back, after which
// node 1 sends more messages, and it should succeed
// sending a message to node 2 eventually.
node1 := newTestNode(t)
defer node1.stop()
conf := node1.dialer.Config
conf.Timeout = time.Hour
node2 := newTestNode(t)
node2.handler.On("OnSubmit", testChannel, node1.nodeInfo.ID, mock.Anything).Return(nil)
defer node2.stop()
config := []cluster.RemoteNode{node1.nodeInfo, node2.nodeInfo}
node1.c.Configure(testChannel, config)
node2.c.Configure(testChannel, config)
// Make node 2 be offline by shutting down its gRPC service
node2.srv.Stop()
// Obtain the stub for node 2.
// Should succeed, because the connection was created at time of configuration
stub, err := node1.c.Remote(testChannel, node2.nodeInfo.ID)
assert.NoError(t, err)
// Try to obtain a stream. Should not Succeed.
gt := gomega.NewGomegaWithT(t)
gt.Eventually(func() error {
_, err = stub.NewStream(time.Hour)
return err
}).Should(gomega.Not(gomega.Succeed()))
// Wait for the port to be released
for {
lsnr, err := net.Listen("tcp", node2.nodeInfo.Endpoint)
if err == nil {
lsnr.Close()
break
}
}
// Resurrect node 2
node2.resurrect()
// Send a message from node 1 to node 2.
// Should succeed eventually
assertEventualSendMessage(t, stub, testReq)
} | explode_data.jsonl/39839 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 526
} | [
2830,
3393,
693,
6459,
1155,
353,
8840,
836,
8,
341,
197,
322,
58663,
25,
2436,
220,
16,
323,
2436,
220,
17,
525,
8433,
345,
197,
322,
323,
2436,
220,
17,
374,
4429,
26166,
624,
197,
322,
6018,
220,
16,
16297,
311,
3624,
264,
1943... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClientNotify(t *testing.T) {
server := newTestServer()
defer server.Stop()
client := DialInProc(server)
defer client.Close()
if err := client.Notify(context.Background(), "test_echo", "hello", 10, &Args{"world"}); err != nil {
//t.Fatal(err)
logs.Error(err)
}
} | explode_data.jsonl/64460 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 108
} | [
2830,
3393,
2959,
28962,
1155,
353,
8840,
836,
8,
341,
41057,
1669,
501,
2271,
5475,
741,
16867,
3538,
30213,
741,
25291,
1669,
66155,
641,
24508,
21421,
340,
16867,
2943,
10421,
2822,
743,
1848,
1669,
2943,
80435,
5378,
19047,
1507,
330,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestWriteMapDescDetailed(t *testing.T) {
buf := &bytes.Buffer{}
dw := printers.NewBarePrefixWriter(buf)
WriteMapDesc(dw, testMap, "eggs", true)
assert.Equal(t, buf.String(), "eggs:\ta=b\n\tc=d\n\tfoo=bar\n\tserving.knative.dev/funky=chicken\n")
} | explode_data.jsonl/36444 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 112
} | [
2830,
3393,
7985,
2227,
11065,
63484,
1155,
353,
8840,
836,
8,
341,
26398,
1669,
609,
9651,
22622,
16094,
2698,
86,
1669,
55953,
7121,
33,
546,
14335,
6492,
10731,
340,
60373,
2227,
11065,
97342,
11,
1273,
2227,
11,
330,
791,
5857,
497,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestHyperComplexBoolReplace(t *testing.T) {
patch, e := Diff([]byte(hyperComplexBase), []byte(hyperComplexA))
assert.NoError(t, e)
assert.Equal(t, 3, len(patch), "they should be equal")
sort.Sort(ByPath(patch))
for _, v := range patch {
t.Log(v.JSON())
}
change := patch[0]
assert.Equal(t, "replace", change.Operation, "they should be equal")
assert.Equal(t, "/goods/0/batters/batter/2/type", change.Path, "they should be equal")
assert.Equal(t, "Strawberry", change.Value, "they should be equal")
change = patch[1]
assert.Equal(t, "add", change.Operation, "they should be equal")
assert.Equal(t, "/goods/2/batters/batter/2", change.Path, "they should be equal")
assert.Equal(t, map[string]interface{}{"id": "1003", "type": "Vanilla"}, change.Value, "they should be equal")
change = patch[2]
assert.Equal(t, "remove", change.Operation, "they should be equal")
assert.Equal(t, "/goods/2/topping/2", change.Path, "they should be equal")
assert.Equal(t, nil, change.Value, "they should be equal")
} | explode_data.jsonl/62374 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 383
} | [
2830,
3393,
73946,
31137,
11233,
23107,
1155,
353,
8840,
836,
8,
341,
3223,
754,
11,
384,
1669,
28369,
10556,
3782,
3203,
39252,
31137,
3978,
701,
3056,
3782,
3203,
39252,
31137,
32,
1171,
6948,
35699,
1155,
11,
384,
340,
6948,
12808,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func Test_ErrCouldNotRollbackMigration_GenericError(t *testing.T) {
err := ErrCouldNotRollbackMigration{
Name: "some-migration",
Wrapped: errors.New("the root cause"),
}
assert.Equal(t, "error rolling back migration (some-migration): the root cause", err.Error())
} | explode_data.jsonl/55405 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
93623,
12895,
2623,
32355,
1419,
20168,
29085,
1454,
1155,
353,
8840,
836,
8,
341,
9859,
1669,
15495,
12895,
2623,
32355,
1419,
20168,
515,
197,
21297,
25,
262,
330,
14689,
1448,
5033,
756,
197,
17300,
56289,
25,
5975,
7121,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFoldDup(t *testing.T) {
for _, tt := range foldDupTests {
f1, f2 := foldDup(tt.list)
if f1 != tt.f1 || f2 != tt.f2 {
t.Errorf("foldDup(%q) = %q, %q, want %q, %q", tt.list, f1, f2, tt.f1, tt.f2)
}
}
} | explode_data.jsonl/47401 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 130
} | [
2830,
3393,
75536,
85713,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17853,
1669,
2088,
11555,
85713,
18200,
341,
197,
1166,
16,
11,
282,
17,
1669,
11555,
85713,
47152,
6420,
340,
197,
743,
282,
16,
961,
17853,
833,
16,
1369,
282,
17,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestLoadProfile(t *testing.T) {
t.Parallel()
profileName := "proxy.example.com"
t.Run("normal profile", func(t *testing.T) {
t.Parallel()
dir := t.TempDir()
writeProfile(t, &profile.Profile{
WebProxyAddr: profileName + ":3080",
SiteName: "example.com",
Username: "testUser",
Dir: dir,
})
testProfileContents(t, dir, profileName)
})
t.Run("non existent profile", func(t *testing.T) {
t.Parallel()
// Load non existent profile.
creds := LoadProfile("invalid_dir", "invalid_name")
_, err := creds.TLSConfig()
require.Error(t, err)
_, err = creds.SSHClientConfig()
require.Error(t, err)
_, err = creds.Dialer(Config{})
require.Error(t, err)
})
} | explode_data.jsonl/55482 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 308
} | [
2830,
3393,
5879,
8526,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
197,
5365,
675,
1669,
330,
22803,
7724,
905,
1837,
3244,
16708,
445,
8252,
5526,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
3244,
41288,
7957,
741,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClosing(t *testing.T) {
catalogStore, err := store.Open("catalog_closing", store.DefaultOptions())
require.NoError(t, err)
defer os.RemoveAll("catalog_closing")
dataStore, err := store.Open("sqldata_closing", store.DefaultOptions())
require.NoError(t, err)
defer os.RemoveAll("sqldata_closing")
_, err = NewEngine(nil, nil, nil)
require.Equal(t, ErrIllegalArguments, err)
engine, err := NewEngine(catalogStore, dataStore, DefaultOptions().WithPrefix(sqlPrefix))
require.NoError(t, err)
err = engine.Close()
require.NoError(t, err)
err = engine.Close()
require.Equal(t, ErrAlreadyClosed, err)
_, err = engine.ExistDatabase("db1")
require.Equal(t, ErrAlreadyClosed, err)
err = engine.UseDatabase("db1")
require.Equal(t, ErrAlreadyClosed, err)
_, err = engine.GetDatabaseByName("db1")
require.Equal(t, ErrAlreadyClosed, err)
_, err = engine.GetTableByName("db1", "table1")
require.Equal(t, ErrAlreadyClosed, err)
_, err = engine.DatabaseInUse()
require.Equal(t, ErrAlreadyClosed, err)
err = engine.UseSnapshot(0, 0)
require.Equal(t, ErrAlreadyClosed, err)
err = engine.RenewSnapshot()
require.Equal(t, ErrAlreadyClosed, err)
err = engine.CloseSnapshot()
require.Equal(t, ErrAlreadyClosed, err)
_, err = engine.InferParameters("CREATE DATABASE db1")
require.Equal(t, ErrAlreadyClosed, err)
_, err = engine.InferParametersPreparedStmt(&TxStmt{})
require.Equal(t, ErrAlreadyClosed, err)
err = engine.EnsureCatalogReady(nil)
require.Equal(t, ErrAlreadyClosed, err)
err = engine.ReloadCatalog(nil)
require.Equal(t, ErrAlreadyClosed, err)
} | explode_data.jsonl/64067 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 595
} | [
2830,
3393,
36294,
1155,
353,
8840,
836,
8,
341,
1444,
7750,
6093,
11,
1848,
1669,
3553,
12953,
445,
26539,
666,
17831,
497,
3553,
13275,
3798,
2398,
17957,
35699,
1155,
11,
1848,
340,
16867,
2643,
84427,
445,
26539,
666,
17831,
5130,
8... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestByte(t *testing.T) {
modulus := Modulus()
// test values
var bs [3][]byte
r1, _ := rand.Int(rand.Reader, modulus)
bs[0] = r1.Bytes() // should be r1 as Element
r2, _ := rand.Int(rand.Reader, modulus)
r2.Add(modulus, r2)
bs[1] = r2.Bytes() // should be r2 as Element
var tmp big.Int
tmp.SetUint64(0)
bs[2] = tmp.Bytes() // should be 0 as Element
// witness values as Element
var el [3]Element
el[0].SetBigInt(r1)
el[1].SetBigInt(r2)
el[2].SetUint64(0)
// check conversions
for i := 0; i < 3; i++ {
var z Element
z.SetBytes(bs[i])
if !z.Equal(&el[i]) {
t.Fatal("SetBytes fails")
}
// check conversion Element to Bytes
b := z.Bytes()
z.SetBytes(b)
if !z.Equal(&el[i]) {
t.Fatal("Bytes fails")
}
}
} | explode_data.jsonl/70336 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 344
} | [
2830,
3393,
7153,
1155,
353,
8840,
836,
8,
1476,
42228,
19425,
1669,
5650,
19425,
2822,
197,
322,
1273,
2750,
198,
2405,
17065,
508,
18,
45725,
3782,
198,
7000,
16,
11,
716,
1669,
10382,
7371,
37595,
47431,
11,
74024,
340,
93801,
58,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestTypeError(t *testing.T) {
const SCRIPT = `
function F() {
return new TypeError("test");
}
var e = F();
e.message == "test" && e.name == "TypeError";
`
testScript1(SCRIPT, valueTrue, t)
} | explode_data.jsonl/75246 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 82
} | [
2830,
3393,
80076,
1155,
353,
8840,
836,
8,
341,
4777,
53679,
284,
22074,
7527,
434,
368,
341,
197,
853,
501,
25030,
445,
1944,
797,
197,
630,
2405,
384,
284,
434,
543,
7727,
6698,
621,
330,
1944,
1,
1009,
384,
2644,
621,
330,
80076... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestMuxMounts(t *testing.T) {
r := NewRouter()
r.Get("/{hash}", func(w http.ResponseWriter, r *http.Request) {
v := URLParam(r, "hash")
w.Write([]byte(fmt.Sprintf("/%s", v)))
})
r.Route("/{hash}/share", func(r Router) {
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
v := URLParam(r, "hash")
w.Write([]byte(fmt.Sprintf("/%s/share", v)))
})
r.Get("/{network}", func(w http.ResponseWriter, r *http.Request) {
v := URLParam(r, "hash")
n := URLParam(r, "network")
w.Write([]byte(fmt.Sprintf("/%s/share/%s", v, n)))
})
})
m := NewRouter()
m.Mount("/sharing", r)
ts := httptest.NewServer(m)
defer ts.Close()
if _, body := testRequest(t, ts, "GET", "/sharing/aBc", nil); body != "/aBc" {
t.Fatalf(body)
}
if _, body := testRequest(t, ts, "GET", "/sharing/aBc/share", nil); body != "/aBc/share" {
t.Fatalf(body)
}
if _, body := testRequest(t, ts, "GET", "/sharing/aBc/share/twitter", nil); body != "/aBc/share/twitter" {
t.Fatalf(body)
}
} | explode_data.jsonl/42864 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 459
} | [
2830,
3393,
44,
2200,
16284,
82,
1155,
353,
8840,
836,
8,
341,
7000,
1669,
1532,
9523,
2822,
7000,
2234,
65871,
8296,
9545,
2915,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
341,
197,
5195,
1669,
5548,
2001,
2601,
11,
330,
8296,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestXegerInvalidPattern(t *testing.T) {
pattern := "abc[abc]("
_, err := NewXeger(pattern)
if err == nil {
t.Errorf("Not failed on invalid pattern %s: %s\n", pattern, err.Error())
}
} | explode_data.jsonl/1408 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 75
} | [
2830,
3393,
55,
1878,
7928,
15760,
1155,
353,
8840,
836,
8,
341,
3223,
3227,
1669,
330,
13683,
58,
13683,
60,
70576,
197,
6878,
1848,
1669,
1532,
55,
1878,
30948,
340,
743,
1848,
621,
2092,
341,
197,
3244,
13080,
445,
2623,
4641,
389,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestUTXOIDVerifyEmpty(t *testing.T) {
utxoID := &UTXOID{}
if err := utxoID.Verify(); err == nil {
t.Fatalf("Should have errored due to an empty utxo ID")
}
} | explode_data.jsonl/47033 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 74
} | [
2830,
3393,
1381,
55,
29805,
32627,
3522,
1155,
353,
8840,
836,
8,
341,
197,
332,
40822,
915,
1669,
609,
1381,
55,
29805,
31483,
743,
1848,
1669,
8621,
40822,
915,
54853,
2129,
1848,
621,
2092,
341,
197,
3244,
30762,
445,
14996,
614,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestPaymentControlFailsWithoutInFlight(t *testing.T) {
t.Parallel()
db, err := initDB()
if err != nil {
t.Fatalf("unable to init db: %v", err)
}
pControl := NewPaymentControl(db)
info, _, _, err := genInfo()
if err != nil {
t.Fatalf("unable to generate htlc message: %v", err)
}
// Calling Fail should return an error.
_, err = pControl.Fail(info.PaymentHash, FailureReasonNoRoute)
if err != ErrPaymentNotInitiated {
t.Fatalf("expected ErrPaymentNotInitiated, got %v", err)
}
assertPaymentStatus(t, pControl, info.PaymentHash, StatusUnknown)
} | explode_data.jsonl/73791 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 214
} | [
2830,
3393,
20188,
3273,
37,
6209,
26040,
641,
45305,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
20939,
11,
1848,
1669,
2930,
3506,
741,
743,
1848,
961,
2092,
341,
197,
3244,
30762,
445,
45928,
311,
2930,
2927,
25,
1018,
8... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestPostReceiveLineTwoLine(t *testing.T) {
var multiLineGitDiffs = "bab28f2c50267bbfda0c3ec93d5b1f19cc3a943d 2684f4499fc90bf92382dd0569d22e4300dfb1f2 refs/heads/master\nbab28f2c50267bbfda0c3ec93d5b1f19cc3a943d 2684f4499fc90bf92382dd0569d22e4300dfb1f2 refs/heads/else"
out := ParsePostReceiveLine(strings.NewReader(multiLineGitDiffs))
if len(out) != 2 {
t.Errorf("Parsing two line should return two entry")
return
}
verifyEntry(t, out[0], "bab28f2c50267bbfda0c3ec93d5b1f19cc3a943d", "2684f4499fc90bf92382dd0569d22e4300dfb1f2", "refs/heads/master")
verifyEntry(t, out[1], "bab28f2c50267bbfda0c3ec93d5b1f19cc3a943d", "2684f4499fc90bf92382dd0569d22e4300dfb1f2", "refs/heads/else")
} | explode_data.jsonl/12445 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 357
} | [
2830,
3393,
4133,
14742,
2460,
11613,
2460,
1155,
353,
8840,
836,
8,
341,
2405,
7299,
2460,
46562,
35,
18852,
284,
330,
47722,
17,
23,
69,
17,
66,
20,
15,
17,
21,
22,
6066,
69,
3235,
15,
66,
18,
757,
24,
18,
67,
20,
65,
16,
69... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func Test_PrintSummary(t *testing.T) {
r := runtime.Result{
Duration: 10,
Failed: 1,
TestResults: createFakeTestResults(),
}
var buf bytes.Buffer
writer := NewCliOutput(true)
writer.out = &buf
outResult := writer.PrintSummary(r)
assert.False(t, outResult)
output := buf.String()
assert.Contains(t, output, "✗ [192.168.0.1] 'Failed test', on property 'Stdout'")
assert.NotContains(t, output, "✓ [docker-host] Successful test")
} | explode_data.jsonl/45995 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 177
} | [
2830,
3393,
45788,
19237,
1155,
353,
8840,
836,
8,
341,
7000,
1669,
15592,
18456,
515,
197,
10957,
2017,
25,
262,
220,
16,
15,
345,
197,
197,
9408,
25,
414,
220,
16,
345,
197,
73866,
9801,
25,
1855,
52317,
2271,
9801,
3148,
197,
630... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestChallengeChecksumVerification(t *testing.T) {
t.Parallel()
challenge, _ := hex.DecodeString(testChallengeFromAcceptor)
var wt WrapToken
wt.Unmarshal(challenge, true)
challengeOk, cErr := wt.Verify(getSessionKey(), acceptorSeal)
assert.Nil(t, cErr, "Error occurred during checksum verification.")
assert.True(t, challengeOk, "Checksum verification failed.")
} | explode_data.jsonl/54231 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 131
} | [
2830,
3393,
62078,
73190,
62339,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
23049,
15832,
11,
716,
1669,
12371,
56372,
703,
8623,
62078,
3830,
11654,
15349,
340,
2405,
40473,
42187,
3323,
198,
6692,
83,
38097,
7520,
15832,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAddStringsPrefixes(t *testing.T) {
m := NewStringMatcher([]string{"Superman", "Superma", "Superm", "Super"})
m.AddString("Supe")
m.AddString("Sup")
hits := m.Match([]byte("The Man Of Steel: Superman"))
assert(t, len(hits) == 6)
assert(t, hits[0].Index == 5)
assert(t, hits[1].Index == 4)
assert(t, hits[2].Index == 3)
assert(t, hits[3].Index == 2)
assert(t, hits[4].Index == 1)
assert(t, hits[5].Index == 0)
hits = m.MatchThreadSafe([]byte("The Man Of Steel: Superman"))
assert(t, len(hits) == 6)
assert(t, hits[0].Index == 5)
assert(t, hits[1].Index == 4)
assert(t, hits[2].Index == 3)
assert(t, hits[3].Index == 2)
assert(t, hits[4].Index == 1)
assert(t, hits[5].Index == 0)
} | explode_data.jsonl/22655 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 300
} | [
2830,
3393,
2212,
20859,
14335,
288,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
1532,
703,
37554,
10556,
917,
4913,
19284,
1515,
497,
330,
19284,
1728,
497,
330,
19284,
76,
497,
330,
19284,
23625,
2109,
1904,
703,
445,
36459,
375,
1138,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetConversationInfo(t *testing.T) {
http.HandleFunc("/conversations.info", okChannelJsonHandler)
once.Do(startServer)
api := New("testing-token", OptionAPIURL("http://"+serverAddr+"/"))
channel, err := api.GetConversationInfo("CXXXXXXXX", false)
if err != nil {
t.Errorf("Unexpected error: %s", err)
return
}
if channel == nil {
t.Error("channel should not be nil")
return
}
} | explode_data.jsonl/78548 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 150
} | [
2830,
3393,
1949,
60313,
1731,
1155,
353,
8840,
836,
8,
341,
28080,
63623,
4283,
443,
72995,
5391,
497,
5394,
9629,
5014,
3050,
340,
197,
13184,
33596,
10639,
5475,
340,
54299,
1669,
1532,
445,
8840,
34841,
497,
6959,
7082,
3144,
445,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestMruInventoryMapStringer(t *testing.T) {
// Create a couple of fake inventory vectors to use in testing the mru
// inventory stringer code.
hash1 := &chainhash.Hash{0x01}
hash2 := &chainhash.Hash{0x02}
iv1 := wire.NewInvVect(wire.InvTypeBlock, hash1)
iv2 := wire.NewInvVect(wire.InvTypeBlock, hash2)
// Create new mru inventory map and add the inventory vectors.
mruInvMap := newMruInventoryMap(uint(2))
mruInvMap.Add(iv1)
mruInvMap.Add(iv2)
// Ensure the stringer gives the expected result. Since map iteration
// is not ordered, either entry could be first, so account for both
// cases.
wantStr1 := fmt.Sprintf("<%d>[%s, %s]", 2, *iv1, *iv2)
wantStr2 := fmt.Sprintf("<%d>[%s, %s]", 2, *iv2, *iv1)
gotStr := mruInvMap.String()
if gotStr != wantStr1 && gotStr != wantStr2 {
t.Fatalf("unexpected string representation - got %q, want %q "+
"or %q", gotStr, wantStr1, wantStr2)
}
} | explode_data.jsonl/74529 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 348
} | [
2830,
3393,
44,
2672,
22319,
2227,
703,
261,
1155,
353,
8840,
836,
8,
341,
197,
322,
4230,
264,
5625,
315,
12418,
15444,
22879,
311,
990,
304,
7497,
279,
296,
2672,
198,
197,
322,
15444,
914,
261,
2038,
624,
50333,
16,
1669,
609,
88... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAdmin_ReadOnlyWithAge(t *testing.T) {
ts, srv, teardown := startupT(t)
defer teardown()
c1 := store.Comment{Text: "test test #1", Locator: store.Locator{SiteID: "radio-t",
URL: "https://radio-t.com/blah"}, User: store.User{Name: "user1 name", ID: "user1"},
Timestamp: time.Date(2001, 1, 1, 1, 1, 1, 0, time.Local)}
_, err := srv.DataService.Create(c1)
assert.Nil(t, err)
info, err := srv.DataService.Info(store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, 10)
assert.Nil(t, err)
assert.True(t, info.ReadOnly, "ro by age")
// set post to read-only
req, err := http.NewRequest(http.MethodPut,
fmt.Sprintf("%s/api/v1/admin/readonly?site=radio-t&url=https://radio-t.com/blah&ro=1", ts.URL), nil)
assert.Nil(t, err)
requireAdminOnly(t, req)
resp, err := sendReq(t, req, adminUmputunToken)
require.NoError(t, err)
assert.Equal(t, 200, resp.StatusCode)
info, err = srv.DataService.Info(store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, 0)
assert.NoError(t, err)
assert.True(t, info.ReadOnly)
// reset post's read-only
req, err = http.NewRequest(http.MethodPut,
fmt.Sprintf("%s/api/v1/admin/readonly?site=radio-t&url=https://radio-t.com/blah&ro=0", ts.URL), nil)
assert.Nil(t, err)
resp, err = sendReq(t, req, adminUmputunToken)
require.NoError(t, err)
assert.Equal(t, 403, resp.StatusCode)
info, err = srv.DataService.Info(store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, 0)
assert.NoError(t, err)
assert.True(t, info.ReadOnly)
} | explode_data.jsonl/51830 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 652
} | [
2830,
3393,
7210,
62,
20914,
2354,
16749,
1155,
353,
8840,
836,
8,
341,
57441,
11,
43578,
11,
49304,
1669,
20567,
51,
1155,
340,
16867,
49304,
2822,
1444,
16,
1669,
3553,
56730,
90,
1178,
25,
330,
1944,
1273,
671,
16,
497,
98653,
25,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAuthMethodFallback(t *testing.T) {
var passwordCalled bool
config := &ClientConfig{
User: "testuser",
Auth: []AuthMethod{
PublicKeys(testSigners["rsa"]),
PasswordCallback(
func() (string, error) {
passwordCalled = true
return "WRONG", nil
}),
},
HostKeyCallback: InsecureIgnoreHostKey(),
}
if err := tryAuth(t, config); err != nil {
t.Fatalf("unable to dial remote side: %s", err)
}
if passwordCalled {
t.Errorf("password auth tried before public-key auth.")
}
} | explode_data.jsonl/6933 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 201
} | [
2830,
3393,
5087,
3523,
87206,
1155,
353,
8840,
836,
8,
341,
2405,
3552,
20960,
1807,
198,
25873,
1669,
609,
2959,
2648,
515,
197,
31672,
25,
330,
1944,
872,
756,
197,
197,
5087,
25,
3056,
5087,
3523,
515,
298,
73146,
8850,
8623,
7264... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSetHeaders(t *testing.T) {
headers := map[string]string{"Host": "guoyao.me", "Content-Type": "text/plain"}
req, err := NewRequest("GET", "guoyao.me", nil)
if err != nil {
t.Error(util.FormatTest("SetHeaders", err.Error(), "nil"))
}
req.Header = map[string][]string{
"Host": {"tocloud.org"},
"Accept-Encoding": {"gzip, deflate"},
}
req.SetHeaders(headers)
if len(req.Header) != 3 {
t.Error(util.FormatTest("SetHeaders", strconv.Itoa(len(req.Header)), strconv.Itoa(3)))
}
if len(req.Header["Host"]) != 1 {
t.Error(util.FormatTest("SetHeaders", strconv.Itoa(len(req.Header["Host"])), strconv.Itoa(1)))
}
} | explode_data.jsonl/27165 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 271
} | [
2830,
3393,
1649,
10574,
1155,
353,
8840,
836,
8,
341,
67378,
1669,
2415,
14032,
30953,
4913,
9296,
788,
330,
8717,
2253,
3441,
17326,
497,
330,
2762,
10804,
788,
330,
1318,
36971,
16707,
24395,
11,
1848,
1669,
1532,
1900,
445,
3806,
49... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestError(t *testing.T) {
msg := "This is an error"
// redirect stdout temporarily to catch the GitHub annotation output
origStdout, outC := redirectStdout(t)
Error(&testing.T{}, msg)
out := restoreStdout(outC, origStdout)
if !strings.HasSuffix(strings.TrimSpace(out), "testutil/annotations_test.go,line=48:: - This is an error") {
t.Fatalf("unexpected stdout content: %s", out)
}
} | explode_data.jsonl/73182 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 144
} | [
2830,
3393,
1454,
1155,
353,
8840,
836,
8,
341,
21169,
1669,
330,
1986,
374,
458,
1465,
1837,
197,
322,
6423,
20075,
27092,
311,
2287,
279,
32095,
21223,
2550,
198,
197,
4670,
22748,
411,
11,
700,
34,
1669,
6423,
22748,
411,
1155,
340... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestMatcherCreationFunction(t *testing.T) {
for i, currFn := range []func(matchers ...matcher.Matcher) matcher.Matcher{
matcher.Any,
matcher.All,
} {
m := currFn()
assert.False(t, m.Match("foo"), "Case %d: unexpected match for empty call", i)
m = currFn(nil)
assert.False(t, m.Match("foo"), "Case %d: unexpected match for nil call", i)
}
} | explode_data.jsonl/81365 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 147
} | [
2830,
3393,
37554,
32701,
5152,
1155,
353,
8840,
836,
8,
341,
2023,
600,
11,
9804,
24911,
1669,
2088,
3056,
2830,
25401,
388,
2503,
70826,
76452,
8,
36052,
76452,
515,
197,
2109,
28058,
13311,
345,
197,
2109,
28058,
16764,
345,
197,
92,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFormat_AutoNumber(t *testing.T) {
str := "some test {} and {test1} {} with {test2}"
got := Format(str, map[string]interface{}{"test1": "abc", "test2": 123}, "position1", []string{"sliceElem1", "sliceElem2"})
want := "some test position1 and abc [sliceElem1 sliceElem2] with 123"
assert.Equal(t, want, got)
} | explode_data.jsonl/54320 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 120
} | [
2830,
3393,
4061,
1566,
1535,
2833,
1155,
353,
8840,
836,
8,
341,
11355,
1669,
330,
14689,
1273,
4687,
323,
314,
1944,
16,
92,
4687,
448,
314,
1944,
17,
11195,
3174,
354,
1669,
15042,
4199,
11,
2415,
14032,
31344,
6257,
4913,
1944,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestApplicationContext_DependsOn(t *testing.T) {
t.Run("random", func(t *testing.T) {
c := gs.New()
c.Object(&BeanZero{5})
c.Object(new(BeanOne))
c.Object(new(BeanFour))
err := c.Refresh()
assert.Nil(t, err)
})
t.Run("dependsOn", func(t *testing.T) {
dependsOn := []env.BeanSelector{
(*BeanOne)(nil), // 通过类型定义查找
"github.com/go-spring/spring-core/gs_test/gs_test.BeanZero:BeanZero",
}
c := gs.New()
c.Object(&BeanZero{5})
c.Object(new(BeanOne))
c.Object(new(BeanFour)).DependsOn(dependsOn...)
err := c.Refresh()
assert.Nil(t, err)
})
} | explode_data.jsonl/17407 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 294
} | [
2830,
3393,
19736,
90680,
1412,
1925,
1155,
353,
8840,
836,
8,
1476,
3244,
16708,
445,
11463,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
1444,
1669,
28081,
7121,
741,
197,
1444,
8348,
2099,
10437,
17999,
90,
20,
3518,
197,
1444,
83... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParams(t *testing.T) {
p := NewParams()
p.Append(ParamFunc(func(r *http.Request) (string, bool) {
return "test", true
}))
s, ok := p.GetParam(nil)
if s != "test" || ok != true {
t.Fatal(s)
}
p.Append(ParamFunc(func(r *http.Request) (string, bool) {
return "test2", true
}))
s, ok = p.GetParam(nil)
if s != "test"+cache.KeyPrefix+"test2" || ok != true {
t.Fatal(s)
}
p.Append(ParamFunc(func(r *http.Request) (string, bool) {
return "", false
}))
s, ok = p.GetParam(nil)
if s != "" || ok != false {
t.Fatal(s)
}
} | explode_data.jsonl/29319 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 259
} | [
2830,
3393,
4870,
1155,
353,
8840,
836,
8,
1476,
3223,
1669,
1532,
4870,
741,
3223,
8982,
7,
2001,
9626,
18552,
2601,
353,
1254,
9659,
8,
320,
917,
11,
1807,
8,
341,
197,
853,
330,
1944,
497,
830,
198,
197,
44194,
1903,
11,
5394,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestArrayDeclConforms(t *testing.T) {
two := 2
checkConforms(t,
context{},
&ArrayDecl{
schema: testSchema(t),
typ: fidlir.Type{
Kind: fidlir.ArrayType,
ElementCount: &two,
ElementType: &fidlir.Type{
Kind: fidlir.PrimitiveType,
PrimitiveSubtype: fidlir.Uint8,
},
},
},
[]conformTest{
conformOk{[]interface{}{uint64(1), uint64(2)}},
conformFail{[]interface{}{}, "expecting 2 elements"},
conformFail{[]interface{}{uint64(1)}, "expecting 2 elements"},
conformFail{[]interface{}{uint64(1), uint64(1), uint64(1)}, "expecting 2 elements"},
conformFail{[]interface{}{"a", "b"}, "[0]: expecting int64 or uint64"},
conformFail{[]interface{}{nil, nil}, "[0]: expecting int64 or uint64"},
},
)
} | explode_data.jsonl/21406 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 347
} | [
2830,
3393,
1857,
21629,
1109,
9807,
1155,
353,
8840,
836,
8,
341,
3244,
1126,
1669,
220,
17,
198,
25157,
1109,
9807,
1155,
345,
197,
28413,
38837,
197,
197,
5,
1857,
21629,
515,
298,
1903,
3416,
25,
1273,
8632,
1155,
1326,
298,
25314... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_archivedWorkflowServer(t *testing.T) {
repo := &mocks.WorkflowArchive{}
kubeClient := &kubefake.Clientset{}
wfClient := &argofake.Clientset{}
w := NewWorkflowArchiveServer(repo)
allowed := true
kubeClient.AddReactor("create", "selfsubjectaccessreviews", func(action k8stesting.Action) (handled bool, ret runtime.Object, err error) {
return true, &authorizationv1.SelfSubjectAccessReview{
Status: authorizationv1.SubjectAccessReviewStatus{Allowed: allowed},
}, nil
})
kubeClient.AddReactor("create", "selfsubjectrulesreviews", func(action k8stesting.Action) (handled bool, ret runtime.Object, err error) {
var rules []authorizationv1.ResourceRule
if allowed {
rules = append(rules, authorizationv1.ResourceRule{})
}
return true, &authorizationv1.SelfSubjectRulesReview{
Status: authorizationv1.SubjectRulesReviewStatus{
ResourceRules: rules,
},
}, nil
})
// two pages of results for limit 1
repo.On("ListWorkflows", "", 1, 0).Return(wfv1.Workflows{{}}, nil)
repo.On("ListWorkflows", "", 1, 1).Return(wfv1.Workflows{}, nil)
repo.On("GetWorkflow", "").Return(nil, nil)
repo.On("GetWorkflow", "my-uid").Return(&wfv1.Workflow{
ObjectMeta: metav1.ObjectMeta{Name: "my-name"},
Spec: wfv1.WorkflowSpec{
Entrypoint: "my-entrypoint",
Templates: []wfv1.Template{
{Name: "my-entrypoint", Container: &apiv1.Container{}},
},
},
}, nil)
wfClient.AddReactor("create", "workflows", func(action k8stesting.Action) (handled bool, ret runtime.Object, err error) {
return true, &wfv1.Workflow{
ObjectMeta: metav1.ObjectMeta{Name: "my-name-resubmitted"},
}, nil
})
repo.On("DeleteWorkflow", "my-uid").Return(nil)
ctx := context.WithValue(context.WithValue(context.TODO(), auth.WfKey, wfClient), auth.KubeKey, kubeClient)
t.Run("ListArchivedWorkflows", func(t *testing.T) {
allowed = false
resp, err := w.ListArchivedWorkflows(ctx, &ListArchivedWorkflowsRequest{ListOptions: &metav1.ListOptions{Limit: 1}})
if assert.NoError(t, err) {
assert.Len(t, resp.Items, 0)
}
allowed = true
resp, err = w.ListArchivedWorkflows(ctx, &ListArchivedWorkflowsRequest{ListOptions: &metav1.ListOptions{Limit: 1}})
if assert.NoError(t, err) {
assert.Len(t, resp.Items, 1)
assert.Equal(t, "1", resp.Continue)
}
resp, err = w.ListArchivedWorkflows(ctx, &ListArchivedWorkflowsRequest{ListOptions: &metav1.ListOptions{Continue: "1", Limit: 1}})
if assert.NoError(t, err) {
assert.Len(t, resp.Items, 0)
assert.Empty(t, resp.Continue)
}
})
t.Run("GetArchivedWorkflow", func(t *testing.T) {
allowed = false
_, err := w.GetArchivedWorkflow(ctx, &GetArchivedWorkflowRequest{Uid: "my-uid"})
assert.Equal(t, err, status.Error(codes.PermissionDenied, "permission denied"))
allowed = true
_, err = w.GetArchivedWorkflow(ctx, &GetArchivedWorkflowRequest{})
assert.Equal(t, err, status.Error(codes.NotFound, "not found"))
wf, err := w.GetArchivedWorkflow(ctx, &GetArchivedWorkflowRequest{Uid: "my-uid"})
assert.NoError(t, err)
assert.NotNil(t, wf)
})
t.Run("DeleteArchivedWorkflow", func(t *testing.T) {
allowed = false
_, err := w.DeleteArchivedWorkflow(ctx, &DeleteArchivedWorkflowRequest{Uid: "my-uid"})
assert.Equal(t, err, status.Error(codes.PermissionDenied, "permission denied"))
allowed = true
_, err = w.DeleteArchivedWorkflow(ctx, &DeleteArchivedWorkflowRequest{Uid: "my-uid"})
assert.NoError(t, err)
})
} | explode_data.jsonl/17081 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1326
} | [
2830,
3393,
34330,
2221,
62768,
5475,
1155,
353,
8840,
836,
8,
341,
17200,
5368,
1669,
609,
16712,
82,
28748,
4965,
42502,
16094,
16463,
3760,
2959,
1669,
609,
74,
392,
823,
726,
11716,
746,
16094,
6692,
69,
2959,
1669,
609,
858,
1055,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestParseReader exercises the parseReader config factory across its
// failure paths (unknown plugin, empty config) and its success path
// (a real fixture section yielding a reader.DataReader).
func TestParseReader(t *testing.T) {
t.Parallel()
v := viper.New()
log := tools.DiscardLogger()
v.SetConfigType("yaml")
// Start with an empty config so the plugin lookups below fail.
// NOTE(review): ReadConfig's error is ignored here and again further down —
// presumably deliberate in a test, but worth confirming.
v.ReadConfig(bytes.NewBuffer([]byte("")))
// An unknown plugin name must surface a NotSupportedError that mentions it.
_, err := parseReader(v, log, "non_existence_plugin", "readers.reader1")
if _, ok := errors.Cause(err).(NotSupportedError); !ok {
t.Errorf("err.(NotSupportedError) = (%v); want (NotSupportedError)", err)
}
if !strings.Contains(err.Error(), "non_existence_plugin") {
t.Errorf("want (non_existence_plugin) in (%s)", err)
}
// Known plugin types still fail while the config body is empty.
_, err = parseReader(v, log, "expvar", "readers.reader1")
if errors.Cause(err) == nil {
t.Error("err = (nil); want (error)")
}
_, err = parseReader(v, log, "self", "readers.reader1")
if errors.Cause(err) == nil {
t.Error("err = (nil); want (error)")
}
// Load a real fixture section; parsing an expvar reader should now succeed.
input, err := FixtureWithSection("various.txt", "ParseReader")
if err != nil {
t.Fatalf("error getting section: %v", err)
}
v.ReadConfig(input.Body)
c, err := parseReader(v, log, "expvar", "reader1")
if err != nil {
t.Errorf("err = (%v); want (nil)", err)
}
// The parsed value must satisfy the reader.DataReader interface.
if _, ok := c.(reader.DataReader); !ok {
t.Errorf("want (reader.DataReader) type, got (%v)", c)
}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 456
} | [
2830,
3393,
14463,
5062,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
5195,
1669,
95132,
7121,
741,
6725,
1669,
7375,
909,
47560,
7395,
741,
5195,
4202,
2648,
929,
445,
41466,
5130,
5195,
6503,
2648,
23158,
7121,
4095,
10556,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
// TestDownloadJobConcurrency starts two download jobs — the first limited to
// a single connection — and polls their status to verify that (a) a job never
// exceeds its task limit and (b) the two jobs eventually run concurrently.
// Long-running integration test; skipped unless long tests are enabled.
func TestDownloadJobConcurrency(t *testing.T) {
tutils.CheckSkip(t, tutils.SkipTestArgs{Long: true})
var (
proxyURL = tutils.RandomProxyURL(t)
baseParams = tutils.BaseAPIParams(proxyURL)
bck = cmn.Bck{
Name: cos.RandString(10),
Provider: cmn.ProviderAIS,
}
template = "https://storage.googleapis.com/minikube/iso/minikube-v0.{18..35}.0.iso"
)
tutils.CreateBucketWithCleanup(t, proxyURL, bck, nil)
smap, err := api.GetClusterMap(baseParams)
tassert.CheckFatal(t, err)
tlog.Logln("Starting first download...")
// First job: Connections limit of 1 caps its per-target task count.
id1, err := api.DownloadWithParam(baseParams, downloader.DlTypeRange, downloader.DlRangeBody{
DlBase: downloader.DlBase{
Bck: bck,
Description: generateDownloadDesc(),
Limits: downloader.DlLimits{
Connections: 1,
BytesPerHour: 100 * cos.MiB,
},
},
Template: template,
})
tassert.CheckFatal(t, err)
t.Cleanup(func() {
abortDownload(t, id1)
})
tlog.Logln("Starting second download...")
// Second job: no connection limit, only a bandwidth cap.
id2, err := api.DownloadWithParam(baseParams, downloader.DlTypeRange, downloader.DlRangeBody{
DlBase: downloader.DlBase{
Bck: bck,
Description: generateDownloadDesc(),
Limits: downloader.DlLimits{
BytesPerHour: 100 * cos.MiB,
},
},
Template: template,
})
tassert.CheckFatal(t, err)
t.Cleanup(func() {
abortDownload(t, id2)
})
tlog.Logln("Waiting for checks...")
var (
concurrentJobs bool
resp1, resp2 downloader.DlStatusResp
)
// Poll up to 10 times, one second apart.
for i := 0; i < 10; i++ {
resp1, err = api.DownloadStatus(baseParams, id1)
tassert.CheckFatal(t, err)
// Expect that number of tasks never exceeds the defined limit.
targetCnt := smap.CountActiveTargets()
tassert.Errorf(
t, len(resp1.CurrentTasks) <= targetCnt,
"number of tasks mismatch (expected at most: %d, got: %d)",
targetCnt, len(resp1.CurrentTasks),
)
// Expect that at some point the second job will be run concurrently.
resp2, err = api.DownloadStatus(baseParams, id2)
tassert.CheckFatal(t, err)
if len(resp2.CurrentTasks) > 0 && len(resp1.CurrentTasks) > 0 {
concurrentJobs = true
}
time.Sleep(time.Second)
}
tassert.Errorf(t, concurrentJobs, "expected jobs to run concurrently")
tlog.Logln("Done waiting")
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 923
} | [
2830,
3393,
11377,
12245,
79611,
1155,
353,
8840,
836,
8,
341,
3244,
6031,
10600,
35134,
1155,
11,
259,
6031,
57776,
2271,
4117,
90,
6583,
25,
830,
8824,
2405,
2399,
197,
197,
22803,
3144,
256,
284,
259,
6031,
26709,
16219,
3144,
1155,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestARN_IsUserId( t *testing.T ) {
a := &ARN{}
a.Parse( "12345shdfgfhsdfr" )
if a.IsAnonymous() {
t.Errorf( "Expected !IsAnonymous got %v for %s", a.IsAnonymous(), a )
}
if !a.IsUserId() {
t.Errorf( "Expected IsUserId got %v for %s", a.IsUserId(), a )
}
if a.IsNil() {
t.Errorf( "Expected !IsNil got %v for %s", a.IsNil(), a )
}
} | explode_data.jsonl/61996 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 175
} | [
2830,
3393,
9051,
31879,
13504,
7,
259,
353,
8840,
836,
873,
341,
220,
264,
1669,
609,
9051,
16094,
220,
264,
8937,
7,
330,
16,
17,
18,
19,
20,
927,
2940,
45124,
4997,
67,
1626,
1,
1727,
220,
421,
264,
4506,
32684,
368,
341,
262,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestAddTokensValidatorBonded checks that delegating 10 tokens to a bonded
// validator issues 10 shares (1:1 exchange rate on first delegation) and
// credits the validator's bonded token balance.
func TestAddTokensValidatorBonded(t *testing.T) {
validator := newValidator(t, valAddr1, pk1)
validator = validator.UpdateStatus(types.Bonded)
validator, delShares := validator.AddTokensFromDel(sdk.NewInt(10))
// Shares issued must equal tokens added when no prior shares exist.
assert.True(sdk.DecEq(t, sdk.NewDec(10), delShares))
assert.True(sdk.IntEq(t, sdk.NewInt(10), validator.BondedTokens()))
assert.True(sdk.DecEq(t, sdk.NewDec(10), validator.DelegatorShares))
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 169
} | [
2830,
3393,
2212,
29300,
14256,
89844,
291,
1155,
353,
8840,
836,
8,
341,
197,
16112,
1669,
501,
14256,
1155,
11,
1044,
13986,
16,
11,
22458,
16,
340,
197,
16112,
284,
22935,
16689,
2522,
52613,
1785,
2111,
291,
340,
197,
16112,
11,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestExtensionDescsWithMissingExtensions verifies proto.ExtensionDescs on a
// message that carries one registered extension (E_Ext_More) and one
// unregistered extension (a locally built descriptor): after a marshal /
// unmarshal round trip, the unregistered extension is reported as a
// descriptor containing only its field number.
func TestExtensionDescsWithMissingExtensions(t *testing.T) {
msg := &pb.MyMessage{Count: proto.Int32(0)}
extdesc1 := pb.E_Ext_More
// No extensions set yet: ExtensionDescs must return an empty list.
if descs, err := proto.ExtensionDescs(msg); len(descs) != 0 || err != nil {
t.Errorf("proto.ExtensionDescs: got %d descs, error %v; want 0, nil", len(descs), err)
}
ext1 := &pb.Ext{}
if err := proto.SetExtension(msg, extdesc1, ext1); err != nil {
t.Fatalf("Could not set ext1: %s", err)
}
// Hand-built descriptor that is not registered with the proto runtime, so
// after unmarshal it can only be recovered by field number.
extdesc2 := &proto.ExtensionDesc{
ExtendedType: (*pb.MyMessage)(nil),
ExtensionType: (*bool)(nil),
Field: 123456789,
Name: "a.b",
Tag: "varint,123456789,opt",
}
ext2 := proto.Bool(false)
if err := proto.SetExtension(msg, extdesc2, ext2); err != nil {
t.Fatalf("Could not set ext2: %s", err)
}
// Round trip: the unregistered extension survives only as raw bytes.
b, err := proto.Marshal(msg)
if err != nil {
t.Fatalf("Could not marshal msg: %v", err)
}
if err = proto.Unmarshal(b, msg); err != nil {
t.Fatalf("Could not unmarshal into msg: %v", err)
}
descs, err := proto.ExtensionDescs(msg)
if err != nil {
t.Fatalf("proto.ExtensionDescs: got error %v", err)
}
sortExtDescs(descs)
// The unknown extension appears with only its Field populated.
wantDescs := []*proto.ExtensionDesc{extdesc1, {Field: extdesc2.Field}}
if !reflect.DeepEqual(descs, wantDescs) {
t.Errorf("proto.ExtensionDescs(msg) sorted extension ids: got %+v, want %+v", descs, wantDescs)
}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 580
} | [
2830,
3393,
12049,
11065,
16056,
25080,
31282,
1155,
353,
8840,
836,
8,
341,
21169,
1669,
609,
16650,
27054,
2052,
90,
2507,
25,
18433,
7371,
18,
17,
7,
15,
10569,
95450,
8614,
16,
1669,
17310,
5142,
62,
6756,
1245,
460,
198,
743,
939... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
// TestNewPollerFail1 verifies that when the poller's custom error handler
// rejects the terminal response, PollUntilDone surfaces that error and
// returns a nil response.
func TestNewPollerFail1(t *testing.T) {
srv, close := mock.NewServer()
srv.AppendResponse(mock.WithStatusCode(http.StatusAccepted))
srv.AppendResponse(mock.WithStatusCode(http.StatusConflict)) // terminal
defer close()
pl := pipeline.NewPipeline(srv)
firstResp := &http.Response{
StatusCode: http.StatusAccepted,
}
// The error handler unconditionally fails, simulating an un-parseable or
// rejected terminal response.
p := NewPoller(&fakePoller{Ep: srv.URL()}, firstResp, pl, func(*http.Response) error {
return errors.New("failed")
})
resp, err := p.PollUntilDone(context.Background(), time.Second, nil)
if err == nil {
t.Fatal("unexpected nil error")
} else if s := err.Error(); s != "failed" {
t.Fatalf("unexpected error %s", s)
}
// On failure no response may leak through.
if resp != nil {
t.Fatal("expected nil response")
}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 266
} | [
2830,
3393,
3564,
49207,
261,
19524,
16,
1155,
353,
8840,
836,
8,
341,
1903,
10553,
11,
3265,
1669,
7860,
7121,
5475,
741,
1903,
10553,
8982,
2582,
30389,
26124,
15872,
19886,
10538,
65906,
1171,
1903,
10553,
8982,
2582,
30389,
26124,
158... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestStorageBoxClient_Info fetches storage box 1234 from the stubbed test
// server and checks that every field of the response is decoded into the
// expected StorageBox value.
func TestStorageBoxClient_Info(t *testing.T) {
client, s := testServer(t)
defer s.Close()
b, err := client.StorageBox().Info(1234)
require.NoError(t, err)
// Expected value mirrors the canned fixture served by testServer.
expected := &StorageBox{
ID: 1234,
Login: "u12345",
Name: "",
Product: "BX10 - inclusive",
Cancelled: false,
Locked: false,
Location: "FSN1",
LinkedServer: 5678,
PaidUntil: "2021-12-31",
DiskQuota: 102400,
DiskUsage: 0,
DiskUsageData: 0,
DiskUsageSnapshots: 0,
Webdav: false,
Samba: false,
SSH: false,
ExternalReachability: false,
ZFS: false,
Server: "u12345.your-storagebox.de",
HostSystem: "FSN1-BX123",
}
require.Equal(t, expected, b)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 524
} | [
2830,
3393,
5793,
1611,
2959,
39624,
1155,
353,
8840,
836,
8,
341,
25291,
11,
274,
1669,
1273,
5475,
1155,
340,
16867,
274,
10421,
2822,
2233,
11,
1848,
1669,
2943,
43771,
1611,
1005,
1731,
7,
16,
17,
18,
19,
340,
17957,
35699,
1155,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestCreateEmoji exercises the emoji-creation API end to end: the feature
// flag, valid uploads in each image format (with resize verification),
// duplicate names, oversize inputs, non-image data, impersonation, and
// role-based permissions. Config and role state are restored via defers.
func TestCreateEmoji(t *testing.T) {
th := Setup(t).InitBasic()
defer th.TearDown()
Client := th.Client
// Snapshot and restore the EnableCustomEmoji setting around the test.
EnableCustomEmoji := *th.App.Config().ServiceSettings.EnableCustomEmoji
defer func() {
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableCustomEmoji = EnableCustomEmoji })
}()
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableCustomEmoji = false })
defaultRolePermissions := th.SaveDefaultRolePermissions()
defer func() {
th.RestoreDefaultRolePermissions(defaultRolePermissions)
}()
// constants to be used along with checkEmojiFile
emojiWidth := app.MaxEmojiWidth
emojiHeight := app.MaxEmojiHeight * 2
// check that emoji gets resized correctly, respecting proportions, and is of expected type
checkEmojiFile := func(id, expectedImageType string) {
path, _ := fileutils.FindDir("data")
file, fileErr := os.Open(filepath.Join(path, "/emoji/"+id+"/image"))
require.NoError(t, fileErr)
defer file.Close()
config, imageType, err := image.DecodeConfig(file)
require.NoError(t, err)
require.Equal(t, expectedImageType, imageType)
// Input is MaxWidth x 2*MaxHeight, so proportional resize halves both.
require.Equal(t, emojiWidth/2, config.Width)
require.Equal(t, emojiHeight/2, config.Height)
}
emoji := &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
// try to create an emoji when they're disabled
_, resp := Client.CreateEmoji(emoji, utils.CreateTestGif(t, 10, 10), "image.gif")
CheckNotImplementedStatus(t, resp)
// enable emoji creation for next cases
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableCustomEmoji = true })
// try to create a valid gif emoji when they're enabled
newEmoji, resp := Client.CreateEmoji(emoji, utils.CreateTestGif(t, emojiWidth, emojiHeight), "image.gif")
CheckNoError(t, resp)
require.Equal(t, newEmoji.Name, emoji.Name, "create with wrong name")
checkEmojiFile(newEmoji.Id, "gif")
// try to create an emoji with a duplicate name
emoji2 := &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: newEmoji.Name,
}
_, resp = Client.CreateEmoji(emoji2, utils.CreateTestGif(t, 10, 10), "image.gif")
CheckBadRequestStatus(t, resp)
CheckErrorMessage(t, resp, "api.emoji.create.duplicate.app_error")
// try to create a valid animated gif emoji
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
newEmoji, resp = Client.CreateEmoji(emoji, utils.CreateTestAnimatedGif(t, emojiWidth, emojiHeight, 10), "image.gif")
CheckNoError(t, resp)
require.Equal(t, newEmoji.Name, emoji.Name, "create with wrong name")
checkEmojiFile(newEmoji.Id, "gif")
// try to create a valid jpeg emoji
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
newEmoji, resp = Client.CreateEmoji(emoji, utils.CreateTestJpeg(t, emojiWidth, emojiHeight), "image.jpeg")
CheckNoError(t, resp)
require.Equal(t, newEmoji.Name, emoji.Name, "create with wrong name")
checkEmojiFile(newEmoji.Id, "png") // emoji must be converted from jpeg to png
// try to create a valid png emoji
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
newEmoji, resp = Client.CreateEmoji(emoji, utils.CreateTestPng(t, emojiWidth, emojiHeight), "image.png")
CheckNoError(t, resp)
require.Equal(t, newEmoji.Name, emoji.Name, "create with wrong name")
checkEmojiFile(newEmoji.Id, "png")
// try to create an emoji that's too wide
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
newEmoji, resp = Client.CreateEmoji(emoji, utils.CreateTestGif(t, 1000, 10), "image.gif")
CheckNoError(t, resp)
require.Equal(t, newEmoji.Name, emoji.Name, "create with wrong name")
// try to create an emoji that's too wide
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
// NOTE(review): other CreateTestGif calls pass (width, height); here the
// oversized value is in the second slot while the message says "too wide" —
// possible width/height argument swap. Confirm CreateTestGif's signature.
newEmoji, resp = Client.CreateEmoji(emoji, utils.CreateTestGif(t, 10, app.MaxEmojiOriginalWidth+1), "image.gif")
require.NotNil(t, resp.Error, "should fail - emoji is too wide")
// try to create an emoji that's too tall
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
newEmoji, resp = Client.CreateEmoji(emoji, utils.CreateTestGif(t, app.MaxEmojiOriginalHeight+1, 10), "image.gif")
require.NotNil(t, resp.Error, "should fail - emoji is too tall")
// try to create an emoji that's too large
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
_, resp = Client.CreateEmoji(emoji, utils.CreateTestAnimatedGif(t, 100, 100, 10000), "image.gif")
require.NotNil(t, resp.Error, "should fail - emoji is too big")
// try to create an emoji with data that isn't an image
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
_, resp = Client.CreateEmoji(emoji, make([]byte, 100), "image.gif")
CheckBadRequestStatus(t, resp)
CheckErrorMessage(t, resp, "api.emoji.upload.image.app_error")
// try to create an emoji as another user
emoji = &model.Emoji{
CreatorId: th.BasicUser2.Id,
Name: model.NewId(),
}
_, resp = Client.CreateEmoji(emoji, utils.CreateTestGif(t, 10, 10), "image.gif")
CheckForbiddenStatus(t, resp)
// try to create an emoji without permissions
th.RemovePermissionFromRole(model.PERMISSION_CREATE_EMOJIS.Id, model.SYSTEM_USER_ROLE_ID)
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
_, resp = Client.CreateEmoji(emoji, utils.CreateTestGif(t, 10, 10), "image.gif")
CheckForbiddenStatus(t, resp)
// create an emoji with permissions in one team
th.AddPermissionToRole(model.PERMISSION_CREATE_EMOJIS.Id, model.TEAM_USER_ROLE_ID)
emoji = &model.Emoji{
CreatorId: th.BasicUser.Id,
Name: model.NewId(),
}
_, resp = Client.CreateEmoji(emoji, utils.CreateTestGif(t, 10, 10), "image.gif")
CheckNoError(t, resp)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2274
} | [
2830,
3393,
4021,
92731,
1155,
353,
8840,
836,
8,
341,
70479,
1669,
18626,
1155,
568,
3803,
15944,
741,
16867,
270,
836,
682,
4454,
741,
71724,
1669,
270,
11716,
271,
197,
11084,
10268,
92731,
1669,
353,
339,
5105,
10753,
1005,
1860,
60... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRequestV4SigningFlag(t *testing.T) {
testCases := []struct {
name string
bp string
expected string
}{
{
name: "default",
bp: `
android_app {
name: "foo",
srcs: ["a.java"],
sdk_version: "current",
}
`,
expected: "",
},
{
name: "default",
bp: `
android_app {
name: "foo",
srcs: ["a.java"],
sdk_version: "current",
v4_signature: false,
}
`,
expected: "",
},
{
name: "module certificate property",
bp: `
android_app {
name: "foo",
srcs: ["a.java"],
sdk_version: "current",
v4_signature: true,
}
`,
expected: "--enable-v4",
},
}
for _, test := range testCases {
t.Run(test.name, func(t *testing.T) {
result := android.GroupFixturePreparers(
PrepareForTestWithJavaDefaultModules,
).RunTestWithBp(t, test.bp)
foo := result.ModuleForTests("foo", "android_common")
signapk := foo.Output("foo.apk")
signFlags := signapk.Args["flags"]
android.AssertStringEquals(t, "signing flags", test.expected, signFlags)
})
}
} | explode_data.jsonl/58494 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 529
} | [
2830,
3393,
1900,
53,
19,
93358,
12135,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
2233,
79,
981,
914,
198,
197,
42400,
914,
198,
197,
59403,
197,
197,
515,
298,
11609,
25,
330... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_APHash(t *testing.T) {
var x uint32 = 3998202516
gtest.C(t, func(t *gtest.T) {
j := ghash.APHash(strBasic)
t.Assert(j, x)
})
} | explode_data.jsonl/60240 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 75
} | [
2830,
3393,
21899,
6370,
1155,
353,
8840,
836,
8,
341,
2405,
856,
2622,
18,
17,
284,
220,
18,
24,
24,
23,
17,
15,
17,
20,
16,
21,
198,
3174,
1944,
727,
1155,
11,
2915,
1155,
353,
82038,
836,
8,
341,
197,
12428,
1669,
342,
8296,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
// TestValidateUpdate checks the PodResourceAllocation admission plugin's
// Validate path with the InPlacePodVerticalScaling feature gate enabled:
// only the node that runs the pod (system:node:foonode) may update the
// pod's allocated resources; other nodes and system:admin are rejected.
func TestValidateUpdate(t *testing.T) {
defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.InPlacePodVerticalScaling, true)()
namespace := "test"
handler := NewPodResourceAllocation()
resources := api.ResourceList{
api.ResourceCPU: resource.MustParse("1"),
api.ResourceMemory: resource.MustParse("1Gi"),
}
// Old pod bound to "foonode"; the update only changes ResourcesAllocated.
oldPod := &api.Pod{
ObjectMeta: metav1.ObjectMeta{Name: "pod1", Namespace: namespace},
Spec: api.PodSpec{
NodeName: "foonode",
Containers: []api.Container{
{
Name: "c1",
Image: "image",
Resources: api.ResourceRequirements{Requests: resources, Limits: resources},
},
},
},
}
newPod := oldPod.DeepCopy()
newPod.Spec.Containers[0].ResourcesAllocated = resources
tests := []struct {
name string
userInfo user.Info
expectError bool
}{
{
name: "update existing pod - system:node user",
userInfo: &user.DefaultInfo{Name: "system:node:foonode", Groups: []string{user.AllAuthenticated, user.NodesGroup}},
expectError: false,
},
{
name: "update existing pod not owned by node - system:node user",
userInfo: &user.DefaultInfo{Name: "system:node:barnode", Groups: []string{user.AllAuthenticated, user.NodesGroup}},
expectError: true,
},
{
name: "update existing pod - system:admin user",
userInfo: &user.DefaultInfo{Name: "system:admin", Groups: []string{user.AllAuthenticated, user.SystemPrivilegedGroup}},
expectError: true,
},
//TODO: more unit tests and negative tests
//TODO: add QoS immutable tests
}
for _, tc := range tests {
err := handler.Validate(admission.NewAttributesRecord(newPod, oldPod, api.Kind("Pod").WithVersion("version"),
newPod.Tenant, newPod.Namespace, newPod.Name, api.Resource("pods").WithVersion("version"), "",
admission.Update, nil, false, tc.userInfo), nil)
if tc.expectError && err == nil {
t.Fatal(fmt.Sprintf("Test: %s - missing expected error", tc.name))
}
if !tc.expectError && err != nil {
t.Fatal(fmt.Sprintf("Test: %s - received unexpected error %+v", tc.name, err))
}
}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 821
} | [
2830,
3393,
17926,
4289,
1155,
353,
8840,
836,
8,
341,
16867,
4565,
70,
266,
57824,
287,
4202,
13859,
42318,
16014,
2271,
1155,
11,
4094,
12753,
13275,
13859,
42318,
11,
4419,
5337,
17371,
23527,
18612,
59684,
11,
830,
8,
741,
56623,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
// TestDebugInfoLifecycle wires two LSP editors (one through a forwarder, one
// directly to the TCP backend) into a shared stream server and asserts that
// the debug instances track client/session/server counts correctly, both
// after connecting and after one connection is closed.
func TestDebugInfoLifecycle(t *testing.T) {
sb, err := fake.NewSandbox(&fake.SandboxConfig{Files: fake.UnpackTxt(exampleProgram)})
if err != nil {
t.Fatal(err)
}
defer func() {
if err := sb.Close(); err != nil {
// TODO(golang/go#38490): we can't currently make this an error because
// it fails on Windows: the workspace directory is still locked by a
// separate Go process.
// Once we have a reliable way to wait for proper shutdown, make this an
// error.
t.Logf("closing workspace failed: %v", err)
}
}()
baseCtx, cancel := context.WithCancel(context.Background())
defer cancel()
// Separate debug instances let us observe client- and server-side state.
clientCtx := debug.WithInstance(baseCtx, "", "")
serverCtx := debug.WithInstance(baseCtx, "", "")
cache := cache.New(nil)
ss := NewStreamServer(cache, false)
tsBackend := servertest.NewTCPServer(serverCtx, ss, nil)
forwarder, err := NewForwarder("tcp;"+tsBackend.Addr, nil)
if err != nil {
t.Fatal(err)
}
tsForwarder := servertest.NewPipeServer(clientCtx, forwarder, nil)
// Editor 1 connects through the forwarder; editor 2 connects directly.
conn1 := tsForwarder.Connect(clientCtx)
ed1, err := fake.NewEditor(sb, fake.EditorConfig{}).Connect(clientCtx, conn1, fake.ClientHooks{})
if err != nil {
t.Fatal(err)
}
defer ed1.Close(clientCtx)
conn2 := tsBackend.Connect(baseCtx)
ed2, err := fake.NewEditor(sb, fake.EditorConfig{}).Connect(baseCtx, conn2, fake.ClientHooks{})
if err != nil {
t.Fatal(err)
}
defer ed2.Close(baseCtx)
serverDebug := debug.GetInstance(serverCtx)
if got, want := len(serverDebug.State.Clients()), 2; got != want {
t.Errorf("len(server:Clients) = %d, want %d", got, want)
}
if got, want := len(serverDebug.State.Sessions()), 2; got != want {
t.Errorf("len(server:Sessions) = %d, want %d", got, want)
}
clientDebug := debug.GetInstance(clientCtx)
if got, want := len(clientDebug.State.Servers()), 1; got != want {
t.Errorf("len(client:Servers) = %d, want %d", got, want)
}
// Close one of the connections to verify that the client and session were
// dropped.
if err := ed1.Close(clientCtx); err != nil {
t.Fatal(err)
}
/*TODO: at this point we have verified the editor is closed
However there is no way currently to wait for all associated go routines to
go away, and we need to wait for those to trigger the client drop
for now we just give it a little bit of time, but we need to fix this
in a principled way
*/
// Exponential backoff, capped at one second total wait.
start := time.Now()
delay := time.Millisecond
const maxWait = time.Second
for len(serverDebug.State.Clients()) > 1 {
if time.Since(start) > maxWait {
break
}
time.Sleep(delay)
delay *= 2
}
if got, want := len(serverDebug.State.Clients()), 1; got != want {
t.Errorf("len(server:Clients) = %d, want %d", got, want)
}
if got, want := len(serverDebug.State.Sessions()), 1; got != want {
t.Errorf("len(server:Sessions()) = %d, want %d", got, want)
}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1055
} | [
2830,
3393,
7939,
1731,
62731,
1155,
353,
8840,
836,
8,
341,
24842,
11,
1848,
1669,
12418,
7121,
50,
31536,
2099,
30570,
808,
31536,
2648,
90,
10809,
25,
12418,
10616,
4748,
35629,
66203,
10690,
59209,
743,
1848,
961,
2092,
341,
197,
32... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestImportImportRequestResponsePairs_ReturnsWarningsContentLengthAndTransferEncodingSet(t *testing.T) {
RegisterTestingT(t)
cache := cache.NewInMemoryCache()
cfg := Configuration{Webserver: false}
cacheMatcher := matching.CacheMatcher{RequestCache: cache, Webserver: cfg.Webserver}
hv := Hoverfly{Cfg: &cfg, CacheMatcher: cacheMatcher, Simulation: models.NewSimulation()}
RegisterTestingT(t)
encodedPair := v2.RequestMatcherResponsePairViewV5{
Response: v2.ResponseDetailsViewV5{
Status: 200,
Body: base64String("hello_world"),
EncodedBody: true,
Headers: map[string][]string{
"Content-Length": []string{"16"},
"Transfer-Encoding": []string{"chunked"},
},
},
RequestMatcher: v2.RequestMatcherViewV5{
Destination: []v2.MatcherViewV5{
v2.MatcherViewV5{
Matcher: "exact",
Value: "hoverfly.io",
},
},
},
}
result := hv.importRequestResponsePairViews([]v2.RequestMatcherResponsePairViewV5{encodedPair})
Expect(result.WarningMessages).To(HaveLen(1))
Expect(result.WarningMessages[0].Message).To(ContainSubstring("Response contains both Content-Length and Transfer-Encoding headers on data.pairs[0].response"))
} | explode_data.jsonl/75459 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 455
} | [
2830,
3393,
11511,
11511,
1900,
2582,
54228,
53316,
82,
20140,
2762,
4373,
3036,
21970,
14690,
1649,
1155,
353,
8840,
836,
8,
341,
79096,
16451,
51,
1155,
692,
52680,
1669,
6500,
7121,
641,
10642,
8233,
741,
50286,
1669,
12221,
90,
5981,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMySQLClusterServiceAll(t *testing.T) {
TestMySQLClusterService_GetMySQLServers(t)
TestMySQLClusterService_GetAll(t)
TestMySQLClusterService_GetByID(t)
TestMySQLClusterService_Create(t)
TestMySQLClusterService_Update(t)
TestMySQLClusterService_Delete(t)
TestMySQLClusterService_Marshal(t)
TestMySQLClusterService_MarshalWithFields(t)
} | explode_data.jsonl/6147 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 136
} | [
2830,
3393,
59224,
28678,
1860,
2403,
1155,
353,
8840,
836,
8,
341,
73866,
59224,
28678,
1860,
13614,
59224,
78139,
1155,
340,
73866,
59224,
28678,
1860,
13614,
2403,
1155,
340,
73866,
59224,
28678,
1860,
13614,
60572,
1155,
340,
73866,
592... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBreachSpends(t *testing.T) {
for _, test := range breachTests {
tc := test
t.Run(tc.name, func(t *testing.T) {
testBreachSpends(t, tc)
})
}
} | explode_data.jsonl/2939 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 75
} | [
2830,
3393,
33,
22606,
6406,
1412,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1669,
2088,
30371,
18200,
341,
197,
78255,
1669,
1273,
198,
197,
3244,
16708,
44415,
2644,
11,
2915,
1155,
353,
8840,
836,
8,
341,
298,
18185,
33,
2260... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestPoolBorrowJSON(t *testing.T) {
assert := assert.New(t)
pool := NewDecoderPool(1)
decoder := pool.Borrow("application/json")
_, ok := decoder.(*jsonDecoder)
assert.True(ok)
} | explode_data.jsonl/35763 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 73
} | [
2830,
3393,
10551,
33,
7768,
5370,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
340,
85273,
1669,
1532,
20732,
10551,
7,
16,
340,
197,
48110,
1669,
7314,
1785,
7768,
445,
5132,
8931,
1138,
197,
6878,
5394,
1669,
24551,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestTxOut_GetDustThreshold(t *testing.T) {
script := script.NewScriptRaw([]byte{opcodes.OP_RETURN, 0x01, 0x01})
txout := NewTxOut(9, script)
assert.Equal(t, int64(0), txout.GetDustThreshold(&util.FeeRate{SataoshisPerK: 1}))
} | explode_data.jsonl/38873 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 99
} | [
2830,
3393,
31584,
2662,
13614,
35,
590,
37841,
1155,
353,
8840,
836,
8,
341,
86956,
1669,
5316,
7121,
5910,
20015,
10556,
3782,
90,
453,
25814,
81563,
21909,
11,
220,
15,
87,
15,
16,
11,
220,
15,
87,
15,
16,
3518,
46237,
411,
1669,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestWithRouter installs a custom message router into the baseapp tx
// handler and runs 3 blocks of 5 counter txs each, asserting every
// DeliverTx succeeds through the custom route.
func TestWithRouter(t *testing.T) {
// test increments in the handler
deliverKey := []byte("deliver-key")
txHandlerOpt := func(bapp *baseapp.BaseApp) {
// Custom router backed by a sync.Map instead of the default router.
customRouter := &testCustomRouter{routes: sync.Map{}}
r := sdk.NewRoute(routeMsgCounter, handlerMsgCounter(t, capKey1, deliverKey))
customRouter.AddRoute(r)
txHandler := middleware.ComposeMiddlewares(
middleware.NewRunMsgsTxHandler(middleware.NewMsgServiceRouter(encCfg.InterfaceRegistry), customRouter),
middleware.NewTxDecoderMiddleware(testTxDecoder(encCfg.Amino)),
)
bapp.SetTxHandler(txHandler)
}
app := setupBaseApp(t, txHandlerOpt)
app.InitChain(abci.RequestInitChain{})
nBlocks := 3
txPerHeight := 5
for blockN := 0; blockN < nBlocks; blockN++ {
header := tmproto.Header{Height: int64(blockN) + 1}
app.BeginBlock(abci.RequestBeginBlock{Header: header})
for i := 0; i < txPerHeight; i++ {
// Monotonically increasing counter across all blocks.
counter := int64(blockN*txPerHeight + i)
tx := newTxCounter(counter, counter)
txBytes, err := encCfg.Amino.Marshal(tx)
require.NoError(t, err)
res := app.DeliverTx(abci.RequestDeliverTx{Tx: txBytes})
require.True(t, res.IsOK(), fmt.Sprintf("%v", res))
}
app.EndBlock(abci.RequestEndBlock{})
app.Commit()
}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 481
} | [
2830,
3393,
2354,
9523,
1155,
353,
8840,
836,
8,
341,
197,
322,
1273,
61600,
304,
279,
7013,
198,
69509,
1524,
1592,
1669,
3056,
3782,
445,
74728,
16173,
5130,
46237,
3050,
21367,
1669,
2915,
1883,
676,
353,
3152,
676,
13018,
2164,
8,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestWatch(t *testing.T) {
b, tmpPath := backend.NewDefaultTmpBackend()
s := newWatchableStore(zap.NewExample(), b, &lease.FakeLessor{}, nil, StoreConfig{})
defer func() {
s.store.Close()
os.Remove(tmpPath)
}()
testKey := []byte("foo")
testValue := []byte("bar")
s.Put(testKey, testValue, lease.NoLease)
w := s.NewWatchStream()
w.Watch(0, testKey, nil, 0)
if !s.synced.contains(string(testKey)) {
// the key must have had an entry in synced
t.Errorf("existence = false, want true")
}
} | explode_data.jsonl/67201 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 203
} | [
2830,
3393,
14247,
1155,
353,
8840,
836,
8,
341,
2233,
11,
4174,
1820,
1669,
19163,
7121,
3675,
35986,
29699,
741,
1903,
1669,
501,
14247,
480,
6093,
13174,
391,
7121,
13314,
1507,
293,
11,
609,
1623,
991,
726,
43,
8309,
22655,
2092,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUnderlineSectionIndentMismatch(t *testing.T) {
src := `
abc
---`
expected := `
Doc
Paragraph
Text[abc ---]
`
assertParse(t, expected, src)
} | explode_data.jsonl/21251 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 67
} | [
2830,
3393,
16250,
1056,
9620,
42729,
82572,
1155,
353,
8840,
836,
8,
341,
41144,
1669,
22074,
39022,
198,
262,
12448,
3989,
42400,
1669,
22074,
9550,
198,
197,
42165,
198,
197,
49635,
58,
13683,
12448,
921,
3989,
6948,
14463,
1155,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestEmptyJoinRecursive(t *testing.T) {
executor, sbc1, _, _ := createLegacyExecutorEnv()
// Make sure it also works recursively.
sbc1.SetResults([]*sqltypes.Result{{
Fields: []*querypb.Field{
{Name: "id", Type: sqltypes.Int32},
},
}, {
Fields: []*querypb.Field{
{Name: "id", Type: sqltypes.Int32},
{Name: "col", Type: sqltypes.Int32},
},
}, {
Fields: []*querypb.Field{
{Name: "id", Type: sqltypes.Int32},
},
}})
result, err := executorExec(executor, "select u1.id, u2.id, u3.id from user u1 join (user u2 join user u3 on u3.id = u2.col) where u1.id = 1", nil)
require.NoError(t, err)
wantQueries := []*querypb.BoundQuery{{
Sql: "select u1.id from user as u1 where u1.id = 1",
BindVariables: map[string]*querypb.BindVariable{},
}, {
Sql: "select u2.id, u2.col from user as u2 where 1 != 1",
BindVariables: map[string]*querypb.BindVariable{},
}, {
Sql: "select u3.id from user as u3 where 1 != 1",
BindVariables: map[string]*querypb.BindVariable{
"u2_col": sqltypes.NullBindVariable,
},
}}
if !reflect.DeepEqual(sbc1.Queries, wantQueries) {
t.Errorf("sbc1.Queries:\n%+v, want\n%+v\n", sbc1.Queries, wantQueries)
}
wantResult := &sqltypes.Result{
Fields: []*querypb.Field{
{Name: "id", Type: sqltypes.Int32},
{Name: "id", Type: sqltypes.Int32},
{Name: "id", Type: sqltypes.Int32},
},
}
if !result.Equal(wantResult) {
t.Errorf("result: %+v, want %+v", result, wantResult)
}
} | explode_data.jsonl/67429 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 660
} | [
2830,
3393,
3522,
12292,
78542,
1155,
353,
8840,
836,
8,
341,
67328,
4831,
11,
7898,
66,
16,
11,
8358,
716,
1669,
1855,
77415,
25255,
14359,
741,
197,
322,
7405,
2704,
432,
1083,
4278,
52847,
624,
1903,
8904,
16,
4202,
9801,
85288,
35... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestEngine_DeleteWALLoadMetadata(t *testing.T) {
for _, index := range tsdb.RegisteredIndexes() {
t.Run(index, func(t *testing.T) {
e := MustOpenEngine(index)
defer e.Close()
if err := e.WritePointsString(
`cpu,host=A value=1.1 1000000000`,
`cpu,host=B value=1.2 2000000000`,
); err != nil {
t.Fatalf("failed to write points: %s", err.Error())
}
// Remove series.
itr := &seriesIterator{keys: [][]byte{[]byte("cpu,host=A")}}
if err := e.DeleteSeriesRange(itr, math.MinInt64, math.MaxInt64); err != nil {
t.Fatalf("failed to delete series: %s", err.Error())
}
// Ensure we can close and load index from the WAL
if err := e.Reopen(); err != nil {
t.Fatal(err)
}
if exp, got := 0, len(e.Cache.Values(tsm1.SeriesFieldKeyBytes("cpu,host=A", "value"))); exp != got {
t.Fatalf("unexpected number of values: got: %d. exp: %d", got, exp)
}
if exp, got := 1, len(e.Cache.Values(tsm1.SeriesFieldKeyBytes("cpu,host=B", "value"))); exp != got {
t.Fatalf("unexpected number of values: got: %d. exp: %d", got, exp)
}
})
}
} | explode_data.jsonl/28077 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 468
} | [
2830,
3393,
4571,
57418,
54,
3919,
2731,
14610,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1922,
1669,
2088,
10591,
1999,
19983,
291,
62229,
368,
341,
197,
3244,
16708,
7195,
11,
2915,
1155,
353,
8840,
836,
8,
341,
298,
7727,
1669,
154... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestPcrExtend(t *testing.T) {
rwc := openTPMOrSkip(t)
defer rwc.Close()
var pcrValue [20]byte
var value = "FFFFFFFFFFFFFFFFFFFF"
copy(pcrValue[:], value)
oldPcrValue, err := ReadPCR(rwc, 12)
if err != nil {
t.Fatal("Couldn't read PCR 12 from the TPM:", err)
}
newPcrValue, err := PcrExtend(rwc, 12, pcrValue)
if err != nil {
t.Fatal("Couldn't extend PCR 12 from the TPM:", err)
}
finalPcr := sha1.Sum(append(oldPcrValue, pcrValue[:]...))
if bytes.Equal(finalPcr[:], newPcrValue) {
t.Logf("PCR are equal!\n")
} else {
t.Fatal("PCR are not equal! Test failed.\n")
}
} | explode_data.jsonl/75341 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 268
} | [
2830,
3393,
47,
5082,
72136,
1155,
353,
8840,
836,
8,
341,
7000,
24028,
1669,
1787,
4239,
44,
2195,
35134,
1155,
340,
16867,
435,
24028,
10421,
2822,
2405,
281,
5082,
1130,
508,
17,
15,
90184,
198,
2405,
897,
284,
330,
98843,
98843,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestDelayedTask_RunAfterWait_ShouldNotBlock(t *testing.T) {
delayer := newDelayer()
defer delayer.Stop(duration, duration)
probe := make(chan taskProbe, 1)
task := delayer.NewTask(duration, func(ctx context.Context, interrupted bool) error {
probe <- newTaskProbe(ctx, interrupted)
<-ctx.Done()
return nil
})
task.RunAsync()
select {
case <-probe:
assert.Assert(t, false, "Task shouldn't have ran when RunAsync returned")
default:
}
} | explode_data.jsonl/66927 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 163
} | [
2830,
3393,
57361,
6262,
84158,
6025,
14190,
36578,
616,
2623,
4713,
1155,
353,
8840,
836,
8,
341,
69509,
1135,
1669,
501,
16532,
1135,
741,
16867,
1594,
1135,
30213,
48148,
11,
8090,
340,
197,
52329,
1669,
1281,
35190,
3383,
81426,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNewCompressionMiddleware_Headers(t *testing.T) {
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) })
middleware := NewCompressionMiddleware(8, "/metrics")
tests := map[string]struct {
cm MiddlewareFunc
statusCode int
encodingExpected string
}{
"gzip": {cm: middleware, statusCode: http.StatusOK, encodingExpected: gzipHeader},
"deflate": {cm: middleware, statusCode: http.StatusOK, encodingExpected: deflateHeader},
"gzip, *": {cm: middleware, statusCode: http.StatusOK, encodingExpected: gzipHeader},
"deflate, *": {cm: middleware, statusCode: http.StatusOK, encodingExpected: deflateHeader},
"invalid, gzip, *": {cm: middleware, statusCode: http.StatusOK, encodingExpected: gzipHeader},
"invalid, deflate, *": {cm: middleware, statusCode: http.StatusOK, encodingExpected: deflateHeader},
"invalid": {cm: middleware, statusCode: http.StatusNotAcceptable, encodingExpected: ""},
"invalid, *": {cm: middleware, statusCode: http.StatusOK, encodingExpected: ""},
"identity": {cm: middleware, statusCode: http.StatusOK, encodingExpected: identityHeader},
"gzip, identity": {cm: middleware, statusCode: http.StatusOK, encodingExpected: gzipHeader},
"*": {cm: middleware, statusCode: http.StatusOK, encodingExpected: ""},
"": {cm: middleware, statusCode: http.StatusOK, encodingExpected: identityHeader},
"not present": {cm: middleware, statusCode: http.StatusOK, encodingExpected: identityHeader},
}
for encodingName, tc := range tests {
t.Run(fmt.Sprintf("%q: compression middleware acts according the Accept-Encoding header", encodingName), func(t *testing.T) {
require.NotNil(t, tc.cm)
// given
req1, err := http.NewRequest("GET", "/alive", nil)
require.NoError(t, err)
if encodingName != "not present" {
req1.Header.Set("Accept-Encoding", encodingName)
}
// when
rc1 := httptest.NewRecorder()
tc.cm(handler).ServeHTTP(rc1, req1)
// then
assert.Equal(t, tc.statusCode, rc1.Code)
contentEncodingHeader := rc1.Header().Get("Content-Encoding")
assert.NotNil(t, contentEncodingHeader)
assert.Equal(t, tc.encodingExpected, contentEncodingHeader)
})
}
} | explode_data.jsonl/52418 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 931
} | [
2830,
3393,
3564,
81411,
24684,
62,
10574,
1155,
353,
8840,
836,
8,
341,
53326,
1669,
1758,
89164,
18552,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
314,
289,
69794,
19886,
52989,
8,
2751,
2109,
11603,
1669,
1532,
81411,
24684,
7,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestResetSequence(t *testing.T) {
client := framework.NewClient()
want := sqltypes.Result{
Fields: []*querypb.Field{{
Name: "nextval",
Type: sqltypes.Int64,
}},
RowsAffected: 1,
Rows: [][]sqltypes.Value{{
sqltypes.NewInt64(1),
}},
}
qr, err := client.Execute("select next value from vitess_reset_seq", nil)
if err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(*qr, want) {
t.Errorf("Execute: \n%#v, want \n%#v", *qr, want)
}
// Reset mastership
err = client.SetServingType(topodatapb.TabletType_REPLICA)
if err != nil {
t.Fatal(err)
}
err = client.SetServingType(topodatapb.TabletType_MASTER)
if err != nil {
t.Fatal(err)
}
// Ensure the next value skips previously cached values.
want.Rows[0][0] = sqltypes.NewInt64(4)
qr, err = client.Execute("select next value from vitess_reset_seq", nil)
if err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(*qr, want) {
t.Errorf("Execute: \n%#v, want \n%#v", *qr, want)
}
} | explode_data.jsonl/17260 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 437
} | [
2830,
3393,
14828,
14076,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
12626,
7121,
2959,
741,
50780,
1669,
5704,
9242,
18456,
515,
197,
197,
8941,
25,
29838,
1631,
16650,
17087,
90,
515,
298,
21297,
25,
330,
3600,
831,
756,
298,
27725,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestNewQemuHypervisorConfigImageAndInitrd(t *testing.T) {
assert := assert.New(t)
tmpdir, err := ioutil.TempDir(testDir, "")
assert.NoError(err)
defer os.RemoveAll(tmpdir)
imagePath := filepath.Join(tmpdir, "image")
initrdPath := filepath.Join(tmpdir, "initrd")
hypervisorPath := path.Join(tmpdir, "hypervisor")
kernelPath := path.Join(tmpdir, "kernel")
for _, file := range []string{imagePath, initrdPath, hypervisorPath, kernelPath} {
err = createEmptyFile(file)
assert.NoError(err)
}
machineType := "machineType"
disableBlock := true
enableIOThreads := true
hotplugVFIOOnRootBus := true
pcieRootPort := uint32(2)
hypervisor := hypervisor{
Path: hypervisorPath,
Kernel: kernelPath,
Image: imagePath,
Initrd: initrdPath,
MachineType: machineType,
DisableBlockDeviceUse: disableBlock,
EnableIOThreads: enableIOThreads,
HotplugVFIOOnRootBus: hotplugVFIOOnRootBus,
PCIeRootPort: pcieRootPort,
}
_, err = newQemuHypervisorConfig(hypervisor)
// specifying both an image+initrd is invalid
assert.Error(err)
} | explode_data.jsonl/11730 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 477
} | [
2830,
3393,
3564,
48,
33063,
39,
1082,
31396,
2648,
1906,
3036,
3803,
6498,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
692,
20082,
3741,
11,
1848,
1669,
43144,
65009,
6184,
8623,
6184,
11,
14676,
6948,
35699,
3964,
340,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestDutchNationalFlag(t *testing.T) {
colors := []Color{
Blue,
Red,
Red,
Blue,
White,
Red,
White,
}
pvtIdx := 1
DutchNationalFlag(colors, pvtIdx)
assert.Equal(t, colors[0], Red)
assert.Equal(t, colors[1], Red)
assert.Equal(t, colors[2], Red)
for _, c := range colors {
fmt.Printf("%v\n", c)
}
} | explode_data.jsonl/12218 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 157
} | [
2830,
3393,
35,
14061,
30812,
12135,
1155,
353,
8840,
836,
8,
341,
21481,
82,
1669,
3056,
1636,
515,
197,
197,
10331,
345,
197,
197,
6033,
345,
197,
197,
6033,
345,
197,
197,
10331,
345,
197,
197,
14075,
345,
197,
197,
6033,
345,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGenerateConfigForDCADatabaseCandles(t *testing.T) {
cfg := Config{
Nickname: "ExampleStrategyDCADatabaseCandles",
Goal: "To demonstrate the DCA strategy using database candle data",
StrategySettings: StrategySettings{
Name: dca,
},
CurrencySettings: []CurrencySettings{
{
ExchangeName: testExchange,
Asset: asset.Spot.String(),
Base: currency.BTC.String(),
Quote: currency.USDT.String(),
InitialQuoteFunds: initialQuoteFunds2,
BuySide: minMax,
SellSide: minMax,
Leverage: Leverage{
CanUseLeverage: false,
},
MakerFee: makerFee,
TakerFee: takerFee,
},
},
DataSettings: DataSettings{
Interval: kline.OneDay.Duration(),
DataType: common.CandleStr,
DatabaseData: &DatabaseData{
StartDate: startDate,
EndDate: endDate,
ConfigOverride: &database.Config{
Enabled: true,
Verbose: false,
Driver: "sqlite",
ConnectionDetails: drivers.ConnectionDetails{
Host: "localhost",
Database: "testsqlite.db",
},
},
InclusiveEndDate: false,
},
},
PortfolioSettings: PortfolioSettings{
BuySide: minMax,
SellSide: minMax,
Leverage: Leverage{
CanUseLeverage: false,
},
},
StatisticSettings: StatisticSettings{
RiskFreeRate: decimal.NewFromFloat(0.03),
},
}
if saveConfig {
result, err := json.MarshalIndent(cfg, "", " ")
if err != nil {
t.Fatal(err)
}
p, err := os.Getwd()
if err != nil {
t.Fatal(err)
}
err = ioutil.WriteFile(filepath.Join(p, "examples", "dca-database-candles.strat"), result, 0770)
if err != nil {
t.Error(err)
}
}
} | explode_data.jsonl/58414 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 781
} | [
2830,
3393,
31115,
2648,
2461,
5626,
1808,
2211,
34,
20125,
1155,
353,
8840,
836,
8,
341,
50286,
1669,
5532,
515,
197,
18317,
41052,
25,
330,
13314,
19816,
5626,
1808,
2211,
34,
20125,
756,
197,
9600,
78,
278,
25,
257,
330,
1249,
1986... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestClient_HasPermission(t *testing.T) {
client := NewClientWithPermissions("token", []string{"a", "b"})
if !client.HasPermission("a") {
t.Errorf("client has permissions %s, therefore HasPermission(a) should've been true", client.Permissions)
}
if !client.HasPermission("b") {
t.Errorf("client has permissions %s, therefore HasPermission(b) should've been true", client.Permissions)
}
if client.HasPermission("c") {
t.Errorf("client has permissions %s, therefore HasPermission(c) should've been false", client.Permissions)
}
if client.HasPermission("ab") {
t.Errorf("client has permissions %s, therefore HasPermission(ab) should've been false", client.Permissions)
}
} | explode_data.jsonl/38970 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 224
} | [
2830,
3393,
2959,
2039,
300,
14966,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
1532,
2959,
2354,
23851,
445,
5839,
497,
3056,
917,
4913,
64,
497,
330,
65,
23625,
743,
753,
2972,
16152,
14966,
445,
64,
899,
341,
197,
3244,
13080,
445,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestMakeService(t *testing.T) {
pa := &pav1a1.PodAutoscaler{
ObjectMeta: metav1.ObjectMeta{
Namespace: "here",
Name: "with-you",
UID: "2006",
// Those labels are propagated from the Revision->KPA.
Labels: map[string]string{
serving.RevisionLabelKey: "with-you",
serving.RevisionUID: "2009",
},
Annotations: map[string]string{
"a": "b",
},
},
Spec: pav1a1.PodAutoscalerSpec{
ScaleTargetRef: autoscalingv1.CrossVersionObjectReference{
APIVersion: "apps/v1",
Kind: "Deployment",
Name: "with-you",
},
ServiceName: "with-you-service",
},
}
selector := map[string]string{"cant": "stop"}
want := &corev1.Service{
ObjectMeta: metav1.ObjectMeta{
Namespace: "here",
Name: "with-you-metrics",
Labels: map[string]string{
// Those should be propagated.
serving.RevisionLabelKey: "with-you",
serving.RevisionUID: "2009",
kpaLabelKey: "with-you",
},
Annotations: map[string]string{
"a": "b",
},
OwnerReferences: []metav1.OwnerReference{{
APIVersion: pav1a1.SchemeGroupVersion.String(),
Kind: "PodAutoscaler",
Name: "with-you",
UID: "2006",
Controller: &boolTrue,
BlockOwnerDeletion: &boolTrue,
}},
},
Spec: corev1.ServiceSpec{
Ports: []corev1.ServicePort{{
Name: "metrics",
Protocol: corev1.ProtocolTCP,
Port: 9090,
TargetPort: intstr.FromString("queue-metrics"),
}},
Selector: selector,
},
}
got := MakeMetricsService(pa, selector)
if diff := cmp.Diff(want, got); diff != "" {
t.Errorf("Metrics K8s Service mismatch (-want, +got) = %v", diff)
}
} | explode_data.jsonl/2 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 842
} | [
2830,
3393,
8078,
1860,
1155,
353,
8840,
836,
8,
341,
3223,
64,
1669,
609,
79,
402,
16,
64,
16,
88823,
19602,
436,
63084,
515,
197,
23816,
12175,
25,
77520,
16,
80222,
515,
298,
90823,
25,
330,
6739,
756,
298,
21297,
25,
414,
330,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestFleetStrategyValidation(t *testing.T) {
t.Parallel()
ctx := context.Background()
flt := defaultFleet(framework.Namespace)
client := framework.AgonesClient.AgonesV1()
flt, err := client.Fleets(framework.Namespace).Create(ctx, flt, metav1.CreateOptions{})
if assert.Nil(t, err) {
defer client.Fleets(framework.Namespace).Delete(ctx, flt.ObjectMeta.Name, metav1.DeleteOptions{}) // nolint:errcheck
}
framework.AssertFleetCondition(t, flt, e2e.FleetReadyCount(flt.Spec.Replicas))
flt, err = client.Fleets(framework.Namespace).Get(ctx, flt.ObjectMeta.GetName(), metav1.GetOptions{})
assert.NoError(t, err)
// func to check that we receive an expected error
verifyErr := func(err error) {
assert.NotNil(t, err)
statusErr, ok := err.(*k8serrors.StatusError)
assert.True(t, ok)
fmt.Println(statusErr)
CausesMessages := []string{"Strategy Type should be one of: RollingUpdate, Recreate."}
assert.Len(t, statusErr.Status().Details.Causes, 1)
assert.Equal(t, metav1.CauseTypeFieldValueInvalid, statusErr.Status().Details.Causes[0].Type)
assert.Contains(t, CausesMessages, statusErr.Status().Details.Causes[0].Message)
}
// Change DeploymentStrategy Type, set it to empty string, which is forbidden
fltCopy := flt.DeepCopy()
fltCopy.Spec.Strategy.Type = appsv1.DeploymentStrategyType("")
_, err = client.Fleets(framework.Namespace).Update(ctx, fltCopy, metav1.UpdateOptions{})
verifyErr(err)
// Try to remove whole DeploymentStrategy in a patch
patch := `[{ "op": "remove", "path": "/spec/strategy"},
{ "op": "replace", "path": "/spec/replicas", "value": 3}]`
_, err = framework.AgonesClient.AgonesV1().Fleets(framework.Namespace).Patch(ctx, flt.ObjectMeta.Name, types.JSONPatchType, []byte(patch), metav1.PatchOptions{})
verifyErr(err)
} | explode_data.jsonl/15416 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 655
} | [
2830,
3393,
37,
18973,
19816,
13799,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
20985,
1669,
2266,
19047,
2822,
1166,
4832,
1669,
1638,
37,
18973,
955,
5794,
46011,
692,
25291,
1669,
12626,
49850,
3154,
2959,
49850,
3154,
53,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMemory_GetPluginInfo(t *testing.T) {
m, err := NewWithDefault()
require.NoError(t, err)
res, err := m.GetPluginInfo(&spi.GetPluginInfoRequest{})
require.NoError(t, err)
assert.NotNil(t, res)
} | explode_data.jsonl/73853 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 82
} | [
2830,
3393,
10642,
13614,
11546,
1731,
1155,
353,
8840,
836,
8,
341,
2109,
11,
1848,
1669,
1532,
2354,
3675,
741,
17957,
35699,
1155,
11,
1848,
340,
10202,
11,
1848,
1669,
296,
2234,
11546,
1731,
2099,
39157,
2234,
11546,
1731,
1900,
37... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestMapProxy_ValuesWithPredicateWithNilPredicate(t *testing.T) {
_, err := mp.ValuesWithPredicate(nil)
AssertErrorNotNil(t, err, "ValuesWithPredicate did not return an error for nil predicate")
mp.Clear()
} | explode_data.jsonl/57020 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 72
} | [
2830,
3393,
2227,
16219,
62,
6227,
2354,
36329,
2354,
19064,
36329,
1155,
353,
8840,
836,
8,
341,
197,
6878,
1848,
1669,
10490,
35145,
2354,
36329,
27907,
340,
18017,
1454,
96144,
1155,
11,
1848,
11,
330,
6227,
2354,
36329,
1521,
537,
4... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestTool(t *testing.T) {
tests := []struct {
it string
mode Mode
job string
setValues yamlx.Values
globalValues string
templates map[string]string
vault getter
want *fakeDoer
}{
{
it: "should_apply_one_doc_with_tmplt_scoped_values",
mode: ModeGenerate,
job: `
steps:
- tmplt: tpl/example.txt
values:
audience: all
team:
lead: pipo`,
templates: map[string]string{
"tpl/example.txt": `
{{ .Values.team.lead }} says hello {{ .Values.audience }}!`,
},
want: &fakeDoer{
apply: []string{"\npipo says hello all!"},
},
},
{
it: "should_apply_one_doc_with_global_and_tmplt_scoped_values",
mode: ModeGenerate,
job: `
steps:
- tmplt: tpl/example.txt
values:
team:
lead: pipo
defaults:
audience: all
team:
lead: klukkluk`,
globalValues: `
audience: world
`,
templates: map[string]string{
"tpl/example.txt": `
{{ .Values.team.lead }} says hello {{ .Values.audience }}!`,
},
want: &fakeDoer{
apply: []string{"\npipo says hello world!"},
},
},
{
it: "should_apply_one_doc_with_setvalue_overriding_all_others",
mode: ModeGenerate,
job: `
steps:
- tmplt: tpl/example.txt
values:
name: pipo
defaults:
name: klukkluk`,
setValues: yamlx.Values{"name": "dikkedeur"},
globalValues: `
name: mamaloe
`,
templates: map[string]string{
"tpl/example.txt": `
{{ .Values.name }}`,
},
want: &fakeDoer{
apply: []string{"\ndikkedeur"},
},
},
{
it: "should_wait",
job: `
steps:
- wait: --one 1 --two 2`,
want: &fakeDoer{
wait: []string{"--one 1 --two 2"},
},
},
{
it: "should_handle_action_with_portforward_arg",
mode: ModeGenerateWithActions,
job: `
steps:
- action: action/get.txt
portForward: --forward-flags
values:
type: getSecret`,
templates: map[string]string{
"action/get.txt": `
type: {{ .Values.type }}`,
},
want: &fakeDoer{
action: []string{"\ntype: getSecret"},
portForward: []string{"--forward-flags"},
passedValues: yamlx.Values{},
actionTally: 1,
},
},
{
it: "should_handle_action_with_passed_values",
mode: ModeGenerateWithActions,
job: `
steps:
- action: action/nop.txt
- action: action/value.txt`,
templates: map[string]string{
"action/nop.txt": `
no operation`,
"action/value.txt": `
tally: {{ .Get.tally }}`,
},
want: &fakeDoer{
action: []string{"\nno operation", "\ntally: 1"},
portForward: []string{"", ""},
passedValues: yamlx.Values{"tally": 1},
actionTally: 2,
},
},
{
it: "should_handle_reads_from_vault",
mode: ModeGenerateWithActions,
job: `
steps:
- tmplt: tpl/vault.txt`,
templates: map[string]string{
"tpl/vault.txt": `
secret: {{ vault "object" "field" }}`,
},
vault: &fakeVault{
"object/field": "value",
},
want: &fakeDoer{
apply: []string{"\nsecret: value"},
},
},
{
it: "should_expand_variables_in_job",
mode: ModeGenerate,
job: `
steps:
- tmplt: tpl/example.txt
values:
text: "{{ .Values.first }}" # note the quotes to make this valid yaml (arguably)
defaults:
first: "hello"
`,
templates: map[string]string{
"tpl/example.txt": `text={{ .Values.text }}`,
},
want: &fakeDoer{
apply: []string{"text=hello"},
},
},
}
for _, tst := range tests {
t.Run(tst.it, func(t *testing.T) {
// create function to read template file content.
readFile := func(path string) (string, []byte, error) {
s, ok := tst.templates[path]
if !ok {
return "", nil, fmt.Errorf("not found: %s", path)
}
return path, []byte(s), nil
}
m := &fakeDoer{}
tl := Tool{
Mode: tst.mode,
Environ: []string{},
Execute: m,
readFileFn: readFile,
vault: tst.vault,
}
err := tl.run(tst.setValues, []byte(tst.globalValues), []byte(tst.job))
if assert.NoError(t, err) {
assert.Equal(t, tst.want, m)
}
})
}
} | explode_data.jsonl/74256 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1924
} | [
2830,
3393,
7740,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
23374,
1843,
914,
198,
197,
60247,
260,
14562,
198,
197,
68577,
688,
914,
198,
197,
8196,
6227,
262,
32246,
87,
35145,
198,
197,
18842,
6227,
914,
198,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestMqMsgStream_Produce(t *testing.T) {
f := &fixture{t: t}
parameters := f.setup()
defer f.teardown()
factory := &ProtoUDFactory{}
for i := range parameters {
func(client mqclient.Client) {
m, err := NewMqMsgStream(context.Background(), 100, 100, client, factory.NewUnmarshalDispatcher())
assert.Nil(t, err)
// Produce before called AsProducer
insertMsg := &InsertMsg{
BaseMsg: generateBaseMsg(),
InsertRequest: internalpb.InsertRequest{
Base: &commonpb.MsgBase{
MsgType: commonpb.MsgType_Insert,
MsgID: 1,
Timestamp: 2,
SourceID: 3,
},
DbName: "test_db",
CollectionName: "test_collection",
PartitionName: "test_partition",
DbID: 4,
CollectionID: 5,
PartitionID: 6,
SegmentID: 7,
ShardName: "test-channel",
Timestamps: []uint64{2, 1, 3},
RowData: []*commonpb.Blob{},
},
}
msgPack := &MsgPack{
Msgs: []TsMsg{insertMsg},
}
err = m.Produce(msgPack)
assert.NotNil(t, err)
}(parameters[i].client)
}
} | explode_data.jsonl/55281 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 537
} | [
2830,
3393,
44,
80,
6611,
3027,
1088,
47845,
1155,
353,
8840,
836,
8,
341,
1166,
1669,
609,
59612,
90,
83,
25,
259,
532,
67543,
1669,
282,
25338,
741,
16867,
282,
31853,
37496,
2822,
1166,
2919,
1669,
609,
31549,
4656,
4153,
16094,
20... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServerSendMessageToChannel(t *testing.T) {
s := NewTestServer()
go s.Start()
s.SendMessageToChannel("C123456789", "some text")
time.Sleep(2 * time.Second)
assert.True(t, s.SawOutgoingMessage("some text"))
s.Stop()
} | explode_data.jsonl/7545 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 88
} | [
2830,
3393,
5475,
80863,
1249,
9629,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
1532,
2271,
5475,
741,
30680,
274,
12101,
741,
1903,
66330,
1249,
9629,
445,
34,
16,
17,
18,
19,
20,
21,
22,
23,
24,
497,
330,
14689,
1467,
1138,
21957,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLowerCase(t *testing.T) {
var tests = map[string]string{
"": "",
"ID": "id",
"ColumnName": "columnname",
"COLUMN_NAME": "column_name",
"column_name": "column_name",
"UserID": "userid",
"UserNameRaw": "usernameraw",
}
for i, e := range tests {
if v := LowerCase(i); v != e {
t.Errorf("LowerCase(\"%s\"): expected %s, got %s", i, e, v)
}
}
} | explode_data.jsonl/65315 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 198
} | [
2830,
3393,
12451,
1155,
353,
8840,
836,
8,
341,
2405,
7032,
284,
2415,
14032,
30953,
515,
197,
197,
28796,
310,
8324,
197,
197,
1,
915,
788,
688,
330,
307,
756,
197,
197,
1,
26162,
788,
220,
330,
6229,
606,
756,
197,
197,
46316,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAction_Validate_RequireMixinData(t *testing.T) {
cxt := context.NewTestContext(t)
cxt.AddTestFile("testdata/simple.porter.yaml", config.Name)
m, err := LoadManifestFrom(cxt.Context, config.Name)
require.NoError(t, err, "could not load manifest")
// Sabotage!
m.Install[0].Data = nil
err = m.Install.Validate(m)
assert.EqualError(t, err, "no mixin specified")
} | explode_data.jsonl/37704 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 144
} | [
2830,
3393,
2512,
62,
17926,
62,
17959,
38456,
1043,
1155,
353,
8840,
836,
8,
341,
1444,
2252,
1669,
2266,
7121,
2271,
1972,
1155,
692,
1444,
2252,
1904,
2271,
1703,
445,
92425,
67195,
14598,
261,
33406,
497,
2193,
2967,
692,
2109,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_podTracker_HandlePodAdd(t *testing.T) {
// setup types
logger := logrus.NewEntry(logrus.StandardLogger())
tests := []struct {
name string
trackedPod string // namespace/podName
obj interface{}
}{
{
name: "got-tracked-pod",
trackedPod: "test/github-octocat-1",
obj: _pod,
},
{
name: "wrong-pod",
trackedPod: "test/github-octocat-2",
obj: _pod,
},
{
name: "invalid-type",
trackedPod: "test/github-octocat-1",
obj: new(v1.PodTemplate),
},
{
name: "nil",
trackedPod: "test/nil",
obj: nil,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
p := &podTracker{
Logger: logger,
TrackedPod: test.trackedPod,
// other fields not used by getTrackedPod
// if they're needed, use newPodTracker
}
// just make sure this doesn't panic
p.HandlePodAdd(test.obj)
})
}
} | explode_data.jsonl/62939 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 469
} | [
2830,
3393,
85337,
31133,
42714,
23527,
2212,
1155,
353,
8840,
836,
8,
341,
197,
322,
6505,
4494,
198,
17060,
1669,
1487,
20341,
7121,
5874,
12531,
20341,
53615,
7395,
12367,
78216,
1669,
3056,
1235,
341,
197,
11609,
981,
914,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCore(t *testing.T) {
testData := make(map[float64]uint32)
const maxDataSize = 10000
s := newSummary(maxDataSize)
checkSorted(s, t)
if s.Len() != 0 {
t.Errorf("Initial size should be zero regardless of capacity. Got %d", s.Len())
}
// construct a summary made of unique items only
for i := 0; i < maxDataSize; i++ {
k := rand.Float64()
v := rand.Uint32()
_, exists := testData[k]
if !exists {
_ = s.Add(k, v)
testData[k] = v
}
}
checkSorted(s, t)
if s.Len() != len(testData) {
t.Errorf("Got Len() == %d. Expected %d", s.Len(), len(testData))
}
for k, v := range testData {
i := s.findIndex(k)
if i == s.Len() {
t.Errorf("Couldn't find previously added key on summary")
continue
}
if s.means[i] != k || s.counts[i] != v {
t.Errorf("Wanted to find {%.4f,%d}, but found {%.4f,%d} instead", k, v, s.means[i], s.counts[i])
}
}
} | explode_data.jsonl/77559 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 395
} | [
2830,
3393,
5386,
1155,
353,
8840,
836,
8,
1476,
18185,
1043,
1669,
1281,
9147,
95381,
21,
19,
60,
2496,
18,
17,
692,
4777,
1932,
1043,
1695,
284,
220,
16,
15,
15,
15,
15,
198,
1903,
1669,
501,
19237,
8739,
1043,
1695,
340,
25157,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestStackReferenceNodeJS(t *testing.T) {
if owner := os.Getenv("PULUMI_TEST_OWNER"); owner == "" {
t.Skipf("Skipping: PULUMI_TEST_OWNER is not set")
}
opts := &integration.ProgramTestOptions{
Dir: "stack_reference",
Dependencies: []string{"@pulumi/pulumi"},
Quick: true,
Config: map[string]string{
"org": os.Getenv("PULUMI_TEST_OWNER"),
},
EditDirs: []integration.EditDir{
{
Dir: "step1",
Additive: true,
},
{
Dir: "step2",
Additive: true,
},
},
}
integration.ProgramTest(t, opts)
} | explode_data.jsonl/76366 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 279
} | [
2830,
3393,
4336,
8856,
1955,
12545,
1155,
353,
8840,
836,
8,
341,
743,
6372,
1669,
2643,
64883,
445,
47,
1094,
2794,
40,
11641,
74323,
5038,
6372,
621,
1591,
341,
197,
3244,
57776,
69,
445,
85945,
25,
393,
1094,
2794,
40,
11641,
7432... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSmiRouter_Sync(t *testing.T) {
canary := newTestSMICanary()
mocks := newFixture(canary)
router := &SmiRouter{
logger: mocks.logger,
flaggerClient: mocks.flaggerClient,
smiClient: mocks.meshClient,
kubeClient: mocks.kubeClient,
}
err := router.Reconcile(canary)
require.NoError(t, err)
// test insert
ts, err := router.smiClient.SplitV1alpha1().TrafficSplits("default").Get("podinfo", metav1.GetOptions{})
require.NoError(t, err)
dests := ts.Spec.Backends
assert.Len(t, dests, 2)
apexName, primaryName, canaryName := canary.GetServiceNames()
assert.Equal(t, ts.Spec.Service, apexName)
var pRoute smiv1.TrafficSplitBackend
var cRoute smiv1.TrafficSplitBackend
for _, dest := range ts.Spec.Backends {
if dest.Service == primaryName {
pRoute = dest
}
if dest.Service == canaryName {
cRoute = dest
}
}
assert.Equal(t, strconv.Itoa(100), pRoute.Weight.String())
assert.Equal(t, strconv.Itoa(0), cRoute.Weight.String())
// test update
host := "test"
canary.Spec.Service.Name = host
err = router.Reconcile(canary)
require.NoError(t, err)
ts, err = router.smiClient.SplitV1alpha1().TrafficSplits("default").Get("test", metav1.GetOptions{})
require.NoError(t, err)
assert.Equal(t, host, ts.Spec.Service)
} | explode_data.jsonl/8372 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 520
} | [
2830,
3393,
50,
8155,
9523,
1098,
1721,
1155,
353,
8840,
836,
8,
341,
92936,
658,
1669,
501,
2271,
9501,
1317,
276,
658,
741,
2109,
25183,
1669,
501,
18930,
90651,
658,
340,
67009,
1669,
609,
50,
8155,
9523,
515,
197,
17060,
25,
286,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestRunPipeNoFormats(t *testing.T) {
var ctx = &context.Context{
Version: "1.0.0",
Git: context.GitInfo{
CurrentTag: "v1.0.0",
},
Config: config.Project{
NFPMs: []config.NFPM{
{},
},
},
Parallelism: runtime.NumCPU(),
}
require.NoError(t, Pipe{}.Default(ctx))
testlib.AssertSkipped(t, Pipe{}.Run(ctx))
} | explode_data.jsonl/21979 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 162
} | [
2830,
3393,
6727,
34077,
2753,
44599,
1155,
353,
8840,
836,
8,
341,
2405,
5635,
284,
609,
2147,
9328,
515,
197,
77847,
25,
330,
16,
13,
15,
13,
15,
756,
197,
9600,
275,
25,
2266,
1224,
275,
1731,
515,
298,
197,
5405,
5668,
25,
330... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDownloadMountpath(t *testing.T) {
var (
proxyURL = tutils.RandomProxyURL(t)
baseParams = tutils.BaseAPIParams(proxyURL)
bck = cmn.Bck{
Name: testBucketName,
Provider: cmn.ProviderAIS,
}
objsCnt = 100
template = "storage.googleapis.com/nvdata-openimages/openimages-train-{000000..000050}.tar"
m = make(map[string]string, objsCnt)
)
clearDownloadList(t)
// Prepare objects to be downloaded to targets. Multiple objects to make
// sure that at least one of them gets into target with disabled mountpath.
for i := 0; i < objsCnt; i++ {
m[strconv.FormatInt(int64(i), 10)] = "https://raw.githubusercontent.com/NVIDIA/aistore/master/README.md"
}
tutils.CreateBucketWithCleanup(t, proxyURL, bck, nil)
id1, err := api.DownloadRange(baseParams, generateDownloadDesc(), bck, template)
tassert.CheckFatal(t, err)
tlog.Logf("Started very large download job %s (intended to be aborted)\n", id1)
// Abort just in case something goes wrong.
t.Cleanup(func() {
abortDownload(t, id1)
})
tlog.Logln("Wait a while for downloaders to pick up...")
time.Sleep(3 * time.Second)
smap := tutils.GetClusterMap(t, proxyURL)
selectedTarget, _ := smap.GetRandTarget()
mpathList, err := api.GetMountpaths(baseParams, selectedTarget)
tassert.CheckFatal(t, err)
tassert.Fatalf(t, len(mpathList.Available) >= 2, "%s requires 2 or more mountpaths", t.Name())
mpathID := cos.NowRand().Intn(len(mpathList.Available))
removeMpath := mpathList.Available[mpathID]
tlog.Logf("Disabling mountpath %q at %s\n", removeMpath, selectedTarget.StringEx())
err = api.DisableMountpath(baseParams, selectedTarget, removeMpath, false /*dont-resil*/)
tassert.CheckFatal(t, err)
defer func() {
tlog.Logf("Enabling mountpath %q at %s\n", removeMpath, selectedTarget.StringEx())
err = api.EnableMountpath(baseParams, selectedTarget, removeMpath)
tassert.CheckFatal(t, err)
}()
// Wait for resilvering
args := api.XactReqArgs{Node: selectedTarget.ID(), Kind: cmn.ActResilver, Timeout: rebalanceTimeout}
_, err = api.WaitForXaction(baseParams, args)
tassert.CheckFatal(t, err)
// Downloader finished on the target `selectedTarget`, safe to abort the rest.
tlog.Logf("Aborting download job %s\n", id1)
abortDownload(t, id1)
tlog.Logf("Listing %s\n", bck)
objs, err := tutils.ListObjectNames(proxyURL, bck, "", 0)
tassert.CheckError(t, err)
tassert.Fatalf(t, len(objs) == 0, "objects should not have been downloaded, download should have been aborted\n")
id2, err := api.DownloadMulti(baseParams, generateDownloadDesc(), bck, m)
tassert.CheckFatal(t, err)
tlog.Logf("Started download job %s, waiting for it to finish\n", id2)
waitForDownload(t, id2, 2*time.Minute)
objs, err = tutils.ListObjectNames(proxyURL, bck, "", 0)
tassert.CheckError(t, err)
tassert.Fatalf(t, len(objs) == objsCnt, "Expected %d objects to be present, got: %d", objsCnt, len(objs))
} | explode_data.jsonl/70389 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1105
} | [
2830,
3393,
11377,
16284,
2343,
1155,
353,
8840,
836,
8,
341,
2405,
2399,
197,
197,
22803,
3144,
256,
284,
259,
6031,
26709,
16219,
3144,
1155,
340,
197,
24195,
4870,
284,
259,
6031,
13018,
7082,
4870,
65787,
3144,
340,
197,
2233,
377,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAlphaNumeric(t *testing.T) {
validate := New()
s := "abcd123"
errs := validate.Var(s, "alphanum")
Equal(t, errs, nil)
s = "abc!23"
errs = validate.Var(s, "alphanum")
NotEqual(t, errs, nil)
AssertError(t, errs, "", "", "", "", "alphanum")
errs = validate.Var(1, "alphanum")
NotEqual(t, errs, nil)
AssertError(t, errs, "", "", "", "", "alphanum")
} | explode_data.jsonl/77327 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 174
} | [
2830,
3393,
19384,
36296,
1155,
353,
8840,
836,
8,
1476,
197,
7067,
1669,
1532,
2822,
1903,
1669,
330,
68644,
16,
17,
18,
698,
9859,
82,
1669,
9593,
87968,
1141,
11,
330,
278,
9943,
372,
1138,
197,
2993,
1155,
11,
70817,
11,
2092,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNewTestSigmoidExample(t *testing.T) {
mytest := NewTestSigmoidExample()
var model ir.ModelProto
err := proto.Unmarshal(mytest.ModelB, &model)
if err != nil {
t.Fatal(err)
}
if model.Graph == nil {
t.Fatal("graph is nil")
}
if len(model.Graph.Input) != len(mytest.Input) {
t.Fatalf("invalid test: model has %v input, but test only provide %v", len(model.Graph.Input), len(mytest.Input))
}
if len(model.Graph.Output) != len(mytest.ExpectedOutput) {
t.Fatalf("invalid test: model has %v input, but test only provide %v", len(model.Graph.Output), len(mytest.ExpectedOutput))
}
} | explode_data.jsonl/25487 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 233
} | [
2830,
3393,
3564,
2271,
50,
52111,
13314,
1155,
353,
8840,
836,
8,
341,
13624,
1944,
1669,
1532,
2271,
50,
52111,
13314,
741,
2405,
1614,
6216,
5659,
31549,
198,
9859,
1669,
18433,
38097,
13941,
1944,
5659,
33,
11,
609,
2528,
340,
743,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestVerifyChecksum(t *testing.T) {
t.Parallel()
fName := "TestVerifyChecksum"
defer cleanup(fName)
writeOldFile(fName, t)
err := Apply(bytes.NewReader(newFile), Options{
TargetPath: fName,
Checksum: newFileChecksum[:],
})
validateUpdate(fName, err, t)
} | explode_data.jsonl/73982 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 115
} | [
2830,
3393,
32627,
73190,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
1166,
675,
1669,
330,
2271,
32627,
73190,
698,
16867,
21290,
955,
675,
340,
24945,
18284,
1703,
955,
675,
11,
259,
692,
9859,
1669,
20552,
23158,
68587,
17... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestListConsumerGroupsMultiBroker(t *testing.T) {
seedBroker := NewMockBroker(t, 1)
defer seedBroker.Close()
secondBroker := NewMockBroker(t, 2)
defer secondBroker.Close()
firstGroup := "first"
secondGroup := "second"
nonExistingGroup := "non-existing-group"
seedBroker.SetHandlerByMap(map[string]MockResponse{
"MetadataRequest": NewMockMetadataResponse(t).
SetController(seedBroker.BrokerID()).
SetBroker(seedBroker.Addr(), seedBroker.BrokerID()).
SetBroker(secondBroker.Addr(), secondBroker.BrokerID()),
"ListGroupsRequest": NewMockListGroupsResponse(t).
AddGroup(firstGroup, "consumer"),
})
secondBroker.SetHandlerByMap(map[string]MockResponse{
"MetadataRequest": NewMockMetadataResponse(t).
SetController(seedBroker.BrokerID()).
SetBroker(seedBroker.Addr(), seedBroker.BrokerID()).
SetBroker(secondBroker.Addr(), secondBroker.BrokerID()),
"ListGroupsRequest": NewMockListGroupsResponse(t).
AddGroup(secondGroup, "consumer"),
})
config := NewTestConfig()
config.Version = V1_0_0_0
admin, err := NewClusterAdmin([]string{seedBroker.Addr()}, config)
if err != nil {
t.Fatal(err)
}
groups, err := admin.ListConsumerGroups()
if err != nil {
t.Fatal(err)
}
if len(groups) != 2 {
t.Fatalf("Expected %v results, got %v", 1, len(groups))
}
if _, found := groups[firstGroup]; !found {
t.Fatalf("Expected group %v to be present in result set, but it isn't", firstGroup)
}
if _, found := groups[secondGroup]; !found {
t.Fatalf("Expected group %v to be present in result set, but it isn't", secondGroup)
}
if _, found := groups[nonExistingGroup]; found {
t.Fatalf("Expected group %v to not exist, but it exists", nonExistingGroup)
}
err = admin.Close()
if err != nil {
t.Fatal(err)
}
} | explode_data.jsonl/40806 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 672
} | [
2830,
3393,
852,
29968,
22173,
20358,
65545,
1155,
353,
8840,
836,
8,
341,
197,
22602,
65545,
1669,
1532,
11571,
65545,
1155,
11,
220,
16,
340,
16867,
10320,
65545,
10421,
2822,
197,
5569,
65545,
1669,
1532,
11571,
65545,
1155,
11,
220,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func TestRuleAddDelimiter(t *testing.T) {
common.Log.Debug("Entering function: %s", common.GetFunctionName())
sqls := [][]string{
{
`use sakila
select * from film`,
`use sakila`,
`show databases`,
},
{
`use sakila;`,
},
}
for _, sql := range sqls[0] {
q, _ := NewQuery4Audit(sql)
rule := q.RuleAddDelimiter()
if rule.Item != "LIT.004" {
t.Error("Rule not match:", rule.Item, "Expect : LIT.004")
}
}
for _, sql := range sqls[1] {
q, _ := NewQuery4Audit(sql)
rule := q.RuleAddDelimiter()
if rule.Item != "OK" {
t.Error("Rule not match:", rule.Item, "Expect : OK")
}
}
common.Log.Debug("Exiting function: %s", common.GetFunctionName())
} | explode_data.jsonl/76781 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 308
} | [
2830,
3393,
11337,
2212,
91098,
1155,
353,
8840,
836,
8,
341,
83825,
5247,
20345,
445,
82867,
729,
25,
1018,
82,
497,
4185,
2234,
5152,
675,
2398,
30633,
82,
1669,
52931,
917,
515,
197,
197,
515,
298,
197,
63,
810,
77310,
10524,
198,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestVoteSignable(t *testing.T) {
vote := examplePrecommit()
signBytes := vote.SignBytes("test_chain_id")
expected, err := cdc.MarshalBinaryLengthPrefixed(CanonicalizeVote("test_chain_id", vote))
require.NoError(t, err)
require.Equal(t, expected, signBytes, "Got unexpected sign bytes for Vote.")
} | explode_data.jsonl/54531 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 103
} | [
2830,
3393,
41412,
7264,
480,
1155,
353,
8840,
836,
8,
341,
5195,
1272,
1669,
3110,
4703,
17413,
741,
69054,
7078,
1669,
6910,
41152,
7078,
445,
1944,
30583,
842,
5130,
42400,
11,
1848,
1669,
272,
7628,
37271,
21338,
4373,
29978,
3286,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReadTimeout(t *testing.T) {
c, s, err := getConnection(nil)
if err != nil {
t.Fatal(err)
}
defer c.Close()
defer s.Close()
c.SetReadDeadline(time.Now().Add(10 * time.Millisecond))
buf := make([]byte, 10)
_, err = c.Read(buf)
if err != ErrTimeout {
t.Fatalf("expected ErrTimeout, got %v", err)
}
} | explode_data.jsonl/11423 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 138
} | [
2830,
3393,
4418,
7636,
1155,
353,
8840,
836,
8,
341,
1444,
11,
274,
11,
1848,
1669,
65313,
27907,
340,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
532,
16867,
272,
10421,
741,
16867,
274,
10421,
2822,
1444,
4202,
441... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestNewInitCmd(t *testing.T) {
fs := afero.NewMemMapFs()
var tests = []struct {
name string
flags map[string]string
parameters []string
errorMessage string
}{
{name: "arguments invalid", parameters: []string{"foo"}, errorMessage: "this command does not accept arguments"},
{name: "name and version together invalid", flags: map[string]string{"kudo-image": "foo", "version": "bar"}, errorMessage: "specify either 'kudo-image' or 'version', not both"},
{name: "crd-only and wait together invalid", flags: map[string]string{"crd-only": "true", "wait": "true"}, errorMessage: "wait is not allowed with crd-only"},
{name: "wait-timeout invalid without wait", flags: map[string]string{"wait-timeout": "400"}, errorMessage: "wait-timeout is only useful when using the flag '--wait'"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
out := &bytes.Buffer{}
initCmd := newInitCmd(fs, out)
for key, value := range tt.flags {
if err := initCmd.Flags().Set(key, value); err != nil {
t.Fatal(err)
}
}
err := initCmd.RunE(initCmd, tt.parameters)
assert.EqualError(t, err, tt.errorMessage)
})
}
} | explode_data.jsonl/53643 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 438
} | [
2830,
3393,
3564,
3803,
15613,
1155,
353,
8840,
836,
8,
341,
53584,
1669,
264,
802,
78,
7121,
18816,
2227,
48300,
741,
2405,
7032,
284,
3056,
1235,
341,
197,
11609,
260,
914,
198,
197,
59516,
286,
2415,
14032,
30953,
198,
197,
67543,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestApplyAndReturnValidatorSetUpdatesNewValidator(t *testing.T) {
app, ctx, _, _ := bootstrapValidatorTest(t, 1000, 20)
params := app.StakingKeeper.GetParams(ctx)
params.MaxValidators = uint32(3)
app.StakingKeeper.SetParams(ctx, params)
powers := []int64{100, 100}
var validators [2]types.Validator
// initialize some validators into the state
for i, power := range powers {
valPubKey := PKs[i+1]
valAddr := sdk.ValAddress(valPubKey.Address().Bytes())
validators[i] = types.NewValidator(valAddr, valPubKey, types.Description{})
tokens := sdk.TokensFromConsensusPower(power)
validators[i], _ = validators[i].AddTokensFromDel(tokens)
app.StakingKeeper.SetValidator(ctx, validators[i])
app.StakingKeeper.SetValidatorByPowerIndex(ctx, validators[i])
}
// verify initial Tendermint updates are correct
updates := app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx)
require.Equal(t, len(validators), len(updates))
validators[0], _ = app.StakingKeeper.GetValidator(ctx, validators[0].OperatorAddress)
validators[1], _ = app.StakingKeeper.GetValidator(ctx, validators[1].OperatorAddress)
require.Equal(t, validators[0].ABCIValidatorUpdate(), updates[0])
require.Equal(t, validators[1].ABCIValidatorUpdate(), updates[1])
require.Equal(t, 0, len(app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx)))
// update initial validator set
for i, power := range powers {
app.StakingKeeper.DeleteValidatorByPowerIndex(ctx, validators[i])
tokens := sdk.TokensFromConsensusPower(power)
validators[i], _ = validators[i].AddTokensFromDel(tokens)
app.StakingKeeper.SetValidator(ctx, validators[i])
app.StakingKeeper.SetValidatorByPowerIndex(ctx, validators[i])
}
// add a new validator that goes from zero power, to non-zero power, back to
// zero power
valPubKey := PKs[len(validators)+1]
valAddr := sdk.ValAddress(valPubKey.Address().Bytes())
amt := sdk.NewInt(100)
validator := types.NewValidator(valAddr, valPubKey, types.Description{})
validator, _ = validator.AddTokensFromDel(amt)
app.StakingKeeper.SetValidator(ctx, validator)
validator, _ = validator.RemoveDelShares(amt.ToDec())
app.StakingKeeper.SetValidator(ctx, validator)
app.StakingKeeper.SetValidatorByPowerIndex(ctx, validator)
// add a new validator that increases in power
valPubKey = PKs[len(validators)+2]
valAddr = sdk.ValAddress(valPubKey.Address().Bytes())
validator = types.NewValidator(valAddr, valPubKey, types.Description{})
tokens := sdk.TokensFromConsensusPower(500)
validator, _ = validator.AddTokensFromDel(tokens)
app.StakingKeeper.SetValidator(ctx, validator)
app.StakingKeeper.SetValidatorByPowerIndex(ctx, validator)
// verify initial Tendermint updates are correct
updates = app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx)
validator, _ = app.StakingKeeper.GetValidator(ctx, validator.OperatorAddress)
validators[0], _ = app.StakingKeeper.GetValidator(ctx, validators[0].OperatorAddress)
validators[1], _ = app.StakingKeeper.GetValidator(ctx, validators[1].OperatorAddress)
require.Equal(t, len(validators)+1, len(updates))
require.Equal(t, validator.ABCIValidatorUpdate(), updates[0])
require.Equal(t, validators[0].ABCIValidatorUpdate(), updates[1])
require.Equal(t, validators[1].ABCIValidatorUpdate(), updates[2])
} | explode_data.jsonl/6109 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1172
} | [
2830,
3393,
28497,
3036,
5598,
14256,
1649,
37091,
3564,
14256,
1155,
353,
8840,
836,
8,
341,
28236,
11,
5635,
11,
8358,
716,
1669,
26925,
14256,
2271,
1155,
11,
220,
16,
15,
15,
15,
11,
220,
17,
15,
340,
25856,
1669,
906,
7758,
176... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestSuggestions(t *testing.T) {
th := Setup(t)
defer th.TearDown()
jira := createJiraAutocompleteData()
emptyCmdArgs := &model.CommandArgs{}
suggestions := th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "ji", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, jira.Trigger, suggestions[0].Complete)
assert.Equal(t, jira.Trigger, suggestions[0].Suggestion)
assert.Equal(t, "[command]", suggestions[0].Hint)
assert.Equal(t, jira.HelpText, suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira crea", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira create", suggestions[0].Complete)
assert.Equal(t, "create", suggestions[0].Suggestion)
assert.Equal(t, "[issue text]", suggestions[0].Hint)
assert.Equal(t, "Create a new Issue", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira c", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 2)
assert.Equal(t, "jira create", suggestions[1].Complete)
assert.Equal(t, "create", suggestions[1].Suggestion)
assert.Equal(t, "[issue text]", suggestions[1].Hint)
assert.Equal(t, "Create a new Issue", suggestions[1].Description)
assert.Equal(t, "jira connect", suggestions[0].Complete)
assert.Equal(t, "connect", suggestions[0].Suggestion)
assert.Equal(t, "[url]", suggestions[0].Hint)
assert.Equal(t, "Connect your Mattermost account to your Jira account", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira create ", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira create ", suggestions[0].Complete)
assert.Equal(t, "", suggestions[0].Suggestion)
assert.Equal(t, "[text]", suggestions[0].Hint)
assert.Equal(t, "This text is optional, will be inserted into the description field", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira create some", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira create some", suggestions[0].Complete)
assert.Equal(t, "", suggestions[0].Suggestion)
assert.Equal(t, "[text]", suggestions[0].Hint)
assert.Equal(t, "This text is optional, will be inserted into the description field", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira create some text ", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 0)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "invalid command", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 0)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira settings notifications o", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 2)
assert.Equal(t, "jira settings notifications On", suggestions[0].Complete)
assert.Equal(t, "On", suggestions[0].Suggestion)
assert.Equal(t, "Turn notifications on", suggestions[0].Hint)
assert.Equal(t, "", suggestions[0].Description)
assert.Equal(t, "jira settings notifications Off", suggestions[1].Complete)
assert.Equal(t, "Off", suggestions[1].Suggestion)
assert.Equal(t, "Turn notifications off", suggestions[1].Hint)
assert.Equal(t, "", suggestions[1].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira ", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 11)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira ", model.SYSTEM_USER_ROLE_ID)
assert.Len(t, suggestions, 9)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira create \"some issue text", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira create \"some issue text", suggestions[0].Complete)
assert.Equal(t, "", suggestions[0].Suggestion)
assert.Equal(t, "[text]", suggestions[0].Hint)
assert.Equal(t, "This text is optional, will be inserted into the description field", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira timezone ", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira timezone --zone ", suggestions[0].Complete)
assert.Equal(t, "--zone", suggestions[0].Suggestion)
assert.Equal(t, "", suggestions[0].Hint)
assert.Equal(t, "Set timezone", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira timezone --", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira timezone --zone ", suggestions[0].Complete)
assert.Equal(t, "--zone", suggestions[0].Suggestion)
assert.Equal(t, "", suggestions[0].Hint)
assert.Equal(t, "Set timezone", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira timezone --zone ", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira timezone --zone ", suggestions[0].Complete)
assert.Equal(t, "", suggestions[0].Suggestion)
assert.Equal(t, "[UTC+07:00]", suggestions[0].Hint)
assert.Equal(t, "Set timezone", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira timezone --zone bla", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 1)
assert.Equal(t, "jira timezone --zone bla", suggestions[0].Complete)
assert.Equal(t, "", suggestions[0].Suggestion)
assert.Equal(t, "[UTC+07:00]", suggestions[0].Hint)
assert.Equal(t, "Set timezone", suggestions[0].Description)
suggestions = th.App.getSuggestions(emptyCmdArgs, []*model.AutocompleteData{jira}, "", "jira timezone bla", model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 0)
commandA := &model.Command{
Trigger: "alice",
AutocompleteData: model.NewAutocompleteData("alice", "", ""),
}
commandB := &model.Command{
Trigger: "bob",
AutocompleteData: model.NewAutocompleteData("bob", "", ""),
}
commandC := &model.Command{
Trigger: "charles",
AutocompleteData: model.NewAutocompleteData("charles", "", ""),
}
suggestions = th.App.GetSuggestions(emptyCmdArgs, []*model.Command{commandB, commandC, commandA}, model.SYSTEM_ADMIN_ROLE_ID)
assert.Len(t, suggestions, 3)
assert.Equal(t, "alice", suggestions[0].Complete)
assert.Equal(t, "bob", suggestions[1].Complete)
assert.Equal(t, "charles", suggestions[2].Complete)
} | explode_data.jsonl/58280 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2417
} | [
2830,
3393,
98846,
1155,
353,
8840,
836,
8,
341,
70479,
1669,
18626,
1155,
340,
16867,
270,
836,
682,
4454,
2822,
12428,
8832,
1669,
1855,
41,
8832,
19602,
20104,
1043,
741,
197,
3194,
15613,
4117,
1669,
609,
2528,
12714,
4117,
31483,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNewFilters(t *testing.T) {
t.Run("Make single filter properly", func(t *testing.T) {
f, _ := makeNewFilterSet([]MetricFilter{
{
MetricNames: []string{
"cpu.utilization",
"memory.utilization",
},
},
})
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "cpu.utilization"}))
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "memory.utilization"}))
assert.False(t, f.Matches(&datapoint.Datapoint{Metric: "disk.utilization"}))
})
t.Run("Merges two filters properly (ORed together)", func(t *testing.T) {
f, _ := makeNewFilterSet([]MetricFilter{
{
MetricNames: []string{
"cpu.utilization",
"memory.utilization",
},
},
{
MetricNames: []string{
"disk.utilization",
},
},
})
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "cpu.utilization"}))
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "memory.utilization"}))
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "disk.utilization"}))
assert.False(t, f.Matches(&datapoint.Datapoint{Metric: "other.utilization"}))
})
t.Run("Filters can be overridden within a single filter", func(t *testing.T) {
f, _ := makeNewFilterSet([]MetricFilter{
{
MetricNames: []string{
"*.utilization",
"!memory.utilization",
"!/[a-c].*.utilization/",
},
},
{
MetricNames: []string{
"disk.utilization",
},
},
})
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "network.utilization"}))
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "disk.utilization"}))
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "other.utilization"}))
assert.False(t, f.Matches(&datapoint.Datapoint{Metric: "cpu.utilization"}))
assert.False(t, f.Matches(&datapoint.Datapoint{Metric: "memory.utilization"}))
})
t.Run("Filters respect both metric names and dimensions", func(t *testing.T) {
f, err := makeNewFilterSet([]MetricFilter{
{
MetricNames: []string{
"*.utilization",
"!memory.utilization",
},
Dimensions: map[string]interface{}{
"env": []interface{}{"prod", "dev"},
"service": []interface{}{"db"},
},
},
{
MetricNames: []string{
"disk.utilization",
},
},
{
Dimensions: map[string]interface{}{
"service": "es",
},
},
})
assert.Nil(t, err)
assert.True(t, f.Matches(&datapoint.Datapoint{Metric: "disk.utilization"}))
assert.True(t, f.Matches(&datapoint.Datapoint{
Metric: "disk.utilization",
Dimensions: map[string]string{
"env": "prod",
}}))
// No env dimension and metric name negated so not filtered
assert.False(t, f.Matches(&datapoint.Datapoint{Metric: "memory.utilization"}))
assert.False(t, f.Matches(&datapoint.Datapoint{
Metric: "memory.utilization",
Dimensions: map[string]string{
"env": "prod",
}}))
// Metric name is negated
assert.False(t, f.Matches(&datapoint.Datapoint{
Metric: "memory.utilization",
Dimensions: map[string]string{
"env": "prod",
"service": "db",
}}))
assert.True(t, f.Matches(&datapoint.Datapoint{
Metric: "cpu.utilization",
Dimensions: map[string]string{
"env": "prod",
"service": "db",
}}))
// One dimension missing
assert.False(t, f.Matches(&datapoint.Datapoint{
Metric: "cpu.utilization",
Dimensions: map[string]string{
"env": "prod",
}}))
assert.False(t, f.Matches(&datapoint.Datapoint{Metric: "cpu.utilization"}))
// Matches by dimension only
assert.True(t, f.Matches(&datapoint.Datapoint{
Metric: "random.metric",
Dimensions: map[string]string{
"service": "es",
}}))
})
} | explode_data.jsonl/55630 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1612
} | [
2830,
3393,
3564,
28351,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
8078,
3175,
4051,
10277,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
1166,
11,
716,
1669,
1281,
3564,
5632,
1649,
10556,
54310,
5632,
515,
298,
197,
515,
571,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTooManyPositional(t *testing.T) {
var args struct {
Input string `arg:"positional"`
Output string `arg:"positional"`
}
err := parse("foo bar baz", &args)
assert.Error(t, err)
} | explode_data.jsonl/13004 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 74
} | [
2830,
3393,
31246,
8441,
3812,
278,
1155,
353,
8840,
836,
8,
341,
2405,
2827,
2036,
341,
197,
66588,
220,
914,
1565,
858,
2974,
966,
3005,
8805,
197,
80487,
914,
1565,
858,
2974,
966,
3005,
8805,
197,
532,
9859,
1669,
4715,
445,
7975,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.