text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestFormatterBlink(test *testing.T) { formatted, err := formatter.Format("{blink}text{blink | off}") assert.NoError(test, err) assert.Equal(test, "\x1b[5mtext\x1b[25m", formatted) }
explode_data.jsonl/39773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 14183, 33, 2080, 8623, 353, 8840, 836, 8, 341, 37410, 12127, 11, 1848, 1669, 24814, 9978, 13976, 54316, 92, 1318, 90, 54316, 760, 1007, 55266, 6948, 35699, 8623, 11, 1848, 340, 6948, 12808, 8623, 11, 2917, 87, 16, 65, 58, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestRun(t *testing.T) { s := getScheduler() defer s.Stop() intl := 1 * time.Second s.Enter(&TestCheck{intl: intl}) s.Run() assert.Equal(t, uint32(1), s.running) assert.True(t, s.jobQueues[intl].running) // Calling Run again should be a non blocking, noop procedure s.Run() }
explode_data.jsonl/23205
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 6727, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 633, 38878, 741, 16867, 274, 30213, 2822, 2084, 75, 1669, 220, 16, 353, 882, 32435, 198, 1903, 52267, 2099, 2271, 3973, 90, 97642, 25, 95015, 3518, 1903, 16708, 741, 6948, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKVPutWithIgnoreLease(t *testing.T) { defer testutil.AfterTest(t) clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 1}) defer clus.Terminate(t) kv := clus.RandClient() lapi := clus.RandClient() resp, err := lapi.Grant(context.Background(), 10) if err != nil { t.Errorf("failed to create lease %v", err) } if _, err := kv.Put(context.TODO(), "zoo", "bar", clientv3.WithIgnoreLease()); err != rpctypes.ErrKeyNotFound { t.Fatalf("err expected %v, got %v", rpctypes.ErrKeyNotFound, err) } if _, err := kv.Put(context.TODO(), "zoo", "bar", clientv3.WithLease(resp.ID)); err != nil { t.Fatal(err) } if _, err := kv.Put(context.TODO(), "zoo", "bar1", clientv3.WithIgnoreLease()); err != nil { t.Fatal(err) } rr, rerr := kv.Get(context.TODO(), "zoo") if rerr != nil { t.Fatal(rerr) } if len(rr.Kvs) != 1 { t.Fatalf("len(rr.Kvs) expected 1, got %d", len(rr.Kvs)) } if rr.Kvs[0].Lease != int64(resp.ID) { t.Fatalf("lease expected %v, got %v", resp.ID, rr.Kvs[0].Lease) } }
explode_data.jsonl/16400
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 470 }
[ 2830, 3393, 42, 13378, 332, 2354, 12497, 2304, 519, 1155, 353, 8840, 836, 8, 341, 16867, 1273, 1314, 36892, 2271, 1155, 692, 197, 4163, 1669, 17590, 7121, 28678, 53, 18, 1155, 11, 609, 60168, 72883, 2648, 90, 1695, 25, 220, 16, 3518, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestGetIngressInformation(t *testing.T) { validIngress := &ingress.Ingress{} invalidIngress := "wrongtype" validPath := "/ok" invalidPath := 10 info := getIngressInformation(invalidIngress, validPath) expected := &ingressInformation{} if !info.Equal(expected) { t.Errorf("Expected %v, but got %v", expected, info) } info = getIngressInformation(validIngress, invalidPath) if !info.Equal(expected) { t.Errorf("Expected %v, but got %v", expected, info) } // Setup Ingress Resource validIngress.Namespace = "default" validIngress.Name = "validIng" validIngress.Annotations = map[string]string{ "ingress.annotation": "ok", } validIngress.Spec.Backend = &extensions.IngressBackend{ ServiceName: "a-svc", } info = getIngressInformation(validIngress, validPath) expected = &ingressInformation{ Namespace: "default", Rule: "validIng", Annotations: map[string]string{ "ingress.annotation": "ok", }, Service: "a-svc", } if !info.Equal(expected) { t.Errorf("Expected %v, but got %v", expected, info) } validIngress.Spec.Backend = nil validIngress.Spec.Rules = []extensions.IngressRule{ { IngressRuleValue: extensions.IngressRuleValue{ HTTP: &extensions.HTTPIngressRuleValue{ Paths: []extensions.HTTPIngressPath{ { Path: "/ok", Backend: extensions.IngressBackend{ ServiceName: "b-svc", }, }, }, }, }, }, {}, } info = getIngressInformation(validIngress, validPath) expected = &ingressInformation{ Namespace: "default", Rule: "validIng", Annotations: map[string]string{ "ingress.annotation": "ok", }, Service: "b-svc", } if !info.Equal(expected) { t.Errorf("Expected %v, but got %v", expected, info) } }
explode_data.jsonl/80599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 717 }
[ 2830, 3393, 1949, 641, 2483, 14873, 1155, 353, 8840, 836, 8, 341, 56322, 641, 2483, 1669, 609, 287, 673, 5337, 2483, 16094, 197, 11808, 641, 2483, 1669, 330, 34870, 1313, 698, 56322, 1820, 1669, 3521, 562, 698, 197, 11808, 1820, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSplit(t *testing.T) { g := got.T(t) check := func(in string, expect ...string) { g.Helper() out := diff.Split(strings.Repeat("\t", 100) + in)[100:] g.Eq(out, expect) } check("find a place to eat 热干面", "find", " ", "a", " ", "place", " ", "to", " ", "eat", " ", "热", "干", "面") check(" as.Equal(arr, arr) test", " ", "as", ".", "Equal", "(", "arr", ",", " ", "arr", ")", " ", "test") check("English-Words紧接着中文", "English", "-", "Words", "紧", "接", "着", "中", "文") check("123456test12345", "123", "456", "test", "123", "45") check("WordVeryVeryVeryVeryVeryVeryVerylong", "WordVeryVery", "VeryVeryVery", "VeryVerylong") }
explode_data.jsonl/77092
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 20193, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 2684, 836, 1155, 692, 25157, 1669, 2915, 5900, 914, 11, 1720, 2503, 917, 8, 341, 197, 3174, 69282, 741, 197, 13967, 1669, 3638, 19823, 51442, 2817, 10979, 4921, 83, 497, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetchRuleSetGroups(t *testing.T) { server := testRuleSetGroupServer() defer server.Close() ac := &Config{ TokenKey: "abc123", TokenApp: "test", URL: server.URL, } apih, err := NewAPI(ac) if err != nil { t.Errorf("Expected no error, got '%v'", err) } rulesets, err := apih.FetchRuleSetGroups() if err != nil { t.Fatalf("Expected no error, got '%v'", err) } actualType := reflect.TypeOf(rulesets) expectedType := "*[]api.RuleSetGroup" if actualType.String() != expectedType { t.Fatalf("Expected %s, got %s", expectedType, actualType.String()) } }
explode_data.jsonl/16625
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 20714, 11337, 1649, 22173, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 1273, 11337, 1649, 2808, 5475, 741, 16867, 3538, 10421, 2822, 81200, 1669, 609, 2648, 515, 197, 33299, 1592, 25, 330, 13683, 16, 17, 18, 756, 197, 33299, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDisabledLimit(t *testing.T) { limiters := NewFakeLimiters() // DisabledLimit() does not do rate limiting and should succeed. m := DisabledLimit() require.NoError(t, m.RateLimit(context.Background(), 99)) // There should be no rate limiters configured as DisabledLimit() doesn't use one. assert.Equal(t, 0, limiters.Count) }
explode_data.jsonl/5205
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 25907, 16527, 1155, 353, 8840, 836, 8, 341, 8810, 67645, 1669, 1532, 52317, 16527, 388, 2822, 197, 322, 58206, 16527, 368, 1558, 537, 653, 4379, 32894, 323, 1265, 11996, 624, 2109, 1669, 58206, 16527, 741, 17957, 35699, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogStore_FirstIndex(t *testing.T) { ls, err := newLogStore() if err != nil { t.Error(err) t.Fail() return } if index, err := ls.FirstIndex(); err != nil { t.Error(err) t.Fail() return } else { if index != 0 { t.Error("first index should be 0") t.Fail() return } } }
explode_data.jsonl/59210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 2201, 6093, 79790, 1552, 1155, 353, 8840, 836, 8, 341, 197, 4730, 11, 1848, 1669, 501, 2201, 6093, 741, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 3244, 57243, 741, 197, 853, 198, 197, 630, 743, 1922, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCodecEncode(t *testing.T) { sourceUrl, _ := url.Parse("http://example.com/source") source := &types.URLRef{URL: *sourceUrl} testCases := map[string]struct { codec amqp.Codec event cloudevents.Event want *amqp.Message wantErr error }{ "simple v02 structured": { codec: amqp.Codec{Encoding: amqp.StructuredV02}, event: cloudevents.Event{ Context: cloudevents.EventContextV02{ Type: "com.example.test", Source: *source, ID: "ABC-123", }.AsV02(), }, want: &amqp.Message{ ContentType: "application/cloudevents+json", Body: func() []byte { body := map[string]interface{}{ "contenttype": "application/json", "specversion": "0.2", "id": "ABC-123", "type": "com.example.test", "source": "http://example.com/source", } return toBytes(body) }(), }, }, "simple v03 structured": { codec: amqp.Codec{Encoding: amqp.StructuredV03}, event: cloudevents.Event{ Context: cloudevents.EventContextV03{ Type: "com.example.test", Source: *source, ID: "ABC-123", Subject: strptr("a-subject"), }.AsV03(), }, want: &amqp.Message{ ContentType: "application/cloudevents+json", Body: func() []byte { body := map[string]interface{}{ "datacontenttype": "application/json", "specversion": "0.3", "id": "ABC-123", "type": "com.example.test", "source": "http://example.com/source", "subject": "a-subject", } return toBytes(body) }(), }, }, "simple v02 binary": { codec: amqp.Codec{Encoding: amqp.BinaryV02}, event: cloudevents.Event{ Context: &cloudevents.EventContextV02{ Type: "com.example.test", Source: *source, ID: "ABC-123", }, }, want: &amqp.Message{ ContentType: "application/json", ApplicationProperties: map[string]interface{}{ "cloudEvents:specversion": "0.2", "cloudEvents:id": "ABC-123", "cloudEvents:type": "com.example.test", "cloudEvents:source": "http://example.com/source", }, }, }, "simple v03 binary": { codec: amqp.Codec{Encoding: amqp.BinaryV03}, event: cloudevents.Event{ Context: &cloudevents.EventContextV03{ Type: "com.example.test", Source: *source, ID: "ABC-123", Subject: strptr("a-subject"), }, 
}, want: &amqp.Message{ ContentType: "application/json", ApplicationProperties: map[string]interface{}{ "cloudEvents:specversion": "0.3", "cloudEvents:id": "ABC-123", "cloudEvents:type": "com.example.test", "cloudEvents:source": "http://example.com/source", "cloudEvents:subject": "a-subject", }, }, }, } for n, tc := range testCases { t.Run(n, func(t *testing.T) { got, err := tc.codec.Encode(context.TODO(), tc.event) if tc.wantErr != nil || err != nil { if diff := cmp.Diff(tc.wantErr, err); diff != "" { t.Errorf("unexpected error (-want, +got) = %v", diff) } return } if diff := cmp.Diff(tc.want, got); diff != "" { if msg, ok := got.(*amqp.Message); ok { // It is hard to read the byte dump want := string(tc.want.Body) got := string(msg.Body) if diff := cmp.Diff(want, got); diff != "" { t.Errorf("unexpected (-want, +got) = %v", diff) return } } t.Errorf("unexpected message (-want, +got) = %v", diff) } }) } }
explode_data.jsonl/10761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1724 }
[ 2830, 3393, 36913, 32535, 1155, 353, 8840, 836, 8, 341, 47418, 2864, 11, 716, 1669, 2515, 8937, 445, 1254, 1110, 8687, 905, 54373, 1138, 47418, 1669, 609, 9242, 20893, 3945, 90, 3144, 25, 353, 2427, 2864, 630, 18185, 37302, 1669, 2415, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_addBlobsFromBlobsJSONToState_blobLookup(t *testing.T) { tests := []struct { name string pkgPath string blobMap map[string]*Blob blob BlobFromJSON expectedPathInTree string }{ { "Adding non config-data blob", "path/to/pkg/non-config-data", map[string]*Blob{"hash": {size: 1, dep: []string{"not used"}}}, BlobFromJSON{Path: "data/test/foo.txt", Merkle: "hash"}, "path/to/pkg/non-config-data", }, { "Adding config-data blob meta far", "path/to/pkg/config-data", map[string]*Blob{"hash": {size: 1, dep: []string{"not used"}}}, BlobFromJSON{Path: "meta/", Merkle: "hash"}, "path/to/pkg/config-data", }, { "Adding config-data blob", "path/to/pkg/config-data", map[string]*Blob{"hash": {size: 1, dep: []string{"not used"}}}, BlobFromJSON{Path: "data/test/foo.txt", Merkle: "hash"}, "path/to/pkg/config-data/test/foo.txt", }, } var dummyMap map[string]*Node for _, test := range tests { t.Run(test.name, func(t *testing.T) { root := newDummyNode() st := processingState{ test.blobMap, dummyMap, dummyMap, root, } addBlobsFromBlobsJSONToState(&st, []BlobFromJSON{test.blob}, test.pkgPath) expectedNode := root.detachByPath(test.expectedPathInTree) if expectedNode == nil { t.Fatalf("tree.detachByPath(%s) returns nil; expect to find a node", test.expectedPathInTree) } expectedSize := test.blobMap[test.blob.Merkle].size if expectedNode.size != expectedSize { t.Fatalf("tree.detachByPath(%s).size returns %d; expect %d", test.expectedPathInTree, expectedNode.size, expectedSize) } }) } }
explode_data.jsonl/12618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 752 }
[ 2830, 3393, 2891, 33, 68164, 3830, 33, 68164, 5370, 1249, 1397, 45908, 34247, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 2290, 914, 198, 197, 3223, 7351, 1820, 310, 914, 198, 197, 2233, 1684, 2227, 310, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnaryServerInterceptorWithoutRequestId(t *testing.T) { handler := func(ctx context.Context, req interface{}) (interface{}, error) { reqID, exists := FromContext(ctx) if exists && reqID == "" { t.Errorf("requestId must be generated by interceptor") } return &testResponse{}, nil } ctx := DummyContextWithServerTransportStream() _, err := UnaryServerInterceptor()(ctx, testRequest{}, nil, handler) if err != nil { t.Fatalf("unexpected error: %v", err) } }
explode_data.jsonl/74062
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 164 }
[ 2830, 3393, 94545, 5475, 32786, 26040, 61774, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 2915, 7502, 2266, 9328, 11, 4232, 3749, 28875, 320, 4970, 22655, 1465, 8, 341, 197, 24395, 915, 11, 6724, 1669, 5542, 1972, 7502, 340, 197, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestError(t *testing.T) { tests := []struct { err error check func(error) bool want bool }{ {sql.ErrNoRows, zdb.ErrNoRows, true}, {fmt.Errorf("x: %w", sql.ErrNoRows), zdb.ErrNoRows, true}, {errors.New("X"), zdb.ErrNoRows, false}, } for i, tt := range tests { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { out := tt.check(tt.err) if out != tt.want { t.Errorf("out: %t; want: %t", out, tt.want) } }) } }
explode_data.jsonl/57559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 1454, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 9859, 256, 1465, 198, 197, 25157, 2915, 6390, 8, 1807, 198, 197, 50780, 220, 1807, 198, 197, 59403, 197, 197, 90, 3544, 27862, 2753, 9024, 11, 1147, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestToken_lexSymbol(t *testing.T) { tests := []struct { symbol bool value string }{ { symbol: true, value: "= ", }, { symbol: true, value: "||", }, } for _, test := range tests { tok, _, ok := lexSymbol(test.value, cursor{}) assert.Equal(t, test.symbol, ok, test.value) if ok { test.value = strings.TrimSpace(test.value) assert.Equal(t, test.value, tok.Value, test.value) } } }
explode_data.jsonl/60001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 3323, 74547, 15090, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 1903, 4001, 1807, 198, 197, 16309, 220, 914, 198, 197, 59403, 197, 197, 515, 298, 1903, 4001, 25, 830, 345, 298, 16309, 25, 220, 37827, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAnnotationsWithCustomSchemaFailure(t *testing.T) { schema := &KubeValidatorConfigSchema{ Version: "1.6.0", } var schemas []*KubeValidatorConfigSchema schemas = append(schemas, schema) candidate := NewCandidate( &Context{ Event: &github.CheckSuiteEvent{}, }, &github.CommitFile{ BlobURL: github.String("https://github.com/octocat/Hello-World/blob/837db83be4137ca555d9a5598d0a1ea2987ecfee/volumeerror.yaml"), Filename: github.String("volumeerror.yaml"), }, schemas) filePath, _ := filepath.Abs("../fixtures/invalid/1.6.0/volumeerror.yaml") fileContents, _ := ioutil.ReadFile(filePath) candidate.setBytes(&fileContents) annotations := candidate.Validate() want := []*github.CheckRunAnnotation{{ Path: github.String("volumeerror.yaml"), BlobHRef: github.String("https://github.com/octocat/Hello-World/blob/837db83be4137ca555d9a5598d0a1ea2987ecfee/volumeerror.yaml"), StartLine: github.Int(1), EndLine: github.Int(1), AnnotationLevel: github.String("failure"), Title: github.String("Error validating VolumeError against 1.6.0 schema"), Message: github.String("1 error occurred:\n\t* Problem loading schema from the network at https://raw.githubusercontent.com/garethr/kubernetes-json-schema/master/v1.6.0-standalone-strict/volumeerror.json: Could not read schema from HTTP, response status is 404 Not Found\n\n"), }} if len(annotations) != len(want) { t.Errorf("a total of %d annotations were returned, wanted %d", len(annotations), len(want)) } for i, annotation := range annotations { if diff := deep.Equal(annotation, want[i]); diff != nil { t.Error(diff) } } }
explode_data.jsonl/53896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 650 }
[ 2830, 3393, 21418, 2354, 10268, 8632, 17507, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 609, 42, 3760, 14256, 2648, 8632, 515, 197, 77847, 25, 330, 16, 13, 21, 13, 15, 756, 197, 532, 2405, 61800, 29838, 42, 3760, 14256, 2648, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInterfaceToInt(t *testing.T) { var out int var ok bool out, ok = InterfaceToInt(int(1)) require.True(t, ok) require.Equal(t, int(1), out) _, ok = InterfaceToInt(float32(2)) require.False(t, ok) _, ok = InterfaceToInt("test") require.False(t, ok) }
explode_data.jsonl/52294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 5051, 38544, 1155, 353, 8840, 836, 8, 341, 2405, 700, 526, 198, 2405, 5394, 1807, 271, 13967, 11, 5394, 284, 20019, 38544, 1548, 7, 16, 1171, 17957, 32443, 1155, 11, 5394, 340, 17957, 12808, 1155, 11, 526, 7, 16, 701, 70...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFloat32Slice(t *testing.T) { val := float32(1) m := map[string]interface{}{"value": []float32{val}, "nothing": nil} assert.Equal(t, val, New(m).Get("value").Float32Slice()[0]) assert.Equal(t, val, New(m).Get("value").MustFloat32Slice()[0]) assert.Equal(t, []float32(nil), New(m).Get("nothing").Float32Slice()) assert.Equal(t, val, New(m).Get("nothing").Float32Slice([]float32{float32(1)})[0]) assert.Panics(t, func() { New(m).Get("nothing").MustFloat32Slice() }) }
explode_data.jsonl/23505
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 5442, 18, 17, 33236, 1155, 353, 8840, 836, 8, 1476, 19302, 1669, 2224, 18, 17, 7, 16, 340, 2109, 1669, 2415, 14032, 31344, 6257, 4913, 957, 788, 3056, 3649, 18, 17, 90, 831, 2137, 330, 41212, 788, 2092, 532, 6948, 12808,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateS3BucketReplicationDestinationStorageClass(t *testing.T) { validStorageClass := []string{ s3.StorageClassStandard, s3.StorageClassStandardIa, s3.StorageClassReducedRedundancy, } for _, v := range validStorageClass { _, errors := validateS3BucketReplicationDestinationStorageClass(v, "storage_class") if len(errors) != 0 { t.Fatalf("%q should be valid storage class: %q", v, errors) } } invalidStorageClass := []string{ "FOO", "1234", } for _, v := range invalidStorageClass { _, errors := validateS3BucketReplicationDestinationStorageClass(v, "storage_class") if len(errors) == 0 { t.Fatalf("%q should be invalid storage class", v) } } }
explode_data.jsonl/78578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 17926, 50, 18, 36018, 18327, 1693, 33605, 5793, 1957, 1155, 353, 8840, 836, 8, 341, 56322, 5793, 1957, 1669, 3056, 917, 515, 197, 1903, 18, 43771, 1957, 19781, 345, 197, 1903, 18, 43771, 1957, 19781, 40, 64, 345, 197, 1903...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestBuilds(t *testing.T) { testClient(t, func(e *cli.Engine, i *mocksdk.Interface) { b1 := structs.Builds{ *fxBuild(), *fxBuildRunning(), *fxBuildFailed(), } i.On("BuildList", "app1", structs.BuildListOptions{}).Return(b1, nil) res, err := testExecute(e, "builds -a app1", nil) require.NoError(t, err) require.Equal(t, 0, res.Code) res.RequireStderr(t, []string{""}) res.RequireStdout(t, []string{ "ID STATUS RELEASE STARTED ELAPSED DESCRIPTION", "build1 complete release1 2 days ago 2m0s desc ", "build4 running 2 days ago ", "build3 failed 2 days ago ", }) }) }
explode_data.jsonl/65789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 361 }
[ 2830, 3393, 11066, 82, 1155, 353, 8840, 836, 8, 341, 18185, 2959, 1155, 11, 2915, 2026, 353, 19521, 54424, 11, 600, 353, 16712, 51295, 41065, 8, 341, 197, 2233, 16, 1669, 62845, 25212, 82, 515, 298, 197, 9, 8298, 11066, 3148, 298, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCollection_InsertMany(t *testing.T) { ast := require.New(t) cli := initClient("test") defer cli.Close(context.Background()) defer cli.DropCollection(context.Background()) cli.EnsureIndexes(context.Background(), []string{"name"}, nil) var err error newDocs := []UserInfo{{Id: NewObjectID(), Name: "Alice", Age: 10}, {Id: NewObjectID(), Name: "Lucas", Age: 11}} res, err := cli.InsertMany(context.Background(), newDocs) ast.NoError(err) ast.NotEmpty(res) ast.Equal(2, len(res.InsertedIDs)) newPDocs := []*UserInfo{{Id: NewObjectID(), Name: "Alice03", Age: 10}, {Id: NewObjectID(), Name: "Lucas03", Age: 11}} res, err = cli.InsertMany(context.Background(), newPDocs) ast.NoError(err) ast.NotEmpty(res) ast.Equal(2, len(res.InsertedIDs)) docs2 := []UserInfo{ {Name: "Alice"}, {Name: "Lucas"}, } opts := options.InsertManyOptions{} opts.InsertManyOptions = officialOpts.InsertMany().SetBypassDocumentValidation(true) res, err = cli.InsertMany(context.Background(), docs2, opts) ast.Equal(true, IsDup(err)) ast.Equal(0, len(res.InsertedIDs)) docs4 := []UserInfo{} res, err = cli.InsertMany(context.Background(), []interface{}{docs4}) ast.Error(err) ast.Empty(res) }
explode_data.jsonl/18373
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 458 }
[ 2830, 3393, 6482, 76417, 8441, 1155, 353, 8840, 836, 8, 341, 88836, 1669, 1373, 7121, 1155, 340, 86448, 1669, 2930, 2959, 445, 1944, 1138, 16867, 21348, 10421, 5378, 19047, 2398, 16867, 21348, 21688, 6482, 5378, 19047, 2398, 86448, 22834, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMachinePoolGetNodeReference(t *testing.T) { r := &MachinePoolReconciler{ Client: fake.NewClientBuilder().Build(), recorder: record.NewFakeRecorder(32), } nodeList := []client.Object{ &corev1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: "node-1", }, Spec: corev1.NodeSpec{ ProviderID: "aws://us-east-1/id-node-1", }, }, &corev1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: "node-2", }, Spec: corev1.NodeSpec{ ProviderID: "aws://us-west-2/id-node-2", }, }, &corev1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: "gce-node-2", }, Spec: corev1.NodeSpec{ ProviderID: "gce://us-central1/gce-id-node-2", }, }, &corev1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: "azure-node-4", }, Spec: corev1.NodeSpec{ ProviderID: "azure://westus2/id-node-4", }, }, } client := fake.NewClientBuilder().WithObjects(nodeList...).Build() testCases := []struct { name string providerIDList []string expected *getNodeReferencesResult err error }{ { name: "valid provider id, valid aws node", providerIDList: []string{"aws://us-east-1/id-node-1"}, expected: &getNodeReferencesResult{ references: []corev1.ObjectReference{ {Name: "node-1"}, }, }, }, { name: "valid provider id, valid aws node", providerIDList: []string{"aws://us-west-2/id-node-2"}, expected: &getNodeReferencesResult{ references: []corev1.ObjectReference{ {Name: "node-2"}, }, }, }, { name: "valid provider id, valid gce node", providerIDList: []string{"gce://us-central1/gce-id-node-2"}, expected: &getNodeReferencesResult{ references: []corev1.ObjectReference{ {Name: "gce-node-2"}, }, }, }, { name: "valid provider id, valid azure node", providerIDList: []string{"azure://westus2/id-node-4"}, expected: &getNodeReferencesResult{ references: []corev1.ObjectReference{ {Name: "azure-node-4"}, }, }, }, { name: "valid provider ids, valid azure and aws nodes", providerIDList: []string{"aws://us-east-1/id-node-1", "azure://westus2/id-node-4"}, expected: &getNodeReferencesResult{ references: []corev1.ObjectReference{ {Name: "node-1"}, {Name: "azure-node-4"}, 
}, }, }, { name: "valid provider id, no node found", providerIDList: []string{"aws:///id-node-100"}, expected: nil, err: errNoAvailableNodes, }, { name: "no provider id, no node found", providerIDList: []string{}, expected: &getNodeReferencesResult{ references: []corev1.ObjectReference{}, available: 0, ready: 0, }, }, } for _, test := range testCases { t.Run(test.name, func(t *testing.T) { g := NewWithT(t) result, err := r.getNodeReferences(ctx, client, test.providerIDList) if test.err == nil { g.Expect(err).To(BeNil()) } else { g.Expect(err).NotTo(BeNil()) g.Expect(err).To(Equal(test.err), "Expected error %v, got %v", test.err, err) } if test.expected == nil && len(result.references) == 0 { return } g.Expect(len(result.references)).To(Equal(len(test.expected.references)), "Expected NodeRef count to be %v, got %v", len(result.references), len(test.expected.references)) for n := range test.expected.references { g.Expect(result.references[n].Name).To(Equal(test.expected.references[n].Name), "Expected NodeRef's name to be %v, got %v", result.references[n].Name, test.expected.references[n].Name) g.Expect(result.references[n].Namespace).To(Equal(test.expected.references[n].Namespace), "Expected NodeRef's namespace to be %v, got %v", result.references[n].Namespace, test.expected.references[n].Namespace) } }) } }
explode_data.jsonl/71245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1753 }
[ 2830, 3393, 21605, 10551, 1949, 1955, 8856, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 609, 21605, 10551, 693, 40446, 5769, 515, 197, 71724, 25, 256, 12418, 7121, 2959, 3297, 1005, 11066, 3148, 197, 67904, 1358, 25, 3255, 7121, 52317, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIdleTimeout(t *testing.T) { t.Parallel() sessionManager := New() sessionManager.IdleTimeout = 200 * time.Millisecond sessionManager.Lifetime = time.Second mux := http.NewServeMux() mux.HandleFunc("/put", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { sessionManager.Put(r.Context(), "foo", "bar") })) mux.HandleFunc("/get", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { v := sessionManager.Get(r.Context(), "foo") if v == nil { http.Error(w, "foo does not exist in session", 500) return } w.Write([]byte(v.(string))) })) ts := newTestServer(t, sessionManager.LoadAndSave(mux)) defer ts.Close() ts.execute(t, "/put") time.Sleep(100 * time.Millisecond) ts.execute(t, "/get") time.Sleep(150 * time.Millisecond) _, body := ts.execute(t, "/get") if body != "bar" { t.Errorf("want %q; got %q", "bar", body) } time.Sleep(200 * time.Millisecond) _, body = ts.execute(t, "/get") if body != "foo does not exist in session\n" { t.Errorf("want %q; got %q", "foo does not exist in session\n", body) } }
explode_data.jsonl/453
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 432 }
[ 2830, 3393, 41370, 7636, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 25054, 2043, 1669, 1532, 741, 25054, 2043, 6444, 273, 7636, 284, 220, 17, 15, 15, 353, 882, 71482, 198, 25054, 2043, 1214, 28515, 284, 882, 32435, 271, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPodReadyCount_GetReadyPodsCount(t *testing.T) { equateErrorMessage := cmp.Comparer(func(x, y error) bool { if x == nil || y == nil { return x == nil && y == nil } return x.Error() == y.Error() }) var tests = []struct { description string expected int64 expectedErr error podLister corelisters.PodLister namespace string selector labels.Selector }{ { "Fail to get pods", 0, errors.New("unable to get pods while calculating replica count: fail to get pods"), &fake.PodLister{ PodsReactor: func(namespace string) corelisters.PodNamespaceLister { return &fake.PodNamespaceLister{ ListReactor: func(selector labels.Selector) (ret []*corev1.Pod, err error) { return nil, errors.New("fail to get pods") }, } }, }, "test-namespace", nil, }, { "0 pods, success", 0, nil, &fake.PodLister{ PodsReactor: func(namespace string) corelisters.PodNamespaceLister { return &fake.PodNamespaceLister{ ListReactor: func(selector labels.Selector) (ret []*corev1.Pod, err error) { return []*corev1.Pod{}, nil }, } }, }, "test-namespace", nil, }, { "1 ready pod, success", 1, nil, &fake.PodLister{ PodsReactor: func(namespace string) corelisters.PodNamespaceLister { return &fake.PodNamespaceLister{ ListReactor: func(selector labels.Selector) (ret []*corev1.Pod, err error) { return []*corev1.Pod{ { Status: corev1.PodStatus{ Phase: corev1.PodRunning, Conditions: []corev1.PodCondition{ { Type: corev1.PodReady, Status: corev1.ConditionTrue, }, }, }, }, }, nil }, } }, }, "test-namespace", nil, }, { "1 not ready pod, success", 0, nil, &fake.PodLister{ PodsReactor: func(namespace string) corelisters.PodNamespaceLister { return &fake.PodNamespaceLister{ ListReactor: func(selector labels.Selector) (ret []*corev1.Pod, err error) { return []*corev1.Pod{ { Status: corev1.PodStatus{ Phase: corev1.PodRunning, Conditions: []corev1.PodCondition{ { Type: corev1.PodReady, Status: corev1.ConditionFalse, }, }, }, }, }, nil }, } }, }, "test-namespace", nil, }, { "2 ready pods, 2 not ready pods, success", 2, nil, 
&fake.PodLister{ PodsReactor: func(namespace string) corelisters.PodNamespaceLister { return &fake.PodNamespaceLister{ ListReactor: func(selector labels.Selector) (ret []*corev1.Pod, err error) { return []*corev1.Pod{ { Status: corev1.PodStatus{ Phase: corev1.PodRunning, Conditions: []corev1.PodCondition{ { Type: corev1.PodReady, Status: corev1.ConditionTrue, }, }, }, }, { Status: corev1.PodStatus{ Phase: corev1.PodRunning, Conditions: []corev1.PodCondition{ { Type: corev1.PodReady, Status: corev1.ConditionTrue, }, }, }, }, { Status: corev1.PodStatus{ Phase: corev1.PodRunning, Conditions: []corev1.PodCondition{ { Type: corev1.PodReady, Status: corev1.ConditionFalse, }, }, }, }, { Status: corev1.PodStatus{ Phase: corev1.PodRunning, Conditions: []corev1.PodCondition{ { Type: corev1.PodReady, Status: corev1.ConditionFalse, }, }, }, }, }, nil }, } }, }, "test-namespace", nil, }, } for _, test := range tests { t.Run(test.description, func(t *testing.T) { podReadyCounter := &podutil.PodReadyCount{ PodLister: test.podLister, } readyPods, err := podReadyCounter.GetReadyPodsCount(test.namespace, test.selector) if !cmp.Equal(&err, &test.expectedErr, equateErrorMessage) { t.Errorf("error mismatch (-want +got):\n%s", cmp.Diff(test.expectedErr, err, equateErrorMessage)) return } if !cmp.Equal(test.expected, readyPods) { t.Errorf("ready pods mismatch (-want +got):\n%s", cmp.Diff(test.expected, readyPods)) } }) } }
explode_data.jsonl/50897
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2393 }
[ 2830, 3393, 23527, 19202, 2507, 13614, 19202, 23527, 82, 2507, 1155, 353, 8840, 836, 8, 341, 7727, 19137, 21349, 1669, 26089, 2961, 61119, 18552, 2075, 11, 379, 1465, 8, 1807, 341, 197, 743, 856, 621, 2092, 1369, 379, 621, 2092, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEnumerationNotCoveringResourceNameEmpty(t *testing.T) { escalationTest{ ownerRules: []authorizationapi.PolicyRule{ {Verbs: sets.NewString("get"), Resources: sets.NewString("pods"), ResourceNames: sets.NewString("foo", "bar")}, }, servantRules: []authorizationapi.PolicyRule{ {Verbs: sets.NewString("get"), Resources: sets.NewString("pods"), ResourceNames: sets.NewString()}, }, expectedCovered: false, expectedUncoveredRules: []authorizationapi.PolicyRule{ {Verbs: sets.NewString("get"), Resources: sets.NewString("pods")}, }, }.test(t) }
explode_data.jsonl/9060
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 67117, 2623, 30896, 287, 4783, 675, 3522, 1155, 353, 8840, 836, 8, 341, 80629, 278, 367, 2271, 515, 197, 197, 8118, 26008, 25, 3056, 39554, 2068, 1069, 8018, 11337, 515, 298, 197, 90, 10141, 1279, 25, 7289, 7121, 703, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFuncs_NoFuncs(t *testing.T) { var src = `package foo` opts := &ParserOptions{ Src: []byte(src), } parser, err := NewParser(opts) if err != nil { t.Fatal(err) } funcs := parser.Funcs() if len(funcs) != 0 { t.Errorf("There should be no functions, but got %d", len(funcs)) } }
explode_data.jsonl/60365
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 9626, 82, 36989, 9626, 82, 1155, 353, 8840, 836, 8, 341, 2405, 2286, 284, 1565, 1722, 15229, 19324, 64734, 1669, 609, 6570, 3798, 515, 197, 7568, 1287, 25, 3056, 3782, 14705, 1326, 197, 532, 55804, 11, 1848, 1669, 1532, 65...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBasic(t *testing.T) { lp := listPayload{ WhiteList: []string{"10.10.11.2", "10.10.11.3", "9.9.9.9/28"}, } ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { out, err := yaml.Marshal(lp) if err != nil { w.WriteHeader(http.StatusInternalServerError) return } w.Write(out) })) defer ts.Close() config := AdapterConfig{ ProviderURL: ts.URL, RefreshInterval: time.Second, TimeToLive: time.Second * 10, } a, err := newAdapter(&config) if err != nil { t.Error("unable to create adapter " + err.Error()) } var ok bool ok, err = a.CheckList("10.10.11.2") if !ok { t.Error("Expecting check to pass") } ok, err = a.CheckList("9.9.9.1") if !ok { t.Error("Expecting check to pass") } ok, err = a.CheckList("120.10.11.2") if ok { t.Error("Expecting check to fail") } }
explode_data.jsonl/39433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 382 }
[ 2830, 3393, 15944, 1155, 353, 8840, 836, 8, 341, 92406, 1669, 1140, 29683, 515, 197, 197, 14075, 852, 25, 3056, 917, 4913, 16, 15, 13, 16, 15, 13, 16, 16, 13, 17, 497, 330, 16, 15, 13, 16, 15, 13, 16, 16, 13, 18, 497, 330, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNew_Payload(t *testing.T) { // get a buffer to hold the packet data want := make([]byte, 102) // write 6 byte preamble copy(want[:6], []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}) // write 16 repetitions of target mac address offset := 6 addr, _ := net.ParseMAC("00:00:5e:00:53:01") for i := 0; i < 16; i++ { copy(want[offset+i*6:offset+i*6+6], addr) } // construct the magic packet var buf bytes.Buffer mp, _ := New("00:00:5e:00:53:01") mp.Broadcast(&buf) // compare! got := buf.Bytes() if bytes.Compare(want, got) != 0 { t.Errorf("want %+v, but got %+v", want, got) } }
explode_data.jsonl/8290
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 3564, 1088, 6989, 1155, 353, 8840, 836, 8, 341, 197, 322, 633, 264, 4147, 311, 3331, 279, 10151, 821, 198, 50780, 1669, 1281, 10556, 3782, 11, 220, 16, 15, 17, 692, 197, 322, 3270, 220, 21, 4922, 89454, 198, 49124, 3622,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCapability_CompatibleBitstring(t *testing.T) { // sanity check a simple case compatible := NewCapabilityString([]Capability{0, 1, 2, 3}).CompatibleWith([]uint64{15}) assert.True(t, compatible) rapid.Check(t, func(t *rapid.T) { assert := assert.New(t) // in order to pick up the rapid rng // generate initial list of caps nbCaps := rapid.IntRange(0, 512).Draw(t, "nbCaps").(int) isSet := rapid.IntRange(0, 1) caps := []Capability{} for i := 0; i < nbCaps; i++ { if 1 == isSet.Draw(t, "isSet").(int) { caps = append(caps, Capability(i)) } } // generate a subset of caps reductionSz := rapid.IntRange(0, len(caps)).Draw(t, "reductionSz").(int) subsetCaps := make([]Capability, len(caps)) copy(subsetCaps, caps) for i := 0; i < reductionSz; i++ { // select an index k, and remove it k := rapid.IntRange(0, len(subsetCaps)-1).Draw(t, "k").(int) subsetCaps[k] = subsetCaps[len(subsetCaps)-1] subsetCaps = subsetCaps[:len(subsetCaps)-1] } assert.Len(subsetCaps, len(caps)-reductionSz) // sanity check c1 := NewCapabilityString(subsetCaps) c2 := NewCapabilityString(caps) // caps should be compatible with subset assert.True(c1.CompatibleWith(c2), "caps is not compatible with subset") if reductionSz > 0 { // subset should not be compatible with caps assert.False(c2.CompatibleWith(c1), "subset was compatible with caps") } else { assert.Equal(c2, c1) } }) }
explode_data.jsonl/74078
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 585 }
[ 2830, 3393, 63746, 16946, 37079, 8344, 917, 1155, 353, 8840, 836, 8, 341, 197, 322, 46842, 1779, 264, 4285, 1142, 198, 197, 34842, 1669, 1532, 63746, 703, 10556, 63746, 90, 15, 11, 220, 16, 11, 220, 17, 11, 220, 18, 16630, 29161, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestParseEntry(t *testing.T) { for _, tt := range []struct { name string data []byte }{ { name: "Test valid data32 unmarshalling", data: validEntry32Bytes, }, { name: "Test valid data64 unmarshalling", data: validEntry64Bytes, }, } { t.Run(tt.name, func(t *testing.T) { _, _, err := ParseEntry(tt.data) if err != nil { t.Errorf("ParseEntry(): %v", err) } }) } }
explode_data.jsonl/66117
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 14463, 5874, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 914, 198, 197, 8924, 3056, 3782, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 2271, 2697, 821, 18, 17, 650, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeletePod(t *testing.T) { labels := map[string]string{ "app": "test-pod", } podObj := createPod("test-pod", "test-namespace", "0", "1.2.3.4", labels, NonHostNetwork, corev1.PodRunning) podKey := getKey(podObj, t) calls := []testutils.TestCmd{ // add pod {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("ns-test-namespace"), "nethash"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("all-namespaces"), "setlist"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("all-namespaces"), util.GetHashedName("ns-test-namespace")}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("ns-test-namespace"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("app"), "nethash"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("app"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("app:test-pod"), "nethash"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("app:test-pod"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("namedport:app:test-pod"), "hash:ip,port"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("namedport:app:test-pod"), "1.2.3.4,8080"}}, // delete pod {Cmd: []string{"ipset", "-D", "-exist", util.GetHashedName("ns-test-namespace"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-X", "-exist", util.GetHashedName("ns-test-namespace")}}, {Cmd: []string{"ipset", "-D", "-exist", util.GetHashedName("app"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-X", "-exist", util.GetHashedName("app")}}, {Cmd: []string{"ipset", "-D", "-exist", util.GetHashedName("app:test-pod"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-X", "-exist", util.GetHashedName("app:test-pod")}}, {Cmd: []string{"ipset", "-D", "-exist", util.GetHashedName("namedport:app:test-pod"), "1.2.3.4,8080"}}, {Cmd: []string{"ipset", "-X", "-exist", util.GetHashedName("namedport:app:test-pod")}}, } fexec := testutils.GetFakeExecWithScripts(calls) defer testutils.VerifyCalls(t, fexec, 
calls) f := newFixture(t, fexec) f.podLister = append(f.podLister, podObj) f.kubeobjects = append(f.kubeobjects, podObj) stopCh := make(chan struct{}) defer close(stopCh) f.newPodController(stopCh) deletePod(t, f, podObj, DeletedFinalStateknownObject) testCases := []expectedValues{ {0, 1, 0}, } checkPodTestResult("TestDeletePod", f, testCases) if _, exists := f.podController.podMap[podKey]; exists { t.Error("TestDeletePod failed @ cached pod obj exists check") } }
explode_data.jsonl/35407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1040 }
[ 2830, 3393, 6435, 23527, 1155, 353, 8840, 836, 8, 341, 95143, 1669, 2415, 14032, 30953, 515, 197, 197, 1, 676, 788, 330, 1944, 2268, 347, 756, 197, 532, 3223, 347, 5261, 1669, 1855, 23527, 445, 1944, 2268, 347, 497, 330, 1944, 12, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFileSize(t *testing.T) { var size int64 = 512 assert.Equal(t, "512 B", FileSize(size)) size *= 1024 assert.Equal(t, "512 KiB", FileSize(size)) size *= 1024 assert.Equal(t, "512 MiB", FileSize(size)) size *= 1024 assert.Equal(t, "512 GiB", FileSize(size)) size *= 1024 assert.Equal(t, "512 TiB", FileSize(size)) size *= 1024 assert.Equal(t, "512 PiB", FileSize(size)) size *= 4 assert.Equal(t, "2.0 EiB", FileSize(size)) }
explode_data.jsonl/14315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 67649, 1155, 353, 8840, 836, 8, 341, 2405, 1379, 526, 21, 19, 284, 220, 20, 16, 17, 198, 6948, 12808, 1155, 11, 330, 20, 16, 17, 425, 497, 2887, 1695, 6856, 1171, 13832, 11404, 220, 16, 15, 17, 19, 198, 6948, 12808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEquality(t *testing.T) { for i, tt := range parseTests() { s := tt.in tag := Make(s) t1 := Make(tag.String()) if tag != t1 { t.Errorf("%d:%s: equality test 1 failed\n got: %#v\nwant: %#v)", i, s, t1, tag) } t2, _ := Compose(tag) if tag != t2 { t.Errorf("%d:%s: equality test 2 failed\n got: %#v\nwant: %#v", i, s, t2, tag) } } }
explode_data.jsonl/15829
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 50745, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17853, 1669, 2088, 4715, 18200, 368, 341, 197, 1903, 1669, 17853, 1858, 198, 197, 60439, 1669, 7405, 1141, 340, 197, 3244, 16, 1669, 7405, 19343, 6431, 2398, 197, 743, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBotDirectMessageBotHandler(t *testing.T) { s := NewTestServer() go s.Start() s.SendDirectMessageToBot("some text") expectedMsg := fmt.Sprintf("some text") time.Sleep(2 * time.Second) assert.True(t, s.SawOutgoingMessage(expectedMsg)) s.Stop() }
explode_data.jsonl/7547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 23502, 16027, 2052, 23502, 3050, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 2271, 5475, 741, 30680, 274, 12101, 741, 1903, 20176, 16027, 2052, 1249, 23502, 445, 14689, 1467, 1138, 42400, 6611, 1669, 8879, 17305, 445, 1468...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_fieldsNames(t *testing.T) { for _, test := range []struct { in []string out []string alias []string }{ { //case: binary expr(valRef) in: []string{"value+value"}, out: []string{"value", "value"}, alias: []string{"value_value"}, }, { //case: binary expr + valRef in: []string{"value+value", "temperature"}, out: []string{"value", "value", "temperature"}, alias: []string{"value_value", "temperature"}, }, { //case: aggregate expr in: []string{"mean(value)"}, out: []string{"mean"}, alias: []string{"mean"}, }, { //case: binary expr(aggregate expr) in: []string{"mean(value) + max(value)"}, out: []string{"value", "value"}, alias: []string{"mean_max"}, }, { //case: binary expr(aggregate expr) + valRef in: []string{"mean(value) + max(value)", "temperature"}, out: []string{"value", "value", "temperature"}, alias: []string{"mean_max", "temperature"}, }, { //case: mixed aggregate and varRef in: []string{"mean(value) + temperature"}, out: []string{"value", "temperature"}, alias: []string{"mean_temperature"}, }, { //case: ParenExpr(varRef) in: []string{"(value)"}, out: []string{"value"}, alias: []string{"value"}, }, { //case: ParenExpr(varRef + varRef) in: []string{"(value + value)"}, out: []string{"value", "value"}, alias: []string{"value_value"}, }, { //case: ParenExpr(aggregate) in: []string{"(mean(value))"}, out: []string{"value"}, alias: []string{"mean"}, }, { //case: ParenExpr(aggregate + aggregate) in: []string{"(mean(value) + max(value))"}, out: []string{"value", "value"}, alias: []string{"mean_max"}, }, } { fields := influxql.Fields{} for _, s := range test.in { expr := MustParseExpr(s) fields = append(fields, &influxql.Field{Expr: expr}) } got := fields.Names() if !reflect.DeepEqual(got, test.out) { t.Errorf("get fields name:\nexp=%v\ngot=%v\n", test.out, got) } alias := fields.AliasNames() if !reflect.DeepEqual(alias, test.alias) { t.Errorf("get fields alias name:\nexp=%v\ngot=%v\n", test.alias, alias) } } }
explode_data.jsonl/24822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 924 }
[ 2830, 3393, 12132, 7980, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 17430, 262, 3056, 917, 198, 197, 13967, 256, 3056, 917, 198, 197, 197, 14956, 3056, 917, 198, 197, 59403, 197, 197, 90, 442, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSuccessHasRemoteBranch(t *testing.T) { testRepo := newTestRepo(t) defer testRepo.cleanup(t) for _, repo := range []string{testRepo.branchName, git.DefaultBranch} { branchExists, err := testRepo.sut.HasRemoteBranch(repo) require.Nil(t, err) require.Equal(t, branchExists, true) } }
explode_data.jsonl/13979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 7188, 10281, 24703, 18197, 1155, 353, 8840, 836, 8, 341, 18185, 25243, 1669, 501, 2271, 25243, 1155, 340, 16867, 1273, 25243, 87689, 1155, 692, 2023, 8358, 15867, 1669, 2088, 3056, 917, 90, 1944, 25243, 52560, 675, 11, 16345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestProvider(t *testing.T) { test.IntegrationTest(t) log, err := logger.NewLogger(true) require.NoError(t, err) wsFactory, err := workspace.NewFactory("test", log) require.NoError(t, err) cleanupFct := func(t *testing.T) { require.NoError(t, wsFactory.Delete(kymaVersion)) } //cleanup before test runs (to delete relicts of previous test executions) and after test is finished cleanupFct(t) defer cleanupFct(t) t.Parallel() prov, err := NewProvider(wsFactory, log) require.NoError(t, err) t.Run("Render manifest", func(t *testing.T) { ws, err := wsFactory.Get(kymaVersion) require.NoError(t, err) for _, component := range componentList(t, filepath.Join(ws.InstallationResourceDir, "components.yaml")) { t.Logf("Rendering Kyma HELM component '%s'", component.name) manifest, err := prov.RenderManifest(component) require.NoError(t, err) require.Equal(t, component.name, manifest.Name) require.Equal(t, HelmChart, manifest.Type) require.NotEmpty(t, manifest.Manifest) require.NoError(t, yaml.Unmarshal([]byte(manifest.Manifest), make(map[string]interface{}))) //valid YAML } }) t.Run("Render CRDs", func(t *testing.T) { crds, err := prov.RenderCRD(kymaVersion) require.NoError(t, err) require.NotEmpty(t, crds) require.Equal(t, crds[0].Type, CRD) }) }
explode_data.jsonl/36946
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 514 }
[ 2830, 3393, 5179, 1155, 353, 8840, 836, 8, 341, 18185, 7371, 17376, 2271, 1155, 692, 6725, 11, 1848, 1669, 5925, 7121, 7395, 3715, 340, 17957, 35699, 1155, 11, 1848, 692, 6692, 82, 4153, 11, 1848, 1669, 27514, 7121, 4153, 445, 1944, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCleanupClient(t *testing.T) { clientID := "1" wait := 100 * time.Millisecond defaultC := NewDefaultConfig() state := NewNetworkState(wait, defaultC.MaxClosedConnectionsBuffered, defaultC.MaxConnectionsStateBuffered) clients := state.(*networkState).getClients() assert.Equal(t, 0, len(clients)) conns := state.Connections(clientID, latestEpochTime(), nil) assert.Equal(t, 0, len(conns)) // Should be a no op state.(*networkState).RemoveExpiredClients(time.Now()) clients = state.(*networkState).getClients() assert.Equal(t, 1, len(clients)) assert.Equal(t, "1", clients[0]) // Should delete the client 1 state.(*networkState).RemoveExpiredClients(time.Now().Add(150 * time.Millisecond)) clients = state.(*networkState).getClients() assert.Equal(t, 0, len(clients)) }
explode_data.jsonl/22145
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 67335, 2959, 1155, 353, 8840, 836, 8, 341, 25291, 915, 1669, 330, 16, 1837, 48750, 1669, 220, 16, 15, 15, 353, 882, 71482, 271, 11940, 34, 1669, 1532, 3675, 2648, 741, 24291, 1669, 1532, 12320, 1397, 64092, 11, 1638, 34, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMakeEnvVariables(t *testing.T) { container := api.Container{ Env: []api.EnvVar{ { Name: "foo", Value: "bar", }, { Name: "baz", Value: "blah", }, }, } vars := makeEnvironmentVariables(&container) if len(vars) != len(container.Env) { t.Errorf("Vars don't match. Expected: %#v Found: %#v", container.Env, vars) } for ix, env := range container.Env { value := fmt.Sprintf("%s=%s", env.Name, env.Value) if value != vars[ix] { t.Errorf("Unexpected value: %s. Expected: %s", vars[ix], value) } } }
explode_data.jsonl/2837
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 8078, 14359, 22678, 1155, 353, 8840, 836, 8, 341, 53290, 1669, 6330, 33672, 515, 197, 197, 14359, 25, 3056, 2068, 81214, 3962, 515, 298, 197, 515, 571, 21297, 25, 220, 330, 7975, 756, 571, 47399, 25, 330, 2257, 756, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestConfigure(t *testing.T) { cfg := Configure() if cfg == nil { t.Fatalf("TestConfigure failed") } cfg.WithPrefix("/test") if cfg.Key != "/test" { t.Fatalf("TestConfigure failed") } cfg.WithTimeout(2 * time.Second) if cfg.Timeout != 2*time.Second { t.Fatalf("TestConfigure failed") } cfg.WithInitSize(1) if cfg.InitSize != 1 { t.Fatalf("TestConfigure failed") } cfg.WithPeriod(3 * time.Second) if cfg.Period != 3*time.Second { t.Fatalf("TestConfigure failed") } cfg.WithDeferHandler(&mockDeferHandler{}) if cfg.DeferHandler == nil { t.Fatalf("TestConfigure failed") } i := 0 cfg.WithEventFunc(func(evt KvEvent) { i++ }) cfg.AppendEventFunc(func(evt KvEvent) { i += 2 }) cfg.OnEvent(KvEvent{}) if i != 3 { t.Fatalf("TestConfigure failed") } cfg.WithParser(pb.MapParser) if cfg.Parser != pb.MapParser { t.Fatalf("TestConfigure failed") } if cfg.String() != "{key: /test, timeout: 2s, period: 3s}" { t.Fatalf("TestConfigure failed") } }
explode_data.jsonl/57200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 431 }
[ 2830, 3393, 28560, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 22169, 741, 743, 13286, 621, 2092, 341, 197, 3244, 30762, 445, 2271, 28560, 4641, 1138, 197, 532, 50286, 26124, 14335, 4283, 1944, 1138, 743, 13286, 9610, 961, 3521, 1944, 1,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRegisterWithApiServer(t *testing.T) { testKubelet := newTestKubelet(t, false /* controllerAttachDetachEnabled */) defer testKubelet.Cleanup() kubelet := testKubelet.kubelet kubeClient := testKubelet.fakeKubeClient kubeClient.AddReactor("create", "nodes", func(action core.Action) (bool, runtime.Object, error) { // Return an error on create. return true, &v1.Node{}, &apierrors.StatusError{ ErrStatus: metav1.Status{Reason: metav1.StatusReasonAlreadyExists}, } }) kubeClient.AddReactor("get", "nodes", func(action core.Action) (bool, runtime.Object, error) { // Return an existing (matching) node on get. return true, &v1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: testKubeletHostname, Labels: map[string]string{ kubeletapis.LabelHostname: testKubeletHostname, kubeletapis.LabelOS: goruntime.GOOS, kubeletapis.LabelArch: goruntime.GOARCH, }, }, }, nil }) kubeClient.AddReactor("*", "*", func(action core.Action) (bool, runtime.Object, error) { return true, nil, fmt.Errorf("no reaction implemented for %s", action) }) machineInfo := &cadvisorapi.MachineInfo{ MachineID: "123", SystemUUID: "abc", BootID: "1b3", NumCores: 2, MemoryCapacity: 1024, } mockCadvisor := testKubelet.fakeCadvisor mockCadvisor.On("MachineInfo").Return(machineInfo, nil) versionInfo := &cadvisorapi.VersionInfo{ KernelVersion: "3.16.0-0.bpo.4-amd64", ContainerOsVersion: "Debian GNU/Linux 7 (wheezy)", DockerVersion: "1.5.0", } mockCadvisor.On("VersionInfo").Return(versionInfo, nil) mockCadvisor.On("ImagesFsInfo").Return(cadvisorapiv2.FsInfo{ Usage: 400, Capacity: 1000, Available: 600, }, nil) mockCadvisor.On("RootFsInfo").Return(cadvisorapiv2.FsInfo{ Usage: 9, Capacity: 10, }, nil) kubelet.machineInfo = machineInfo done := make(chan struct{}) go func() { kubelet.registerWithAPIServer() done <- struct{}{} }() select { case <-time.After(wait.ForeverTestTimeout): assert.Fail(t, "timed out waiting for registration") case <-done: return } }
explode_data.jsonl/82106
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 872 }
[ 2830, 3393, 8690, 2354, 6563, 5475, 1155, 353, 8840, 836, 8, 341, 18185, 42, 3760, 1149, 1669, 501, 2271, 42, 3760, 1149, 1155, 11, 895, 1391, 6461, 30485, 89306, 5462, 639, 340, 16867, 1273, 42, 3760, 1149, 727, 60639, 741, 16463, 37...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_readMessagesCheckBroadcastMessages verifies that readMessages keeps
// only the messages that arrive inside a broadcast window — between
// MsgBroadcastStarted and MsgBroadcastFinished sent by a configured
// super-user — and drops the control messages themselves. Control messages
// from users not in broadcastUsers are treated as ordinary messages.
func Test_readMessagesCheckBroadcastMessages(t *testing.T) {
	tbl := []struct {
		broadcastUsers SuperUserMock
		in             []bot.Message
		out            []bot.Message
	}{
		// Empty input, no super-users: nothing comes out.
		{
			SuperUserMock{},
			[]bot.Message{},
			[]bot.Message{},
		},
		// An empty broadcast window: control messages are stripped.
		{
			SuperUserMock{"radio-t-bot": true},
			[]bot.Message{
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
			},
			[]bot.Message{},
		},
		// Messages inside the window are kept, in order.
		{
			SuperUserMock{"radio-t-bot": true},
			[]bot.Message{
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
			},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
		},
		// Start marker with trailing text still opens the window.
		{
			SuperUserMock{"radio-t-bot": true},
			[]bot.Message{
				{Text: bot.MsgBroadcastStarted + "\n_pong_", From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
			},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
		},
		// Messages before the start and after the finish are dropped.
		{
			SuperUserMock{"radio-t-bot": true},
			[]bot.Message{
				{Text: "message-0", From: bot.User{Username: "user-0"}},
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-3", From: bot.User{Username: "user-3"}},
			},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
		},
		// Multiple windows: only messages inside some window survive.
		{
			SuperUserMock{"radio-t-bot": true},
			[]bot.Message{
				{Text: "message-0", From: bot.User{Username: "user-0"}},
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-3", From: bot.User{Username: "user-3"}},
			},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
		},
		// No super-users configured: control messages have no effect and
		// every message passes through unchanged.
		{
			SuperUserMock{},
			[]bot.Message{
				{Text: "message-0", From: bot.User{Username: "user-0"}},
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-3", From: bot.User{Username: "user-3"}},
			},
			[]bot.Message{
				{Text: "message-0", From: bot.User{Username: "user-0"}},
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-3", From: bot.User{Username: "user-3"}},
			},
		},
		// Two super-users: either may open/close a window.
		{
			SuperUserMock{"radio-t-bot": true, "umputun": true},
			[]bot.Message{
				{Text: "message-0", From: bot.User{Username: "user-0"}},
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "umputun"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-3", From: bot.User{Username: "user-3"}},
			},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
		},
		// Window opened but never closed: trailing messages are kept.
		{
			SuperUserMock{"radio-t-bot": true},
			[]bot.Message{
				{Text: bot.MsgBroadcastStarted, From: bot.User{Username: "radio-t-bot"}},
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
		},
		// Finish marker without a start: preceding messages are still kept.
		{
			SuperUserMock{"radio-t-bot": true},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
				{Text: bot.MsgBroadcastFinished, From: bot.User{Username: "radio-t-bot"}},
			},
			[]bot.Message{
				{Text: "message-1", From: bot.User{Username: "user-1"}},
				{Text: "message-2", From: bot.User{Username: "user-2"}},
			},
		},
	}

	for i, tt := range tbl {
		t.Run(strconv.Itoa(i), func(t *testing.T) {
			// Each subtest round-trips the messages through a real file.
			err := createFile(testFile, tt.in)
			defer os.Remove(testFile)
			assert.NoError(t, err)
			msgs, err := readMessages(testFile, tt.broadcastUsers)
			assert.NoError(t, err)
			assert.Equal(t, tt.out, msgs)
		})
	}
}
explode_data.jsonl/37339
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2610 }
[ 2830, 3393, 6443, 15820, 3973, 43362, 15820, 1155, 353, 8840, 836, 8, 341, 3244, 2024, 1669, 3056, 1235, 341, 197, 2233, 19105, 7137, 7297, 1474, 11571, 198, 197, 17430, 1797, 3056, 6331, 8472, 198, 197, 13967, 310, 3056, 6331, 8472, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPersistence verifies that metadata bucket entries and a stored block
// survive closing and reopening the database on disk.
func TestPersistence(t *testing.T) {
	t.Parallel()

	// Create a new database to run tests against.
	dbPath := filepath.Join(os.TempDir(), "ffldb-persistencetest")
	_ = os.RemoveAll(dbPath)
	db, err := database.Create(dbType, dbPath, blockDataNet)
	if err != nil {
		t.Errorf("Failed to create test database (%s) %v", dbType, err)
		return
	}
	defer os.RemoveAll(dbPath)
	defer db.Close()

	// Create a bucket, put some values into it, and store a block so they
	// can be tested for existence on re-open.
	bucket1Key := []byte("bucket1")
	storeValues := map[string]string{
		"b1key1": "foo1",
		"b1key2": "foo2",
		"b1key3": "foo3",
	}
	genesisBlock := godashutil.NewBlock(chaincfg.MainNetParams.GenesisBlock)
	genesisHash := chaincfg.MainNetParams.GenesisHash
	err = db.Update(func(tx database.Tx) error {
		metadataBucket := tx.Metadata()
		if metadataBucket == nil {
			return fmt.Errorf("Metadata: unexpected nil bucket")
		}
		bucket1, err := metadataBucket.CreateBucket(bucket1Key)
		if err != nil {
			return fmt.Errorf("CreateBucket: unexpected error: %v", err)
		}
		for k, v := range storeValues {
			err := bucket1.Put([]byte(k), []byte(v))
			if err != nil {
				return fmt.Errorf("Put: unexpected error: %v", err)
			}
		}
		if err := tx.StoreBlock(genesisBlock); err != nil {
			return fmt.Errorf("StoreBlock: unexpected error: %v", err)
		}
		return nil
	})
	if err != nil {
		t.Errorf("Update: unexpected error: %v", err)
		return
	}

	// Close and reopen the database to ensure the values persist.
	db.Close()
	db, err = database.Open(dbType, dbPath, blockDataNet)
	if err != nil {
		t.Errorf("Failed to open test database (%s) %v", dbType, err)
		return
	}
	defer db.Close()

	// Ensure the values previously stored in bucket1 and the stored genesis
	// block still exist and are correct after the reopen.
	err = db.View(func(tx database.Tx) error {
		metadataBucket := tx.Metadata()
		if metadataBucket == nil {
			return fmt.Errorf("Metadata: unexpected nil bucket")
		}
		bucket1 := metadataBucket.Bucket(bucket1Key)
		if bucket1 == nil {
			return fmt.Errorf("Bucket1: unexpected nil bucket")
		}
		for k, v := range storeValues {
			gotVal := bucket1.Get([]byte(k))
			if !reflect.DeepEqual(gotVal, []byte(v)) {
				return fmt.Errorf("Get: key '%s' does not "+
					"match expected value - got %s, want %s",
					k, gotVal, v)
			}
		}
		genesisBlockBytes, _ := genesisBlock.Bytes()
		gotBytes, err := tx.FetchBlock(genesisHash)
		if err != nil {
			return fmt.Errorf("FetchBlock: unexpected error: %v", err)
		}
		if !reflect.DeepEqual(gotBytes, genesisBlockBytes) {
			return fmt.Errorf("FetchBlock: stored block mismatch")
		}
		return nil
	})
	if err != nil {
		t.Errorf("View: unexpected error: %v", err)
		return
	}
}
explode_data.jsonl/26987
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1063 }
[ 2830, 3393, 71562, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 4230, 264, 501, 4625, 311, 1598, 7032, 2348, 624, 20939, 1820, 1669, 26054, 22363, 9638, 65009, 6184, 1507, 330, 542, 56925, 2268, 4975, 954, 57824, 113...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestOnContainersUnableToTransitionStateForDesiredStoppedTask verifies that
// when a task desired-stopped has containers that cannot transition,
// handleContainersUnableToTransitionState emits a TaskStateChange event with
// the expected reason and leaves the desired status as TaskStopped.
func TestOnContainersUnableToTransitionStateForDesiredStoppedTask(t *testing.T) {
	stateChangeEvents := make(chan statechange.Event)
	task := &managedTask{
		Task: &apitask.Task{
			Containers:          []*apicontainer.Container{},
			DesiredStatusUnsafe: apitaskstatus.TaskStopped,
		},
		engine: &DockerTaskEngine{
			stateChangeEvents: stateChangeEvents,
		},
		stateChangeEvents: stateChangeEvents,
	}
	// Consume the event on a separate goroutine: the channel is unbuffered,
	// so the handler would otherwise block on send.
	eventsGenerated := sync.WaitGroup{}
	eventsGenerated.Add(1)
	go func() {
		event := <-stateChangeEvents
		assert.Equal(t, event.(api.TaskStateChange).Reason, taskUnableToTransitionToStoppedReason)
		eventsGenerated.Done()
	}()

	task.handleContainersUnableToTransitionState()
	eventsGenerated.Wait()

	assert.Equal(t, task.GetDesiredStatus(), apitaskstatus.TaskStopped)
}
explode_data.jsonl/24577
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 1925, 74632, 17075, 1249, 21768, 1397, 2461, 4896, 2690, 59803, 6262, 1155, 353, 8840, 836, 8, 341, 24291, 4072, 7900, 1669, 1281, 35190, 1584, 3373, 6904, 340, 49115, 1669, 609, 25270, 6262, 515, 197, 81153, 25, 609, 391, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindPairSumEqual(t *testing.T) { var dummy = []int{123, 30, 20, 70} a, b, err := findPairSumEqual(dummy, 100) assert.NoError(t, err) assert.Equal(t, a, 30) assert.Equal(t, b, 70) var fake = []int{50, 0, 0, 0} a, b, err = findPairSumEqual(fake, 100) assert.Error(t, err) example, _ := util.ParseFileInts("day1.example") a, b, err = findPairSumEqual(example, 2020) assert.NoError(t, err) assert.Equal(t, a, 1721) assert.Equal(t, b, 299) }
explode_data.jsonl/60129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 9885, 12443, 9190, 2993, 1155, 353, 8840, 836, 8, 341, 2405, 17292, 284, 3056, 396, 90, 16, 17, 18, 11, 220, 18, 15, 11, 220, 17, 15, 11, 220, 22, 15, 532, 11323, 11, 293, 11, 1848, 1669, 1477, 12443, 9190, 2993, 836...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGocloak_CreateClientScope_DeleteClientScope(t *testing.T) { t.Parallel() client := NewClientWithDebug(t) defer ClearRealmCache(t, client) tearDown, _ := CreateClientScope(t, client, nil) tearDown() }
explode_data.jsonl/79525
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 38, 509, 385, 585, 34325, 2959, 10803, 57418, 2959, 10803, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 25291, 1669, 1532, 2959, 2354, 7939, 1155, 340, 16867, 12023, 64290, 8233, 1155, 11, 2943, 340, 197, 82892, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestAutoSaveHasOneAssociation verifies GORM's association_autoupdate /
// association_autocreate tags on a has-one association: with both disabled
// the associated Company is neither created nor updated on Save, and each
// behavior can be re-enabled per-call via DB.Set.
func TestAutoSaveHasOneAssociation(t *testing.T) {
	type Company struct {
		orm.Model
		UserID uint
		Name   string
	}

	type User struct {
		orm.Model
		Name    string
		Company Company `orm:"association_autoupdate:false;association_autocreate:false;"`
	}

	// Start from a clean slate for the companies used by this test.
	DB.Where("name = ?", "auto_save_has_one_association").Delete(&Company{})
	DB.AutoMigrate(&Company{}, &User{})

	// autocreate is disabled, so saving the user must not create the company.
	DB.Save(&User{Name: "jinzhu", Company: Company{Name: "auto_save_has_one_association"}})
	if !DB.Where("name = ?", "auto_save_has_one_association").First(&Company{}).RecordNotFound() {
		t.Errorf("Company auto_save_has_one_association should not have been saved when autosave is false")
	}

	// Persist the company directly, then rename it in memory only.
	company := Company{Name: "auto_save_has_one_association"}
	DB.Save(&company)

	company.Name = "auto_save_has_one_association_new_name"
	user := User{Name: "jinzhu", Company: company}
	DB.Save(&user)

	// autoupdate is disabled, so the in-memory rename must not be persisted.
	if !DB.Where("name = ?", "auto_save_has_one_association_new_name").First(&Company{}).RecordNotFound() {
		t.Errorf("Company should not have been updated")
	}

	if !DB.Where("name = ? AND user_id = ?", "auto_save_has_one_association", user.ID).First(&Company{}).RecordNotFound() {
		t.Errorf("Company should not have been updated")
	}

	// The foreign key must still be wired up even without auto-save.
	if user.Company.UserID == 0 {
		t.Errorf("UserID should be assigned")
	}

	// Re-enable autoupdate for this one Save: the rename must now persist.
	company.Name = "auto_save_has_one_association_2_new_name"
	DB.Set("orm:association_autoupdate", true).Save(&user)

	if DB.Where("name = ? AND user_id = ?", "auto_save_has_one_association_new_name", user.ID).First(&Company{}).RecordNotFound() {
		t.Errorf("Company should been updated")
	}

	// Re-enable autocreate for this one Save: the new company must be created.
	user2 := User{Name: "jinzhu_2", Company: Company{Name: "auto_save_has_one_association_2"}}
	DB.Set("orm:association_autocreate", true).Save(&user2)
	if DB.Where("name = ?", "auto_save_has_one_association_2").First(&Company{}).RecordNotFound() {
		t.Errorf("Company auto_save_has_one_association_2 should been created when autocreate is true")
	}
}
explode_data.jsonl/15891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 703 }
[ 2830, 3393, 13253, 8784, 10281, 3966, 63461, 1155, 353, 8840, 836, 8, 341, 13158, 8188, 2036, 341, 197, 197, 493, 5659, 198, 197, 31672, 915, 2622, 198, 197, 21297, 256, 914, 198, 197, 630, 13158, 2657, 2036, 341, 197, 197, 493, 5659,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestFileStoreReInit(t *testing.T) { s := NewStore(store.Table("aaa")) defer cleanup(DefaultDatabase, s) s.Init(store.Table("bbb")) if s.Options().Table != "bbb" { t.Error("Init didn't reinitialise the store") } }
explode_data.jsonl/9707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 1703, 6093, 693, 3803, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 6093, 31200, 18257, 445, 32646, 5455, 16867, 21290, 87874, 5988, 11, 274, 340, 1903, 26849, 31200, 18257, 445, 53151, 5455, 743, 274, 22179, 1005, 2556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// Test_rmPushFailTask verifies that push tasks whose post endpoint keeps
// failing (the mocked PostService always returns an error) are eventually
// removed from chain.push.tasks. A watcher goroutine polls the task map and
// gives up after ~30 seconds.
func Test_rmPushFailTask(t *testing.T) {
	chain, mock33 := createBlockChain(t)
	// Shorten the post-failure back-off so the test completes quickly.
	chain.push.postFail2Sleep = int32(1)
	ps := &bcMocks.PostService{}
	ps.On("PostData", mock.Anything, mock.Anything, mock.Anything).Return(errors.New("timeout"))
	chain.push.postService = ps
	createBlocks(t, mock33, chain, 10)
	var pushNames []string
	subCnt := 10
	// Register subCnt subscribers, each with a unique name.
	for i := 0; i < subCnt; i++ {
		subscribe := new(types.PushSubscribeReq)
		subscribe.Name = "push-test"
		subscribe.URL = "http://localhost"
		subscribe.Type = PushTxReceipt
		subscribe.Contract = make(map[string]bool)
		subscribe.Contract["coins"] = true
		subscribe.Name = fmt.Sprintf("%d", i) + "-push-test-"
		err := chain.push.addSubscriber(subscribe)
		pushNames = append(pushNames, subscribe.Name)
		assert.Equal(t, err, nil)
	}
	// All subscribers should now have an active push task.
	chain.push.mu.Lock()
	assert.Equal(t, len(chain.push.tasks), subCnt)
	chain.push.mu.Unlock()
	// Generate blocks to trigger pushes; every post attempt fails.
	createBlocks(t, mock33, chain, 10)
	time.Sleep(1 * time.Second)
	createBlocks(t, mock33, chain, 10)
	time.Sleep(1 * time.Second)
	// Poll until every failing task has been removed, or time out after
	// ~30 one-second sleeps.
	closeChan := make(chan struct{})
	go func() {
		sleepCnt := 30
		for {
			chain.push.mu.Lock()
			if 0 == len(chain.push.tasks) {
				chain.push.mu.Unlock()
				close(closeChan)
				return
			}
			chain.push.mu.Unlock()
			sleepCnt--
			if sleepCnt <= 0 {
				close(closeChan)
				return
			}
			time.Sleep(time.Second)
		}
	}()
	<-closeChan
	fmt.Println("stoping Test_rmPushFailTask")
	// Final check: the task map must be empty.
	chain.push.mu.Lock()
	assert.Equal(t, 0, len(chain.push.tasks))
	chain.push.mu.Unlock()
	defer mock33.Close()
}
explode_data.jsonl/61729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 632 }
[ 2830, 3393, 58537, 16644, 19524, 6262, 1155, 353, 8840, 836, 8, 341, 197, 8819, 11, 7860, 18, 18, 1669, 1855, 4713, 18837, 1155, 340, 197, 8819, 2552, 6542, 19524, 17, 41745, 284, 526, 18, 17, 7, 16, 340, 35009, 1669, 609, 8904, 725...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_VirtualNetworks_Spec_Properties_Subnets_Properties_DelegationsARM_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) { parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip of VirtualNetworks_Spec_Properties_Subnets_Properties_DelegationsARM via JSON returns original", prop.ForAll(RunJSONSerializationTestForVirtualNetworksSpecPropertiesSubnetsPropertiesDelegationsARM, VirtualNetworksSpecPropertiesSubnetsPropertiesDelegationsARMGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout)) }
explode_data.jsonl/2902
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 2334, 2901, 12320, 82, 1098, 992, 1088, 9249, 36359, 52861, 1088, 9249, 24597, 1937, 804, 17911, 62, 4498, 77521, 78967, 98054, 2848, 4756, 2121, 2993, 1155, 353, 8840, 836, 8, 341, 67543, 1669, 728, 73137, 13275, 2271, 9706, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestApplyOrCreateBatchSpecWithPublicationStates exercises the
// applyBatchChange and createBatchChange GraphQL mutations with explicit
// publicationStates inputs: invalid inputs (foreign batch spec, duplicates,
// bad state values, specs that already declare published) must error, and a
// valid apply must produce the expected BatchChange with its changesets in
// the processing state.
func TestApplyOrCreateBatchSpecWithPublicationStates(t *testing.T) {
	if testing.Short() {
		t.Skip()
	}

	ctx := context.Background()
	db := dbtest.NewDB(t, "")

	// Ensure our site configuration doesn't have rollout windows so we get a
	// consistent initial state.
	ct.MockConfig(t, &conf.Unified{})

	userID := ct.CreateTestUser(t, db, true).ID
	userAPIID := string(graphqlbackend.MarshalUserID(userID))
	apiUser := &apitest.User{
		ID:         userAPIID,
		DatabaseID: userID,
		SiteAdmin:  true,
	}
	actorCtx := actor.WithActor(ctx, actor.FromUser(userID))

	// Fixed clock so LastAppliedAt can be asserted exactly.
	now := timeutil.Now()
	clock := func() time.Time { return now }
	cstore := store.NewWithClock(db, nil, clock)
	repoStore := database.ReposWith(cstore)
	esStore := database.ExternalServicesWith(cstore)

	repo := newGitHubTestRepo("github.com/sourcegraph/apply-create-batch-change-test", newGitHubExternalService(t, esStore))
	if err := repoStore.Create(ctx, repo); err != nil {
		t.Fatal(err)
	}

	r := &Resolver{store: cstore}
	s, err := graphqlbackend.NewSchema(db, r, nil, nil, nil, nil, nil, nil)
	if err != nil {
		t.Fatal(err)
	}

	// Since apply and create are essentially the same underneath, we can test
	// them with the same test code provided we special case the response type
	// handling.
	for name, tc := range map[string]struct {
		exec func(ctx context.Context, t testing.TB, s *graphql.Schema, in map[string]interface{}) (*apitest.BatchChange, error)
	}{
		"applyBatchChange": {
			exec: func(ctx context.Context, t testing.TB, s *graphql.Schema, in map[string]interface{}) (*apitest.BatchChange, error) {
				var response struct{ ApplyBatchChange apitest.BatchChange }
				if errs := apitest.Exec(ctx, t, s, in, &response, mutationApplyBatchChange); errs != nil {
					return nil, errors.Newf("GraphQL errors: %v", errs)
				}
				return &response.ApplyBatchChange, nil
			},
		},
		"createBatchChange": {
			exec: func(ctx context.Context, t testing.TB, s *graphql.Schema, in map[string]interface{}) (*apitest.BatchChange, error) {
				var response struct{ CreateBatchChange apitest.BatchChange }
				if errs := apitest.Exec(ctx, t, s, in, &response, mutationCreateBatchChange); errs != nil {
					return nil, errors.Newf("GraphQL errors: %v", errs)
				}
				return &response.CreateBatchChange, nil
			},
		},
	} {
		// Create initial specs. Note that we have to append the test case name
		// to the batch spec ID to avoid cross-contamination between the test
		// cases.
		batchSpec := ct.CreateBatchSpec(t, ctx, cstore, "batch-spec-"+name, userID)
		changesetSpec := ct.CreateChangesetSpec(t, ctx, cstore, ct.TestSpecOpts{
			User:      userID,
			Repo:      repo.ID,
			BatchSpec: batchSpec.ID,
			HeadRef:   "main",
		})

		// We need a couple more changeset specs to make this useful: we need to
		// be able to test that changeset specs attached to other batch specs
		// cannot be modified, and that changeset specs with explicit published
		// fields cause errors.
		otherBatchSpec := ct.CreateBatchSpec(t, ctx, cstore, "other-batch-spec-"+name, userID)
		otherChangesetSpec := ct.CreateChangesetSpec(t, ctx, cstore, ct.TestSpecOpts{
			User:      userID,
			Repo:      repo.ID,
			BatchSpec: otherBatchSpec.ID,
			HeadRef:   "main",
		})
		publishedChangesetSpec := ct.CreateChangesetSpec(t, ctx, cstore, ct.TestSpecOpts{
			User:      userID,
			Repo:      repo.ID,
			BatchSpec: batchSpec.ID,
			HeadRef:   "main",
			Published: true,
		})

		t.Run(name, func(t *testing.T) {
			// Handle the interesting error cases for different
			// publicationStates inputs.
			for name, states := range map[string][]map[string]interface{}{
				"other batch spec": {
					{
						"changesetSpec":    marshalChangesetSpecRandID(otherChangesetSpec.RandID),
						"publicationState": true,
					},
				},
				"duplicate batch specs": {
					{
						"changesetSpec":    marshalChangesetSpecRandID(changesetSpec.RandID),
						"publicationState": true,
					},
					{
						"changesetSpec":    marshalChangesetSpecRandID(changesetSpec.RandID),
						"publicationState": true,
					},
				},
				"invalid publication state": {
					{
						"changesetSpec":    marshalChangesetSpecRandID(changesetSpec.RandID),
						"publicationState": "foo",
					},
				},
				"invalid changeset spec ID": {
					{
						"changesetSpec":    "foo",
						"publicationState": true,
					},
				},
				"changeset spec with a published state": {
					{
						"changesetSpec":    marshalChangesetSpecRandID(publishedChangesetSpec.RandID),
						"publicationState": true,
					},
				},
			} {
				t.Run(name, func(t *testing.T) {
					input := map[string]interface{}{
						"batchSpec":         string(marshalBatchSpecRandID(batchSpec.RandID)),
						"publicationStates": states,
					}
					if _, errs := tc.exec(actorCtx, t, s, input); errs == nil {
						t.Fatalf("expected errors, got none")
					}
				})
			}

			// Finally, let's actually make a legit apply.
			t.Run("success", func(t *testing.T) {
				input := map[string]interface{}{
					"batchSpec": string(marshalBatchSpecRandID(batchSpec.RandID)),
					"publicationStates": []map[string]interface{}{
						{
							"changesetSpec":    marshalChangesetSpecRandID(changesetSpec.RandID),
							"publicationState": true,
						},
					},
				}
				have, err := tc.exec(actorCtx, t, s, input)
				if err != nil {
					t.Error(err)
				}

				want := &apitest.BatchChange{
					ID:          have.ID,
					Name:        batchSpec.Spec.Name,
					Description: batchSpec.Spec.Description,
					Namespace: apitest.UserOrg{
						ID:         userAPIID,
						DatabaseID: userID,
						SiteAdmin:  true,
					},
					InitialApplier: apiUser,
					LastApplier:    apiUser,
					LastAppliedAt:  marshalDateTime(t, now),
					Changesets: apitest.ChangesetConnection{
						Nodes: []apitest.Changeset{
							{Typename: "ExternalChangeset", State: string(btypes.ChangesetStateProcessing)},
							{Typename: "ExternalChangeset", State: string(btypes.ChangesetStateProcessing)},
						},
						TotalCount: 2,
					},
				}
				if diff := cmp.Diff(want, have); diff != "" {
					t.Errorf("unexpected response (-want +have):\n%s", diff)
				}
			})
		})
	}
}
explode_data.jsonl/53214
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2581 }
[ 2830, 3393, 28497, 57111, 21074, 8327, 2354, 72390, 23256, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 630, 20985, 1669, 2266, 19047, 741, 20939, 1669, 2927, 1944, 7121, 3506, 1155, 11, 85617, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Execution_CleanupSuccess_DetachError drives the deploy state machine
// against a mocked ASG whose target group stays in a non-detached state, and
// asserts the execution retries DetachForSuccess repeatedly before forcing
// cleanup and still ending in Success with a DetachError in the output.
func Test_Execution_CleanupSuccess_DetachError(t *testing.T) {
	// Should try 10 times to detach
	release := models.MockRelease(t)
	maws := models.MockAwsClients(release)

	// Mock a target group that never reports as detached.
	maws.ASG.DescribeLoadBalancerTargetGroupsOutput = &autoscaling.DescribeLoadBalancerTargetGroupsOutput{
		LoadBalancerTargetGroups: []*autoscaling.LoadBalancerTargetGroupState{
			&autoscaling.LoadBalancerTargetGroupState{
				LoadBalancerTargetGroupARN: to.Strp("arn"),
				State:                      to.Strp("aaa"),
			},
		},
	}

	stateMachine := createTestStateMachine(t, maws)

	exec, err := stateMachine.Execute(release)
	// We force delete rather than suffer the consequences
	assert.NoError(t, err)

	ep := exec.Path()
	// Expected path up to the detach loop.
	steps := []string{
		"Validate",
		"Lock",
		"ValidateResources",
		"Deploy",
		"WaitForDeploy",
		"WaitForHealthy",
		"CheckHealthy",
		"Healthy?",
		"WaitForDetach",
	}
	// Check detach 60 times (for 10 minutes)
	for i := 0; i <= 60; i++ {
		steps = append(steps, "DetachForSuccess")
	}
	steps = append(steps, "WaitDetachForSuccess", "CleanUpSuccess", "Success")

	assert.Equal(t, steps, ep)
	assert.Regexp(t, "DetachError", exec.LastOutputJSON)
}
explode_data.jsonl/3588
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 437 }
[ 2830, 3393, 62, 20294, 920, 60639, 7188, 1557, 295, 610, 1454, 1155, 353, 8840, 836, 8, 341, 197, 322, 12260, 1430, 220, 16, 15, 3039, 311, 46781, 198, 17200, 1623, 1669, 4119, 24664, 16077, 1155, 692, 2109, 8635, 1669, 4119, 24664, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestVolumeMounts checks the volume mounts generated for each container
// role of a two-DC Cassandra cluster fixture: the init container gets only
// the bootstrap mount; the bootstrap container adds extra-lib and tools; the
// cassandra container additionally mounts tmp, data, and log.
func TestVolumeMounts(t *testing.T) {
	_, cc := helperInitCluster(t, "cassandracluster-2DC.yaml")

	// Init container: a single bootstrap mount.
	volumeMounts := generateContainerVolumeMount(cc, initContainer)
	assert.Equal(t, 1, len(volumeMounts))
	assert.Equal(t, "/bootstrap", volumeMounts[getPos(volumeMounts, "bootstrap")].MountPath)

	// Bootstrap container: config, extra libraries, and tools.
	volumeMounts = generateContainerVolumeMount(cc, bootstrapContainer)
	assert.Equal(t, 3, len(volumeMounts))
	assert.Equal(t, "/etc/cassandra", volumeMounts[getPos(volumeMounts, "bootstrap")].MountPath)
	assert.Equal(t, "/extra-lib", volumeMounts[getPos(volumeMounts, "extra-lib")].MountPath)
	assert.Equal(t, "/opt/bin", volumeMounts[getPos(volumeMounts, "tools")].MountPath)

	// Cassandra container: everything above plus tmp, data, and log dirs.
	volumeMounts = generateContainerVolumeMount(cc, cassandraContainer)
	assert.Equal(t, 6, len(volumeMounts))
	assert.Equal(t, "/etc/cassandra", volumeMounts[getPos(volumeMounts, "bootstrap")].MountPath)
	assert.Equal(t, "/extra-lib", volumeMounts[getPos(volumeMounts, "extra-lib")].MountPath)
	assert.Equal(t, "/opt/bin", volumeMounts[getPos(volumeMounts, "tools")].MountPath)
	assert.Equal(t, "/tmp", volumeMounts[getPos(volumeMounts, "tmp")].MountPath)
	assert.Equal(t, "/var/lib/cassandra", volumeMounts[getPos(volumeMounts, "data")].MountPath)
	assert.Equal(t, "/var/log/cassandra", volumeMounts[getPos(volumeMounts, "log")].MountPath)
}
explode_data.jsonl/54109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 18902, 16284, 82, 1155, 353, 8840, 836, 8, 341, 197, 6878, 12527, 1669, 13137, 3803, 28678, 1155, 11, 330, 66, 70093, 18855, 12, 17, 5626, 33406, 5130, 5195, 4661, 16284, 82, 1669, 6923, 4502, 18902, 16284, 31424, 11, 2930, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCanvas_Tapped simulates a tap on a button inside a mobile canvas and
// verifies the primary-tap callback fires (and the secondary one does not),
// the button's handler runs, and the event carries the expected absolute and
// widget-relative positions.
func TestCanvas_Tapped(t *testing.T) {
	tapped := false
	altTapped := false
	buttonTap := false
	var pointEvent *fyne.PointEvent
	var tappedObj fyne.Tappable
	button := widget.NewButton("Test", func() {
		buttonTap = true
	})
	c := NewCanvas().(*mobileCanvas)
	c.SetContent(button)
	c.resize(fyne.NewSize(36, 24))
	// Position the button away from the origin so the relative event
	// position differs from the absolute one.
	button.Move(fyne.NewPos(3, 3))

	tapPos := fyne.NewPos(6, 6)
	c.tapDown(tapPos, 0)
	c.tapUp(tapPos, 0, func(wid fyne.Tappable, ev *fyne.PointEvent) {
		tapped = true
		tappedObj = wid
		pointEvent = ev
		wid.Tapped(ev)
	}, func(wid fyne.SecondaryTappable, ev *fyne.PointEvent) {
		altTapped = true
		wid.TappedSecondary(ev)
	}, func(wid fyne.DoubleTappable, ev *fyne.PointEvent) {
		wid.DoubleTapped(ev)
	}, func(wid fyne.Draggable) {
	})

	assert.True(t, tapped, "tap primary")
	assert.False(t, altTapped, "don't tap secondary")
	assert.True(t, buttonTap, "button should be tapped")
	assert.Equal(t, button, tappedObj)
	if assert.NotNil(t, pointEvent) {
		assert.Equal(t, fyne.NewPos(6, 6), pointEvent.AbsolutePosition)
		// Relative position = absolute minus the button's own offset.
		assert.Equal(t, fyne.NewPos(3, 3), pointEvent.Position)
	}
}
explode_data.jsonl/43160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 18226, 1139, 5677, 1155, 353, 8840, 836, 8, 341, 3244, 5677, 1669, 895, 198, 197, 3145, 51, 5677, 1669, 895, 198, 37359, 23879, 1669, 895, 198, 2405, 1459, 1556, 353, 30595, 811, 3775, 1556, 198, 2405, 48423, 5261, 51941, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddTokensValidatorUnbonding(t *testing.T) { validator := NewValidator(sdk.ValAddress(pk1.Address().Bytes()), pk1, Description{}) validator = validator.UpdateStatus(sdk.Unbonding) validator, delShares := validator.AddTokensFromDel(sdk.NewInt(10)) assert.True(sdk.DecEq(t, sdk.NewDec(10), delShares)) assert.Equal(t, sdk.Unbonding, validator.Status) assert.True(sdk.IntEq(t, sdk.NewInt(10), validator.Tokens)) assert.True(sdk.DecEq(t, sdk.NewDec(10), validator.DelegatorShares)) }
explode_data.jsonl/739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 2212, 29300, 14256, 1806, 64239, 287, 1155, 353, 8840, 836, 8, 341, 197, 16112, 1669, 1532, 14256, 1141, 7584, 77819, 4286, 39928, 16, 26979, 1005, 7078, 11858, 22458, 16, 11, 7662, 37790, 197, 16112, 284, 22935, 16689, 2522, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrimThisValue(t *testing.T) { const SCRIPT = ` function t() { 'use strict'; Boolean.prototype.toString = function() { return typeof this; }; assert.sameValue(true.toLocaleString(), "boolean"); Boolean.prototype[Symbol.iterator] = function() { return [typeof this][Symbol.iterator](); } var s = new Set(true); assert.sameValue(s.size, 1, "size"); assert.sameValue(s.has("boolean"), true, "s.has('boolean')"); } t(); ` testScript1(TESTLIB+SCRIPT, _undefined, t) }
explode_data.jsonl/10513
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 201 }
[ 2830, 3393, 66191, 1986, 1130, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 7527, 259, 368, 341, 197, 197, 19176, 7304, 2357, 197, 90448, 6003, 5070, 284, 729, 368, 341, 7847, 470, 7804, 419, 280, 197, 197, 2315, 197, 6948, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStepper_Ascend(t *testing.T) { st := Stepper{ N: 1, U: Unum, } rng, err := st.Ascend(100, 3) assert.NoError(t, err) var vals []int for rng.HasNext() { vals = append(vals, rng.Next().(int)) } assert.Equal(t, []int{100, 101, 102}, vals) }
explode_data.jsonl/78069
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 20903, 6922, 1566, 2388, 408, 1155, 353, 8840, 836, 8, 341, 18388, 1669, 3360, 6922, 515, 197, 18317, 25, 220, 16, 345, 197, 15980, 25, 1230, 372, 345, 197, 630, 7000, 968, 11, 1848, 1669, 357, 875, 2388, 408, 7, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestScaledWindowAccept verifies the server-side (Accept) path of TCP
// window scaling: when the peer advertises a window scale option during the
// passive handshake, segments sent after the connection is accepted carry a
// scaled advertised window.
func TestScaledWindowAccept(t *testing.T) {
	// This test ensures that window scaling is used when the peer
	// does advertise it and connection is established with Accept().
	c := context.New(t, defaultMTU)
	defer c.Cleanup()
	// Create EP and start listening.
	wq := &waiter.Queue{}
	ep, err := c.Stack().NewEndpoint(tcp.ProtocolNumber, ipv4.ProtocolNumber, wq)
	if err != nil {
		t.Fatalf("NewEndpoint failed: %s", err)
	}
	defer ep.Close()
	// Set the window size greater than the maximum non-scaled window.
	ep.SocketOptions().SetReceiveBufferSize(65535*6, true /* notify */)
	if err := ep.Bind(tcpip.FullAddress{Port: context.StackPort}); err != nil {
		t.Fatalf("Bind failed: %s", err)
	}
	if err := ep.Listen(10); err != nil {
		t.Fatalf("Listen failed: %s", err)
	}
	// Do 3-way handshake.
	// wndScale expected is 3 as 65535 * 3 * 2 < 65535 * 2^3 but > 65535 *2 *2
	c.PassiveConnectWithOptions(100, 3 /* wndScale */, header.TCPSynOptions{MSS: defaultIPv4MSS})
	// Try to accept the connection.
	we, ch := waiter.NewChannelEntry(nil)
	wq.EventRegister(&we, waiter.ReadableEvents)
	defer wq.EventUnregister(&we)
	c.EP, _, err = ep.Accept(nil)
	if cmp.Equal(&tcpip.ErrWouldBlock{}, err) {
		// Wait for connection to be established.
		select {
		case <-ch:
			c.EP, _, err = ep.Accept(nil)
			if err != nil {
				t.Fatalf("Accept failed: %s", err)
			}
		case <-time.After(1 * time.Second):
			t.Fatalf("Timed out waiting for accept")
		}
	}
	// Send a small payload so the next outgoing segment can be inspected.
	data := []byte{1, 2, 3}
	var r bytes.Reader
	r.Reset(data)
	if _, err := c.EP.Write(&r, tcpip.WriteOptions{}); err != nil {
		t.Fatalf("Write failed: %s", err)
	}
	// Check that data is received, and that advertised window is 0x5fff,
	// that is, that it is scaled.
	b := c.GetPacket()
	iss := seqnum.Value(context.TestInitialSequenceNumber).Add(1)
	checker.IPv4(t, b,
		checker.PayloadLen(len(data)+header.TCPMinimumSize),
		checker.TCP(
			checker.DstPort(context.TestPort),
			checker.TCPSeqNum(uint32(c.IRS)+1),
			checker.TCPAckNum(uint32(iss)),
			checker.TCPWindow(0x5fff),
			checker.TCPFlagsMatch(header.TCPFlagAck, ^header.TCPFlagPsh),
		),
	)
}
explode_data.jsonl/75960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 838 }
[ 2830, 3393, 94201, 4267, 16646, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 1273, 25351, 429, 3241, 26943, 374, 1483, 979, 279, 14397, 198, 197, 322, 1558, 50836, 432, 323, 3633, 374, 9555, 448, 20829, 25829, 1444, 1669, 2266, 7121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestShouldSplitIntoUnevenStringsOfFour(t *testing.T) { input := testStringInput + "m" arrayOfStrings := SliceString(input, 4) assert.Equal(t, len(arrayOfStrings), 4) assert.Equal(t, "abcd", arrayOfStrings[0]) assert.Equal(t, "efgh", arrayOfStrings[1]) assert.Equal(t, "ijkl", arrayOfStrings[2]) assert.Equal(t, "m", arrayOfStrings[3]) }
explode_data.jsonl/45962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 14996, 20193, 26591, 55848, 1037, 20859, 2124, 26972, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 1273, 703, 2505, 488, 330, 76, 1837, 11923, 2124, 20859, 1669, 56476, 703, 5384, 11, 220, 19, 692, 6948, 12808, 1155, 11, 2422,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTFJobBasic submits a pipeline whose transform is given as a Kubeflow
// TFJob manifest and asserts that the API currently rejects it with a
// "not supported yet" error (TFJob support is declared but unimplemented).
func TestTFJobBasic(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration tests in short mode")
	}
	c := getPachClient(t)
	require.NoError(t, c.DeleteAll())
	dataRepo := tu.UniqueString("TestSimplePipeline_data")
	require.NoError(t, c.CreateRepo(dataRepo))
	pipeline := tu.UniqueString("pipeline1")
	// Convert the (YAML) TFJob manifest to its JSON string form as the API expects.
	tfJobString := YAMLToJSONString(t, ` apiVersion: kubeflow.org/v1 kind: TFJob metadata: generateName: tfjob namespace: kubeflow spec: tfReplicaSpecs: PS: replicas: 1 restartPolicy: OnFailure template: spec: containers: - name: tensorflow image: gcr.io/your-project/your-image command: - python - -m - trainer.task - --batch_size=32 - --training_steps=1000 Worker: replicas: 3 restartPolicy: OnFailure template: spec: containers: - name: tensorflow image: gcr.io/your-project/your-image command: - python - -m - trainer.task - --batch_size=32 - --training_steps=1000 `)
	_, err := c.PpsAPIClient.CreatePipeline(
		context.Background(),
		&pps.CreatePipelineRequest{
			Pipeline: client.NewPipeline(pipeline),
			Input:    client.NewPFSInput(dataRepo, "/*"),
			TFJob:    &pps.TFJob{TFJob: tfJobString},
		})
	// The call must fail, and the failure must mention the missing support.
	require.YesError(t, err)
	require.Matches(t, "not supported yet", err.Error())
}
explode_data.jsonl/49533
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 875 }
[ 2830, 3393, 10808, 12245, 15944, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 85945, 17590, 7032, 304, 2805, 3856, 1138, 197, 630, 1444, 1669, 57720, 610, 2959, 1155, 340, 17957, 35699, 1155, 11, 272,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMeterZero(t *testing.T) { m := NewMeter() if count := m.Count(); count != 0 { t.Errorf("m.Count(): 0 != %v\n", count) } }
explode_data.jsonl/72438
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 60 }
[ 2830, 3393, 68224, 17999, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 68224, 741, 743, 1760, 1669, 296, 6134, 2129, 1760, 961, 220, 15, 341, 197, 3244, 13080, 445, 76, 6134, 4555, 220, 15, 961, 1018, 85, 1699, 497, 1760, 340, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestReadBufConfig(t *testing.T) { v := New() v.SetConfigType("yaml") v.ReadConfig(bytes.NewBuffer(yamlExample)) t.Log(v.AllKeys()) assert.True(t, v.InConfig("name")) assert.False(t, v.InConfig("state")) assert.Equal(t, "steve", v.Get("name")) assert.Equal(t, []interface{}{"skateboarding", "snowboarding", "go"}, v.Get("hobbies")) assert.Equal(t, map[string]interface{}{"jacket": "leather", "trousers": "denim", "pants": map[string]interface{}{"size": "large"}}, v.Get("clothing")) assert.Equal(t, 35, v.Get("age")) }
explode_data.jsonl/5576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 4418, 15064, 2648, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 1532, 741, 5195, 4202, 2648, 929, 445, 41466, 1138, 5195, 6503, 2648, 23158, 7121, 4095, 7021, 9467, 13314, 1171, 3244, 5247, 3747, 16764, 8850, 12367, 6948, 32443, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDecodeDockerConfigFieldAuth covers decoding of the docker config
// "auth" field: a valid base64 "user:pass" payload yields the username and
// password, while well-formed base64 without a colon separator and invalid
// base64 both produce an error.
func TestDecodeDockerConfigFieldAuth(t *testing.T) {
	tests := []struct {
		input    string
		username string
		password string
		fail     bool
	}{
		// auth field decodes to username & password
		{
			input:    "Zm9vOmJhcg==",
			username: "foo",
			password: "bar",
		},
		// good base64 data, but no colon separating username & password
		{
			input: "cGFudHM=",
			fail:  true,
		},
		// bad base64 data
		{
			input: "pants",
			fail:  true,
		},
	}
	for i, tt := range tests {
		username, password, err := decodeDockerConfigFieldAuth(tt.input)
		// Error presence must match the case's expectation exactly.
		if (err != nil) != tt.fail {
			t.Errorf("case %d: expected fail=%t, got err=%v", i, tt.fail, err)
		}
		if tt.username != username {
			t.Errorf("case %d: expected username %q, got %q", i, tt.username, username)
		}
		if tt.password != password {
			t.Errorf("case %d: expected password %q, got %q", i, tt.password, password)
		}
	}
}
explode_data.jsonl/40712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 32564, 35, 13659, 2648, 1877, 5087, 1155, 353, 8840, 836, 8, 972, 78216, 1669, 3056, 1235, 972, 197, 22427, 262, 914, 319, 197, 72358, 914, 319, 197, 58199, 914, 319, 197, 63052, 257, 1807, 319, 197, 92, 1666, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStartingCounts(t *testing.T) { db := pgtest.NewGormDB(t) counts := getStartingResponseCounts(db, logger.Default) assert.Equal(t, 0, len(counts)) ks := keystore.New(db, utils.FastScryptParams) err := ks.Unlock("p4SsW0rD1!@#_") require.NoError(t, err) k, err := ks.Eth().Create(big.NewInt(0)) require.NoError(t, err) b := time.Now() n1, n2, n3, n4 := int64(0), int64(1), int64(2), int64(3) m1 := bulletprooftxmanager.EthTxMeta{ RequestID: utils.PadByteToHash(0x10), } md1, err := json.Marshal(&m1) require.NoError(t, err) md1_ := datatypes.JSON(md1) m2 := bulletprooftxmanager.EthTxMeta{ RequestID: utils.PadByteToHash(0x11), } md2, err := json.Marshal(&m2) md2_ := datatypes.JSON(md2) require.NoError(t, err) var txes = []bulletprooftxmanager.EthTx{ { Nonce: &n1, FromAddress: k.Address.Address(), Error: null.String{}, BroadcastAt: &b, CreatedAt: b, State: bulletprooftxmanager.EthTxConfirmed, Meta: &datatypes.JSON{}, EncodedPayload: []byte{}, }, { Nonce: &n2, FromAddress: k.Address.Address(), Error: null.String{}, BroadcastAt: &b, CreatedAt: b, State: bulletprooftxmanager.EthTxConfirmed, Meta: &md1_, EncodedPayload: []byte{}, }, { Nonce: &n3, FromAddress: k.Address.Address(), Error: null.String{}, BroadcastAt: &b, CreatedAt: b, State: bulletprooftxmanager.EthTxConfirmed, Meta: &md2_, EncodedPayload: []byte{}, }, { Nonce: &n4, FromAddress: k.Address.Address(), Error: null.String{}, BroadcastAt: &b, CreatedAt: b, State: bulletprooftxmanager.EthTxConfirmed, Meta: &md2_, EncodedPayload: []byte{}, }, } require.NoError(t, db.Create(&txes).Error) counts = getStartingResponseCounts(db, logger.Default) assert.Equal(t, 2, len(counts)) assert.Equal(t, uint64(1), counts[utils.PadByteToHash(0x10)]) assert.Equal(t, uint64(2), counts[utils.PadByteToHash(0x11)]) }
explode_data.jsonl/79426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1081 }
[ 2830, 3393, 24617, 63731, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 17495, 1944, 7121, 38, 493, 3506, 1155, 340, 18032, 82, 1669, 633, 24617, 2582, 63731, 9791, 11, 5925, 13275, 340, 6948, 12808, 1155, 11, 220, 15, 11, 2422, 11512, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFindParamsToRequestParameters incrementally populates a FindParams
// struct and, after every mutation, asserts (via paramRequestCmp) that its
// request-parameter conversion matches the expected parameter list built in
// parallel. The final step clears Amount and removes the matching entry
// (index 3) from the expected list.
func TestFindParamsToRequestParameters(t *testing.T) {
	lParams := &FindParams{}
	paramList := communicator.RequestParams{}
	// Empty params convert to an empty parameter list.
	paramRequestCmp(t, lParams, paramList)
	{
		lParams.CountryCode = new(string)
		*lParams.CountryCode = "NL"
		param, _ := communicator.NewRequestParam("countryCode", "NL")
		paramList = append(paramList, *param)
	}
	paramRequestCmp(t, lParams, paramList)
	{
		lParams.CurrencyCode = new(string)
		*lParams.CurrencyCode = "EUR"
		param, _ := communicator.NewRequestParam("currencyCode", "EUR")
		paramList = append(paramList, *param)
	}
	paramRequestCmp(t, lParams, paramList)
	{
		lParams.Locale = new(string)
		*lParams.Locale = "nl_NL"
		param, _ := communicator.NewRequestParam("locale", "nl_NL")
		paramList = append(paramList, *param)
	}
	paramRequestCmp(t, lParams, paramList)
	{
		// Numeric fields are rendered as their decimal string form.
		amount := new(int64)
		*amount = 1000
		lParams.Amount = amount
		param, _ := communicator.NewRequestParam("amount", "1000")
		paramList = append(paramList, *param)
	}
	paramRequestCmp(t, lParams, paramList)
	{
		// Booleans are rendered as "true"/"false".
		isRecurring := new(bool)
		*isRecurring = true
		lParams.IsRecurring = isRecurring
		param, _ := communicator.NewRequestParam("isRecurring", "true")
		paramList = append(paramList, *param)
	}
	paramRequestCmp(t, lParams, paramList)
	{
		// Each AddHide call contributes a separate "hide" parameter.
		lParams.AddHide("fields")
		param, _ := communicator.NewRequestParam("hide", "fields")
		paramList = append(paramList, *param)
	}
	paramRequestCmp(t, lParams, paramList)
	{
		lParams.AddHide("accountsOnFile")
		param, _ := communicator.NewRequestParam("hide", "accountsOnFile")
		paramList = append(paramList, *param)
	}
	paramRequestCmp(t, lParams, paramList)
	{
		// Clearing Amount drops its entry (position 3) from the expected list.
		lParams.Amount = nil
		paramList = append(paramList[0:3], paramList[4:]...)
	}
	paramRequestCmp(t, lParams, paramList)
}
explode_data.jsonl/3845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 685 }
[ 2830, 3393, 9885, 4870, 1249, 1900, 9706, 1155, 353, 8840, 836, 8, 341, 8810, 4870, 1669, 609, 9885, 4870, 16094, 36037, 852, 1669, 86671, 9659, 4870, 31483, 36037, 1900, 34, 1307, 1155, 11, 326, 4870, 11, 1685, 852, 692, 197, 515, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshalGoLineSlice(t *testing.T) { a := assert.New(t) test := func(msg string, expected []types.GoLine, sliceLen, golineBytes []byte) { var buf bytes.Buffer buf.Write(sliceLen) buf.Write(golineBytes) actual, _ := unmarshalGoLineSlice(buf.Bytes()) a.Equal(expected, actual, msg) } test("empty", []types.GoLine{}, []byte{0, 0, 0, 0, 0, 0, 0, 0}, nil) test("contains item", []types.GoLine{*goLine}, []byte{0, 0, 0, 0, 0, 0, 0, 1}, goLineBytes) }
explode_data.jsonl/72610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 1806, 27121, 10850, 2460, 33236, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 340, 18185, 1669, 2915, 8119, 914, 11, 3601, 3056, 9242, 67131, 2460, 11, 15983, 11271, 11, 342, 21908, 7078, 3056, 3782, 8, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMinTimes1(t *testing.T) { // It fails if there are no calls reporter, ctrl := createFixtures(t) subject := new(Subject) ctrl.RecordCall(subject, "FooMethod", "argument").MinTimes(1) reporter.assertFatal(func() { ctrl.Finish() }) // It succeeds if there is one call reporter, ctrl = createFixtures(t) subject = new(Subject) ctrl.RecordCall(subject, "FooMethod", "argument").MinTimes(1) ctrl.Call(subject, "FooMethod", "argument") ctrl.Finish() // It succeeds if there are many calls reporter, ctrl = createFixtures(t) subject = new(Subject) ctrl.RecordCall(subject, "FooMethod", "argument").MinTimes(1) for i := 0; i < 100; i++ { ctrl.Call(subject, "FooMethod", "argument") } ctrl.Finish() }
explode_data.jsonl/17279
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 6217, 18889, 16, 1155, 353, 8840, 836, 8, 341, 197, 322, 1084, 14525, 421, 1052, 525, 902, 6738, 198, 69931, 261, 11, 23743, 1669, 1855, 25958, 18513, 1155, 340, 28624, 583, 1669, 501, 7, 13019, 340, 84381, 49959, 7220, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOvirtActuator drives OvirtActuator.GenerateMachineSets through a
// table of cluster-deployment/machine-pool fixtures and validates the
// generated machine sets (name and replica count) or the expected error.
func TestOvirtActuator(t *testing.T) {
	tests := []struct {
		name                       string
		clusterDeployment          *hivev1.ClusterDeployment
		pool                       *hivev1.MachinePool
		expectedMachineSetReplicas map[string]int64
		expectedErr                bool
	}{
		{
			name:              "generate machineset",
			clusterDeployment: testOvirtClusterDeployment(),
			pool:              testOvirtPool(),
			expectedMachineSetReplicas: map[string]int64{
				fmt.Sprintf("%s-worker-0", testInfraID): 3,
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			mockCtrl := gomock.NewController(t)
			defer mockCtrl.Finish()
			actuator := &OvirtActuator{
				logger: log.WithField("actuator", "ovirtactuator_test"),
			}
			generatedMachineSets, _, err := actuator.GenerateMachineSets(test.clusterDeployment, test.pool, actuator.logger)
			if test.expectedErr {
				assert.Error(t, err, "expected error for test case")
			} else {
				// NOTE(review): "test cast" below looks like a typo for "test case".
				require.NoError(t, err, "unexpected error for test cast")
				validateOvirtMachineSets(t, generatedMachineSets, test.expectedMachineSetReplicas)
			}
		})
	}
}
explode_data.jsonl/15865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 517 }
[ 2830, 3393, 46, 47381, 2414, 45162, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 5180, 914, 198, 197, 197, 18855, 75286, 688, 353, 88568, 85, 16, 72883, 75286, 198, 197, 85273, 5180, 353, 88568, 85, 16, 1321...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestMasterFor checks which log IDs the LogOperationManager claims
// mastership for under different election factories: nil/no-op factories
// grant all IDs, an "even IDs only" factory grants only even IDs, and a
// failing factory grants none. Each case is checked before and after
// extra IDs are introduced. Timing note: mastership is polled twice with a
// sleep in between to let the election goroutines start and report.
func TestMasterFor(t *testing.T) {
	ctx := context.Background()
	firstIDs := []int64{1, 2, 3, 4}
	allIDs := []int64{1, 2, 3, 4, 5, 6}
	var tests = []struct {
		factory util.ElectionFactory
		want1   []int64
		want2   []int64
	}{
		{factory: nil, want1: firstIDs, want2: allIDs},
		{factory: util.NoopElectionFactory{InstanceID: "test"}, want1: firstIDs, want2: allIDs},
		{factory: masterForEvenFactory{}, want1: []int64{2, 4}, want2: []int64{2, 4, 6}},
		{factory: failureFactory{}, want1: nil, want2: nil},
	}
	for _, test := range tests {
		testCtx, cancel := context.WithCancel(ctx)
		registry := extension.Registry{ElectionFactory: test.factory}
		info := LogOperationInfo{
			Registry:   registry,
			TimeSource: util.SystemTimeSource{},
		}
		lom := NewLogOperationManager(info, nil)
		// Check mastership twice, to give the election threads a chance to get started and report.
		lom.masterFor(testCtx, firstIDs)
		time.Sleep(2 * minMasterCheckInterval)
		logIDs, err := lom.masterFor(testCtx, firstIDs)
		if !reflect.DeepEqual(logIDs, test.want1) {
			t.Errorf("masterFor(factory=%T)=%v,%v; want %v,_", test.factory, logIDs, err, test.want1)
			cancel()
			continue
		}
		// Now add extra IDs and re-check.
		lom.masterFor(testCtx, allIDs)
		time.Sleep(2 * minMasterCheckInterval)
		logIDs, err = lom.masterFor(testCtx, allIDs)
		if !reflect.DeepEqual(logIDs, test.want2) {
			t.Errorf("masterFor(factory=%T)=%v,%v; want %v,_", test.factory, logIDs, err, test.want2)
			cancel()
			continue
		}
		cancel()
		// Allow background election activity to settle before the next case.
		time.Sleep(2 * info.MasterCheckInterval)
	}
}
explode_data.jsonl/55527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 643 }
[ 2830, 3393, 18041, 2461, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 42190, 30466, 1669, 3056, 396, 21, 19, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 532, 50960, 30466, 1669, 3056, 396, 21, 19, 90, 16, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestMultipleHostPrune exercises two-stage (fossil-based) pruning across
// snapshots owned by two different hosts. A non-exclusive prune first turns
// a removed revision's exclusive chunks into fossils; the fossils are only
// collected once every host has produced a snapshot newer than the fossil
// collection. checkTestSnapshots(mgr, snapshots, fossils) asserts the
// expected counts after each step.
func TestMultipleHostPrune(t *testing.T) {
	setTestingT(t)
	testDir := path.Join(os.TempDir(), "duplicacy_test", "snapshot_test")
	snapshotManager := createTestSnapshotManager(testDir)
	chunkSize := 1024
	chunkHash1 := uploadRandomChunk(snapshotManager, chunkSize)
	chunkHash2 := uploadRandomChunk(snapshotManager, chunkSize)
	chunkHash3 := uploadRandomChunk(snapshotManager, chunkSize)
	chunkHash4 := uploadRandomChunk(snapshotManager, chunkSize)
	now := time.Now().Unix()
	day := int64(24 * 3600)
	t.Logf("Creating 3 snapshot")
	// Overlapping chunk lists: chunkHash1 is exclusive to vm1@host1 rev 1.
	createTestSnapshot(snapshotManager, "vm1@host1", 1, now - 3 * day - 3600, now - 3 * day - 60, []string { chunkHash1, chunkHash2 })
	createTestSnapshot(snapshotManager, "vm1@host1", 2, now - 2 * day - 3600, now - 2 * day - 60, []string { chunkHash2, chunkHash3 })
	createTestSnapshot(snapshotManager, "vm2@host2", 1, now - 3 * day - 3600, now - 3 * day - 60, []string { chunkHash3, chunkHash4 })
	checkTestSnapshots(snapshotManager, 3, 0)
	t.Logf("Removing snapshot vm1@host1 revision 1 without --exclusive")
	// Non-exclusive prune: rev 1's chunks become fossils rather than being deleted.
	snapshotManager.PruneSnapshots(testDir, "vm1@host1", "vm1@host1", []int{1}, []string{}, []string{}, false, false, []string{}, false, false, false)
	checkTestSnapshots(snapshotManager, 2, 2)
	t.Logf("Prune without removing any snapshots -- no fossils will be deleted")
	snapshotManager.PruneSnapshots(testDir, "vm1@host1", "vm1@host1", []int{}, []string{}, []string{}, false, false, []string{}, false, false, false)
	checkTestSnapshots(snapshotManager, 2, 2)
	t.Logf("Creating 1 snapshot")
	chunkHash5 := uploadRandomChunk(snapshotManager, chunkSize)
	// Only host2 has a newer snapshot so far; fossils still cannot be collected.
	createTestSnapshot(snapshotManager, "vm2@host2", 2, now + 1 * day - 3600 , now + 1 * day, []string {chunkHash4, chunkHash5})
	checkTestSnapshots(snapshotManager, 3, 2)
	t.Logf("Prune without removing any snapshots -- no fossils will be deleted")
	snapshotManager.PruneSnapshots(testDir, "vm1@host1", "vm1@host1", []int{}, []string{}, []string{}, false, false, []string{}, false, false, false)
	checkTestSnapshots(snapshotManager, 3, 2)
	t.Logf("Creating 1 snapshot")
	chunkHash6 := uploadRandomChunk(snapshotManager, chunkSize)
	// Now host1 also has a newer snapshot, so the next prune may collect fossils.
	createTestSnapshot(snapshotManager, "vm1@host1", 3, now + 1 * day - 3600 , now + 1 * day, []string {chunkHash5, chunkHash6})
	checkTestSnapshots(snapshotManager, 4, 2)
	t.Logf("Prune without removing any snapshots -- fossils will be deleted")
	snapshotManager.PruneSnapshots(testDir, "vm1@host1", "vm1@host1", []int{}, []string{}, []string{}, false, false, []string{}, false, false, false)
	checkTestSnapshots(snapshotManager, 4, 0)
}
explode_data.jsonl/54060
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 941 }
[ 2830, 3393, 32089, 9296, 3533, 2886, 1155, 353, 8840, 836, 8, 1476, 262, 738, 16451, 51, 1155, 692, 262, 1273, 6184, 1669, 1815, 22363, 9638, 65009, 6184, 1507, 330, 67, 9846, 2757, 4452, 497, 330, 35501, 4452, 5130, 262, 16295, 2043, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInvalidKubernetesVersion ensures that cluster configurations whose
// kubernetesVersion is not a well-formed version string (non-numeric
// component, trailing garbage, or a leading '$') are rejected with a
// "must be a valid version" error.
func TestInvalidKubernetesVersion(t *testing.T) {
	testCases := []string{` kubernetesVersion: v1.x.3 `, ` kubernetesVersion: v1.11.3yes `, ` kubernetesVersion: $v1.11.3 `}
	for _, testCase := range testCases {
		// Each bad version is appended to an otherwise-valid base config.
		confBody := singleAzConfigYaml + testCase
		_, err := ClusterFromBytes([]byte(confBody))
		if err == nil || !strings.Contains(err.Error(), "must be a valid version") {
			t.Errorf("expected kubernetesVersion to be validated: %s\n%s", err, confBody)
		}
	}
}
explode_data.jsonl/4378
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 7928, 42, 29827, 5637, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 917, 515, 197, 197, 3989, 74, 29827, 5637, 25, 348, 16, 1993, 13, 18, 198, 12892, 197, 197, 3989, 74, 29827, 5637, 25, 348, 16, 13, 16, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestThis1 checks dynamic `this` binding in the JS interpreter: a function
// stored as a property and invoked through its holder (o.b.g()) must see the
// holder as `this`, so the script's completion value is o.b.prop (42).
func TestThis1(t *testing.T) {
	const SCRIPT = ` function independent() { return this.prop; } var o = {}; o.b = {g: independent, prop: 42}; var rv = o.b.g(); `
	testScript(SCRIPT, intToValue(42), t)
}
explode_data.jsonl/75207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 1986, 16, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 7527, 9489, 368, 341, 197, 853, 419, 18417, 280, 197, 532, 2405, 297, 284, 9321, 22229, 948, 284, 314, 70, 25, 9489, 11, 2004, 25, 220, 19, 17, 2315, 240...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetCurrentDateString pins otime.Now to a fixed instant (parsed from
// "02-Jan-06") via monkey patching and asserts that GetCurrentDateString
// formats it as "20060102" for the given timezone argument.
func TestGetCurrentDateString(t *testing.T) {
	monkey.Patch(otime.Now, func(timezone string) otime.OTime {
		// NOTE(review): this `t` shadows the *testing.T parameter inside the stub.
		t, _ := time.Parse("02-Jan-06", "02-Jan-06")
		return otime.OTime{t}
	})
	// Restore the real otime.Now when the test ends.
	defer monkey.Unpatch(otime.Now)
	assert.Equal(t, "20060102", GetCurrentDateString("Asia/Kolkata"))
}
explode_data.jsonl/43864
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 62981, 46770, 1155, 353, 8840, 836, 8, 341, 197, 96016, 1069, 754, 7, 19175, 13244, 11, 2915, 9730, 8684, 914, 8, 297, 1678, 8382, 1462, 341, 197, 3244, 11, 716, 1669, 882, 8937, 445, 15, 17, 66452, 12, 15, 21, 497, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMedianFetcher_MajorityFetchesCalculatesCorrectMedian verifies that a
// median fetcher still returns the correct median when only a majority of
// its sub-fetchers succeed: 3 of 5 succeeding yields 75, and 4 of 7 yields
// the midpoint 87.5 of the two middle values.
func TestMedianFetcher_MajorityFetchesCalculatesCorrectMedian(t *testing.T) {
	// Fixed-price fetchers always succeed; ef always errors.
	hf50 := newFixedPricedFetcher(decimal.NewFromInt(50))
	hf75 := newFixedPricedFetcher(decimal.NewFromInt(75))
	hf100 := newFixedPricedFetcher(decimal.NewFromInt(100))
	hf999 := newFixedPricedFetcher(decimal.NewFromInt(999))
	ef := newErroringPricedFetcher()
	tests := []struct {
		name           string
		fetchers       []Fetcher
		expectedMedian string
	}{
		{"3/5", []Fetcher{hf50, hf75, hf100, ef, ef}, "75"},
		{"4/7", []Fetcher{hf50, hf75, hf100, hf999, ef, ef, ef}, "87.5"},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			medianFetcher, err := newMedianFetcher(test.fetchers...)
			require.NoError(t, err)
			medianPrice, err := medianFetcher.Fetch(context.Background(), emptyMeta)
			assert.NoError(t, err)
			assert.Equal(t, medianPrice.String(), test.expectedMedian)
		})
	}
}
explode_data.jsonl/26504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 400 }
[ 2830, 3393, 79514, 97492, 1245, 3035, 487, 20714, 288, 57908, 973, 33092, 79514, 1155, 353, 8840, 836, 8, 341, 9598, 69, 20, 15, 1669, 501, 13520, 47, 2216, 291, 97492, 71100, 7121, 3830, 1072, 7, 20, 15, 1171, 9598, 69, 22, 20, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestToArtifactKey(t *testing.T) { artifactKey := ToArtifactKey(datasetID, "artifactID-1") assert.Equal(t, datasetID.Project, artifactKey.DatasetProject) assert.Equal(t, datasetID.Domain, artifactKey.DatasetDomain) assert.Equal(t, datasetID.Name, artifactKey.DatasetName) assert.Equal(t, datasetID.Version, artifactKey.DatasetVersion) assert.Equal(t, artifactKey.ArtifactID, "artifactID-1") }
explode_data.jsonl/8785
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 1249, 85578, 1592, 1155, 353, 8840, 836, 8, 341, 197, 63722, 1592, 1669, 2014, 85578, 1592, 30535, 915, 11, 330, 63722, 915, 12, 16, 1138, 6948, 12808, 1155, 11, 10337, 915, 30944, 11, 36639, 1592, 79356, 7849, 340, 6948, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetMarginOrders(t *testing.T) { request := okgroup.GetSpotOrdersRequest{ InstrumentID: spotCurrency, Status: "all", } _, err := o.GetMarginOrders(request) testStandardErrorHandling(t, err) }
explode_data.jsonl/30187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 1949, 21681, 24898, 1155, 353, 8840, 836, 8, 341, 23555, 1669, 5394, 4074, 2234, 47049, 24898, 1900, 515, 197, 197, 56324, 915, 25, 7702, 26321, 345, 197, 58321, 25, 981, 330, 541, 756, 197, 532, 197, 6878, 1848, 1669, 297...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestAddDuplicateDriver ensures database.AddDBDriver refuses to replace an
// already-registered driver: after attempting to register a driver with the
// same DbType whose create/open functions would fail, creating a database of
// that type must still succeed via the original driver.
func TestAddDuplicateDriver(t *testing.T) {
	supportedDBs := database.SupportedDBs()
	if len(supportedDBs) == 0 {
		t.Errorf("TestAddDuplicateDriver: No backends to test")
		return
	}
	dbType := supportedDBs[0]
	// bogusCreateDB is a function which acts as a bogus create and open
	// driver function and intentionally returns a failure that can be
	// detected if the interface allows a duplicate driver to overwrite an
	// existing one.
	bogusCreateDB := func(args ...interface{}) (database.Db, error) {
		return nil, fmt.Errorf("duplicate driver allowed for database "+
			"type [%v]", dbType)
	}
	// Create a driver that tries to replace an existing one. Set its
	// create and open functions to a function that causes a test failure if
	// they are invoked.
	driver := database.DriverDB{
		DbType:   dbType,
		CreateDB: bogusCreateDB,
		OpenDB:   bogusCreateDB,
	}
	database.AddDBDriver(driver)
	// Ensure creating a database of the type that we tried to replace
	// doesn't fail (if it does, it indicates the driver was erroneously
	// replaced).
	_, teardown, err := createDB(dbType, "dupdrivertest", true)
	if err != nil {
		t.Errorf("TestAddDuplicateDriver: %v", err)
		return
	}
	teardown()
}
explode_data.jsonl/4353
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 387 }
[ 2830, 3393, 2212, 53979, 11349, 1155, 353, 8840, 836, 8, 341, 1903, 12513, 3506, 82, 1669, 4625, 808, 12513, 3506, 82, 741, 743, 2422, 1141, 12513, 3506, 82, 8, 621, 220, 15, 341, 197, 3244, 13080, 445, 2271, 2212, 53979, 11349, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNilBoolColumnValue_Type(t *testing.T) { tests := []struct { name string n *NilBoolColumnValue want ColumnType }{ { name: "1", n: NewNilBoolColumnValue().(*NilBoolColumnValue), want: TypeBool, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := tt.n.Type(); !reflect.DeepEqual(got, tt.want) { t.Errorf("NilBoolColumnValue.Type() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/73236
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 19064, 11233, 2933, 1130, 13729, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 9038, 262, 353, 19064, 11233, 2933, 1130, 198, 197, 50780, 9332, 929, 198, 197, 59403, 197, 197, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestPodSpecSecurityContextValidation table-tests ValidatePodSecurityContext
// with the PodSpecSecurityContext feature flag forced on: a nil context and
// in-range IDs validate cleanly, SELinux/Windows/sysctl fields are rejected as
// disallowed, and runAsUser/runAsGroup/fsGroup/supplementalGroups values
// outside [0, math.MaxInt32] produce out-of-bounds field errors.
func TestPodSpecSecurityContextValidation(t *testing.T) { // Note the feature flag is always enabled on this test tests := []struct { name string sc *corev1.PodSecurityContext want *apis.FieldError }{{ name: "nil", }, { name: "disallowed fields", sc: &corev1.PodSecurityContext{ SELinuxOptions: &corev1.SELinuxOptions{}, WindowsOptions: &corev1.WindowsSecurityContextOptions{}, Sysctls: []corev1.Sysctl{}, }, want: apis.ErrDisallowedFields("seLinuxOptions", "sysctls", "windowsOptions"), }, { name: "too large uid", sc: &corev1.PodSecurityContext{ RunAsUser: ptr.Int64(math.MaxInt32 + 1), }, want: apis.ErrOutOfBoundsValue(int64(math.MaxInt32+1), 0, math.MaxInt32, "runAsUser"), }, { name: "negative uid", sc: &corev1.PodSecurityContext{ RunAsUser: ptr.Int64(-10), }, want: apis.ErrOutOfBoundsValue(-10, 0, math.MaxInt32, "runAsUser"), }, { name: "too large gid", sc: &corev1.PodSecurityContext{ RunAsGroup: ptr.Int64(math.MaxInt32 + 1), }, want: apis.ErrOutOfBoundsValue(int64(math.MaxInt32+1), 0, math.MaxInt32, "runAsGroup"), }, { name: "negative gid", sc: &corev1.PodSecurityContext{ RunAsGroup: ptr.Int64(-10), }, want: apis.ErrOutOfBoundsValue(-10, 0, math.MaxInt32, "runAsGroup"), }, { name: "too large fsGroup", sc: &corev1.PodSecurityContext{ FSGroup: ptr.Int64(math.MaxInt32 + 1), }, want: apis.ErrOutOfBoundsValue(int64(math.MaxInt32+1), 0, math.MaxInt32, "fsGroup"), }, { name: "negative fsGroup", sc: &corev1.PodSecurityContext{ FSGroup: ptr.Int64(-10), }, want: apis.ErrOutOfBoundsValue(-10, 0, math.MaxInt32, "fsGroup"), }, { name: "too large supplementalGroups", sc: &corev1.PodSecurityContext{ SupplementalGroups: []int64{int64(math.MaxInt32 + 1)}, }, want: apis.ErrOutOfBoundsValue(int64(math.MaxInt32+1), 0, math.MaxInt32, "supplementalGroups[0]"), }, { name: "negative supplementalGroups", sc: &corev1.PodSecurityContext{ SupplementalGroups: []int64{-10}, }, want: apis.ErrOutOfBoundsValue(-10, 0, math.MaxInt32, "supplementalGroups[0]"), }} for _, test := range tests { ctx := 
config.ToContext(context.Background(), &config.Config{ Features: &config.Features{ PodSpecSecurityContext: config.Enabled, }, }) t.Run(test.name, func(t *testing.T) { got := ValidatePodSecurityContext(ctx, test.sc) if diff := cmp.Diff(test.want.Error(), got.Error()); diff != "" { t.Errorf("ValidatePodSecurityContext(-want, +got): \n%s", diff) } }) } }
explode_data.jsonl/48125
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1082 }
[ 2830, 3393, 23527, 8327, 15352, 1972, 13799, 1155, 353, 8840, 836, 8, 341, 197, 322, 7036, 279, 4565, 5181, 374, 2677, 8970, 389, 419, 1273, 198, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 29928, 256, 353, 98645, 16, 888...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestJetStreamSubscribe_ConfigCantChange verifies consumer-configuration
// immutability for durable JetStream subscriptions: once a durable consumer
// exists, re-subscribing with a changed option must fail with an error that
// names the offending option; omitting options entirely, or repeating the
// server defaults, must succeed; and nats.Bind to an existing durable works
// without specifying the durable option.
func TestJetStreamSubscribe_ConfigCantChange(t *testing.T) { s := RunBasicJetStreamServer() defer s.Shutdown() if config := s.JetStreamConfig(); config != nil { defer os.RemoveAll(config.StoreDir) } nc, err := nats.Connect(s.ClientURL()) if err != nil { t.Fatalf("Unexpected error: %v", err) } defer nc.Close() js, err := nc.JetStream() if err != nil { t.Fatalf("Unexpected error: %v", err) } // Create the stream using our client API. _, err = js.AddStream(&nats.StreamConfig{ Name: "TEST", Subjects: []string{"foo"}, }) if err != nil { t.Fatalf("Unexpected error: %v", err) } for _, test := range []struct { name string first nats.SubOpt second nats.SubOpt }{ {"description", nats.Description("a"), nats.Description("b")}, {"deliver policy", nats.DeliverAll(), nats.DeliverLast()}, {"optional start sequence", nats.StartSequence(1), nats.StartSequence(10)}, {"optional start time", nats.StartTime(time.Now()), nats.StartTime(time.Now().Add(-2 * time.Hour))}, {"ack wait", nats.AckWait(10 * time.Second), nats.AckWait(15 * time.Second)}, {"max deliver", nats.MaxDeliver(3), nats.MaxDeliver(5)}, {"replay policy", nats.ReplayOriginal(), nats.ReplayInstant()}, {"max waiting", nats.PullMaxWaiting(10), nats.PullMaxWaiting(20)}, {"max ack pending", nats.MaxAckPending(10), nats.MaxAckPending(20)}, } { t.Run(test.name, func(t *testing.T) { durName := nuid.Next() sub, err := js.PullSubscribe("foo", durName, test.first) if err != nil { t.Fatalf("Error on subscribe: %v", err) } // Once it is created, options can't be changed. 
_, err = js.PullSubscribe("foo", durName, test.second) if err == nil || !strings.Contains(err.Error(), test.name) { t.Fatalf("Unexpected error: %v", err) } sub.Unsubscribe() }) } for _, test := range []struct { name string cc *nats.ConsumerConfig opt nats.SubOpt }{ {"ack policy", &nats.ConsumerConfig{AckPolicy: nats.AckAllPolicy}, nats.AckNone()}, {"rate limit", &nats.ConsumerConfig{RateLimit: 10}, nats.RateLimit(100)}, {"flow control", &nats.ConsumerConfig{FlowControl: false}, nats.EnableFlowControl()}, {"heartbeat", &nats.ConsumerConfig{Heartbeat: 10 * time.Second}, nats.IdleHeartbeat(20 * time.Second)}, } { t.Run(test.name, func(t *testing.T) { durName := nuid.Next() cc := test.cc cc.Durable = durName cc.DeliverSubject = nuid.Next() if _, err := js.AddConsumer("TEST", cc); err != nil { t.Fatalf("Error creating consumer: %v", err) } sub, err := js.SubscribeSync("foo", nats.Durable(durName), test.opt) if err == nil || !strings.Contains(err.Error(), test.name) { t.Fatalf("Unexpected error: %v", err) } sub.Unsubscribe() }) } // Verify that we don't fail if user did not set it. 
for _, test := range []struct { name string opt nats.SubOpt }{ {"description", nats.Description("a")}, {"deliver policy", nats.DeliverAll()}, {"optional start sequence", nats.StartSequence(10)}, {"optional start time", nats.StartTime(time.Now())}, {"ack wait", nats.AckWait(10 * time.Second)}, {"max deliver", nats.MaxDeliver(3)}, {"replay policy", nats.ReplayOriginal()}, {"max waiting", nats.PullMaxWaiting(10)}, {"max ack pending", nats.MaxAckPending(10)}, } { t.Run(test.name+" not set", func(t *testing.T) { durName := nuid.Next() sub, err := js.PullSubscribe("foo", durName, test.opt) if err != nil { t.Fatalf("Error on subscribe: %v", err) } // If not explicitly asked by the user, we are ok _, err = js.PullSubscribe("foo", durName) if err != nil { t.Fatalf("Unexpected error: %v", err) } sub.Unsubscribe() }) } for _, test := range []struct { name string opt nats.SubOpt }{ {"default deliver policy", nats.DeliverAll()}, {"default ack wait", nats.AckWait(30 * time.Second)}, {"default replay policy", nats.ReplayInstant()}, {"default max waiting", nats.PullMaxWaiting(512)}, {"default ack pending", nats.MaxAckPending(65536)}, } { t.Run(test.name, func(t *testing.T) { durName := nuid.Next() sub, err := js.PullSubscribe("foo", durName) if err != nil { t.Fatalf("Error on subscribe: %v", err) } // If the option is the same as the server default, it is not an error either. 
_, err = js.PullSubscribe("foo", durName, test.opt) if err != nil { t.Fatalf("Unexpected error: %v", err) } sub.Unsubscribe() }) } for _, test := range []struct { name string opt nats.SubOpt }{ {"policy", nats.DeliverNew()}, {"ack wait", nats.AckWait(31 * time.Second)}, {"replay policy", nats.ReplayOriginal()}, {"max waiting", nats.PullMaxWaiting(513)}, {"ack pending", nats.MaxAckPending(2)}, } { t.Run(test.name+" changed from default", func(t *testing.T) { durName := nuid.Next() sub, err := js.PullSubscribe("foo", durName) if err != nil { t.Fatalf("Error on subscribe: %v", err) } // First time it was created with defaults and the // second time a change is attempted, so it is an error. _, err = js.PullSubscribe("foo", durName, test.opt) if err == nil || !strings.Contains(err.Error(), test.name) { t.Fatalf("Unexpected error: %v", err) } sub.Unsubscribe() }) } // Check that binding to a durable (without specifying durable option) works if _, err := js.AddConsumer("TEST", &nats.ConsumerConfig{ Durable: "BindDurable", DeliverSubject: "bar", }); err != nil { t.Fatalf("Failed to create consumer: %v", err) } if _, err := js.SubscribeSync("foo", nats.Bind("TEST", "BindDurable")); err != nil { t.Fatalf("Error on subscribe: %v", err) } }
explode_data.jsonl/29180
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2282 }
[ 2830, 3393, 35641, 3027, 28573, 35412, 34, 517, 4072, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 6452, 15944, 35641, 3027, 5475, 741, 16867, 274, 10849, 18452, 2822, 743, 2193, 1669, 274, 3503, 295, 3027, 2648, 2129, 2193, 961, 2092, 341...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestV1SetKeyWithBadTTL checks that a v1 PUT to /v1/keys/foo/bar with a
// non-numeric "ttl" form value is rejected with HTTP 400 and an etcd error
// body of errorCode 202 ("The given TTL in POST form is not a number"),
// cause "Set".
func TestV1SetKeyWithBadTTL(t *testing.T) { tests.RunServer(func(s *server.Server) { v := url.Values{} v.Set("value", "XXX") v.Set("ttl", "bad_ttl") resp, _ := tests.PutForm(fmt.Sprintf("%s%s", s.URL(), "/v1/keys/foo/bar"), v) assert.Equal(t, resp.StatusCode, http.StatusBadRequest) body := tests.ReadBodyJSON(resp) assert.Equal(t, body["errorCode"], 202, "") assert.Equal(t, body["message"], "The given TTL in POST form is not a number", "") assert.Equal(t, body["cause"], "Set", "") }) }
explode_data.jsonl/24836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 53, 16, 1649, 1592, 2354, 17082, 51, 13470, 1155, 353, 8840, 836, 8, 341, 78216, 16708, 5475, 18552, 1141, 353, 4030, 22997, 8, 341, 197, 5195, 1669, 2515, 35145, 16094, 197, 5195, 4202, 445, 957, 497, 330, 30100, 1138, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransportReturnsDataPaddingFlowControl checks that the HTTP/2 client
// transport returns flow-control credit for DATA-frame padding — the pad
// bytes plus the one-byte pad-length field — by emitting both a
// connection-level (stream 0) and a stream-level WINDOW_UPDATE for that
// amount after receiving a padded DATA frame.
func TestTransportReturnsDataPaddingFlowControl(t *testing.T) { ct := newClientTester(t) unblockClient := make(chan bool, 1) ct.client = func() error { req, _ := http.NewRequest("GET", "https://dummy.tld/", nil) res, err := ct.tr.RoundTrip(req) if err != nil { return err } defer res.Body.Close() <-unblockClient return nil } ct.server = func() error { ct.greet() var hf *HeadersFrame for { f, err := ct.fr.ReadFrame() if err != nil { return fmt.Errorf("ReadFrame while waiting for Headers: %v", err) } switch f.(type) { case *WindowUpdateFrame, *SettingsFrame: continue } var ok bool hf, ok = f.(*HeadersFrame) if !ok { return fmt.Errorf("Got %T; want HeadersFrame", f) } break } var buf bytes.Buffer enc := hpack.NewEncoder(&buf) enc.WriteField(hpack.HeaderField{Name: ":status", Value: "200"}) enc.WriteField(hpack.HeaderField{Name: "content-length", Value: "5000"}) ct.fr.WriteHeaders(HeadersFrameParam{ StreamID: hf.StreamID, EndHeaders: true, EndStream: false, BlockFragment: buf.Bytes(), }) pad := make([]byte, 5) ct.fr.WriteDataPadded(hf.StreamID, false, make([]byte, 5000), pad) // without ending stream f, err := ct.readNonSettingsFrame() if err != nil { return fmt.Errorf("ReadFrame while waiting for first WindowUpdateFrame: %v", err) } wantBack := uint32(len(pad)) + 1 // one byte for the length of the padding if wuf, ok := f.(*WindowUpdateFrame); !ok || wuf.Increment != wantBack || wuf.StreamID != 0 { return fmt.Errorf("Expected conn WindowUpdateFrame for %d bytes; got %v", wantBack, summarizeFrame(f)) } f, err = ct.readNonSettingsFrame() if err != nil { return fmt.Errorf("ReadFrame while waiting for second WindowUpdateFrame: %v", err) } if wuf, ok := f.(*WindowUpdateFrame); !ok || wuf.Increment != wantBack || wuf.StreamID == 0 { return fmt.Errorf("Expected stream WindowUpdateFrame for %d bytes; got %v", wantBack, summarizeFrame(f)) } unblockClient <- true return nil } ct.run() }
explode_data.jsonl/16129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 824 }
[ 2830, 3393, 27560, 16446, 1043, 21616, 18878, 3273, 1155, 353, 8840, 836, 8, 341, 89216, 1669, 501, 2959, 58699, 1155, 692, 20479, 4574, 2959, 1669, 1281, 35190, 1807, 11, 220, 16, 692, 89216, 6581, 284, 2915, 368, 1465, 341, 197, 24395...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDelegate_InvalidLog feeds the VRF log listener a randomness-request
// broadcast whose key hash does not match the job's key. The log must still
// be queued and marked consumed once a confirming head arrives, the pipeline
// run must error in the VRF task with "invalid key hash" (while the log
// ABI-decode task itself succeeds), and no eth transaction may be enqueued.
func TestDelegate_InvalidLog(t *testing.T) { vuni, listener, jb := setup(t) vuni.lb.On("WasAlreadyConsumed", mock.Anything, mock.Anything).Return(false, nil) done := make(chan struct{}) vuni.lb.On("MarkConsumed", mock.Anything, mock.Anything).Run(func(args mock.Arguments) { done <- struct{}{} }).Return(nil).Once() // Expect a call to check if the req is already fulfilled. vuni.ec.On("CallContract", mock.Anything, mock.Anything, mock.Anything).Return(generateCallbackReturnValues(t, false), nil) added := make(chan struct{}) listener.reqAdded = func() { added <- struct{}{} } // Send an invalid log (keyhash doesnt match) listener.HandleLog(log.NewLogBroadcast(types.Log{ // Data has all the NON-indexed parameters Data: append(append(append(append( utils.NewHash().Bytes(), // key hash common.BigToHash(big.NewInt(42)).Bytes()...), // seed utils.NewHash().Bytes()...), // sender utils.NewHash().Bytes()...), // fee utils.NewHash().Bytes()...), // requestID // JobID is indexed, that's why it lives in the Topics. Topics: []common.Hash{ VRFRandomnessRequestLogTopic(), jb.ExternalIDEncodeBytesToTopic(), // jobID }, Address: common.Address{}, BlockNumber: 10, TxHash: common.Hash{}, TxIndex: 0, BlockHash: common.Hash{}, Index: 0, Removed: false, }, vuni.cid, nil)) waitForChannel(t, added, time.Second, "request not queued") // Feed it a head which confirms it. listener.OnNewLongestChain(context.Background(), &eth.Head{Number: 16}) waitForChannel(t, done, time.Second, "log not consumed") // Should create a run that errors in the vrf task runs, err := vuni.prm.GetAllRuns() require.NoError(t, err) require.Equal(t, len(runs), 1) for _, tr := range runs[0].PipelineTaskRuns { if tr.Type == pipeline.TaskTypeVRF { assert.Contains(t, tr.Error.String, "invalid key hash") } // Log parsing task itself should succeed. 
if tr.Type != pipeline.TaskTypeETHABIDecodeLog { assert.False(t, tr.Output.Valid) } } // Ensure we have NOT queued up an eth transaction var ethTxes []bulletprooftxmanager.EthTx err = vuni.prm.GetQ().Select(&ethTxes, `SELECT * FROM eth_txes;`) require.NoError(t, err) require.Len(t, ethTxes, 0) }
explode_data.jsonl/45856
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 836 }
[ 2830, 3393, 9381, 62, 7928, 2201, 1155, 353, 8840, 836, 8, 341, 5195, 15705, 11, 11446, 11, 87118, 1669, 6505, 1155, 340, 5195, 15705, 44262, 8071, 445, 26034, 38370, 41966, 291, 497, 7860, 13311, 1596, 11, 7860, 13311, 1596, 568, 5598,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMiddleWareSecurityHeaders checks the middleware's security response
// headers driven by global settings: X-XSS-Protection is emitted only when
// enabled, and Strict-Transport-Security (HTTPS scheme only) accumulates
// max-age, preload, and includeSubDomains as the corresponding flags are
// switched on.
func TestMiddleWareSecurityHeaders(t *testing.T) { setting.ErrTemplateName = errorTemplate Convey("Given the grafana middleware", t, func() { middlewareScenario(t, "middleware should get correct x-xss-protection header", func(sc *scenarioContext) { setting.XSSProtectionHeader = true sc.fakeReq("GET", "/api/").exec() So(sc.resp.Header().Get("X-XSS-Protection"), ShouldEqual, "1; mode=block") }) middlewareScenario(t, "middleware should not get x-xss-protection when disabled", func(sc *scenarioContext) { setting.XSSProtectionHeader = false sc.fakeReq("GET", "/api/").exec() So(sc.resp.Header().Get("X-XSS-Protection"), ShouldBeEmpty) }) middlewareScenario(t, "middleware should add correct Strict-Transport-Security header", func(sc *scenarioContext) { setting.StrictTransportSecurity = true setting.Protocol = setting.HTTPSScheme setting.StrictTransportSecurityMaxAge = 64000 sc.fakeReq("GET", "/api/").exec() So(sc.resp.Header().Get("Strict-Transport-Security"), ShouldEqual, "max-age=64000") setting.StrictTransportSecurityPreload = true sc.fakeReq("GET", "/api/").exec() So(sc.resp.Header().Get("Strict-Transport-Security"), ShouldEqual, "max-age=64000; preload") setting.StrictTransportSecuritySubDomains = true sc.fakeReq("GET", "/api/").exec() So(sc.resp.Header().Get("Strict-Transport-Security"), ShouldEqual, "max-age=64000; preload; includeSubDomains") }) }) }
explode_data.jsonl/19356
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 525 }
[ 2830, 3393, 43935, 71770, 15352, 10574, 1155, 353, 8840, 836, 8, 341, 8196, 1280, 27862, 68582, 284, 1465, 7275, 271, 93070, 5617, 445, 22043, 279, 58300, 3362, 29679, 497, 259, 11, 2915, 368, 341, 197, 2109, 11603, 54031, 1155, 11, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIngressStore checks the Prometheus metric families generated for
// Ingress objects (kube_ingress_info, _created, _labels,
// _metadata_resource_version, _path, _tls) against expected exposition-format
// text for ingresses with and without creation timestamps, labels, HTTP rule
// paths/backends, and TLS sections.
func TestIngressStore(t *testing.T) { startTime := 1501569018 metav1StartTime := metav1.Unix(int64(startTime), 0) // Fixed metadata on type and help text. We prepend this to every expected // output so we only have to modify a single place when doing adjustments. const metadata = ` # HELP kube_ingress_created Unix creation timestamp # HELP kube_ingress_info Information about ingress. # HELP kube_ingress_labels Kubernetes labels converted to Prometheus labels. # HELP kube_ingress_metadata_resource_version Resource version representing a specific version of ingress. # HELP kube_ingress_path Ingress host, paths and backend service information. # HELP kube_ingress_tls Ingress TLS host and secret information. # TYPE kube_ingress_created gauge # TYPE kube_ingress_info gauge # TYPE kube_ingress_labels gauge # TYPE kube_ingress_metadata_resource_version gauge # TYPE kube_ingress_path gauge # TYPE kube_ingress_tls gauge ` cases := []generateMetricsTestCase{ { Obj: &networkingv1.Ingress{ ObjectMeta: metav1.ObjectMeta{ Name: "ingress1", Namespace: "ns1", ResourceVersion: "000000", }, }, Want: metadata + ` kube_ingress_info{namespace="ns1",ingress="ingress1"} 1 kube_ingress_metadata_resource_version{namespace="ns1",ingress="ingress1"} 0 kube_ingress_labels{namespace="ns1",ingress="ingress1"} 1 `, MetricNames: []string{"kube_ingress_info", "kube_ingress_metadata_resource_version", "kube_ingress_created", "kube_ingress_labels", "kube_ingress_path", "kube_ingress_tls"}, }, { Obj: &networkingv1.Ingress{ ObjectMeta: metav1.ObjectMeta{ Name: "ingress2", Namespace: "ns2", CreationTimestamp: metav1StartTime, ResourceVersion: "123456", }, }, Want: metadata + ` kube_ingress_info{namespace="ns2",ingress="ingress2"} 1 kube_ingress_created{namespace="ns2",ingress="ingress2"} 1.501569018e+09 kube_ingress_metadata_resource_version{namespace="ns2",ingress="ingress2"} 123456 kube_ingress_labels{namespace="ns2",ingress="ingress2"} 1 `, MetricNames: []string{"kube_ingress_info", 
"kube_ingress_metadata_resource_version", "kube_ingress_created", "kube_ingress_labels", "kube_ingress_path", "kube_ingress_tls"}, }, { Obj: &networkingv1.Ingress{ ObjectMeta: metav1.ObjectMeta{ Name: "ingress3", Namespace: "ns3", CreationTimestamp: metav1StartTime, Labels: map[string]string{"test-3": "test-3"}, ResourceVersion: "abcdef", }, }, Want: metadata + ` kube_ingress_info{namespace="ns3",ingress="ingress3"} 1 kube_ingress_created{namespace="ns3",ingress="ingress3"} 1.501569018e+09 kube_ingress_labels{namespace="ns3",ingress="ingress3"} 1 `, MetricNames: []string{"kube_ingress_info", "kube_ingress_metadata_resource_version", "kube_ingress_created", "kube_ingress_labels", "kube_ingress_path", "kube_ingress_tls"}, }, { Obj: &networkingv1.Ingress{ ObjectMeta: metav1.ObjectMeta{ Name: "ingress4", Namespace: "ns4", CreationTimestamp: metav1StartTime, Labels: map[string]string{"test-4": "test-4"}, ResourceVersion: "abcdef", }, Spec: networkingv1.IngressSpec{ Rules: []networkingv1.IngressRule{ { Host: "somehost", IngressRuleValue: networkingv1.IngressRuleValue{ HTTP: &networkingv1.HTTPIngressRuleValue{ Paths: []networkingv1.HTTPIngressPath{ { Path: "/somepath", Backend: networkingv1.IngressBackend{ Service: &networkingv1.IngressServiceBackend{ Name: "someservice", Port: networkingv1.ServiceBackendPort{ Number: 1234, }, }, }, }, }, }, }, }, { Host: "somehost2", }, }, }, }, Want: metadata + ` kube_ingress_info{namespace="ns4",ingress="ingress4"} 1 kube_ingress_created{namespace="ns4",ingress="ingress4"} 1.501569018e+09 kube_ingress_labels{namespace="ns4",ingress="ingress4"} 1 kube_ingress_path{namespace="ns4",ingress="ingress4",host="somehost",path="/somepath",service_name="someservice",service_port="1234"} 1 `, MetricNames: []string{"kube_ingress_info", "kube_ingress_metadata_resource_version", "kube_ingress_created", "kube_ingress_labels", "kube_ingress_path", "kube_ingress_tls"}, }, { Obj: &networkingv1.Ingress{ ObjectMeta: metav1.ObjectMeta{ Name: "ingress5", 
Namespace: "ns5", CreationTimestamp: metav1StartTime, Labels: map[string]string{"test-5": "test-5"}, ResourceVersion: "abcdef", }, Spec: networkingv1.IngressSpec{ TLS: []networkingv1.IngressTLS{ { Hosts: []string{"somehost1", "somehost2"}, SecretName: "somesecret", }, }, }, }, Want: metadata + ` kube_ingress_info{namespace="ns5",ingress="ingress5"} 1 kube_ingress_created{namespace="ns5",ingress="ingress5"} 1.501569018e+09 kube_ingress_labels{namespace="ns5",ingress="ingress5"} 1 kube_ingress_tls{namespace="ns5",ingress="ingress5",tls_host="somehost1",secret="somesecret"} 1 kube_ingress_tls{namespace="ns5",ingress="ingress5",tls_host="somehost2",secret="somesecret"} 1 `, MetricNames: []string{"kube_ingress_info", "kube_ingress_metadata_resource_version", "kube_ingress_created", "kube_ingress_labels", "kube_ingress_path", "kube_ingress_tls"}, }, } for i, c := range cases { c.Func = generator.ComposeMetricGenFuncs(ingressMetricFamilies(nil)) c.Headers = generator.ExtractMetricFamilyHeaders(ingressMetricFamilies(nil)) if err := c.run(); err != nil { t.Errorf("unexpected collecting result in %vth run:\n%s", i, err) } } }
explode_data.jsonl/38898
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2693 }
[ 2830, 3393, 641, 2483, 6093, 1155, 353, 8840, 836, 8, 341, 21375, 1462, 1669, 220, 16, 20, 15, 16, 20, 21, 24, 15, 16, 23, 198, 2109, 295, 402, 16, 40203, 1669, 77520, 16, 10616, 941, 1548, 21, 19, 88090, 701, 220, 15, 692, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestJoinConds exercises gorm Joins with extra join conditions: a plain left
// join on pets, a left join filtered by pet name, stacked joins on pets and
// accounts, a non-matching account number yielding zero rows, Where/Not with
// identically named ID fields across joined tables, and finally a dry-run
// assertion that the generated SQL keeps the joins in declaration order.
func TestJoinConds(t *testing.T) { var user = *GetUser("joins-conds", Config{Account: true, Pets: 3}) DB.Save(&user) var users1 []User DB.Joins("left join pets on pets.user_id = users.id").Where("users.name = ?", user.Name).Find(&users1) if len(users1) != 3 { t.Errorf("should find two users using left join, but got %v", len(users1)) } var users2 []User DB.Joins("left join pets on pets.user_id = users.id AND pets.name = ?", user.Pets[0].Name).Where("users.name = ?", user.Name).First(&users2) if len(users2) != 1 { t.Errorf("should find one users using left join with conditions, but got %v", len(users2)) } var users3 []User DB.Joins("left join pets on pets.user_id = users.id AND pets.name = ?", user.Pets[0].Name).Joins("join accounts on accounts.user_id = users.id AND accounts.number = ?", user.Account.Number).Where("users.name = ?", user.Name).First(&users3) if len(users3) != 1 { t.Errorf("should find one users using multiple left join conditions, but got %v", len(users3)) } var users4 []User DB.Joins("left join pets on pets.user_id = users.id AND pets.name = ?", user.Pets[0].Name).Joins("join accounts on accounts.user_id = users.id AND accounts.number = ?", user.Account.Number+"non-exist").Where("users.name = ?", user.Name).First(&users4) if len(users4) != 0 { t.Errorf("should find no user when searching with unexisting credit card, but got %v", len(users4)) } var users5 []User db5 := DB.Joins("left join pets on pets.user_id = users.id AND pets.name = ?", user.Pets[0].Name).Joins("join accounts on accounts.user_id = users.id AND accounts.number = ?", user.Account.Number).Where(User{Model: gorm.Model{ID: 1}}).Where(Account{Model: gorm.Model{ID: 1}}).Not(Pet{Model: gorm.Model{ID: 1}}).Find(&users5) if db5.Error != nil { t.Errorf("Should not raise error for join where identical fields in different tables. 
Error: %s", db5.Error.Error()) } dryDB := DB.Session(&gorm.Session{DryRun: true}) stmt := dryDB.Joins("left join pets on pets.user_id = users.id AND pets.name = ?", user.Pets[0].Name).Joins("join accounts on accounts.user_id = users.id AND accounts.number = ?", user.Account.Number).Where(User{Model: gorm.Model{ID: 1}}).Where(Account{Model: gorm.Model{ID: 1}}).Not(Pet{Model: gorm.Model{ID: 1}}).Find(&users5).Statement if !regexp.MustCompile("SELECT .* FROM .users. left join pets.*join accounts.*").MatchString(stmt.SQL.String()) { t.Errorf("joins should be ordered, but got %v", stmt.SQL.String()) } }
explode_data.jsonl/20075
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 884 }
[ 2830, 3393, 12292, 1109, 5356, 1155, 353, 8840, 836, 8, 341, 2405, 1196, 284, 353, 1949, 1474, 445, 7305, 1330, 12, 7848, 497, 5532, 90, 7365, 25, 830, 11, 35886, 25, 220, 18, 3518, 45409, 13599, 2099, 872, 692, 2405, 3847, 16, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestLenWrapper(t *testing.T) { assert := New(t) mockAssert := New(new(testing.T)) assert.False(mockAssert.Len(nil, 0), "nil does not have length") assert.False(mockAssert.Len(0, 0), "int does not have length") assert.False(mockAssert.Len(true, 0), "true does not have length") assert.False(mockAssert.Len(false, 0), "false does not have length") assert.False(mockAssert.Len('A', 0), "Rune does not have length") assert.False(mockAssert.Len(struct{}{}, 0), "Struct does not have length") ch := make(chan int, 5) ch <- 1 ch <- 2 ch <- 3 cases := []struct { v interface{} l int }{ {[]int{1, 2, 3}, 3}, {[...]int{1, 2, 3}, 3}, {"ABC", 3}, {map[int]int{1: 2, 2: 4, 3: 6}, 3}, {ch, 3}, {[]int{}, 0}, {map[int]int{}, 0}, {make(chan int), 0}, {[]int(nil), 0}, {map[int]int(nil), 0}, {(chan int)(nil), 0}, } for _, c := range cases { assert.True(mockAssert.Len(c.v, c.l), "%#v have %d items", c.v, c.l) } }
explode_data.jsonl/54983
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 429 }
[ 2830, 3393, 11271, 11542, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 1532, 1155, 340, 77333, 8534, 1669, 1532, 1755, 8623, 287, 836, 4390, 6948, 50757, 30389, 8534, 65819, 27907, 11, 220, 15, 701, 330, 8385, 1558, 537, 614, 3084, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestClient_Upload_Item table-tests the Azure storage-files "upload" request
// handling: valid uploads with and without blob_metadata succeed, while
// requests missing the service_url or the data payload must error. Each case
// initializes a fresh client from the storage-account config with a
// 20-second timeout.
func TestClient_Upload_Item(t *testing.T) { dat, err := getTestStructure() require.NoError(t, err) cfg := config.Spec{ Name: "azure-storage-files", Kind: "azure.storage.files", Properties: map[string]string{ "storage_access_key": dat.storageAccessKey, "storage_account": dat.storageAccount, }, } tests := []struct { name string request *types.Request wantErr bool }{ { name: "valid upload item", request: types.NewRequest(). SetMetadataKeyValue("method", "upload"). SetMetadataKeyValue("service_url", dat.serviceURL). SetData(dat.file), wantErr: false, }, { name: "valid upload item with metadata", request: types.NewRequest(). SetMetadataKeyValue("method", "upload"). SetMetadataKeyValue("blob_metadata", `{"tag":"test","name":"myname"}`). SetMetadataKeyValue("service_url", dat.serviceURL). SetData(dat.file), wantErr: false, }, { name: "invalid upload item - missing service_url", request: types.NewRequest(). SetMetadataKeyValue("method", "upload"). SetData(dat.file), wantErr: true, }, { name: "invalid upload item - missing data", request: types.NewRequest(). SetMetadataKeyValue("method", "upload"). SetMetadataKeyValue("service_url", dat.serviceURL), wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second) defer cancel() c := New() err = c.Init(ctx, cfg, nil) require.NoError(t, err) got, err := c.Do(ctx, tt.request) if tt.wantErr { require.Error(t, err) t.Logf("init() error = %v, wantSetErr %v", err, tt.wantErr) return } require.NoError(t, err) require.NotNil(t, got) }) } }
explode_data.jsonl/25067
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 736 }
[ 2830, 3393, 2959, 62, 13844, 27518, 1155, 353, 8840, 836, 8, 341, 2698, 266, 11, 1848, 1669, 633, 2271, 22952, 741, 17957, 35699, 1155, 11, 1848, 340, 50286, 1669, 2193, 36473, 515, 197, 21297, 25, 330, 39495, 62795, 46048, 756, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestGetSourceReader checks mongoimport's getSourceReader: it must error
// when the configured input file does not exist, succeed for an existing
// test-data file, and succeed when no file is given (stdin is used).
func TestGetSourceReader(t *testing.T) { testtype.SkipUnlessTestType(t, testtype.UnitTestType) Convey("Given a mongoimport instance, on calling getSourceReader", t, func() { Convey("an error should be thrown if the given file referenced by "+ "the reader does not exist", func() { imp := NewMockMongoImport() imp.InputOptions.File = "/path/to/input/file/dot/input.txt" imp.InputOptions.Type = CSV imp.ToolOptions.Namespace.Collection = "" _, _, err := imp.getSourceReader() So(err, ShouldNotBeNil) }) Convey("no error should be thrown if the file exists", func() { imp := NewMockMongoImport() imp.InputOptions.File = "testdata/test_array.json" imp.InputOptions.Type = JSON _, _, err := imp.getSourceReader() So(err, ShouldBeNil) }) Convey("no error should be thrown if stdin is used", func() { imp := NewMockMongoImport() imp.InputOptions.File = "" _, _, err := imp.getSourceReader() So(err, ShouldBeNil) }) }) }
explode_data.jsonl/78459
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 394 }
[ 2830, 3393, 1949, 3608, 5062, 1155, 353, 8840, 836, 8, 341, 18185, 1313, 57776, 35587, 2271, 929, 1155, 11, 1273, 1313, 25159, 2271, 929, 340, 93070, 5617, 445, 22043, 264, 33814, 474, 2867, 11, 389, 8098, 71003, 5062, 497, 259, 345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoggerMiddleware(t *testing.T) { var buf bytes.Buffer Logger = log.New(&buf, "", 0) router := New(Context{}) router.Middleware(LoggerMiddleware) router.Get("/action", (*Context).A) // Hit an action: rw, req := newTestRequest("GET", "/action") router.ServeHTTP(rw, req) assertResponse(t, rw, "context-A", 200) // Make sure our buf has something good: logRegexp := regexp.MustCompile("\\[\\d+ .{2}\\] 200 '/action'") if !logRegexp.MatchString(buf.String()) { t.Error("Got invalid log entry: ", buf.String()) } // Do a 404: buf.Reset() rw, req = newTestRequest("GET", "/wat") router.ServeHTTP(rw, req) assertResponse(t, rw, "Not Found", 404) // Make sure our buf has something good: logRegexpNotFound := regexp.MustCompile("\\[\\d+ .{2}\\] 404 '/wat'") if !logRegexpNotFound.MatchString(buf.String()) { t.Error("Got invalid log entry: ", buf.String()) } }
explode_data.jsonl/40636
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 7395, 24684, 1155, 353, 8840, 836, 8, 341, 2405, 6607, 5820, 22622, 198, 55861, 284, 1487, 7121, 2099, 5909, 11, 7342, 220, 15, 692, 67009, 1669, 1532, 14001, 37790, 67009, 1321, 11603, 91018, 24684, 340, 67009, 2234, 4283, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestActivateVotingPeriod(t *testing.T) { mapp, keeper, _, _, _, _ := getMockApp(t, 0, GenesisState{}, nil) header := abci.Header{Height: mapp.LastBlockHeight() + 1} mapp.BeginBlock(abci.RequestBeginBlock{Header: header}) ctx := mapp.BaseApp.NewContext(false, abci.Header{}) tp := testProposal() proposal, err := keeper.SubmitProposal(ctx, tp) require.NoError(t, err) require.True(t, proposal.GetVotingStartTime().Equal(time.Time{})) keeper.activateVotingPeriod(ctx, proposal) require.True(t, proposal.GetVotingStartTime().Equal(ctx.BlockHeader().Time)) proposal, ok := keeper.GetProposal(ctx, proposal.GetProposalID()) require.True(t, ok) activeIterator := keeper.ActiveProposalQueueIterator(ctx, proposal.GetVotingEndTime()) require.True(t, activeIterator.Valid()) var proposalID uint64 keeper.cdc.UnmarshalBinaryLengthPrefixed(activeIterator.Value(), &proposalID) require.Equal(t, proposalID, proposal.GetProposalID()) activeIterator.Close() }
explode_data.jsonl/60866
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 31242, 53, 11519, 23750, 1155, 353, 8840, 836, 8, 341, 2109, 676, 11, 53416, 11, 8358, 8358, 8358, 716, 1669, 633, 11571, 2164, 1155, 11, 220, 15, 11, 40788, 1397, 22655, 2092, 692, 20883, 1669, 668, 5855, 15753, 90, 3640,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClock_Create(t *testing.T) { myClock := Default().Reset() if myClock.WaitJobs() != 0 || myClock.Count() != 0 { t.Errorf("JobList init have error.len=%d,count=%d", myClock.WaitJobs(), myClock.Count()) //joblist.Debug() } }
explode_data.jsonl/2015
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 26104, 34325, 1155, 353, 8840, 836, 8, 341, 13624, 26104, 1669, 7899, 1005, 14828, 741, 743, 847, 26104, 28384, 40667, 368, 961, 220, 15, 1369, 847, 26104, 6134, 368, 961, 220, 15, 341, 197, 3244, 13080, 445, 12245, 852, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNameNamespace(t *testing.T) { type Inner2Namespace struct { String []string `validate:"dive,required" json:"JSONString"` } type Inner1Namespace struct { Inner2 *Inner2Namespace `json:"Inner2JSON"` } type Namespace struct { Inner1 *Inner1Namespace `json:"Inner1JSON"` } validate := New() validate.RegisterTagNameFunc(func(fld reflect.StructField) string { name := strings.SplitN(fld.Tag.Get("json"), ",", 2)[0] if name == "-" { return "" } return name }) i2 := &Inner2Namespace{String: []string{"ok", "ok", "ok"}} i1 := &Inner1Namespace{Inner2: i2} ns := &Namespace{Inner1: i1} errs := validate.Struct(ns) Equal(t, errs, nil) i2.String[1] = "" errs = validate.Struct(ns) NotEqual(t, errs, nil) ve := errs.(ValidationErrors) Equal(t, len(ve), 1) AssertError(t, errs, "Namespace.Inner1JSON.Inner2JSON.JSONString[1]", "Namespace.Inner1.Inner2.String[1]", "JSONString[1]", "String[1]", "required") fe := getError(ve, "Namespace.Inner1JSON.Inner2JSON.JSONString[1]", "Namespace.Inner1.Inner2.String[1]") NotEqual(t, fe, nil) Equal(t, fe.Field(), "JSONString[1]") Equal(t, fe.StructField(), "String[1]") Equal(t, fe.Namespace(), "Namespace.Inner1JSON.Inner2JSON.JSONString[1]") Equal(t, fe.StructNamespace(), "Namespace.Inner1.Inner2.String[1]") }
explode_data.jsonl/77216
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 675, 22699, 1155, 353, 8840, 836, 8, 1476, 13158, 36356, 17, 22699, 2036, 341, 197, 4980, 3056, 917, 1565, 7067, 2974, 67, 533, 11, 6279, 1, 2951, 2974, 5370, 703, 8805, 197, 630, 13158, 36356, 16, 22699, 2036, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWriteHumanReadableStruct(t *testing.T) { str := NewStruct("S1", StructData{ "x": Number(1), "y": Number(2), }) assertWriteHRSEqual(t, "struct S1 {\n x: 1,\n y: 2,\n}", str) }
explode_data.jsonl/60900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 7985, 33975, 57938, 9422, 1155, 353, 8840, 836, 8, 341, 11355, 1669, 1532, 9422, 445, 50, 16, 497, 16139, 1043, 515, 197, 197, 65438, 788, 5624, 7, 16, 1326, 197, 197, 1, 88, 788, 5624, 7, 17, 1326, 197, 3518, 6948, 79...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStart_FirstCallToRevPortForwardFails_CausesASecondCalltoConnectToPod(t *testing.T) { unittest.SmallTest(t) switchboardMock := &mocks.Switchboard{} switchboardMock.On("ClearMeetingPoint", testutils.AnyContext, meetingPoint).Times(2).Return(nil) var reserveMeetingPointWG sync.WaitGroup reserveMeetingPointWG.Add(2) switchboardMock.On("ReserveMeetingPoint", testutils.AnyContext, hostname, username).Times(2).Run(func(args mock.Arguments) { reserveMeetingPointWG.Done() }).Return(meetingPoint, nil) c := New(switchboardMock, &mockRevPortForwardSuccessOnSecondCallToStart{}, hostname, username) ctx, cancel := context.WithCancel(context.Background()) // Call Start() in a Go routine since we need to cancel the Context after // Start() is called, and Start() doesn't return. var wg sync.WaitGroup wg.Add(1) go func() { defer wg.Done() err := c.Start(ctx) require.Error(t, err) }() // Wait until ReserveMeetingPoint has been called twice. reserveMeetingPointWG.Wait() cancel() // Wait for Start() to return. wg.Wait() require.Equal(t, int64(2), c.stepsCounter.Get()) c.stepsCounter.Reset() switchboardMock.AssertExpectations(t) }
explode_data.jsonl/82349
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 3479, 79790, 7220, 1249, 36184, 7084, 25925, 37, 6209, 920, 35143, 32, 15666, 7220, 983, 14611, 1249, 23527, 1155, 353, 8840, 836, 8, 341, 20479, 14267, 90183, 2271, 1155, 340, 8961, 2482, 11571, 1669, 609, 16712, 82, 808, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRegression(t *testing.T) { var ( branching = 128 chunkSize = 4096 hashSize = 32 writes = 67100000 / 4096 span = make([]byte, 8) s = mock.NewStorer() pf = func() pipeline.ChainWriter { lsw := store.NewStoreWriter(ctx, s, mode, nil) return bmt.NewBmtWriter(lsw) } ht = hashtrie.NewHashTrieWriter(chunkSize, branching, hashSize, pf) ) binary.LittleEndian.PutUint64(span, 4096) for i := 0; i < writes; i++ { a := &pipeline.PipeWriteArgs{Ref: addr.Bytes(), Span: span} err := ht.ChainWrite(a) if err != nil { t.Fatal(err) } } ref, err := ht.Sum() if err != nil { t.Fatal(err) } rootch, err := s.Get(ctx, storage.ModeGetRequest, penguin.NewAddress(ref)) if err != nil { t.Fatal(err) } sp := binary.LittleEndian.Uint64(rootch.Data()[:penguin.SpanSize]) if sp != uint64(writes*4096) { t.Fatalf("want span %d got %d", writes*4096, sp) } }
explode_data.jsonl/29830
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 425 }
[ 2830, 3393, 45200, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 197, 17940, 287, 284, 220, 16, 17, 23, 198, 197, 23049, 3122, 1695, 284, 220, 19, 15, 24, 21, 198, 197, 50333, 1695, 220, 284, 220, 18, 17, 198, 197, 6692, 23262, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFilterManager_ClearFilters(t *testing.T) { controller := gomock.NewController(t) defer controller.Finish() state := octantFake.NewMockState(controller) state.EXPECT().SetFilters([]octant.Filter{}) state.EXPECT().SendAlert(gomock.Any()) manager := api.NewFilterManager() payload := action.Payload{} require.NoError(t, manager.ClearFilters(state, payload)) }
explode_data.jsonl/48846
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 5632, 2043, 57744, 28351, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 6461, 991, 18176, 2822, 24291, 1669, 18491, 517, 52317, 7121, 11571, 1397, 40845, 340, 24291, 22402, 7285, 1005...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOutput(t *testing.T) { dlvbin, tmpdir := getDlvBin(t) defer os.RemoveAll(tmpdir) for _, output := range []string{"", "myownname", filepath.Join(tmpdir, "absolute.path")} { testOutput(t, dlvbin, output, []string{"exit"}) const hello = "hello world!" stdout, _ := testOutput(t, dlvbin, output, []string{"continue", "exit"}) if !strings.Contains(string(stdout), hello) { t.Errorf("stdout %q should contain %q", stdout, hello) } } }
explode_data.jsonl/49186
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 5097, 1155, 353, 8840, 836, 8, 341, 2698, 21827, 6863, 11, 4174, 3741, 1669, 633, 35, 21827, 28794, 1155, 340, 16867, 2643, 84427, 10368, 3741, 692, 2023, 8358, 2550, 1669, 2088, 3056, 917, 4913, 497, 330, 2408, 779, 606, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInterpretStringDecodeHex(t *testing.T) { t.Parallel() inter := parseCheckAndInterpret(t, ` fun test(): [UInt8] { return "01CADE".decodeHex() } `) result, err := inter.Invoke("test") require.NoError(t, err) RequireValuesEqual( t, inter, interpreter.NewArrayValue( inter, interpreter.VariableSizedStaticType{ Type: interpreter.PrimitiveStaticTypeUInt8, }, common.Address{}, interpreter.UInt8Value(1), interpreter.UInt8Value(0xCA), interpreter.UInt8Value(0xDE), ), result, ) }
explode_data.jsonl/73416
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 253 }
[ 2830, 3393, 3306, 8043, 703, 32564, 20335, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 58915, 1669, 4715, 3973, 3036, 3306, 8043, 1155, 11, 22074, 414, 2464, 1273, 4555, 508, 18777, 23, 60, 341, 688, 470, 330, 15, 16, 2805...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBoringServerCurves(t *testing.T) { serverConfig := testConfig.Clone() serverConfig.Certificates = make([]Certificate, 1) serverConfig.Certificates[0].Certificate = [][]byte{testECDSACertificate} serverConfig.Certificates[0].PrivateKey = testECDSAPrivateKey serverConfig.BuildNameToCertificate() for _, curveid := range defaultCurvePreferences { t.Run(fmt.Sprintf("curve=%d", curveid), func(t *testing.T) { clientHello := &clientHelloMsg{ vers: VersionTLS12, random: make([]byte, 32), cipherSuites: []uint16{TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256}, compressionMethods: []uint8{compressionNone}, supportedCurves: []CurveID{curveid}, supportedPoints: []uint8{pointFormatUncompressed}, } testClientHello(t, serverConfig, clientHello) // With fipstls forced, bad curves should be rejected. t.Run("fipstls", func(t *testing.T) { fipstls.Force() defer fipstls.Abandon() msg := "" if !isBoringCurve(curveid) { msg = "no cipher suite supported by both client and server" } testClientHelloFailure(t, serverConfig, clientHello, msg) }) }) } }
explode_data.jsonl/9398
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 487 }
[ 2830, 3393, 33, 5503, 5475, 16704, 2342, 1155, 353, 8840, 836, 8, 341, 41057, 2648, 1669, 1273, 2648, 64463, 741, 41057, 2648, 727, 529, 24405, 284, 1281, 10556, 33202, 11, 220, 16, 340, 41057, 2648, 727, 529, 24405, 58, 15, 936, 3320...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestKickUserFromConversation(t *testing.T) { http.HandleFunc("/conversations.kick", okJSONHandler) once.Do(startServer) api := New("testing-token", OptionAPIURL("http://"+serverAddr+"/")) err := api.KickUserFromConversation("CXXXXXXXX", "UXXXXXXXX") if err != nil { t.Errorf("Unexpected error: %s", err) return } }
explode_data.jsonl/78545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 88264, 1474, 3830, 60313, 1155, 353, 8840, 836, 8, 341, 28080, 63623, 4283, 443, 72995, 5202, 865, 497, 5394, 5370, 3050, 340, 197, 13184, 33596, 10639, 5475, 340, 54299, 1669, 1532, 445, 8840, 34841, 497, 6959, 7082, 3144, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSignals(t *testing.T) { type testCase struct { op ltl.Operator testInputs []testInput } tc := func(op ltl.Operator, testInputs ...testInput) testCase { return testCase{ op: op, testInputs: testInputs, } } tests := []testCase{ tc(ops.Then(sm("a"), sm("b")), m("a,!b;!a,b"), nm("!a,b;!a,b"), ), // Release and Until are dual. tc(ops.Release(sm("b"), sm("a")), m("a;a,b"), nm("a;!a,b"), m("a;a;a,b"), m("a;a;a"), ), tc(ops.Not(ops.Until(ops.Not(sm("b")), ops.Not(sm("a")))), m("a;a,b"), nm("a;!a,b"), m("a;a;a,b"), m("a;a;a"), ), tc(ops.Until(sm("a"), sm("b")), m("a;a;b"), nm("a;c"), nm("a;a;a"), ), tc(ops.Not(ops.Release(ops.Not(sm("a")), ops.Not(sm("b")))), m("a;a;b"), nm("a;c"), nm("a;a;a"), ), // Globally and Eventually are dual. tc(ops.Globally(sm("a")), m("a;a,b;a,!b"), nm("a;b"), ), tc(ops.Not(ops.Eventually(ops.Not(sm("a")))), m("a;a,b;a,!b"), nm("a;b"), ), tc(ops.Eventually(ops.Then(sm("a"), sm("b"))), m("c;d;b;a;b"), nm("c;d;b;a"), ), tc(ops.Not(ops.Globally(ops.Then(ops.Not(sm("a")), ops.Not(sm("b"))))), m("c;d;b;a;b"), nm("c;d;b;a"), ), } for _, test := range tests { for _, testInput := range test.testInputs { t.Run(ops.PrettyPrint(test.op, ops.Inline())+" <- "+testInput.input, func(t *testing.T) { op := test.op var env ltl.Environment for index, tok := range testInput.toks { if op == nil { t.Fatalf("op became nil") } op, env = ltl.Match(op, tok) if env.Err() != nil { t.Fatalf("at index %d unexpected error %s", index, env.Err()) } } if testInput.wantMatch != env.Matching() { t.Fatalf("wanted match state %t, got %t", testInput.wantMatch, env.Matching()) } }) } } }
explode_data.jsonl/82647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1000 }
[ 2830, 3393, 96659, 1155, 353, 8840, 836, 8, 341, 13158, 54452, 2036, 341, 197, 39703, 260, 326, 11544, 85546, 198, 197, 18185, 31946, 3056, 1944, 2505, 198, 197, 532, 78255, 1669, 2915, 17096, 326, 11544, 85546, 11, 1273, 31946, 2503, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1