text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestFetchDataFeedList(t *testing.T) { // Read mock data from CSV csvContent, err := readCSVFileContents("testdata/data_feed_list.csv") if err != nil { t.Fatalf("coult not parse csv file '%v'", err) } // Create mock response response := &http.Response{ StatusCode: 200, Body: ioutil.NopCloser(bytes.NewBufferString(csvContent)), } // Create test client to run tests on awinClient := awin.NewAwinClient(&http.Client{Transport: mockRoundTripper{response: response, requestTestFunc: func(r *http.Request) error { expectedUrl := "https://productdata.awin.com/datafeed/list/apikey/apiKey" if r.URL.String() != expectedUrl { err := errors.New(fmt.Sprintf("invalid url found in test\nexpected '%s'\nfound '%s'", expectedUrl, r.URL.String())) t.Error(err) return err } expectedMethod := "GET" if r.Method != expectedMethod { err := errors.New(fmt.Sprintf("invalid request method in test\nexpected '%s'\nfound '%s'", expectedMethod, r.Method)) t.Error(err) return err } return nil }}}) result, err := awinClient.FetchDataFeedList("apiKey") if err != nil { t.Fatalf("err is not null '%v'", err) } if len(*result) != 10 { t.Fatalf("Invalid amount of data rows received %d", len(*result)) } // Check if received rows and expected rows match expectedRows, _ := parseCSVToDataFeedRow(csvContent) for i, expectedRow := range *expectedRows { receivedRow := (*result)[i] if expectedRow != receivedRow { t.Fatalf("Invalid row parsed\nexpected '%v'\nreceived '%v'", expectedRow, receivedRow) } } }
explode_data.jsonl/40124
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 578 }
[ 2830, 3393, 20714, 1043, 28916, 852, 1155, 353, 8840, 836, 8, 341, 197, 322, 4457, 7860, 821, 504, 27445, 198, 1444, 3492, 2762, 11, 1848, 1669, 1349, 44209, 1703, 14803, 445, 92425, 13167, 42390, 2019, 11219, 1138, 743, 1848, 961, 2092...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWatcherWatchWrongRange(t *testing.T) { b, tmpPath := backend.NewDefaultTmpBackend() s := WatchableKV(newWatchableStore(zap.NewExample(), b, &lease.FakeLessor{}, nil, StoreConfig{})) defer cleanup(s, b, tmpPath) w := s.NewWatchStream() defer w.Close() if _, err := w.Watch(0, []byte("foa"), []byte("foa"), 1); err != ErrEmptyWatcherRange { t.Fatalf("key == end range given; expected ErrEmptyWatcherRange, got %+v", err) } if _, err := w.Watch(0, []byte("fob"), []byte("foa"), 1); err != ErrEmptyWatcherRange { t.Fatalf("key > end range given; expected ErrEmptyWatcherRange, got %+v", err) } // watch request with 'WithFromKey' has empty-byte range end if id, _ := w.Watch(0, []byte("foo"), []byte{}, 1); id != 0 { t.Fatalf("\x00 is range given; id expected 0, got %d", id) } }
explode_data.jsonl/39810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 47248, 14247, 29185, 6046, 1155, 353, 8840, 836, 8, 341, 2233, 11, 4174, 1820, 1669, 19163, 7121, 3675, 35986, 29699, 741, 1903, 1669, 10357, 480, 82707, 1755, 14247, 480, 6093, 13174, 391, 7121, 13314, 1507, 293, 11, 609, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRegisterClass(t *testing.T) { kernel32 := GetDLL(t, "kernel32.dll") user32 := GetDLL(t, "user32.dll") mh, _, _ := kernel32.Proc("GetModuleHandleW").Call(0) cb := syscall.NewCallback(func(hwnd syscall.Handle, msg uint32, wparam, lparam uintptr) (rc uintptr) { t.Fatal("callback should never get called") return 0 }) type Wndclassex struct { Size uint32 Style uint32 WndProc uintptr ClsExtra int32 WndExtra int32 Instance syscall.Handle Icon syscall.Handle Cursor syscall.Handle Background syscall.Handle MenuName *uint16 ClassName *uint16 IconSm syscall.Handle } name := syscall.StringToUTF16Ptr("test_window") wc := Wndclassex{ WndProc: cb, Instance: syscall.Handle(mh), ClassName: name, } wc.Size = uint32(unsafe.Sizeof(wc)) a, _, err := user32.Proc("RegisterClassExW").Call(uintptr(unsafe.Pointer(&wc))) if a == 0 { t.Fatalf("RegisterClassEx failed: %v", err) } r, _, err := user32.Proc("UnregisterClassW").Call(uintptr(unsafe.Pointer(name)), 0) if r == 0 { t.Fatalf("UnregisterClass failed: %v", err) } }
explode_data.jsonl/54666
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 493 }
[ 2830, 3393, 8690, 1957, 1155, 353, 8840, 836, 8, 341, 16463, 5454, 18, 17, 1669, 2126, 64187, 1155, 11, 330, 23248, 18, 17, 22200, 1138, 19060, 18, 17, 1669, 2126, 64187, 1155, 11, 330, 872, 18, 17, 22200, 1138, 2109, 71, 11, 8358, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDB_SubscribePush_multiple(t *testing.T) { db := newTestDB(t, nil) addrs := make([]swarm.Address, 0) var addrsMu sync.Mutex uploadRandomChunks := func(count int) { addrsMu.Lock() defer addrsMu.Unlock() for i := 0; i < count; i++ { ch := generateTestRandomChunk() _, err := db.Put(context.Background(), storage.ModePutUpload, ch) if err != nil { t.Fatal(err) } addrs = append(addrs, ch.Address()) } } // prepopulate database with some chunks // before the subscription uploadRandomChunks(10) // set a timeout on subscription ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() // collect all errors from validating addresses, even nil ones // to validate the number of addresses received by the subscription errChan := make(chan error) subsCount := 10 // start a number of subscriptions // that all of them will write every addresses error to errChan for j := 0; j < subsCount; j++ { ch, stop := db.SubscribePush(ctx) defer stop() // receive and validate addresses from the subscription go func(j int) { var err error var i int // address index for { select { case got, ok := <-ch: if !ok { return } addrsMu.Lock() aIndex := i want := addrs[aIndex] addrsMu.Unlock() if !got.Address().Equal(want) { err = fmt.Errorf("got chunk %v address on subscription %v %s, want %s", i, j, got, want) } i++ // send one and only one error per received address select { case errChan <- err: case <-ctx.Done(): return } case <-ctx.Done(): return } } }(j) } // upload some chunks just after subscribe uploadRandomChunks(5) time.Sleep(200 * time.Millisecond) // upload some chunks after some short time // to ensure that subscription will include them // in a dynamic environment uploadRandomChunks(3) // number of addresses received by all subscriptions wantedChunksCount := len(addrs) * subsCount checkErrChan(ctx, t, errChan, wantedChunksCount) }
explode_data.jsonl/68486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 776 }
[ 2830, 3393, 3506, 36359, 6273, 16644, 45233, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 2092, 692, 12718, 5428, 1669, 1281, 10556, 2280, 2178, 26979, 11, 220, 15, 340, 2405, 912, 5428, 39120, 12811, 99014, 271...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetResolvableHostnamesForUpstreamService(t *testing.T) { assert := tassert.New(t) mc := newFakeMeshCatalog() testCases := []struct { downstream service.MeshService expectedHostnames []string }{ { downstream: service.MeshService{ Namespace: "default", Name: "foo", }, expectedHostnames: []string{ "bookstore-apex", "bookstore-apex.default", "bookstore-apex.default.svc", "bookstore-apex.default.svc.cluster", "bookstore-apex.default.svc.cluster.local", "bookstore-apex:8888", "bookstore-apex.default:8888", "bookstore-apex.default.svc:8888", "bookstore-apex.default.svc.cluster:8888", "bookstore-apex.default.svc.cluster.local:8888", "bookstore-v1", "bookstore-v1.default", "bookstore-v1.default.svc", "bookstore-v1.default.svc.cluster", "bookstore-v1.default.svc.cluster.local", "bookstore-v1:8888", "bookstore-v1.default:8888", "bookstore-v1.default.svc:8888", "bookstore-v1.default.svc.cluster:8888", "bookstore-v1.default.svc.cluster.local:8888", }, }, { downstream: service.MeshService{ Namespace: "bar", Name: "foo", }, expectedHostnames: []string{ "bookstore-apex.default", "bookstore-apex.default.svc", "bookstore-apex.default.svc.cluster", "bookstore-apex.default.svc.cluster.local", "bookstore-apex.default:8888", "bookstore-apex.default.svc:8888", "bookstore-apex.default.svc.cluster:8888", "bookstore-apex.default.svc.cluster.local:8888", "bookstore-v1.default", "bookstore-v1.default.svc", "bookstore-v1.default.svc.cluster", "bookstore-v1.default.svc.cluster.local", "bookstore-v1.default:8888", "bookstore-v1.default.svc:8888", "bookstore-v1.default.svc.cluster:8888", "bookstore-v1.default.svc.cluster.local:8888", }, }, } for _, tc := range testCases { t.Run(fmt.Sprintf("Testing hostnames when %s svc reaches %s svc", tc.downstream, tests.BookstoreV1Service), func(t *testing.T) { actual, err := mc.GetResolvableHostnamesForUpstreamService(tc.downstream, tests.BookstoreV1Service) assert.Nil(err) assert.Equal(actual, tc.expectedHostnames) }) } }
explode_data.jsonl/69761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1031 }
[ 2830, 3393, 1949, 1061, 88097, 9296, 11400, 2461, 2324, 4027, 1860, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 259, 2207, 7121, 1155, 692, 97662, 1669, 501, 52317, 14194, 41606, 2822, 18185, 37302, 1669, 3056, 1235, 341, 197, 2698, 779, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadPrivateKey(t *testing.T) { f, err := ioutil.TempFile("", "") if err != nil { t.Fatalf("error creating tmpfile: %v", err) } defer os.Remove(f.Name()) if err := ioutil.WriteFile(f.Name(), []byte(rsaPrivateKey), os.FileMode(0600)); err != nil { t.Fatalf("error writing private key to tmpfile: %v", err) } if _, err := serviceaccount.ReadPrivateKey(f.Name()); err != nil { t.Fatalf("error reading private RSA key: %v", err) } if err := ioutil.WriteFile(f.Name(), []byte(ecdsaPrivateKey), os.FileMode(0600)); err != nil { t.Fatalf("error writing private key to tmpfile: %v", err) } if _, err := serviceaccount.ReadPrivateKey(f.Name()); err != nil { t.Fatalf("error reading private ECDSA key: %v", err) } }
explode_data.jsonl/37979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 4418, 75981, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 43144, 65009, 1703, 19814, 14676, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 841, 6825, 4174, 1192, 25, 1018, 85, 497, 1848, 340, 197, 532, 16867, 264...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestUnmarshalTag(t *testing.T) { actual := new(Tag) if err := json.Unmarshal([]byte(`["foo","bar"]`), actual); err != nil { t.Fatal("unable to decode JSON:", err) } expected := &Tag{Key: "foo", Value: "bar"} if !reflect.DeepEqual(actual, expected) { t.Errorf("incorrect Tag: wanted '%+v' and got '%+v'", expected, actual) } }
explode_data.jsonl/29783
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 1806, 27121, 5668, 1155, 353, 8840, 836, 8, 341, 88814, 1669, 501, 69161, 340, 743, 1848, 1669, 2951, 38097, 10556, 3782, 5809, 1183, 7975, 2198, 2257, 1341, 63, 701, 5042, 1215, 1848, 961, 2092, 341, 197, 3244, 26133, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMergeConfig(t *testing.T) { for _, tc := range []struct { desc string configA *Config configB *Config expect *Config }{ { "merges b over a", &Config{Delim: "a", Glue: "a", Prefix: "a", Empty: "a"}, &Config{Delim: "b", Glue: "b", Prefix: "b", Empty: "b"}, &Config{Delim: "b", Glue: "b", Prefix: "b", Empty: "b"}, }, { "merges only non-empty config values", &Config{Delim: "a", Glue: "a", Prefix: "a", Empty: "a"}, &Config{Delim: "b", Prefix: "b"}, &Config{Delim: "b", Glue: "a", Prefix: "b", Empty: "a"}, }, { "takes b if a is nil", nil, &Config{Delim: "b", Glue: "b", Prefix: "b", Empty: "b"}, &Config{Delim: "b", Glue: "b", Prefix: "b", Empty: "b"}, }, { "takes a if b is nil", &Config{Delim: "a", Glue: "a", Prefix: "a", Empty: "a"}, nil, &Config{Delim: "a", Glue: "a", Prefix: "a", Empty: "a"}, }, } { t.Run(tc.desc, func(t *testing.T) { m := MergeConfig(tc.configA, tc.configB) if !reflect.DeepEqual(m, tc.expect) { t.Fatalf("\nexpect:\n%#v\n\nactual:\n%#v", tc.expect, m) } }) } }
explode_data.jsonl/10648
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 554 }
[ 2830, 3393, 52096, 2648, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 41653, 262, 914, 198, 197, 25873, 32, 353, 2648, 198, 197, 25873, 33, 353, 2648, 198, 197, 24952, 220, 353, 2648, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestScheduleGoroutine(t *testing.T) { runtime.GOMAXPROCS(1) go func() { for { doSomething(0) } }() for { doSomething(1) } }
explode_data.jsonl/12919
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 32210, 38, 269, 14159, 1155, 353, 8840, 836, 8, 341, 7000, 4466, 1224, 1898, 2954, 9117, 6412, 7, 16, 340, 30680, 2915, 368, 341, 197, 2023, 341, 298, 19935, 23087, 7, 15, 340, 197, 197, 532, 197, 66816, 2023, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestCustomer_SendStatementLetter(t *testing.T) { key := "test api key" mockResponse := new(invdendpoint.LetterResponse) mockResponse.Id = "abcdef" mockResponse.State = "queued" mockResponse.CreatedAt = time.Now().UnixNano() server, err := invdmockserver.New(200, mockResponse, "json", true) if err != nil { t.Fatal(err) } defer server.Close() conn := mockConnection(key, server) subjectEntity := conn.NewCustomer() sendResponse, err := subjectEntity.SendStatementLetter(nil) if err != nil { t.Fatal("Error with send", err) } if sendResponse.State != "queued" { t.Fatal("Error: send not completed correctly") } }
explode_data.jsonl/15013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 12792, 46267, 8636, 34264, 1155, 353, 8840, 836, 8, 341, 23634, 1669, 330, 1944, 6330, 1376, 1837, 77333, 2582, 1669, 501, 5900, 16598, 32540, 1214, 21405, 2582, 340, 77333, 2582, 6444, 284, 330, 41202, 698, 77333, 2582, 18942...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAllMatchModel(t *testing.T) { e, _ := NewEnforcer("examples/rbac_with_all_pattern_model.conf", "examples/rbac_with_all_pattern_policy.csv") e.AddNamedMatchingFunc("g", "keyMatch2", util.KeyMatch2) e.AddNamedDomainMatchingFunc("g", "keyMatch2", util.KeyMatch2) testDomainEnforce(t, e, "alice", "domain1", "/book/1", "read", true) testDomainEnforce(t, e, "alice", "domain1", "/book/1", "write", false) testDomainEnforce(t, e, "alice", "domain2", "/book/1", "read", false) testDomainEnforce(t, e, "alice", "domain2", "/book/1", "write", true) }
explode_data.jsonl/57143
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 2403, 8331, 1712, 1155, 353, 8840, 836, 8, 341, 7727, 11, 716, 1669, 1532, 1702, 82010, 445, 51668, 7382, 55877, 6615, 5705, 21260, 5047, 13937, 497, 330, 51668, 7382, 55877, 6615, 5705, 21260, 22773, 11219, 1138, 7727, 1904, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMongo_GetForUserCounter(t *testing.T) { m, skip := prepMongo(t, true) // adds two comments if skip { return } count, err := m.UserCount("radio-t", "user1") assert.Nil(t, err) assert.Equal(t, 2, count) count, err = m.UserCount("bad", "user1") assert.Nil(t, err) assert.Equal(t, 0, count) }
explode_data.jsonl/54207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 54998, 13614, 2461, 1474, 14099, 1155, 353, 8840, 836, 8, 341, 2109, 11, 10706, 1669, 21327, 54998, 1155, 11, 830, 8, 442, 11367, 1378, 6042, 198, 743, 10706, 341, 197, 853, 198, 197, 532, 18032, 11, 1848, 1669, 296, 7344,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTLSForwardTunnel(t *testing.T) { httpSrv := httptest.NewServer(httpTestHandler) defer httpSrv.Close() sendData := make([]byte, 128) rand.Read(sendData) err := tlsForwardTunnelRoundtrip(httpSrv.URL, sendData) if err != nil { t.Error(err) } }
explode_data.jsonl/69778
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 45439, 25925, 51, 40292, 1155, 353, 8840, 836, 8, 341, 28080, 50, 10553, 1669, 54320, 70334, 7121, 5475, 19886, 2271, 3050, 340, 16867, 1758, 50, 10553, 10421, 2822, 32817, 1043, 1669, 1281, 10556, 3782, 11, 220, 16, 17, 23,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRefPtr(t *testing.T) { cases := []string{ "", "a", "a/b", "/a/b", "/a/b/", "a%2Fb", } for _, tc := range cases { ref, err := PtrRef(DefaultRootDocument.Copy(), tc) if err != nil { t.Fatal("Unexpected error:", err) } ptr, err := ref.Ptr() if err != nil { t.Fatal("Unexpected error:", err) } roundtrip, err := PtrRef(DefaultRootDocument.Copy(), ptr) if err != nil { t.Fatal("Unexpected error:", err) } if !ref.Equal(roundtrip) { t.Fatalf("Expected roundtrip of %q to be equal but got %v and %v", tc, ref, roundtrip) } } if _, err := PtrRef(DefaultRootDocument.Copy(), "2%"); err == nil { t.Fatalf("Expected error from %q", "2%") } ref := Ref{VarTerm("x"), IntNumberTerm(1)} if _, err := ref.Ptr(); err == nil { t.Fatal("Expected error from x[1]") } }
explode_data.jsonl/2924
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 371 }
[ 2830, 3393, 76629, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 917, 515, 197, 197, 39680, 197, 197, 56693, 756, 197, 197, 56693, 3470, 756, 197, 197, 3115, 64, 3470, 756, 197, 197, 3115, 64, 3470, 35075, 197, 197, 56693, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestXdsVersion(t *testing.T) { framework. NewTest(t).Features("usability.observability.version"). RequiresSingleCluster(). RequireIstioVersion("1.10.0"). Run(func(t framework.TestContext) { cfg := i.Settings() istioCtl := istioctl.NewOrFail(t, t, istioctl.Config{Cluster: t.Clusters().Default()}) args := []string{"x", "version", "--remote=true", fmt.Sprintf("--istioNamespace=%s", cfg.SystemNamespace)} output, _ := istioCtl.InvokeOrFail(t, args) // istioctl will return a single "control plane version" if all control plane versions match. // This test accepts any version with a "." (period) in it -- we mostly want to fail on "MISSING CP VERSION" controlPlaneRegex := regexp.MustCompile(`control plane version: [a-z0-9\-]+\.[a-z0-9\-]+`) if controlPlaneRegex.MatchString(output) { return } t.Fatalf("Did not find valid control plane version: %v", output) }) }
explode_data.jsonl/57531
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 55, 5356, 5637, 1155, 353, 8840, 836, 8, 341, 1166, 5794, 624, 197, 197, 3564, 2271, 1155, 568, 21336, 445, 355, 2897, 13, 22764, 2897, 19484, 38609, 197, 197, 46961, 10888, 28678, 25829, 197, 197, 17959, 40, 267, 815, 563...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUnmarshalBoolean(t *testing.T) { input := []byte(`{ "BOOL": true}`) var av DynamoDBAttributeValue err := json.Unmarshal(input, &av) assert.Nil(t, err) assert.Equal(t, DataTypeBoolean, av.DataType()) assert.Equal(t, true, av.Boolean()) }
explode_data.jsonl/61694
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 1806, 27121, 6890, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 3056, 3782, 5809, 90, 330, 10395, 788, 830, 5541, 692, 2405, 1822, 71813, 3506, 78554, 198, 9859, 1669, 2951, 38097, 5384, 11, 609, 402, 692, 6948, 59678, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateBroadcastMessages(t *testing.T) { mux, server, client := setup(t) defer teardown(server) wantedStartsAt := time.Date(2017, time.June, 26, 6, 0, 0, 0, time.UTC) wantedEndsAt := time.Date(2017, time.June, 27, 12, 59, 0, 0, time.UTC) mux.HandleFunc("/api/v4/broadcast_messages", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") fmt.Fprintf(w, `{ "message": "Some Message", "starts_at": "2017-06-26T06:00:00.000Z", "ends_at": "2017-06-27T12:59:00.000Z", "color": "#E75E40", "font": "#FFFFFF", "id": 42, "active": false }`) }) opt := &CreateBroadcastMessageOptions{ Message: String("Some Message"), StartsAt: &wantedStartsAt, EndsAt: &wantedEndsAt, Color: String("#E75E40"), Font: String("#FFFFFF"), } got, _, err := client.BroadcastMessage.CreateBroadcastMessage(opt) if err != nil { t.Errorf("CreateBroadcastMessage returned error: %v", err) } want := &BroadcastMessage{ Message: "Some Message", StartsAt: &wantedStartsAt, EndsAt: &wantedEndsAt, Color: "#E75E40", Font: "#FFFFFF", ID: 42, Active: false, } if !reflect.DeepEqual(got, want) { t.Errorf("CreateBroadcastMessage returned \ngot:\n%v\nwant:\n%v", Stringify(got), Stringify(want)) } }
explode_data.jsonl/42198
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 572 }
[ 2830, 3393, 4021, 43362, 15820, 1155, 353, 8840, 836, 8, 341, 2109, 2200, 11, 3538, 11, 2943, 1669, 6505, 1155, 340, 16867, 49304, 21421, 692, 6692, 7566, 3479, 82, 1655, 1669, 882, 8518, 7, 17, 15, 16, 22, 11, 882, 3503, 2886, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSkipGenerationControllerFlow(t *testing.T) { stopChannel := make(chan struct{}) defer close(stopChannel) received := make(chan bool) expectedSecretName := "new-secret" serviceName := "svc-name" serviceUID := "some-uid" namespace := "ns" caName, kubeclient, fakeWatch, fakeSecretWatch, controller, informerFactory := controllerSetup([]runtime.Object{}, stopChannel, t) kubeclient.PrependReactor("update", "service", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) { return true, &v1.Service{}, kapierrors.NewForbidden(v1.Resource("fdsa"), "new-service", fmt.Errorf("any service reason")) }) kubeclient.PrependReactor("create", "secret", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) { return true, &v1.Secret{}, kapierrors.NewForbidden(v1.Resource("asdf"), "new-secret", fmt.Errorf("any reason")) }) kubeclient.PrependReactor("update", "secret", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) { return true, &v1.Secret{}, kapierrors.NewForbidden(v1.Resource("asdf"), "new-secret", fmt.Errorf("any reason")) }) controller.syncHandler = func(serviceKey string) error { defer func() { received <- true }() err := controller.syncService(serviceKey) if err != nil { t.Errorf("unexpected error: %v", err) } return err } informerFactory.Start(stopChannel) go controller.Run(1, stopChannel) serviceToAdd := &v1.Service{} serviceToAdd.Name = serviceName serviceToAdd.Namespace = namespace serviceToAdd.UID = types.UID(serviceUID) serviceToAdd.Annotations = map[string]string{ServingCertSecretAnnotation: expectedSecretName, ServingCertErrorAnnotation: "any-error", ServingCertErrorNumAnnotation: "11"} fakeWatch.Add(serviceToAdd) t.Log("waiting to reach syncHandler") select { case <-received: case <-time.After(time.Duration(30 * time.Second)): t.Fatalf("failed to call into syncService") } for _, action := range kubeclient.Actions() { switch action.GetVerb() { case "update", "create": 
t.Errorf("no mutation expected, but we got %v", action) } } secretToAdd := &v1.Secret{} secretToAdd.Name = expectedSecretName secretToAdd.Namespace = namespace fakeSecretWatch.Add(secretToAdd) // makes sure that our lister has the secret. Given wiring, I think it's this or kill the test time.Sleep(2 * time.Second) kubeclient.ClearActions() serviceToAdd.Annotations = map[string]string{ServingCertSecretAnnotation: expectedSecretName, ServingCertCreatedByAnnotation: caName} fakeWatch.Add(serviceToAdd) t.Log("waiting to reach syncHandler") select { case <-received: case <-time.After(time.Duration(30 * time.Second)): t.Fatalf("failed to call into syncService") } for _, action := range kubeclient.Actions() { switch action.GetVerb() { case "update", "create": t.Errorf("no mutation expected, but we got %v", action) } } }
explode_data.jsonl/49902
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 975 }
[ 2830, 3393, 35134, 37138, 2051, 18878, 1155, 353, 8840, 836, 8, 341, 62644, 9629, 1669, 1281, 35190, 2036, 37790, 16867, 3265, 60170, 9629, 340, 17200, 8771, 1669, 1281, 35190, 1807, 692, 42400, 19773, 675, 1669, 330, 931, 68892, 698, 529...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainerNextStateWithAvoidingDanglingContainers(t *testing.T) { container := &apicontainer.Container{ DesiredStatusUnsafe: apicontainerstatus.ContainerStopped, KnownStatusUnsafe: apicontainerstatus.ContainerCreated, AppliedStatus: apicontainerstatus.ContainerRunning, TransitionDependenciesMap: make(map[apicontainerstatus.ContainerStatus]apicontainer.TransitionDependencySet), } task := &managedTask{ Task: &apitask.Task{ Containers: []*apicontainer.Container{ container, }, DesiredStatusUnsafe: apitaskstatus.TaskStopped, }, engine: &DockerTaskEngine{}, } transition := task.containerNextState(container) assert.Equal(t, apicontainerstatus.ContainerStatusNone, transition.nextState, "Expected next state [%s] != Retrieved next state [%s]", apicontainerstatus.ContainerStatusNone.String(), transition.nextState.String()) assert.Equal(t, false, transition.actionRequired, "Mismatch transition actionable") assert.Equal(t, nil, transition.reason, "Mismatch transition possible") }
explode_data.jsonl/24571
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 362 }
[ 2830, 3393, 4502, 5847, 1397, 2354, 52116, 287, 35, 90104, 74632, 1155, 353, 8840, 836, 8, 341, 53290, 1669, 609, 391, 51160, 1743, 33672, 515, 197, 197, 4896, 2690, 2522, 78770, 25, 981, 1443, 51160, 1743, 2829, 33672, 59803, 345, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWorkDoneProgressCancelParams(t *testing.T) { t.Parallel() const ( wantToken = int32(1569) invalidToken = int32(1348) ) var ( want = `{"token":` + strconv.FormatInt(int64(wantToken), 10) + `}` wantInvalid = `{"token":` + strconv.FormatInt(int64(invalidToken), 10) + `}` ) token := NewNumberProgressToken(wantToken) wantType := WorkDoneProgressCancelParams{ Token: *token, } t.Run("Marshal", func(t *testing.T) { tests := []struct { name string field WorkDoneProgressCancelParams want string wantMarshalErr bool wantErr bool }{ { name: "Valid", field: wantType, want: want, wantMarshalErr: false, wantErr: false, }, { name: "Invalid", field: wantType, want: wantInvalid, wantMarshalErr: false, wantErr: true, }, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() got, err := json.Marshal(&tt.field) if (err != nil) != tt.wantMarshalErr { t.Fatal(err) } if diff := cmp.Diff(tt.want, string(got)); (diff != "") != tt.wantErr { t.Errorf("%s: wantErr: %t\n(-want +got)\n%s", tt.name, tt.wantErr, diff) } }) } }) t.Run("Unmarshal", func(t *testing.T) { tests := []struct { name string field string want WorkDoneProgressCancelParams wantUnmarshalErr bool wantErr bool }{ { name: "Valid", field: want, want: wantType, wantUnmarshalErr: false, wantErr: false, }, { name: "Invalid", field: wantInvalid, want: wantType, wantUnmarshalErr: false, wantErr: true, }, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() var got WorkDoneProgressCancelParams if err := json.Unmarshal([]byte(tt.field), &got); (err != nil) != tt.wantUnmarshalErr { t.Fatal(err) } if diff := cmp.Diff(fmt.Sprint(got.Token), strconv.FormatInt(int64(wantToken), 10)); (diff != "") != tt.wantErr { t.Errorf("%s: wantErr: %t\n(-want +got)\n%s", tt.name, tt.wantErr, diff) } }) } }) }
explode_data.jsonl/16163
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1270 }
[ 2830, 3393, 6776, 17453, 9496, 9269, 4870, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 4777, 2399, 197, 50780, 3323, 262, 284, 526, 18, 17, 7, 16, 20, 21, 24, 340, 197, 197, 11808, 3323, 284, 526, 18, 17, 7, 16, 18, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMirroring(t *testing.T) { cases := []testCaseMirror{ { name: "mirror-percent-absent", absent: true, percentage: 100.0, threshold: 0.0, }, { name: "mirror-50", percentage: 50.0, threshold: 10.0, }, { name: "mirror-10", percentage: 10.0, threshold: 5.0, }, { name: "mirror-0", percentage: 0.0, threshold: 0.0, }, } runMirrorTest(mirrorTestOptions{ t: t, cases: cases, }) }
explode_data.jsonl/39134
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 273 }
[ 2830, 3393, 58461, 89579, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 66194, 54216, 515, 197, 197, 515, 298, 11609, 25, 981, 330, 73225, 70624, 96410, 306, 756, 298, 197, 3435, 306, 25, 257, 830, 345, 298, 197, 40550, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProfileTranslator(t *testing.T) { t.Run("Sends update", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(profile) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 1 { t.Fatalf("Expecting [1] profile, got [%d]. Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } actualPbProfile := mockGetProfileServer.profilesReceived[0] if !proto.Equal(actualPbProfile, pbProfile) { t.Fatalf("Expected profile sent to be [%v] but was [%v]", pbProfile, actualPbProfile) } }) t.Run("Request match with more than one field becomes ALL", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(multipleRequestMatches) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 1 { t.Fatalf("Expecting [1] profiles, got [%d]. Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } actualPbProfile := mockGetProfileServer.profilesReceived[0] if !proto.Equal(actualPbProfile, pbRequestMatchAll) { t.Fatalf("Expected profile sent to be [%v] but was [%v]", pbRequestMatchAll, actualPbProfile) } }) t.Run("Ignores request match without any fields", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(notEnoughRequestMatches) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 0 { t.Fatalf("Expecting [0] profiles, got [%d]. 
Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } }) t.Run("Response match with more than one field becomes ALL", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(multipleResponseMatches) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 1 { t.Fatalf("Expecting [1] profiles, got [%d]. Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } actualPbProfile := mockGetProfileServer.profilesReceived[0] if !proto.Equal(actualPbProfile, pbResponseMatchAll) { t.Fatalf("Expected profile sent to be [%v] but was [%v]", pbResponseMatchAll, actualPbProfile) } }) t.Run("Ignores response match without any fields", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(notEnoughResponseMatches) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 0 { t.Fatalf("Expecting [0] profiles, got [%d]. Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } }) t.Run("Ignores response match with invalid status range", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(invalidStatusRange) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 0 { t.Fatalf("Expecting [0] profiles, got [%d]. 
Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } }) t.Run("Sends update for one sided status range", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(oneSidedStatusRange) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 1 { t.Fatalf("Expecting [1] profile, got [%d]. Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } }) t.Run("Sends empty update", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(nil) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 1 { t.Fatalf("Expecting [1] profile, got [%d]. Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } actualPbProfile := mockGetProfileServer.profilesReceived[0] if !proto.Equal(actualPbProfile, defaultPbProfile) { t.Fatalf("Expected profile sent to be [%v] but was [%v]", defaultPbProfile, actualPbProfile) } }) t.Run("Sends update with custom timeout", func(t *testing.T) { mockGetProfileServer := &mockDestinationGetProfileServer{profilesReceived: []*pb.DestinationProfile{}} translator := &profileTranslator{ stream: mockGetProfileServer, log: logging.WithField("test", t.Name()), } translator.Update(profileWithTimeout) numProfiles := len(mockGetProfileServer.profilesReceived) if numProfiles != 1 { t.Fatalf("Expecting [1] profile, got [%d]. Updates: %v", numProfiles, mockGetProfileServer.profilesReceived) } actualPbProfile := mockGetProfileServer.profilesReceived[0] if !proto.Equal(actualPbProfile, pbProfileWithTimeout) { t.Fatalf("Expected profile sent to be [%v] but was [%v]", pbProfileWithTimeout, actualPbProfile) } }) }
explode_data.jsonl/49954
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2151 }
[ 2830, 3393, 8526, 51653, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 50, 1412, 2647, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 77333, 1949, 8526, 5475, 1669, 609, 16712, 33605, 1949, 8526, 5475, 90, 56195, 23260, 25, 29838, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBrokerV1ControlPlane(t *testing.T) { brokerTestRunner.RunTests(t, testlib.FeatureBasic, func(t *testing.T, channel metav1.TypeMeta) { client := testlib.Setup(t, true, testlib.SetupClientOptionNoop) defer testlib.TearDown(client) helpers.BrokerV1ControlPlaneTest( t, func(client *testlib.Client, name string) { helpers.BrokerDataPlaneSetupHelper(context.Background(), client, brokerClass, brokerTestRunner) }, testlib.SetupClientOptionNoop, ) }) }
explode_data.jsonl/65192
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 65545, 53, 16, 3273, 34570, 1155, 353, 8840, 836, 8, 341, 2233, 45985, 2271, 19486, 16708, 18200, 1155, 11, 1273, 2740, 58434, 15944, 11, 2915, 1155, 353, 8840, 836, 11, 5496, 77520, 16, 10184, 12175, 8, 1476, 197, 25291, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestService_Name(t *testing.T) { t.Run("test success", func(t *testing.T) { prov, err := New(&protocol.MockProvider{ ServiceMap: map[string]interface{}{ mediator.Coordination: &mockroute.MockMediatorSvc{}, }, }) require.NoError(t, err) require.Equal(t, DIDExchange, prov.Name()) }) }
explode_data.jsonl/30513
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 1860, 19015, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1944, 2393, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 42947, 11, 1848, 1669, 1532, 2099, 17014, 24664, 5179, 515, 298, 91619, 2227, 25, 2415, 14032, 313...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewConnWithCache(t *testing.T) { r, clean, err := redistest.CreateRedis() assert.Nil(t, err) defer clean() var conn trackedConn c := NewConnWithCache(&conn, cache.NewNode(r, singleFlights, stats, sql.ErrNoRows)) _, err = c.ExecNoCache("delete from user_table where id='kevin'") assert.Nil(t, err) assert.True(t, conn.execValue) }
explode_data.jsonl/64138
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 3564, 9701, 2354, 8233, 1155, 353, 8840, 836, 8, 341, 7000, 11, 4240, 11, 1848, 1669, 2518, 380, 477, 7251, 48137, 741, 6948, 59678, 1155, 11, 1848, 340, 16867, 4240, 2822, 2405, 4534, 33056, 9701, 198, 1444, 1669, 1532, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOwnerReadSRK(t *testing.T) { rwc := openTPMOrSkip(t) defer rwc.Close() // This test code assumes that the owner auth is the well-known value. ownerAuth := getAuth(ownerAuthEnvVar) srkb, err := OwnerReadSRK(rwc, ownerAuth) if err != nil { t.Fatal("Couldn't read the SRK using owner auth:", err) } if len(srkb) == 0 { t.Fatal("Couldn't get an SRK blob from the TPM") } }
explode_data.jsonl/75356
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 13801, 4418, 14557, 42, 1155, 353, 8840, 836, 8, 341, 7000, 24028, 1669, 1787, 4239, 44, 2195, 35134, 1155, 340, 16867, 435, 24028, 10421, 2822, 197, 322, 1096, 1273, 2038, 21484, 429, 279, 6372, 4166, 374, 279, 1632, 21309,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReadArchiveCustomHandler(t *testing.T) { // first create archive, that we will be able to read updateTestDir, _ := ioutil.TempDir("", "update") defer os.RemoveAll(updateTestDir) archive, err := WriteRootfsImageArchive(updateTestDir, RootfsImageStructOK) assert.NoError(t, err) assert.NotEqual(t, "", archive) // open archive file f, err := os.Open(archive) defer f.Close() assert.NoError(t, err) assert.NotNil(t, f) var called bool rp := &parser.RootfsParser{ DataFunc: func(r io.Reader, uf parser.UpdateFile) error { called = true assert.Equal(t, "update.ext4", uf.Name) b := bytes.Buffer{} n, err := io.Copy(&b, r) assert.NoError(t, err) assert.Equal(t, uf.Size, n) assert.Equal(t, []byte("my first update"), b.Bytes()) return nil }, } aReader := NewReader(f) aReader.Register(rp) _, err = aReader.Read() assert.NoError(t, err) assert.True(t, called) }
explode_data.jsonl/35296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 374 }
[ 2830, 3393, 4418, 42502, 10268, 3050, 1155, 353, 8840, 836, 8, 341, 197, 322, 1156, 1855, 18132, 11, 429, 582, 686, 387, 2952, 311, 1349, 198, 27175, 2271, 6184, 11, 716, 1669, 43144, 65009, 6184, 19814, 330, 2386, 1138, 16867, 2643, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTubeRatio(t *testing.T) { cases := map[string]struct { tubeRatio float64 req *segment.SetupReq setupDB func(db *mock_backend.MockDB) globalCapacity uint64 interfaces []uint16 }{ "empty": { tubeRatio: 1, req: newTestRequest(t, 1, 2, 5, 5), setupDB: func(db *mock_backend.MockDB) { rsvs := []*segment.Reservation{} db.EXPECT().GetAllSegmentRsvs(gomock.Any()).AnyTimes().Return(rsvs, nil) }, globalCapacity: 1024 * 1024, interfaces: []uint16{1, 2, 3}, }, "one source, one ingress": { tubeRatio: 1, req: newTestRequest(t, 1, 2, 5, 5), setupDB: func(db *mock_backend.MockDB) { rsvs := []*segment.Reservation{ testNewRsv(t, "ff00:1:1", "00000001", 1, 2, 5, 5, 5), } db.EXPECT().GetAllSegmentRsvs(gomock.Any()).AnyTimes().Return(rsvs, nil) }, globalCapacity: 1024 * 1024, interfaces: []uint16{1, 2, 3}, }, "one source, two ingress": { tubeRatio: .5, req: newTestRequest(t, 1, 2, 3, 3), // 64Kbps setupDB: func(db *mock_backend.MockDB) { rsvs := []*segment.Reservation{ testNewRsv(t, "ff00:1:1", "00000001", 1, 2, 3, 3, 3), // 64Kbps testNewRsv(t, "ff00:1:1", "00000002", 3, 2, 5, 5, 5), // 128Kbps } db.EXPECT().GetAllSegmentRsvs(gomock.Any()).AnyTimes().Return(rsvs, nil) }, globalCapacity: 1024 * 1024, interfaces: []uint16{1, 2, 3}, }, "two sources, request already present": { tubeRatio: .5, req: newTestRequest(t, 1, 2, 5, 5), setupDB: func(db *mock_backend.MockDB) { rsvs := []*segment.Reservation{ testNewRsv(t, "ff00:1:1", "beefcafe", 1, 2, 5, 9, 9), // will be ignored testNewRsv(t, "ff00:1:1", "00000002", 3, 2, 5, 5, 5), } db.EXPECT().GetAllSegmentRsvs(gomock.Any()).AnyTimes().Return(rsvs, nil) }, globalCapacity: 1024 * 1024, interfaces: []uint16{1, 2, 3}, }, "multiple sources, multiple ingress": { tubeRatio: .75, req: newTestRequest(t, 1, 2, 5, 5), setupDB: func(db *mock_backend.MockDB) { rsvs := []*segment.Reservation{ testNewRsv(t, "ff00:1:1", "00000001", 1, 2, 5, 5, 5), testNewRsv(t, "ff00:1:2", "00000001", 1, 2, 5, 5, 5), testNewRsv(t, "ff00:1:1", "00000002", 3, 2, 5, 5, 
5), } db.EXPECT().GetAllSegmentRsvs(gomock.Any()).AnyTimes().Return(rsvs, nil) }, globalCapacity: 1024 * 1024, interfaces: []uint16{1, 2, 3}, }, "exceeding ingress capacity": { tubeRatio: 10. / 13., // 10 / (10 + 0 + 3) req: newTestRequest(t, 1, 2, 5, 5), setupDB: func(db *mock_backend.MockDB) { rsvs := []*segment.Reservation{ testNewRsv(t, "ff00:1:1", "00000001", 1, 2, 5, 5, 5), testNewRsv(t, "ff00:1:2", "00000001", 1, 2, 5, 5, 5), testNewRsv(t, "ff00:1:1", "00000002", 3, 2, 5, 5, 5), } db.EXPECT().GetAllSegmentRsvs(gomock.Any()).AnyTimes().Return(rsvs, nil) }, globalCapacity: 10, interfaces: []uint16{1, 2, 3}, }, "with many other irrelevant reservations": { tubeRatio: .75, req: newTestRequest(t, 1, 2, 5, 5), setupDB: func(db *mock_backend.MockDB) { rsvs := []*segment.Reservation{ testNewRsv(t, "ff00:1:1", "00000001", 1, 2, 5, 5, 5), testNewRsv(t, "ff00:1:2", "00000001", 1, 2, 5, 5, 5), testNewRsv(t, "ff00:1:1", "00000002", 3, 2, 5, 5, 5), testNewRsv(t, "ff00:1:3", "00000001", 4, 5, 5, 9, 9), testNewRsv(t, "ff00:1:3", "00000002", 4, 5, 5, 9, 9), testNewRsv(t, "ff00:1:4", "00000001", 5, 4, 5, 9, 9), testNewRsv(t, "ff00:1:4", "00000002", 5, 4, 5, 9, 9), } db.EXPECT().GetAllSegmentRsvs(gomock.Any()).AnyTimes().Return(rsvs, nil) }, globalCapacity: 1024 * 1024, interfaces: []uint16{1, 2, 3, 4, 5}, }, } for name, tc := range cases { name, tc := name, tc t.Run(name, func(t *testing.T) { t.Parallel() db, finish := newTestDB(t) adm := newTestAdmitter(t) defer finish() adm.Caps = &testCapacities{ Cap: tc.globalCapacity, Ifaces: tc.interfaces, } tc.setupDB(db.(*mock_backend.MockDB)) ctx := context.Background() ratio, err := adm.tubeRatio(ctx, db, *tc.req) require.NoError(t, err) require.Equal(t, tc.tubeRatio, ratio) }) } }
explode_data.jsonl/62141
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2165 }
[ 2830, 3393, 11851, 22777, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 2415, 14032, 60, 1235, 341, 197, 3244, 3760, 22777, 414, 2224, 21, 19, 198, 197, 24395, 310, 353, 23169, 39820, 27234, 198, 197, 84571, 3506, 286, 2915, 9791, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJobRunsController_Cancel(t *testing.T) { t.Parallel() ethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplication(t, ethClient, ) defer cleanup() app.Start() client := app.NewHTTPClient() t.Run("invalid run id", func(t *testing.T) { response, cleanup := client.Put("/v2/runs/xxx/cancellation", nil) defer cleanup() cltest.AssertServerResponse(t, response, http.StatusUnprocessableEntity) }) t.Run("missing run", func(t *testing.T) { resp, cleanup := client.Put("/v2/runs/29023583-0D39-4844-9696-451102590936/cancellation", nil) defer cleanup() cltest.AssertServerResponse(t, resp, http.StatusNotFound) }) job := cltest.NewJobWithWebInitiator() require.NoError(t, app.Store.CreateJob(&job)) run := cltest.NewJobRun(job) require.NoError(t, app.Store.CreateJobRun(&run)) t.Run("valid run", func(t *testing.T) { resp, cleanup := client.Put(fmt.Sprintf("/v2/runs/%s/cancellation", run.ID), nil) defer cleanup() cltest.AssertServerResponse(t, resp, http.StatusOK) r, err := app.Store.FindJobRun(run.ID) assert.NoError(t, err) assert.Equal(t, models.RunStatusCancelled, r.GetStatus()) }) }
explode_data.jsonl/49861
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 486 }
[ 2830, 3393, 12245, 73920, 2051, 97485, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 769, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1155, 340, 16867, 2060, 72577, 20960, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnexpectedHandlerError(t *testing.T) { opts := testutils.NewOpts(). AddLogFilter("Unexpected handler error", 1) testutils.WithTestServer(t, opts, func(t testing.TB, ts *testutils.TestServer) { ts.Register(ErrorHandlerFunc(func(ctx context.Context, call *InboundCall) error { if _, err := raw.ReadArgs(call); err != nil { return err } return fmt.Errorf("nope") }), "nope") ctx, cancel := NewContext(time.Second) defer cancel() _, _, _, err := raw.Call(ctx, ts.Server(), ts.HostPort(), ts.ServiceName(), "nope", []byte("Arg2"), []byte("Arg3")) require.NotNil(t, err) assert.Equal(t, ErrCodeUnexpected, GetSystemErrorCode(err), "err: %v", err) calls := relaytest.NewMockStats() calls.Add(ts.ServiceName(), ts.ServiceName(), "nope").Failed("unexpected-error").End() ts.AssertRelayStats(calls) }) }
explode_data.jsonl/78185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 29430, 3050, 1454, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 1273, 6031, 7121, 43451, 25829, 197, 37972, 2201, 5632, 445, 29430, 7013, 1465, 497, 220, 16, 692, 18185, 6031, 26124, 2271, 5475, 1155, 11, 12185, 11, 2915, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEvalAutoescape(t *testing.T) { set := NewHTMLSet() RunJetTestWithSet(t, set, nil, nil, "Autoescapee_Test1", `<h1>{{"<h1>Hello Buddy!</h1>" }}</h1>`, "<h1>&lt;h1&gt;Hello Buddy!&lt;/h1&gt;</h1>") RunJetTestWithSet(t, set, nil, nil, "Autoescapee_Test2", `<h1>{{"<h1>Hello Buddy!</h1>" |unsafe }}</h1>`, "<h1><h1>Hello Buddy!</h1></h1>") }
explode_data.jsonl/22905
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 54469, 13253, 12998, 1155, 353, 8840, 836, 8, 341, 8196, 1669, 1532, 5835, 1649, 741, 85952, 35641, 2271, 2354, 1649, 1155, 11, 738, 11, 2092, 11, 2092, 11, 330, 13253, 12998, 68, 32541, 16, 497, 30586, 71, 16, 11764, 2247...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSQLite_AddIndexedColumns(t *testing.T) { liteRun(t, func(t *liteTest) { usersT := &schema.Table{ Name: "users", Columns: []*schema.Column{{Name: "id", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}}}, } t.migrate(&schema.AddTable{T: usersT}) t.dropTables(usersT.Name) // Insert 2 records to the users table, and make sure they are there // after executing migration. _, err := t.db.Exec("INSERT INTO users (id) VALUES (1), (2)") require.NoError(t, err) usersT.Columns = append(usersT.Columns, &schema.Column{ Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}, Default: &schema.Literal{V: "10"}, }, &schema.Column{ Name: "b", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}, Default: &schema.Literal{V: "20"}, }, &schema.Column{ Name: "c", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}, Null: true}, Default: &schema.Literal{V: "30"}, }) usersT.Indexes = append(usersT.Indexes, &schema.Index{ Unique: true, Name: "id_a_b_c_unique", Parts: []*schema.IndexPart{{C: usersT.Columns[0]}, {C: usersT.Columns[1]}, {C: usersT.Columns[2]}, {C: usersT.Columns[3]}}, }) changes := t.diff(t.loadUsers(), usersT) require.Len(t, changes, 4, "usersT contains 3 new columns and 1 new index") t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, usersT) // Scan records from the table to ensure correctness of // the rows transferring. rows, err := t.db.Query("SELECT * FROM users") require.NoError(t, err) require.True(t, rows.Next()) var v [4]int require.NoError(t, rows.Scan(&v[0], &v[1], &v[2], &v[3])) require.Equal(t, [4]int{1, 10, 20, 30}, v) require.True(t, rows.Next()) require.NoError(t, rows.Scan(&v[0], &v[1], &v[2], &v[3])) require.Equal(t, [4]int{2, 10, 20, 30}, v) require.False(t, rows.Next()) require.NoError(t, rows.Close()) // Dropping a column from both table and index. 
usersT = t.loadUsers() idx, ok := usersT.Index("id_a_b_c_unique") require.True(t, ok) require.Len(t, idx.Parts, 4) usersT.Columns = usersT.Columns[:len(usersT.Columns)-1] idx.Parts = idx.Parts[:len(idx.Parts)-1] changes = t.diff(t.loadUsers(), usersT) require.Len(t, changes, 2) t.migrate(&schema.ModifyTable{T: usersT, Changes: changes}) ensureNoChange(t, t.loadUsers()) // Scan records from the table to ensure correctness of // the rows transferring. rows, err = t.db.Query("SELECT * FROM users") require.NoError(t, err) require.True(t, rows.Next()) var u [3]int require.NoError(t, rows.Scan(&u[0], &u[1], &u[2])) require.Equal(t, [3]int{1, 10, 20}, u) require.True(t, rows.Next()) require.NoError(t, rows.Scan(&u[0], &u[1], &u[2])) require.Equal(t, [3]int{2, 10, 20}, u) require.False(t, rows.Next()) require.NoError(t, rows.Close()) }) }
explode_data.jsonl/20085
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1288 }
[ 2830, 3393, 81772, 21346, 69941, 13965, 1155, 353, 8840, 836, 8, 341, 8810, 632, 6727, 1155, 11, 2915, 1155, 353, 68078, 2271, 8, 341, 197, 90896, 51, 1669, 609, 17349, 18257, 515, 298, 21297, 25, 262, 330, 4218, 756, 298, 197, 13965,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteRepo(t *testing.T) { ctx, cancel, config, tempdir := initConfig(t) defer cancel() defer os.RemoveAll(tempdir) defer libkbfs.CheckConfigAndShutdown(ctx, t, config) clock := &clocktest.TestClock{} clock.Set(time.Now()) config.SetClock(clock) h, err := tlfhandle.ParseHandle( ctx, config.KBPKI(), config.MDOps(), nil, "user1", tlf.Private) require.NoError(t, err) _, err = CreateRepoAndID(ctx, config, h, "Repo1") require.NoError(t, err) rootNode, _, err := config.KBFSOps().GetOrCreateRootNode( ctx, h, data.MasterBranch) require.NoError(t, err) jManager, err := libkbfs.GetJournalManager(config) require.NoError(t, err) err = jManager.FinishSingleOp(ctx, rootNode.GetFolderBranch().Tlf, nil, keybase1.MDPriorityGit) require.NoError(t, err) err = DeleteRepo(ctx, config, h, "Repo1") require.NoError(t, err) gitNode, _, err := config.KBFSOps().Lookup(ctx, rootNode, kbfsRepoDir) require.NoError(t, err) children, err := config.KBFSOps().GetDirChildren(ctx, gitNode) require.NoError(t, err) require.Len(t, children, 0) // .kbfs_deleted_repos is hidden deletedReposNode, _, err := config.KBFSOps().Lookup( ctx, gitNode, kbfsDeletedReposDir) require.NoError(t, err) children, err = config.KBFSOps().GetDirChildren(ctx, deletedReposNode) require.NoError(t, err) require.Len(t, children, 1) // If cleanup happens too soon, it shouldn't clean the repo. err = CleanOldDeletedRepos(ctx, config, h) require.NoError(t, err) children, err = config.KBFSOps().GetDirChildren(ctx, deletedReposNode) require.NoError(t, err) require.Len(t, children, 1) // After a long time, cleanup should succeed. clock.Add(minDeletedAgeForCleaning) err = CleanOldDeletedRepos(ctx, config, h) require.NoError(t, err) children, err = config.KBFSOps().GetDirChildren(ctx, deletedReposNode) require.NoError(t, err) require.Len(t, children, 0) err = jManager.FinishSingleOp(ctx, rootNode.GetFolderBranch().Tlf, nil, keybase1.MDPriorityGit) require.NoError(t, err) }
explode_data.jsonl/26653
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 771 }
[ 2830, 3393, 6435, 25243, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 11, 2193, 11, 2730, 3741, 1669, 2930, 2648, 1155, 340, 16867, 9121, 741, 16867, 2643, 84427, 9758, 3741, 340, 16867, 3051, 21310, 3848, 10600, 2648, 3036, 62004, 75...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStoreScanMultipleIntents(t *testing.T) { defer leaktest.AfterTest(t)() var resolveCount int32 manual := hlc.NewManualClock(123) cfg := TestStoreConfig(hlc.NewClock(manual.UnixNano, time.Nanosecond)) cfg.TestingKnobs.EvalKnobs.TestingEvalFilter = func(filterArgs kvserverbase.FilterArgs) *roachpb.Error { if _, ok := filterArgs.Req.(*roachpb.ResolveIntentRequest); ok { atomic.AddInt32(&resolveCount, 1) } return nil } stopper := stop.NewStopper() defer stopper.Stop(context.Background()) store := createTestStoreWithConfig(t, stopper, testStoreOpts{createSystemRanges: true}, &cfg) // Lay down ten intents from a single txn. key1 := roachpb.Key("key00") key10 := roachpb.Key("key09") txn := newTransaction("test", key1, 1, store.cfg.Clock) ba := roachpb.BatchRequest{} for i := 0; i < 10; i++ { pArgs := putArgs(roachpb.Key(fmt.Sprintf("key%02d", i)), []byte("value")) ba.Add(&pArgs) assignSeqNumsForReqs(txn, &pArgs) } ba.Header = roachpb.Header{Txn: txn} if _, pErr := store.TestSender().Send(context.Background(), ba); pErr != nil { t.Fatal(pErr) } // Now, expire the transactions by moving the clock forward. This will // result in the subsequent scan operation pushing both transactions // in a single batch. manual.Increment(txnwait.TxnLivenessThreshold.Nanoseconds() + 1) // Query the range with a single scan, which should cause all intents // to be resolved. sArgs := scanArgs(key1, key10.Next()) if _, pErr := kv.SendWrapped(context.Background(), store.TestSender(), sArgs); pErr != nil { t.Fatal(pErr) } // Verify all ten intents are resolved from the single inconsistent scan. testutils.SucceedsSoon(t, func() error { if a, e := atomic.LoadInt32(&resolveCount), int32(10); a != e { return fmt.Errorf("expected %d; got %d resolves", e, a) } return nil }) }
explode_data.jsonl/109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 700 }
[ 2830, 3393, 6093, 26570, 32089, 1072, 805, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 2405, 8830, 2507, 526, 18, 17, 198, 197, 19730, 1669, 305, 17257, 7121, 52092, 26104, 7, 16, 17, 18, 340, 50286, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestListQuotasCommand(t *testing.T) { t.Parallel() for tn, tc := range map[string]struct { namespace string wantErr error args []string setup func(t *testing.T, fakeLister *fake.FakeClient) assert func(t *testing.T, buffer *bytes.Buffer) }{ "invalid number of args": { args: []string{"invalid"}, wantErr: errors.New("accepts 0 arg(s), received 1"), }, "configured namespace": { namespace: "some-namespace", setup: func(t *testing.T, fakeLister *fake.FakeClient) { fakeLister. EXPECT(). List("some-namespace") }, }, "returns error without specify namespace": { wantErr: errors.New(utils.EmptyNamespaceError), setup: func(t *testing.T, fakeLister *fake.FakeClient) { fakeLister. EXPECT(). List("some-namespace") }, }, "formats multiple quotas": { namespace: "some-namespace", setup: func(t *testing.T, fakeLister *fake.FakeClient) { fakeLister. EXPECT(). List(gomock.Any()). Return([]v1.ResourceQuota{ {ObjectMeta: metav1.ObjectMeta{Name: "quota-a"}}, {ObjectMeta: metav1.ObjectMeta{Name: "quota-b"}}, }, nil) }, assert: func(t *testing.T, buffer *bytes.Buffer) { header1 := "Getting quotas in namespace: " header2 := "Found 2 quotas in namespace " testutil.AssertContainsAll(t, buffer.String(), []string{header1, header2, "quota-a", "quota-b"}) }, }, } { t.Run(tn, func(t *testing.T) { ctrl := gomock.NewController(t) fakeLister := fake.NewFakeClient(ctrl) if tc.setup != nil { tc.setup(t, fakeLister) } buffer := &bytes.Buffer{} c := NewListQuotasCommand(&config.KfParams{ Namespace: tc.namespace, }, fakeLister) c.SetOutput(buffer) c.SetArgs(tc.args) gotErr := c.Execute() if tc.wantErr != nil { testutil.AssertErrorsEqual(t, tc.wantErr, gotErr) return } if tc.assert != nil { tc.assert(t, buffer) } testutil.AssertNil(t, "Command err", gotErr) ctrl.Finish() }) } }
explode_data.jsonl/10992
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 904 }
[ 2830, 3393, 852, 2183, 53524, 4062, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 43308, 11, 17130, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 56623, 914, 198, 197, 50780, 7747, 256, 1465, 198, 197, 31215, 414, 3056, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertCPUSharesToCgroupV2Value(t *testing.T) { cases := map[uint64]uint64{ 0: 0, 2: 1, 262144: 10000, } for i, expected := range cases { got := ConvertCPUSharesToCgroupV2Value(i) if got != expected { t.Errorf("expected ConvertCPUSharesToCgroupV2Value(%d) to be %d, got %d", i, expected, got) } } }
explode_data.jsonl/34407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 12012, 31615, 73015, 1249, 34, 4074, 53, 17, 1130, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 2415, 58, 2496, 21, 19, 60, 2496, 21, 19, 515, 197, 197, 15, 25, 414, 220, 15, 345, 197, 197, 17, 25, 414, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewExportedPort_RandomHostPort(t *testing.T) { controller := gomock.NewController(t) mockKV := mock_etcd.NewMockKV(controller) mockLease := mock_etcd.NewMockLease(controller) ctx := context.TODO() mockLease.EXPECT().Grant(gomock.Eq(ctx), gomock.Eq(int64(30000))).Return( &clientv3.LeaseGrantResponse{ ID: 23, TTL: 30000, Error: "", }, nil) mockLease.EXPECT().KeepAlive( gomock.Eq(context.Background()), gomock.Eq(clientv3.LeaseID(23))).Return( make(chan *clientv3.LeaseKeepAliveResponse), nil) exporter, err := NewExporterFromClient(ctx, mockKV, mockLease, 30000) if err != nil { t.Error("NewExporterFromClient reports error: ", err) } if exporter == nil { t.Fatal("NewExporterFromClient returned nil exporter") } record := discovery.ExportedServiceRecord{ Protocol: "tcp", Address: "127.0.0.1", } recvRecord := discovery.ExportedServiceRecord{} mockKV.EXPECT().Put(ctx, "/ns/service/test/0000000000000017", gomock.Any(), gomock.Any()).Return(&clientv3.PutResponse{}, nil).Do( func(ctx context.Context, path string, val string, opts ...clientv3.OpOption) { otherErr := proto.Unmarshal([]byte(val), &recvRecord) if otherErr != nil { t.Error("Error parsing protobuf message from Put: ", otherErr) } }) l, err := exporter.NewExportedPort(ctx, "tcp", "127.0.0.1", "test") if err != nil { t.Error("NewExportedPort reports error: ", err) } if l == nil { t.Fatal("NewExportedPort returned nil listener") } defer l.Close() _, hostport, err := net.SplitHostPort(l.Addr().String()) if err != nil { t.Error("Error parsing host:port pair ", l.Addr().String(), ": ", err) } port, err := strconv.Atoi(hostport) if err != nil { t.Error("Error converting port ", hostport, ": ", err) } record.Port = int32(port) if !proto.Equal(&record, &recvRecord) { t.Error("Mismatch between proto records: ", record, ", ", recvRecord) } }
explode_data.jsonl/66152
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 779 }
[ 2830, 3393, 3564, 16894, 291, 7084, 2568, 2206, 9296, 7084, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 77333, 82707, 1669, 7860, 45668, 4385, 7121, 11571, 82707, 40845, 340, 77333, 2304, 519, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSyncInstallPlanUnhappy(t *testing.T) { namespace := "ns" tests := []struct { testName string in *v1alpha1.InstallPlan err error }{ { testName: "NoStatus", in: installPlan("p", namespace, v1alpha1.InstallPlanPhaseNone), err: nil, }, { // This checks that installplans are not applied when no operatorgroup is present testName: "HasSteps/NoOperatorGroup", in: withSteps(installPlan("p", namespace, v1alpha1.InstallPlanPhaseInstalling, "csv"), []*v1alpha1.Step{ { Resource: v1alpha1.StepResource{ CatalogSource: "catalog", CatalogSourceNamespace: namespace, Group: "", Version: "v1", Kind: "ServiceAccount", Name: "sa", Manifest: toManifest(t, serviceAccount("sa", namespace, "", objectReference("init secret"))), }, Status: v1alpha1.StepStatusUnknown, }, }, ), err: fmt.Errorf("attenuated service account query failed - no operator group found that is managing this namespace"), }, } for _, tt := range tests { t.Run(tt.testName, func(t *testing.T) { ctx, cancel := context.WithCancel(context.TODO()) defer cancel() op, err := NewFakeOperator(ctx, namespace, []string{namespace}, withClientObjs(tt.in)) require.NoError(t, err) err = op.syncInstallPlans(tt.in) require.Equal(t, tt.err, err) }) } }
explode_data.jsonl/37277
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 669 }
[ 2830, 3393, 12154, 24690, 20485, 1806, 56521, 1155, 353, 8840, 836, 8, 341, 56623, 1669, 330, 4412, 1837, 78216, 1669, 3056, 1235, 341, 197, 18185, 675, 914, 198, 197, 17430, 981, 353, 85, 16, 7141, 16, 71207, 541, 20485, 198, 197, 98...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRegionErrorInfoLogRateLimitedHint(t *testing.T) { t.Parallel() errInfo := newRegionErrorInfo(singleRegionInfo{}, nil) errInfo.logRateLimitDuration = time.Second // True on the first rate limited. require.True(t, errInfo.logRateLimitedHint()) require.False(t, errInfo.logRateLimitedHint()) // True if it lasts too long. time.Sleep(2 * errInfo.logRateLimitDuration) require.True(t, errInfo.logRateLimitedHint()) require.False(t, errInfo.logRateLimitedHint()) }
explode_data.jsonl/32891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 14091, 1454, 1731, 2201, 11564, 74477, 26987, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 9859, 1731, 1669, 501, 14091, 1454, 1731, 89903, 14091, 1731, 22655, 2092, 340, 9859, 1731, 1665, 11564, 16527, 12945, 284, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProcess_OnAnalyzeProjectUsecase_WhenNoProjectFound_ShouldReturnError(t *testing.T) { projectRepositoryMock := projectRepositoryMock{ project: entity.Project{}, getErr: repository.ErrProjectNoResults, } uc := usecase.NewAnalyzeProjectUsecase(projectRepositoryMock, nil, nil, nil, &entity.AnalysisConfig{}) projectID, _ := uuid.NewUUID() results, err := uc.Process(context.TODO(), projectID) assert.EqualError(t, err, usecase.ErrProjectNotFound.Error()) assert.Empty(t, results) }
explode_data.jsonl/64101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 7423, 35482, 2082, 55856, 7849, 52, 5024, 519, 62, 4498, 2753, 7849, 6650, 36578, 616, 5598, 1454, 1155, 353, 8840, 836, 8, 341, 72470, 4624, 11571, 1669, 2390, 4624, 11571, 515, 197, 72470, 25, 5387, 30944, 38837, 197, 1036...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMakeRegexRemapDotConfig(t *testing.T) { cdnName := tc.CDNName("mycdn") toToolName := "my-to" toURL := "my-to.example.net" fileName := "regex_remap_myds.config" dses := map[tc.DeliveryServiceName]CDNDS{ "myds": CDNDS{ OrgServerFQDN: "https://myorigin.example.net", // DS "origin_server_fqdn" is actually a URL including the scheme, the name is wrong. QStringIgnore: 0, CacheURL: "https://mycacheurl.net", RegexRemap: "myregexremap", }, } txt := MakeRegexRemapDotConfig(cdnName, toToolName, toURL, fileName, dses) if !strings.Contains(txt, string(cdnName)) { t.Errorf("expected: cdnName '" + string(cdnName) + "', actual: missing") } if !strings.Contains(txt, toToolName) { t.Errorf("expected: toToolName '" + toToolName + "', actual: missing") } if !strings.Contains(txt, toURL) { t.Errorf("expected: toURL '" + toURL + "', actual: missing") } if !strings.HasPrefix(strings.TrimSpace(txt), "#") { t.Errorf("expected: header comment, actual: missing") } if strings.Contains(txt, "mycacheurl") { t.Errorf("expected: regex remap to not contain cacheurl, actual: '%v'", txt) } if strings.Contains(txt, "myorigin") { t.Errorf("expected: regex remap to not contain org server fqdn, actual: '%v'", txt) } if !strings.Contains(txt, "myregexremap") { t.Errorf("expected: regex remap to contain regex remap, actual: '%v'", txt) } }
explode_data.jsonl/2045
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 560 }
[ 2830, 3393, 8078, 32464, 6590, 391, 34207, 2648, 1155, 353, 8840, 836, 8, 341, 1444, 17395, 675, 1669, 17130, 727, 31264, 675, 445, 2408, 12254, 1138, 31709, 7740, 675, 1669, 330, 2408, 4686, 698, 31709, 3144, 1669, 330, 2408, 4686, 772...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestSetSTSCreds(t *testing.T) { tests := []struct { descrip string role Role credentialsNil bool externalID string }{ { "sets the sts creds if the role arn is set", Role{ RoleArn: "this:arn", }, false, "", }, { "does not set the creds if role arn is not set", Role{}, true, "", }, { "does not set the creds if role arn is not set & external id is set", Role{ ExternalID: "thing", }, true, "", }, } for _, l := range tests { test := l t.Run(test.descrip, func(t *testing.T) { t.Parallel() conf := setSTSCreds(mock.Session, &aws.Config{}, test.role) if test.credentialsNil { if conf.Credentials != nil { t.Fail() } } else { if conf.Credentials == nil { t.Fail() } } }) } }
explode_data.jsonl/18782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 1649, 784, 3540, 53369, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 52912, 740, 79, 286, 914, 198, 197, 197, 5778, 1843, 15404, 198, 197, 197, 32353, 19064, 1807, 198, 197, 197, 20921, 915, 257, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFieldNotFoundIsSuccess(t *testing.T) { metrics := []telegraf.Metric{ testutil.MustMetric( "cpu", map[string]string{}, map[string]interface{}{}, time.Now()), } compares := &health.Compares{ Field: "time_idle", GT: addr(42.0), } result := compares.Check(metrics) require.True(t, result) }
explode_data.jsonl/65113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 1877, 10372, 3872, 7188, 1155, 353, 8840, 836, 8, 972, 2109, 13468, 1669, 3056, 15410, 76039, 1321, 16340, 1666, 197, 18185, 1314, 50463, 54310, 7805, 298, 197, 1, 16475, 4723, 298, 19567, 14032, 30953, 6257, 1871, 298, 19567,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) { t.Parallel() cfg, fs := newTestCfg() writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithSummaryDelimiterAndMarkdownThatCrossesBorder) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true}) require.Len(t, s.RegularPages(), 1) p := s.RegularPages()[0] if p.Summary() != template.HTML( "<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a href=\"#fn:1\">1</a></sup></p>") { t.Fatalf("Got summary:\n%q", p.Summary()) } c := content(p) if c != "<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a href=\"#fn:1\">1</a></sup></p>\n\n<div class=\"footnotes\">\n\n<hr />\n\n<ol>\n<li id=\"fn:1\">Many people say so.\n <a class=\"footnote-return\" href=\"#fnref:1\"><sup>[return]</sup></a></li>\n</ol>\n</div>" { t.Fatalf("Got content:\n%q", c) } }
explode_data.jsonl/60604
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 413 }
[ 2830, 3393, 2665, 2354, 91098, 2461, 68005, 4792, 28501, 288, 10691, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 50286, 11, 8619, 1669, 501, 2271, 42467, 2822, 24945, 3608, 1155, 11, 8619, 11, 26054, 22363, 445, 1796, 497, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestExponentialBackoff(t *testing.T) { for _, tc := range []struct { retran time.Duration iteration uint jitter time.Duration want time.Duration }{ {retran: time.Millisecond, iteration: 0, jitter: -100 * time.Second, want: 0}, {retran: 50 * time.Millisecond, iteration: 1, want: 100 * time.Millisecond}, {retran: 100 * time.Millisecond, iteration: 2, want: 400 * time.Millisecond}, {retran: time.Second, iteration: 0, want: time.Second}, {retran: time.Second, iteration: 0, jitter: -400 * time.Millisecond, want: 600 * time.Millisecond}, {retran: time.Second, iteration: 1, want: 2 * time.Second}, {retran: time.Second, iteration: 2, want: 4 * time.Second}, {retran: time.Second, iteration: 3, want: 8 * time.Second}, {retran: time.Second, iteration: 6, want: 64 * time.Second}, {retran: time.Second, iteration: 7, want: 64 * time.Second}, {retran: time.Second, iteration: 10, want: 64 * time.Second}, } { t.Run(fmt.Sprintf("baseRetransmission=%s,jitter=%s,iteration=%d", tc.retran, tc.jitter, tc.iteration), func(t *testing.T) { c := NewClient(nil, 0, "", 0, 0, tc.retran, nil) // When used to add jitter to backoff, 1s is subtracted from random number // to map [0s, +2s] -> [-1s, +1s], so add 1s here to compensate for that. c.rand = rand.New(&randSourceStub{src: int64(time.Second + tc.jitter)}) if got := c.exponentialBackoff(tc.iteration); got != tc.want { t.Errorf("c.exponentialBackoff(%d) = %s, want: %s", tc.iteration, got, tc.want) } }) } }
explode_data.jsonl/20576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 608 }
[ 2830, 3393, 840, 59825, 3707, 1847, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 17200, 43369, 262, 882, 33795, 198, 197, 197, 36722, 2622, 198, 197, 12428, 3248, 262, 882, 33795, 198, 197, 50780, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestServerHangup(t *testing.T) { // skip until flakiness will be fixed. t.SkipNow() srv := serve(func(conn conn) { _ = conn.Close() }) defer srv.Close() conn, err := DefaultDialer.Dial("ws://" + srv.Listener.Addr().String()) require.NoError(t, err) defer conn.Close() _, err = conn.Execute(context.Background(), gremlin.NewEvalRequest("g.V()")) assert.EqualError(t, err, ErrConnClosed.Error()) assert.Error(t, conn.ctx.Err()) }
explode_data.jsonl/9357
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 5475, 57038, 454, 1155, 353, 8840, 836, 8, 341, 197, 322, 10706, 3080, 1320, 585, 1880, 686, 387, 8356, 624, 3244, 57776, 7039, 741, 1903, 10553, 1669, 8683, 18552, 20571, 4534, 8, 314, 716, 284, 4534, 10421, 368, 2751, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDiv(t *testing.T) { tests := []struct { name string money1 Money money2 Money expResult Money expPanic bool }{ { name: "Rational number with repeating decimals get truncated to 2 decimals", money1: Money{1000, "EUR"}, money2: Money{300, "EUR"}, expResult: Money{333, "EUR"}, }, { name: "Rational number with repeating decimals get truncated and rounded to 2 decimals", money1: Money{2000, "EUR"}, money2: Money{300, "EUR"}, expResult: Money{667, "EUR"}, }, { name: "Rational number with repeating decimals get truncated and rounded to 0 decimals", money1: Money{2000, "JPY"}, money2: Money{300, "JPY"}, expResult: Money{7, "JPY"}, }, { name: "Rational number requiring truncation rounded correctly", money1: Money{21252, "CAD"}, money2: MakeMoney("CAD", 24), expResult: Money{886, "CAD"}, }, { name: "Rational number requiring truncation rounded correctly, second test", money1: Money{22668, "USD"}, money2: MakeMoney("USD", 24), expResult: Money{945, "USD"}, }, { name: "Rational number not requiring truncation is divided correctly", money1: Money{90672, "CAD"}, money2: MakeMoney("CAD", 24), expResult: Money{3778, "CAD"}, }, // Testing for division by invalid numbers or amounts { name: "Should panic when dividing by zero", money1: Money{10, "CAD"}, money2: MakeMoney("CAD", 0), expResult: Money{0, "CAD"}, expPanic: true, }, } for _, test := range tests { // Used for catching panics from dividing by 0! if test.expPanic { defer func() { if r := recover(); r == nil { t.Errorf("%s: Division by zero, should have paniced!", test.name) } }() } got := test.money1.Div(test.money2) if !test.expPanic && !reflect.DeepEqual(test.expResult, got) { t.Errorf("%s: expected money amount to be %v, got %v", test.name, test.expResult, got) } } }
explode_data.jsonl/61371
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 869 }
[ 2830, 3393, 12509, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 2109, 2534, 16, 262, 17633, 198, 197, 2109, 2534, 17, 262, 17633, 198, 197, 48558, 2077, 17633, 198, 197, 48558, 47, 31270,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIsConflictError(t *testing.T) { cases := []struct { name string err error expected bool }{ { name: "non-http error", err: errors.New("some error"), expected: false, }, { name: "http non-conflict error", err: HTTPStatusCodeError{ StatusCode: http.StatusForbidden, }, expected: false, }, { name: "http conflict error", err: HTTPStatusCodeError{ StatusCode: http.StatusConflict, }, expected: true, }, } for _, tc := range cases { if e, a := tc.expected, IsConflictError(tc.err); e != a { t.Errorf("%v: expected %v, got %v", tc.name, e, a) } } }
explode_data.jsonl/53557
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 290 }
[ 2830, 3393, 3872, 57974, 1454, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 9859, 414, 1465, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 257, 330, 6280, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMemory_bootstrap(t *testing.T) { m, err := NewWithDefault() require.NoError(t, err) upca, err := upca.NewWithDefault("../upstreamca-memory/pkg/_test_data/keys/private_key.pem", "../upstreamca-memory/pkg/_test_data/keys/cert.pem") require.NoError(t, err) generateCsrResp, err := m.GenerateCsr(&ca.GenerateCsrRequest{}) require.NoError(t, err) submitCSRResp, err := upca.SubmitCSR(&upstreamca.SubmitCSRRequest{Csr: generateCsrResp.Csr}) require.NoError(t, err) _, err = m.LoadCertificate(&ca.LoadCertificateRequest{SignedIntermediateCert: submitCSRResp.Cert}) require.NoError(t, err) fetchCertificateResp, err := m.FetchCertificate(&ca.FetchCertificateRequest{}) require.NoError(t, err) assert.Equal(t, submitCSRResp.Cert, fetchCertificateResp.StoredIntermediateCert) wcsr := createWorkloadCSR(t, "spiffe://localhost") wcert, err := m.SignCsr(&ca.SignCsrRequest{Csr: wcsr}) require.NoError(t, err) assert.NotEmpty(t, wcert) }
explode_data.jsonl/73858
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 365 }
[ 2830, 3393, 10642, 88424, 1155, 353, 8840, 836, 8, 341, 2109, 11, 1848, 1669, 1532, 2354, 3675, 741, 17957, 35699, 1155, 11, 1848, 692, 59810, 924, 11, 1848, 1669, 705, 924, 7121, 2354, 3675, 17409, 454, 4027, 924, 64096, 22523, 19632, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateV4WithSignature(t *testing.T) { credentials := ec2tokens.AuthOptions{ Access: "a7f1e798b7c2417cba4a02de97dc3cdc", BodyHash: new(string), Path: "/", Signature: "f5cd6995be98e5576a130b30cca277375f10439217ea82169aa8386e83965611", Verb: "GET", Headers: map[string]string{ "Foo": "Bar", "Host": "localhost", "Authorization": "AWS4-HMAC-SHA256 Credential=a7f1e798b7c2417cba4a02de97dc3cdc/00010101/region1/ec2/aws4_request, SignedHeaders=, Signature=f5cd6995be98e5576a130b30cca277375f10439217ea82169aa8386e83965611", "X-Amz-Date": "00010101T000000Z", }, Params: map[string]string{ "Action": "Test", }, } authTokenPost(t, credentials, `{ "credentials": { "access": "a7f1e798b7c2417cba4a02de97dc3cdc", "body_hash": "", "host": "", "headers": { "Foo": "Bar", "Host": "localhost", "Authorization": "AWS4-HMAC-SHA256 Credential=a7f1e798b7c2417cba4a02de97dc3cdc/00010101/region1/ec2/aws4_request, SignedHeaders=, Signature=f5cd6995be98e5576a130b30cca277375f10439217ea82169aa8386e83965611", "X-Amz-Date": "00010101T000000Z" }, "params": { "Action": "Test" }, "path": "/", "signature": "f5cd6995be98e5576a130b30cca277375f10439217ea82169aa8386e83965611", "verb": "GET" } }`) }
explode_data.jsonl/68583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 754 }
[ 2830, 3393, 4021, 53, 19, 2354, 25088, 1155, 353, 8840, 836, 8, 341, 197, 32353, 1669, 11942, 17, 30566, 25233, 3798, 515, 197, 197, 6054, 25, 262, 330, 64, 22, 69, 16, 68, 22, 24, 23, 65, 22, 66, 17, 19, 16, 22, 93829, 19, 64...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGrouping(t *testing.T) { const robotsCaseGrouping = `user-agent: a user-agent: b disallow: /a disallow: /b user-agent: ignore Disallow: /separator user-agent: b user-agent: c disallow: /b disallow: /c` r, err := FromString(robotsCaseGrouping) require.NoError(t, err) expectAccess(t, r, false, "/a", "a") expectAccess(t, r, false, "/b", "a") expectAccess(t, r, true, "/c", "a") expectAccess(t, r, false, "/a", "b") expectAccess(t, r, false, "/b", "b") expectAccess(t, r, false, "/c", "b") expectAccess(t, r, true, "/a", "c") expectAccess(t, r, false, "/b", "c") expectAccess(t, r, false, "/c", "c") }
explode_data.jsonl/51684
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 2808, 287, 1155, 353, 8840, 836, 8, 341, 4777, 28707, 4207, 2808, 287, 284, 1565, 872, 41935, 25, 264, 198, 872, 41935, 25, 293, 198, 4243, 7183, 25, 608, 64, 198, 4243, 7183, 25, 608, 65, 271, 872, 41935, 25, 10034, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateLogMasks(t *testing.T) { for name, tc := range map[string]struct { masks string expErr error }{ "empty": {}, "single level; no prefix": { masks: "DEBUG", }, "single level; no prefix; unknown level": { masks: "WARNING", expErr: errors.New("unknown log level"), }, "single assignment": { masks: "mgmt=DEBUG", }, "single level; single assignment": { masks: "ERR,mgmt=DEBUG", }, "single level; single assignment; mixed caae": { masks: "err,mgmt=debuG", }, "single level; single assignment; with space": { masks: "ERR, mgmt=DEBUG", expErr: errors.New("illegal characters"), }, "single level; single assignment; bad level": { masks: "ERR,mgmt=DEG", expErr: errors.New("unknown log level"), }, "single assignment; single level": { masks: "mgmt=DEBUG,ERR", expErr: errors.New("want PREFIX=LEVEL"), }, "multiple assignments": { masks: "mgmt=DEBUG,bio=ERR", }, "multiple assignments; bad format": { masks: "mgmt=DEBUG,bio=ERR=", expErr: errors.New("want PREFIX=LEVEL"), }, "multiple assignments; bad chars": { masks: "mgmt=DEBUG,bio!=ERR", expErr: errors.New("illegal characters"), }, "too long": { masks: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", expErr: errors.New("exceeds maximum length (1024>1023)"), }, } { t.Run(name, func(t *testing.T) { gotErr := ValidateLogMasks(tc.masks) common.CmpErr(t, tc.expErr, gotErr) }) } }
explode_data.jsonl/7244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 881 }
[ 2830, 3393, 17926, 2201, 44, 4604, 1155, 353, 8840, 836, 8, 341, 2023, 829, 11, 17130, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 2109, 4604, 220, 914, 198, 197, 48558, 7747, 1465, 198, 197, 59403, 197, 197, 1, 3194, 788, 14573, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientCoordinatorWithoutConsumerOffsetsTopic(t *testing.T) { seedBroker := NewMockBroker(t, 1) coordinator := NewMockBroker(t, 2) metadataResponse1 := new(MetadataResponse) seedBroker.Returns(metadataResponse1) config := NewTestConfig() config.Metadata.Retry.Max = 1 config.Metadata.Retry.Backoff = 0 client, err := NewClient([]string{seedBroker.Addr()}, config) if err != nil { t.Fatal(err) } coordinatorResponse1 := new(ConsumerMetadataResponse) coordinatorResponse1.Err = ErrConsumerCoordinatorNotAvailable seedBroker.Returns(coordinatorResponse1) metadataResponse2 := new(MetadataResponse) metadataResponse2.AddTopic("__consumer_offsets", ErrUnknownTopicOrPartition) seedBroker.Returns(metadataResponse2) replicas := []int32{coordinator.BrokerID()} metadataResponse3 := new(MetadataResponse) metadataResponse3.AddTopicPartition("__consumer_offsets", 0, replicas[0], replicas, replicas, []int32{}, ErrNoError) seedBroker.Returns(metadataResponse3) coordinatorResponse2 := new(ConsumerMetadataResponse) coordinatorResponse2.CoordinatorID = coordinator.BrokerID() coordinatorResponse2.CoordinatorHost = "127.0.0.1" coordinatorResponse2.CoordinatorPort = coordinator.Port() seedBroker.Returns(coordinatorResponse2) broker, err := client.Coordinator("my_group") if err != nil { t.Error(err) } if coordinator.Addr() != broker.Addr() { t.Errorf("Expected coordinator to have address %s, found %s", coordinator.Addr(), broker.Addr()) } if coordinator.BrokerID() != broker.ID() { t.Errorf("Expected coordinator to have ID %d, found %d", coordinator.BrokerID(), broker.ID()) } coordinator.Close() seedBroker.Close() safeClose(t, client) }
explode_data.jsonl/54413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 563 }
[ 2830, 3393, 2959, 64304, 26040, 29968, 81095, 26406, 1155, 353, 8840, 836, 8, 341, 197, 22602, 65545, 1669, 1532, 11571, 65545, 1155, 11, 220, 16, 340, 197, 1015, 17442, 1669, 1532, 11571, 65545, 1155, 11, 220, 17, 692, 2109, 7603, 2582...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCompareConfigValue(t *testing.T) { // Normal equality require.True(t, comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "foo", Value: []byte("bar"), }, }.equals(comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "foo", Value: []byte("bar"), }, }), "Should have found identical config values to be identical") // Different Mod Policy require.False(t, comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "foo", Value: []byte("bar"), }, }.equals(comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "bar", Value: []byte("bar"), }, }), "Should have detected different mod policy") // Different Value require.False(t, comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "foo", Value: []byte("bar"), }, }.equals(comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "foo", Value: []byte("foo"), }, }), "Should have detected different value") // Different Version require.False(t, comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "foo", Value: []byte("bar"), }, }.equals(comparable{ ConfigValue: &cb.ConfigValue{ Version: 1, ModPolicy: "foo", Value: []byte("bar"), }, }), "Should have detected different version") // One nil value require.False(t, comparable{ ConfigValue: &cb.ConfigValue{ Version: 0, ModPolicy: "foo", Value: []byte("bar"), }, }.equals(comparable{}), "Should have detected nil other value") }
explode_data.jsonl/40375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 655 }
[ 2830, 3393, 27374, 2648, 1130, 1155, 353, 8840, 836, 8, 341, 197, 322, 18437, 21777, 198, 17957, 32443, 1155, 11, 29039, 515, 197, 66156, 1130, 25, 609, 7221, 10753, 1130, 515, 298, 77847, 25, 256, 220, 15, 345, 298, 197, 4459, 13825,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestValidateGroupers exercises four import-grouping strategies
// (combined, goimports-style, local-in-the-middle, and a deliberately
// weird ordering) against a series of import statements, checking which
// grouper accepts which layout. vopts{invalid: true} marks a layout the
// grouper must reject; vopts{verrstr: ...} marks an expected ordering
// error within an accepted layout.
// NOTE(review): the raw-string import snippets appear here on single
// lines; their exact whitespace is part of the fixture — do not reflow.
func TestValidateGroupers(t *testing.T) {
	t.Parallel()
	// No imports statement.
	testValidate(t, grouperCombined{}, vopts{}, "")
	testValidate(t, grouperGoimports{}, vopts{}, "")
	testValidate(t, grouperLocalMiddle{}, vopts{}, "")
	testValidate(t, grouperWeird{}, vopts{}, "")
	// Just one import.
	imports := `import "os"`
	testValidate(t, grouperCombined{}, vopts{}, imports)
	testValidate(t, grouperGoimports{}, vopts{}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
	testValidate(t, grouperWeird{}, vopts{}, imports)
	// Multiple imports in same group, ordered ok.
	imports = `import ( "os" "strings" "testing" )`
	testValidate(t, grouperCombined{}, vopts{}, imports)
	testValidate(t, grouperGoimports{}, vopts{}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
	testValidate(t, grouperWeird{}, vopts{}, imports)
	// Multiple imports in same group, ordered poorly.
	imports = `import ( "strings" "os" )`
	testValidate(t, grouperCombined{}, vopts{verrstr: errstrStatementOrder}, imports)
	testValidate(t, grouperGoimports{}, vopts{verrstr: errstrStatementOrder}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{verrstr: errstrStatementOrder}, imports)
	testValidate(t, grouperWeird{}, vopts{verrstr: errstrStatementOrder}, imports)
	// Imports grouped together.
	imports = `import ( "github.com/Sirupsen/logrus" "os" )`
	testValidate(t, grouperCombined{}, vopts{}, imports)
	testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
	testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
	// Std/other separated.
	imports = `import ( "os" "github.com/Sirupsen/logrus" )`
	testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
	testValidate(t, grouperGoimports{}, vopts{}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
	testValidate(t, grouperWeird{}, vopts{}, imports)
	// Std/other separated but backwards.
	imports = `import ( "github.com/Sirupsen/logrus" "os" )`
	testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
	testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
	testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
	// Std/other/local.
	imports = `import ( "os" "github.com/Sirupsen/logrus" "local/foo" )`
	testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
	testValidate(t, grouperGoimports{}, vopts{}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
	testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
	// Std/other/appengine/local.
	imports = `import ( "os" "testing" "github.com/Sirupsen/logrus" "appengine" "local/foo" )`
	testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
	testValidate(t, grouperGoimports{}, vopts{}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
	testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
	// Local in the middle.
	imports = `import ( "os" "strings" "local/bar" "local/foo" "github.com/Sirupsen/logrus" "gopkg.in/redis.v3" )`
	testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
	testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{}, imports)
	testValidate(t, grouperWeird{}, vopts{invalid: true}, imports)
	// Weird ordering, just to prove we can.
	imports = `import ( "strings" "go/parser" "gopkg.in/redis.v3" "local/pkg" "github.com/Sirupsen/logrus" "local/foo/bar" )`
	testValidate(t, grouperCombined{}, vopts{invalid: true}, imports)
	testValidate(t, grouperGoimports{}, vopts{invalid: true}, imports)
	testValidate(t, grouperLocalMiddle{}, vopts{invalid: true}, imports)
	testValidate(t, grouperWeird{}, vopts{}, imports)
}
explode_data.jsonl/507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1584 }
[ 2830, 3393, 17926, 2808, 388, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 2308, 15202, 5114, 624, 18185, 17926, 1155, 11, 93768, 712, 94268, 22655, 348, 10518, 22655, 14676, 18185, 17926, 1155, 11, 93768, 712, 10850, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBalances sets and reads back waves and asset balances through the
// balances storage, flushing after each write and verifying the stored
// profile/amount round-trips unchanged.
func TestBalances(t *testing.T) {
	to, path, err := createBalances()
	assert.NoError(t, err, "createBalances() failed")
	// Close storage and remove temp dirs regardless of test outcome.
	defer func() {
		to.stor.close(t)
		err = common.CleanTemporaryDirs(path)
		assert.NoError(t, err, "failed to clean test data dirs")
	}()
	// Two blocks: writes below are attributed to one of them.
	to.stor.addBlock(t, blockID0)
	to.stor.addBlock(t, blockID1)
	// Waves balance cases: profile fields are presumably
	// {balance, leaseIn, leaseOut} — TODO confirm against balanceProfile.
	wavesTests := []struct {
		addr    string
		profile balanceProfile
		blockID proto.BlockID
	}{
		{addr0, balanceProfile{100, 0, 0}, blockID0},
		{addr1, balanceProfile{2500, 0, 0}, blockID0},
		{addr2, balanceProfile{10, 5, 0}, blockID1},
		{addr3, balanceProfile{10, 5, 3}, blockID1},
	}
	for _, tc := range wavesTests {
		addr, err := proto.NewAddressFromString(tc.addr)
		assert.NoError(t, err, "NewAddressFromString() failed")
		// NOTE(review): "Faied" typo is in the original failure message.
		if err := to.balances.setWavesBalance(addr.ID(), newWavesValueFromProfile(tc.profile), tc.blockID); err != nil {
			t.Fatalf("Faied to set waves balance:%v\n", err)
		}
		// Flush so the read below goes through persisted state.
		to.stor.flush(t)
		profile, err := to.balances.wavesBalance(addr.ID(), true)
		if err != nil {
			t.Fatalf("Failed to retrieve waves balance: %v\n", err)
		}
		if *profile != tc.profile {
			t.Errorf("Waves balance profiles are not equal: %v and %v\n", profile, tc.profile)
		}
	}
	// Asset balance cases: same address/asset written repeatedly; the
	// last write per flush wins and is what the read must return.
	assetTests := []struct {
		addr    string
		assetID crypto.Digest
		balance uint64
		blockID proto.BlockID
	}{
		{addr0, genAsset(1), 100, blockID0},
		{addr0, genAsset(1), 2500, blockID0},
		{addr0, genAsset(1), 10, blockID1},
	}
	for _, tc := range assetTests {
		addr, err := proto.NewAddressFromString(tc.addr)
		assert.NoError(t, err, "NewAddressFromString() failed")
		// Asset must be known to the assets state before balances are set.
		addTailInfoToAssetsState(to.stor.entities.assets, tc.assetID)
		if err := to.balances.setAssetBalance(addr.ID(), proto.AssetIDFromDigest(tc.assetID), tc.balance, tc.blockID); err != nil {
			t.Fatalf("Faied to set asset balance: %v\n", err)
		}
		to.stor.flush(t)
		balance, err := to.balances.assetBalance(addr.ID(), proto.AssetIDFromDigest(tc.assetID), true)
		if err != nil {
			t.Fatalf("Failed to retrieve asset balance: %v\n", err)
		}
		if balance != tc.balance {
			t.Errorf("Asset balances are not equal: %d and %d\n", balance, tc.balance)
		}
	}
}
explode_data.jsonl/37805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 870 }
[ 2830, 3393, 37889, 3020, 1155, 353, 8840, 836, 8, 341, 31709, 11, 1815, 11, 1848, 1669, 1855, 37889, 3020, 741, 6948, 35699, 1155, 11, 1848, 11, 330, 3182, 37889, 3020, 368, 4641, 5130, 16867, 2915, 368, 341, 197, 31709, 1236, 269, 46...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsCountry(t *testing.T) { tests := []struct { reg string country bool }{ {"US", true}, {"001", false}, {"958", false}, {"419", false}, {"203", true}, {"020", true}, {"900", false}, {"999", false}, {"QO", false}, {"EU", false}, {"AA", false}, {"XK", true}, } for i, tt := range tests { r, _ := ParseRegion(tt.reg) if r.IsCountry() != tt.country { t.Errorf("%d: IsCountry(%s) was %v; want %v", i, tt.reg, r.IsCountry(), tt.country) } } }
explode_data.jsonl/15839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 3872, 16408, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 37013, 257, 914, 198, 197, 1444, 4976, 1807, 198, 197, 59403, 197, 197, 4913, 2034, 497, 830, 1583, 197, 197, 4913, 15, 15, 16, 497, 895, 1583...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_PrivateEndpointConnection_Status_SubResourceEmbedded_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip of PrivateEndpointConnection_Status_SubResourceEmbedded via JSON returns original", prop.ForAll(RunJSONSerializationTestForPrivateEndpointConnectionStatusSubResourceEmbedded, PrivateEndpointConnectionStatusSubResourceEmbeddedGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout)) }
explode_data.jsonl/44559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 43830, 27380, 4526, 36449, 36359, 4783, 83466, 62, 4498, 77521, 78967, 98054, 2848, 4756, 2121, 2993, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535, 1695, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFullFlow is an end-to-end integration test: it seeds a subscription,
// starts the parser and the notifier consumer, then drains 11 messages
// from the transactions channel and checks each one.
// NOTE(review): relies on fixed sleeps for startup ordering — timing
// sensitive; do not reorder the goroutine launches.
func TestFullFlow(t *testing.T) {
	// Reset the Postgres test container to a clean state.
	setup.CleanupPgContainer(database.Gorm)
	err := database.AddSubscriptions([]models.Subscription{{Coin: 60, Address: "testAddress"}}, context.Background())
	assert.Nil(t, err)
	ctx, cancel := context.WithCancel(context.Background())
	// Buffered so the parser can signal completion without blocking.
	stopChan := make(chan struct{}, 1)
	params := setupParserFull(stopChan)
	params.Database = database
	params.Ctx = ctx
	params.Queue = mq.RawTransactions
	go parser.RunParser(params)
	// Give the parser time to start before attaching the consumer.
	time.Sleep(time.Second * 2)
	go mq.RunConsumerForChannelWithCancelAndDbConn(notifier.RunNotifier, rawTransactionsChannel, database, ctx)
	time.Sleep(time.Second * 5)
	// 11 messages are expected from the fixture data; the consumer helper
	// cancels the context on the final iteration.
	for i := 0; i < 11; i++ {
		x := transactionsChannel.GetMessage()
		ConsumerToTestTransactionsFull(x, t, cancel, i)
	}
	// Wait for the parser to acknowledge shutdown.
	<-stopChan
}
explode_data.jsonl/75673
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 9432, 18878, 1155, 353, 8840, 836, 8, 341, 84571, 727, 60639, 82540, 4502, 41649, 1224, 493, 340, 9859, 1669, 4625, 1904, 3136, 29966, 10556, 6507, 12391, 12124, 2979, 41180, 25, 220, 21, 15, 11, 9177, 25, 330, 1944, 4286, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBytesToTime(t *testing.T) { for _, tt := range timeBytesTests { output := bytesToTime(tt.b) expected, err := time.Parse(time.RFC3339, tt.rfc) if err != nil { t.Fatalf("Error parsing expected date: %v", err) } if !expected.Equal(output) { t.Errorf("bytesToTime(%d) expected output %v, actual %v", tt.b, expected, output) } } }
explode_data.jsonl/71544
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 7078, 1249, 1462, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 882, 7078, 18200, 341, 197, 21170, 1669, 5820, 1249, 1462, 47152, 948, 340, 197, 42400, 11, 1848, 1669, 882, 8937, 9730, 2013, 6754, 18, 18, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBinary(t *testing.T) { require := require.New(t) // Validate Binary is reflexive e := NewBinary(NewGetField(0, sql.Text, "foo", true)) require.Equal(eval(t, e, sql.NewRow("hi")), eval(t, e, sql.NewRow("hi"))) // Go through assorted test cases testCases := []struct { val interface{} valType sql.Type expected string }{ {"hi", sql.MustCreateBinary(query.Type_VARBINARY, int64(16)), "hi"}, {int8(1), sql.Int8, "1"}, {true, sql.Boolean, "true"}, {"hello", sql.LongText, "hello"}, } for _, tt := range testCases { f := NewBinary(NewLiteral(tt.val, tt.valType)) require.Equal(tt.expected, eval(t, f, sql.Row{nil})) } // Try with nil case e = NewBinary(NewLiteral(nil, sql.Null)) require.Equal(nil, eval(t, e, sql.Row{nil})) }
explode_data.jsonl/49647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 328 }
[ 2830, 3393, 21338, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 197, 322, 23282, 17718, 374, 32666, 533, 198, 7727, 1669, 1532, 21338, 35063, 1949, 1877, 7, 15, 11, 5704, 1979, 11, 330, 7975, 497, 830, 1171, 17957...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUintSetNumericString(t *testing.T) { v := "56" var expected uint64 = 56 tu := Uint{} err := tu.Set(v) if err != nil { t.Errorf("Not Expected error. error:%v", err.Error()) } if tu.Weak() != expected { t.Errorf("This value should return nil. error:%#v", tu.Weak()) } }
explode_data.jsonl/13393
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 21570, 1649, 36296, 703, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 330, 20, 21, 698, 2405, 3601, 2622, 21, 19, 284, 220, 20, 21, 198, 3244, 84, 1669, 27883, 16094, 9859, 1669, 9765, 4202, 3747, 340, 743, 1848, 961, 2092,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFromEntries(t *testing.T) { is := assert.New(t) r1 := FromEntries[string, int]([]Entry[string, int]{ { Key: "foo", Value: 1, }, { Key: "bar", Value: 2, }, }) is.Len(r1, 2) is.Equal(r1["foo"], 1) is.Equal(r1["bar"], 2) }
explode_data.jsonl/52773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 3830, 24533, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 2060, 7121, 1155, 692, 7000, 16, 1669, 5542, 24533, 14032, 11, 526, 9533, 1294, 5874, 14032, 11, 526, 60, 515, 197, 197, 515, 298, 55242, 25, 256, 330, 7975, 756, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAddHTMLEntry checks how AddHTMLEntry turns remote paths into
// directory entries: leaf extraction, trailing slash for directories,
// URL escaping of quotes, "./"-prefixing of names containing a colon,
// and propagation of query parameters set via SetQuery.
func TestAddHTMLEntry(t *testing.T) {
	var modtime = time.Now()
	var d = NewDirectory("z", GetTemplate(t))
	// Root, a directory, a nested file, and two names needing escaping.
	d.AddHTMLEntry("", true, 0, modtime)
	d.AddHTMLEntry("dir", true, 0, modtime)
	d.AddHTMLEntry("a/b/c/d.txt", false, 64, modtime)
	d.AddHTMLEntry("a/b/c/colon:colon.txt", false, 64, modtime)
	d.AddHTMLEntry("\"quotes\".txt", false, 64, modtime)
	assert.Equal(t, []DirEntry{
		{remote: "", URL: "/", Leaf: "/", IsDir: true, Size: 0, ModTime: modtime},
		{remote: "dir", URL: "dir/", Leaf: "dir/", IsDir: true, Size: 0, ModTime: modtime},
		{remote: "a/b/c/d.txt", URL: "d.txt", Leaf: "d.txt", IsDir: false, Size: 64, ModTime: modtime},
		// Colon names get a "./" prefix so browsers don't read a scheme.
		{remote: "a/b/c/colon:colon.txt", URL: "./colon:colon.txt", Leaf: "colon:colon.txt", IsDir: false, Size: 64, ModTime: modtime},
		// Quotes are percent-encoded in the URL but kept in the leaf.
		{remote: "\"quotes\".txt", URL: "%22quotes%22.txt", Leaf: "\"quotes\".txt", Size: 64, IsDir: false, ModTime: modtime},
	}, d.Entries)
	// Now test with a query parameter
	d = NewDirectory("z", GetTemplate(t)).SetQuery(url.Values{"potato": []string{"42"}})
	d.AddHTMLEntry("file", false, 64, modtime)
	d.AddHTMLEntry("dir", true, 0, modtime)
	assert.Equal(t, []DirEntry{
		{remote: "file", URL: "file?potato=42", Leaf: "file", IsDir: false, Size: 64, ModTime: modtime},
		{remote: "dir", URL: "dir/?potato=42", Leaf: "dir/", IsDir: true, Size: 0, ModTime: modtime},
	}, d.Entries)
}
explode_data.jsonl/964
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 590 }
[ 2830, 3393, 2212, 2545, 25045, 77, 1539, 1155, 353, 8840, 836, 8, 341, 2405, 1463, 1678, 284, 882, 13244, 741, 2405, 294, 284, 1532, 9310, 445, 89, 497, 2126, 7275, 1155, 1171, 2698, 1904, 2545, 25045, 77, 1539, 19814, 830, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestArchFirstClass(t *testing.T) { cases := []struct { name string want bool }{ { name: "linux-amd64-longtest", want: true, }, { name: "linux-buzz-longtest", want: false, }, { name: "linux-amd64", want: true, }, { name: "linux", want: false, }, } for _, c := range cases { a := &arch{Name: c.name} if a.FirstClass() != c.want { t.Errorf("%+v.FirstClass() = %v, wanted %v", a, a.FirstClass(), c.want) } } }
explode_data.jsonl/45362
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 18727, 5338, 1957, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 14210, 32217, 67, 21, 19, 23791, 1944, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestCanAccessNested checks CanAccessNested: the caller's roles are read
// from the claims map at a dot-separated key path (roleKey), and access
// is granted when at least one required role is present — TODO confirm
// the any-vs-all semantics against the implementation; the fixtures
// below are consistent with "any".
func TestCanAccessNested(t *testing.T) {
	for _, v := range []struct {
		name         string
		roleKey      string
		claims       map[string]interface{}
		requirements []string
		expected     bool
	}{
		{
			// Role "a" required and present.
			name:         "simple_success",
			roleKey:      "role",
			claims:       map[string]interface{}{"role": []interface{}{"a", "b"}},
			requirements: []string{"a"},
			expected:     true,
		},
		{
			// Role "a" required but only "c" and "b" held.
			name:         "simple_sfail",
			roleKey:      "role",
			claims:       map[string]interface{}{"role": []interface{}{"c", "b"}},
			requirements: []string{"a"},
			expected:     false,
		},
		{
			// Several acceptable roles; holding any one ("c") suffices.
			name:         "multiple_success",
			roleKey:      "role",
			claims:       map[string]interface{}{"role": []interface{}{"c"}},
			requirements: []string{"a", "b", "c"},
			expected:     true,
		},
		{
			// Role key one level deep: "data.role".
			name:         "struct_success",
			roleKey:      "data.role",
			claims:       map[string]interface{}{"data": map[string]interface{}{"role": []interface{}{"c"}}},
			requirements: []string{"a", "b", "c"},
			expected:     true,
		},
		{
			// Role key seven levels deep exercises path traversal.
			name:    "complex_struct_success",
			roleKey: "data.data.data.data.data.data.data.role",
			claims: map[string]interface{}{
				"data": map[string]interface{}{
					"data": map[string]interface{}{
						"data": map[string]interface{}{
							"data": map[string]interface{}{
								"data": map[string]interface{}{
									"data": map[string]interface{}{
										"data": map[string]interface{}{
											"role": []interface{}{"c"},
										},
									},
								},
							},
						},
					},
				},
			},
			requirements: []string{"a", "b", "c"},
			expected:     true,
		},
	} {
		t.Run(v.name, func(t *testing.T) {
			if res := CanAccessNested(v.roleKey, v.claims, v.requirements); res != v.expected {
				t.Errorf("'%s' have %v, want %v", v.name, res, v.expected)
			}
		})
	}
}
explode_data.jsonl/67488
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 917 }
[ 2830, 3393, 6713, 6054, 71986, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 348, 1669, 2088, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 197, 5778, 1592, 414, 914, 198, 197, 197, 48561, 981, 2415, 14032, 31344, 16094, 197, 17957, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCheckAuthorization_UntrustedIssuer removes the visa issuers from
// the trusted set and expects checkAuthorization to deny access with a
// PermissionDenied status carrying the errUntrustedIssuer reason.
func TestCheckAuthorization_UntrustedIssuer(t *testing.T) {
	// Perform exactly the same call as TestCheckAuthorization() except remove trust of the visa issuer string
	auth := setupAuthorizationTest(t)
	delete(auth.cfg.TrustedIssuers, "test")
	delete(auth.cfg.TrustedIssuers, "testBroker")
	// Visa population itself must still succeed; only authorization fails.
	id, err := auth.dam.populateIdentityVisas(auth.ctx, auth.id, auth.cfg)
	if err != nil {
		t.Fatalf("unable to obtain passport identity: %v", err)
	}
	err = checkAuthorization(auth.ctx, id, auth.ttl, auth.resource, auth.view, auth.role, auth.cfg, test.TestClientID, auth.dam.ValidateCfgOpts(storage.DefaultRealm, nil))
	// Expect a gRPC PermissionDenied code on the returned error.
	if status.Code(err) != codes.PermissionDenied {
		t.Errorf("checkAuthorization(ctx, id, %v, %q, %q, %q, cfg, %q) failed, expected %d, got: %v", auth.ttl, auth.resource, auth.view, auth.role, test.TestClientID, codes.PermissionDenied, err)
	}
	// And the structured error reason must identify the untrusted issuer.
	if errutil.ErrorReason(err) != errUntrustedIssuer {
		t.Errorf("errutil.ErrorReason() = %s want %s", errutil.ErrorReason(err), errUntrustedIssuer)
	}
}
explode_data.jsonl/18482
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 362 }
[ 2830, 3393, 3973, 18124, 40687, 83837, 98902, 1155, 353, 8840, 836, 8, 341, 197, 322, 25001, 6896, 279, 1852, 1618, 438, 3393, 3973, 18124, 368, 3650, 4057, 6950, 315, 279, 26655, 54835, 914, 198, 78011, 1669, 6505, 18124, 2271, 1155, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestDeleteVolumePublication verifies the orchestrator's publication
// cache maintenance on delete: the volume's cache entry disappears
// entirely when its last node publication is removed, and only the node
// entry disappears when other publications for the volume remain.
func TestDeleteVolumePublication(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	// Create a mocked persistent store client
	mockStoreClient := mockpersistentstore.NewMockStoreClient(mockCtrl)
	// Set the store client behavior we don't care about for this testcase
	mockStoreClient.EXPECT().GetVolumeTransactions(gomock.Any()).Return([]*storage.VolumeTransaction{}, nil).AnyTimes()
	// Create a fake VolumePublication
	fakePub := &utils.VolumePublication{
		Name:       "foo/bar",
		NodeName:   "bar",
		VolumeName: "foo",
		ReadOnly:   true,
		AccessMode: 1,
	}
	// fakePub2 is the only publication of volume "baz".
	fakePub2 := &utils.VolumePublication{
		Name:       "baz/biz",
		NodeName:   "biz",
		VolumeName: "baz",
		ReadOnly:   true,
		AccessMode: 1,
	}
	// fakePub3 shares volume "foo" with fakePub, on a different node.
	fakePub3 := &utils.VolumePublication{
		Name:       fmt.Sprintf("%s/buz", fakePub.VolumeName),
		NodeName:   "buz",
		VolumeName: fakePub.VolumeName,
		ReadOnly:   true,
		AccessMode: 1,
	}
	// Create an instance of the orchestrator for this test
	orchestrator := getOrchestrator(t)
	// Add the mocked objects to the orchestrator
	orchestrator.storeClient = mockStoreClient
	// Populate volume publications
	orchestrator.addVolumePublicationToCache(fakePub)
	orchestrator.addVolumePublicationToCache(fakePub2)
	orchestrator.addVolumePublicationToCache(fakePub3)
	// Verify if this is the last nodeID for a given volume the volume entry is completely removed from the cache
	mockStoreClient.EXPECT().DeleteVolumePublication(gomock.Any(), fakePub2).Return(nil)
	err := orchestrator.DeleteVolumePublication(context.Background(), fakePub2.VolumeName, fakePub2.NodeName)
	assert.Nilf(t, err, fmt.Sprintf("unexpected error deleting volume publication: %v", err))
	assert.NotContains(t, orchestrator.volumePublications, fakePub2.VolumeName, "publication not properly removed from cache")
	// Verify if this is not the last nodeID for a given volume the volume entry is not removed from the cache
	mockStoreClient.EXPECT().DeleteVolumePublication(gomock.Any(), fakePub3).Return(nil)
	err = orchestrator.DeleteVolumePublication(context.Background(), fakePub3.VolumeName, fakePub3.NodeName)
	assert.Nilf(t, err, fmt.Sprintf("unexpected error deleting volume publication: %v", err))
	// Volume entry survives; only fakePub3's node key is gone.
	assert.NotNil(t, orchestrator.volumePublications[fakePub3.VolumeName], "publication not properly removed from cache")
	assert.NotContains(t, orchestrator.volumePublications[fakePub3.VolumeName], fakePub3.NodeName, "publication not properly removed from cache")
	assert.Contains(t, orchestrator.volumePublications[fakePub.VolumeName], fakePub.NodeName, "publication not properly removed from cache")
}
explode_data.jsonl/62753
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 841 }
[ 2830, 3393, 6435, 18902, 72390, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 197, 322, 4230, 264, 46149, 24999, 3553, 2943, 198, 77333, 6093, 2959, 1669, 7860, 69389, 4314, 7121, 11571, 6093, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew(t *testing.T) { doc, _ := New(Invoice, &Options{ TextTypeInvoice: "Faktura numer 1/01/2021", TextRefTitle: "Data wystawienia", TextDateTitle: "Data sprzedaży", TextVersionTitle: "Data zapłaty", AutoPrint: true, CurrencySymbol: "zł ", TextItemsQuantityTitle: "ilość", TextItemsUnitCostTitle: "Cena jedn. netto", TextItemsTotalHTTitle: "Wartość netto", TextItemsTaxTitle: "Stawka VAT", TextItemsDiscountTitle: "Wartość VAT", TextItemsTotalTTCTitle: "Wartość brutto", TextItemsNameTitle: "Nazwa", DisplayDiscount: false, TextTotalTotal: "Suma netto", TextTotalTax: "Suma VAT", TextTotalWithTax: "Suma Brutto", TextPaymentTermTitle: "Termin płatności", }) doc.SetRef("01/02/2021") doc.SetVersion("02/03/2021") doc.SetDate("02/03/2021") doc.SetPaymentTerm("02/04/2021") doc.SetCompany(&Contact{ Name: "Test Company", Address: &Address{ Address: "89 Rue de Brest", Address2: "Appartement 2", PostalCode: "75000", City: "Paris", Country: "France", }, }) doc.SetCustomer(&Contact{ Name: "Test Customer", Address: &Address{ Address: "89 Rue de Paris", PostalCode: "29200", City: "Brest", Country: "France", }, }) doc.AppendItem(&Item{ Name: "Test", UnitCost: "10000", Quantity: "1", }) doc.SetDefaultTax(&Tax{ Percent: "23", }) pdf, err := doc.Build() if err != nil { t.Errorf(err.Error()) } err = pdf.OutputFileAndClose("out.pdf") if err != nil { t.Errorf(err.Error()) } }
explode_data.jsonl/18540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 694 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 59536, 11, 716, 1669, 1532, 7, 34674, 11, 609, 3798, 515, 197, 49635, 929, 34674, 25, 330, 37, 9913, 5690, 7857, 220, 16, 14, 15, 16, 14, 17, 15, 17, 16, 756, 197, 49635, 3945, 385...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestDeleteOutdatedMirrorPod syncs a static pod whose mirror pod has an
// outdated spec and expects the kubelet to delete the stale mirror and
// create a fresh one (exactly one creation and one deletion).
func TestDeleteOutdatedMirrorPod(t *testing.T) {
	testKubelet := newTestKubelet(t)
	// Stub out cadvisor calls the sync path makes.
	testKubelet.fakeCadvisor.On("Start").Return(nil)
	testKubelet.fakeCadvisor.On("MachineInfo").Return(&cadvisorapi.MachineInfo{}, nil)
	testKubelet.fakeCadvisor.On("DockerImagesFsInfo").Return(cadvisorapiv2.FsInfo{}, nil)
	testKubelet.fakeCadvisor.On("RootFsInfo").Return(cadvisorapiv2.FsInfo{}, nil)
	kl := testKubelet.kubelet
	manager := testKubelet.fakeMirrorClient
	// Source pod: file-sourced (static), image "foo".
	pod := &api.Pod{
		ObjectMeta: api.ObjectMeta{
			UID:       "12345678",
			Name:      "foo",
			Namespace: "ns",
			Annotations: map[string]string{
				kubetypes.ConfigSourceAnnotationKey: "file",
			},
		},
		Spec: api.PodSpec{
			Containers: []api.Container{
				{Name: "1234", Image: "foo"},
			},
		},
	}
	// Mirror pod has an outdated spec.
	mirrorPod := &api.Pod{
		ObjectMeta: api.ObjectMeta{
			UID:       "11111111",
			Name:      "foo",
			Namespace: "ns",
			Annotations: map[string]string{
				kubetypes.ConfigSourceAnnotationKey: "api",
				kubetypes.ConfigMirrorAnnotationKey: "mirror",
			},
		},
		Spec: api.PodSpec{
			Containers: []api.Container{
				{Name: "1234", Image: "bar"},
			},
		},
	}
	pods := []*api.Pod{pod, mirrorPod}
	kl.podManager.SetPods(pods)
	err := kl.syncPod(pod, mirrorPod, &container.PodStatus{}, kubetypes.SyncPodUpdate)
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	// The mirror client counts creations/deletions keyed by full pod name.
	name := kubecontainer.GetPodFullName(pod)
	creates, deletes := manager.GetCounts(name)
	if creates != 1 || deletes != 1 {
		t.Errorf("expected 1 creation and 1 deletion of %q, got %d, %d", name, creates, deletes)
	}
}
explode_data.jsonl/43338
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 682 }
[ 2830, 3393, 6435, 2662, 3577, 54216, 23527, 1155, 353, 8840, 836, 8, 341, 18185, 42, 3760, 1149, 1669, 501, 2271, 42, 3760, 1149, 1155, 340, 18185, 42, 3760, 1149, 94624, 34, 81794, 8071, 445, 3479, 1827, 5598, 27907, 340, 18185, 42, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAbsCollection_Avg(t *testing.T) { intColl := NewIntCollection([]int{1, 2, 2, 3}) mode, err := intColl.Avg().ToFloat64() if err != nil { t.Fatal(err.Error()) } if mode != 2.0 { t.Fatal("Avg error") } }
explode_data.jsonl/66451
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 27778, 6482, 1566, 7239, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 11, 220, 17, 11, 220, 18, 3518, 60247, 11, 1848, 1669, 526, 15265, 875, 7239, 1005, 1249, 5442,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIPMatchModel(t *testing.T) { e, _ := NewEnforcer("examples/ipmatch_model.conf", "examples/ipmatch_policy.csv") testEnforce(t, e, "192.168.2.123", "data1", "read", true) testEnforce(t, e, "192.168.2.123", "data1", "write", false) testEnforce(t, e, "192.168.2.123", "data2", "read", false) testEnforce(t, e, "192.168.2.123", "data2", "write", false) testEnforce(t, e, "192.168.0.123", "data1", "read", false) testEnforce(t, e, "192.168.0.123", "data1", "write", false) testEnforce(t, e, "192.168.0.123", "data2", "read", false) testEnforce(t, e, "192.168.0.123", "data2", "write", false) testEnforce(t, e, "10.0.0.5", "data1", "read", false) testEnforce(t, e, "10.0.0.5", "data1", "write", false) testEnforce(t, e, "10.0.0.5", "data2", "read", false) testEnforce(t, e, "10.0.0.5", "data2", "write", true) testEnforce(t, e, "192.168.0.1", "data1", "read", false) testEnforce(t, e, "192.168.0.1", "data1", "write", false) testEnforce(t, e, "192.168.0.1", "data2", "read", false) testEnforce(t, e, "192.168.0.1", "data2", "write", false) }
explode_data.jsonl/57135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 494 }
[ 2830, 3393, 3298, 8331, 1712, 1155, 353, 8840, 836, 8, 341, 7727, 11, 716, 1669, 1532, 1702, 82010, 445, 51668, 54919, 6347, 5047, 13937, 497, 330, 51668, 54919, 6347, 22773, 11219, 5130, 18185, 1702, 8833, 1155, 11, 384, 11, 330, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReplicaSetController(t *testing.T) { flag.Set("logtostderr", "true") flag.Set("v", "5") flag.Parse() replicaSetReviewDelay = 10 * time.Millisecond clusterAvailableDelay = 20 * time.Millisecond clusterUnavailableDelay = 60 * time.Millisecond allReplicaSetReviewDealy = 120 * time.Millisecond fedclientset := fedclientfake.NewSimpleClientset() fedrswatch := watch.NewFake() fedclientset.PrependWatchReactor("replicasets", core.DefaultWatchReactor(fedrswatch, nil)) fedclientset.Federation().Clusters().Create(testutil.NewCluster("k8s-1", apiv1.ConditionTrue)) fedclientset.Federation().Clusters().Create(testutil.NewCluster("k8s-2", apiv1.ConditionTrue)) kube1clientset := kubeclientfake.NewSimpleClientset() kube1rswatch := watch.NewFake() kube1clientset.PrependWatchReactor("replicasets", core.DefaultWatchReactor(kube1rswatch, nil)) kube1Podwatch := watch.NewFake() kube1clientset.PrependWatchReactor("pods", core.DefaultWatchReactor(kube1Podwatch, nil)) kube2clientset := kubeclientfake.NewSimpleClientset() kube2rswatch := watch.NewFake() kube2clientset.PrependWatchReactor("replicasets", core.DefaultWatchReactor(kube2rswatch, nil)) kube2Podwatch := watch.NewFake() kube2clientset.PrependWatchReactor("pods", core.DefaultWatchReactor(kube2Podwatch, nil)) fedInformerClientFactory := func(cluster *fedv1.Cluster) (kube_release_1_4.Interface, error) { switch cluster.Name { case "k8s-1": return kube1clientset, nil case "k8s-2": return kube2clientset, nil default: return nil, fmt.Errorf("Unknown cluster: %v", cluster.Name) } } replicaSetController := NewReplicaSetController(fedclientset) rsFedinformer := testutil.ToFederatedInformerForTestOnly(replicaSetController.fedReplicaSetInformer) rsFedinformer.SetClientFactory(fedInformerClientFactory) podFedinformer := testutil.ToFederatedInformerForTestOnly(replicaSetController.fedPodInformer) podFedinformer.SetClientFactory(fedInformerClientFactory) stopChan := make(chan struct{}) defer close(stopChan) go replicaSetController.Run(1, 
stopChan) rs := newReplicaSetWithReplicas("rs", 9) rs, _ = fedclientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Create(rs) fedrswatch.Add(rs) time.Sleep(1 * time.Second) rs1, _ := kube1clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Get(rs.Name) kube1rswatch.Add(rs1) rs1.Status.Replicas = *rs1.Spec.Replicas rs1, _ = kube1clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).UpdateStatus(rs1) kube1rswatch.Modify(rs1) rs2, _ := kube2clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Get(rs.Name) kube2rswatch.Add(rs2) rs2.Status.Replicas = *rs2.Spec.Replicas rs2, _ = kube2clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).UpdateStatus(rs2) kube2rswatch.Modify(rs2) time.Sleep(1 * time.Second) rs, _ = fedclientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Get(rs.Name) assert.Equal(t, *rs.Spec.Replicas, *rs1.Spec.Replicas+*rs2.Spec.Replicas) assert.Equal(t, rs.Status.Replicas, rs1.Status.Replicas+rs2.Status.Replicas) var replicas int32 = 20 rs.Spec.Replicas = &replicas rs, _ = fedclientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Update(rs) fedrswatch.Modify(rs) time.Sleep(1 * time.Second) rs1, _ = kube1clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Get(rs.Name) rs1.Status.Replicas = *rs1.Spec.Replicas rs1, _ = kube1clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).UpdateStatus(rs1) kube1rswatch.Modify(rs1) rs2, _ = kube2clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Get(rs.Name) rs2.Status.Replicas = *rs2.Spec.Replicas rs2, _ = kube2clientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).UpdateStatus(rs2) kube2rswatch.Modify(rs2) time.Sleep(1 * time.Second) rs, _ = fedclientset.Extensions().ReplicaSets(apiv1.NamespaceDefault).Get(rs.Name) assert.Equal(t, *rs.Spec.Replicas, *rs1.Spec.Replicas+*rs2.Spec.Replicas) assert.Equal(t, rs.Status.Replicas, rs1.Status.Replicas+rs2.Status.Replicas) }
explode_data.jsonl/61666
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1564 }
[ 2830, 3393, 18327, 15317, 1649, 2051, 1155, 353, 8840, 836, 8, 341, 30589, 4202, 445, 839, 83, 535, 67, 615, 497, 330, 1866, 1138, 30589, 4202, 445, 85, 497, 330, 20, 1138, 30589, 8937, 2822, 73731, 15317, 1649, 19432, 20039, 284, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRDSHTTPProxyDuplicateIncludeConditions(t *testing.T) { rh, cc, done := setup(t) defer done() svc1 := &v1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "kuard", Namespace: "default", }, Spec: v1.ServiceSpec{ Ports: []v1.ServicePort{{ Name: "http", Protocol: "TCP", Port: 8080, TargetPort: intstr.FromInt(8080), }}, }, } rh.OnAdd(svc1) svc2 := &v1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "kuard", Namespace: "teama", }, Spec: v1.ServiceSpec{ Ports: []v1.ServicePort{{ Name: "http", Protocol: "TCP", Port: 8080, TargetPort: intstr.FromInt(8080), }}, }, } rh.OnAdd(svc2) svc3 := &v1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "kuard", Namespace: "teamb", }, Spec: v1.ServiceSpec{ Ports: []v1.ServicePort{{ Name: "http", Protocol: "TCP", Port: 8080, TargetPort: intstr.FromInt(8080), }}, }, } rh.OnAdd(svc3) proxyRoot := &projcontour.HTTPProxy{ ObjectMeta: metav1.ObjectMeta{ Name: "root", Namespace: svc1.Namespace, }, Spec: projcontour.HTTPProxySpec{ VirtualHost: &projcontour.VirtualHost{ Fqdn: "example.com", }, Includes: []projcontour.Include{{ Name: "blogteama", Namespace: "teama", Conditions: []projcontour.MatchCondition{{ Prefix: "/blog", Header: &projcontour.HeaderMatchCondition{ Name: "x-header", Contains: "abc", }, }}, }, { Name: "blogteama", Namespace: "teamb", Conditions: []projcontour.MatchCondition{{ Prefix: "/blog", Header: &projcontour.HeaderMatchCondition{ Name: "x-header", Contains: "abc", }, }}, }}, Routes: []projcontour.Route{{ Conditions: []projcontour.MatchCondition{{ Prefix: "/", }}, Services: []projcontour.Service{{ Name: svc1.Name, Port: 8080, }}, }}, }, } proxyChildA := &projcontour.HTTPProxy{ ObjectMeta: metav1.ObjectMeta{ Name: "blogteama", Namespace: "teama", }, Spec: projcontour.HTTPProxySpec{ Routes: []projcontour.Route{{ Services: []projcontour.Service{{ Name: svc2.Name, Port: 8080, }}, }}, }, } proxyChildB := &projcontour.HTTPProxy{ ObjectMeta: metav1.ObjectMeta{ Name: "blogteamb", Namespace: "teamb", }, Spec: 
projcontour.HTTPProxySpec{ Routes: []projcontour.Route{{ Services: []projcontour.Service{{ Name: svc3.Name, Port: 8080, }}, }}, }, } rh.OnAdd(proxyRoot) rh.OnAdd(proxyChildA) rh.OnAdd(proxyChildB) assert.Equal(t, &v2.DiscoveryResponse{ VersionInfo: "2", Resources: routeResources(t, envoy.RouteConfiguration("ingress_http"), ), TypeUrl: routeType, Nonce: "2", }, streamRDS(t, cc)) }
explode_data.jsonl/70771
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1445 }
[ 2830, 3393, 49, 5936, 9230, 16219, 53979, 22283, 35435, 1155, 353, 8840, 836, 8, 341, 7000, 71, 11, 12527, 11, 2814, 1669, 6505, 1155, 340, 16867, 2814, 2822, 1903, 7362, 16, 1669, 609, 85, 16, 13860, 515, 197, 23816, 12175, 25, 77520...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouter_Load_FailsWhenCustomMatcherDoesNotExist(t *testing.T) { AddHandler(testHandlerFunc, "users.Handler") router := NewRouter() loader := sliceLoader{ RouteDef{ Method: "GET", Path: "/users", Handler: "users.Handler", Options: RouteDefOptions{ Name: "get.users", CustomMatcher: "notExists.CustomMatcher", }, }, } err := router.Load(&loader) assertNotNil(t, err) }
explode_data.jsonl/31760
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 9523, 19553, 1400, 6209, 4498, 10268, 37554, 21468, 45535, 1155, 353, 8840, 836, 8, 341, 37972, 3050, 8623, 3050, 9626, 11, 330, 4218, 31010, 5130, 67009, 1669, 1532, 9523, 741, 197, 8355, 1669, 15983, 9181, 515, 197, 47501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCacheBigEnoughHoldsAllFilter(t *testing.T) { // Create different sized filters. b1, f1, s1 := genRandFilter(1, t) b2, f2, s2 := genRandFilter(10, t) b3, f3, s3 := genRandFilter(100, t) cs := &ChainService{ FilterCache: lru.NewCache(s1 + s2 + s3), } // Insert those filters into the cache making sure nothing gets evicted. assertEqual(t, cs.FilterCache.Len(), 0, "") cs.putFilterToCache(b1, filterdb.RegularFilter, f1) assertEqual(t, cs.FilterCache.Len(), 1, "") cs.putFilterToCache(b2, filterdb.RegularFilter, f2) assertEqual(t, cs.FilterCache.Len(), 2, "") cs.putFilterToCache(b3, filterdb.RegularFilter, f3) assertEqual(t, cs.FilterCache.Len(), 3, "") // Check that we can get those filters back independent of Get order. assertEqual(t, getFilter(cs, b1, t), f1, "") assertEqual(t, getFilter(cs, b2, t), f2, "") assertEqual(t, getFilter(cs, b3, t), f3, "") assertEqual(t, getFilter(cs, b2, t), f2, "") assertEqual(t, getFilter(cs, b3, t), f3, "") assertEqual(t, getFilter(cs, b1, t), f1, "") assertEqual(t, getFilter(cs, b3, t), f3, "") assertEqual(t, cs.FilterCache.Len(), 3, "") }
explode_data.jsonl/18464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 8233, 15636, 95801, 39, 18431, 2403, 5632, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 2155, 29287, 13406, 624, 2233, 16, 11, 282, 16, 11, 274, 16, 1669, 4081, 56124, 5632, 7, 16, 11, 259, 340, 2233, 17, 11, 282, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_eventKey(t *testing.T) { tests := []struct { name string namespace string detectorID string deviceID string timestamp time.Time want string }{ { name: "simple", namespace: "ns", detectorID: "detID", deviceID: "devID", timestamp: time.Unix(1563568110, 0), want: "/ns/detectors/detID/1563568100/devID", }, { name: "simple", namespace: "ns", detectorID: "detID", deviceID: "devID", timestamp: time.Unix(1563568120, 0), want: "/ns/detectors/detID/1563568100/devID", }, { name: "simple", namespace: "ns", detectorID: "detID", deviceID: "devID", timestamp: time.Unix(1563568180, 0), want: "/ns/detectors/detID/1563568100/devID", }, { name: "simple", namespace: "ns", detectorID: "detID", deviceID: "devID", timestamp: time.Unix(1563568190, 0), want: "/ns/detectors/detID/1563568100/devID", }, { name: "simple", namespace: "ns", detectorID: "detID", deviceID: "devID", timestamp: time.Unix(1563568200, 0), want: "/ns/detectors/detID/1563568200/devID", }, { name: "simple", namespace: "ns", detectorID: "detID", deviceID: "devID", timestamp: time.Unix(1563568210, 0), want: "/ns/detectors/detID/1563568200/devID", }, { name: "simple", namespace: "ns", detectorID: "detID", deviceID: "devID", timestamp: time.Unix(1563568220, 0), want: "/ns/detectors/detID/1563568200/devID", }, } for _, test := range tests { tt := test t.Run(tt.name, func(t *testing.T) { got := EventKey(tt.namespace, tt.detectorID, tt.deviceID, tt.timestamp) // Need to remove non deterministic part of ulid. sp := strings.Split(got, "/") last := strings.Split(sp[len(sp)-1], ".") sp[len(sp)-1] = last[1] got = strings.Join(sp, "/") if got != tt.want { t.Errorf("detectorEventKey() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/11278
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1041 }
[ 2830, 3393, 6748, 1592, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 56623, 220, 914, 198, 197, 2698, 295, 1256, 915, 914, 198, 197, 54719, 915, 256, 914, 198, 197, 3244, 4702, 220, 882...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetFailsOnBadURL(t *testing.T) { c := NewClient(http.DefaultClient) old := os.Getenv(metadataHostEnv) defer os.Setenv(metadataHostEnv, old) os.Setenv(metadataHostEnv, "host:-1") _, err := c.Get("suffix") log.Printf("%v", err) if err == nil { t.Errorf("got %v, want non-nil error", err) } }
explode_data.jsonl/17306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 1949, 37, 6209, 1925, 17082, 3144, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 2959, 19886, 13275, 2959, 340, 61828, 1669, 2643, 64883, 54436, 9296, 14359, 340, 16867, 2643, 4202, 3160, 54436, 9296, 14359, 11, 2310, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAuthRequired(t *testing.T) { store := mockStore{} a := Authenticator{SessionStore: &store, DevPasswd: "123456"} router := chi.NewRouter() router.With(a.Auth(true)).Get("/auth", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(201) }) server := httptest.NewServer(router) defer server.Close() client := &http.Client{Timeout: 1 * time.Second} req, err := http.NewRequest("GET", server.URL+"/auth", nil) req = withBasicAuth(req, "dev", "123456") resp, err := client.Do(req) require.NoError(t, err) assert.Equal(t, 201, resp.StatusCode, "valid auth user") req, err = http.NewRequest("GET", server.URL+"/auth", nil) resp, err = client.Do(req) require.NoError(t, err) assert.Equal(t, 401, resp.StatusCode, "no auth user") req, err = http.NewRequest("GET", server.URL+"/auth", nil) req = withBasicAuth(req, "dev", "xyz") resp, err = client.Do(req) require.NoError(t, err) assert.Equal(t, 401, resp.StatusCode, "wrong auth creds") }
explode_data.jsonl/4151
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 5087, 8164, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 7860, 6093, 16094, 11323, 1669, 46367, 850, 90, 5283, 6093, 25, 609, 4314, 11, 6040, 12187, 6377, 25, 330, 16, 17, 18, 19, 20, 21, 16707, 67009, 1669, 25798, 7121, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAcceptancePythonFn(t *testing.T) { builder, cleanup := acceptance.CreateBuilder(t) t.Cleanup(cleanup) testCases := []acceptance.Test{ { Name: "function without framework", App: "without_framework", Path: "/testFunction", Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"}, MustUse: []string{pythonRuntime, pythonFF, pythonPIP}, MustNotUse: []string{entrypoint}, }, { Name: "function with custom source file", App: "custom_file", Path: "/testFunction", Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction", "GOOGLE_FUNCTION_SOURCE=func.py"}, MustUse: []string{pythonRuntime, pythonFF, pythonPIP}, MustNotUse: []string{entrypoint}, }, { Name: "function with dependencies", App: "with_dependencies", Path: "/testFunction", Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"}, MustUse: []string{pythonRuntime, pythonPIP, pythonFF}, MustNotUse: []string{entrypoint}, }, { Name: "function with framework", App: "with_framework", Path: "/testFunction", Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"}, MustUse: []string{pythonRuntime, pythonPIP, pythonFF}, MustNotUse: []string{entrypoint}, }, { Name: "function with runtime env var", App: "with_env_var", Path: "/testFunction", Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"}, RunEnv: []string{"FOO=foo"}, MustUse: []string{pythonRuntime, pythonFF, pythonPIP}, MustNotUse: []string{entrypoint}, }, } for _, tc := range testCases { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() acceptance.TestApp(t, builder, tc) }) } }
explode_data.jsonl/13558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 808 }
[ 2830, 3393, 16646, 681, 30280, 24911, 1155, 353, 8840, 836, 8, 341, 44546, 11, 21290, 1669, 25505, 7251, 3297, 1155, 340, 3244, 727, 60639, 1337, 60639, 692, 18185, 37302, 1669, 3056, 10330, 681, 8787, 515, 197, 197, 515, 298, 21297, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRPC_QueryBlock(t *testing.T) { codec.UpgradeHeight = 7000 _, _, cleanup := NewInMemoryTendermintNode(t, oneValTwoNodeGenesisState()) _, stopCli, evtChan := subscribeTo(t, tmTypes.EventNewBlock) var params = HeightParams{ Height: 1, } <-evtChan // Wait for block q := newQueryRequest("block", newBody(params)) rec := httptest.NewRecorder() Block(rec, q, httprouter.Params{}) resp := getJSONResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) var blk core_types.ResultBlock err := memCodec().UnmarshalJSON([]byte(resp), &blk) assert.Nil(t, err) assert.NotEmpty(t, blk.Block.Height) <-evtChan // Wait for block q = newQueryRequest("block", newBody(params)) rec = httptest.NewRecorder() Block(rec, q, httprouter.Params{}) resp = getJSONResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) var blk2 core_types.ResultBlock err = memCodec().UnmarshalJSON([]byte(resp), &blk2) assert.Nil(t, err) assert.NotEmpty(t, blk2.Block.Height) cleanup() stopCli() }
explode_data.jsonl/44706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 29528, 48042, 4713, 1155, 353, 8840, 836, 8, 341, 43343, 66, 13, 43861, 3640, 284, 220, 22, 15, 15, 15, 198, 197, 6878, 8358, 21290, 1669, 1532, 641, 10642, 51, 1659, 67791, 1955, 1155, 11, 825, 2208, 11613, 1955, 84652, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoad(t *testing.T) { gou.APIs = make(map[string]*gou.API) Load(config.Conf) LoadFrom("not a path", "404.") check(t) }
explode_data.jsonl/71107
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 59 }
[ 2830, 3393, 5879, 1155, 353, 8840, 836, 8, 341, 3174, 283, 24922, 82, 284, 1281, 9147, 14032, 8465, 70, 283, 24922, 340, 197, 5879, 8754, 4801, 69, 340, 197, 5879, 3830, 445, 1921, 264, 1815, 497, 330, 19, 15, 19, 13053, 25157, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPolygon(t *testing.T) { p := orb.Polygon{ {{0, 0}, {1, 0}, {1, 1}, {0, 0}}, {{0, 0}, {0, 0}}, } p = DouglasPeucker(0).Polygon(p) if len(p) != 1 { t.Errorf("should remove empty ring") } }
explode_data.jsonl/40644
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 37619, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 36366, 1069, 19582, 515, 197, 197, 2979, 15, 11, 220, 15, 2137, 314, 16, 11, 220, 15, 2137, 314, 16, 11, 220, 16, 2137, 314, 15, 11, 220, 15, 22050, 197, 197, 2979, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRolesAdmissionHandlerClaims(t *testing.T) { requests := []struct { Matches map[string]string Request fakeRequest }{ // jose.StringClaim test { Matches: map[string]string{"cal": "test"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, ExpectedCode: http.StatusForbidden, }, }, { Matches: map[string]string{"item": "^tes$"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, ExpectedCode: http.StatusForbidden, }, }, { Matches: map[string]string{"item": "^tes$"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": "tes"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, }, { Matches: map[string]string{"item": "not_match"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": "test"}, ExpectedCode: http.StatusForbidden, }, }, { Matches: map[string]string{"item": "^test", "found": "something"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": "test"}, ExpectedCode: http.StatusForbidden, }, }, { Matches: map[string]string{"item": "^test", "found": "something"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{ "item": "tester", "found": "something", }, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, }, { Matches: map[string]string{"item": ".*"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": "test"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, }, { Matches: map[string]string{"item": "^t.*$"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": "test"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, }, // jose.StringsClaim test { Matches: map[string]string{"item": "^t.*t"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": []string{"nonMatchingClaim", "test", "anotherNonMatching"}}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, }, { Matches: 
map[string]string{"item": "^t.*t"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": []string{"1test", "2test", "3test"}}, ExpectedProxy: false, ExpectedCode: http.StatusForbidden, }, }, { Matches: map[string]string{"item": "^t.*t"}, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{"item": []string{}}, ExpectedProxy: false, ExpectedCode: http.StatusForbidden, }, }, { Matches: map[string]string{ "item1": "^t.*t", "item2": "^another", }, Request: fakeRequest{ URI: testAdminURI, HasToken: true, TokenClaims: jose.Claims{ "item1": []string{"randomItem", "test"}, "item2": []string{"randomItem", "anotherItem"}, "item3": []string{"randomItem2", "anotherItem3"}, }, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, }, } for _, c := range requests { cfg := newFakeKeycloakConfig() cfg.Resources = []*Resource{{URL: "/admin*", Methods: allHTTPMethods}} cfg.MatchClaims = c.Matches newFakeProxy(cfg).RunTests(t, []fakeRequest{c.Request}) } }
explode_data.jsonl/14765
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1704 }
[ 2830, 3393, 25116, 2589, 2728, 3050, 51133, 1155, 353, 8840, 836, 8, 341, 23555, 82, 1669, 3056, 1235, 341, 197, 197, 42470, 2415, 14032, 30953, 198, 197, 73806, 12418, 1900, 198, 197, 59403, 197, 197, 322, 89644, 6431, 45544, 1273, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestShouldIgnorePasswordPatterns(t *testing.T) { results := helpers.NewDetectionResults() content := []byte("\"password\" : UnsafePassword") filename := "secret.txt" additions := []gitrepo.Addition{gitrepo.NewAddition(filename, content)} fileIgnoreConfig := talismanrc.FileIgnoreConfig{filename, "833b6c24c8c2c5c7e1663226dc401b29c005492dc76a1150fc0e0f07f29d4cc3", []string{"filecontent"}, []string{}} ignores := &talismanrc.TalismanRC{FileIgnoreConfig: []talismanrc.FileIgnoreConfig{fileIgnoreConfig}} NewPatternDetector(customPatterns).Test(helpers.NewChecksumCompare(nil, utility.DefaultSHA256Hasher{}, talismanrc.NewTalismanRC(nil)), additions, ignores, results, func() {}) assert.True(t, results.Successful(), "Expected file %s to be ignored by pattern", filename) }
explode_data.jsonl/82119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 14996, 12497, 4876, 57656, 1155, 353, 8840, 836, 8, 341, 55497, 1669, 30187, 7121, 54817, 9801, 741, 27751, 1669, 3056, 3782, 38915, 3833, 2105, 549, 73067, 4876, 1138, 66434, 1669, 330, 20474, 3909, 698, 12718, 5930, 1669, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcileCertificateUpdate(t *testing.T) { var reconciler *Reconciler ctx, _, _, _, cancel := newTestSetup(t, func(r *Reconciler) { reconciler = r }) defer cancel() r := Route("test-ns", "test-route") certificate := newCerts([]string{"old.example.com"}, r) if _, err := reconciler.reconcileCertificate(ctx, r, certificate); err != nil { t.Errorf("Unexpected error: %v", err) } storedCert := getCertificateFromClient(ctx, t, certificate) fakecertinformer.Get(ctx).Informer().GetIndexer().Add(storedCert) newCertificate := newCerts([]string{"new.example.com"}, r) if _, err := reconciler.reconcileCertificate(ctx, r, newCertificate); err != nil { t.Errorf("Unexpected error: %v", err) } updated := getCertificateFromClient(ctx, t, newCertificate) if diff := cmp.Diff(newCertificate, updated); diff != "" { t.Errorf("Unexpected diff (-want +got): %s", diff) } if diff := cmp.Diff(certificate, updated); diff == "" { t.Error("Expected difference, but found none") } }
explode_data.jsonl/64527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 368 }
[ 2830, 3393, 693, 40446, 457, 33202, 4289, 1155, 353, 8840, 836, 8, 341, 2405, 31445, 5769, 353, 693, 40446, 5769, 198, 20985, 11, 8358, 8358, 8358, 9121, 1669, 501, 2271, 21821, 1155, 11, 2915, 2601, 353, 693, 40446, 5769, 8, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoad(t *testing.T) { dir := testutils.InitTestEnv(ModuleName, t) c, mgr, driver := initTestContext(t, dir) defer driver.Close() defer c.Close() defer mgr.Stop() schema := catalog.MockSchemaAll(14) schema.BlockMaxRows = 10000 schema.SegmentMaxBlocks = 10 schema.PrimaryKey = 13 bat := compute.MockBatch(schema.Types(), 60000, int(schema.PrimaryKey), nil) bats := compute.SplitBatch(bat, 5) txn := mgr.StartTxn(nil) db, _ := txn.CreateDatabase("db") rel, _ := db.CreateRelation(schema) table, _ := txn.GetStore().(*txnStore).getOrSetTable(rel.ID()) tbl := table.(*txnTable) err := tbl.Append(bats[0]) assert.Nil(t, err) t.Log(tbl.store.nodesMgr.String()) v, err := tbl.GetLocalValue(100, 0) assert.Nil(t, err) t.Log(tbl.store.nodesMgr.String()) t.Logf("Row %d, Col %d, Val %v", 100, 0, v) }
explode_data.jsonl/14643
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 5879, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 1273, 6031, 26849, 2271, 14359, 75295, 675, 11, 259, 340, 1444, 11, 57897, 11, 5579, 1669, 2930, 2271, 1972, 1155, 11, 5419, 340, 16867, 5579, 10421, 741, 16867, 272, 10421, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPowerbuilderParser(t *testing.T) { // TODO(bramp): Run this test with and without p.BuildParseTrees for _, file := range examples { input, err := newCharStream(file) if err != nil { t.Errorf("Failed to open example file: %s", err) } // Create the Lexer lexer := powerbuilder.NewpowerbuilderLexer(input) stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel) // Create the Parser p := powerbuilder.NewpowerbuilderParser(stream) p.BuildParseTrees = true p.AddErrorListener(internal.NewTestingErrorListener(t, file)) // Finally test p.Start_rule() // TODO(bramp): If there is a "file.tree", then compare the output // TODO(bramp): If there is a "file.errors", then check the error } }
explode_data.jsonl/54879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 14986, 17850, 6570, 1155, 353, 8840, 836, 8, 341, 197, 322, 5343, 41237, 1121, 1648, 6452, 419, 1273, 448, 323, 2041, 281, 25212, 14463, 79071, 271, 2023, 8358, 1034, 1669, 2088, 10295, 341, 197, 22427, 11, 1848, 1669, 501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_GetIssueIDsByRepoID(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) ids, err := GetIssueIDsByRepoID(1) assert.NoError(t, err) assert.Len(t, ids, 5) }
explode_data.jsonl/46836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 13614, 42006, 30466, 1359, 25243, 915, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 19905, 28770, 3380, 2271, 5988, 12367, 197, 3365, 11, 1848, 1669, 2126, 42006, 30466, 1359, 25243, 915, 7, 16, 340, 6948, 35699, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSpawnTasksParse_NoInput(t *testing.T) { Convey(`Make sure that Parse handles no input JSON given.`, t, func() { c := spawnTasksRun{} c.Init(auth.Options{}) err := c.GetFlags().Parse([]string{"-server", "http://localhost:9050"}) err = c.Parse([]string{}) So(err, ShouldErrLike, "input JSON") }) }
explode_data.jsonl/27125
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 41005, 25449, 14463, 36989, 2505, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 5809, 8078, 2704, 429, 14775, 13469, 902, 1946, 4718, 2661, 13, 7808, 259, 11, 2915, 368, 341, 197, 1444, 1669, 18042, 25449, 6727, 16094, 197, 144...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOptBindAddr(t *testing.T) { assert := assert.New(t) var app App assert.Nil(OptBindAddr(":9999")(&app)) assert.Equal(":9999", app.Config.BindAddr) }
explode_data.jsonl/7706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 21367, 9950, 13986, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 2405, 906, 1845, 198, 6948, 59678, 19238, 417, 9950, 13986, 18893, 24, 24, 24, 24, 899, 2099, 676, 1171, 6948, 12808, 18893, 24, 24, 24, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestIntegration_ExternalAdapter_Copy runs a job that calls an external
// adapter (stubbed with an httptest server) followed by a "copy" task,
// and checks that the adapter's price value flows through to the run
// result. The stub also asserts the shape of the request the node sends.
func TestIntegration_ExternalAdapter_Copy(t *testing.T) {
	t.Parallel()
	rpcClient, gethClient, _, assertMockCalls := cltest.NewEthMocksWithStartupAssertions(t)
	defer assertMockCalls()
	app, cleanup := cltest.NewApplication(t,
		eth.NewClientWith(rpcClient, gethClient),
	)
	defer cleanup()
	// BRIDGE_RESPONSE_URL controls the responseURL the node advertises
	// to bridges; the stub below asserts it is propagated.
	bridgeURL := cltest.WebURL(t, "https://test.chain.link/always")
	app.Store.Config.Set("BRIDGE_RESPONSE_URL", bridgeURL)
	require.NoError(t, app.Start())

	eaPrice := "1234"
	eaQuote := "USD"
	eaResponse := fmt.Sprintf(`{"data":{"price": "%v", "quote": "%v"}}`, eaPrice, eaQuote)
	// Stub external adapter: validates the incoming bridge request and
	// replies with a fixed price/quote payload.
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		require.Equal(t, "POST", r.Method)
		require.Equal(t, "/", r.URL.Path)
		b, err := ioutil.ReadAll(r.Body)
		require.NoError(t, err)
		body := cltest.JSONFromBytes(t, b)
		data := body.Get("data")
		require.True(t, data.Exists())
		// bodyParam comes from the job spec fixture and must reach the
		// adapter unchanged.
		bodyParam := data.Get("bodyParam")
		require.True(t, bodyParam.Exists())
		require.Equal(t, true, bodyParam.Bool())
		url := body.Get("responseURL")
		require.Contains(t, url.String(), "https://test.chain.link/always/v2/runs")
		w.WriteHeader(http.StatusOK)
		io.WriteString(w, eaResponse)
	}))
	defer ts.Close()

	// Register the stub as bridge "assetPrice", create the fixture job,
	// then run it copying out the "price" field.
	bridgeJSON := fmt.Sprintf(`{"name":"assetPrice","url":"%v"}`, ts.URL)
	cltest.CreateBridgeTypeViaWeb(t, app, bridgeJSON)
	j := cltest.FixtureCreateJobViaWeb(t, app, "fixtures/web/bridge_type_copy_job.json")
	jr := cltest.WaitForJobRunToComplete(t, app.Store,
		cltest.CreateJobRunViaWeb(t, app, j, `{"copyPath": ["price"]}`))
	// Task 0 is the bridge ("assetprice"), task 1 the copy.
	tr := jr.TaskRuns[0]
	assert.Equal(t, "assetprice", tr.TaskSpec.Type.String())
	tr = jr.TaskRuns[1]
	assert.Equal(t, "copy", tr.TaskSpec.Type.String())
	value := cltest.MustResultString(t, tr.Result)
	assert.Equal(t, eaPrice, value)
}
explode_data.jsonl/75895
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 717 }
[ 2830, 3393, 52464, 62, 25913, 5940, 77637, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 7000, 3992, 2959, 11, 633, 71, 2959, 11, 8358, 2060, 11571, 55292, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1155, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_RtmTokenBuilder(t *testing.T) { appID := "970CA35de60c44645bbae8a215061b33" appCertificate := "5CFd2fd1755d40ecb72977518be15d3b" userAccount := "test_user" expiredTs := uint32(1446455471) result, err := BuildToken(appID, appCertificate, userAccount, RoleRtmUser, expiredTs) if err != nil { t.Error(err) } token := accesstoken.AccessToken{} token.FromString(result) if token.Message[accesstoken.KLoginRtm] != expiredTs { t.Error("no kLoginRtm ts") } }
explode_data.jsonl/68051
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 2568, 13730, 3323, 3297, 1155, 353, 8840, 836, 8, 341, 262, 906, 915, 1669, 330, 24, 22, 15, 5049, 18, 20, 450, 21, 15, 66, 19, 19, 21, 19, 20, 65, 4645, 68, 23, 64, 17, 16, 20, 15, 21, 16, 65, 18, 18, 698, 262...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestJSONBasic(t *testing.T) { render := New() h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { render.JSON(w, 299, Greeting{"hello", "world"}) }) res := httptest.NewRecorder() req, _ := http.NewRequest("GET", "/foo", nil) h.ServeHTTP(res, req) expect(t, res.Code, 299) expect(t, res.Header().Get(ContentType), ContentJSON+"; charset=UTF-8") expect(t, res.Body.String(), "{\"one\":\"hello\",\"two\":\"world\"}") }
explode_data.jsonl/3669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 5370, 15944, 1155, 353, 8840, 836, 8, 341, 33921, 1669, 1532, 2822, 9598, 1669, 1758, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 33921, 18009, 3622, 11, 220, 17, 24, 24, 11, 479, 43632, 4913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestArrayIndex(t *testing.T) { const record = `{x:[1,2,3],i:1 (uint16)} (=0)` testSuccessful(t, "x[0]", record, zint64(1)) testSuccessful(t, "x[1]", record, zint64(2)) testSuccessful(t, "x[2]", record, zint64(3)) testSuccessful(t, "x[i]", record, zint64(2)) testSuccessful(t, "i+1", record, zint64(2)) testSuccessful(t, "x[i+1]", record, zint64(3)) }
explode_data.jsonl/2312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 1857, 1552, 1155, 353, 8840, 836, 8, 341, 4777, 3255, 284, 53692, 87, 7259, 16, 11, 17, 11, 18, 1125, 72, 25, 16, 320, 2496, 16, 21, 9139, 38738, 15, 8, 19324, 18185, 36374, 1155, 11, 330, 87, 58, 15, 19076, 3255, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCasts covers the cast primitives for every numeric width plus ip,
// net, time, and string: in-range values convert, out-of-range or
// unparsable values yield a structured "cannot cast" error.
func TestCasts(t *testing.T) {
	// Test casts to uint8 (byte)
	testSuccessful(t, "uint8(10)", "", zed.Value{zed.TypeUint8, zed.EncodeUint(10)})
	testSuccessful(t, "uint8(-1)", "", ZSON(`error("cannot cast -1 to type uint8")`))
	testSuccessful(t, "uint8(300)", "", ZSON(`error("cannot cast 300 to type uint8")`))
	testSuccessful(t, `uint8("foo")`, "", ZSON(`error("cannot cast \"foo\" to type uint8")`))
	// Test casts to int16
	testSuccessful(t, "int16(10)", "", ZSON(`10(int16)`))
	testSuccessful(t, "int16(-33000)", "", ZSON(`error("cannot cast -33000 to type int16")`))
	testSuccessful(t, "int16(33000)", "", ZSON(`error("cannot cast 33000 to type int16")`))
	testSuccessful(t, `int16("foo")`, "", ZSON(`error("cannot cast \"foo\" to type int16")`))
	// Test casts to uint16
	testSuccessful(t, "uint16(10)", "", zed.Value{zed.TypeUint16, zed.EncodeUint(10)})
	testSuccessful(t, "uint16(-1)", "", ZSON(`error("cannot cast -1 to type uint16")`))
	testSuccessful(t, "uint16(66000)", "", ZSON(`error("cannot cast 66000 to type uint16")`))
	testSuccessful(t, `uint16("foo")`, "", ZSON(`error("cannot cast \"foo\" to type uint16")`))
	// Test casts to int32
	testSuccessful(t, "int32(10)", "", zed.Value{zed.TypeInt32, zed.EncodeInt(10)})
	testSuccessful(t, "int32(-2200000000)", "", ZSON(`error("cannot cast -2200000000 to type int32")`))
	testSuccessful(t, "int32(2200000000)", "", ZSON(`error("cannot cast 2200000000 to type int32")`))
	testSuccessful(t, `int32("foo")`, "", ZSON(`error("cannot cast \"foo\" to type int32")`))
	// Test casts to uint32
	testSuccessful(t, "uint32(10)", "", zed.Value{zed.TypeUint32, zed.EncodeUint(10)})
	testSuccessful(t, "uint32(-1)", "", ZSON(`error("cannot cast -1 to type uint32")`))
	testSuccessful(t, "uint32(4300000000)", "", ZSON(`error("cannot cast 4300000000 to type uint32")`))
	testSuccessful(t, `uint32("foo")`, "", ZSON(`error("cannot cast \"foo\" to type uint32")`))
	// Test casts to uint64
	testSuccessful(t, "uint64(10)", "", zuint64(10))
	testSuccessful(t, "uint64(-1)", "", ZSON(`error("cannot cast -1 to type uint64")`))
	testSuccessful(t, `uint64("foo")`, "", ZSON(`error("cannot cast \"foo\" to type uint64")`))
	// Test casts to float32
	testSuccessful(t, "float32(10)", "", zfloat32(10))
	testSuccessful(t, `float32("foo")`, "", ZSON(`error("cannot cast \"foo\" to type float32")`))
	// Test casts to float64
	testSuccessful(t, "float64(10)", "", zfloat64(10))
	testSuccessful(t, `float64("foo")`, "", ZSON(`error("cannot cast \"foo\" to type float64")`))
	// Test casts to ip
	testSuccessful(t, `ip("1.2.3.4")`, "", zip(t, "1.2.3.4"))
	testSuccessful(t, "ip(1234)", "", ZSON(`error("cannot cast 1234 to type ip")`))
	testSuccessful(t, `ip("not an address")`, "", ZSON(`error("cannot cast \"not an address\" to type ip")`))
	// Test casts to net
	testSuccessful(t, `net("1.2.3.0/24")`, "", znet(t, "1.2.3.0/24"))
	testSuccessful(t, "net(1234)", "", ZSON(`error("cannot cast 1234 to type net")`))
	testSuccessful(t, `net("not an address")`, "", ZSON(`error("cannot cast \"not an address\" to type net")`))
	testSuccessful(t, `net(1.2.3.4)`, "", ZSON(`error("cannot cast 1.2.3.4 to type net")`))
	// Test casts to time
	const ts = 1589126400_000_000_000
	// float32 lacks sufficient precision to represent ts exactly.
	testSuccessful(t, "time(float32(1589126400000000000))", "", *zed.NewTime(nano.Ts(float32(ts))))
	testSuccessful(t, "time(float64(1589126400000000000))", "", *zed.NewTime(ts))
	testSuccessful(t, "time(1589126400000000000)", "", *zed.NewTime(ts))
	testSuccessful(t, `time("1589126400000000000")`, "", *zed.NewTime(ts))
	// Casts to string, plus string-to-number/ip round trips and their
	// failure modes.
	testSuccessful(t, "string(1.2)", "", zstring("1.2"))
	testSuccessful(t, "string(5)", "", zstring("5"))
	testSuccessful(t, "string(1.2.3.4)", "", zstring("1.2.3.4"))
	testSuccessful(t, `int64("1")`, "", zint64(1))
	testSuccessful(t, `int64("-1")`, "", zint64(-1))
	testSuccessful(t, `float32("5.5")`, "", zfloat32(5.5))
	testSuccessful(t, `float64("5.5")`, "", zfloat64(5.5))
	testSuccessful(t, `ip("1.2.3.4")`, "", zaddr("1.2.3.4"))
	testSuccessful(t, "ip(1)", "", ZSON(`error("cannot cast 1 to type ip")`))
	testSuccessful(t, `int64("abc")`, "", ZSON(`error("cannot cast \"abc\" to type int64")`))
	testSuccessful(t, `float32("abc")`, "", ZSON(`error("cannot cast \"abc\" to type float32")`))
	testSuccessful(t, `float64("abc")`, "", ZSON(`error("cannot cast \"abc\" to type float64")`))
	testSuccessful(t, `ip("abc")`, "", ZSON(`error("cannot cast \"abc\" to type ip")`))
}
explode_data.jsonl/2315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1817 }
[ 2830, 3393, 34, 11757, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 56033, 311, 4922, 198, 18185, 36374, 1155, 11, 330, 2496, 23, 7, 16, 15, 11583, 7342, 1147, 291, 6167, 90, 15905, 10184, 21570, 23, 11, 1147, 291, 50217, 21570, 7,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientCredentials_HandleTokenEndpointRequest(t *testing.T) { ctrl := gomock.NewController(t) store := internal.NewMockClientCredentialsGrantStorage(ctrl) chgen := internal.NewMockAccessTokenStrategy(ctrl) areq := internal.NewMockAccessRequester(ctrl) defer ctrl.Finish() h := ClientCredentialsGrantHandler{ HandleHelper: &HandleHelper{ AccessTokenStorage: store, AccessTokenStrategy: chgen, AccessTokenLifespan: time.Hour, }, ScopeStrategy: fosite.HierarchicScopeStrategy, } for k, c := range []struct { description string mock func() req *http.Request expectErr error }{ { description: "should fail because not responsible", expectErr: fosite.ErrUnknownRequest, mock: func() { areq.EXPECT().GetGrantTypes().Return(fosite.Arguments{""}) }, }, { description: "should pass", mock: func() { areq.EXPECT().GetSession().Return(new(fosite.DefaultSession)) areq.EXPECT().GetGrantTypes().Return(fosite.Arguments{"client_credentials"}) areq.EXPECT().GetRequestedScopes().Return([]string{"foo", "bar", "baz.bar"}) areq.EXPECT().GetClient().Return(&fosite.DefaultClient{ GrantTypes: fosite.Arguments{"client_credentials"}, Scopes: []string{"foo", "bar", "baz"}, }) }, }, } { t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { c.mock() err := h.HandleTokenEndpointRequest(nil, areq) if c.expectErr != nil { require.EqualError(t, err, c.expectErr.Error()) } else { require.NoError(t, err) } }) } }
explode_data.jsonl/41175
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 645 }
[ 2830, 3393, 2959, 27025, 42714, 3323, 27380, 1900, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 57279, 1669, 5306, 7121, 11571, 2959, 27025, 67971, 5793, 62100, 340, 23049, 4370, 1669, 5306, 7121, 1157...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestModuleMarkdown_renderMarkDownHandler(t *testing.T) { m := prepareModule() reqStrs := []string{"testcase0", "testcase1"} typeStrs := []string{"default"} for _, str := range reqStrs { for _, typeStr := range typeStrs { mdPath := "./testdata/" + str + ".md" targetPath := "./testdata/" + str + "_" + typeStr + ".output" urlPath := "/" + typeStr req := prepareRequest("unittest", urlPath) res := prepareResponse(mdPath) m.renderMarkDownHandler(req, res) got, err := ioutil.ReadAll(res.Body) if err != nil { t.Errorf("ModuleMarkdown.TestModuleMarkdown_renderMarkDownHandler() error = %v", err) } want, err := ioutil.ReadFile(targetPath) if !reflect.DeepEqual(got, want) { t.Errorf("ModuleMarkdown.TestModuleMarkdown_renderMarkDownHandler(), got[%s], want[%s]", string(got), string(want)) } if int64(len(want)) != res.ContentLength { t.Errorf("ModuleMarkdown.TestModuleMarkdown_renderMarkDownHandler() got[%d], want[%d]", res.ContentLength, len(want)) } } } // test invalid response mdPath := "./testdata/testcase0.md" responses := prepareabnormalResponse(mdPath) for _, res := range responses { err := m.checkResponse(res) if err == nil { t.Errorf("ModuleMarkdown.TestModuleMarkdown_checkResponse() got[nil], want[%s]", err) } } // check not exist product urlPath := "/default" req := prepareRequest("not_exists", urlPath) res := prepareResponse(mdPath) code := m.renderMarkDownHandler(req, res) if code != bfe_module.BfeHandlerGoOn { t.Errorf("ModuleMarkdown.TestModuleMarkdown_checkResponse() got[%d], want[%d]", code, bfe_module.BfeHandlerGoOn) } }
explode_data.jsonl/3778
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 3332, 68005, 22781, 8949, 4454, 3050, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 10549, 3332, 741, 24395, 2580, 82, 1669, 3056, 917, 4913, 1944, 5638, 15, 497, 330, 1944, 5638, 16, 16707, 13158, 2580, 82, 1669, 3056, 917, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
// TestGetApps talks to a live Cloud Foundry endpoint (configured via
// environment) and checks app lookup through the hub client, both with
// and without the cache, including the not-found error path.
func TestGetApps(t *testing.T) {
	var conf Config
	err := common.MustNewConfigFrom(cftest.GetConfigFromEnv(t)).Unpack(&conf)
	require.NoError(t, err)

	log := logp.NewLogger("cloudfoundry")
	hub := NewHub(&conf, "filebeat", log)
	client, err := hub.Client()
	require.NoError(t, err)

	apps, err := client.ListApps()
	require.NoError(t, err)
	t.Logf("%d applications available", len(apps))

	t.Run("request one of the available applications", func(t *testing.T) {
		// Skip rather than fail when the account has no apps; the test
		// needs at least one real GUID to look up.
		if len(apps) == 0 {
			t.Skip("no apps in account?")
		}
		client, err := hub.ClientWithCache()
		require.NoError(t, err)
		defer client.Close()
		guid := apps[0].Guid
		app, err := client.GetAppByGuid(guid)
		assert.Equal(t, guid, app.Guid)
		assert.NoError(t, err)
	})

	t.Run("handle error when application is not available", func(t *testing.T) {
		client, err := hub.ClientWithCache()
		require.NoError(t, err)
		defer client.Close()
		// testNotExists asserts that an unknown GUID yields a typed
		// app-not-found error and no app.
		testNotExists := func(t *testing.T) {
			app, err := client.GetAppByGuid("notexists")
			assert.Nil(t, app)
			assert.Error(t, err)
			assert.True(t, cfclient.IsAppNotFoundError(err), "Error found: %v", err)
		}
		var firstTimeDuration time.Duration
		t.Run("first call", func(t *testing.T) {
			startTime := time.Now()
			testNotExists(t)
			firstTimeDuration = time.Now().Sub(startTime)
		})
		t.Run("second call, in cache, faster, same response", func(t *testing.T) {
			// NOTE(review): this asserts every cached lookup is strictly
			// faster than the first network call — plausibly flaky on a
			// loaded machine or very fast endpoint; confirm in CI.
			for i := 0; i < 10; i++ {
				startTime := time.Now()
				testNotExists(t)
				require.True(t, firstTimeDuration > time.Now().Sub(startTime))
			}
		})
	})
}
explode_data.jsonl/22047
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 622 }
[ 2830, 3393, 1949, 53602, 1155, 353, 8840, 836, 8, 341, 2405, 2335, 5532, 198, 9859, 1669, 4185, 50463, 3564, 2648, 3830, 1337, 723, 477, 2234, 2648, 3830, 14359, 1155, 4579, 1806, 4748, 2099, 6135, 340, 17957, 35699, 1155, 11, 1848, 692...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFlagNameCompletionInGoWithDesc(t *testing.T) { rootCmd := &Command{ Use: "root", Run: emptyRun, } childCmd := &Command{ Use: "childCmd", Short: "first command", Run: emptyRun, } rootCmd.AddCommand(childCmd) rootCmd.Flags().IntP("first", "f", -1, "first flag\nlonger description for flag") rootCmd.PersistentFlags().BoolP("second", "s", false, "second flag") childCmd.Flags().String("subFlag", "", "sub flag") // Test that flag names are not shown if the user has not given the '-' prefix output, err := executeCommand(rootCmd, ShellCompRequestCmd, "") if err != nil { t.Errorf("Unexpected error: %v", err) } expected := strings.Join([]string{ "childCmd\tfirst command", "completion\tgenerate the autocompletion script for the specified shell", "help\tHelp about any command", ":4", "Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } // Test that flag names are completed output, err = executeCommand(rootCmd, ShellCompRequestCmd, "-") if err != nil { t.Errorf("Unexpected error: %v", err) } expected = strings.Join([]string{ "--first\tfirst flag", "-f\tfirst flag", "--second\tsecond flag", "-s\tsecond flag", ":4", "Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } // Test that flag names are completed when a prefix is given output, err = executeCommand(rootCmd, ShellCompRequestCmd, "--f") if err != nil { t.Errorf("Unexpected error: %v", err) } expected = strings.Join([]string{ "--first\tfirst flag", ":4", "Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } // Test that flag names are completed in a sub-cmd output, err = executeCommand(rootCmd, ShellCompRequestCmd, "childCmd", "-") if err != nil { t.Errorf("Unexpected error: %v", err) } expected = strings.Join([]string{ 
"--second\tsecond flag", "-s\tsecond flag", "--subFlag\tsub flag", ":4", "Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } }
explode_data.jsonl/43741
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 834 }
[ 2830, 3393, 12135, 675, 33190, 641, 10850, 2354, 11065, 1155, 353, 8840, 836, 8, 341, 33698, 15613, 1669, 609, 4062, 515, 197, 95023, 25, 330, 2888, 756, 197, 85952, 25, 4287, 6727, 345, 197, 532, 58391, 15613, 1669, 609, 4062, 515, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9