text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestTrackUnknownCargo(t *testing.T) { var cargos mockCargoRepository var events mock.HandlingEventRepository events.QueryHandlingHistoryFn = func(cargo.TrackingID) cargo.HandlingHistory { return cargo.HandlingHistory{} } s := NewService(&cargos, &events) logger := log.NewLogfmtLogger(ioutil.Discard) h := MakeHandler(s, logger) req, _ := http.NewRequest("GET", "http://example.com/tracking/v1/cargos/not_found", nil) rec := httptest.NewRecorder() h.ServeHTTP(rec, req) if rec.Code != http.StatusNotFound { t.Errorf("rec.Code = %d; want = %d", rec.Code, http.StatusNotFound) } wantContent := "application/json; charset=utf-8" if got := rec.Header().Get("Content-Type"); got != wantContent { t.Errorf("Content-Type = %q; want = %q", got, wantContent) } var response map[string]interface{} if err := json.NewDecoder(rec.Body).Decode(&response); err != nil { t.Error(err) } err, ok := response["error"] if !ok { t.Error("missing error") } if err != "unknown cargo" { t.Errorf(`"error": %q; want = %q`, err, "unknown cargo") } }
explode_data.jsonl/69182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 15667, 13790, 98228, 1155, 353, 8840, 836, 8, 341, 2405, 61662, 436, 7860, 98228, 4624, 271, 2405, 4357, 7860, 35308, 2718, 1556, 4624, 198, 90873, 15685, 38606, 13424, 24911, 284, 2915, 1337, 12088, 8240, 8985, 915, 8, 25652,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetBulkSecret(t *testing.T) { fakeStore := daprt.FakeSecretStore{} fakeStores := map[string]secretstores.SecretStore{ "store1": fakeStore, } secretsConfiguration := map[string]config.SecretsScope{ "store1": { DefaultAccess: config.AllowAccess, DeniedSecrets: []string{"not-allowed"}, }, } expectedResponse := "life is good" testCases := []struct { testName string storeName string key string errorExcepted bool expectedResponse string expectedError codes.Code }{ { testName: "Good Key from unrestricted store", storeName: "store1", key: "good-key", errorExcepted: false, expectedResponse: expectedResponse, }, } // Setup Dapr API server fakeAPI := &api{ id: "fakeAPI", secretStores: fakeStores, secretsConfiguration: secretsConfiguration, } // Run test server port, _ := freeport.GetFreePort() server := startDaprAPIServer(port, fakeAPI, "") defer server.Stop() // Create gRPC test client clientConn := createTestClient(port) defer clientConn.Close() // act client := runtimev1pb.NewDaprClient(clientConn) for _, tt := range testCases { t.Run(tt.testName, func(t *testing.T) { req := &runtimev1pb.GetBulkSecretRequest{ StoreName: tt.storeName, } resp, err := client.GetBulkSecret(context.Background(), req) if !tt.errorExcepted { assert.NoError(t, err, "Expected no error") assert.Equal(t, resp.Data[tt.key].Secrets[tt.key], tt.expectedResponse, "Expected responses to be same") } else { assert.Error(t, err, "Expected error") assert.Equal(t, tt.expectedError, status.Code(err)) } }) } }
explode_data.jsonl/21731
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 709 }
[ 2830, 3393, 1949, 88194, 19773, 1155, 353, 8840, 836, 8, 341, 1166, 726, 6093, 1669, 81521, 3342, 991, 726, 19773, 6093, 16094, 1166, 726, 69026, 1669, 2415, 14032, 60, 20474, 43469, 74779, 6093, 515, 197, 197, 1, 4314, 16, 788, 12418, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDBStats_Sub(t *testing.T) { var a, b bolt.Stats a.TxStats.PageCount = 3 a.FreePageN = 4 b.TxStats.PageCount = 10 b.FreePageN = 14 diff := b.Sub(&a) if diff.TxStats.PageCount != 7 { t.Fatalf("unexpected TxStats.PageCount: %d", diff.TxStats.PageCount) } // free page stats are copied from the receiver and not subtracted if diff.FreePageN != 14 { t.Fatalf("unexpected FreePageN: %d", diff.FreePageN) } }
explode_data.jsonl/27489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 3506, 16635, 36359, 1155, 353, 8840, 836, 8, 341, 2405, 264, 11, 293, 31842, 7758, 1862, 198, 11323, 81362, 16635, 17558, 2507, 284, 220, 18, 198, 11323, 52229, 2665, 45, 284, 220, 19, 198, 2233, 81362, 16635, 17558, 2507, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnlimitedPerChannelLimits(t *testing.T) { opts := GetDefaultOptions() opts.StoreLimits.MaxChannels = 2 // Set very small global limits opts.StoreLimits.MaxMsgs = 1 opts.StoreLimits.MaxBytes = 1 opts.StoreLimits.MaxAge = time.Millisecond opts.StoreLimits.MaxSubscriptions = 1 // Add a channel that has all unlimited values cl := &stores.ChannelLimits{} cl.MaxMsgs = -1 cl.MaxBytes = -1 cl.MaxAge = -1 cl.MaxSubscriptions = -1 opts.StoreLimits.AddPerChannel("foo", cl) s := runServerWithOpts(t, opts, nil) defer s.Shutdown() sc := NewDefaultConnection(t) defer sc.Close() // Check that we can send more than 1 message of more than 1 byte total := 10 for i := 0; i < total; i++ { if err := sc.Publish("foo", []byte("hello")); err != nil { t.Fatalf("Unexpected error on publish: %v", err) } } count := int32(0) ch := make(chan bool) cb := func(_ *stan.Msg) { if c := atomic.AddInt32(&count, 1); c == int32(2*total) { ch <- true } } for i := 0; i < 2; i++ { if _, err := sc.Subscribe("foo", cb, stan.DeliverAllAvailable()); err != nil { t.Fatalf("Unexpected error on subscribe: %v", err) } } if err := Wait(ch); err != nil { t.Fatal("Did not get our messages") } // Wait for more than the global limit MaxAge and verify messages // are still there time.Sleep(15 * time.Millisecond) s.mu.RLock() n, _, _ := s.channels.msgsState("foo") s.mu.RUnlock() if n != total { t.Fatalf("Should be %v messages, store reports %v", total, n) } // Now use a channel not defined in PerChannel and we should be subject // to global limits. 
for i := 0; i < total; i++ { if err := sc.Publish("bar", []byte("hello")); err != nil { t.Fatalf("Unexpected error on publish: %v", err) } } if _, err := sc.Subscribe("bar", func(_ *stan.Msg) {}); err != nil { t.Fatalf("Unexpected error on subscribe: %v", err) } // This one should fail if _, err := sc.Subscribe("bar", func(_ *stan.Msg) {}); err == nil { t.Fatal("Expected to fail to subscribe, did not") } // Wait more than MaxAge time.Sleep(15 * time.Millisecond) // Messages should have all disappear s.mu.RLock() n, _, _ = s.channels.msgsState("bar") s.mu.RUnlock() if n != 0 { t.Fatalf("Expected 0 messages, store reports %v", n) } }
explode_data.jsonl/23097
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 881 }
[ 2830, 3393, 1806, 18235, 3889, 9629, 94588, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 2126, 3675, 3798, 741, 64734, 38047, 94588, 14535, 35925, 284, 220, 17, 198, 197, 322, 2573, 1602, 2613, 3644, 13388, 198, 64734, 38047, 94588, 14535, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestExchangeExists(t *testing.T) { newBase := Base{} newBase.AddAddress("someaddress", "LTC", "LTCWALLETTEST", 0.02) if !newBase.ExchangeExists("someaddress") { t.Error("Test Failed - portfolio_test.go - AddressExists error") } if newBase.ExchangeExists("bla") { t.Error("Test Failed - portfolio_test.go - AddressExists error") } }
explode_data.jsonl/33733
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 31564, 15575, 1155, 353, 8840, 836, 8, 341, 8638, 3978, 1669, 5351, 16094, 8638, 3978, 1904, 4286, 445, 14689, 4995, 497, 330, 43, 7749, 497, 330, 43, 7749, 54, 969, 20756, 10033, 497, 220, 15, 13, 15, 17, 340, 743, 753,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWalkPrefix(t *testing.T) { type testCase struct { path string expected []string } keys := []string{ "foobar", "foo/bar/baz", "foo/baz/bar", "foo/zip/zap", "zipzap", } tr := New() for _, k := range keys { tr.Insert([]byte(k), nil) } if tr.Len() != len(keys) { t.Fatalf("tree is not an expected size. got: %d, expected: %d", tr.Len(), len(keys)) } for _, tc := range []testCase{ {path: "f", expected: []string{"foo/bar/baz", "foo/baz/bar", "foo/zip/zap", "foobar"}}, {path: "foo", expected: []string{"foo/bar/baz", "foo/baz/bar", "foo/zip/zap", "foobar"}}, {path: "foob", expected: []string{"foobar"}}, {path: "foo/", expected: []string{"foo/bar/baz", "foo/baz/bar", "foo/zip/zap"}}, {path: "foo/b", expected: []string{"foo/bar/baz", "foo/baz/bar"}}, {path: "foo/ba", expected: []string{"foo/bar/baz", "foo/baz/bar"}}, {path: "foo/bar", expected: []string{"foo/bar/baz"}}, {path: "foo/bar/baz", expected: []string{"foo/bar/baz"}}, {path: "foo/bar/bazoo", expected: []string{}}, {path: "z", expected: []string{"zipzap"}}, } { t.Run(fmt.Sprintf("prefix %s", string(tc.path)), func(t *testing.T) { result := []string{} tr.WalkPrefix([]byte(tc.path), func(k []byte, v interface{}) bool { result = append(result, string(k)) return false }) if !reflect.DeepEqual(result, tc.expected) { t.Errorf("returned an unexpected keys. got: %#v, expected: %#v", result, tc.expected) } }) } }
explode_data.jsonl/45927
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 657 }
[ 2830, 3393, 48849, 14335, 1155, 353, 8840, 836, 8, 341, 13158, 54452, 2036, 341, 197, 26781, 257, 914, 198, 197, 42400, 3056, 917, 198, 197, 630, 80112, 1669, 3056, 917, 515, 197, 197, 1, 50267, 756, 197, 197, 1, 7975, 49513, 3470, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileWatcherRename(t *testing.T) { interval := time.Millisecond filewatcher.InitialReadInterval = interval writeDelay := interval * 10 timeout := writeDelay * 20 payload1 := []byte("Hello, world!") payload2 := []byte("Bye, world!") dir, err := ioutil.TempDir("", "filewatcher_test_") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) path := filepath.Join(dir, "foo") // Delay writing the file go func() { time.Sleep(writeDelay) f, err := os.Create(path) if err != nil { t.Fatal(err) } defer f.Close() if _, err := f.Write(payload1); err != nil { t.Error(err) } }() ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() data, err := filewatcher.New( ctx, filewatcher.Config{ Path: path, Parser: parser, Logger: log.TestWrapper(t), }, ) if err != nil { t.Fatal(err) } defer data.Stop() compareBytesData(t, data.Get(), payload1) func() { newpath := path + ".bar" f, err := os.Create(newpath) if err != nil { t.Fatal(err) } defer func() { if err := f.Close(); err != nil { t.Fatal(err) } if err := os.Rename(newpath, path); err != nil { t.Fatal(err) } }() if _, err := f.Write(payload2); err != nil { t.Fatal(err) } }() // Give it some time to handle the file content change time.Sleep(interval * 10) compareBytesData(t, data.Get(), payload2) }
explode_data.jsonl/15406
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 608 }
[ 2830, 3393, 1703, 47248, 88757, 1155, 353, 8840, 836, 8, 341, 2084, 6152, 1669, 882, 71482, 198, 17661, 14321, 261, 32932, 4418, 10256, 284, 9873, 198, 24945, 20039, 1669, 9873, 353, 220, 16, 15, 198, 78395, 1669, 3270, 20039, 353, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLikeLike(t *testing.T) { convey.Convey("Like", t, func(ctx convey.C) { var ( c = context.Background() id = int64(77) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { res, err := d.Like(c, id) ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(res, convey.ShouldNotBeNil) }) }) }) }
explode_data.jsonl/11260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 12949, 12949, 1155, 353, 8840, 836, 8, 341, 37203, 5617, 4801, 5617, 445, 12949, 497, 259, 11, 2915, 7502, 20001, 727, 8, 341, 197, 2405, 2399, 298, 1444, 220, 284, 2266, 19047, 741, 298, 15710, 284, 526, 21, 19, 7, 22, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindAllIndex(t *testing.T) { for _, test := range findTests { testFindAllIndex(&test, MustCompile(test.pat).FindAllIndex([]byte(test.text), -1), t) } }
explode_data.jsonl/55089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 9885, 2403, 1552, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 1477, 18200, 341, 197, 18185, 9885, 2403, 1552, 2099, 1944, 11, 15465, 46126, 8623, 96534, 568, 9885, 2403, 1552, 10556, 3782, 8623, 2788, 701, 48...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestMatches(t *testing.T) { tests := []struct { name string repo string repos []string expected bool }{ { name: "repo exists - exact match", repo: "kubernetes/test-infra", repos: []string{ "kubernetes/kubernetes", "kubernetes/test-infra", "kubernetes/community", }, expected: true, }, { name: "repo exists - org match", repo: "kubernetes/test-infra", repos: []string{ "openshift/test-infra", "openshift/origin", "kubernetes-security", "kubernetes", }, expected: true, }, { name: "repo does not exist", repo: "kubernetes/website", repos: []string{ "openshift/test-infra", "openshift/origin", "kubernetes-security", "kubernetes/test-infra", "kubernetes/kubernetes", }, expected: false, }, } for _, test := range tests { t.Logf("running scenario %q", test.name) if got := matches(test.repo, test.repos); got != test.expected { t.Errorf("unexpected result: expected %t, got %t", test.expected, got) } } }
explode_data.jsonl/74654
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 486 }
[ 2830, 3393, 42470, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 271, 197, 17200, 5368, 220, 914, 198, 197, 17200, 966, 3056, 917, 271, 197, 42400, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLastStats(t *testing.T) { client1 := "1" client2 := "2" state := NewDefaultNetworkState() dSent := uint64(42) dRecv := uint64(133) dRetransmits := uint32(7) conn := ConnectionStats{ Pid: 123, Type: TCP, Family: AFINET, Source: util.AddressFromString("127.0.0.1"), Dest: util.AddressFromString("127.0.0.1"), SPort: 31890, DPort: 80, MonotonicSentBytes: 36, MonotonicRecvBytes: 24, MonotonicRetransmits: 2, } conn2 := conn conn2.MonotonicSentBytes += dSent conn2.MonotonicRecvBytes += dRecv conn2.MonotonicRetransmits += dRetransmits conn3 := conn2 conn3.MonotonicSentBytes += dSent conn3.MonotonicRecvBytes += dRecv conn3.MonotonicRetransmits += dRetransmits // First get, we should not have any connections stored conns := state.Connections(client1, latestEpochTime(), nil) assert.Equal(t, 0, len(conns)) // Same for an other client conns = state.Connections(client2, latestEpochTime(), nil) assert.Equal(t, 0, len(conns)) // We should have only one connection but with last stats equal to monotonic conns = state.Connections(client1, latestEpochTime(), []ConnectionStats{conn}) assert.Equal(t, 1, len(conns)) assert.Equal(t, conn.MonotonicSentBytes, conns[0].LastSentBytes) assert.Equal(t, conn.MonotonicRecvBytes, conns[0].LastRecvBytes) assert.Equal(t, conn.MonotonicRetransmits, conns[0].LastRetransmits) assert.Equal(t, conn.MonotonicSentBytes, conns[0].MonotonicSentBytes) assert.Equal(t, conn.MonotonicRecvBytes, conns[0].MonotonicRecvBytes) assert.Equal(t, conn.MonotonicRetransmits, conns[0].MonotonicRetransmits) // This client didn't collect the first connection so last stats = monotonic conns = state.Connections(client2, latestEpochTime(), []ConnectionStats{conn2}) assert.Equal(t, 1, len(conns)) assert.Equal(t, conn2.MonotonicSentBytes, conns[0].LastSentBytes) assert.Equal(t, conn2.MonotonicRecvBytes, conns[0].LastRecvBytes) assert.Equal(t, conn2.MonotonicRetransmits, conns[0].LastRetransmits) assert.Equal(t, conn2.MonotonicSentBytes, conns[0].MonotonicSentBytes) 
assert.Equal(t, conn2.MonotonicRecvBytes, conns[0].MonotonicRecvBytes) assert.Equal(t, conn2.MonotonicRetransmits, conns[0].MonotonicRetransmits) // client 1 should have conn3 - conn1 since it did not collected conn2 conns = state.Connections(client1, latestEpochTime(), []ConnectionStats{conn3}) assert.Equal(t, 1, len(conns)) assert.Equal(t, 2*dSent, conns[0].LastSentBytes) assert.Equal(t, 2*dRecv, conns[0].LastRecvBytes) assert.Equal(t, 2*dRetransmits, conns[0].LastRetransmits) assert.Equal(t, conn3.MonotonicSentBytes, conns[0].MonotonicSentBytes) assert.Equal(t, conn3.MonotonicRecvBytes, conns[0].MonotonicRecvBytes) assert.Equal(t, conn3.MonotonicRetransmits, conns[0].MonotonicRetransmits) // client 2 should have conn3 - conn2 conns = state.Connections(client2, latestEpochTime(), []ConnectionStats{conn3}) assert.Equal(t, 1, len(conns)) assert.Equal(t, dSent, conns[0].LastSentBytes) assert.Equal(t, dRecv, conns[0].LastRecvBytes) assert.Equal(t, dRetransmits, conns[0].LastRetransmits) assert.Equal(t, conn3.MonotonicSentBytes, conns[0].MonotonicSentBytes) assert.Equal(t, conn3.MonotonicRecvBytes, conns[0].MonotonicRecvBytes) assert.Equal(t, conn3.MonotonicRetransmits, conns[0].MonotonicRetransmits) }
explode_data.jsonl/22146
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1401 }
[ 2830, 3393, 5842, 16635, 1155, 353, 8840, 836, 8, 341, 25291, 16, 1669, 330, 16, 698, 25291, 17, 1669, 330, 17, 698, 24291, 1669, 1532, 3675, 12320, 1397, 2822, 2698, 31358, 1669, 2622, 21, 19, 7, 19, 17, 340, 2698, 63483, 1669, 262...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGoodOSArch(t *testing.T) { for _, test := range tests { if Default.goodOSArchFile(test.name, make(map[string]bool)) != test.result { t.Fatalf("goodOSArchFile(%q) != %v", test.name, test.result) } } }
explode_data.jsonl/53254
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 15216, 3126, 18727, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 7032, 341, 197, 743, 7899, 59569, 3126, 18727, 1703, 8623, 2644, 11, 1281, 9147, 14032, 96436, 593, 961, 1273, 12071, 341, 298, 3244, 30762, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCallerSubscriberClient_GetAggregatorRound(t *testing.T) { address := cltest.NewAddress() const aggregatorLatestRoundID = "668a0f02" aggregatorLatestRoundSelector := eth.HexToFunctionSelector(aggregatorLatestRoundID) expectedCallArgs := eth.CallArgs{ To: address, Data: aggregatorLatestRoundSelector.Bytes(), } large, ok := new(big.Int).SetString("52050000000000000000", 10) require.True(t, ok) tests := []struct { name, response string expectation *big.Int }{ {"zero", "0", big.NewInt(0)}, {"small", "12", big.NewInt(12)}, {"large", "52050000000000000000", large}, {"hex zero default", "0x", big.NewInt(0)}, {"hex zero", "0x0", big.NewInt(0)}, {"hex", "0x0100", big.NewInt(256)}, } for _, test := range tests { test := test t.Run(test.name, func(t *testing.T) { caller := new(mocks.CallerSubscriber) ethClient := &eth.CallerSubscriberClient{CallerSubscriber: caller} caller.On("Call", mock.Anything, "eth_call", expectedCallArgs, "latest").Return(nil). Run(func(args mock.Arguments) { res := args.Get(0).(*string) *res = test.response }) result, err := ethClient.GetAggregatorRound(address) require.NoError(t, err) assert.Equal(t, test.expectation, result) caller.AssertExpectations(t) }) } }
explode_data.jsonl/63849
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 517 }
[ 2830, 3393, 58735, 40236, 2959, 13614, 9042, 58131, 27497, 1155, 353, 8840, 836, 8, 341, 63202, 1669, 1185, 1944, 7121, 4286, 2822, 4777, 96680, 31992, 27497, 915, 284, 330, 21, 21, 23, 64, 15, 69, 15, 17, 698, 197, 351, 58131, 31992,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidationFailsOnInvalidPattern(t *testing.T) { schema := Schema{ Pattern: "[", Type: "string", } var err = schema.Validate(context.Background()) require.Error(t, err) }
explode_data.jsonl/5380
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 13799, 37, 6209, 1925, 7928, 15760, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 12539, 515, 197, 10025, 3227, 25, 10545, 756, 197, 27725, 25, 262, 330, 917, 756, 197, 630, 2405, 1848, 284, 10802, 47667, 5378, 19047, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDump(t *testing.T) { asn := NewASN() asn.Data = map[uint64]ASNInfo{15133: {Descr: "EdgeCast"}} gzipData := []byte{0x1f, 0x8b, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xe2, 0xff, 0xdf, 0xcc, 0xc2, 0xc8, 0xf4, 0xbf, 0x85, 0x81, 0x91, 0x8d, 0xf1, 0x7f, 0x13, 0x3, 0x83, 0xd8, 0xff, 0x46, 0x66, 0x46, 0xa6, 0xff, 0x4d, 0xc, 0x8c, 0x8c, 0x8c, 0xac, 0x2e, 0xa9, 0xc5, 0xc9, 0x45, 0x8c, 0x3c, 0xc, 0xc, 0xc, 0x42, 0x20, 0x15, 0xff, 0xac, 0x65, 0x19, 0x39, 0x5c, 0x53, 0xd2, 0x53, 0x9d, 0x13, 0x8b, 0x4b, 0x18, 0x0, 0x1, 0x0, 0x0, 0xff, 0xff, 0x95, 0xcc, 0x8e, 0x16, 0x3a, 0x0, 0x0, 0x0} buf := new(bytes.Buffer) asn.dump(buf) if !reflect.DeepEqual(buf.Bytes(), gzipData) { t.Error("dump unexpected error") } }
explode_data.jsonl/23945
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 51056, 1155, 353, 8840, 836, 8, 341, 60451, 77, 1669, 1532, 68134, 741, 60451, 77, 3336, 284, 2415, 58, 2496, 21, 19, 60, 68134, 1731, 90, 16, 20, 16, 18, 18, 25, 314, 11065, 81, 25, 330, 11656, 18714, 95642, 3174, 996...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTransientStore(t *testing.T) { plugin := &fakeEndorsementPlugin{} factory := &mocks.PluginFactory{} factory.On("New").Return(plugin) sif := &mocks.SigningIdentityFetcher{} cs := &mocks.ChannelStateRetriever{} queryCreator := &mocks.QueryCreator{} queryCreator.On("NewQueryExecutor").Return(&fake.QueryExecutor{}, nil) cs.On("NewQueryCreator", "mychannel").Return(queryCreator, nil) transientStore := newTransientStore(t) defer transientStore.tearDown() rws := &rwset.TxPvtReadWriteSet{ NsPvtRwset: []*rwset.NsPvtReadWriteSet{ { Namespace: "ns", CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{ { CollectionName: "col", }, }, }, }, } transientStore.Persist("tx1", 1, &tspb.TxPvtReadWriteSetWithConfigInfo{ PvtRwset: rws, CollectionConfigs: make(map[string]*peer.CollectionConfigPackage), }) storeRetriever := &mocks.TransientStoreRetriever{} storeRetriever.On("StoreForChannel", mock.Anything).Return(transientStore.store) pluginEndorser := endorser.NewPluginEndorser(&endorser.PluginSupport{ ChannelStateRetriever: cs, SigningIdentityFetcher: sif, PluginMapper: endorser.MapBasedPluginMapper{ "plugin": factory, }, TransientStoreRetriever: storeRetriever, }) _, prpBytes, err := pluginEndorser.EndorseWithPlugin("plugin", "mychannel", nil, nil) assert.NoError(t, err) txrws := &rwset.TxPvtReadWriteSet{} err = proto.Unmarshal(prpBytes, txrws) assert.NoError(t, err) assert.True(t, proto.Equal(rws, txrws)) }
explode_data.jsonl/1350
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 616 }
[ 2830, 3393, 48183, 6093, 1155, 353, 8840, 836, 8, 341, 197, 9138, 1669, 609, 30570, 3727, 10836, 478, 11546, 16094, 1166, 2919, 1669, 609, 16712, 82, 64378, 4153, 16094, 1166, 2919, 8071, 445, 3564, 1827, 5598, 46801, 340, 1903, 333, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOutFilterPartial(t *testing.T) { has := `logs-broker\nnullsink\n` tests := []struct { entry Entry shouldPass bool stdout, stderr string }{ { entry: Entry{ Name: "test when partial is true, while match[second] does not exist", Stdout: OutFilters{OutFilter{ Match: []string{"nullsink", "not"}, Partial: true, }}, }, shouldPass: true, stdout: has, }, { // this should fail because "Partial" is per match entry. entry: Entry{ Name: "test when partial is true for the first filter with a single match but second does not exist and partial is false", Stdout: OutFilters{ OutFilter{ Match: []string{"nullsink"}, Partial: true, }, OutFilter{ Match: []string{"failure"}, }, }, }, shouldPass: false, stdout: has, }, { entry: Entry{ Name: "test when partial is true but reverse order, first element does not exist but second does", Stdout: OutFilters{OutFilter{ Match: []string{"not", "logs-broker"}, Partial: true, }}, }, shouldPass: true, stdout: has, }, } for i, tt := range tests { pass, err := tt.entry.Test(tt.stdout, tt.stderr) if tt.shouldPass != pass { if tt.shouldPass { t.Fatalf("[%d] expected to pass but failed for test '%s', error trace: %v", i, tt.entry.Name, err) } else { t.Fatalf("[%d] expected to not pass but passed for test '%s'", i, tt.entry.Name) } } } }
explode_data.jsonl/31276
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 671 }
[ 2830, 3393, 2662, 5632, 37314, 1155, 353, 8840, 836, 8, 341, 63255, 1669, 1565, 22081, 1455, 45985, 1699, 2921, 66738, 1699, 19324, 78216, 1669, 3056, 1235, 341, 197, 48344, 688, 15788, 198, 197, 197, 5445, 12187, 257, 1807, 198, 197, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHelperStringMethods(t *testing.T) { // assert that function returns an error if invalid series is entered s := NewSeries("test", 12, 43, 11, 10) assert.PanicsWithError(t, errors.IncorrectDataType(base.Object).Error(), func() { helperStringMethods(s, func(val string) string { return "" }) }) s = NewSeries("test", "foo", "bar", "baz") s.Data[2] = 5 assert.PanicsWithError(t, errors.InvalidSeriesValError(5, 2, s.column.Name).Error(), func() { helperStringMethods(s, func(val string) string { return "" }) }) // assert that nil values are skipped correctly s = NewSeries("test", nil, "foo", "bar", "baz", "") s = s.Lower() assert.Nil(t, s.Data[0]) }
explode_data.jsonl/54080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 5511, 703, 17856, 1155, 353, 8840, 836, 8, 341, 197, 322, 2060, 429, 729, 4675, 458, 1465, 421, 8318, 4013, 374, 10636, 198, 1903, 1669, 1532, 25544, 445, 1944, 497, 220, 16, 17, 11, 220, 19, 18, 11, 220, 16, 16, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAttachVolmeFailureString(t *testing.T) { var stringTests = []struct { r AttachVolumeFailureReason expected string }{ {AttachVolumeNoInstance, "Instance does not exist"}, {AttachVolumeInvalidPayload, "YAML payload is corrupt"}, {AttachVolumeInvalidData, "Command section of YAML payload is corrupt or missing required information"}, {AttachVolumeAttachFailure, "Failed to attach volume to instance"}, {AttachVolumeAlreadyAttached, "Volume already attached"}, {AttachVolumeStateFailure, "State failure"}, {AttachVolumeInstanceFailure, "Instance failure"}, {AttachVolumeNotSupported, "Not Supported"}, } error := ErrorAttachVolumeFailure{ InstanceUUID: testutil.InstanceUUID, } for _, test := range stringTests { error.Reason = test.r s := error.Reason.String() if s != test.expected { t.Errorf("expected \"%s\", got \"%s\"", test.expected, s) } } }
explode_data.jsonl/76411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 30485, 36361, 2660, 17507, 703, 1155, 353, 8840, 836, 8, 341, 2405, 914, 18200, 284, 3056, 1235, 341, 197, 7000, 286, 48384, 18902, 17507, 25139, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 90, 30485, 18902, 2753, 2523,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuildIngressRouteConfiguration(t *testing.T) { testCases := []struct { name string ingressPolicies []*trafficpolicy.InboundTrafficPolicy expectedRouteConfigFields *xds_route.RouteConfiguration }{ { name: "no ingress policies", ingressPolicies: nil, expectedRouteConfigFields: nil, }, { name: "multiple ingress policies", ingressPolicies: []*trafficpolicy.InboundTrafficPolicy{ { Name: "bookstore-v1-default", Hostnames: []string{"bookstore-v1.default.svc.cluster.local"}, Rules: []*trafficpolicy.Rule{ { Route: trafficpolicy.RouteWeightedClusters{ HTTPRouteMatch: tests.BookstoreBuyHTTPRoute, WeightedClusters: mapset.NewSet(tests.BookstoreV1DefaultWeightedCluster), }, AllowedServiceIdentities: mapset.NewSet(identity.WildcardServiceIdentity), }, { Route: trafficpolicy.RouteWeightedClusters{ HTTPRouteMatch: tests.BookstoreSellHTTPRoute, WeightedClusters: mapset.NewSet(tests.BookstoreV1DefaultWeightedCluster), }, AllowedServiceIdentities: mapset.NewSet(identity.WildcardServiceIdentity), }, }, }, { Name: "foo.com", Hostnames: []string{"foo.com"}, Rules: []*trafficpolicy.Rule{ { Route: trafficpolicy.RouteWeightedClusters{ HTTPRouteMatch: tests.BookstoreBuyHTTPRoute, WeightedClusters: mapset.NewSet(tests.BookstoreV1DefaultWeightedCluster), }, AllowedServiceIdentities: mapset.NewSet(identity.WildcardServiceIdentity), }, }, }, }, expectedRouteConfigFields: &xds_route.RouteConfiguration{ Name: "rds-ingress", VirtualHosts: []*xds_route.VirtualHost{ { Name: "ingress_virtual-host|bookstore-v1.default.svc.cluster.local", Routes: []*xds_route.Route{ { // corresponds to ingressPolicies[0].Rules[0] }, { // corresponds to ingressPolicies[0].Rules[1] }, }, }, { Name: "ingress_virtual-host|foo.com", Routes: []*xds_route.Route{ { // corresponds to ingressPolicies[1].Rules[0] }, }, }, }, }, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { assert := tassert.New(t) actual := BuildIngressConfiguration(tc.ingressPolicies) if tc.expectedRouteConfigFields == nil { 
assert.Nil(actual) return } assert.NotNil(actual) assert.Equal(tc.expectedRouteConfigFields.Name, actual.Name) assert.Len(actual.VirtualHosts, len(tc.expectedRouteConfigFields.VirtualHosts)) for i, vh := range actual.VirtualHosts { assert.Len(vh.Routes, len(tc.expectedRouteConfigFields.VirtualHosts[i].Routes)) } }) } }
explode_data.jsonl/75511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1333 }
[ 2830, 3393, 11066, 641, 2483, 4899, 7688, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 2549, 914, 198, 197, 197, 287, 673, 47, 42038, 1843, 29838, 53987, 34790, 5337, 10891, 87229, 13825, 198, 197, 4240...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRevelWithFieldsfOutput(t *testing.T) { l, b := newBufferedRevelLog() l.WithFields("test", true, "Error", "serious").Errorf("This is a %s.", "message") expectedMatch := "ERROR.*This is a message. test=true Error=serious\n" actual := b.String() if ok, _ := regexp.Match(expectedMatch, []byte(actual)); !ok { t.Errorf("Log output mismatch %s (actual) != %s (expected)", actual, expectedMatch) } }
explode_data.jsonl/3455
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 693, 889, 2354, 8941, 69, 5097, 1155, 353, 8840, 836, 8, 341, 8810, 11, 293, 1669, 501, 4095, 291, 693, 889, 2201, 741, 8810, 26124, 8941, 445, 1944, 497, 830, 11, 330, 1454, 497, 330, 93327, 1827, 1454, 69, 445, 1986, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPullCreate(t *testing.T) { defer gock.Off() input := scm.PullRequestInput{ Title: "JS fix", Body: "Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>", Source: "fix", Target: "master", } gock.New("https://gitlab.com"). Post("/api/v4/projects/diaspora/diaspora/merge_requests"). MatchParam("title", input.Title). MatchParam("description", input.Body). MatchParam("source_branch", input.Source). MatchParam("target_branch", input.Target). Reply(201). Type("application/json"). SetHeaders(mockHeaders). File("testdata/merge.json") client := NewDefault() got, res, err := client.PullRequests.Create(context.Background(), "diaspora/diaspora", &input) if err != nil { t.Error(err) return } want := new(scm.PullRequest) raw, _ := ioutil.ReadFile("testdata/merge.json.golden") json.Unmarshal(raw, want) if diff := cmp.Diff(got, want); diff != "" { t.Errorf("Unexpected Results") t.Log(diff) } t.Run("Request", testRequest(res)) t.Run("Rate", testRate(res)) }
explode_data.jsonl/78656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 36068, 4021, 1155, 353, 8840, 836, 8, 341, 16867, 728, 377, 13, 4596, 2822, 22427, 1669, 85520, 97357, 1900, 2505, 515, 197, 92233, 25, 220, 330, 12545, 5046, 756, 197, 197, 5444, 25, 256, 330, 49312, 12462, 14319, 25, 661...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIssue29WithNumericColumn(t *testing.T) { NegativeTest{ Name: "issue29 with numeric column", Query: "CREATE TABLE foo(1)", }.Run(t) }
explode_data.jsonl/31976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 57 }
[ 2830, 3393, 42006, 17, 24, 2354, 36296, 2933, 1155, 353, 8840, 836, 8, 341, 18317, 15060, 2271, 515, 197, 21297, 25, 220, 330, 11159, 17, 24, 448, 24064, 3250, 756, 197, 60362, 25, 330, 22599, 14363, 15229, 7, 16, 15752, 197, 7810, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFileServerEscapesNames(t *testing.T) { defer afterTest(t) const dirListPrefix = "<pre>\n" const dirListSuffix = "\n</pre>\n" tests := []struct { name, escaped string }{ {`simple_name`, `<a href="simple_name">simple_name</a>`}, {`"'<>&`, `<a href="%22%27%3C%3E&">&#34;&#39;&lt;&gt;&amp;</a>`}, {`?foo=bar#baz`, `<a href="%3Ffoo=bar%23baz">?foo=bar#baz</a>`}, {`<combo>?foo`, `<a href="%3Ccombo%3E%3Ffoo">&lt;combo&gt;?foo</a>`}, } // We put each test file in its own directory in the fakeFS so we can look at it in isolation. fs := make(fakeFS) for i, test := range tests { testFile := &fakeFileInfo{basename: test.name} fs[fmt.Sprintf("/%d", i)] = &fakeFileInfo{ dir: true, modtime: time.Unix(1000000000, 0).UTC(), ents: []*fakeFileInfo{testFile}, } fs[fmt.Sprintf("/%d/%s", i, test.name)] = testFile } ts := httptest.NewServer(FileServer(&fs)) defer ts.Close() for i, test := range tests { url := fmt.Sprintf("%s/%d", ts.URL, i) res, err := Get(url) if err != nil { t.Fatalf("test %q: Get: %v", test.name, err) } b, err := ioutil.ReadAll(res.Body) if err != nil { t.Fatalf("test %q: read Body: %v", test.name, err) } s := string(b) if !strings.HasPrefix(s, dirListPrefix) || !strings.HasSuffix(s, dirListSuffix) { t.Errorf("test %q: listing dir, full output is %q, want prefix %q and suffix %q", test.name, s, dirListPrefix, dirListSuffix) } if trimmed := strings.TrimSuffix(strings.TrimPrefix(s, dirListPrefix), dirListSuffix); trimmed != test.escaped { t.Errorf("test %q: listing dir, filename escaped to %q, want %q", test.name, trimmed, test.escaped) } res.Body.Close() } }
explode_data.jsonl/48227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 755 }
[ 2830, 3393, 1703, 5475, 36121, 9337, 7980, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 4777, 5419, 852, 14335, 284, 4055, 1726, 8449, 77, 698, 4777, 5419, 852, 40177, 284, 2917, 77, 522, 1726, 8449, 77, 698, 78216, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestPeek(t *testing.T) { q := queueint.NewQueueInt() q.Enqueue(1) q.Enqueue(2) v := q.Peek() assert.Equal(t, v, 1) assert.Equal(t, "[1 2]", q.Print()) }
explode_data.jsonl/77135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 10197, 1225, 1155, 353, 8840, 836, 8, 341, 18534, 1669, 7177, 396, 7121, 7554, 1072, 741, 18534, 65625, 7, 16, 340, 18534, 65625, 7, 17, 340, 5195, 1669, 2804, 94279, 2822, 6948, 12808, 1155, 11, 348, 11, 220, 16, 340, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestReadEntries(t *testing.T) { entry := new(pb.Entry) entry.Key = []byte("key") entry.Value = []byte("value") data, err := entry.Marshal() require.Nil(t, err) extent, err := CreateExtent("localtest.ext", 100) defer os.Remove("localtest.ext") extent.Lock() extent.AppendBlocks([]*pb.Block{{Data: data}}, true) extent.Unlock() entries, end, err := extent.ReadEntries(0, 10<<20, true) for i := range entries { require.Equal(t, uint64(100), entries[i].ExtentID) require.Equal(t, []byte("key"), entries[i].Log.Key) require.Equal(t, []byte("value"), entries[i].Log.Value) } fmt.Println(end) }
explode_data.jsonl/14630
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 4418, 24533, 1155, 353, 8840, 836, 8, 341, 48344, 1669, 501, 76878, 22330, 340, 48344, 9610, 284, 3056, 3782, 445, 792, 1138, 48344, 6167, 284, 3056, 3782, 445, 957, 1138, 8924, 11, 1848, 1669, 4343, 37271, 741, 17957, 59678...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestChunkOffset(t *testing.T) { if testing.Short() { t.SkipNow() } t.Parallel() sf := newTestFile() // Set the static pages per chunk to a random value. sf.staticMetadata.StaticPagesPerChunk = uint8(fastrand.Intn(5)) + 1 // Calculate the offset of the first chunk. offset1 := sf.chunkOffset(0) if expectedOffset := sf.staticMetadata.ChunkOffset; expectedOffset != offset1 { t.Fatalf("expected offset %v but got %v", sf.staticMetadata.ChunkOffset, offset1) } // Calculate the offset of the second chunk. offset2 := sf.chunkOffset(1) if expectedOffset := offset1 + int64(sf.staticMetadata.StaticPagesPerChunk)*pageSize; expectedOffset != offset2 { t.Fatalf("expected offset %v but got %v", expectedOffset, offset2) } // Make sure that the offsets we calculated are not the same due to not // initializing the file correctly. if offset2 == offset1 { t.Fatal("the calculated offsets are the same") } }
explode_data.jsonl/14698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 28304, 6446, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 7039, 741, 197, 532, 3244, 41288, 7957, 2822, 53024, 1669, 501, 2271, 1703, 2822, 197, 322, 2573, 279, 1099, 6816, 817, 11879, 311, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRequireAccess(t *testing.T) { chartAPI := &ChartRepositoryAPI{} chartAPI.SecurityCtx = &mockSecurityContext{} ns := "library" if !chartAPI.requireAccess(ns, accessLevelPublic) { t.Fatal("expect true result (public access level is granted) but got false") } if !chartAPI.requireAccess(ns, accessLevelAll) { t.Fatal("expect true result (admin has all perm) but got false") } if !chartAPI.requireAccess(ns, accessLevelRead) { t.Fatal("expect true result (admin has read perm) but got false") } if !chartAPI.requireAccess(ns, accessLevelWrite) { t.Fatal("expect true result (admin has write perm) but got false") } if !chartAPI.requireAccess(ns, accessLevelSystem) { t.Fatal("expect true result (admin has system perm) but got false") } }
explode_data.jsonl/74122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 17959, 6054, 1155, 353, 8840, 836, 8, 341, 197, 15941, 7082, 1669, 609, 14488, 4624, 7082, 16094, 197, 15941, 7082, 21567, 23684, 284, 609, 16712, 15352, 1972, 31483, 84041, 1669, 330, 18065, 698, 743, 753, 15941, 7082, 23712,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestEvenMedian(t *testing.T) { median := Median([]*big.Int{big.NewInt(12), big.NewInt(20), big.NewInt(1), big.NewInt(9), big.NewInt(17)}) if median.Cmp(big.NewInt(12)) != 0 { t.Errorf("Median of this array must be 12 not %d", median.Int64()) } }
explode_data.jsonl/45656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 13159, 79514, 1155, 353, 8840, 836, 8, 341, 197, 55651, 1669, 62590, 85288, 16154, 7371, 90, 16154, 7121, 1072, 7, 16, 17, 701, 2409, 7121, 1072, 7, 17, 15, 701, 2409, 7121, 1072, 7, 16, 701, 2409, 7121, 1072, 7, 24, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPublishJoinLeave(t *testing.T) { app := testNode() err := <-app.PublishJoin(proto.NewJoinMessage("channel-0", proto.ClientInfo{}), nil) assert.Equal(t, nil, err) err = <-app.PublishLeave(proto.NewLeaveMessage("channel-0", proto.ClientInfo{}), nil) assert.Equal(t, nil, err) }
explode_data.jsonl/53961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 50145, 12292, 21833, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1273, 1955, 741, 9859, 1669, 9119, 676, 83935, 12292, 73963, 7121, 12292, 2052, 445, 10119, 12, 15, 497, 18433, 11716, 1731, 6257, 701, 2092, 340, 6948, 12808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewInvalidMemberPresent(t *testing.T) { dir := t.TempDir() v1 := version.NewDefaultVersion(1, 1, 0) v2 := version.NewDefaultVersion(1, 2, 0) dbPath1 := filepath.Join(dir, v1.String()) db1, err := leveldb.New(dbPath1, nil, logging.NoLog{}) if err != nil { t.Fatal(err) } dbPath2 := filepath.Join(dir, v2.String()) db2, err := leveldb.New(dbPath2, nil, logging.NoLog{}) if err != nil { t.Fatal(err) } err = db2.Close() if err != nil { t.Fatal(err) } _, err = NewLevelDB(dir, nil, logging.NoLog{}, v2) assert.Error(t, err, "expected to error creating the manager due to an open db") err = db1.Close() assert.NoError(t, err) f, err := os.Create(filepath.Join(dir, "dummy")) assert.NoError(t, err) err = f.Close() assert.NoError(t, err) db, err := NewLevelDB(dir, nil, logging.NoLog{}, v1) assert.NoError(t, err, "expected not to error with a non-directory file being present") err = db.Close() assert.NoError(t, err) }
explode_data.jsonl/37000
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 398 }
[ 2830, 3393, 3564, 7928, 9366, 21195, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 259, 65009, 6184, 2822, 5195, 16, 1669, 2319, 7121, 3675, 5637, 7, 16, 11, 220, 16, 11, 220, 15, 340, 5195, 17, 1669, 2319, 7121, 3675, 5637, 7, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFlowNoResolves(t *testing.T) { workflow, _ := fixture(t, "valid/no-resolves.workflow") assert.Equal(t, "push", workflow.Workflows[0].On.String()) assert.IsType(t, &model.OnEvent{}, workflow.Workflows[0].On) assert.Len(t, workflow.Workflows[0].Resolves, 0) assert.Empty(t, workflow.Workflows[0].Resolves) }
explode_data.jsonl/55673
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 18878, 2753, 1061, 18186, 1155, 353, 8840, 836, 8, 341, 197, 56249, 11, 716, 1669, 12507, 1155, 11, 330, 1891, 33100, 11588, 18186, 72774, 1138, 6948, 12808, 1155, 11, 330, 9077, 497, 28288, 28748, 38140, 58, 15, 936, 1925, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRpcServer(t *testing.T) { var currentTestNum int defer func() { // If one of the integration tests caused a panic within the main // goroutine, then tear down all the harnesses in order to avoid // any leaked pktd processes. if r := recover(); r != nil { fmt.Println("recovering from test panic: ", r) if err := rpctest.TearDownAll(); err != nil { fmt.Println("unable to tear down all harnesses: ", err) } t.Fatalf("test #%v panicked: %s", currentTestNum, debug.Stack()) } }() for _, testCase := range rpcTestCases { testCase(primaryHarness, t) currentTestNum++ } }
explode_data.jsonl/28820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 60248, 5475, 1155, 353, 8840, 836, 8, 341, 2405, 1482, 2271, 4651, 526, 198, 16867, 2915, 368, 341, 197, 197, 322, 1416, 825, 315, 279, 17590, 7032, 8881, 264, 21975, 2878, 279, 1887, 198, 197, 197, 322, 45198, 14159, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRowsCache(t *testing.T) { t.Run("basic methods", func(t *testing.T) { require := require.New(t) cache := newRowsCache(mockMemory{}, fixedReporter(5, 50)) require.NoError(cache.Add(Row{1})) require.Len(cache.Get(), 1) cache.Dispose() require.Panics(func() { _ = cache.Add(Row{2}) }) }) t.Run("no memory available", func(t *testing.T) { require := require.New(t) cache := newRowsCache(mockMemory{}, fixedReporter(51, 50)) err := cache.Add(Row{1, "foo"}) require.Error(err) require.True(ErrNoMemoryAvailable.Is(err)) }) t.Run("free required to add entry", func(t *testing.T) { require := require.New(t) var freed bool cache := newRowsCache( mockMemory{func() { freed = true }}, mockReporter{func() uint64 { if freed { return 0 } return 51 }, 50}, ) require.NoError(cache.Add(Row{1, "foo"})) require.Len(cache.Get(), 1) require.True(freed) }) }
explode_data.jsonl/55320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 410 }
[ 2830, 3393, 9024, 8233, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 22342, 5413, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 17957, 1669, 1373, 7121, 1155, 692, 197, 52680, 1669, 501, 9024, 8233, 30389, 10642, 22655, 8356, 52766, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExtraPodsAdoptionAndDeletion(t *testing.T) { s, closeFn, rm, informers, c := rmSetup(t) defer closeFn() ns := framework.CreateTestingNamespace("test-extra-pods-adoption-and-deletion", s, t) defer framework.DeleteTestingNamespace(ns, s, t) rs := newRS("rs", ns.Name, 2) // Create 3 pods, RS should adopt only 2 of them podList := []*v1.Pod{} for i := 0; i < 3; i++ { pod := newMatchingPod(fmt.Sprintf("pod-%d", i+1), ns.Name) pod.Labels = labelMap() podList = append(podList, pod) } rss, _ := createRSsPods(t, c, []*apps.ReplicaSet{rs}, podList) rs = rss[0] stopCh := runControllerAndInformers(t, rm, informers, 3) defer close(stopCh) waitRSStable(t, c, rs) // Verify the extra pod is deleted eventually by determining whether number of // all pods within namespace matches .spec.replicas of the RS (2 in this case) podClient := c.CoreV1().Pods(ns.Name) if err := wait.PollImmediate(interval, timeout, func() (bool, error) { // All pods have labelMap as their labels pods := getPods(t, podClient, labelMap()) return int32(len(pods.Items)) == *rs.Spec.Replicas, nil }); err != nil { t.Fatalf("Failed to verify number of all pods within current namespace matches .spec.replicas of rs %s: %v", rs.Name, err) } }
explode_data.jsonl/40121
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 472 }
[ 2830, 3393, 11612, 23527, 82, 2589, 2047, 3036, 1912, 52625, 1155, 353, 8840, 836, 8, 341, 1903, 11, 3265, 24911, 11, 18998, 11, 6051, 388, 11, 272, 1669, 18998, 21821, 1155, 340, 16867, 3265, 24911, 741, 84041, 1669, 12626, 7251, 16451...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewPersistent(t *testing.T) { testingVault1FilePath := "/v1.json" v := NewPersistent(testingVault1FilePath, "foobar") assert.NotNil(t, v) assert.Equal(t, "foobar", v.password) assert.Equal(t, testingVault1FilePath, v.path) }
explode_data.jsonl/50663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 3564, 53194, 1155, 353, 8840, 836, 8, 341, 197, 8840, 79177, 16, 19090, 1669, 3521, 85, 16, 4323, 698, 5195, 1669, 1532, 53194, 8623, 287, 79177, 16, 19090, 11, 330, 50267, 1138, 6948, 93882, 1155, 11, 348, 692, 6948, 1280...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestLogin(t *testing.T) { cases := []struct { name string req string wantStatus int wantResp *model.AuthToken udb *mockdb.User jwt *mock.JWT }{ { name: "Invalid request", req: `{"username":"juzernejm"}`, wantStatus: http.StatusBadRequest, }, { name: "Fail on FindByUsername", req: `{"username":"juzernejm","password":"hunter123"}`, wantStatus: http.StatusInternalServerError, udb: &mockdb.User{ FindByUsernameFn: func(context.Context, string) (*model.User, error) { return nil, apperr.DB }, }, }, { name: "Success", req: `{"username":"juzernejm","password":"hunter123"}`, wantStatus: http.StatusOK, udb: &mockdb.User{ FindByUsernameFn: func(context.Context, string) (*model.User, error) { return &model.User{ Password: auth.HashPassword("hunter123"), Active: true, }, nil }, UpdateLoginFn: func(context.Context, *model.User) error { return nil }, }, jwt: &mock.JWT{ GenerateTokenFn: func(*model.User) (string, string, error) { return "jwttokenstring", mock.TestTime(2018).Format(time.RFC3339), nil }, }, wantResp: &model.AuthToken{Token: "jwttokenstring", Expires: mock.TestTime(2018).Format(time.RFC3339)}, }, } gin.SetMode(gin.TestMode) for _, tt := range cases { t.Run(tt.name, func(t *testing.T) { r := gin.New() service.NewAuth(auth.New(tt.udb, tt.jwt), r) ts := httptest.NewServer(r) defer ts.Close() path := ts.URL + "/login" res, err := http.Post(path, "application/json", bytes.NewBufferString(tt.req)) if err != nil { t.Fatal(err) } defer res.Body.Close() if tt.wantResp != nil { response := new(model.AuthToken) if err := json.NewDecoder(res.Body).Decode(response); err != nil { t.Fatal(err) } tt.wantResp.RefreshToken = response.RefreshToken assert.Equal(t, tt.wantResp, response) } assert.Equal(t, tt.wantStatus, res.StatusCode) }) } }
explode_data.jsonl/54619
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 956 }
[ 2830, 3393, 6231, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 24395, 286, 914, 198, 197, 50780, 2522, 526, 198, 197, 50780, 36555, 256, 353, 2528, 25233, 3323, 198, 197, 197, 661, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFiles_HLSPlaylist(t *testing.T) { setup() defer teardown() sampleHLS := ` #EXTM3U #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=688301 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/0640_vod.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=165135 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/0150_vod.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=262346 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/0240_vod.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=481677 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/0440_vod.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=1308077 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/1240_vod.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=1927853 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/1840_vod.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=2650941 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/2540_vod.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1, BANDWIDTH=3477293 http://qthttp.apple.com.edgesuite.net/1010qwoeiuryfg/3340_vod.m3u8 ` mux.HandleFunc("/v2/files/1/hls/media.m3u8", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") http.ServeContent(w, r, "media.m3u8", time.Now().UTC(), strings.NewReader(sampleHLS)) }) ctx := context.Background() body, err := client.Files.HLSPlaylist(ctx, 1, "all") if err != nil { t.Error(err) } defer body.Close() var buf bytes.Buffer _, err = io.Copy(&buf, body) if err != nil { t.Error(err) } if buf.String() != sampleHLS { t.Errorf("got: %v, want: %v", buf.String(), sampleHLS) } // negative id _, err = client.Files.HLSPlaylist(ctx, -1, "all") if err == nil { t.Errorf("negative file ID accepted") } // empty key _, err = client.Files.HLSPlaylist(ctx, 1, "") if err == nil { t.Errorf("empty key is accepted") } }
explode_data.jsonl/48324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 904 }
[ 2830, 3393, 10809, 2039, 7268, 52581, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 1903, 1516, 39, 7268, 1669, 22074, 2, 5722, 44, 18, 52, 198, 2, 5722, 30550, 12, 54326, 71470, 25, 80829, 53365, 28, 16, 11, 425, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHasSubdomain(t *testing.T) { //Test cases cases := map[string]bool{ "http://google.com": false, "http://google.com/ding?true": false, "google.com/?ding=false": false, "google.com?ding=false": false, "nonexist.***": false, "google.com": false, "google.co.uk": false, "gama.google.com": true, "gama.google.co.uk": true, "beta.gama.google.co.uk": true, } //Test each domain, some should fail (expected) for url, shouldHaveSubdomain := range cases { hasSubdomain := HasSubdomain(url) if hasSubdomain != shouldHaveSubdomain { t.Errorf("Url (%q) returned %v for HasSubdomain(), but %v was expected", url, hasSubdomain, shouldHaveSubdomain) } } }
explode_data.jsonl/30829
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 360 }
[ 2830, 3393, 10281, 3136, 12204, 1155, 353, 8840, 836, 8, 341, 197, 322, 2271, 5048, 198, 1444, 2264, 1669, 2415, 14032, 96436, 515, 197, 197, 76932, 1110, 17485, 905, 788, 1843, 895, 345, 197, 197, 76932, 1110, 17485, 905, 3446, 287, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFormatWithdrawPermissions(t *testing.T) { expectedResult := exchange.AutoWithdrawCryptoText + " & " + exchange.NoFiatWithdrawalsText withdrawPermissions := o.FormatWithdrawPermissions() if withdrawPermissions != expectedResult { t.Errorf("Expected: %s, Received: %s", expectedResult, withdrawPermissions) } }
explode_data.jsonl/30199
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 4061, 92261, 23851, 1155, 353, 8840, 836, 8, 341, 42400, 2077, 1669, 9289, 6477, 92261, 58288, 1178, 488, 330, 609, 330, 488, 9289, 16766, 37, 10358, 92261, 1127, 1178, 198, 46948, 7633, 23851, 1669, 297, 9978, 92261, 23851, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReadBBox(t *testing.T) { tests := []struct { filename string want Box }{ {"test_files/multipatch.shp", Box{0, 0, 10, 10}}, {"test_files/multipoint.shp", Box{0, 5, 10, 10}}, {"test_files/multipointm.shp", Box{0, 5, 10, 10}}, {"test_files/multipointz.shp", Box{0, 5, 10, 10}}, {"test_files/point.shp", Box{0, 5, 10, 10}}, {"test_files/pointm.shp", Box{0, 5, 10, 10}}, {"test_files/pointz.shp", Box{0, 5, 10, 10}}, {"test_files/polygon.shp", Box{0, 0, 5, 5}}, {"test_files/polygonm.shp", Box{0, 0, 5, 5}}, {"test_files/polygonz.shp", Box{0, 0, 5, 5}}, {"test_files/polyline.shp", Box{0, 0, 25, 25}}, {"test_files/polylinem.shp", Box{0, 0, 25, 25}}, {"test_files/polylinez.shp", Box{0, 0, 25, 25}}, } for _, tt := range tests { f, _ := os.Open(tt.filename) r, err := ReadFrom(f) if err != nil { t.Fatalf("%v", err) } if got := r.BBox().MinX; got != tt.want.MinX { t.Errorf("got MinX = %v, want %v", got, tt.want.MinX) } if got := r.BBox().MinY; got != tt.want.MinY { t.Errorf("got MinY = %v, want %v", got, tt.want.MinY) } if got := r.BBox().MaxX; got != tt.want.MaxX { t.Errorf("got MaxX = %v, want %v", got, tt.want.MaxX) } if got := r.BBox().MaxY; got != tt.want.MaxY { t.Errorf("got MaxY = %v, want %v", got, tt.want.MaxY) } } }
explode_data.jsonl/37782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 675 }
[ 2830, 3393, 4418, 33, 1611, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 66434, 914, 198, 197, 50780, 257, 8261, 198, 197, 59403, 197, 197, 4913, 1944, 10931, 3183, 9845, 754, 2395, 79, 497, 8261, 90, 15, 11, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAllowActivitiesVendorException(t *testing.T) { noPurposeOrVendorConsentAndPubRestrictsP2 := "CPF_61ePF_61eFxAAAENAiCAAAAAAAAAAAAAACEAAAACEAAgAgAA" noPurposeOrVendorConsentAndPubRestrictsNone := "CPF_61ePF_61eFxAAAENAiCAAAAAAAAAAAAAACEAAAAA" testDefs := []struct { description string p2VendorExceptionMap map[openrtb_ext.BidderName]struct{} sp1VendorExceptionMap map[openrtb_ext.BidderName]struct{} bidder openrtb_ext.BidderName consent string allowBid bool passGeo bool passID bool }{ { description: "Bid/ID blocked by publisher - p2 enabled with p2 vendor exception, pub restricts p2 for vendor", p2VendorExceptionMap: map[openrtb_ext.BidderName]struct{}{openrtb_ext.BidderAppnexus: {}}, bidder: openrtb_ext.BidderAppnexus, consent: noPurposeOrVendorConsentAndPubRestrictsP2, allowBid: false, passGeo: false, passID: false, }, { description: "Bid/ID allowed by vendor exception - p2 enabled with p2 vendor exception, pub restricts none", p2VendorExceptionMap: map[openrtb_ext.BidderName]struct{}{openrtb_ext.BidderAppnexus: {}}, sp1VendorExceptionMap: map[openrtb_ext.BidderName]struct{}{}, bidder: openrtb_ext.BidderAppnexus, consent: noPurposeOrVendorConsentAndPubRestrictsNone, allowBid: true, passGeo: false, passID: true, }, { description: "Geo blocked - sp1 enabled but no consent", p2VendorExceptionMap: map[openrtb_ext.BidderName]struct{}{}, sp1VendorExceptionMap: map[openrtb_ext.BidderName]struct{}{}, bidder: openrtb_ext.BidderAppnexus, consent: noPurposeOrVendorConsentAndPubRestrictsNone, allowBid: false, passGeo: false, passID: false, }, { description: "Geo allowed by vendor exception - sp1 enabled with sp1 vendor exception", p2VendorExceptionMap: map[openrtb_ext.BidderName]struct{}{}, sp1VendorExceptionMap: map[openrtb_ext.BidderName]struct{}{openrtb_ext.BidderAppnexus: {}}, bidder: openrtb_ext.BidderAppnexus, consent: noPurposeOrVendorConsentAndPubRestrictsNone, allowBid: false, passGeo: true, passID: false, }, } for _, td := range testDefs { vendorListData := 
MarshalVendorList(buildVendorList34()) perms := permissionsImpl{ cfg: config.GDPR{ HostVendorID: 2, TCF2: config.TCF2{ Enabled: true, Purpose2: config.TCF2Purpose{Enabled: true, VendorExceptionMap: td.p2VendorExceptionMap}, SpecialPurpose1: config.TCF2Purpose{Enabled: true, VendorExceptionMap: td.sp1VendorExceptionMap}, }, }, vendorIDs: map[openrtb_ext.BidderName]uint16{ openrtb_ext.BidderAppnexus: 32, }, fetchVendorList: map[uint8]func(ctx context.Context, id uint16) (vendorlist.VendorList, error){ tcf2SpecVersion: listFetcher(map[uint16]vendorlist.VendorList{ 34: parseVendorListDataV2(t, vendorListData), }), }, } perms.purposeConfigs = map[consentconstants.Purpose]config.TCF2Purpose{ consentconstants.Purpose(2): perms.cfg.TCF2.Purpose2, consentconstants.Purpose(3): perms.cfg.TCF2.Purpose3, } allowBid, passGeo, passID, err := perms.AuctionActivitiesAllowed(context.Background(), td.bidder, "", SignalYes, td.consent, false) assert.NoErrorf(t, err, "Error processing AuctionActivitiesAllowed for %s", td.description) assert.EqualValuesf(t, td.allowBid, allowBid, "AllowBid failure on %s", td.description) assert.EqualValuesf(t, td.passGeo, passGeo, "PassGeo failure on %s", td.description) assert.EqualValuesf(t, td.passID, passID, "PassID failure on %s", td.description) } }
explode_data.jsonl/31103
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1870 }
[ 2830, 3393, 18605, 67820, 44691, 1354, 1155, 353, 8840, 836, 8, 341, 72104, 74033, 2195, 44691, 15220, 306, 3036, 29162, 50360, 29995, 47, 17, 1669, 330, 73830, 62, 21, 16, 68, 19701, 62, 21, 16, 68, 81856, 50107, 953, 69667, 5049, 57...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogger(t *testing.T) { ch := make(chan int, 100) for i := 0; i < 100; i++ { go func(i int) { Logger(Cerebro) Logger(Web) ch <- i }(i) } for i := 0; i < 100; i++ { fmt.Println(<-ch) } }
explode_data.jsonl/20541
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 7395, 1155, 353, 8840, 836, 8, 341, 23049, 1669, 1281, 35190, 526, 11, 220, 16, 15, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 15, 26, 600, 1027, 341, 197, 30680, 2915, 1956, 526, 8, 341, 298, 5586...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCloser_Closed(t *testing.T) { t.Parallel() var c sync.Closer assert.NotNil(t, c.Closed()) select { case _, ok := <-c.Closed(): t.Fatalf("Closed() should not yield a value, ok = %t", ok) default: } require.NoError(t, c.Close()) test.AssertTerminates(t, timeout, func() { _, ok := <-c.Closed() assert.False(t, ok) }) }
explode_data.jsonl/47719
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 51236, 799, 920, 9259, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2405, 272, 12811, 727, 69215, 271, 6948, 93882, 1155, 11, 272, 95163, 2398, 38010, 341, 2722, 8358, 5394, 1669, 9119, 66, 95163, 3932, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestThreeColMessage(t *testing.T) { ch := make(chan *sqltypes.Result) done := make(chan struct{}) client := framework.NewClient() if _, err := client.Execute(createThreeColMessage, nil); err != nil { t.Fatal(err) } defer client.Execute("drop table vitess_message3", nil) go func() { if err := client.MessageStream("vitess_message3", func(qr *sqltypes.Result) error { select { case <-done: return io.EOF default: } ch <- qr return nil }); err != nil { t.Error(err) } close(ch) }() // Verify fields. got := <-ch want := &sqltypes.Result{ Fields: []*querypb.Field{{ Name: "id", Type: sqltypes.Int64, }, { Name: "time_scheduled", Type: sqltypes.Int64, }, { Name: "msg1", Type: sqltypes.VarChar, }, { Name: "msg2", Type: sqltypes.Int64, }}, } if !reflect.DeepEqual(got, want) { t.Errorf("message(field) received:\n%v, want\n%v", got, want) } runtime.Gosched() defer func() { close(done) }() err := client.Begin(false) if err != nil { t.Error(err) return } _, err = client.Execute("insert into vitess_message3(id, msg1, msg2) values(1, 'hello world', 3)", nil) if err != nil { t.Error(err) return } err = client.Commit() if err != nil { t.Error(err) return } // Verify row. got = <-ch want = &sqltypes.Result{ Rows: [][]sqltypes.Value{{ sqltypes.NewInt64(1), got.Rows[0][1], sqltypes.NewVarChar("hello world"), sqltypes.NewInt64(3), }}, } if !reflect.DeepEqual(got, want) { t.Errorf("message received:\n%v, want\n%v", got, want) } // Verify Ack. count, err := client.MessageAck("vitess_message3", []string{"1"}) if err != nil { t.Error(err) } if count != 1 { t.Errorf("count: %d, want 1", count) } }
explode_data.jsonl/60595
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 780 }
[ 2830, 3393, 19641, 6127, 2052, 1155, 353, 8840, 836, 8, 341, 23049, 1669, 1281, 35190, 353, 3544, 9242, 18456, 340, 40495, 1669, 1281, 35190, 2036, 37790, 25291, 1669, 12626, 7121, 2959, 741, 743, 8358, 1848, 1669, 2943, 13827, 32602, 196...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLoadBaseSchema(t *testing.T) { cfg := &Config{Path: "./testdata/base"} spec, err := cfg.Load() require.NoError(t, err) require.Len(t, spec.Schemas, 1) require.Len(t, spec.Schemas[0].Fields, 2, "embedded base schema") f1 := spec.Schemas[0].Fields[0] require.Equal(t, "base_field", f1.Name) require.Equal(t, field.TypeInt, f1.Info.Type) f2 := spec.Schemas[0].Fields[1] require.Equal(t, "user_field", f2.Name) require.Equal(t, field.TypeString, f2.Info.Type) }
explode_data.jsonl/6887
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 5879, 3978, 8632, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 609, 2648, 90, 1820, 25, 5924, 92425, 26090, 16707, 98100, 11, 1848, 1669, 13286, 13969, 741, 17957, 35699, 1155, 11, 1848, 340, 17957, 65819, 1155, 11, 1398, 808,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew(t *testing.T) { ctx, _ := SetupFakeContext(t) configMapWatcher := configmap.NewStaticWatcher(&corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: cfgmap.FeaturesConfigName, Namespace: system.Namespace(), }, Data: map[string]string{}, }, &corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: cfgmap.DefaultsConfigName, Namespace: system.Namespace(), }, Data: map[string]string{}, }, &corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: autoscalercfg.ConfigName, Namespace: system.Namespace(), }, Data: map[string]string{}, }) c := NewController(ctx, configMapWatcher) if c == nil { t.Fatal("Expected NewController to return a non-nil value") } }
explode_data.jsonl/51910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 20985, 11, 716, 1669, 18626, 52317, 1972, 1155, 692, 25873, 2227, 47248, 1669, 2193, 2186, 7121, 11690, 47248, 2099, 98645, 16, 10753, 2227, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestResponse_GetFrameworks_FrameworkStringer(t *testing.T) { popr := math_rand.New(math_rand.NewSource(time.Now().UnixNano())) p := NewPopulatedResponse_GetFrameworks_Framework(popr, false) s1 := p.String() s2 := fmt.Sprintf("%v", p) if s1 != s2 { t.Fatalf("String want %v got %v", s1, s2) } }
explode_data.jsonl/42111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 2582, 13614, 89615, 61995, 703, 261, 1155, 353, 8840, 836, 8, 341, 3223, 46288, 1669, 6888, 33864, 7121, 37270, 33864, 7121, 3608, 9730, 13244, 1005, 55832, 83819, 12145, 3223, 1669, 1532, 11598, 7757, 2582, 13614, 89615, 61995,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreateTeam(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() id := model.NewId() team := &model.Team{ DisplayName: "dn_" + id, Name: "name" + id, Email: "success+" + id + "@simulator.amazonses.com", Type: model.TEAM_OPEN, } _, err := th.App.CreateTeam(team) require.Nil(t, err, "Should create a new team") _, err = th.App.CreateTeam(th.BasicTeam) require.NotNil(t, err, "Should not create a new team - team already exist") }
explode_data.jsonl/30267
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 4021, 14597, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 15710, 1669, 1614, 7121, 764, 741, 197, 9196, 1669, 609, 2528, 65842, 515, 197, 197, 26456, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTx_HasDataOutputs(t *testing.T) { t.Parallel() t.Run("has data outputs", func(t *testing.T) { tx := bt.NewTx() assert.NotNil(t, tx) err := tx.From( "3c8edde27cb9a9132c22038dac4391496be9db16fd21351565cc1006966fdad5", 0, "76a914eb0bd5edba389198e73f8efabddfc61666969ff788ac", 2000000) assert.NoError(t, err) err = tx.PayTo("n2wmGVP89x3DsLNqk3NvctfQy9m9pvt7mk", 1999942) assert.NoError(t, err) // Add op return data type OpReturnData [][]byte ops := OpReturnData{[]byte("prefix1"), []byte("example data"), []byte{0x13, 0x37}} var out *bt.Output out, err = bt.NewOpReturnPartsOutput(ops) assert.NoError(t, err) tx.AddOutput(out) var wif *bsvutil.WIF wif, err = bsvutil.DecodeWIF("KznvCNc6Yf4iztSThoMH6oHWzH9EgjfodKxmeuUGPq5DEX5maspS") assert.NoError(t, err) assert.NotNil(t, wif) _, err = tx.SignAuto(&bt.InternalSigner{PrivateKey: wif.PrivKey, SigHashFlag: 0}) assert.NoError(t, err) assert.Equal(t, true, tx.HasDataOutputs()) }) t.Run("no data outputs", func(t *testing.T) { tx := bt.NewTx() assert.NotNil(t, tx) err := tx.From( "3c8edde27cb9a9132c22038dac4391496be9db16fd21351565cc1006966fdad5", 0, "76a914eb0bd5edba389198e73f8efabddfc61666969ff788ac", 2000000) assert.NoError(t, err) err = tx.PayTo("n2wmGVP89x3DsLNqk3NvctfQy9m9pvt7mk", 1999942) assert.NoError(t, err) var wif *bsvutil.WIF wif, err = bsvutil.DecodeWIF("KznvCNc6Yf4iztSThoMH6oHWzH9EgjfodKxmeuUGPq5DEX5maspS") assert.NoError(t, err) assert.NotNil(t, wif) _, err = tx.SignAuto(&bt.InternalSigner{PrivateKey: wif.PrivKey, SigHashFlag: 0}) assert.NoError(t, err) assert.Equal(t, false, tx.HasDataOutputs()) }) }
explode_data.jsonl/28747
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 865 }
[ 2830, 3393, 31584, 2039, 300, 1043, 61438, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 16708, 445, 4648, 821, 16275, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 46237, 1669, 19592, 7121, 31584, 741, 197, 6948, 93882, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileHelper_GetPathFromPathFileName_11(t *testing.T) { fh := FileHelper{} commonDir := fh.AdjustPathSlash("../../../../") expectedDir := fh.AdjustPathSlash("../../../../") result, isEmpty, err := fh.GetPathFromPathFileName(commonDir) if err != nil { t.Errorf("Error returned from fh.GetPathFromPathFileName(commonDir).\n"+ "commonDir='%v'\nError='%v'\n", commonDir, err.Error()) return } if false != isEmpty { t.Errorf("Expected GetPathFromPathFileName isEmpty=='%v'.\n"+ "Instead, isEmpty='%v'\n", false, isEmpty) } if result != expectedDir { t.Errorf("Expected GetPathFromPathFileName to return path == '%v' for valid path/file "+ "name\n"+ "Instead return path == '%v'\n", expectedDir, result) } }
explode_data.jsonl/14469
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 321 }
[ 2830, 3393, 1703, 5511, 13614, 1820, 3830, 1820, 10903, 62, 16, 16, 1155, 353, 8840, 836, 8, 341, 220, 36075, 1669, 2887, 5511, 31483, 220, 4185, 6184, 1669, 36075, 17865, 4250, 1820, 88004, 36800, 2748, 5130, 220, 3601, 6184, 1669, 360...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDoubleDataPoint_StartTime(t *testing.T) { ms := NewDoubleDataPoint() ms.InitEmpty() assert.EqualValues(t, TimestampUnixNano(0), ms.StartTime()) testValStartTime := TimestampUnixNano(1234567890) ms.SetStartTime(testValStartTime) assert.EqualValues(t, testValStartTime, ms.StartTime()) }
explode_data.jsonl/19540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 7378, 1043, 2609, 38056, 1462, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 7378, 1043, 2609, 741, 47691, 26849, 3522, 741, 6948, 12808, 6227, 1155, 11, 32758, 55832, 83819, 7, 15, 701, 9829, 12101, 1462, 2398, 18185, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInstallRelease_KubeVersion(t *testing.T) { c := helm.NewContext() rs := rsFixture() // TODO: Refactor this into a mock. req := &services.InstallReleaseRequest{ Chart: &chart.Chart{ Metadata: &chart.Metadata{Name: "hello", KubeVersion: ">=0.0.0"}, Templates: []*chart.Template{ {Name: "templates/hello", Data: []byte("hello: world")}, {Name: "templates/hooks", Data: []byte(manifestWithHook)}, }, }, } _, err := rs.InstallRelease(c, req) fmt.Println(err) if err != nil { t.Fatalf("Expected valid range. Got %q", err) } }
explode_data.jsonl/45614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 24690, 16077, 10102, 3760, 5637, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 33765, 7121, 1972, 741, 41231, 1669, 10036, 18930, 2822, 197, 322, 5343, 25, 8550, 5621, 419, 1119, 264, 7860, 624, 24395, 1669, 609, 12779, 71207, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRepository_Update(t *testing.T) { updateStmt := `UPDATE public\.applications SET name = \?, description = \?, status_condition = \?, status_timestamp = \?, healthcheck_url = \?, integration_system_id = \? WHERE tenant_id = \? AND id = \?` t.Run("Success", func(t *testing.T) { // given appModel := fixDetailedModelApplication(t, givenID(), givenTenant(), "Test app", "Test app description") appEntity := fixDetailedEntityApplication(t, givenID(), givenTenant(), "Test app", "Test app description") mockConverter := &automock.EntityConverter{} mockConverter.On("ToEntity", appModel).Return(appEntity, nil).Once() defer mockConverter.AssertExpectations(t) db, dbMock := testdb.MockDatabase(t) defer dbMock.AssertExpectations(t) dbMock.ExpectExec(updateStmt). WithArgs(appModel.Name, appModel.Description, appModel.Status.Condition, appModel.Status.Timestamp, appModel.HealthCheckURL, appModel.IntegrationSystemID, givenTenant(), givenID()). WillReturnResult(sqlmock.NewResult(-1, 1)) ctx := persistence.SaveToContext(context.TODO(), db) repo := application.NewRepository(mockConverter) // when err := repo.Update(ctx, appModel) // then assert.NoError(t, err) }) t.Run("DB Error", func(t *testing.T) { // given appModel := fixDetailedModelApplication(t, givenID(), givenTenant(), "Test app", "Test app description") appEntity := fixDetailedEntityApplication(t, givenID(), givenTenant(), "Test app", "Test app description") mockConverter := &automock.EntityConverter{} mockConverter.On("ToEntity", appModel).Return(appEntity, nil).Once() defer mockConverter.AssertExpectations(t) db, dbMock := testdb.MockDatabase(t) defer dbMock.AssertExpectations(t) dbMock.ExpectExec(updateStmt). 
WillReturnError(givenError()) ctx := persistence.SaveToContext(context.TODO(), db) repo := application.NewRepository(mockConverter) // when err := repo.Update(ctx, appModel) // then require.EqualError(t, err, "while updating single entity: some error") }) t.Run("Converter Error", func(t *testing.T) { // given appModel := fixDetailedModelApplication(t, givenID(), givenTenant(), "Test app", "Test app description") mockConverter := &automock.EntityConverter{} mockConverter.On("ToEntity", appModel).Return(&application.Entity{}, givenError()) defer mockConverter.AssertExpectations(t) repo := application.NewRepository(mockConverter) // when err := repo.Update(context.TODO(), appModel) // then require.EqualError(t, err, "while converting to Application entity: some error") }) }
explode_data.jsonl/52674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 906 }
[ 2830, 3393, 4624, 47393, 1155, 353, 8840, 836, 8, 341, 27175, 31063, 1669, 1565, 9239, 584, 18831, 82453, 9019, 829, 284, 1124, 12622, 4008, 284, 1124, 12622, 2639, 27656, 284, 1124, 12622, 2639, 23073, 284, 1124, 12622, 2820, 2028, 2903,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatchingPort(t *testing.T) { tests := []struct { name string sp *servicePort targetPort backendPort expected bool }{ { name: "svc-port", sp: &servicePort{ Port: 80, TargetPort: &backendPort{value: 5000}, }, targetPort: backendPort{value: 80}, expected: true, }, { name: "svc-name", sp: &servicePort{ Name: "web", Port: 80, TargetPort: &backendPort{value: 5000}, }, targetPort: backendPort{value: "web"}, expected: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := tt.sp.matchingPort(tt.targetPort); got != tt.expected { t.Errorf("matchingPort: %v, expected: %v", got, tt.expected) } }) } }
explode_data.jsonl/60359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 367 }
[ 2830, 3393, 64430, 7084, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 41378, 260, 353, 7936, 7084, 198, 197, 28861, 7084, 19163, 7084, 198, 197, 42400, 256, 1807, 198, 197, 59403, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdate(t *testing.T) { storage, _, server := newStorage(t) defer server.Terminate(t) defer storage.Store.DestroyFunc() test := registrytest.New(t, storage.Store).AllowCreateOnUpdate() test.TestUpdate( // valid validService(), // updateFunc func(obj runtime.Object) runtime.Object { object := obj.(*api.Service) object.Spec = api.ServiceSpec{ Selector: map[string]string{"bar": "baz2"}, ClusterIP: "None", SessionAffinity: api.ServiceAffinityNone, Type: api.ServiceTypeClusterIP, Ports: []api.ServicePort{{ Port: 6502, Protocol: api.ProtocolTCP, TargetPort: intstr.FromInt(6502), }}, } return object }, ) }
explode_data.jsonl/69210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 4289, 1155, 353, 8840, 836, 8, 341, 197, 16172, 11, 8358, 3538, 1669, 501, 5793, 1155, 340, 16867, 3538, 836, 261, 34016, 1155, 340, 16867, 5819, 38047, 57011, 9626, 741, 18185, 1669, 19424, 1944, 7121, 1155, 11, 5819, 38047...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInitialBoardAdvanceNoWinner(t *testing.T) { b := InitialBoard() w, _ := b.Advance() assert.Equal(t, w, (Winner)(NoWinner)) assert.Equal(t, b.Grid[0][8], (Cell)(P1Tail)) assert.Equal(t, b.Grid[15][8], (Cell)(P2Tail)) assert.Equal(t, b.Grid[1][8], (Cell)(P1Head)) assert.Equal(t, b.Grid[14][8], (Cell)(P2Head)) }
explode_data.jsonl/7799
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 6341, 11932, 95027, 2753, 50955, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 4127, 11932, 2822, 6692, 11, 716, 1669, 293, 17865, 85, 681, 2822, 6948, 12808, 1155, 11, 289, 11, 320, 50955, 2376, 2753, 50955, 4390, 6948, 12808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetExpired(t *testing.T) { cache := NewCache(interval) cache.Set(key(10), value(20), interval) time.Sleep(2 * time.Second) _, ok := cache.Get(key(10)) assert.False(t, ok) }
explode_data.jsonl/8061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 1949, 54349, 1155, 353, 8840, 836, 8, 341, 52680, 1669, 1532, 8233, 55098, 692, 52680, 4202, 4857, 7, 16, 15, 701, 897, 7, 17, 15, 701, 9873, 692, 21957, 31586, 7, 17, 353, 882, 32435, 692, 197, 6878, 5394, 1669, 6500, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDetectLocalByInterfaceNamePrefix(t *testing.T) { cases := []struct { ifacePrefix string chain string args []string expectedJumpIfOutput []string expectedJumpIfNotOutput []string }{ { ifacePrefix: "eth0", expectedJumpIfOutput: []string{"-i", "eth0+"}, expectedJumpIfNotOutput: []string{"!", "-i", "eth0+"}, }, } for _, c := range cases { localDetector, err := NewDetectLocalByInterfaceNamePrefix(c.ifacePrefix) if err != nil { t.Errorf("Error initializing localDetector: %v", err) continue } if !localDetector.IsImplemented() { t.Error("DetectLocalByInterfaceNamePrefix returns false for IsImplemented") } ifLocal := localDetector.IfLocal() ifNotLocal := localDetector.IfNotLocal() if !reflect.DeepEqual(ifLocal, c.expectedJumpIfOutput) { t.Errorf("IfLocal, expected: '%v', but got: '%v'", c.expectedJumpIfOutput, ifLocal) } if !reflect.DeepEqual(ifNotLocal, c.expectedJumpIfNotOutput) { t.Errorf("IfNotLocal, expected: '%v', but got: '%v'", c.expectedJumpIfNotOutput, ifNotLocal) } } }
explode_data.jsonl/10307
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 475 }
[ 2830, 3393, 57193, 7319, 1359, 5051, 675, 14335, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 743, 578, 14335, 1797, 914, 198, 197, 197, 8819, 4293, 914, 198, 197, 31215, 503, 3056, 917, 198, 197, 42400, 33979...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestEth_GetFilterChanges_WrongID(t *testing.T) { req, err := json.Marshal(createRequest("eth_getFilterChanges", []string{"0x1122334400000077"})) require.NoError(t, err) var rpcRes *Response time.Sleep(1 * time.Second) /* #nosec */ res, err := http.Post(HOST, "application/json", bytes.NewBuffer(req)) require.NoError(t, err) decoder := json.NewDecoder(res.Body) rpcRes = new(Response) err = decoder.Decode(&rpcRes) require.NoError(t, err) err = res.Body.Close() require.NoError(t, err) require.NotNil(t, "invalid filter ID", rpcRes.Error.Message) }
explode_data.jsonl/854
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 223 }
[ 2830, 3393, 65390, 13614, 5632, 11317, 2763, 14347, 915, 1155, 353, 8840, 836, 8, 341, 24395, 11, 1848, 1669, 2951, 37271, 32602, 1900, 445, 769, 3062, 5632, 11317, 497, 3056, 917, 4913, 15, 87, 16, 16, 17, 17, 18, 18, 19, 19, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnvVarsFrom(t *testing.T) { d, err := ioutil.TempDir("", "secrets") assert.NoError(t, err) defer os.RemoveAll(d) secretsManager := createSecrets(t, d) tests := []struct { name string envFrom v1.EnvFromSource options CtrSpecGenOptions succeed bool expected map[string]string }{ { "ConfigMapExists", v1.EnvFromSource{ ConfigMapRef: &v1.ConfigMapEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "foo", }, }, }, CtrSpecGenOptions{ ConfigMaps: configMapList, }, true, map[string]string{ "myvar": "foo", }, }, { "ConfigMapDoesNotExist", v1.EnvFromSource{ ConfigMapRef: &v1.ConfigMapEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "doesnotexist", }, }, }, CtrSpecGenOptions{ ConfigMaps: configMapList, }, false, nil, }, { "OptionalConfigMapDoesNotExist", v1.EnvFromSource{ ConfigMapRef: &v1.ConfigMapEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "doesnotexist", }, Optional: &optional, }, }, CtrSpecGenOptions{ ConfigMaps: configMapList, }, true, map[string]string{}, }, { "EmptyConfigMapList", v1.EnvFromSource{ ConfigMapRef: &v1.ConfigMapEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "foo", }, }, }, CtrSpecGenOptions{ ConfigMaps: []v1.ConfigMap{}, }, false, nil, }, { "OptionalEmptyConfigMapList", v1.EnvFromSource{ ConfigMapRef: &v1.ConfigMapEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "foo", }, Optional: &optional, }, }, CtrSpecGenOptions{ ConfigMaps: []v1.ConfigMap{}, }, true, map[string]string{}, }, { "SecretExists", v1.EnvFromSource{ SecretRef: &v1.SecretEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "foo", }, }, }, CtrSpecGenOptions{ SecretsManager: secretsManager, }, true, map[string]string{ "myvar": "foo", }, }, { "SecretDoesNotExist", v1.EnvFromSource{ SecretRef: &v1.SecretEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "doesnotexist", }, }, }, CtrSpecGenOptions{ SecretsManager: secretsManager, }, false, nil, }, { "OptionalSecretDoesNotExist", 
v1.EnvFromSource{ SecretRef: &v1.SecretEnvSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "doesnotexist", }, Optional: &optional, }, }, CtrSpecGenOptions{ SecretsManager: secretsManager, }, true, map[string]string{}, }, } for _, test := range tests { test := test t.Run(test.name, func(t *testing.T) { result, err := envVarsFrom(test.envFrom, &test.options) assert.Equal(t, err == nil, test.succeed) assert.Equal(t, test.expected, result) }) } }
explode_data.jsonl/2872
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1425 }
[ 2830, 3393, 14359, 28305, 3830, 1155, 353, 8840, 836, 8, 341, 2698, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 325, 52710, 1138, 6948, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 1500, 340, 84686, 52710, 2043, 1669, 1855, 19773, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetMethodSignatures_Execute covers the GetMethodSignatures use case:
// signature lookup by method name, the special constructor name, an empty
// result for an unknown method, error propagation from the contract lookup,
// and a data-corruption error for an unparsable ABI.
func TestGetMethodSignatures_Execute(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	ctx := context.Background()

	mockGetContractUC := mocks2.NewMockGetContractUseCase(ctrl)
	usecase := NewGetMethodSignaturesUseCase(mockGetContractUC)

	t.Run("should execute use case successfully", func(t *testing.T) {
		contract := testutils.FakeContract()
		mockGetContractUC.EXPECT().Execute(gomock.Any(), contract.Name, contract.Tag).Return(contract, nil)

		signatures, err := usecase.Execute(ctx, contract.Name, contract.Tag, "transfer")

		assert.NoError(t, err)
		assert.Equal(t, signatures[0], "transfer(address,uint256)")
	})

	t.Run("should execute use case successfully if method name is constructor", func(t *testing.T) {
		contract := testutils.FakeContract()
		mockGetContractUC.EXPECT().Execute(gomock.Any(), contract.Name, contract.Tag).Return(contract, nil)

		signatures, err := usecase.Execute(ctx, contract.Name, contract.Tag, constructorMethodName)

		assert.NoError(t, err)
		assert.Equal(t, signatures[0], "constructor")
	})

	t.Run("should execute use case successfully and return an empty array if nothing is found", func(t *testing.T) {
		contract := testutils.FakeContract()
		mockGetContractUC.EXPECT().Execute(gomock.Any(), contract.Name, contract.Tag).Return(contract, nil)

		signatures, err := usecase.Execute(ctx, contract.Name, contract.Tag, "inexistentMethod")

		assert.NoError(t, err)
		assert.Empty(t, signatures)
	})

	t.Run("should fail with same error if get contract fails", func(t *testing.T) {
		contract := testutils.FakeContract()
		expectedErr := fmt.Errorf("error")
		// Lookup fails: the use case must wrap the error with its component.
		mockGetContractUC.EXPECT().Execute(gomock.Any(), contract.Name, contract.Tag).Return(nil, expectedErr)

		signatures, err := usecase.Execute(ctx, contract.Name, contract.Tag, constructorMethodName)

		assert.Nil(t, signatures)
		assert.Equal(t, errors.FromError(expectedErr).ExtendComponent(getMethodSignaturesComponent), err)
	})

	t.Run("should fail with DataCorruptedError if fails to get the ABI", func(t *testing.T) {
		contract := testutils.FakeContract()
		// Corrupt the stored ABI so parsing it fails.
		contract.ABI = "wrongABI"
		mockGetContractUC.EXPECT().Execute(gomock.Any(), contract.Name, contract.Tag).Return(contract, nil)

		signatures, err := usecase.Execute(ctx, contract.Name, contract.Tag, constructorMethodName)

		assert.Nil(t, signatures)
		assert.True(t, errors.IsDataCorruptedError(err))
	})
}
explode_data.jsonl/9604
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 794 }
[ 2830, 3393, 85073, 7264, 2789, 83453, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 741, 20985, 1669, 2266, 19047, 2822, 77333, 1949, 14067, 5459, 1669, 68909, 17, 7121, 11571,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStateFullRoundNil(t *testing.T) { cs, vss := randState(1) height, round := cs.Height, cs.Round voteCh := subscribeUnBuffered(cs.eventBus, types.EventQueryVote) cs.enterPrevote(height, round) cs.startRoutines(4) ensurePrevote(voteCh, height, round) // prevote ensurePrecommit(voteCh, height, round) // precommit // should prevote and precommit nil validatePrevoteAndPrecommit(t, cs, round, -1, vss[0], nil, nil) }
explode_data.jsonl/81646
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 1397, 9432, 27497, 19064, 1155, 353, 8840, 836, 8, 341, 71899, 11, 348, 778, 1669, 10382, 1397, 7, 16, 340, 30500, 11, 4778, 1669, 10532, 17743, 11, 10532, 37646, 271, 5195, 1272, 1143, 1669, 17963, 1806, 4095, 291, 41153, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseSdp2LogicContext(t *testing.T) { ctx, err := ParseSdp2LogicContext([]byte(goldenSdp)) assert.Equal(t, nil, err) assert.Equal(t, true, ctx.hasAudio) assert.Equal(t, true, ctx.hasVideo) assert.Equal(t, 44100, ctx.AudioClockRate) assert.Equal(t, 90000, ctx.VideoClockRate) assert.Equal(t, true, ctx.IsAudioPayloadTypeOrigin(97)) assert.Equal(t, true, ctx.IsVideoPayloadTypeOrigin(96)) assert.Equal(t, base.AvPacketPtAac, ctx.GetAudioPayloadTypeBase()) assert.Equal(t, base.AvPacketPtAvc, ctx.GetVideoPayloadTypeBase()) assert.Equal(t, "streamid=1", ctx.audioAControl) assert.Equal(t, "streamid=0", ctx.videoAControl) assert.IsNotNil(t, ctx.Asc) assert.Equal(t, nil, ctx.Vps) assert.IsNotNil(t, ctx.Sps) assert.IsNotNil(t, ctx.Pps) }
explode_data.jsonl/55597
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 14463, 50, 9796, 17, 26751, 1972, 1155, 353, 8840, 836, 8, 341, 20985, 11, 1848, 1669, 14775, 50, 9796, 17, 26751, 1972, 10556, 3782, 3268, 813, 268, 50, 9796, 1171, 6948, 12808, 1155, 11, 2092, 11, 1848, 340, 6948, 12808,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestYearly(t *testing.T) { re := parseRe("[] bla (every 2.5.)") assert.NotNil(t, re) assert.Equal(t, moment.RecurYearly, re.Recurrence) assert.Equal(t, 2, re.RefDate.Time.Day()) assert.Equal(t, time.May, re.RefDate.Time.Month()) }
explode_data.jsonl/67517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 9490, 398, 1155, 353, 8840, 836, 8, 341, 17200, 1669, 4715, 693, 445, 1294, 84541, 320, 29015, 220, 17, 13, 20, 6138, 1138, 6948, 93882, 1155, 11, 312, 340, 6948, 12808, 1155, 11, 4445, 2817, 2352, 9490, 398, 11, 312, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAssessRunStatusErrorMessageFromProvider(t *testing.T) { providerMessage := "Provider Error" status, message, dryRunSummary := StartAssessRunStatusErrorMessageFromProvider(t, providerMessage, false) expectedMessage := fmt.Sprintf("Metric \"failed-metric\" assessed Failed due to failed (1) > failureLimit (0): \"Error Message: %s\"", providerMessage) assert.Equal(t, v1alpha1.AnalysisPhaseFailed, status) assert.Equal(t, expectedMessage, message) expectedDryRunSummary := v1alpha1.RunSummary{ Count: 0, Successful: 0, Failed: 0, Inconclusive: 0, Error: 0, } assert.Equal(t, &expectedDryRunSummary, dryRunSummary) }
explode_data.jsonl/75844
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 5615, 433, 6727, 2522, 21349, 3830, 5179, 1155, 353, 8840, 836, 8, 341, 197, 19979, 2052, 1669, 330, 5179, 4600, 698, 23847, 11, 1943, 11, 9058, 6727, 19237, 1669, 5145, 5615, 433, 6727, 2522, 21349, 3830, 5179, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetNodeList(t *testing.T) { var err error wizard.ClearCurrentWizardData() wizardData := wizard.GetCurrentWizard() wizardData.Nodes = []*wizard.Node{ { Name: "master1", Description: "desc1", MachineRoles: []constant.MachineRole{constant.MachineRoleMaster, constant.MachineRoleEtcd}, Labels: []*wizard.Label{ { Key: "kpaas.io/test", Value: "yes", }, }, Taints: []*wizard.Taint{ { Key: "taint1", Value: "taint-value", Effect: wizard.TaintEffectNoExecute, }, }, DockerRootDirectory: "/mnt/docker", ConnectionData: wizard.ConnectionData{ IP: "192.168.31.140", Port: 22, Username: "kpaas", AuthenticationType: wizard.AuthenticationTypePassword, Password: "123456", }, }, } resp := httptest.NewRecorder() gin.SetMode(gin.TestMode) ctx, _ := gin.CreateTestContext(resp) ctx.Request = httptest.NewRequest("GET", "/api/v1/deploy/wizard/nodes", nil) GetNodeList(ctx) resp.Flush() assert.True(t, resp.Body.Len() > 0) fmt.Printf("result: %s\n", resp.Body.String()) responseData := new(api.GetNodeListResponse) err = json.Unmarshal(resp.Body.Bytes(), responseData) assert.Nil(t, err) assert.Equal(t, []api.NodeData{ { NodeBaseData: api.NodeBaseData{ Name: "master1", Description: "desc1", MachineRoles: []constant.MachineRole{constant.MachineRoleMaster, constant.MachineRoleEtcd}, Labels: []api.Label{ { Key: "kpaas.io/test", Value: "yes", }, }, Taints: []api.Taint{ { Key: "taint1", Value: "taint-value", Effect: api.TaintEffectNoExecute, }, }, DockerRootDirectory: "/mnt/docker", }, ConnectionData: api.ConnectionData{ IP: "192.168.31.140", Port: 22, SSHLoginData: api.SSHLoginData{ Username: "kpaas", AuthenticationType: api.AuthenticationTypePassword, Password: "", }, }, }, }, responseData.Nodes) }
explode_data.jsonl/38248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 995 }
[ 2830, 3393, 1949, 1955, 852, 1155, 353, 8840, 836, 8, 1476, 2405, 1848, 1465, 198, 6692, 13722, 13524, 5405, 63094, 1043, 741, 6692, 13722, 1043, 1669, 33968, 44242, 63094, 741, 6692, 13722, 1043, 52184, 284, 29838, 58593, 21714, 515, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWatch(t *testing.T) { storage, server := newStorage(t) defer server.Terminate(t) defer storage.Job.Store.DestroyFunc() test := genericregistrytest.New(t, storage.Job.Store) test.TestWatch( validNewJob(), // matching labels []labels.Set{}, // not matching labels []labels.Set{ {"x": "y"}, }, // matching fields []fields.Set{}, // not matching fields []fields.Set{ {"metadata.name": "xyz"}, {"name": "foo"}, }, ) }
explode_data.jsonl/53321
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 14247, 1155, 353, 8840, 836, 8, 341, 197, 16172, 11, 3538, 1669, 501, 5793, 1155, 340, 16867, 3538, 836, 261, 34016, 1155, 340, 16867, 5819, 45293, 38047, 57011, 9626, 741, 18185, 1669, 13954, 29172, 1944, 7121, 1155, 11, 58...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_addToArrayForm(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if gotResult := addToArrayForm(tt.args.A, tt.args.K); !reflect.DeepEqual(gotResult, tt.wantResult) { t.Errorf("addToArrayForm() = %v, want %v", gotResult, tt.wantResult) } }) } }
explode_data.jsonl/24507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 2891, 29512, 1838, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 7032, 341, 197, 3244, 16708, 47152, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 743, 2684, 2077, 1669, 912, 29512, 1838, 47152, 16365, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFindInBatches(t *testing.T) { var users = []User{ *GetUser("find_in_batches", Config{}), *GetUser("find_in_batches", Config{}), *GetUser("find_in_batches", Config{}), *GetUser("find_in_batches", Config{}), *GetUser("find_in_batches", Config{}), *GetUser("find_in_batches", Config{}), } DB.Create(&users) var ( results []User totalBatch int ) if result := DB.Where("name = ?", users[0].Name).FindInBatches(&results, 2, func(tx *gorm.DB, batch int) error { totalBatch += batch if tx.RowsAffected != 2 { t.Errorf("Incorrect affected rows, expects: 2, got %v", tx.RowsAffected) } if len(results) != 2 { t.Errorf("Incorrect users length, expects: 2, got %v", len(results)) } return nil }); result.Error != nil || result.RowsAffected != 6 { t.Errorf("Failed to batch find, got error %v, rows affected: %v", result.Error, result.RowsAffected) } if totalBatch != 6 { t.Errorf("incorrect total batch, expects: %v, got %v", 6, totalBatch) } }
explode_data.jsonl/48700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 9885, 641, 33, 9118, 1155, 353, 8840, 836, 8, 341, 2405, 3847, 284, 3056, 1474, 515, 197, 197, 9, 1949, 1474, 445, 3903, 1243, 57755, 497, 5532, 6257, 1326, 197, 197, 9, 1949, 1474, 445, 3903, 1243, 57755, 497, 5532, 625...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTracesReceiverStart(t *testing.T) { c := kafkaTracesConsumer{ nextConsumer: consumertest.NewNop(), logger: zap.NewNop(), consumerGroup: &testConsumerGroup{}, } require.NoError(t, c.Start(context.Background(), nil)) require.NoError(t, c.Shutdown(context.Background())) }
explode_data.jsonl/79391
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 1282, 2434, 25436, 3479, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 67852, 1282, 2434, 29968, 515, 197, 28144, 29968, 25, 220, 4662, 83386, 7121, 45, 453, 3148, 197, 17060, 25, 286, 32978, 7121, 45, 453, 3148, 197, 37203, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFuzz(t *testing.T) { maxTestSize := 16384 for i := 0; i < 20000; i++ { minSize := 1024 maxSize := rand.Intn(maxTestSize-minSize) + minSize p := New(minSize, maxSize) bufSize := rand.Intn(maxTestSize) buf := p.Get(bufSize) if len(*buf) != bufSize { t.Fatalf("Invalid length %d, expected %d", len(*buf), bufSize) } sPool := p.findPool(bufSize) if sPool == nil { if cap(*buf) != len(*buf) { t.Fatalf("Invalid cap %d, expected %d", cap(*buf), len(*buf)) } } else { if cap(*buf) != sPool.size { t.Fatalf("Invalid cap %d, expected %d", cap(*buf), sPool.size) } } p.Put(buf) } }
explode_data.jsonl/47641
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 37, 8889, 1155, 353, 8840, 836, 8, 341, 22543, 2271, 1695, 1669, 220, 16, 21, 18, 23, 19, 198, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 17, 15, 15, 15, 15, 26, 600, 1027, 1476, 197, 25320, 1695, 1669, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidate_lte(t *testing.T) { assert := assert.New(t) type User struct { Age *int64 `validate:"lte=2"` } assert.NoError( v.Validate(User{}, valis.EachFields(tagrule.Validate)), ) assert.NoError( v.Validate(User{Age: henge.ToIntPtr(1)}, valis.EachFields(tagrule.Validate)), ) assert.NoError( v.Validate(User{Age: henge.ToIntPtr(2)}, valis.EachFields(tagrule.Validate)), ) assert.EqualError( v.Validate(User{Age: henge.ToIntPtr(20)}, valis.EachFields(tagrule.Validate)), "(lte) .Age must be less than or equal to 2", ) }
explode_data.jsonl/17250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 241 }
[ 2830, 3393, 17926, 907, 665, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 13158, 2657, 2036, 341, 197, 197, 16749, 353, 396, 21, 19, 1565, 7067, 2974, 48791, 28, 17, 8805, 197, 532, 6948, 35699, 1006, 197, 5195, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadChallengeTx_doesVerifyHomeDomainFailure(t *testing.T) { serverKP := newKeypair0() clientKP := newKeypair1() txSource := NewSimpleAccount(serverKP.Address(), -1) op := ManageData{ SourceAccount: clientKP.Address(), Name: "testanchor.stellar.org auth", Value: []byte(base64.StdEncoding.EncodeToString(make([]byte, 48))), } webAuthDomainOp := ManageData{ SourceAccount: serverKP.Address(), Name: "web_auth_domain", Value: []byte("testwebauth.stellar.org"), } tx, err := NewTransaction( TransactionParams{ SourceAccount: &txSource, IncrementSequenceNum: true, Operations: []Operation{&op, &webAuthDomainOp}, BaseFee: MinBaseFee, Timebounds: NewTimeout(1000), }, ) assert.NoError(t, err) tx, err = tx.Sign(network.TestNetworkPassphrase, serverKP) assert.NoError(t, err) tx64, err := tx.Base64() require.NoError(t, err) _, _, _, err = ReadChallengeTx(tx64, serverKP.Address(), network.TestNetworkPassphrase, "testwebauth.stellar.org", []string{"willfail"}) assert.EqualError(t, err, "operation key does not match any homeDomains passed (key=\"testanchor.stellar.org auth\", homeDomains=[willfail])") }
explode_data.jsonl/20716
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 507 }
[ 2830, 3393, 4418, 62078, 31584, 96374, 32627, 7623, 13636, 17507, 1155, 353, 8840, 836, 8, 341, 41057, 65036, 1669, 501, 6608, 1082, 1310, 15, 741, 25291, 65036, 1669, 501, 6608, 1082, 1310, 16, 741, 46237, 3608, 1669, 1532, 16374, 7365, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMockGrossAmountRemittanceDocument(t *testing.T) { gard := mockGrossAmountRemittanceDocument() require.NoError(t, gard.Validate(), "mockGrossAmountRemittanceDocument does not validate and will break other tests") }
explode_data.jsonl/41404
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 11571, 38, 2128, 10093, 6590, 87191, 7524, 1155, 353, 8840, 836, 8, 341, 3174, 567, 1669, 7860, 38, 2128, 10093, 6590, 87191, 7524, 2822, 17957, 35699, 1155, 11, 21881, 47667, 1507, 330, 16712, 38, 2128, 10093, 6590, 87191, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestV1EnvsnapResult_Print(t *testing.T) { sys := NewSystemResult() sys.OS = "testOS" v1 := NewV1EnvsnapResult() v1.System = sys out := bytes.Buffer{} v1.out = &out err := v1.Print("json") assert.NoError(t, err) assert.Equal(t, "{\"system\":{\"os\":\"testOS\"}}\n", out.String()) }
explode_data.jsonl/62961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 53, 16, 1702, 11562, 6861, 2077, 45788, 1155, 353, 8840, 836, 8, 341, 41709, 1669, 1532, 2320, 2077, 741, 41709, 57054, 284, 330, 1944, 3126, 1837, 5195, 16, 1669, 1532, 53, 16, 1702, 11562, 6861, 2077, 741, 5195, 16, 1662...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcileWithoutPVC(t *testing.T) { // TestReconcileWithoutPVC runs "Reconcile" on a PipelineRun that has two unrelated tasks. // It verifies that reconcile is successful and that no PVC is created ps := []*v1beta1.Pipeline{{ ObjectMeta: baseObjectMeta("test-pipeline", "foo"), Spec: v1beta1.PipelineSpec{ Tasks: []v1beta1.PipelineTask{ { Name: "hello-world-1", TaskRef: &v1beta1.TaskRef{ Name: "hello-world", }, }, { Name: "hello-world-2", TaskRef: &v1beta1.TaskRef{ Name: "hello-world", }, }, }, }, }} prs := []*v1beta1.PipelineRun{{ ObjectMeta: baseObjectMeta("test-pipeline-run", "foo"), Spec: v1beta1.PipelineRunSpec{ PipelineRef: &v1beta1.PipelineRef{Name: "test-pipeline"}, }, }} ts := []*v1beta1.Task{simpleHelloWorldTask} d := test.Data{ PipelineRuns: prs, Pipelines: ps, Tasks: ts, } prt := newPipelineRunTest(d, t) defer prt.Cancel() reconciledRun, clients := prt.reconcileRun("foo", "test-pipeline-run", []string{}, false) actions := clients.Pipeline.Actions() // Check that the expected TaskRun was created for _, a := range actions { if ca, ok := a.(ktesting.CreateAction); ok { obj := ca.GetObject() if pvc, ok := obj.(*corev1.PersistentVolumeClaim); ok { t.Errorf("Did not expect to see a PVC created when no resources are linked. %s was created", pvc) } } } if !reconciledRun.Status.GetCondition(apis.ConditionSucceeded).IsUnknown() { t.Errorf("Expected PipelineRun to be running, but condition status is %s", reconciledRun.Status.GetCondition(apis.ConditionSucceeded)) } }
explode_data.jsonl/68271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 697 }
[ 2830, 3393, 693, 40446, 457, 26040, 47, 11287, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 693, 40446, 457, 26040, 47, 11287, 8473, 330, 693, 40446, 457, 1, 389, 264, 40907, 6727, 429, 702, 1378, 45205, 9079, 624, 197, 322, 1084, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestConv1(t *testing.T) { var tests = []struct { arg string res string }{ {"hello\x41world\x42", "helloAworldB"}, {"80 \xe2\x86\x92 53347", "80 → 53347"}, {"hello\x41world\x42 foo \\000 bar", "helloAworldB foo \\000 bar"}, } for _, test := range tests { outs := format.TranslateHexCodes([]byte(test.arg)) assert.Equal(t, string(outs), test.res) } }
explode_data.jsonl/80386
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 34892, 16, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 47903, 914, 198, 197, 10202, 914, 198, 197, 59403, 197, 197, 4913, 14990, 3462, 19, 16, 14615, 3462, 19, 17, 497, 330, 14990, 32, 14615, 33,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEngine_ShouldCompactCache(t *testing.T) { nowTime := time.Now() e, err := NewEngine(inmem.IndexName) if err != nil { t.Fatal(err) } // mock the planner so compactions don't run during the test e.CompactionPlan = &mockPlanner{} e.SetEnabled(false) if err := e.Open(); err != nil { t.Fatalf("failed to open tsm1 engine: %s", err.Error()) } defer e.Close() e.CacheFlushMemorySizeThreshold = 1024 e.CacheFlushWriteColdDuration = time.Minute if e.ShouldCompactCache(nowTime) { t.Fatal("nothing written to cache, so should not compact") } if err := e.WritePointsString("m,k=v f=3i"); err != nil { t.Fatal(err) } if e.ShouldCompactCache(nowTime) { t.Fatal("cache size < flush threshold and nothing written to FileStore, so should not compact") } if !e.ShouldCompactCache(nowTime.Add(time.Hour)) { t.Fatal("last compaction was longer than flush write cold threshold, so should compact") } e.CacheFlushMemorySizeThreshold = 1 if !e.ShouldCompactCache(nowTime) { t.Fatal("cache size > flush threshold, so should compact") } }
explode_data.jsonl/28098
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 387 }
[ 2830, 3393, 4571, 36578, 616, 98335, 8233, 1155, 353, 8840, 836, 8, 341, 80922, 1462, 1669, 882, 13244, 2822, 7727, 11, 1848, 1669, 1532, 4571, 5900, 10536, 18338, 675, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestVTGateStreamExecute(t *testing.T) { ks := KsTestUnsharded shard := "0" createSandbox(ks) hcVTGateTest.Reset() sbc := hcVTGateTest.AddTestTablet("aa", "1.1.1.1", 1001, ks, shard, topodatapb.TabletType_MASTER, true, 1, nil) var qrs []*sqltypes.Result err := rpcVTGate.StreamExecute(context.Background(), "select id from t1", nil, "", topodatapb.TabletType_MASTER, executeOptions, func(r *sqltypes.Result) error { qrs = append(qrs, r) return nil }) if err != nil { t.Errorf("want nil, got %v", err) } want := []*sqltypes.Result{sandboxconn.SingleRowResult} if !reflect.DeepEqual(want, qrs) { t.Errorf("want \n%+v, got \n%+v", want, qrs) } if !proto.Equal(sbc.Options[0], executeOptions) { t.Errorf("got ExecuteOptions \n%+v, want \n%+v", sbc.Options[0], executeOptions) } }
explode_data.jsonl/7837
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 368 }
[ 2830, 3393, 20457, 42318, 3027, 17174, 1155, 353, 8840, 836, 8, 341, 197, 2787, 1669, 730, 82, 2271, 1806, 927, 20958, 198, 36196, 567, 1669, 330, 15, 698, 39263, 50, 31536, 7, 2787, 340, 9598, 66, 20457, 42318, 2271, 36660, 741, 1903...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAllocation_uploadOrUpdateFile(t *testing.T) { const ( mockFileRefName = "mock file ref name" mockLocalPath = "1.txt" mockActualHash = "4041e3eeb170751544a47af4e4f9d374e76cee1d" mockErrorHash = "1041e3eeb170751544a47af4e4f9d374e76cee1d" mockThumbnailPath = "thumbnail_alloc" ) var mockClient = mocks.HttpClient{} zboxutil.Client = &mockClient client := zclient.GetClient() client.Wallet = &zcncrypto.Wallet{ ClientID: mockClientId, ClientKey: mockClientKey, } setupHttpResponses := func(t *testing.T, testCaseName string, a *Allocation, hash string) (teardown func(t *testing.T)) { for i := 0; i < numBlobbers; i++ { var hash string if i < numBlobbers-1 { hash = mockErrorHash } frName := mockFileRefName + strconv.Itoa(i) url := "TestAllocation_uploadOrUpdateFile" + testCaseName + mockBlobberUrl + strconv.Itoa(i) a.Blobbers = append(a.Blobbers, &blockchain.StorageNode{ Baseurl: url, }) mockClient.On("Do", mock.MatchedBy(func(req *http.Request) bool { return strings.HasPrefix(req.URL.Path, url) })).Return(&http.Response{ StatusCode: http.StatusOK, Body: func(fileRefName, hash string) io.ReadCloser { jsonFR, err := json.Marshal(&fileref.FileRef{ ActualFileHash: hash, Ref: fileref.Ref{ Name: fileRefName, }, }) require.NoError(t, err) return ioutil.NopCloser(bytes.NewReader([]byte(jsonFR))) }(frName, hash), }, nil) } return nil } type parameters struct { localPath string remotePath string status StatusCallback isUpdate bool thumbnailPath string encryption bool isRepair bool attrs fileref.Attributes hash string } tests := []struct { name string setup func(*testing.T, string, *Allocation, string) (teardown func(*testing.T)) parameters parameters wantErr bool errMsg string }{ { name: "Test_Not_Initialize_Failed", setup: func(t *testing.T, testCaseName string, a *Allocation, hash string) (teardown func(t *testing.T)) { a.initialized = false return func(t *testing.T) { a.initialized = true } }, parameters: parameters{ localPath: mockLocalPath, remotePath: "/", isUpdate: 
false, thumbnailPath: "", encryption: false, isRepair: false, attrs: fileref.Attributes{}, }, wantErr: true, errMsg: "sdk_not_initialized: Please call InitStorageSDK Init and use GetAllocation to get the allocation object", }, { name: "Test_Thumbnail_File_Error_Success", setup: setupHttpResponses, parameters: parameters{ localPath: mockLocalPath, remotePath: "/", isUpdate: false, thumbnailPath: mockThumbnailPath, encryption: false, isRepair: false, attrs: fileref.Attributes{}, hash: mockActualHash, }, }, { name: "Test_Invalid_Remote_Abs_Path_Failed", setup: nil, parameters: parameters{ localPath: mockLocalPath, remotePath: "", isUpdate: false, thumbnailPath: "", encryption: false, isRepair: false, attrs: fileref.Attributes{}, }, wantErr: true, errMsg: "invalid_path: Path should be valid and absolute", }, { name: "Test_Repair_Remote_File_Not_Found_Failed", setup: nil, parameters: parameters{ localPath: mockLocalPath, remotePath: "/x.txt", isUpdate: false, thumbnailPath: "", encryption: false, isRepair: true, attrs: fileref.Attributes{}, }, wantErr: true, errMsg: "File not found for the given remotepath", }, { name: "Test_Repair_Content_Hash_Not_Matches_Failed", setup: setupHttpResponses, parameters: parameters{ localPath: mockLocalPath, remotePath: "/", isUpdate: false, thumbnailPath: "", encryption: false, isRepair: true, attrs: fileref.Attributes{}, hash: mockErrorHash, }, wantErr: true, errMsg: "Content hash doesn't match", }, { name: "Test_Upload_Success", setup: setupHttpResponses, parameters: parameters{ localPath: mockLocalPath, remotePath: "/", isUpdate: false, thumbnailPath: "", encryption: false, isRepair: false, attrs: fileref.Attributes{}, hash: mockActualHash, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { require := require.New(t) if teardown := setupMockFile(t, mockLocalPath); teardown != nil { defer teardown(t) } a := &Allocation{ DataShards: 2, ParityShards: 2, } setupMockAllocation(t, a) if tt.setup != nil { if teardown := 
tt.setup(t, tt.name, a, tt.parameters.hash); teardown != nil { defer teardown(t) } } err := a.uploadOrUpdateFile(tt.parameters.localPath, tt.parameters.remotePath, tt.parameters.status, tt.parameters.isUpdate, tt.parameters.thumbnailPath, tt.parameters.encryption, tt.parameters.isRepair, tt.parameters.attrs) require.EqualValues(tt.wantErr, err != nil) if err != nil { require.EqualValues(tt.errMsg, errors.Top(err)) return } require.NoErrorf(err, "Unexpected error %v", err) }) } }
explode_data.jsonl/4711
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2464 }
[ 2830, 3393, 78316, 21691, 56059, 1703, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 77333, 1703, 3945, 675, 256, 284, 330, 16712, 1034, 2053, 829, 698, 197, 77333, 7319, 1820, 257, 284, 330, 16, 3909, 698, 197, 77333, 28123, 6370, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRolling_deployRollingHooks verifies that RollingDeploymentStrategy.Deploy
// honors lifecycle-hook failure policies (pre- and post-hook positions of
// LifecycleHookFailurePolicyAbort): a failing hook must fail the deployment,
// while a succeeding hook must let it proceed. Replication controllers are
// served from an in-memory map via fake clientset reactors, the rolling
// updater itself is stubbed to a no-op, and hook execution is the only thing
// that can fail (hookError is toggled per test case). The initial strategy's
// deployFn calls t.Fatalf because deployment #2 must never take the
// initial-deployment path.
// NOTE(review): the four cases pair {pre-hook abort, post-hook abort} with
// {hook fails, hook succeeds}; deploymentShouldFail mirrors hookShouldFail.
func TestRolling_deployRollingHooks(t *testing.T) { config := appstest.OkDeploymentConfig(1) config.Spec.Strategy = appstest.OkRollingStrategy() latest, _ := appsutil.MakeTestOnlyInternalDeployment(config) var hookError error deployments := map[string]*kapi.ReplicationController{latest.Name: latest} client := &fake.Clientset{} client.AddReactor("get", "replicationcontrollers", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) { name := action.(clientgotesting.GetAction).GetName() return true, deployments[name], nil }) client.AddReactor("update", "replicationcontrollers", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) { updated := action.(clientgotesting.UpdateAction).GetObject().(*kapi.ReplicationController) return true, updated, nil }) strategy := &RollingDeploymentStrategy{ rcClient: client.Core(), eventClient: fake.NewSimpleClientset().Core(), initialStrategy: &testStrategy{ deployFn: func(from *kapi.ReplicationController, to *kapi.ReplicationController, desiredReplicas int, updateAcceptor strat.UpdateAcceptor) error { t.Fatalf("unexpected call to initial strategy") return nil }, }, rollingUpdate: func(config *kubectl.RollingUpdaterConfig) error { return nil }, hookExecutor: &hookExecutorImpl{ executeFunc: func(hook *appsapi.LifecycleHook, deployment *kapi.ReplicationController, suffix, label string) error { return hookError }, }, getUpdateAcceptor: getUpdateAcceptor, apiRetryPeriod: 1 * time.Millisecond, apiRetryTimeout: 10 * time.Millisecond, } cases := []struct { params *appsapi.RollingDeploymentStrategyParams hookShouldFail bool deploymentShouldFail bool }{ {rollingParams(appsapi.LifecycleHookFailurePolicyAbort, ""), true, true}, {rollingParams(appsapi.LifecycleHookFailurePolicyAbort, ""), false, false}, {rollingParams("", appsapi.LifecycleHookFailurePolicyAbort), true, true}, {rollingParams("", appsapi.LifecycleHookFailurePolicyAbort), false, false}, } for _, tc := range cases { config := 
appstest.OkDeploymentConfig(2) config.Spec.Strategy.RollingParams = tc.params deployment, _ := appsutil.MakeTestOnlyInternalDeployment(config) deployments[deployment.Name] = deployment hookError = nil if tc.hookShouldFail { hookError = fmt.Errorf("hook failure") } strategy.out, strategy.errOut = &bytes.Buffer{}, &bytes.Buffer{} err := strategy.Deploy(latest, deployment, 2) if err != nil && tc.deploymentShouldFail { t.Logf("got expected error: %v", err) } if err == nil && tc.deploymentShouldFail { t.Errorf("expected an error for case: %#v", tc) } if err != nil && !tc.deploymentShouldFail { t.Errorf("unexpected error for case: %#v: %v", tc, err) } } }
explode_data.jsonl/73037
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1002 }
[ 2830, 3393, 32355, 287, 91890, 32355, 287, 67769, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 906, 267, 477, 54282, 75286, 2648, 7, 16, 340, 25873, 36473, 27318, 10228, 284, 906, 267, 477, 54282, 32355, 287, 19816, 741, 197, 19350, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStoreMatches(t *testing.T) { defer leaktest.CheckTimeout(t, 10*time.Second)() cases := []struct { s Client mint, maxt int64 ms []storepb.LabelMatcher ok bool }{ { s: &testClient{labels: []storepb.Label{{"a", "b"}}}, ms: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "b", Value: "1"}, }, ok: true, }, { s: &testClient{minTime: 100, maxTime: 200}, mint: 201, maxt: 300, ok: false, }, { s: &testClient{minTime: 100, maxTime: 200}, mint: 200, maxt: 300, ok: true, }, { s: &testClient{minTime: 100, maxTime: 200}, mint: 50, maxt: 99, ok: false, }, { s: &testClient{minTime: 100, maxTime: 200}, mint: 50, maxt: 100, ok: true, }, { s: &testClient{labels: []storepb.Label{{"a", "b"}}}, ms: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "b"}, }, ok: true, }, { s: &testClient{labels: []storepb.Label{{"a", "b"}}}, ms: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_EQ, Name: "a", Value: "c"}, }, ok: false, }, { s: &testClient{labels: []storepb.Label{{"a", "b"}}}, ms: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_RE, Name: "a", Value: "b|c"}, }, ok: true, }, { s: &testClient{labels: []storepb.Label{{"a", "b"}}}, ms: []storepb.LabelMatcher{ {Type: storepb.LabelMatcher_NEQ, Name: "a", Value: ""}, }, ok: true, }, } for i, c := range cases { ok, err := storeMatches(c.s, c.mint, c.maxt, c.ms...) testutil.Ok(t, err) testutil.Assert(t, c.ok == ok, "test case %d failed", i) } }
explode_data.jsonl/24481
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 863 }
[ 2830, 3393, 6093, 42470, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 10600, 7636, 1155, 11, 220, 16, 15, 77053, 32435, 8, 2822, 1444, 2264, 1669, 3056, 1235, 341, 197, 1903, 688, 8423, 198, 197, 2109, 396, 11, 1932, 83, 526, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDatePtrValidationIssueValidation(t *testing.T) { type Test struct { LastViewed *time.Time Reminder *time.Time } test := &Test{} validate := New() errs := validate.Struct(test) Equal(t, errs, nil) }
explode_data.jsonl/77253
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 1916, 5348, 13799, 42006, 13799, 1155, 353, 8840, 836, 8, 1476, 13158, 3393, 2036, 341, 197, 197, 5842, 851, 291, 353, 1678, 16299, 198, 197, 197, 95359, 256, 353, 1678, 16299, 198, 197, 630, 18185, 1669, 609, 2271, 31483, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestScanZeroMode(t *testing.T) { src := makeSource("%s\n") str := src.String() s := new(Scanner).Init(src) s.Mode = 0 // don't recognize any token classes s.Whitespace = 0 // don't skip any whitespace tok := s.Scan() for i, ch := range str { if tok != ch { t.Fatalf("%d. tok = %s, want %s", i, TokenString(tok), TokenString(ch)) } tok = s.Scan() } if tok != EOF { t.Fatalf("tok = %s, want EOF", TokenString(tok)) } if s.ErrorCount != 0 { t.Errorf("%d errors", s.ErrorCount) } }
explode_data.jsonl/56530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 26570, 17999, 3636, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 1281, 3608, 4430, 82, 1699, 1138, 11355, 1669, 2286, 6431, 741, 1903, 1669, 501, 7, 31002, 568, 3803, 14705, 340, 1903, 42852, 284, 220, 15, 981, 442, 1513, 94...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestResponseChecksumVerification(t *testing.T) { t.Parallel() reply, _ := hex.DecodeString(testChallengeReplyFromInitiator) var wt WrapToken wt.Unmarshal(reply, false) replyOk, rErr := wt.Verify(getSessionKey(), initiatorSeal) assert.Nil(t, rErr, "Error occurred during checksum verification.") assert.True(t, replyOk, "Checksum verification failed.") }
explode_data.jsonl/54232
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 2582, 73190, 62339, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 86149, 11, 716, 1669, 12371, 56372, 703, 8623, 62078, 20841, 3830, 3803, 36122, 340, 2405, 40473, 42187, 3323, 198, 6692, 83, 38097, 51118, 11, 895, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIssue11405(t *testing.T) { testCases := []string{ "<root>", "<root><foo>", "<root><foo></foo>", } for _, tc := range testCases { d := NewDecoder(strings.NewReader(tc)) var err error for { _, err = d.Token() if err != nil { break } } if _, ok := err.(*SyntaxError); !ok { t.Errorf("%s: Token: Got error %v, want SyntaxError", tc, err) } } }
explode_data.jsonl/48630
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 185 }
[ 2830, 3393, 42006, 16, 16, 19, 15, 20, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 917, 515, 197, 197, 22476, 2888, 35452, 197, 197, 22476, 2888, 1784, 7975, 35452, 197, 197, 22476, 2888, 1784, 7975, 1472, 7975, 35452, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestMapValue exercises the mapify.Mapper.MapValue callback over both a
// struct and an equivalent map input: (1) mapping a value selected by element
// name, (2) mapping a value selected by its dotted path (".Field1"), and
// (3) propagating an error returned by the callback (MapAny must return nil
// plus an error that wraps/matches the callback's error via errors.Is).
// In the first two subtests only Field1 is rewritten to mappedValue; Field2
// must pass through unchanged via e.Interface().
func TestMapValue(t *testing.T) { mappedValue := "str" field2Value := 2 tests := map[string]interface{}{ "struct": struct{ Field1, Field2 int }{ Field1: 1, Field2: field2Value, }, "map": map[string]int{ "Field1": 1, "Field2": field2Value, }, } t.Run("should map struct field", func(t *testing.T) { for name, object := range tests { t.Run(name, func(t *testing.T) { mapper := mapify.Mapper{ MapValue: func(path string, e mapify.Element) (interface{}, error) { if e.Name() == "Field1" { return mappedValue, nil } return e.Interface(), nil }, } // when v, err := mapper.MapAny(object) // then require.NoError(t, err) expected := map[string]interface{}{ "Field1": mappedValue, "Field2": field2Value, } assert.Equal(t, expected, v) }) } }) t.Run("should map struct field by path", func(t *testing.T) { for name, object := range tests { t.Run(name, func(t *testing.T) { mapper := mapify.Mapper{ MapValue: func(path string, e mapify.Element) (interface{}, error) { if path == ".Field1" { return mappedValue, nil } return e.Interface(), nil }, } // when v, err := mapper.MapAny(object) // then require.NoError(t, err) expected := map[string]interface{}{ "Field1": mappedValue, "Field2": field2Value, } assert.Equal(t, expected, v) }) } }) t.Run("should return error when MapValue returned error", func(t *testing.T) { tests := map[string]interface{}{ "struct": struct{ Field string }{}, "map": map[string]string{"Field": ""}, } for name, object := range tests { t.Run(name, func(t *testing.T) { givenError := stringError("err") mapper := mapify.Mapper{ MapValue: func(path string, e mapify.Element) (interface{}, error) { return nil, givenError }, } // when result, actualErr := mapper.MapAny(object) // then assert.Nil(t, result) assert.ErrorIs(t, actualErr, givenError) }) } }) }
explode_data.jsonl/71715
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 926 }
[ 2830, 3393, 2227, 1130, 1155, 353, 8840, 836, 8, 341, 2109, 5677, 1130, 1669, 330, 495, 698, 39250, 17, 1130, 1669, 220, 17, 271, 78216, 1669, 2415, 14032, 31344, 67066, 197, 197, 80575, 788, 2036, 90, 8601, 16, 11, 8601, 17, 526, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestADS1015DriverAnalogRead(t *testing.T) { d, adaptor := initTestADS1015DriverWithStubbedAdaptor() d.Start() adaptor.i2cReadImpl = func(b []byte) (int, error) { copy(b, []byte{0x7F, 0xFF}) return 2, nil } val, err := d.AnalogRead("0") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("1") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("2") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("3") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("0-1") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("0-3") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("1-3") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("2-3") gobottest.Assert(t, val, 1022) gobottest.Assert(t, err, nil) val, err = d.AnalogRead("3-2") gobottest.Refute(t, err.Error(), nil) }
explode_data.jsonl/42583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 481 }
[ 2830, 3393, 49541, 16, 15, 16, 20, 11349, 2082, 30951, 4418, 1155, 353, 8840, 836, 8, 341, 2698, 11, 91941, 1669, 2930, 2271, 49541, 16, 15, 16, 20, 11349, 2354, 33838, 2721, 2589, 32657, 741, 2698, 12101, 2822, 98780, 32657, 8607, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAllSystems(t *testing.T) { config.SetUpMockConfig(t) defer func() { err := common.TruncateDB(common.InMemory) if err != nil { t.Fatalf("error: %v", err) } err = common.TruncateDB(common.OnDisk) if err != nil { t.Fatalf("error: %v", err) } }() mockTarget(t) resp, err := GetAllSystems() assert.Nil(t, err, "Error Should be nil") assert.Equal(t, 1, len(resp), "response should be same as reqData") }
explode_data.jsonl/49405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 1949, 2403, 48065, 1155, 353, 8840, 836, 8, 341, 25873, 4202, 2324, 11571, 2648, 1155, 340, 16867, 2915, 368, 341, 197, 9859, 1669, 4185, 8240, 26900, 3506, 57802, 5337, 10642, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestReadFromTimeout verifies PacketConn.ReadFrom deadline behavior on UDP:
// for each entry in readFromTimeoutTests a read deadline is set via
// SetReadDeadline, and every expected error in tt.xerrs is checked in order.
// A nil expected error means a datagram should arrive first (the local packet
// server echoes "READFROM TIMEOUT TEST" once the test sends its address on
// ch); a non-nil one means the deadline must fire, in which case the error
// must satisfy isDeadlineExceeded and a timeout error must never be paired
// with n != 0. parseReadError additionally validates the error's shape.
// NOTE(review): the time.Sleep(tt.timeout / 3) retry loop makes this test
// timing-sensitive; statement order here is load-bearing — do not reorder.
func TestReadFromTimeout(t *testing.T) { ch := make(chan Addr) defer close(ch) handler := func(ls *localPacketServer, c PacketConn) { if dst, ok := <-ch; ok { c.WriteTo([]byte("READFROM TIMEOUT TEST"), dst) } } ls := newLocalPacketServer(t, "udp") defer ls.teardown() if err := ls.buildup(handler); err != nil { t.Fatal(err) } host, _, err := SplitHostPort(ls.PacketConn.LocalAddr().String()) if err != nil { t.Fatal(err) } c, err := ListenPacket(ls.PacketConn.LocalAddr().Network(), JoinHostPort(host, "0")) if err != nil { t.Fatal(err) } defer c.Close() ch <- c.LocalAddr() for i, tt := range readFromTimeoutTests { if err := c.SetReadDeadline(time.Now().Add(tt.timeout)); err != nil { t.Fatalf("#%d: %v", i, err) } var b [1]byte for j, xerr := range tt.xerrs { for { n, _, err := c.ReadFrom(b[:]) if xerr != nil { if perr := parseReadError(err); perr != nil { t.Errorf("#%d/%d: %v", i, j, perr) } if !isDeadlineExceeded(err) { t.Fatalf("#%d/%d: %v", i, j, err) } } if err == nil { time.Sleep(tt.timeout / 3) continue } if nerr, ok := err.(Error); ok && nerr.Timeout() && n != 0 { t.Fatalf("#%d/%d: read %d; want 0", i, j, n) } break } } } }
explode_data.jsonl/77451
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 627 }
[ 2830, 3393, 4418, 3830, 7636, 1155, 353, 8840, 836, 8, 341, 23049, 1669, 1281, 35190, 72585, 340, 16867, 3265, 7520, 340, 53326, 1669, 2915, 62991, 353, 2438, 16679, 5475, 11, 272, 28889, 9701, 8, 341, 197, 743, 10648, 11, 5394, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDeployExe covers ExeBootstrapper.DeployExe for both executable sources:
//   - CIPD: a fake package instance is served by a fake CIPD client factory;
//     a nil instance makes the download fail (err non-nil, cmd nil), while a
//     valid one yields the cmd with the exe path rooted in execRoot plus the
//     original arguments.
//   - CAS: a fake CAS client downloads by digest and must also yield the
//     rooted cmd.
// Both fake factories are installed on the context; execRoot is a per-test
// temp dir. The Convey blocks share the outer pkg fixture, so the failing
// CIPD case mutates pkg.Instances before calling DeployExe.
func TestDeployExe(t *testing.T) { t.Parallel() ctx := context.Background() Convey("ExeBootstrapper.DeployExe", t, func() { pkg := &fakecipd.Package{ Instances: map[string]*fakecipd.PackageInstance{}, } ctx := cipd.UseCipdClientFactory(ctx, fakecipd.Factory(map[string]*fakecipd.Package{ "fake-package": pkg, })) ctx = cas.UseCasClientFactory(ctx, fakecas.Factory(map[string]*fakecas.Instance{ "non-existent-instance": nil, })) execRoot := t.TempDir() cipdClient, err := cipd.NewClient(ctx, execRoot) PanicOnError(err) casClient := cas.NewClient(ctx, execRoot) bootstrapper := NewExeBootstrapper(cipdClient, casClient) Convey("for CIPD exe", func() { exe := &BootstrappedExe{ Source: &BootstrappedExe_Cipd{ Cipd: &Cipd{ Server: "https://chrome-infra-packages.appspot.com", Package: "fake-package", ActualVersion: "fake-instance-id", }, }, Cmd: []string{"fake-exe", "foo", "bar"}, } Convey("fails if downloading the package fails", func() { pkg.Instances["fake-instance-id"] = nil cmd, err := bootstrapper.DeployExe(ctx, exe) So(err, ShouldNotBeNil) So(cmd, ShouldBeNil) }) Convey("returns the cmd for the executable", func() { cmd, err := bootstrapper.DeployExe(ctx, exe) So(err, ShouldBeNil) So(cmd, ShouldResemble, []string{filepath.Join(execRoot, "fake-exe"), "foo", "bar"}) }) }) Convey("for CAS exe", func() { exe := &BootstrappedExe{ Source: &BootstrappedExe_Cas{ Cas: &apipb.CASReference{ CasInstance: "fake-cas-instance", Digest: &apipb.Digest{ Hash: "fake-hash", SizeBytes: 42, }, }, }, Cmd: []string{"fake-exe", "foo", "bar"}, } Convey("returns the cmd for the executable", func() { cmd, err := bootstrapper.DeployExe(ctx, exe) So(err, ShouldBeNil) So(cmd, ShouldResemble, []string{filepath.Join(execRoot, "fake-exe"), "foo", "bar"}) }) }) }) }
explode_data.jsonl/75913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 908 }
[ 2830, 3393, 69464, 840, 68, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 1669, 2266, 19047, 2822, 93070, 5617, 445, 840, 68, 17919, 495, 3106, 34848, 1989, 840, 68, 497, 259, 11, 2915, 368, 341, 197, 3223, 7351, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSanitizeLabels(t *testing.T) { oldNode := BuildTestNode("ng1-1", 1000, 1000) oldNode.Labels = map[string]string{ kubeletapis.LabelHostname: "abc", "x": "y", } node, err := sanitizeTemplateNode(oldNode, "bzium") assert.NoError(t, err) assert.NotEqual(t, node.Labels[kubeletapis.LabelHostname], "abc") assert.Equal(t, node.Labels["x"], "y") assert.NotEqual(t, node.Name, oldNode.Name) assert.Equal(t, node.Labels[kubeletapis.LabelHostname], node.Name) }
explode_data.jsonl/44990
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 23729, 26310, 23674, 1155, 353, 8840, 836, 8, 341, 61828, 1955, 1669, 7854, 2271, 1955, 445, 968, 16, 12, 16, 497, 220, 16, 15, 15, 15, 11, 220, 16, 15, 15, 15, 340, 61828, 1955, 4679, 82, 284, 2415, 14032, 30953, 515,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLocateTokenInContex(t *testing.T) { id := uuid.NewV4() tk := jwt.New(jwt.SigningMethodRS256) tk.Claims.(jwt.MapClaims)["sub"] = id.String() ctx := goajwt.WithJWT(context.Background(), tk) manager := createManager(t) foundId, err := manager.Locate(ctx) if err != nil { t.Error("Failed not locate token in given context", err) } assert.Equal(t, id, foundId, "ID in created context not equal") }
explode_data.jsonl/54987
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 9152, 349, 3323, 641, 818, 327, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 16040, 7121, 53, 19, 2822, 3244, 74, 1669, 24589, 7121, 3325, 9306, 41152, 287, 3523, 11451, 17, 20, 21, 340, 3244, 74, 78625, 12832, 41592, 10104,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPodSpecForCSIWithIncorrectKubernetesVersion(t *testing.T) { fakeClient := fakek8sclient.NewSimpleClientset() coreops.SetInstance(coreops.New(fakeClient)) fakeClient.Discovery().(*fakediscovery.FakeDiscovery).FakedServerVersion = &version.Info{ GitVersion: "invalid-version", } nodeName := "testNode" cluster := &corev1.StorageCluster{ ObjectMeta: metav1.ObjectMeta{ Name: "px-cluster", Namespace: "kube-system", }, Spec: corev1.StorageClusterSpec{ Image: "portworx/oci-monitor:2.1.1", FeatureGates: map[string]string{ string(pxutil.FeatureCSI): "true", }, }, } driver := portworx{} _, err := driver.GetStoragePodSpec(cluster, nodeName) assert.Error(t, err, "Expected an error on GetStoragePodSpec") }
explode_data.jsonl/55464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 23527, 8327, 2461, 48407, 2354, 40468, 42, 29827, 5637, 1155, 353, 8840, 836, 8, 341, 1166, 726, 2959, 1669, 12418, 74, 23, 82, 2972, 7121, 16374, 2959, 746, 741, 71882, 3721, 4202, 2523, 47867, 3721, 7121, 74138, 2959, 1171...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTraceExporter_WithRecordMetrics_ReturnError(t *testing.T) { want := errors.New("my_error") te, err := NewTraceExporter(fakeTraceExporterConfig, newPushTraceData(0, want)) require.Nil(t, err) require.NotNil(t, te) checkRecordedMetricsForTraceExporter(t, te, want, 0) }
explode_data.jsonl/1572
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 6550, 88025, 62, 2354, 6471, 27328, 53316, 1454, 1155, 353, 8840, 836, 8, 341, 50780, 1669, 5975, 7121, 445, 2408, 4096, 1138, 197, 665, 11, 1848, 1669, 1532, 6550, 88025, 74138, 6550, 88025, 2648, 11, 501, 16644, 6550, 1043...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPipelineRunFacts_GetPipelineTaskStatus is a table-driven check of
// PipelineRunFacts.GetPipelineTaskStatus: for each scenario (no tasks
// started, one started/finished/failed, all finished, when-expression
// guarded/skipped tasks, cancelled task, and a skipped+failed mix) the
// returned map must contain one "<prefix><task><suffix>" entry per DAG task
// plus the aggregate-status key. Unstarted, running, and skipped tasks map to
// PipelineTaskStateNone; finished/failed tasks map to the corresponding
// TaskRun reason strings; the aggregate key must be Failed if any task
// failed, Completed when tasks were skipped but none failed, and None while
// anything is still pending. The DAG is rebuilt per case from tc.dagTasks.
func TestPipelineRunFacts_GetPipelineTaskStatus(t *testing.T) { tcs := []struct { name string state PipelineRunState dagTasks []v1beta1.PipelineTask expectedStatus map[string]string }{{ name: "no-tasks-started", state: noneStartedState, dagTasks: []v1beta1.PipelineTask{pts[0], pts[1]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[0].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, PipelineTaskStatusPrefix + pts[1].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: PipelineTaskStateNone, }, }, { name: "one-task-started", state: oneStartedState, dagTasks: []v1beta1.PipelineTask{pts[0], pts[1]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[0].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, PipelineTaskStatusPrefix + pts[1].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: PipelineTaskStateNone, }, }, { name: "one-task-finished", state: oneFinishedState, dagTasks: []v1beta1.PipelineTask{pts[0], pts[1]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[0].Name + PipelineTaskStatusSuffix: v1beta1.TaskRunReasonSuccessful.String(), PipelineTaskStatusPrefix + pts[1].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: PipelineTaskStateNone, }, }, { name: "one-task-failed", state: oneFailedState, dagTasks: []v1beta1.PipelineTask{pts[0], pts[1]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[0].Name + PipelineTaskStatusSuffix: v1beta1.TaskRunReasonFailed.String(), PipelineTaskStatusPrefix + pts[1].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: v1beta1.PipelineRunReasonFailed.String(), }, }, { name: "all-finished", state: allFinishedState, dagTasks: []v1beta1.PipelineTask{pts[0], pts[1]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[0].Name + PipelineTaskStatusSuffix: 
v1beta1.TaskRunReasonSuccessful.String(), PipelineTaskStatusPrefix + pts[1].Name + PipelineTaskStatusSuffix: v1beta1.TaskRunReasonSuccessful.String(), v1beta1.PipelineTasksAggregateStatus: v1beta1.PipelineRunReasonSuccessful.String(), }, }, { name: "task-with-when-expressions-passed", state: PipelineRunState{{ PipelineTask: &pts[9], TaskRunName: "pr-guard-succeeded-task-not-started", TaskRun: nil, ResolvedTaskResources: &resources.ResolvedTaskResources{ TaskSpec: &task.Spec, }, }}, dagTasks: []v1beta1.PipelineTask{pts[9]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[9].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: PipelineTaskStateNone, }, }, { name: "tasks-when-expression-failed-and-task-skipped", state: PipelineRunState{{ PipelineTask: &pts[10], TaskRunName: "pr-guardedtask-skipped", ResolvedTaskResources: &resources.ResolvedTaskResources{ TaskSpec: &task.Spec, }, }}, dagTasks: []v1beta1.PipelineTask{pts[10]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[10].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: v1beta1.PipelineRunReasonCompleted.String(), }, }, { name: "when-expression-task-with-parent-started", state: PipelineRunState{{ PipelineTask: &pts[0], TaskRun: makeStarted(trs[0]), ResolvedTaskResources: &resources.ResolvedTaskResources{ TaskSpec: &task.Spec, }, }, { PipelineTask: &pts[11], TaskRun: nil, ResolvedTaskResources: &resources.ResolvedTaskResources{ TaskSpec: &task.Spec, }, }}, dagTasks: []v1beta1.PipelineTask{pts[0], pts[11]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[0].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, PipelineTaskStatusPrefix + pts[11].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: PipelineTaskStateNone, }, }, { name: "task-cancelled", state: taskCancelled, dagTasks: []v1beta1.PipelineTask{pts[4]}, expectedStatus: 
map[string]string{ PipelineTaskStatusPrefix + pts[4].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: PipelineTaskStateNone, }, }, { name: "one-skipped-one-failed-aggregate-status-must-be-failed", state: PipelineRunState{{ PipelineTask: &pts[10], TaskRunName: "pr-guardedtask-skipped", ResolvedTaskResources: &resources.ResolvedTaskResources{ TaskSpec: &task.Spec, }, }, { PipelineTask: &pts[0], TaskRunName: "pipelinerun-mytask1", TaskRun: makeFailed(trs[0]), ResolvedTaskResources: &resources.ResolvedTaskResources{ TaskSpec: &task.Spec, }, }}, dagTasks: []v1beta1.PipelineTask{pts[0], pts[10]}, expectedStatus: map[string]string{ PipelineTaskStatusPrefix + pts[0].Name + PipelineTaskStatusSuffix: v1beta1.PipelineRunReasonFailed.String(), PipelineTaskStatusPrefix + pts[10].Name + PipelineTaskStatusSuffix: PipelineTaskStateNone, v1beta1.PipelineTasksAggregateStatus: v1beta1.PipelineRunReasonFailed.String(), }, }} for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { d, err := dag.Build(v1beta1.PipelineTaskList(tc.dagTasks), v1beta1.PipelineTaskList(tc.dagTasks).Deps()) if err != nil { t.Fatalf("Unexpected error while buildig graph for DAG tasks %v: %v", tc.dagTasks, err) } facts := PipelineRunFacts{ State: tc.state, TasksGraph: d, FinalTasksGraph: &dag.Graph{}, } s := facts.GetPipelineTaskStatus() if d := cmp.Diff(tc.expectedStatus, s); d != "" { t.Fatalf("Test failed: %s Mismatch in pipelineTask execution state %s", tc.name, diff.PrintWantGot(d)) } }) } }
explode_data.jsonl/18202
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2759 }
[ 2830, 3393, 34656, 6727, 37, 11359, 13614, 34656, 6262, 2522, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 24291, 688, 40907, 6727, 1397, 198, 197, 2698, 351, 25449, 981, 3056, 85, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrimitivePutLong(t *testing.T) { client := newPrimitiveClient() a, b := int64(1099511627775), int64(-999511627788) result, err := client.PutLong(context.Background(), LongWrapper{Field1: &a, Field2: &b}, nil) if err != nil { t.Fatalf("PutLong: %v", err) } if s := result.RawResponse.StatusCode; s != http.StatusOK { t.Fatalf("unexpected status code %d", s) } }
explode_data.jsonl/61670
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 33313, 19103, 6583, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 501, 33313, 2959, 741, 11323, 11, 293, 1669, 526, 21, 19, 7, 16, 15, 24, 24, 20, 16, 16, 21, 17, 22, 22, 22, 20, 701, 526, 21, 19, 4080, 24, 24, 24, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrefixFilter(t *testing.T) { t.Parallel() buf1 := &bytes.Buffer{} buf2 := &bytes.Buffer{} defaultWriter := &bytes.Buffer{} f := utils.NewPrefixFilter(map[string]io.Writer{ "[prefix-1] ": buf1, "[prefix-2] ": buf2, }, defaultWriter, ) f.Write([]byte("[prefix-1] ")) f.Write([]byte("data-0\n")) f.Write([]byte("[prefix-2] data-1\n")) f.Write([]byte("[other-prefix] [prefix-2] data-2\n[prefix-1] data-3\n")) f.Write([]byte("[other-prefix] data-4\n")) testutil.AssertEqual(t, "buf-1", "data-0\ndata-3\n", buf1.String()) testutil.AssertEqual(t, "buf-2", "data-1\ndata-2\n", buf2.String()) testutil.AssertEqual(t, "defaultWriter", "[other-prefix] data-4\n", defaultWriter.String()) }
explode_data.jsonl/48458
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 319 }
[ 2830, 3393, 14335, 5632, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 26398, 16, 1669, 609, 9651, 22622, 16094, 26398, 17, 1669, 609, 9651, 22622, 16094, 11940, 6492, 1669, 609, 9651, 22622, 31483, 1166, 1669, 12439, 7121, 14335...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCreateDIDKeyByJwk verifies did:key creation from an EC JWK for the
// P-256, P-384, and P-521 curves, plus the error paths for nil input and an
// unsupported key type.
func TestCreateDIDKeyByJwk(t *testing.T) {
	tests := []struct {
		name  string
		kty   string         // JWK key type ("EC" for all table entries)
		curve elliptic.Curve // NIST curve the public key lives on
		// x and y are the public-key coordinates, base64url-encoded
		// (decoded by readBigInt below).
		x        string
		y        string
		DIDKey   string // expected did:key identifier
		DIDKeyID string // expected key ID: the DID with its fragment suffix
	}{
		{
			name:     "test P-256",
			kty:      "EC",
			curve:    elliptic.P256(),
			x:        "igrFmi0whuihKnj9R3Om1SoMph72wUGeFaBbzG2vzns",
			y:        "efsX5b10x8yjyrj4ny3pGfLcY7Xby1KzgqOdqnsrJIM",
			DIDKey:   "did:key:zDnaerx9CtbPJ1q36T5Ln5wYt3MQYeGRG5ehnPAmxcf5mDZpv",
			DIDKeyID: "did:key:zDnaerx9CtbPJ1q36T5Ln5wYt3MQYeGRG5ehnPAmxcf5mDZpv#zDnaerx9CtbPJ1q36T5Ln5wYt3MQYeGRG5ehnPAmxcf5mDZpv", //nolint:lll
		},
		{
			name:     "test P-384",
			kty:      "EC",
			curve:    elliptic.P384(),
			x:        "lInTxl8fjLKp_UCrxI0WDklahi-7-_6JbtiHjiRvMvhedhKVdHBfi2HCY8t_QJyc",
			y:        "y6N1IC-2mXxHreETBW7K3mBcw0qGr3CWHCs-yl09yCQRLcyfGv7XhqAngHOu51Zv",
			DIDKey:   "did:key:z82Lm1MpAkeJcix9K8TMiLd5NMAhnwkjjCBeWHXyu3U4oT2MVJJKXkcVBgjGhnLBn2Kaau9",
			DIDKeyID: "did:key:z82Lm1MpAkeJcix9K8TMiLd5NMAhnwkjjCBeWHXyu3U4oT2MVJJKXkcVBgjGhnLBn2Kaau9#z82Lm1MpAkeJcix9K8TMiLd5NMAhnwkjjCBeWHXyu3U4oT2MVJJKXkcVBgjGhnLBn2Kaau9", //nolint:lll
		},
		{
			name:     "test P-521",
			kty:      "EC",
			curve:    elliptic.P521(),
			x:        "ASUHPMyichQ0QbHZ9ofNx_l4y7luncn5feKLo3OpJ2nSbZoC7mffolj5uy7s6KSKXFmnNWxGJ42IOrjZ47qqwqyS",
			y:        "AW9ziIC4ZQQVSNmLlp59yYKrjRY0_VqO-GOIYQ9tYpPraBKUloEId6cI_vynCzlZWZtWpgOM3HPhYEgawQ703RjC",
			DIDKey:   "did:key:z2J9gaYxrKVpdoG9A4gRnmpnRCcxU6agDtFVVBVdn1JedouoZN7SzcyREXXzWgt3gGiwpoHq7K68X4m32D8HgzG8wv3sY5j7",
			DIDKeyID: "did:key:z2J9gaYxrKVpdoG9A4gRnmpnRCcxU6agDtFVVBVdn1JedouoZN7SzcyREXXzWgt3gGiwpoHq7K68X4m32D8HgzG8wv3sY5j7#z2J9gaYxrKVpdoG9A4gRnmpnRCcxU6agDtFVVBVdn1JedouoZN7SzcyREXXzWgt3gGiwpoHq7K68X4m32D8HgzG8wv3sY5j7", //nolint:lll
		},
	}

	for _, test := range tests {
		tc := test // capture range variable for the subtest closure
		t.Run(tc.name+" CreateDIDKeyByJwk", func(t *testing.T) {
			// Rebuild the ECDSA public key from the encoded coordinates,
			// wrap it as a JWK, and check both derived identifiers.
			x := readBigInt(t, test.x)
			y := readBigInt(t, test.y)
			publicKey := ecdsa.PublicKey{
				Curve: test.curve,
				X:     x,
				Y:     y,
			}
			jwk, err := jose.JWKFromKey(&publicKey)
			require.NoError(t, err)
			didKey, keyID, err := CreateDIDKeyByJwk(jwk)
			require.NoError(t, err)
			require.Equal(t, tc.DIDKey, didKey)
			require.Equal(t, tc.DIDKeyID, keyID)
		})
	}

	// A nil JWK must be rejected with an explanatory error.
	t.Run("nil input", func(t *testing.T) {
		_, _, err := CreateDIDKeyByJwk(nil)
		require.Error(t, err)
		require.Contains(t, err.Error(), "jsonWebKey is required")
	})

	// An unrecognized key type ("XX") must be rejected.
	t.Run("test invalid type", func(t *testing.T) {
		jwk := jose.JWK{
			Kty: "XX",
			Crv: elliptic.P256().Params().Name,
		}
		_, _, err := CreateDIDKeyByJwk(&jwk)
		require.Error(t, err)
		require.Contains(t, err.Error(), "unsupported kty")
	})
}
explode_data.jsonl/37621
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1729 }
[ 2830, 3393, 4021, 35, 915, 1592, 1359, 41, 50522, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 16463, 1881, 414, 914, 198, 197, 33209, 586, 262, 77783, 292, 727, 73047, 198, 197, 10225, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1