text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestGetMetaData(t *testing.T) { dir, err := ioutil.TempDir("", "commitlog-index") assert.NoError(t, err) fd := NewFileDB(dir, 1024*1024, 256) defer func() { fd.Close() os.Remove(dir) }() _, err = fd.GetMetaData() assert.NoError(t, err) }
explode_data.jsonl/64028
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 1949, 37307, 1155, 353, 8840, 836, 8, 341, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 17413, 839, 21492, 1138, 6948, 35699, 1155, 11, 1848, 340, 61721, 1669, 1532, 1703, 3506, 14161, 11, 220, 16, 15, 17, 19, 9,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOutboundListenerConflict_HTTPWithCurrentUnknownV14(t *testing.T) { _ = os.Setenv(features.EnableProtocolSniffingForOutbound.Name, "true") defer func() { _ = os.Unsetenv(features.EnableProtocolSniffingForOutbound.Name) }() // The oldest service port is unknown. We should encounter conflicts when attempting to add the HTTP ports. Purposely // storing the services out of time order to test that it's being sorted properly. testOutboundListenerConflictV14(t, buildService("test1.com", wildcardIP, protocol.HTTP, tnow.Add(1*time.Second)), buildService("test2.com", wildcardIP, "unknown", tnow), buildService("test3.com", wildcardIP, protocol.HTTP, tnow.Add(2*time.Second))) }
explode_data.jsonl/61255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 2662, 10891, 2743, 57974, 34954, 2354, 5405, 13790, 53, 16, 19, 1155, 353, 8840, 836, 8, 341, 197, 62, 284, 2643, 4202, 3160, 47552, 32287, 20689, 20720, 3092, 287, 2461, 2662, 10891, 2967, 11, 330, 1866, 1138, 16867, 2915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckValidJSON(t *testing.T) { validJSON := `{"Args":["a","b","c"]}` input := &chaincodeInput{} if err := json.Unmarshal([]byte(validJSON), &input); err != nil { t.Fail() t.Logf("Chaincode argument error: %s", err) return } validJSON = `{"Function":"f", "Args":["a","b","c"]}` if err := json.Unmarshal([]byte(validJSON), &input); err != nil { t.Fail() t.Logf("Chaincode argument error: %s", err) return } validJSON = `{"Function":"f", "Args":[]}` if err := json.Unmarshal([]byte(validJSON), &input); err != nil { t.Fail() t.Logf("Chaincode argument error: %s", err) return } validJSON = `{"Function":"f"}` if err := json.Unmarshal([]byte(validJSON), &input); err != nil { t.Fail() t.Logf("Chaincode argument error: %s", err) return } }
explode_data.jsonl/46370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 3973, 4088, 5370, 1155, 353, 8840, 836, 8, 341, 56322, 5370, 1669, 1565, 4913, 4117, 36799, 64, 2198, 65, 2198, 66, 1341, 31257, 22427, 1669, 609, 8819, 1851, 2505, 16094, 743, 1848, 1669, 2951, 38097, 10556, 3782, 41529, 53...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestHandshakeServerSNIGetCertificateError(t *testing.T) { const errMsg = "TestHandshakeServerSNIGetCertificateError error" serverConfig := testConfig.Clone() serverConfig.GetCertificate = func(clientHello *ClientHelloInfo) (*Certificate, error) { return nil, errors.New(errMsg) } clientHello := &clientHelloMsg{ vers: VersionTLS12, random: make([]byte, 32), cipherSuites: []uint16{TLS_RSA_WITH_RC4_128_SHA}, compressionMethods: []uint8{compressionNone}, serverName: "test", } testClientHelloFailure(t, serverConfig, clientHello, errMsg) }
explode_data.jsonl/36343
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 2314, 29661, 5475, 18966, 1914, 295, 33202, 1454, 1155, 353, 8840, 836, 8, 341, 4777, 60078, 284, 330, 2271, 2314, 29661, 5475, 18966, 1914, 295, 33202, 1454, 1465, 1837, 41057, 2648, 1669, 1273, 2648, 64463, 741, 41057, 2648,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoundTrip(t *testing.T) { fixtures := make([]Test, 0) err := json.Unmarshal([]byte(fixtureJson), &fixtures) if err != nil { t.Fatal(err) } var buf bytes.Buffer for _, f := range fixtures { buf.Reset() err = f.Serialize(&buf) if err != nil { t.Fatal(err) } datum, err := DeserializeTest(&buf) if err != nil { t.Fatal(err) } assert.Equal(t, *datum, f) } }
explode_data.jsonl/48740
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 27497, 56352, 1155, 353, 8840, 836, 8, 341, 1166, 941, 18513, 1669, 1281, 10556, 2271, 11, 220, 15, 340, 9859, 1669, 2951, 38097, 10556, 3782, 94886, 5014, 701, 609, 45247, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestConfigCheck(t *testing.T) { t.Run("invalid config format due to conflicting package names", func(t *testing.T) { config := Config{ Exec: PackageConfig{Filename: "generated/exec.go", Package: "graphql"}, Model: PackageConfig{Filename: "generated/models.go"}, } require.EqualError(t, config.check(), "exec and model define the same import path (github.com/vndocker/encrypted-graphql/codegen/config/generated) with different package names (graphql vs generated)") }) t.Run("federation must be in exec package", func(t *testing.T) { config := Config{ Exec: PackageConfig{Filename: "generated/exec.go"}, Federation: PackageConfig{Filename: "anotherpkg/federation.go"}, } require.EqualError(t, config.check(), "federation and exec must be in the same package") }) t.Run("federation must have same package name as exec", func(t *testing.T) { config := Config{ Exec: PackageConfig{Filename: "generated/exec.go"}, Federation: PackageConfig{Filename: "generated/federation.go", Package: "federation"}, } require.EqualError(t, config.check(), "exec and federation define the same import path (github.com/vndocker/encrypted-graphql/codegen/config/generated) with different package names (generated vs federation)") }) t.Run("deprecated federated flag raises an error", func(t *testing.T) { config := Config{ Exec: PackageConfig{Filename: "generated/exec.go"}, Federated: true, } require.EqualError(t, config.check(), "federated has been removed, instead use\nfederation:\n filename: path/to/federated.go") }) }
explode_data.jsonl/38432
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 522 }
[ 2830, 3393, 2648, 3973, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 11808, 2193, 3561, 4152, 311, 51033, 6328, 5036, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 25873, 1669, 5532, 515, 298, 197, 10216, 25, 220, 16906, 2648, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDaoReports(t *testing.T) { var ( c = context.TODO() ids = []int64{1, 2, 3} ) convey.Convey("Reports", t, func(ctx convey.C) { rpt, rptMap, rptIDs, err := d.Reports(c, ids) ctx.Convey("Then err should be nil.rpt,rptMap,rptIDs should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(rptIDs, convey.ShouldHaveLength, 0) ctx.So(rptMap, convey.ShouldHaveLength, 0) ctx.So(rpt, convey.ShouldHaveLength, 0) }) }) }
explode_data.jsonl/51293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 12197, 23748, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 256, 284, 2266, 90988, 741, 197, 197, 3365, 284, 3056, 396, 21, 19, 90, 16, 11, 220, 17, 11, 220, 18, 532, 197, 340, 37203, 5617, 4801, 5617, 445, 2374...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogTreeTX_DequeueLeaves_BadConnection(t *testing.T) { ndb, done := openTestDBOrDie() tx, err := ndb.BeginTx(context.Background(), nil /* opts */) require.NoError(t, err) done(context.Background()) _, err = (&logTreeTX{ ls: NewLogStorage(ndb, nil).(*postgresLogStorage), treeTX: treeTX{ tx: tx, }, }).DequeueLeaves(context.Background(), 1, time.Now()) require.Error(t, err) require.Contains(t, err.Error(), "driver: bad connection") }
explode_data.jsonl/30685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 2201, 6533, 22867, 24597, 4584, 2304, 4693, 1668, 329, 4526, 1155, 353, 8840, 836, 8, 341, 197, 303, 65, 11, 2814, 1669, 1787, 2271, 3506, 2195, 18175, 2822, 46237, 11, 1848, 1669, 15581, 65, 28467, 31584, 5378, 19047, 1507,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestService_Delete(t *testing.T) { cases := []struct { Name string Setup func(s *mock_scalesetvms.MockScaleSetVMScopeMockRecorder, m *mock_scalesetvms.MockclientMockRecorder) Err error CheckIsErr bool }{ { Name: "should start deleting successfully if no long running operation is active", Setup: func(s *mock_scalesetvms.MockScaleSetVMScopeMockRecorder, m *mock_scalesetvms.MockclientMockRecorder) { s.ResourceGroup().Return("rg") s.InstanceID().Return("0") s.ScaleSetName().Return("scaleset") s.GetLongRunningOperationState("0", serviceName).Return(nil) future := &infrav1.Future{ Type: infrav1.DeleteFuture, } m.DeleteAsync(gomock2.AContext(), "rg", "scaleset", "0").Return(future, nil) s.SetLongRunningOperationState(future) m.GetResultIfDone(gomock2.AContext(), future).Return(compute.VirtualMachineScaleSetVM{}, azure.WithTransientError(azure.NewOperationNotDoneError(future), 15*time.Second)) m.Get(gomock2.AContext(), "rg", "scaleset", "0").Return(compute.VirtualMachineScaleSetVM{}, nil) }, CheckIsErr: true, Err: errors.Wrap(azure.WithTransientError(azure.NewOperationNotDoneError(&infrav1.Future{ Type: infrav1.DeleteFuture, }), 15*time.Second), "failed to get result of long running operation"), }, { Name: "should finish deleting successfully when there's a long running operation that has completed", Setup: func(s *mock_scalesetvms.MockScaleSetVMScopeMockRecorder, m *mock_scalesetvms.MockclientMockRecorder) { s.ResourceGroup().Return("rg") s.InstanceID().Return("0") s.ScaleSetName().Return("scaleset") future := &infrav1.Future{ Type: infrav1.DeleteFuture, } s.GetLongRunningOperationState("0", serviceName).Return(future) m.GetResultIfDone(gomock2.AContext(), future).Return(compute.VirtualMachineScaleSetVM{}, nil) s.DeleteLongRunningOperationState("0", serviceName) m.Get(gomock2.AContext(), "rg", "scaleset", "0").Return(compute.VirtualMachineScaleSetVM{}, nil) }, }, { Name: "should not error when deleting, but resource is 404", Setup: func(s 
*mock_scalesetvms.MockScaleSetVMScopeMockRecorder, m *mock_scalesetvms.MockclientMockRecorder) { s.ResourceGroup().Return("rg") s.InstanceID().Return("0") s.ScaleSetName().Return("scaleset") s.GetLongRunningOperationState("0", serviceName).Return(nil) m.DeleteAsync(gomock2.AContext(), "rg", "scaleset", "0").Return(nil, autorest404) m.Get(gomock2.AContext(), "rg", "scaleset", "0").Return(compute.VirtualMachineScaleSetVM{}, nil) }, }, { Name: "should error when deleting, but a non-404 error is returned from DELETE call", Setup: func(s *mock_scalesetvms.MockScaleSetVMScopeMockRecorder, m *mock_scalesetvms.MockclientMockRecorder) { s.ResourceGroup().Return("rg") s.InstanceID().Return("0") s.ScaleSetName().Return("scaleset") s.GetLongRunningOperationState("0", serviceName).Return(nil) m.DeleteAsync(gomock2.AContext(), "rg", "scaleset", "0").Return(nil, errors.New("boom")) m.Get(gomock2.AContext(), "rg", "scaleset", "0").Return(compute.VirtualMachineScaleSetVM{}, nil) }, Err: errors.Wrap(errors.New("boom"), "failed to delete instance scaleset/0"), }, { Name: "should return error when a long running operation is active and getting the result returns an error", Setup: func(s *mock_scalesetvms.MockScaleSetVMScopeMockRecorder, m *mock_scalesetvms.MockclientMockRecorder) { s.ResourceGroup().Return("rg") s.InstanceID().Return("0") s.ScaleSetName().Return("scaleset") future := &infrav1.Future{ Type: infrav1.DeleteFuture, } s.GetLongRunningOperationState("0", serviceName).Return(future) m.GetResultIfDone(gomock2.AContext(), future).Return(compute.VirtualMachineScaleSetVM{}, errors.New("boom")) m.Get(gomock2.AContext(), "rg", "scaleset", "0").Return(compute.VirtualMachineScaleSetVM{}, nil) }, Err: errors.Wrap(errors.New("boom"), "failed to get result of long running operation"), }, } for _, c := range cases { t.Run(c.Name, func(t *testing.T) { var ( g = NewWithT(t) mockCtrl = gomock.NewController(t) scopeMock = mock_scalesetvms.NewMockScaleSetVMScope(mockCtrl) clientMock = 
mock_scalesetvms.NewMockclient(mockCtrl) ) defer mockCtrl.Finish() scopeMock.EXPECT().SubscriptionID().Return("subID") scopeMock.EXPECT().BaseURI().Return("https://localhost/") scopeMock.EXPECT().Authorizer().Return(nil) scopeMock.EXPECT().WithValues(gomock.Any()).Return(scopeMock) scopeMock.EXPECT().V(gomock.Any()).Return(scopeMock).AnyTimes() scopeMock.EXPECT().Info(gomock.Any(), gomock.Any()).AnyTimes() service := NewService(scopeMock) service.Client = clientMock c.Setup(scopeMock.EXPECT(), clientMock.EXPECT()) if err := service.Delete(context.TODO()); c.Err == nil { g.Expect(err).To(Succeed()) } else { g.Expect(err).To(HaveOccurred()) g.Expect(err).To(MatchError(c.Err.Error())) if c.CheckIsErr { g.Expect(errors.Is(err, c.Err)).To(BeTrue()) } } }) } }
explode_data.jsonl/70806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2095 }
[ 2830, 3393, 1860, 57418, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 21297, 981, 914, 198, 197, 197, 21821, 414, 2915, 1141, 353, 16712, 643, 30196, 295, 85, 1011, 24664, 6947, 1649, 53, 4826, 2417, 11571, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPowGo(t *testing.T) { s := time.Now() testPowGo(t) ti := time.Now().Sub(s) sp := float64(countGo) / 1000 / ti.Seconds() t.Logf("%d kH/sec on Go PoW", int(sp)) }
explode_data.jsonl/11020
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 66584, 10850, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 882, 13244, 741, 18185, 66584, 10850, 1155, 340, 72859, 1669, 882, 13244, 1005, 3136, 1141, 692, 41378, 1669, 2224, 21, 19, 11512, 10850, 8, 608, 220, 16, 15, 15, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSetSelectorParser(t *testing.T) { setSelectorParserTests := []struct { In string Out Selector Match bool Valid bool }{ {"", NewSelector(), true, true}, {"\rx", internalSelector{ getRequirement("x", selection.Exists, nil, t), }, true, true}, {"this-is-a-dns.domain.com/key-with-dash", internalSelector{ getRequirement("this-is-a-dns.domain.com/key-with-dash", selection.Exists, nil, t), }, true, true}, {"this-is-another-dns.domain.com/key-with-dash in (so,what)", internalSelector{ getRequirement("this-is-another-dns.domain.com/key-with-dash", selection.In, sets.NewString("so", "what"), t), }, true, true}, {"0.1.2.domain/99 notin (10.10.100.1, tick.tack.clock)", internalSelector{ getRequirement("0.1.2.domain/99", selection.NotIn, sets.NewString("10.10.100.1", "tick.tack.clock"), t), }, true, true}, {"foo in (abc)", internalSelector{ getRequirement("foo", selection.In, sets.NewString("abc"), t), }, true, true}, {"x notin\n (abc)", internalSelector{ getRequirement("x", selection.NotIn, sets.NewString("abc"), t), }, true, true}, {"x notin \t (abc,def)", internalSelector{ getRequirement("x", selection.NotIn, sets.NewString("abc", "def"), t), }, true, true}, {"x in (abc,def)", internalSelector{ getRequirement("x", selection.In, sets.NewString("abc", "def"), t), }, true, true}, {"x in (abc,)", internalSelector{ getRequirement("x", selection.In, sets.NewString("abc", ""), t), }, true, true}, {"x in ()", internalSelector{ getRequirement("x", selection.In, sets.NewString(""), t), }, true, true}, {"x notin (abc,,def),bar,z in (),w", internalSelector{ getRequirement("bar", selection.Exists, nil, t), getRequirement("w", selection.Exists, nil, t), getRequirement("x", selection.NotIn, sets.NewString("abc", "", "def"), t), getRequirement("z", selection.In, sets.NewString(""), t), }, true, true}, {"x,y in (a)", internalSelector{ getRequirement("y", selection.In, sets.NewString("a"), t), getRequirement("x", selection.Exists, nil, t), }, false, true}, {"x=a", internalSelector{ 
getRequirement("x", selection.Equals, sets.NewString("a"), t), }, true, true}, {"x>1", internalSelector{ getRequirement("x", selection.GreaterThan, sets.NewString("1"), t), }, true, true}, {"x<7", internalSelector{ getRequirement("x", selection.LessThan, sets.NewString("7"), t), }, true, true}, {"x=a,y!=b", internalSelector{ getRequirement("x", selection.Equals, sets.NewString("a"), t), getRequirement("y", selection.NotEquals, sets.NewString("b"), t), }, true, true}, {"x=a,y!=b,z in (h,i,j)", internalSelector{ getRequirement("x", selection.Equals, sets.NewString("a"), t), getRequirement("y", selection.NotEquals, sets.NewString("b"), t), getRequirement("z", selection.In, sets.NewString("h", "i", "j"), t), }, true, true}, {"x=a||y=b", internalSelector{}, false, false}, {"x,,y", nil, true, false}, {",x,y", nil, true, false}, {"x nott in (y)", nil, true, false}, {"x notin ( )", internalSelector{ getRequirement("x", selection.NotIn, sets.NewString(""), t), }, true, true}, {"x notin (, a)", internalSelector{ getRequirement("x", selection.NotIn, sets.NewString("", "a"), t), }, true, true}, {"a in (xyz),", nil, true, false}, {"a in (xyz)b notin ()", nil, true, false}, {"a ", internalSelector{ getRequirement("a", selection.Exists, nil, t), }, true, true}, {"a in (x,y,notin, z,in)", internalSelector{ getRequirement("a", selection.In, sets.NewString("in", "notin", "x", "y", "z"), t), }, true, true}, // operator 'in' inside list of identifiers {"a in (xyz abc)", nil, false, false}, // no comma {"a notin(", nil, true, false}, // bad formed {"a (", nil, false, false}, // cpar {"(", nil, false, false}, // opar } for _, ssp := range setSelectorParserTests { if sel, err := Parse(ssp.In); err != nil && ssp.Valid { t.Errorf("Parse(%s) => %v expected no error", ssp.In, err) } else if err == nil && !ssp.Valid { t.Errorf("Parse(%s) => %+v expected error", ssp.In, sel) } else if ssp.Match && !reflect.DeepEqual(sel, ssp.Out) { t.Errorf("Parse(%s) => parse output '%#v' doesn't match '%#v' 
expected match", ssp.In, sel, ssp.Out) } } }
explode_data.jsonl/29776
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1757 }
[ 2830, 3393, 1649, 5877, 6570, 1155, 353, 8840, 836, 8, 341, 8196, 5877, 6570, 18200, 1669, 3056, 1235, 341, 197, 70167, 262, 914, 198, 197, 197, 2662, 256, 49610, 198, 197, 197, 8331, 1807, 198, 197, 197, 4088, 1807, 198, 197, 59403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestBuildsLogsError(t *testing.T) { testClient(t, func(e *cli.Engine, i *mocksdk.Interface) { opts := structs.LogsOptions{} i.On("BuildLogs", "app1", "build1", opts).Return(nil, fmt.Errorf("err1")) res, err := testExecute(e, "builds logs build1 -a app1", nil) require.NoError(t, err) require.Equal(t, 1, res.Code) res.RequireStderr(t, []string{"ERROR: err1"}) res.RequireStdout(t, []string{""}) }) }
explode_data.jsonl/65800
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 11066, 82, 51053, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 2959, 1155, 11, 2915, 2026, 353, 19521, 54424, 11, 600, 353, 16712, 51295, 41065, 8, 341, 197, 64734, 1669, 62845, 5247, 82, 3798, 16094, 197, 8230, 8071, 445, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSplitRange(t *testing.T) { min := numeric.Float64ToInt64(1.0) max := numeric.Float64ToInt64(5.0) ranges := splitInt64Range(min, max, 4) enumerated := ranges.Enumerate(nil) if len(enumerated) != 135 { t.Errorf("expected 135 terms, got %d", len(enumerated)) } }
explode_data.jsonl/9139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 20193, 6046, 1155, 353, 8840, 836, 8, 341, 25320, 1669, 24064, 29794, 21, 19, 38544, 21, 19, 7, 16, 13, 15, 340, 22543, 1669, 24064, 29794, 21, 19, 38544, 21, 19, 7, 20, 13, 15, 340, 7000, 5520, 1669, 6718, 1072, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVersionedTree(t *testing.T) { require := require.New(t) d, closeDB := getTestDB() defer closeDB() tree := NewMutableTree(d, 0) // We start with zero keys in the databse. require.Equal(0, tree.ndb.size()) require.True(tree.IsEmpty()) // version 0 tree.Set([]byte("key1"), []byte("val0")) tree.Set([]byte("key2"), []byte("val0")) // Still zero keys, since we haven't written them. require.Len(tree.ndb.leafNodes(), 0) require.False(tree.IsEmpty()) // Now let's write the keys to storage. hash1, v, err := tree.SaveVersion() require.NoError(err) require.False(tree.IsEmpty()) require.EqualValues(1, v) // -----1----- // key1 = val0 version=1 // key2 = val0 version=1 // key2 (root) version=1 // ----------- nodes1 := tree.ndb.leafNodes() require.Len(nodes1, 2, "db should have a size of 2") // version 1 tree.Set([]byte("key1"), []byte("val1")) tree.Set([]byte("key2"), []byte("val1")) tree.Set([]byte("key3"), []byte("val1")) require.Len(tree.ndb.leafNodes(), len(nodes1)) hash2, v2, err := tree.SaveVersion() require.NoError(err) require.False(bytes.Equal(hash1, hash2)) require.EqualValues(v+1, v2) // Recreate a new tree and load it, to make sure it works in this // scenario. tree = NewMutableTree(d, 100) _, err = tree.Load() require.NoError(err) require.Len(tree.versions, 2, "wrong number of versions") require.EqualValues(v2, tree.Version()) // -----1----- // key1 = val0 <orphaned> // key2 = val0 <orphaned> // -----2----- // key1 = val1 // key2 = val1 // key3 = val1 // ----------- nodes2 := tree.ndb.leafNodes() require.Len(nodes2, 5, "db should have grown in size") require.Len(tree.ndb.orphans(), 3, "db should have three orphans") // Create two more orphans. 
tree.Remove([]byte("key1")) tree.Set([]byte("key2"), []byte("val2")) hash3, v3, _ := tree.SaveVersion() require.EqualValues(3, v3) // -----1----- // key1 = val0 <orphaned> (replaced) // key2 = val0 <orphaned> (replaced) // -----2----- // key1 = val1 <orphaned> (removed) // key2 = val1 <orphaned> (replaced) // key3 = val1 // -----3----- // key2 = val2 // ----------- nodes3 := tree.ndb.leafNodes() require.Len(nodes3, 6, "wrong number of nodes") require.Len(tree.ndb.orphans(), 7, "wrong number of orphans") hash4, _, _ := tree.SaveVersion() require.EqualValues(hash3, hash4) require.NotNil(hash4) tree = NewMutableTree(d, 100) _, err = tree.Load() require.NoError(err) // ------------ // DB UNCHANGED // ------------ nodes4 := tree.ndb.leafNodes() require.Len(nodes4, len(nodes3), "db should not have changed in size") tree.Set([]byte("key1"), []byte("val0")) // "key2" _, val := tree.GetVersioned([]byte("key2"), 0) require.Nil(val) _, val = tree.GetVersioned([]byte("key2"), 1) require.Equal("val0", string(val)) _, val = tree.GetVersioned([]byte("key2"), 2) require.Equal("val1", string(val)) _, val = tree.Get([]byte("key2")) require.Equal("val2", string(val)) // "key1" _, val = tree.GetVersioned([]byte("key1"), 1) require.Equal("val0", string(val)) _, val = tree.GetVersioned([]byte("key1"), 2) require.Equal("val1", string(val)) _, val = tree.GetVersioned([]byte("key1"), 3) require.Nil(val) _, val = tree.GetVersioned([]byte("key1"), 4) require.Nil(val) _, val = tree.Get([]byte("key1")) require.Equal("val0", string(val)) // "key3" _, val = tree.GetVersioned([]byte("key3"), 0) require.Nil(val) _, val = tree.GetVersioned([]byte("key3"), 2) require.Equal("val1", string(val)) _, val = tree.GetVersioned([]byte("key3"), 3) require.Equal("val1", string(val)) // Delete a version. After this the keys in that version should not be found. 
tree.DeleteVersion(2) // -----1----- // key1 = val0 // key2 = val0 // -----2----- // key3 = val1 // -----3----- // key2 = val2 // ----------- nodes5 := tree.ndb.leafNodes() require.True(len(nodes5) < len(nodes4), "db should have shrunk after delete %d !< %d", len(nodes5), len(nodes4)) _, val = tree.GetVersioned([]byte("key2"), 2) require.Nil(val) _, val = tree.GetVersioned([]byte("key3"), 2) require.Nil(val) // But they should still exist in the latest version. _, val = tree.Get([]byte("key2")) require.Equal("val2", string(val)) _, val = tree.Get([]byte("key3")) require.Equal("val1", string(val)) // Version 1 should still be available. _, val = tree.GetVersioned([]byte("key1"), 1) require.Equal("val0", string(val)) _, val = tree.GetVersioned([]byte("key2"), 1) require.Equal("val0", string(val)) }
explode_data.jsonl/25118
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1800 }
[ 2830, 3393, 5637, 291, 6533, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 2698, 11, 3265, 3506, 1669, 633, 2271, 3506, 741, 16867, 3265, 3506, 2822, 51968, 1669, 1532, 11217, 6533, 1500, 11, 220, 15, 692, 197, 322...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequestUrlScheme(t *testing.T) { req := defaultRequest("GET", "https://localhost", nil, t) urlBase := req.BaseUrl() expected := "https" if urlBase.Scheme != expected { t.Error(expected + " was the expected scheme, but instead got " + urlBase.Scheme) } }
explode_data.jsonl/31840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 1900, 2864, 28906, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 1638, 1900, 445, 3806, 497, 330, 2428, 1110, 8301, 497, 2092, 11, 259, 340, 19320, 3978, 1669, 4232, 13018, 2864, 2822, 42400, 1669, 330, 2428, 698, 743, 2515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLabelsForObject(t *testing.T) { f := NewFactory(nil) tests := []struct { name string object runtime.Object expected string err error }{ { name: "successful re-use of labels", object: &api.Service{ ObjectMeta: metav1.ObjectMeta{Name: "baz", Namespace: "test", Labels: map[string]string{"svc": "test"}}, TypeMeta: metav1.TypeMeta{Kind: "Service", APIVersion: "v1"}, }, expected: "svc=test", err: nil, }, { name: "empty labels", object: &api.Service{ ObjectMeta: metav1.ObjectMeta{Name: "foo", Namespace: "test", Labels: map[string]string{}}, TypeMeta: metav1.TypeMeta{Kind: "Service", APIVersion: "v1"}, }, expected: "", err: nil, }, { name: "nil labels", object: &api.Service{ ObjectMeta: metav1.ObjectMeta{Name: "zen", Namespace: "test", Labels: nil}, TypeMeta: metav1.TypeMeta{Kind: "Service", APIVersion: "v1"}, }, expected: "", err: nil, }, } for _, test := range tests { gotLabels, err := f.LabelsForObject(test.object) if err != test.err { t.Fatalf("%s: Error mismatch: Expected %v, got %v", test.name, test.err, err) } got := kubectl.MakeLabels(gotLabels) if test.expected != got { t.Fatalf("%s: Labels mismatch! Expected %s, got %s", test.name, test.expected, got) } } }
explode_data.jsonl/43204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 593 }
[ 2830, 3393, 23674, 94604, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 4153, 27907, 692, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 35798, 256, 15592, 8348, 198, 197, 42400, 914, 198, 197, 9859, 414, 1465, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDelete(t *testing.T) { p := New() p.Set("foo", "bar") p.Delete("foo") assert.Equal(t, []string{}, p.GetAll("foo")) p.Delete("non-existing") }
explode_data.jsonl/1463
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 6435, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 741, 3223, 4202, 445, 7975, 497, 330, 2257, 1138, 3223, 18872, 445, 7975, 1138, 6948, 12808, 1155, 11, 3056, 917, 22655, 281, 45732, 445, 7975, 5455, 3223, 18872, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestConverterMapListOfStructType(t *testing.T) { fieldMap := make(map[string]codegen.FieldMapperEntry) fieldMap["One"] = codegen.FieldMapperEntry{ QualifiedName: "Two", Override: true, } fieldMap["Two"] = codegen.FieldMapperEntry{ QualifiedName: "One", Override: true, } lines, err := convertTypes( "Foo", "Bar", `struct Inner { 1: optional string field } struct Foo { 1: optional list<Inner> one 2: required list<Inner> two } struct Bar { 1: optional list<Inner> one 2: required list<Inner> two }`, nil, fieldMap, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` out.One = make([]*structs.Inner, len(in.Two)) for index1, value2 := range in.Two { if value2 != nil { out.One[index1] = &structs.Inner{} if in.Two[index1] != nil { out.One[index1].Field = (*string)(in.Two[index1].Field) } } else { out.One[index1] = nil } } sourceList3 := in.Two isOverridden4 := false if in.One != nil { sourceList3 = in.One isOverridden4 = true } out.Two = make([]*structs.Inner, len(sourceList3)) for index5, value6 := range sourceList3 { if isOverridden4 { if value6 != nil { out.Two[index5] = &structs.Inner{} if in.One[index5] != nil { out.Two[index5].Field = (*string)(in.One[index5].Field) } } else { out.Two[index5] = nil } } else { if value6 != nil { out.Two[index5] = &structs.Inner{} out.Two[index5].Field = (*string)(in.Two[index5].Field) } else { out.Two[index5] = nil } } } `), lines) }
explode_data.jsonl/62083
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 745 }
[ 2830, 3393, 14920, 2227, 64090, 9422, 929, 1155, 353, 8840, 836, 8, 341, 39250, 2227, 1669, 1281, 9147, 14032, 60, 95859, 17087, 10989, 5874, 340, 39250, 2227, 1183, 3966, 1341, 284, 2038, 4370, 17087, 10989, 5874, 515, 197, 16995, 928, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigValidate_LogsPatchMatcher(t *testing.T) { tests := []struct { matcherName string matcherConfig map[string]interface{} error bool }{ { matcherName: "", matcherConfig: map[string]interface{}{}, error: false, }, { matcherName: "logs_path", matcherConfig: map[string]interface{}{ "resource_type": "pod", }, error: true, }, { matcherName: "logs_path", matcherConfig: map[string]interface{}{ "resource_type": "pod", "invalid_field": "invalid_value", }, error: true, }, { matcherName: "logs_path", matcherConfig: map[string]interface{}{ "resource_type": "pod", "logs_path": "/var/log/invalid/path/", }, error: true, }, { matcherName: "logs_path", matcherConfig: map[string]interface{}{ "resource_type": "pod", "logs_path": "/var/log/pods/", }, error: false, }, { matcherName: "logs_path", matcherConfig: map[string]interface{}{ "resource_type": "container", "logs_path": "/var/log/containers/", }, error: false, }, } for _, test := range tests { cfg, _ := common.NewConfigFrom(test.matcherConfig) c := defaultKubernetesAnnotatorConfig() c.DefaultMatchers = Enabled{false} err := cfg.Unpack(&c) c.Matchers = PluginConfig{ { test.matcherName: *cfg, }, } err = c.Validate() if test.error { require.NotNil(t, err) } else { require.Nil(t, err) } } }
explode_data.jsonl/29227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 694 }
[ 2830, 3393, 2648, 17926, 44083, 82, 43622, 37554, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 2109, 28058, 675, 256, 914, 198, 197, 2109, 28058, 2648, 2415, 14032, 31344, 16094, 197, 18290, 260, 1807, 198, 197, 5940...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGzipCompression(t *testing.T) { requests := []struct { EnableCompression bool Request fakeRequest }{ { EnableCompression: true, Request: fakeRequest{ URI: "/gambol99.htm", ExpectedProxy: true, Headers: map[string]string{ "Accept-Encoding": "gzip, deflate, br", }, ExpectedHeaders: map[string]string{ "Content-Encoding": "gzip", }, }, }, { EnableCompression: true, Request: fakeRequest{ URI: testAdminURI, ExpectedProxy: false, Headers: map[string]string{ "Accept-Encoding": "gzip, deflate, br", }, ExpectedHeaders: map[string]string{ "Content-Encoding": "gzip", }, }, }, { EnableCompression: false, Request: fakeRequest{ URI: "/gambol99.htm", ExpectedProxy: true, Headers: map[string]string{ "Accept-Encoding": "gzip, deflate, br", }, ExpectedNoProxyHeaders: []string{"Content-Encoding"}, }, }, { EnableCompression: false, Request: fakeRequest{ URI: testAdminURI, ExpectedProxy: false, Headers: map[string]string{ "Accept-Encoding": "gzip, deflate, br", }, ExpectedNoProxyHeaders: []string{"Content-Encoding"}, }, }, } for _, c := range requests { cfg := newFakeKeycloakConfig() cfg.Resources = []*Resource{{URL: "/admin*", Methods: allHTTPMethods}} cfg.EnableCompression = c.EnableCompression newFakeProxy(cfg).RunTests(t, []fakeRequest{c.Request}) } }
explode_data.jsonl/14766
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 686 }
[ 2830, 3393, 38, 9964, 81411, 1155, 353, 8840, 836, 8, 341, 23555, 82, 1669, 3056, 1235, 341, 197, 197, 11084, 81411, 1807, 198, 197, 73806, 1843, 12418, 1900, 198, 197, 59403, 197, 197, 515, 298, 197, 11084, 81411, 25, 830, 345, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGenRSTNoTag(t *testing.T) { rootCmd.DisableAutoGenTag = true defer func() { rootCmd.DisableAutoGenTag = false }() buf := new(bytes.Buffer) if err := GenReST(rootCmd, buf); err != nil { t.Fatal(err) } output := buf.String() unexpected := "Auto generated" checkStringOmits(t, output, unexpected) }
explode_data.jsonl/17806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 9967, 49, 784, 2753, 5668, 1155, 353, 8840, 836, 8, 341, 33698, 15613, 10166, 480, 13253, 9967, 5668, 284, 830, 198, 16867, 2915, 368, 314, 3704, 15613, 10166, 480, 13253, 9967, 5668, 284, 895, 335, 2822, 26398, 1669, 501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListIssueComments(t *testing.T) { ts := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet { t.Errorf("Bad method: %s", r.Method) } if r.URL.Path == "/repos/k8s/kuber/issues/15/comments" { ics := []IssueComment{{ID: 1}} b, err := json.Marshal(ics) if err != nil { t.Fatalf("Didn't expect error: %v", err) } w.Header().Set("Link", fmt.Sprintf(`<blorp>; rel="first", <https://%s/someotherpath>; rel="next"`, r.Host)) fmt.Fprint(w, string(b)) } else if r.URL.Path == "/someotherpath" { ics := []IssueComment{{ID: 2}} b, err := json.Marshal(ics) if err != nil { t.Fatalf("Didn't expect error: %v", err) } fmt.Fprint(w, string(b)) } else { t.Errorf("Bad request path: %s", r.URL.Path) } })) defer ts.Close() c := getClient(ts.URL) ics, err := c.ListIssueComments("k8s", "kuber", 15) if err != nil { t.Errorf("Didn't expect error: %v", err) } else if len(ics) != 2 { t.Errorf("Expected two issues, found %d: %v", len(ics), ics) } else if ics[0].ID != 1 || ics[1].ID != 2 { t.Errorf("Wrong issue IDs: %v", ics) } }
explode_data.jsonl/6260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 525 }
[ 2830, 3393, 852, 42006, 17373, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 13470, 1220, 2836, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 435, 20798, 961, 1758, 20798, 1949, 341...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestAdvance(t *testing.T) { it := New(FromStrings([]string{"a", "b", "c"})) n, more := it.Advance(2) if n != 1 || !more { t.Errorf("Advance(2) got index: %d and more: %t, but want: 1 and true.", n, more) } n, more = it.Advance(1) if n != 2 || !more { t.Errorf("Advance(1) after Advance(2) got index: %d and more: %t, but want: 2 and true.", n, more) } n, more = it.Advance(1) if n != 2 || more { t.Errorf("Advance(1) over the Iterator size got index: %d and more: %t, but want: 2 and false.", n, more) } // empty iter. it = New(FromStrings([]string{})) n, more = it.Advance(1) if n != 0 || more { t.Errorf("Advance(1) on an empty Iterator got index: %d and more: %t, but want: 0 and false.", n, more) } }
explode_data.jsonl/56146
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 95027, 1155, 353, 8840, 836, 8, 341, 23374, 1669, 1532, 7, 3830, 20859, 10556, 917, 4913, 64, 497, 330, 65, 497, 330, 66, 9207, 1171, 9038, 11, 803, 1669, 432, 17865, 85, 681, 7, 17, 340, 743, 308, 961, 220, 16, 1369, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestTLSOverrideServerName(t *testing.T) { expectedServerName := "server.name" c := NewTLS(nil) c.OverrideServerName(expectedServerName) if c.Info().ServerName != expectedServerName { t.Fatalf("c.Info().ServerName = %v, want %v", c.Info().ServerName, expectedServerName) } }
explode_data.jsonl/17025
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 45439, 2177, 5475, 675, 1155, 353, 8840, 836, 8, 341, 42400, 5475, 675, 1669, 330, 4030, 2644, 698, 1444, 1669, 1532, 45439, 27907, 340, 1444, 90008, 5475, 675, 15253, 5475, 675, 340, 743, 272, 20132, 1005, 5475, 675, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTwoManifestsWithSameTwoImages(t *testing.T) { f := newIBDFixture(t, k8s.EnvGKE) defer f.TearDown() m1, m2 := NewManifestsWithSameTwoImages(f) results1, err := f.ibd.BuildAndDeploy(f.ctx, f.st, buildTargets(m1), store.BuildStateSet{}) require.NoError(t, err) assert.Equal(t, []string{"image:gcr.io/common", "image:gcr.io/image-1", "k8s:dep-1"}, resultKeys(results1)) stateSet := f.resultsToNextState(results1) results2, err := f.ibd.BuildAndDeploy(f.ctx, f.st, buildTargets(m2), stateSet) require.NoError(t, err) assert.Equal(t, []string{"k8s:dep-2"}, resultKeys(results2)) }
explode_data.jsonl/38282
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 11613, 38495, 16056, 19198, 11613, 14228, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 3256, 5262, 12735, 1155, 11, 595, 23, 82, 81214, 38, 3390, 340, 16867, 282, 836, 682, 4454, 2822, 2109, 16, 11, 296, 17, 1669, 1532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRecordSerializeSimple4(t *testing.T) { var r Record r.Write("k2", "a\nb") r.Write("", "no name") r.Write("bu", "gatti ") r.Write("no value", "") r.Write("bu", " gatti") got := testRoundTrip(t, &r) exp := `k2:+3 a b : no name bu: gatti no value:+0 bu: gatti ` assert.Equal(t, exp, got) }
explode_data.jsonl/44795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 6471, 15680, 16374, 19, 1155, 353, 8840, 836, 8, 341, 2405, 435, 13583, 198, 7000, 4073, 445, 74, 17, 497, 330, 64, 1699, 65, 1138, 7000, 4073, 19814, 330, 2152, 829, 1138, 7000, 4073, 445, 19644, 497, 330, 70, 30955, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadConfig(t *testing.T) { factories, err := config.ExampleComponents() assert.NoError(t, err) factory := &Factory{} factories.Extensions[typeStr] = factory cfg, err := config.LoadConfigFile(t, path.Join(".", "testdata", "config.yaml"), factories) require.Nil(t, err) require.NotNil(t, cfg) ext0 := cfg.Extensions["health_check"] assert.Equal(t, factory.CreateDefaultConfig(), ext0) ext1 := cfg.Extensions["health_check/1"] assert.Equal(t, &Config{ ExtensionSettings: configmodels.ExtensionSettings{ TypeVal: "health_check", NameVal: "health_check/1", }, Port: 13, }, ext1) assert.Equal(t, 1, len(cfg.Service.Extensions)) assert.Equal(t, "health_check/1", cfg.Service.Extensions[0]) }
explode_data.jsonl/73545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 5879, 2648, 1155, 353, 8840, 836, 8, 341, 1166, 52893, 11, 1848, 1669, 2193, 5121, 1516, 10443, 741, 6948, 35699, 1155, 11, 1848, 692, 1166, 2919, 1669, 609, 4153, 16094, 1166, 52893, 20526, 21557, 2580, 60, 284, 8633, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_WorkspaceSku_Status_WhenPropertiesConverted_RoundTripsWithoutLoss(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip from WorkspaceSku_Status to WorkspaceSku_Status via AssignPropertiesToWorkspaceSkuStatus & AssignPropertiesFromWorkspaceSkuStatus returns original", prop.ForAll(RunPropertyAssignmentTestForWorkspaceSkuStatus, WorkspaceSkuStatusGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(false, 240, os.Stdout)) }
explode_data.jsonl/43376
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 87471, 8746, 50, 12133, 36449, 62, 4498, 7903, 61941, 2568, 795, 21884, 1690, 26040, 39838, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535, 1695, 284, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUploadByteSlicePoolConcurrentMultiPartSize(t *testing.T) { var ( pools []*recordedPartPool mtx sync.Mutex ) unswap := swapByteSlicePool(func(sliceSize int64) byteSlicePool { mtx.Lock() defer mtx.Unlock() b := newRecordedPartPool(sliceSize) pools = append(pools, b) return b }) defer unswap() client, _, _ := s3testing.NewUploadLoggingClient(nil) uploader := NewUploader(client, func(u *Uploader) { u.PartSize = 5 * sdkio.MebiByte u.Concurrency = 2 }) var wg sync.WaitGroup for i := 0; i < 2; i++ { wg.Add(2) go func() { defer wg.Done() expected := s3testing.GetTestBytes(int(15 * sdkio.MebiByte)) _, err := uploader.Upload(context.Background(), &s3.PutObjectInput{ Bucket: aws.String("bucket"), Key: aws.String("key"), Body: &testReader{br: bytes.NewReader(expected)}, }) if err != nil { t.Errorf("expected no error, but got %v", err) } }() go func() { defer wg.Done() expected := s3testing.GetTestBytes(int(15 * sdkio.MebiByte)) _, err := uploader.Upload(context.Background(), &s3.PutObjectInput{ Bucket: aws.String("bucket"), Key: aws.String("key"), Body: &testReader{br: bytes.NewReader(expected)}, }, func(u *Uploader) { u.PartSize = 6 * sdkio.MebiByte }) if err != nil { t.Errorf("expected no error, but got %v", err) } }() } wg.Wait() if e, a := 3, len(pools); e != a { t.Errorf("expected %v, got %v", e, a) } for _, p := range pools { if v := atomic.LoadInt64(&p.recordedOutstanding); v != 0 { t.Fatalf("expected zero outsnatding pool parts, got %d", v) } t.Logf("total gets %v, total allocations %v", atomic.LoadUint64(&p.recordedGets), atomic.LoadUint64(&p.recordedAllocs)) } }
explode_data.jsonl/73210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 779 }
[ 2830, 3393, 13844, 7153, 33236, 10551, 1109, 3231, 20358, 5800, 1695, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 3223, 6178, 29838, 8548, 291, 5800, 10551, 198, 197, 2109, 3998, 256, 12811, 99014, 198, 197, 692, 20479, 25741, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_SelectManyColl_int_rune_string(t *testing.T) { type args struct { source Enumerator[int] collectionSelector func(int) Enumerator[rune] resultSelector func(int, rune) string } tests := []struct { name string args args want Enumerator[string] }{ {name: "FlattenWithProjection", args: args{ source: NewOnSlice(3, 5, 20, 15), collectionSelector: func(x int) Enumerator[rune] { return NewOnSlice([]rune(fmt.Sprint(x))...) }, resultSelector: func(x int, c rune) string { return fmt.Sprintf("%d: %s", x, string(c)) }, }, want: NewOnSlice("3: 3", "5: 5", "20: 2", "20: 0", "15: 1", "15: 5"), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := SelectManyColl(tt.args.source, tt.args.collectionSelector, tt.args.resultSelector); !SequenceEqualMust(got, tt.want) { got.Reset() tt.want.Reset() t.Errorf("SelectManyColl() = '%v', want '%v'", String(got), String(tt.want)) } }) } }
explode_data.jsonl/63900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 465 }
[ 2830, 3393, 58073, 8441, 15265, 4042, 1710, 2886, 3904, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 47418, 1797, 76511, 18640, 921, 197, 1444, 1908, 5877, 2915, 1548, 8, 76511, 16131, 2886, 921, 197, 9559, 5877, 257, 2915...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTokenRefreshRequest(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.String() == "/somethingelse" { return } if r.URL.String() != "/token" { t.Errorf("Unexpected token refresh request URL, %v is found.", r.URL) } headerContentType := r.Header.Get("Content-Type") if headerContentType != "application/x-www-form-urlencoded" { t.Errorf("Unexpected Content-Type header, %v is found.", headerContentType) } body, _ := ioutil.ReadAll(r.Body) if string(body) != "audience=audience1&grant_type=client_credentials&scope=scope1+scope2" { t.Errorf("Unexpected refresh token payload, %v is found.", string(body)) } })) defer ts.Close() conf := newConf(ts.URL) c, _ := conf.Client(nil) c.Get(ts.URL + "/somethingelse") }
explode_data.jsonl/44406
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 3323, 14567, 1900, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 435, 20893, 6431, 368, 621, 3521, 33331, 1503, 1, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRangeDuplCheck(t *testing.T) { valueMap := make(map[uint64]*metapb.Range) valueMap[uint64(1)] = &metapb.Range{StartKey:[]byte("a"), EndKey:[]byte("b"), Id: uint64(1)} valueMap[uint64(2)] = &metapb.Range{StartKey:[]byte("a"), EndKey:[]byte("b"), Id: uint64(2)} valueMap[uint64(3)] = &metapb.Range{StartKey:[]byte("b"), EndKey:[]byte("c"), Id: uint64(3)} valueMap[uint64(4)] = &metapb.Range{StartKey:[]byte("d"), EndKey:[]byte("e"), Id: uint64(4)} var ranges []*metapb.Range rangeDuplMap := make(map[string][]uint64) jointFunc := func(startKey, endKey []byte) string { return fmt.Sprintf("%s%s", startKey, endKey) } var rangeIdSlice []uint64 for _, r := range valueMap { key := jointFunc(r.StartKey, r.EndKey) rangeIdSlice = rangeDuplMap[key] rangeIdSlice = append(rangeIdSlice, r.GetId()) rangeDuplMap[key] = rangeIdSlice } for _, slice := range rangeDuplMap { if len(slice) > 1 { for _, rngId := range slice { rng := valueMap[rngId] ranges = append(ranges, rng) } } } for _, res := range ranges { t.Logf("====%v", res) } }
explode_data.jsonl/25355
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 472 }
[ 2830, 3393, 6046, 85713, 75, 3973, 1155, 353, 8840, 836, 8, 341, 16309, 2227, 1669, 1281, 9147, 58, 2496, 21, 19, 8465, 4059, 391, 65, 24783, 340, 16309, 2227, 58, 2496, 21, 19, 7, 16, 7252, 284, 609, 4059, 391, 65, 24783, 90, 347...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMissingOrdererSection(t *testing.T) { blockDest := filepath.Join(tmpDir, "block") config := configtxgentest.Load(genesisconfig.SampleInsecureSoloProfile) config.Orderer = nil assert.Panics(t, func() { doOutputBlock(config, "foo", blockDest) }, "Missing orderer section") }
explode_data.jsonl/14585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 25080, 4431, 261, 9620, 1155, 353, 8840, 836, 8, 341, 47996, 34830, 1669, 26054, 22363, 10368, 6184, 11, 330, 4574, 5130, 25873, 1669, 2193, 3998, 15772, 477, 13969, 36884, 13774, 1676, 76266, 641, 25132, 89299, 8526, 340, 258...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClusterAdminDescribeBrokerConfig(t *testing.T) { controllerBroker := NewMockBroker(t, 1) defer controllerBroker.Close() configBroker := NewMockBroker(t, 2) defer configBroker.Close() controllerBroker.SetHandlerByMap(map[string]MockResponse{ "MetadataRequest": NewMockMetadataResponse(t). SetController(controllerBroker.BrokerID()). SetBroker(controllerBroker.Addr(), controllerBroker.BrokerID()). SetBroker(configBroker.Addr(), configBroker.BrokerID()), }) configBroker.SetHandlerByMap(map[string]MockResponse{ "MetadataRequest": NewMockMetadataResponse(t). SetController(controllerBroker.BrokerID()). SetBroker(controllerBroker.Addr(), controllerBroker.BrokerID()). SetBroker(configBroker.Addr(), configBroker.BrokerID()), "DescribeConfigsRequest": NewMockDescribeConfigsResponse(t), }) config := NewTestConfig() config.Version = V1_0_0_0 admin, err := NewClusterAdmin( []string{ controllerBroker.Addr(), configBroker.Addr(), }, config) if err != nil { t.Fatal(err) } for _, resourceType := range []ConfigResourceType{BrokerResource, BrokerLoggerResource} { resource := ConfigResource{Name: "2", Type: resourceType} entries, err := admin.DescribeConfig(resource) if err != nil { t.Fatal(err) } if len(entries) <= 0 { t.Fatal(errors.New("no resource present")) } } err = admin.Close() if err != nil { t.Fatal(err) } }
explode_data.jsonl/40795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 532 }
[ 2830, 3393, 28678, 7210, 74785, 65545, 2648, 1155, 353, 8840, 836, 8, 341, 61615, 65545, 1669, 1532, 11571, 65545, 1155, 11, 220, 16, 340, 16867, 6461, 65545, 10421, 741, 25873, 65545, 1669, 1532, 11571, 65545, 1155, 11, 220, 17, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidateLimitRangeForLocalStorage(t *testing.T) { testCases := []struct { name string spec core.LimitRangeSpec }{ { name: "all-fields-valid", spec: core.LimitRangeSpec{ Limits: []core.LimitRangeItem{ { Type: core.LimitTypePod, Max: getLocalStorageResourceList("10000Mi"), Min: getLocalStorageResourceList("100Mi"), MaxLimitRequestRatio: getLocalStorageResourceList(""), }, { Type: core.LimitTypeContainer, Max: getLocalStorageResourceList("10000Mi"), Min: getLocalStorageResourceList("100Mi"), Default: getLocalStorageResourceList("500Mi"), DefaultRequest: getLocalStorageResourceList("200Mi"), MaxLimitRequestRatio: getLocalStorageResourceList(""), }, }, }, }, } for _, testCase := range testCases { limitRange := &core.LimitRange{ObjectMeta: metav1.ObjectMeta{Name: testCase.name, Namespace: "foo"}, Spec: testCase.spec} if errs := ValidateLimitRange(limitRange); len(errs) != 0 { t.Errorf("Case %v, unexpected error: %v", testCase.name, errs) } } }
explode_data.jsonl/25673
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 545 }
[ 2830, 3393, 17926, 16527, 6046, 2461, 90464, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 98100, 6200, 1214, 2353, 6046, 8327, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 541, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCloudHypervisorNetRateLimiter(t *testing.T) { assert := assert.New(t) tapPath := "/path/to/tap" validVeth := &VethEndpoint{} validVeth.NetPair.TapInterface.TAPIface.Name = tapPath type args struct { bwMaxRate int64 bwOneTimeBurst int64 opsMaxRate int64 opsOneTimeBurst int64 } //nolint: govet tests := []struct { name string args args expectsRateLimiter bool expectsBwBucketToken bool expectsOpsBucketToken bool }{ // Bandwidth { "Bandwidth | max rate with one time burst", args{ bwMaxRate: int64(1000), bwOneTimeBurst: int64(10000), }, true, // expectsRateLimiter true, // expectsBwBucketToken false, // expectsOpsBucketToken }, { "Bandwidth | max rate without one time burst", args{ bwMaxRate: int64(1000), }, true, // expectsRateLimiter true, // expectsBwBucketToken false, // expectsOpsBucketToken }, { "Bandwidth | no max rate with one time burst", args{ bwOneTimeBurst: int64(10000), }, false, // expectsRateLimiter false, // expectsBwBucketToken false, // expectsOpsBucketToken }, { "Bandwidth | no max rate and no one time burst", args{}, false, // expectsRateLimiter false, // expectsBwBucketToken false, // expectsOpsBucketToken }, // Operations { "Operations | max rate with one time burst", args{ opsMaxRate: int64(1000), opsOneTimeBurst: int64(10000), }, true, // expectsRateLimiter false, // expectsBwBucketToken true, // expectsOpsBucketToken }, { "Operations | max rate without one time burst", args{ opsMaxRate: int64(1000), }, true, // expectsRateLimiter false, // expectsBwBucketToken true, // expectsOpsBucketToken }, { "Operations | no max rate with one time burst", args{ opsOneTimeBurst: int64(10000), }, false, // expectsRateLimiter false, // expectsBwBucketToken false, // expectsOpsBucketToken }, { "Operations | no max rate and no one time burst", args{}, false, // expectsRateLimiter false, // expectsBwBucketToken false, // expectsOpsBucketToken }, // Bandwidth and Operations { "Bandwidth and Operations | max rate with one time burst", args{ bwMaxRate: 
int64(1000), bwOneTimeBurst: int64(10000), opsMaxRate: int64(1000), opsOneTimeBurst: int64(10000), }, true, // expectsRateLimiter true, // expectsBwBucketToken true, // expectsOpsBucketToken }, { "Bandwidth and Operations | max rate without one time burst", args{ bwMaxRate: int64(1000), opsMaxRate: int64(1000), }, true, // expectsRateLimiter true, // expectsBwBucketToken true, // expectsOpsBucketToken }, { "Bandwidth and Operations | no max rate with one time burst", args{ bwOneTimeBurst: int64(10000), opsOneTimeBurst: int64(10000), }, false, // expectsRateLimiter false, // expectsBwBucketToken false, // expectsOpsBucketToken }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { clhConfig, err := newClhConfig() assert.NoError(err) clhConfig.NetRateLimiterBwMaxRate = tt.args.bwMaxRate clhConfig.NetRateLimiterBwOneTimeBurst = tt.args.bwOneTimeBurst clhConfig.NetRateLimiterOpsMaxRate = tt.args.opsMaxRate clhConfig.NetRateLimiterOpsOneTimeBurst = tt.args.opsOneTimeBurst clh := &cloudHypervisor{} clh.config = clhConfig clh.APIClient = &clhClientMock{} if err := clh.addNet(validVeth); err != nil { t.Errorf("cloudHypervisor.addNet() error = %v", err) } else { netConfig := (*clh.vmconfig.Net)[0] assert.Equal(netConfig.HasRateLimiterConfig(), tt.expectsRateLimiter) if tt.expectsRateLimiter { rateLimiterConfig := netConfig.GetRateLimiterConfig() assert.Equal(rateLimiterConfig.HasBandwidth(), tt.expectsBwBucketToken) assert.Equal(rateLimiterConfig.HasOps(), tt.expectsOpsBucketToken) if tt.expectsBwBucketToken { bwBucketToken := rateLimiterConfig.GetBandwidth() assert.Equal(bwBucketToken.GetSize(), int64(utils.RevertBytes(uint64(tt.args.bwMaxRate/8)))) assert.Equal(bwBucketToken.GetOneTimeBurst(), int64(utils.RevertBytes(uint64(tt.args.bwOneTimeBurst/8)))) } if tt.expectsOpsBucketToken { opsBucketToken := rateLimiterConfig.GetOps() assert.Equal(opsBucketToken.GetSize(), int64(tt.args.opsMaxRate)) assert.Equal(opsBucketToken.GetOneTimeBurst(), 
int64(tt.args.opsOneTimeBurst)) } } } }) } }
explode_data.jsonl/68497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1984 }
[ 2830, 3393, 16055, 39, 1082, 31396, 6954, 11564, 43, 17700, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 3244, 391, 1820, 1669, 3521, 2343, 32429, 5523, 391, 1837, 56322, 53, 769, 1669, 609, 53, 769, 27380, 16094, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// Test_NewServer constructs a Server, swaps in a MockQueryNode that answers
// every RPC with a healthy/success response, and exercises each gRPC handler
// once, asserting every call reports commonpb.ErrorCode_Success.
func Test_NewServer(t *testing.T) {
	ctx := context.Background()
	server, err := NewServer(ctx, nil)
	assert.Nil(t, err)
	assert.NotNil(t, server)

	// Mock backend: every handler returns a healthy state / success status.
	mqn := &MockQueryNode{
		states:     &internalpb.ComponentStates{State: &internalpb.ComponentInfo{StateCode: internalpb.StateCode_Healthy}},
		status:     &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success},
		err:        nil,
		strResp:    &milvuspb.StringResponse{Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}},
		infoResp:   &querypb.GetSegmentInfoResponse{Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}},
		metricResp: &milvuspb.GetMetricsResponse{Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}},
	}
	server.querynode = mqn

	t.Run("Run", func(t *testing.T) {
		err = server.Run()
		assert.Nil(t, err)
	})
	t.Run("GetComponentStates", func(t *testing.T) {
		req := &internalpb.GetComponentStatesRequest{}
		states, err := server.GetComponentStates(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, internalpb.StateCode_Healthy, states.State.StateCode)
	})
	t.Run("GetStatisticsChannel", func(t *testing.T) {
		req := &internalpb.GetStatisticsChannelRequest{}
		resp, err := server.GetStatisticsChannel(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
	})
	t.Run("GetTimeTickChannel", func(t *testing.T) {
		req := &internalpb.GetTimeTickChannelRequest{}
		resp, err := server.GetTimeTickChannel(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
	})
	// The following handlers return a bare status, so the error code is read
	// directly off the response rather than a nested Status field.
	t.Run("WatchDmChannels", func(t *testing.T) {
		req := &querypb.WatchDmChannelsRequest{}
		resp, err := server.WatchDmChannels(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.ErrorCode)
	})
	t.Run("LoadSegments", func(t *testing.T) {
		req := &querypb.LoadSegmentsRequest{}
		resp, err := server.LoadSegments(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.ErrorCode)
	})
	t.Run("ReleaseCollection", func(t *testing.T) {
		req := &querypb.ReleaseCollectionRequest{}
		resp, err := server.ReleaseCollection(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.ErrorCode)
	})
	t.Run("ReleasePartitions", func(t *testing.T) {
		req := &querypb.ReleasePartitionsRequest{}
		resp, err := server.ReleasePartitions(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.ErrorCode)
	})
	t.Run("ReleaseSegments", func(t *testing.T) {
		req := &querypb.ReleaseSegmentsRequest{}
		resp, err := server.ReleaseSegments(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.ErrorCode)
	})
	t.Run("GetSegmentInfo", func(t *testing.T) {
		req := &querypb.GetSegmentInfoRequest{}
		resp, err := server.GetSegmentInfo(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
	})
	t.Run("GetMetrics", func(t *testing.T) {
		req := &milvuspb.GetMetricsRequest{
			Request: "",
		}
		resp, err := server.GetMetrics(ctx, req)
		assert.Nil(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
	})
	t.Run("Search", func(t *testing.T) {
		req := &querypb.SearchRequest{}
		resp, err := server.Search(ctx, req)
		assert.NoError(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
	})
	t.Run("Query", func(t *testing.T) {
		req := &querypb.QueryRequest{}
		resp, err := server.Query(ctx, req)
		assert.NoError(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
	})
	t.Run("SyncReplicaSegments", func(t *testing.T) {
		req := &querypb.SyncReplicaSegmentsRequest{}
		resp, err := server.SyncReplicaSegments(ctx, req)
		assert.NoError(t, err)
		assert.Equal(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
	})

	// Shut the server down cleanly at the end of the handler sweep.
	err = server.Stop()
	assert.Nil(t, err)
}
explode_data.jsonl/78823
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1532 }
[ 2830, 3393, 39582, 5475, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 41057, 11, 1848, 1669, 1532, 5475, 7502, 11, 2092, 340, 6948, 59678, 1155, 11, 1848, 340, 6948, 93882, 1155, 11, 3538, 692, 2109, 48350, 1669, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDialTimeoutMaxDuration(t *testing.T) { if runtime.GOOS == "openbsd" { testenv.SkipFlaky(t, 15157) } ln, err := newLocalListener("tcp") if err != nil { t.Fatal(err) } defer ln.Close() for i, tt := range dialTimeoutMaxDurationTests { ch := make(chan error) max := time.NewTimer(250 * time.Millisecond) defer max.Stop() go func() { d := Dialer{Timeout: tt.timeout} if tt.delta != 0 { d.Deadline = time.Now().Add(tt.delta) } c, err := d.Dial(ln.Addr().Network(), ln.Addr().String()) if err == nil { c.Close() } ch <- err }() select { case <-max.C: t.Fatalf("#%d: Dial didn't return in an expected time", i) case err := <-ch: if perr := parseDialError(err); perr != nil { t.Error(perr) } if err != nil { t.Errorf("#%d: %v", i, err) } } } }
explode_data.jsonl/57673
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 35, 530, 7636, 5974, 12945, 1155, 353, 8840, 836, 8, 341, 743, 15592, 97574, 3126, 621, 330, 2508, 51835, 1, 341, 197, 18185, 3160, 57776, 3882, 28100, 1155, 11, 220, 16, 20, 16, 20, 22, 340, 197, 630, 197, 2261, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestChan(t *testing.T) { gopClTest(t, ` a := make(chan int, 10) a <- 3 var b int = <-a x, ok := <-a `, `package main func main() { a := make(chan int, 10) a <- 3 var b int = <-a x, ok := <-a } `) }
explode_data.jsonl/73633
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 46019, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 64, 1669, 1281, 35190, 526, 11, 220, 16, 15, 340, 64, 9119, 220, 18, 198, 947, 293, 526, 284, 9119, 64, 198, 87, 11, 5394, 1669, 9119, 64, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateOperation_Check(t *testing.T) { log.SetOutput(ioutil.Discard) defer log.SetOutput(os.Stdout) m := &generate.Operation{} _, _ = flags.ParseArgs(m, []string{"--name=op1", "--name=op2"}) m.Shared.DumpData = true m.Name = []string{"op1", "op2"} err := m.Execute([]string{}) assert.Error(t, err) }
explode_data.jsonl/64302
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 31115, 8432, 28188, 1155, 353, 8840, 836, 8, 341, 6725, 4202, 5097, 1956, 30158, 909, 47560, 340, 16867, 1487, 4202, 5097, 9638, 83225, 692, 2109, 1669, 609, 19366, 56297, 16094, 197, 6878, 716, 284, 8042, 8937, 4117, 1255, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckUp(t *testing.T) { host, port, ip, closeEcho, err := startEchoServer(t) require.NoError(t, err) defer closeEcho() configMap := common.MapStr{ "hosts": host, "ports": port, "timeout": "1s", "check.receive": "echo123", "check.send": "echo123", } event := testTCPConfigCheck(t, configMap, host, port) testslike.Test( t, lookslike.Strict(lookslike.Compose( hbtest.BaseChecks(ip, "up", "tcp"), hbtest.RespondingTCPChecks(), hbtest.SimpleURLChecks(t, "tcp", host, port), hbtest.SummaryChecks(1, 0), lookslike.MustCompile(map[string]interface{}{ "resolve": map[string]interface{}{ "ip": ip, "rtt.us": isdef.IsDuration, }, "tcp": map[string]interface{}{ "rtt.validate.us": isdef.IsDuration, }, }), )), event.Fields, ) }
explode_data.jsonl/54639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 3973, 2324, 1155, 353, 8840, 836, 8, 341, 63104, 11, 2635, 11, 5997, 11, 3265, 74994, 11, 1848, 1669, 1191, 74994, 5475, 1155, 340, 17957, 35699, 1155, 11, 1848, 340, 16867, 3265, 74994, 2822, 25873, 2227, 1669, 4185, 10104,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_carbonreceiver_EndToEnd(t *testing.T) { host := "localhost" port := int(testutil.GetAvailablePort(t)) tests := []struct { name string configFn func() *Config clientFn func(t *testing.T) *client.Graphite }{ { name: "default_config", configFn: func() *Config { return createDefaultConfig().(*Config) }, clientFn: func(t *testing.T) *client.Graphite { c, err := client.NewGraphite(client.TCP, host, port) require.NoError(t, err) return c }, }, { name: "default_config_udp", configFn: func() *Config { cfg := createDefaultConfig().(*Config) cfg.Transport = "udp" return cfg }, clientFn: func(t *testing.T) *client.Graphite { c, err := client.NewGraphite(client.UDP, host, port) require.NoError(t, err) return c }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { cfg := tt.configFn() cfg.Endpoint = fmt.Sprintf("%s:%d", host, port) sink := new(consumertest.MetricsSink) rcv, err := New(zap.NewNop(), *cfg, sink) require.NoError(t, err) r := rcv.(*carbonReceiver) mr := transport.NewMockReporter(1) r.reporter = mr require.NoError(t, r.Start(context.Background(), componenttest.NewNopHost())) runtime.Gosched() defer r.Shutdown(context.Background()) require.Equal(t, componenterror.ErrAlreadyStarted, r.Start(context.Background(), componenttest.NewNopHost())) snd := tt.clientFn(t) ts := time.Now() carbonMetric := client.Metric{ Name: "tst_dbl", Value: 1.23, Timestamp: ts, } err = snd.SendMetric(carbonMetric) require.NoError(t, err) mr.WaitAllOnMetricsProcessedCalls() mdd := sink.AllMetrics() require.Len(t, mdd, 1) ocmd := internaldata.MetricsToOC(mdd[0]) require.Len(t, ocmd, 1) require.Len(t, ocmd[0].Metrics, 1) metric := ocmd[0].Metrics[0] assert.Equal(t, carbonMetric.Name, metric.GetMetricDescriptor().GetName()) tss := metric.GetTimeseries() require.Equal(t, 1, len(tss)) assert.NoError(t, r.Shutdown(context.Background())) assert.Equal(t, componenterror.ErrAlreadyStopped, r.Shutdown(context.Background())) }) } }
explode_data.jsonl/81122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 970 }
[ 2830, 3393, 26616, 5970, 37553, 49953, 66573, 1155, 353, 8840, 836, 8, 341, 63104, 1669, 330, 8301, 698, 52257, 1669, 526, 8623, 1314, 2234, 16485, 7084, 1155, 1171, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 25873, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTwoManifestsWithTwoCommonAncestors(t *testing.T) { f := newIBDFixture(t, k8s.EnvGKE) defer f.TearDown() m1, m2 := NewManifestsWithTwoCommonAncestors(f) results1, err := f.ibd.BuildAndDeploy(f.ctx, f.st, buildTargets(m1), store.BuildStateSet{}) require.NoError(t, err) assert.Equal(t, []string{"image:gcr.io/base", "image:gcr.io/common", "image:gcr.io/image-1", "k8s:image-1"}, resultKeys(results1)) stateSet := f.resultsToNextState(results1) results2, err := f.ibd.BuildAndDeploy(f.ctx, f.st, buildTargets(m2), stateSet) require.NoError(t, err) assert.Equal(t, // We did not return image-common because it didn't need a rebuild. []string{"image:gcr.io/image-2", "k8s:image-2"}, resultKeys(results2)) }
explode_data.jsonl/38281
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 11613, 38495, 16056, 11613, 10839, 2082, 15184, 1087, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 3256, 5262, 12735, 1155, 11, 595, 23, 82, 81214, 38, 3390, 340, 16867, 282, 836, 682, 4454, 2822, 2109, 16, 11, 296, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSubscribeIsANoopWhenCalledWithNoKeys(t *testing.T) { n := New() defer n.Shutdown() ch := n.Subscribe(context.Background()) // no keys provided if _, ok := <-ch; ok { t.Fatal("should be closed if no keys provided") } }
explode_data.jsonl/8959
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 28573, 3872, 1093, 47306, 4498, 20960, 2354, 2753, 8850, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 1532, 741, 16867, 308, 10849, 18452, 741, 23049, 1669, 308, 82628, 5378, 19047, 2140, 442, 902, 6894, 3897, 198, 743, 8358, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestKeyAddRemove(t *testing.T) { passwordList := []string{ "OnnyiasyatvodsEvVodyawit", "raicneirvOjEfEigonOmLasOd", } env, cleanup := withTestEnvironment(t) // must list keys more than once env.gopts.backendTestHook = nil defer cleanup() testRunInit(t, env.gopts) testRunKeyPasswd(t, "geheim2", env.gopts) env.gopts.password = "geheim2" t.Logf("changed password to %q", env.gopts.password) for _, newPassword := range passwordList { testRunKeyAddNewKey(t, newPassword, env.gopts) t.Logf("added new password %q", newPassword) env.gopts.password = newPassword testRunKeyRemove(t, env.gopts, testRunKeyListOtherIDs(t, env.gopts)) } env.gopts.password = passwordList[len(passwordList)-1] t.Logf("testing access with last password %q\n", env.gopts.password) rtest.OK(t, runKey(env.gopts, []string{"list"})) testRunCheck(t, env.gopts) testRunKeyAddNewKeyUserHost(t, env.gopts) }
explode_data.jsonl/43555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 376 }
[ 2830, 3393, 1592, 2212, 13021, 1155, 353, 8840, 836, 8, 341, 58199, 852, 1669, 3056, 917, 515, 197, 197, 98603, 3834, 3473, 98615, 85, 29697, 34112, 53, 1076, 672, 275, 756, 197, 197, 1, 956, 292, 811, 404, 85, 46, 73, 79686, 36, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCostomerUsecaseImpl_FindCustomerByID(t *testing.T) { t.Run("Shoul return data", func(t *testing.T) { mockAccountRepo := new(mocks.CustomerRepoMock) mockAccountRepo.On("FindCustomerByID", mockAccount.AccountNumber).Return(&mockAccount, nil) customerUsecase := usecase.CreateCustomerUsecase(mockAccountRepo) res, err := customerUsecase.FindCustomerByID(mockAccount.AccountNumber) assert.NotNil(t, res) assert.Equal(t, mockAccount.AccountNumber, res.AccountNumber) assert.Nil(t, err) }) }
explode_data.jsonl/70250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 14940, 25359, 52, 5024, 519, 9673, 95245, 12792, 60572, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 2016, 10965, 470, 821, 497, 2915, 1155, 353, 8840, 836, 8, 1476, 197, 77333, 7365, 25243, 1669, 501, 1255, 25183, 37293,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFragmentationSlowReader(t *testing.T) { // Inbound forward will timeout and cause a warning log. opts := testutils.NewOpts(). AddLogFilter("Unable to forward frame", 1). AddLogFilter("Connection error", 1) testutils.WithTestServer(t, opts, func(t testing.TB, ts *testutils.TestServer) { startReading, handlerComplete := make(chan struct{}), make(chan struct{}) handler := func(ctx context.Context, call *InboundCall) { <-startReading <-ctx.Done() _, err := raw.ReadArgs(call) assert.Error(t, err, "ReadArgs should fail since frames will be dropped due to slow reading") close(handlerComplete) } ts.Register(HandlerFunc(handler), "echo") arg2 := testutils.RandBytes(MaxFramePayloadSize * MexChannelBufferSize) arg3 := testutils.RandBytes(MaxFramePayloadSize * (MexChannelBufferSize + 1)) ctx, cancel := NewContext(testutils.Timeout(30 * time.Millisecond)) defer cancel() _, _, _, err := raw.Call(ctx, ts.Server(), ts.HostPort(), ts.ServiceName(), "echo", arg2, arg3) assert.Error(t, err, "Call should timeout due to slow reader") close(startReading) select { case <-handlerComplete: case <-time.After(testutils.Timeout(70 * time.Millisecond)): t.Errorf("Handler not called, context timeout may be too low") } calls := relaytest.NewMockStats() calls.Add(ts.ServiceName(), ts.ServiceName(), "echo").Failed("timeout").End() ts.AssertRelayStats(calls) }) }
explode_data.jsonl/78190
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 495 }
[ 2830, 3393, 9488, 367, 58289, 5062, 1155, 353, 8840, 836, 8, 341, 197, 322, 758, 10891, 4637, 686, 9632, 323, 5240, 264, 9958, 1487, 624, 64734, 1669, 1273, 6031, 7121, 43451, 25829, 197, 37972, 2201, 5632, 445, 17075, 311, 4637, 4034, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImportVendorFailure(t *testing.T) { testenv.MustHaveGoBuild(t) // really must just have source ctxt := Default ctxt.GOPATH = "" p, err := ctxt.Import("x.com/y/z", filepath.Join(ctxt.GOROOT, "src/net/http"), 0) if err == nil { t.Fatalf("found made-up package x.com/y/z in %s", p.Dir) } e := err.Error() if !strings.Contains(e, " (vendor tree)") { t.Fatalf("error on failed import does not mention GOROOT/src/vendor directory:\n%s", e) } }
explode_data.jsonl/523
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 11511, 44691, 17507, 1155, 353, 8840, 836, 8, 341, 18185, 3160, 50463, 12116, 10850, 11066, 1155, 8, 442, 2167, 1969, 1101, 614, 2530, 198, 197, 77492, 1669, 7899, 198, 197, 77492, 1224, 3067, 4827, 284, 8389, 3223, 11, 1848...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestComponents(t *testing.T) { host := "quay.io" image := "my/repo" tag := "mytag" fqn := fmt.Sprintf("%v/%v:%v", host, image, tag) i, err := ParseRef(fqn) if err != nil { t.Fatal(err) } for _, x := range []struct { test string expected string }{ {i.Domain, host}, {i.Image, image}, {i.Tag, tag}, {i.String(), fqn}, } { if x.test != x.expected { t.Fatalf("Expected %v, but got %v", x.expected, x.test) } } }
explode_data.jsonl/60184
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 10443, 1155, 353, 8840, 836, 8, 341, 63104, 1669, 330, 446, 352, 4245, 698, 31426, 1669, 330, 2408, 10758, 5368, 698, 60439, 1669, 330, 2408, 4578, 698, 1166, 48350, 1669, 8879, 17305, 4430, 85, 12627, 85, 7533, 85, 497, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_Server_ListenAndServe(t *testing.T) { t.Skip("Test is unstable, see https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/1426") tests := []struct { name string buildServerFn func(addr string) (Server, error) buildClientFn func(host string, port int) (*client.StatsD, error) }{ { name: "udp", buildServerFn: func(addr string) (Server, error) { return NewUDPServer(addr) }, buildClientFn: func(host string, port int) (*client.StatsD, error) { return client.NewStatsD(client.UDP, host, port) }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { addr := testutil.GetAvailableLocalAddress(t) srv, err := tt.buildServerFn(addr) require.NoError(t, err) require.NotNil(t, srv) host, portStr, err := net.SplitHostPort(addr) require.NoError(t, err) port, err := strconv.Atoi(portStr) require.NoError(t, err) mc := new(consumertest.MetricsSink) p := &protocol.StatsDParser{} require.NoError(t, err) mr := NewMockReporter(1) var transferChan = make(chan string, 10) wgListenAndServe := sync.WaitGroup{} wgListenAndServe.Add(1) go func() { defer wgListenAndServe.Done() assert.Error(t, srv.ListenAndServe(p, mc, mr, transferChan)) }() runtime.Gosched() gc, err := tt.buildClientFn(host, port) require.NoError(t, err) require.NotNil(t, gc) err = gc.SendMetric(client.Metric{ Name: "test.metric", Value: "42", Type: "c", }) assert.NoError(t, err) runtime.Gosched() err = gc.Disconnect() assert.NoError(t, err) err = srv.Close() assert.NoError(t, err) wgListenAndServe.Wait() assert.Equal(t, 1, len(transferChan)) }) } }
explode_data.jsonl/28555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 781 }
[ 2830, 3393, 62320, 27104, 268, 96059, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 2271, 374, 44211, 11, 1490, 3703, 1110, 5204, 905, 37644, 48793, 35958, 52000, 6817, 35958, 19459, 27669, 85444, 38745, 14, 16, 19, 17, 21, 5130, 7821...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRetryDo(t *testing.T) { t.Parallel() r := New( WithMaxAttempts(3), ) attempt := 0 assert.NoError(t, r.Do(context.Background(), func(context.Context) error { attempt++ return failFirstAttempts(3)(attempt) })) assert.Equal(t, 3, attempt) }
explode_data.jsonl/82058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 51560, 5404, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 7000, 1669, 1532, 1006, 197, 197, 2354, 5974, 81517, 7, 18, 1326, 197, 692, 35447, 4213, 1669, 220, 15, 198, 6948, 35699, 1155, 11, 435, 33596, 5378, 1904...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRowVisibility(t *testing.T) { xlsx, err := prepareTestBook1() if !assert.NoError(t, err) { t.FailNow() } xlsx.NewSheet("Sheet3") assert.NoError(t, xlsx.SetRowVisible("Sheet3", 2, false)) assert.NoError(t, xlsx.SetRowVisible("Sheet3", 2, true)) xlsx.GetRowVisible("Sheet3", 2) xlsx.GetRowVisible("Sheet3", 25) assert.EqualError(t, xlsx.SetRowVisible("Sheet3", 0, true), "invalid row number 0") visible, err := xlsx.GetRowVisible("Sheet3", 0) assert.Equal(t, false, visible) assert.EqualError(t, err, "invalid row number 0") assert.NoError(t, xlsx.SaveAs(filepath.Join("test", "TestRowVisibility.xlsx"))) }
explode_data.jsonl/30489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 3102, 11432, 1155, 353, 8840, 836, 8, 341, 10225, 29017, 11, 1848, 1669, 10549, 2271, 7134, 16, 741, 743, 753, 2207, 35699, 1155, 11, 1848, 8, 341, 197, 3244, 57243, 7039, 741, 197, 532, 10225, 29017, 7121, 10541, 445, 105...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHighTrafficTCP(t *testing.T) { listener := TcpListener{ ServiceAddress: ":8199", AllowedPendingMessages: 100000, MaxTCPConnections: 250, } listener.parser, _ = parsers.NewInfluxParser() acc := &testutil.Accumulator{} // send multiple messages to socket err := listener.Start(acc) require.NoError(t, err) time.Sleep(time.Millisecond * 25) conn, err := net.Dial("tcp", "127.0.0.1:8199") require.NoError(t, err) for i := 0; i < 100000; i++ { fmt.Fprintf(conn, testMsg) } time.Sleep(time.Millisecond) listener.Stop() assert.Equal(t, 100000, len(acc.Metrics)) }
explode_data.jsonl/33000
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 11976, 87229, 49896, 1155, 353, 8840, 836, 8, 341, 14440, 798, 1669, 64876, 2743, 515, 197, 91619, 4286, 25, 260, 13022, 23, 16, 24, 24, 756, 197, 197, 35382, 32027, 15820, 25, 220, 16, 15, 15, 15, 15, 15, 345, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestForwardedMetricWithMetadataToProtoBadMetadata(t *testing.T) { var pb metricpb.ForwardedMetricWithMetadata tm := ForwardedMetricWithMetadata{ ForwardedMetric: testForwardedMetric1, ForwardMetadata: testBadForwardMetadata, } require.Error(t, tm.ToProto(&pb)) }
explode_data.jsonl/75081
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 25925, 291, 54310, 2354, 14610, 1249, 31549, 17082, 14610, 1155, 353, 8840, 836, 8, 341, 2405, 17310, 18266, 16650, 26676, 1606, 291, 54310, 2354, 14610, 198, 3244, 76, 1669, 22164, 291, 54310, 2354, 14610, 515, 197, 197, 2592...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseTimeTomorrow(t *testing.T) { tm, err := NewTimeFromString("02:12>") require.Nil(t, err) should := Ɀ_TimeTomorrow_(2, 12) assert.Equal(t, should, tm) assert.Equal(t, false, tm.IsToday()) assert.Equal(t, false, tm.IsYesterday()) assert.Equal(t, true, tm.IsTomorrow()) }
explode_data.jsonl/4305
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 14463, 1462, 90173, 1155, 353, 8840, 836, 8, 341, 3244, 76, 11, 1848, 1669, 1532, 1462, 44491, 445, 15, 17, 25, 16, 17, 42363, 17957, 59678, 1155, 11, 1848, 340, 197, 5445, 1669, 2858, 109, 123, 39080, 90173, 8361, 17, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigServiceV2UpdateSink(t *testing.T) { var name string = "name3373707" var destination string = "destination-1429847026" var filter string = "filter-1274492040" var writerIdentity string = "writerIdentity775638794" var expectedResponse = &loggingpb.LogSink{ Name: name, Destination: destination, Filter: filter, WriterIdentity: writerIdentity, } mockConfig.err = nil mockConfig.reqs = nil mockConfig.resps = append(mockConfig.resps[:0], expectedResponse) var formattedSinkName string = ConfigSinkPath("[PROJECT]", "[SINK]") var sink *loggingpb.LogSink = &loggingpb.LogSink{} var request = &loggingpb.UpdateSinkRequest{ SinkName: formattedSinkName, Sink: sink, } c, err := NewConfigClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } resp, err := c.UpdateSink(context.Background(), request) if err != nil { t.Fatal(err) } if want, got := request, mockConfig.reqs[0]; !proto.Equal(want, got) { t.Errorf("wrong request %q, want %q", got, want) } if want, got := expectedResponse, resp; !proto.Equal(want, got) { t.Errorf("wrong response %q, want %q)", got, want) } }
explode_data.jsonl/77773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 458 }
[ 2830, 3393, 2648, 1860, 53, 17, 4289, 45094, 1155, 353, 8840, 836, 8, 341, 2405, 829, 914, 284, 330, 606, 18, 18, 22, 18, 22, 15, 22, 698, 2405, 9106, 914, 284, 330, 17997, 12, 16, 19, 17, 24, 23, 19, 22, 15, 17, 21, 698, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTenancyInjector(t *testing.T) { db, mock, err := sqlmock.New() require.NoError(t, err) defer func() { err := mock.ExpectationsWereMet() require.NoError(t, err) }() mock.ExpectBegin() tenants := []string{"foo", "bar", "baz"} result := sqlmock.NewResult(0, 0) for _, tenant := range tenants { mock.ExpectExec( fmt.Sprintf("USE `tenant_%s`", tenant), ). WillReturnResult(result) } mock.ExpectCommit() srv := testserver.New() srv.AddTransport(transport.POST{}) srv.Use(gqlutil.DBInjector{DB: db}) srv.Use(graphql.TenancyInjector{ Tenancy: viewer.NewFixedTenancy(&ent.Client{}), Dialect: dialect.MySQL, }) srv.AroundResponses(func(ctx context.Context, _ gql.ResponseHandler) *gql.Response { type tenancyReleaser interface { viewer.Tenancy Release() } tenancy, ok := viewer.TenancyFromContext(ctx).(tenancyReleaser) require.True(t, ok) for _, tenant := range tenants { _, err := tenancy.ClientFor(ctx, tenant) require.NoError(t, err) tenancy.Release() } return &gql.Response{Data: []byte(`{"name":"test"}`)} }) c := client.New(srv) err = c.Post(`mutation { name }`, &struct{ Name string }{}) require.NoError(t, err) }
explode_data.jsonl/58903
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 500 }
[ 2830, 3393, 32687, 6572, 61836, 1155, 353, 8840, 836, 8, 341, 20939, 11, 7860, 11, 1848, 1669, 5704, 16712, 7121, 741, 17957, 35699, 1155, 11, 1848, 340, 16867, 2915, 368, 341, 197, 9859, 1669, 7860, 81893, 804, 88453, 34673, 741, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetSkipRules(t *testing.T) { testRuleAWS1 := "AWS.S3Bucket.DS.High.1041" testRuleAWS2 := "AWS.S3Bucket.DS.High.1042" testRuleAWSwithHyphen := "AC-AWS-NS-IN-M-1172" testRuleAzure := "accurics.azure.NS.147" testRuleKubernetesWithHyphen := "AC-K8-DS-PO-M-0143" table := []struct { name string input string expected []output.SkipRule }{ { name: "no rules", input: "no rules here", // expected would be an empty slice of output.SkipRule }, { name: "rule id with no comment, aws", input: "#ts:skip=AWS.S3Bucket.DS.High.1041\n", expected: []output.SkipRule{ {Rule: testRuleAWS1}, }, }, { name: "rule id with no comment, aws, with '-'", input: "#ts:skip=AC-AWS-NS-IN-M-1172\n", expected: []output.SkipRule{ {Rule: testRuleAWSwithHyphen}, }, }, { // gcp, kubernetes, github rules are of same format name: "rule id with no comment, azure", input: "#ts:skip=accurics.azure.NS.147\n", expected: []output.SkipRule{ {Rule: testRuleAzure}, }, }, { name: "rule id with no comment, kubernetes with '-'", input: "#ts:skip=AC-K8-DS-PO-M-0143\n", expected: []output.SkipRule{ {Rule: testRuleKubernetesWithHyphen}, }, }, { name: "rule id with comment", input: "#ts:skip=AWS.S3Bucket.DS.High.1041 This rule should be skipped.\n", expected: []output.SkipRule{ { Rule: testRuleAWS1, Comment: "This rule should be skipped.", }, }, }, { // should match only one rule, we support single rule and comment in one line // everything after the first group match will be considered a comment name: "multiple comma separated no space, with comments", input: "#ts:skip=AWS.S3Bucket.DS.High.1041 some reason to skip. , AWS.S3Bucket.DS.High.1042 should_skip_the_rule.\n", expected: []output.SkipRule{ { Rule: testRuleAWS1, Comment: "some reason to skip. , AWS.S3Bucket.DS.High.1042 should_skip_the_rule.", }, }, }, { name: "rule and comment with random space characters", input: "#ts:skip= AWS.S3Bucket.DS.High.1041 reason_to skip. the rule\n", expected: []output.SkipRule{ { Rule: testRuleAWS1, Comment: "reason_to skip. 
the rule", }, }, }, { name: "sample resource config", input: `{ #ts:skip=AWS.S3Bucket.DS.High.1041 skip the rule. region = var.region #ts:skip=AWS.S3Bucket.DS.High.1042 AWS.S3Bucket.DS.High.1043 bucket = local.bucket_name #ts:skip=AWS.S3Bucket.DS.High.1044 resource skipped for this rule. force_destroy = true #ts:skip= AWS.S3Bucket.DS.High.1046 acl = "public-read" }`, expected: []output.SkipRule{ { Rule: testRuleAWS1, Comment: "skip the rule.", }, { Rule: testRuleAWS2, Comment: "AWS.S3Bucket.DS.High.1043", }, { Rule: "AWS.S3Bucket.DS.High.1044", Comment: "resource skipped for this rule.", }, { Rule: "AWS.S3Bucket.DS.High.1046", }, }, }, } for _, tt := range table { t.Run(tt.name, func(t *testing.T) { actual := GetSkipRules(tt.input) if !reflect.DeepEqual(actual, tt.expected) { t.Errorf("rule ids got: '%v', want: '%v'", actual, tt.expected) } }) } }
explode_data.jsonl/51011
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1546 }
[ 2830, 3393, 1949, 35134, 26008, 1155, 353, 8840, 836, 8, 341, 18185, 11337, 36136, 16, 1669, 330, 36136, 808, 18, 36018, 909, 50, 46678, 13, 16, 15, 19, 16, 698, 18185, 11337, 36136, 17, 1669, 330, 36136, 808, 18, 36018, 909, 50, 46...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestListProjectPipelines(t *testing.T) { mux, server, client := setup() defer teardown(server) mux.HandleFunc("/projects/1/pipelines", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") fmt.Fprint(w, `[{"id":1},{"id":2}]`) }) piplines, _, err := client.Pipelines.ListProjectPipelines(1) if err != nil { t.Errorf("Pipelines.ListProjectPipelines returned error: %v", err) } want := []*Pipeline{{ID: 1}, {ID: 2}} if !reflect.DeepEqual(want, piplines) { t.Errorf("Pipelines.ListProjectPipelines returned %+v, want %+v", piplines, want) } }
explode_data.jsonl/5213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 852, 7849, 47, 93997, 1155, 353, 8840, 836, 8, 341, 2109, 2200, 11, 3538, 11, 2943, 1669, 6505, 741, 16867, 49304, 21421, 692, 2109, 2200, 63623, 4283, 17161, 14, 16, 4322, 93997, 497, 2915, 3622, 1758, 37508, 11, 435, 353...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFSNode(t *testing.T) { fsn := NewFSNode(TFile) for i := 0; i < 16; i++ { fsn.AddBlockSize(100) } fsn.RemoveBlockSize(15) fsn.SetData(make([]byte, 128)) b, err := fsn.GetBytes() if err != nil { t.Fatal(err) } pbn := new(pb.Data) err = proto.Unmarshal(b, pbn) if err != nil { t.Fatal(err) } ds, err := DataSize(b) if err != nil { t.Fatal(err) } nKids := fsn.NumChildren() if nKids != 15 { t.Fatal("Wrong number of child nodes") } if ds != (100*15)+128 { t.Fatal("Datasize calculations incorrect!") } nfsn, err := FSNodeFromBytes(b) if err != nil { t.Fatal(err) } if nfsn.FileSize() != (100*15)+128 { t.Fatal("fsNode FileSize calculations incorrect") } }
explode_data.jsonl/42602
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 8485, 1955, 1155, 353, 8840, 836, 8, 341, 1166, 9613, 1669, 1532, 8485, 1955, 4140, 1703, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 21, 26, 600, 1027, 341, 197, 1166, 9613, 1904, 89932, 7, 16, 15, 15, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestDeepEqual(t *testing.T) { for _, test := range deepEqualTests { if test.b == (self{}) { test.b = test.a } if r := DeepEqual(test.a, test.b); r != test.eq { t.Errorf("DeepEqual(%v, %v) = %v, want %v", test.a, test.b, r, test.eq) } } }
explode_data.jsonl/29537
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 33464, 2993, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 5538, 2993, 18200, 341, 197, 743, 1273, 948, 621, 320, 721, 28875, 341, 298, 18185, 948, 284, 1273, 5849, 198, 197, 197, 532, 197, 743, 435, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_Validate_DockerConfig_Storage(t *testing.T) { for _, name := range []string{"aufs", "zfs", "overlay"} { config := &kops.DockerConfig{Storage: &name} errs := validateDockerConfig(config, field.NewPath("docker")) if len(errs) != 0 { t.Fatalf("Unexpected errors validating DockerConfig %q", errs) } } for _, name := range []string{"overlayfs", "", "au"} { config := &kops.DockerConfig{Storage: &name} errs := validateDockerConfig(config, field.NewPath("docker")) if len(errs) != 1 { t.Fatalf("Expected errors validating DockerConfig %+v", config) } if errs[0].Field != "docker.storage" || errs[0].Type != field.ErrorTypeNotSupported { t.Fatalf("Not the expected error validating DockerConfig %q", errs) } } }
explode_data.jsonl/61617
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 62, 17926, 1557, 13659, 2648, 62, 5793, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 829, 1669, 2088, 3056, 917, 4913, 50528, 82, 497, 330, 89, 3848, 497, 330, 21118, 9207, 341, 197, 25873, 1669, 609, 74, 3721, 909, 13659, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRangesIntersection(t *testing.T) { for _, test := range []struct { rs Ranges r Range want Ranges }{ { rs: Ranges(nil), r: Range{}, want: Ranges(nil), }, { rs: Ranges{}, r: Range{}, want: Ranges{}, }, { rs: Ranges{}, r: Range{Pos: 1, Size: 0}, want: Ranges{}, }, { rs: Ranges{}, r: Range{Pos: 1, Size: 1}, want: Ranges{}, }, { rs: Ranges{{Pos: 1, Size: 5}}, r: Range{Pos: 1, Size: 3}, want: Ranges{ {Pos: 1, Size: 3}, }, }, { rs: Ranges{{Pos: 1, Size: 5}}, r: Range{Pos: 1, Size: 10}, want: Ranges{ {Pos: 1, Size: 5}, }, }, { rs: Ranges{{Pos: 1, Size: 5}}, r: Range{Pos: 3, Size: 10}, want: Ranges{ {Pos: 3, Size: 3}, }, }, { rs: Ranges{{Pos: 1, Size: 5}}, r: Range{Pos: 6, Size: 10}, want: Ranges(nil), }, { rs: Ranges{ {Pos: 1, Size: 2}, {Pos: 11, Size: 2}, {Pos: 21, Size: 2}, {Pos: 31, Size: 2}, {Pos: 41, Size: 2}, }, r: Range{Pos: 12, Size: 20}, want: Ranges{ {Pos: 12, Size: 1}, {Pos: 21, Size: 2}, {Pos: 31, Size: 1}, }, }, } { got := test.rs.Intersection(test.r) what := fmt.Sprintf("test ra=%v, r=%v", test.rs, test.r) assert.Equal(t, test.want, got, what) checkRanges(t, test.rs, what) checkRanges(t, got, what) } }
explode_data.jsonl/2644
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 769 }
[ 2830, 3393, 74902, 72927, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 41231, 256, 431, 5520, 198, 197, 7000, 262, 16437, 198, 197, 50780, 431, 5520, 198, 197, 59403, 197, 197, 515, 298, 41231, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConsulRunnerRunConsulClientError(t *testing.T) { ctrl := gomock.NewController(assert.Tracing(t)) defer ctrl.Finish() mockUpdater := updater.NewMockUpdater(ctrl) mockUpdaterFromFlags := rotor.NewMockUpdaterFromFlags(ctrl) mockUpdaterFromFlags.EXPECT().Validate().Return(nil) mockUpdaterFromFlags.EXPECT().Make().Return(mockUpdater, nil) mockGetClient := newMockGetClientInterface(ctrl) mockGetClient.EXPECT().getClient().Return(nil, errors.New("consul client error")) cmd := Cmd(mockUpdaterFromFlags) cmd.Flags.Parse([]string{}) r := cmd.Runner.(*consulRunner) r.consulSettings.endpoint = mockGetClient r.consulSettings.consulDC = "dc" cmdErr := r.Run(cmd, nil) assert.StringContains(t, cmdErr.Message, "consul client error") }
explode_data.jsonl/71907
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 283 }
[ 2830, 3393, 15220, 360, 19486, 6727, 15220, 360, 2959, 1454, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 75846, 8240, 4527, 1155, 1171, 16867, 23743, 991, 18176, 2822, 77333, 79854, 1669, 68249, 7121, 11571, 798...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsAllowedRemediation(t *testing.T) { // short circuit if ever more than 2 out of 5 go unhealthy maxUnhealthyInt := intstr.FromInt(2) maxUnhealthyNegative := intstr.FromInt(-2) maxUnhealthyString := intstr.FromString("40%") maxUnhealthyIntInString := intstr.FromString("2") maxUnhealthyMixedString := intstr.FromString("foo%50") testCases := []struct { testCase string mhc *mapiv1beta1.MachineHealthCheck expected bool }{ { testCase: "not above maxUnhealthy", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyInt, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: IntPtr(5), CurrentHealthy: IntPtr(3), }, }, expected: true, }, { testCase: "above maxUnhealthy", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyInt, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: IntPtr(5), CurrentHealthy: IntPtr(2), }, }, expected: false, }, { testCase: "maxUnhealthy is negative", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyNegative, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: IntPtr(5), CurrentHealthy: IntPtr(2), }, }, expected: false, }, { testCase: "not above maxUnhealthy (percentange)", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: 
mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyString, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: IntPtr(5), CurrentHealthy: IntPtr(3), }, }, expected: true, }, { testCase: "above maxUnhealthy (percentange)", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyString, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: IntPtr(5), CurrentHealthy: IntPtr(2), }, }, expected: false, }, { testCase: "not above maxUnhealthy (int in string)", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyIntInString, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: IntPtr(5), CurrentHealthy: IntPtr(3), }, }, expected: true, }, { testCase: "above maxUnhealthy (int in string)", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyIntInString, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: IntPtr(5), CurrentHealthy: IntPtr(2), }, }, expected: false, }, { testCase: "nil values", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyString, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: nil, 
CurrentHealthy: nil, }, }, expected: true, }, { testCase: "invalid string value", mhc: &mapiv1beta1.MachineHealthCheck{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "MachineHealthCheck", }, Spec: mapiv1beta1.MachineHealthCheckSpec{ Selector: metav1.LabelSelector{}, MaxUnhealthy: &maxUnhealthyMixedString, }, Status: mapiv1beta1.MachineHealthCheckStatus{ ExpectedMachines: nil, CurrentHealthy: nil, }, }, expected: false, }, } for _, tc := range testCases { t.Run(tc.testCase, func(t *testing.T) { if got := isAllowedRemediation(tc.mhc); got != tc.expected { t.Errorf("Case: %v. Got: %v, expected: %v", tc.testCase, got, tc.expected) } }) } }
explode_data.jsonl/31010
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2567 }
[ 2830, 3393, 3872, 35382, 6590, 291, 7101, 1155, 353, 8840, 836, 8, 341, 197, 322, 2805, 16224, 421, 3512, 803, 1091, 220, 17, 700, 315, 220, 20, 728, 52708, 198, 22543, 1806, 37028, 1072, 1669, 526, 495, 11439, 1072, 7, 17, 340, 225...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSimpleMetaFactoryInterpret(t *testing.T) { factory := SimpleMetaFactory{} version, kind, err := factory.Interpret([]byte(`{"apiVersion":"1","kind":"object"}`)) if err != nil { t.Fatalf("unexpected error: %v", err) } if version != "1" || kind != "object" { t.Errorf("unexpected interpret: %s %s", version, kind) } // no kind or version version, kind, err = factory.Interpret([]byte(`{}`)) if err != nil { t.Fatalf("unexpected error: %v", err) } if version != "" || kind != "" { t.Errorf("unexpected interpret: %s %s", version, kind) } // unparsable version, kind, err = factory.Interpret([]byte(`{`)) if err == nil { t.Errorf("unexpected non-error") } }
explode_data.jsonl/34478
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 262 }
[ 2830, 3393, 16374, 12175, 4153, 3306, 8043, 1155, 353, 8840, 836, 8, 341, 1166, 2919, 1669, 8993, 12175, 4153, 16094, 74954, 11, 3093, 11, 1848, 1669, 8633, 15405, 8043, 10556, 3782, 5809, 4913, 2068, 5637, 3252, 16, 2198, 15314, 3252, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestMats_IsAllowed_SimpleDeny(t *testing.T) { URI_IS_ALLOWD := svcs.PolicyAtzPath + "is-allowed" data := &[]testutil.TestCase{ { Name: "DenyUser", Executer: testutil.NewRestTestExecuter, Method: testutil.METHOD_IS_ALLOWED, Data: &testutil.RestTestData{ URI: URI_IS_ALLOWD, InputBody: &JsonContext{ Subject: &JsonSubject{Principals: []*JsonPrincipal{{Type: adsapi.PRINCIPAL_TYPE_USER, Name: "user1"}, {Type: adsapi.PRINCIPAL_TYPE_GROUP, Name: "group1"}}}, ServiceName: SERVICE_SIMPLE, Resource: "res_deny", Action: "get", }, ExpectedStatus: 200, OutputBody: &IsAllowedResponse{}, ExpectedBody: &IsAllowedResponse{Allowed: false, Reason: int32(adsapi.DENY_POLICY_FOUND)}, }, }, { Name: "DenyGroup", Executer: testutil.NewRestTestExecuter, Method: testutil.METHOD_IS_ALLOWED, Data: &testutil.RestTestData{ URI: URI_IS_ALLOWD, InputBody: &JsonContext{ Subject: &JsonSubject{Principals: []*JsonPrincipal{{Type: adsapi.PRINCIPAL_TYPE_USER, Name: "user1"}, {Type: adsapi.PRINCIPAL_TYPE_GROUP, Name: "group1"}}}, ServiceName: SERVICE_SIMPLE, Resource: "res_deny", Action: "get", }, ExpectedStatus: 200, OutputBody: &IsAllowedResponse{}, ExpectedBody: &IsAllowedResponse{Allowed: false, Reason: int32(adsapi.DENY_POLICY_FOUND)}, }, }, { Name: "GrantRoleToUser_RoleDenied", Executer: testutil.NewRestTestExecuter, Method: testutil.METHOD_IS_ALLOWED, Data: &testutil.RestTestData{ URI: URI_IS_ALLOWD, InputBody: &JsonContext{ Subject: &JsonSubject{Principals: []*JsonPrincipal{{Type: adsapi.PRINCIPAL_TYPE_USER, Name: "userWithRole1"}}}, ServiceName: SERVICE_SIMPLE, Resource: "res_deny", Action: "get", }, ExpectedStatus: 200, OutputBody: &IsAllowedResponse{}, ExpectedBody: &IsAllowedResponse{Allowed: false, Reason: int32(adsapi.DENY_POLICY_FOUND)}, }, }, { Name: "DenyRoleToGroup_RoleAllowed", Executer: testutil.NewRestTestExecuter, Method: testutil.METHOD_IS_ALLOWED, Data: &testutil.RestTestData{ URI: URI_IS_ALLOWD, InputBody: &JsonContext{ Subject: &JsonSubject{Principals: 
[]*JsonPrincipal{{Type: adsapi.PRINCIPAL_TYPE_USER, Name: "groupWithRole2"}, {Type: adsapi.PRINCIPAL_TYPE_GROUP, Name: "group1"}}}, ServiceName: SERVICE_SIMPLE, Resource: "res_deny", Action: "get", }, ExpectedStatus: 200, OutputBody: &IsAllowedResponse{}, ExpectedBody: &IsAllowedResponse{Allowed: false, Reason: int32(adsapi.DENY_POLICY_FOUND)}, }, }, } testutil.RunTestCases(t, data, nil) }
explode_data.jsonl/16522
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1270 }
[ 2830, 3393, 44, 1862, 31879, 35382, 1098, 6456, 23619, 88, 1155, 353, 8840, 836, 8, 341, 197, 10301, 12766, 44324, 35, 1669, 13559, 4837, 1069, 8018, 1655, 89, 1820, 488, 330, 285, 12, 20967, 698, 8924, 1669, 609, 1294, 1944, 1314, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntegrationsEmptyAwsCredentials(t *testing.T) { var awsData api.AwsIntegrationData credentials := awsData.GetCredentials() externalID := credentials.ExternalID roleArn := credentials.RoleArn assert.Empty(t, externalID) assert.Empty(t, roleArn) }
explode_data.jsonl/81110
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 1072, 14412, 804, 3522, 47359, 27025, 1155, 353, 8840, 836, 8, 341, 2405, 31521, 1043, 6330, 875, 8915, 52464, 1043, 198, 197, 32353, 1669, 31521, 1043, 2234, 27025, 2822, 197, 20921, 915, 1669, 16387, 5121, 15342, 915, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestCaptivePrepareRange(t *testing.T) { metaChan := make(chan metaResult, 100) // Core will actually start with the last checkpoint before the from ledger // and then rewind to the `from` ledger. for i := 64; i <= 100; i++ { meta := buildLedgerCloseMeta(testLedgerHeader{sequence: uint32(i)}) metaChan <- metaResult{ LedgerCloseMeta: &meta, } } ctx := context.Background() mockRunner := &stellarCoreRunnerMock{} mockRunner.On("catchup", uint32(100), uint32(200)).Return(nil).Once() mockRunner.On("getMetaPipe").Return((<-chan metaResult)(metaChan)) mockRunner.On("context").Return(ctx) mockArchive := &historyarchive.MockArchive{} mockArchive. On("GetRootHAS"). Return(historyarchive.HistoryArchiveState{ CurrentLedger: uint32(200), }, nil) cancelCalled := false captiveBackend := CaptiveStellarCore{ archive: mockArchive, stellarCoreRunnerFactory: func(_ stellarCoreRunnerMode) (stellarCoreRunnerInterface, error) { return mockRunner, nil }, checkpointManager: historyarchive.NewCheckpointManager(64), cancel: context.CancelFunc(func() { cancelCalled = true }), } err := captiveBackend.PrepareRange(ctx, BoundedRange(100, 200)) assert.NoError(t, err) mockRunner.On("close").Return(nil).Once() err = captiveBackend.Close() assert.NoError(t, err) assert.True(t, cancelCalled) mockRunner.AssertExpectations(t) mockArchive.AssertExpectations(t) }
explode_data.jsonl/7311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 508 }
[ 2830, 3393, 34, 27781, 50590, 6046, 1155, 353, 8840, 836, 8, 341, 84004, 46019, 1669, 1281, 35190, 8823, 2077, 11, 220, 16, 15, 15, 692, 197, 322, 9518, 686, 3520, 1191, 448, 279, 1537, 29295, 1573, 279, 504, 46933, 198, 197, 322, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestString(t *testing.T) { items := []int{23, 24, 2, 5, 10} interfaceItems := make([]interface{}, len(items)) for i, v := range items { interfaceItems[i] = v } a := New(interfaceItems) str := a.String() expectedValue := "Array [23 24 2 5 10]" if str != expectedValue { t.Log("Array should return string values of its contents") t.Log("Expected", expectedValue, "\n Got", str) t.Fail() } }
explode_data.jsonl/47089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 703, 1155, 353, 8840, 836, 8, 341, 46413, 1669, 3056, 396, 90, 17, 18, 11, 220, 17, 19, 11, 220, 17, 11, 220, 20, 11, 220, 16, 15, 532, 58915, 1564, 4353, 1669, 1281, 10556, 4970, 22655, 2422, 24337, 4390, 2023, 600, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestProducerConsumerModel(t *testing.T) { const numOperations = 10 var wg sync.WaitGroup wg.Add(2) resultCount := 0 var resultMux sync.Mutex var producer = func(queue *chan TransferOperation) { defer wg.Done() for i := 0; i < numOperations; i++ { wg.Add(1) var transferOf = testTransferBuilder(t, i, &resultCount, &resultMux) t.Logf("Producer: '%d'\n", i) *queue <- transferOf } close(*queue) } queue := make(chan TransferOperation, 5) doneNotifier := NewConditionalBool() consumer := newConsumer(&queue, &wg, 5, doneNotifier) go producer(&queue) go consumer.run() wg.Wait() ds3Testing.AssertInt(t, "Executed Transfer Operations", numOperations, resultCount) ds3Testing.AssertBool(t, "received done notification", true, doneNotifier.Done) }
explode_data.jsonl/31662
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 377 }
[ 2830, 3393, 45008, 29968, 1712, 1155, 353, 8840, 836, 8, 341, 262, 733, 1629, 35120, 284, 220, 16, 15, 198, 262, 762, 63581, 12811, 28384, 2808, 198, 262, 63581, 1904, 7, 17, 692, 262, 1102, 2507, 1669, 220, 15, 198, 262, 762, 1102,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSwarmGetFrontendRule(t *testing.T) { testCases := []struct { service swarm.Service expected string networks map[string]*docker.NetworkResource }{ { service: swarmService(serviceName("foo")), expected: "Host:foo.docker.localhost", networks: map[string]*docker.NetworkResource{}, }, { service: swarmService(serviceName("foo"), serviceLabels(map[string]string{ label.TraefikDomain: "traefik.localhost", })), expected: "Host:foo.traefik.localhost", networks: map[string]*docker.NetworkResource{}, }, { service: swarmService(serviceLabels(map[string]string{ label.TraefikFrontendRule: "Host:foo.bar", })), expected: "Host:foo.bar", networks: map[string]*docker.NetworkResource{}, }, { service: swarmService(serviceLabels(map[string]string{ label.TraefikFrontendRule: "Path:/test", })), expected: "Path:/test", networks: map[string]*docker.NetworkResource{}, }, } for serviceID, test := range testCases { test := test t.Run(strconv.Itoa(serviceID), func(t *testing.T) { t.Parallel() dData := parseService(test.service, test.networks) segmentProperties := label.ExtractTraefikLabels(dData.Labels) provider := &Provider{ Domain: "docker.localhost", SwarmMode: true, } actual := provider.getFrontendRule(dData, segmentProperties[""]) assert.Equal(t, test.expected, actual) }) } }
explode_data.jsonl/1393
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 566 }
[ 2830, 3393, 13218, 2178, 1949, 23395, 408, 11337, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 52934, 220, 60841, 13860, 198, 197, 42400, 914, 198, 197, 9038, 2349, 82, 2415, 14032, 8465, 28648, 30149, 4783, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInnodbLockWaitTimeoutWaitStart(t *testing.T) { // prepare work store, clean := createMockStoreAndSetup(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") defer tk.MustExec("drop table if exists tk") tk.MustExec("drop table if exists tk") tk.MustExec("create table tk (c1 int primary key, c2 int)") tk.MustExec("insert into tk values(1,1),(2,2),(3,3),(4,4),(5,5)") tk.MustExec("set global innodb_lock_wait_timeout = 1") // raise pessimistic transaction in tk2 and trigger failpoint returning ErrWriteConflict tk2 := testkit.NewTestKit(t, store) tk2.MustExec("use test") tk3 := testkit.NewTestKit(t, store) tk3.MustExec("use test") tk2.MustQuery(`show variables like "innodb_lock_wait_timeout"`).Check(testkit.Rows("innodb_lock_wait_timeout 1")) // tk3 gets the pessimistic lock tk3.MustExec("begin pessimistic") tk3.MustQuery("select * from tk where c1 = 1 for update") tk2.MustExec("begin pessimistic") done := make(chan error) require.NoError(t, failpoint.Enable("tikvclient/PessimisticLockErrWriteConflict", "return")) var duration time.Duration go func() { var err error start := time.Now() defer func() { duration = time.Since(start) done <- err }() _, err = tk2.Exec("select * from tk where c1 = 1 for update") }() time.Sleep(time.Millisecond * 100) require.NoError(t, failpoint.Disable("tikvclient/PessimisticLockErrWriteConflict")) waitErr := <-done require.Error(t, waitErr) require.Equal(t, storeerr.ErrLockWaitTimeout.Error(), waitErr.Error()) require.GreaterOrEqual(t, duration, 1000*time.Millisecond) require.LessOrEqual(t, duration, 3000*time.Millisecond) tk2.MustExec("rollback") tk3.MustExec("commit") }
explode_data.jsonl/12469
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 639 }
[ 2830, 3393, 641, 77, 16853, 11989, 14190, 7636, 14190, 3479, 1155, 353, 8840, 836, 8, 341, 197, 322, 10549, 975, 198, 57279, 11, 4240, 1669, 1855, 11571, 6093, 3036, 21821, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAutocompleteUsersInChannel(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() teamId := th.BasicTeam.Id channelId := th.BasicChannel.Id username := th.BasicUser.Username newUser := th.CreateUser() tt := []struct { Name string TeamId string ChannelId string Username string ExpectedResults int MoreThan bool ShouldFail bool }{ { "Autocomplete in channel for specific username", teamId, channelId, username, 1, false, false, }, { "Search for not valid username", teamId, channelId, "amazonses", 0, false, false, }, { "Search for all users", teamId, channelId, "", 2, true, false, }, { "Fail when the teamId is not provided", "", channelId, "", 2, true, true, }, } for _, tc := range tt { t.Run(tc.Name, func(t *testing.T) { th.LoginBasic() rusers, _, err := th.Client.AutocompleteUsersInChannel(tc.TeamId, tc.ChannelId, tc.Username, model.UserSearchDefaultLimit, "") if tc.ShouldFail { CheckErrorID(t, err, "api.user.autocomplete_users.missing_team_id.app_error") } else { require.NoError(t, err) if tc.MoreThan { assert.True(t, len(rusers.Users) >= tc.ExpectedResults) } else { assert.Len(t, rusers.Users, tc.ExpectedResults) } } th.Client.Logout() _, resp, err := th.Client.AutocompleteUsersInChannel(tc.TeamId, tc.ChannelId, tc.Username, model.UserSearchDefaultLimit, "") require.Error(t, err) CheckUnauthorizedStatus(t, resp) th.Client.Login(newUser.Email, newUser.Password) _, resp, err = th.Client.AutocompleteUsersInChannel(tc.TeamId, tc.ChannelId, tc.Username, model.UserSearchDefaultLimit, "") require.Error(t, err) CheckForbiddenStatus(t, resp) }) } t.Run("Check against privacy config settings", func(t *testing.T) { th.App.UpdateConfig(func(cfg *model.Config) { *cfg.PrivacySettings.ShowFullName = false }) th.LoginBasic() rusers, _, err := th.Client.AutocompleteUsersInChannel(teamId, channelId, username, model.UserSearchDefaultLimit, "") require.NoError(t, err) assert.Equal(t, rusers.Users[0].FirstName, "", "should not show first/last name") assert.Equal(t, 
rusers.Users[0].LastName, "", "should not show first/last name") }) t.Run("Check OutOfChannel results with/without VIEW_MEMBERS permissions", func(t *testing.T) { permissionsUser := th.CreateUser() th.SystemAdminClient.DemoteUserToGuest(permissionsUser.Id) permissionsUser.Roles = "system_guest" th.LinkUserToTeam(permissionsUser, th.BasicTeam) th.AddUserToChannel(permissionsUser, th.BasicChannel) otherUser := th.CreateUser() th.LinkUserToTeam(otherUser, th.BasicTeam) th.Client.Login(permissionsUser.Email, permissionsUser.Password) rusers, _, err := th.Client.AutocompleteUsersInChannel(teamId, channelId, "", model.UserSearchDefaultLimit, "") require.NoError(t, err) assert.Len(t, rusers.OutOfChannel, 1) defaultRolePermissions := th.SaveDefaultRolePermissions() defer func() { th.RestoreDefaultRolePermissions(defaultRolePermissions) }() th.RemovePermissionFromRole(model.PermissionViewMembers.Id, model.SystemUserRoleId) th.RemovePermissionFromRole(model.PermissionViewMembers.Id, model.TeamUserRoleId) rusers, _, err = th.Client.AutocompleteUsersInChannel(teamId, channelId, "", model.UserSearchDefaultLimit, "") require.NoError(t, err) assert.Empty(t, rusers.OutOfChannel) th.App.GetOrCreateDirectChannel(th.Context, permissionsUser.Id, otherUser.Id) rusers, _, err = th.Client.AutocompleteUsersInChannel(teamId, channelId, "", model.UserSearchDefaultLimit, "") require.NoError(t, err) assert.Len(t, rusers.OutOfChannel, 1) }) t.Run("user must have access to team id, especially when it does not match channel's team id", func(t *testing.T) { _, _, err := th.Client.AutocompleteUsersInChannel("otherTeamId", channelId, username, model.UserSearchDefaultLimit, "") CheckErrorID(t, err, "api.context.permissions.app_error") }) }
explode_data.jsonl/47496
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1602 }
[ 2830, 3393, 19602, 20104, 7137, 641, 9629, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 197, 9196, 764, 1669, 270, 48868, 14597, 6444, 198, 71550, 764, 1669, 270, 48868,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInvokeNOKCCDoesntExist(t *testing.T) { t.Run("1.2Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV12Capabilities(t) defer cleanup() testInvokeNOKCCDoesntExist(t, l, v) }) t.Run("1.3Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV13Capabilities(t) defer cleanup() testInvokeNOKCCDoesntExist(t, l, v) }) }
explode_data.jsonl/47813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 17604, 45, 3925, 3706, 21468, 406, 25613, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16, 13, 17, 63746, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 8810, 11, 348, 11, 21290, 1669, 6505, 60850, 1389, 3036, 14256, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVolumesCreateDestroy(t *testing.T) { client, err := clients.NewBlockStorageV1Client() if err != nil { t.Fatalf("Unable to create blockstorage client: %v", err) } volume, err := CreateVolume(t, client) if err != nil { t.Fatalf("Unable to create volume: %v", err) } defer DeleteVolume(t, client, volume) newVolume, err := volumes.Get(client, volume.ID).Extract() if err != nil { t.Errorf("Unable to retrieve volume: %v", err) } tools.PrintResource(t, newVolume) }
explode_data.jsonl/72638
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 96325, 4021, 14245, 1155, 353, 8840, 836, 8, 972, 25291, 11, 1848, 1669, 8239, 7121, 4713, 5793, 53, 16, 2959, 3568, 743, 1848, 961, 2092, 972, 197, 3244, 30762, 445, 17075, 311, 1855, 2504, 16172, 2943, 25, 1018, 85, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestServer_Request_Connect_InvalidScheme(t *testing.T) { testServerRejectsStream(t, ErrCodeProtocol, func(st *serverTester) { st.writeHeaders(HeadersFrameParam{ StreamID: 1, BlockFragment: st.encodeHeaderRaw( ":method", "CONNECT", ":authority", "example.com:123", ":scheme", "https", ), EndStream: true, EndHeaders: true, }) }) }
explode_data.jsonl/71637
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 5475, 44024, 15100, 2321, 62, 7928, 28906, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 78413, 82, 3027, 1155, 11, 15495, 2078, 20689, 11, 2915, 5895, 353, 4030, 58699, 8, 341, 197, 18388, 3836, 10574, 7, 10574, 4369, 2001, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChmod(t *testing.T) { fileName := dirRoot + "/chmod.txt" err := Touch(fileName) if err != nil { panic(err) } t.Cleanup(func() { _ = RemoveWithRecur(dirRoot) }) if !Exists(fileName) { t.Error("Chmod test failed!") } err = Chmod(fileName, 0755) if err != nil { panic(err) } }
explode_data.jsonl/34174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 1143, 2593, 1155, 353, 8840, 836, 8, 341, 17661, 675, 1669, 5419, 8439, 488, 3521, 56274, 3909, 1837, 9859, 1669, 19338, 23014, 340, 743, 1848, 961, 2092, 341, 197, 30764, 3964, 340, 197, 630, 3244, 727, 60639, 18552, 368, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_computedDataCountResource(t *testing.T) { m := testModule(t, "plan-computed-data-count") p := testProvider("aws") p.GetSchemaReturn = &ProviderSchema{ ResourceTypes: map[string]*configschema.Block{ "aws_instance": { Attributes: map[string]*configschema.Attribute{ "num": {Type: cty.String, Optional: true}, "compute": {Type: cty.String, Optional: true}, "foo": {Type: cty.String, Computed: true}, }, }, }, DataSources: map[string]*configschema.Block{ "aws_vpc": { Attributes: map[string]*configschema.Attribute{ "foo": {Type: cty.String, Optional: true}, }, }, }, } p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), }) plan, diags := ctx.Plan() if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } // make sure we created 3 "bar"s for i := 0; i < 3; i++ { addr := addrs.Resource{ Mode: addrs.DataResourceMode, Type: "aws_vpc", Name: "bar", }.Instance(addrs.IntKey(i)).Absolute(addrs.RootModuleInstance) if rcs := plan.Changes.ResourceInstance(addr); rcs == nil { t.Fatalf("missing changes for %s", addr) } } }
explode_data.jsonl/28670
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 555 }
[ 2830, 3393, 1972, 17, 20485, 2965, 19292, 1043, 2507, 4783, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 11476, 19292, 13945, 26580, 1138, 3223, 1669, 1273, 5179, 445, 8635, 1138, 3223, 2234, 8632, 5598, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNew(t *testing.T) { t.Cleanup(fakeRandReader(fakeBytes(24))) tests := []struct { name string want *Traceparent wantErr bool }{ {"ok", &Traceparent{ version: TraceVersion, traceID: [16]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5}, parentID: [8]byte{6, 7, 8, 9, 0, 1, 2, 3}, traceFlags: 0, }, false}, {"fail", nil, true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := New() if (err != nil) != tt.wantErr { t.Errorf("New() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("New() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/35198
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 3244, 727, 60639, 74138, 56124, 5062, 74138, 7078, 7, 17, 19, 19235, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 50780, 262, 353, 6550, 3765, 198, 197, 50780, 7747, 180...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestChannelsService_Create(t *testing.T) { setup() defer teardown() input := &ChannelRequest{ Name: "name", Description: "desc", Privacy: "anybody", } mux.HandleFunc("/channels", func(w http.ResponseWriter, r *http.Request) { v := &ChannelRequest{} json.NewDecoder(r.Body).Decode(v) testMethod(t, r, "POST") if !reflect.DeepEqual(v, input) { t.Errorf("Channels.Create body is %+v, want %+v", v, input) } fmt.Fprint(w, `{"name": "name"}`) }) channel, _, err := client.Channels.Create(input) if err != nil { t.Errorf("Channels.Create returned unexpected error: %v", err) } want := &Channel{Name: "name"} if !reflect.DeepEqual(channel, want) { t.Errorf("Channels.Create returned %+v, want %+v", channel, want) } }
explode_data.jsonl/49782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 35925, 1860, 34325, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 22427, 1669, 609, 9629, 1900, 515, 197, 21297, 25, 286, 330, 606, 756, 197, 47414, 25, 330, 8614, 756, 197, 197, 47832, 25, 257, 330, 3767, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFetch(t *testing.T) { r := compose.EnsureUp(t, "mongodb") f := mbtest.NewReportingMetricSetV2Error(t, getConfig(r.Host())) events, errs := mbtest.ReportingFetchV2Error(f) if len(errs) > 0 { t.Fatalf("Expected 0 error, had %d. %v\n", len(errs), errs) } assert.NotEmpty(t, events) event := events[0].MetricSetFields // Check a few event Fields findCount, err := event.GetValue("commands.find.total") assert.NoError(t, err) assert.True(t, findCount.(int64) >= 0) deletedDocuments, err := event.GetValue("document.deleted") assert.NoError(t, err) assert.True(t, deletedDocuments.(int64) >= 0) }
explode_data.jsonl/64007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 247 }
[ 2830, 3393, 20714, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 30335, 22834, 19098, 2324, 1155, 11, 330, 37197, 5130, 1166, 1669, 10016, 1944, 7121, 70131, 54310, 1649, 53, 17, 1454, 1155, 11, 66763, 2601, 29840, 12145, 90873, 11, 70817, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseLabel(t *testing.T) { labels := []string{ "foo=bar", "bar:foo", "foobar", } expected := map[string]string{ "foo": "bar", "bar": "foo", "foobar": "", } actual := parseLabel(labels) if eq := reflect.DeepEqual(expected, actual); !eq { t.Errorf("Expect %v got %v", expected, actual) } }
explode_data.jsonl/23320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 14463, 2476, 1155, 353, 8840, 836, 8, 341, 95143, 1669, 3056, 917, 515, 197, 197, 1, 7975, 28, 2257, 756, 197, 197, 1, 2257, 25, 7975, 756, 197, 197, 1, 50267, 756, 197, 532, 42400, 1669, 2415, 14032, 30953, 515, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHandleCallback(t *testing.T) { t.Helper() tests := []struct { name string userIDKey string userNameKey string insecureSkipEmailVerified bool expectUserID string expectUserName string token map[string]interface{} }{ { name: "simpleCase", userIDKey: "", // not configured userNameKey: "", // not configured expectUserID: "subvalue", expectUserName: "namevalue", token: map[string]interface{}{ "sub": "subvalue", "name": "namevalue", "email": "emailvalue", "email_verified": true, }, }, { name: "email_verified not in claims, configured to be skipped", insecureSkipEmailVerified: true, expectUserID: "subvalue", expectUserName: "namevalue", token: map[string]interface{}{ "sub": "subvalue", "name": "namevalue", "email": "emailvalue", }, }, { name: "withUserIDKey", userIDKey: "name", expectUserID: "namevalue", expectUserName: "namevalue", token: map[string]interface{}{ "sub": "subvalue", "name": "namevalue", "email": "emailvalue", "email_verified": true, }, }, { name: "withUserNameKey", userNameKey: "user_name", expectUserID: "subvalue", expectUserName: "username", token: map[string]interface{}{ "sub": "subvalue", "user_name": "username", "email": "emailvalue", "email_verified": true, }, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { testServer, err := setupServer(tc.token) if err != nil { t.Fatal("failed to setup test server", err) } defer testServer.Close() serverURL := testServer.URL config := Config{ Issuer: serverURL, ClientID: "clientID", ClientSecret: "clientSecret", Scopes: []string{"groups"}, RedirectURI: fmt.Sprintf("%s/callback", serverURL), UserIDKey: tc.userIDKey, UserNameKey: tc.userNameKey, InsecureSkipEmailVerified: tc.insecureSkipEmailVerified, } conn, err := newConnector(config) if err != nil { t.Fatal("failed to create new connector", err) } req, err := newRequestWithAuthCode(testServer.URL, "someCode") if err != nil { t.Fatal("failed to create request", err) } identity, err := conn.HandleCallback(connector.Scopes{Groups: true}, 
req) if err != nil { t.Fatal("handle callback failed", err) } expectEquals(t, identity.UserID, tc.expectUserID) expectEquals(t, identity.Username, tc.expectUserName) expectEquals(t, identity.Email, "emailvalue") expectEquals(t, identity.EmailVerified, true) }) } }
explode_data.jsonl/14396
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1466 }
[ 2830, 3393, 6999, 7494, 1155, 353, 8840, 836, 8, 341, 3244, 69282, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 2549, 914, 198, 197, 19060, 915, 1592, 338, 914, 198, 197, 19060, 675, 1592, 2290, 914, 198, 197, 17430, 25132, 35134, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMtlsStrictK8sCA(t *testing.T) { framework.NewTest(t). Features("security.control-plane.k8s-certs.k8sca"). Run(func(ctx framework.TestContext) { // TODO: due to issue https://github.com/istio/istio/issues/25286, // currently VM does not work in this test rctx := reachability.CreateContext(ctx, false) systemNM := namespace.ClaimSystemNamespaceOrFail(ctx, ctx) testCases := []reachability.TestCase{ { ConfigFile: "global-mtls-on-no-dr.yaml", Namespace: systemNM, Include: func(src echo.Instance, opts echo.CallOptions) bool { // Exclude calls to the headless service. // Auto mtls does not apply to headless service, because for headless service // the cluster discovery type is ORIGINAL_DST, and it will not apply upstream tls setting return !rctx.IsHeadless(opts.Target) }, ExpectSuccess: func(src echo.Instance, opts echo.CallOptions) bool { // When mTLS is in STRICT mode, DR's TLS settings are default to mTLS so the result would // be the same as having global DR rule. if opts.Target == rctx.Naked { // calls to naked should always succeed. return true } // If source is naked, and destination is not, expect failure. return !(rctx.IsNaked(src) && !rctx.IsNaked(opts.Target)) }, }, { ConfigFile: "global-plaintext.yaml", Namespace: systemNM, Include: func(src echo.Instance, opts echo.CallOptions) bool { // Exclude calls to the headless TCP port. if opts.Target == rctx.Headless && opts.PortName == "tcp" { return false } return true }, ExpectSuccess: func(src echo.Instance, opts echo.CallOptions) bool { // When mTLS is disabled, all traffic should work. return true }, }, } rctx.Run(testCases) }) }
explode_data.jsonl/63041
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 755 }
[ 2830, 3393, 44, 34488, 41857, 42, 23, 82, 5049, 1155, 353, 8840, 836, 8, 341, 1166, 5794, 7121, 2271, 1155, 4292, 197, 197, 21336, 445, 17039, 16406, 89549, 5202, 23, 82, 1786, 15546, 5202, 23, 82, 924, 38609, 197, 85952, 18552, 7502,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Auto_HTML_Destroy_Redirect(t *testing.T) { r := require.New(t) app := buffalo.New(buffalo.Options{}) app.DELETE("/cars/{id}", func(c buffalo.Context) error { b := Car{ ID: 1, Name: "Honda", } return c.Render(200, render.Auto(c, b)) }) w := willie.New(app) res := w.HTML("/cars/1").Delete() r.Equal("/cars", res.Location()) r.Equal(302, res.Code) }
explode_data.jsonl/2632
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 1566, 1535, 56726, 79266, 92940, 1226, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 692, 28236, 1669, 81355, 7121, 39729, 12529, 22179, 37790, 28236, 58931, 4283, 50708, 9388, 307, 9545, 2915, 1337, 81355, 9328, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMetricSlice_CopyTo(t *testing.T) { dest := NewMetricSlice() // Test CopyTo to empty NewMetricSlice().CopyTo(dest) assert.EqualValues(t, NewMetricSlice(), dest) // Test CopyTo larger slice generateTestMetricSlice().CopyTo(dest) assert.EqualValues(t, generateTestMetricSlice(), dest) // Test CopyTo same size slice generateTestMetricSlice().CopyTo(dest) assert.EqualValues(t, generateTestMetricSlice(), dest) }
explode_data.jsonl/19507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 54310, 33236, 77637, 1249, 1155, 353, 8840, 836, 8, 341, 49616, 1669, 1532, 54310, 33236, 741, 197, 322, 3393, 14540, 1249, 311, 4287, 198, 197, 3564, 54310, 33236, 1005, 12106, 1249, 27010, 340, 6948, 12808, 6227, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOptions_Validate(t *testing.T) { fn := func(args ...interface{}) (interface{}, error) { opt := args[0].(infoOptions) return nil, (&opt).validate() } cases := trial.Cases{ "valid options": { Input: infoOptions{DestTemplate: "nop://", SrcPath: "nop://"}, }, "missing destination": { Input: infoOptions{}, ShouldErr: true, }, } trial.New(fn, cases).Test(t) }
explode_data.jsonl/77119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 3798, 62, 17926, 1155, 353, 8840, 836, 8, 341, 40095, 1669, 2915, 7356, 2503, 4970, 28875, 320, 4970, 22655, 1465, 8, 341, 197, 64838, 1669, 2827, 58, 15, 936, 7, 2733, 3798, 340, 197, 853, 2092, 11, 15899, 2912, 568, 70...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWriter(t *testing.T) { Convey(`A Writer instance outputting to a Buffer`, t, func() { buf := bytes.Buffer{} w := Writer{} bundle := logpb.ButlerLogBundle{ Entries: []*logpb.ButlerLogBundle_Entry{ {}, }, } Convey(`When configured to compress with a threshold of 64`, func() { w.Compress = true w.CompressThreshold = 64 Convey(`Will not compress if below the compression threshold.`, func() { So(w.Write(&buf, &bundle), ShouldBeNil) r, err := read(&buf) So(err, ShouldBeNil) So(r.Metadata.Compression, ShouldEqual, logpb.ButlerMetadata_NONE) So(r.Metadata.ProtoVersion, ShouldEqual, logpb.Version) }) Convey(`Will not write data larger than the maximum bundle size.`, func() { w.maxSize = 16 bundle.Secret = bytes.Repeat([]byte{'A'}, 17) err := w.Write(&buf, &bundle) So(err, ShouldNotBeNil) So(err, assertions.ShouldErrLike, "exceeds soft cap") }) Convey(`Will compress data >= the threshold.`, func() { bundle.Secret = bytes.Repeat([]byte{'A'}, 64) So(w.Write(&buf, &bundle), ShouldBeNil) r, err := read(&buf) So(err, ShouldBeNil) So(r.Metadata.Compression, ShouldEqual, logpb.ButlerMetadata_ZLIB) So(r.Metadata.ProtoVersion, ShouldEqual, logpb.Version) Convey(`And can be reused.`, func() { So(w.Write(&buf, &bundle), ShouldBeNil) r, err := read(&buf) So(err, ShouldBeNil) So(r.Metadata.Compression, ShouldEqual, logpb.ButlerMetadata_ZLIB) So(r.Metadata.ProtoVersion, ShouldEqual, logpb.Version) }) }) }) }) }
explode_data.jsonl/7593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 682 }
[ 2830, 3393, 6492, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 5809, 32, 29404, 2867, 2550, 1280, 311, 264, 10312, 7808, 259, 11, 2915, 368, 341, 197, 26398, 1669, 5820, 22622, 16094, 197, 6692, 1669, 29404, 16094, 197, 2233, 4206, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetClient(t *testing.T) { New([]config.Client{ { Serial: "12FF9503829A3A0DDE9CB87191A472D4", Scopes: []string{"READ", "WRITE"}, }, }) if _, err := GetClient("12FF9503829A3A0DDE9CB87191A472D4"); err != nil { t.Fatal(err) } if _, err := GetClient("00FF9503829A3A0DDE9CB87191A472D4"); err == nil { t.Fatal(err) } }
explode_data.jsonl/65812
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 1949, 2959, 1155, 353, 8840, 836, 8, 341, 197, 3564, 10556, 1676, 11716, 515, 197, 197, 515, 298, 93658, 25, 330, 16, 17, 1748, 24, 20, 15, 18, 23, 17, 24, 32, 18, 32, 15, 35, 1150, 24, 12979, 23, 22, 16, 24, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSearchPosts(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() experimentalViewArchivedChannels := *th.App.Config().TeamSettings.ExperimentalViewArchivedChannels defer func() { th.App.UpdateConfig(func(cfg *model.Config) { cfg.TeamSettings.ExperimentalViewArchivedChannels = &experimentalViewArchivedChannels }) }() th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.ExperimentalViewArchivedChannels = true }) th.LoginBasic() Client := th.Client message := "search for post1" _ = th.CreateMessagePost(message) message = "search for post2" post2 := th.CreateMessagePost(message) message = "#hashtag search for post3" post3 := th.CreateMessagePost(message) message = "hashtag for post4" _ = th.CreateMessagePost(message) archivedChannel := th.CreatePublicChannel() _ = th.CreateMessagePostWithClient(th.Client, archivedChannel, "#hashtag for post3") th.Client.DeleteChannel(archivedChannel.Id) terms := "search" isOrSearch := false timezoneOffset := 5 searchParams := model.SearchParameter{ Terms: &terms, IsOrSearch: &isOrSearch, TimeZoneOffset: &timezoneOffset, } posts, resp := Client.SearchPostsWithParams(th.BasicTeam.Id, &searchParams) CheckNoError(t, resp) require.Len(t, posts.Order, 3, "wrong search") terms = "search" page := 0 perPage := 2 searchParams = model.SearchParameter{ Terms: &terms, IsOrSearch: &isOrSearch, TimeZoneOffset: &timezoneOffset, Page: &page, PerPage: &perPage, } posts2, resp := Client.SearchPostsWithParams(th.BasicTeam.Id, &searchParams) CheckNoError(t, resp) // We don't support paging for DB search yet, modify this when we do. 
require.Len(t, posts2.Order, 3, "Wrong number of posts") assert.Equal(t, posts.Order[0], posts2.Order[0]) assert.Equal(t, posts.Order[1], posts2.Order[1]) page = 1 searchParams = model.SearchParameter{ Terms: &terms, IsOrSearch: &isOrSearch, TimeZoneOffset: &timezoneOffset, Page: &page, PerPage: &perPage, } posts2, resp = Client.SearchPostsWithParams(th.BasicTeam.Id, &searchParams) CheckNoError(t, resp) // We don't support paging for DB search yet, modify this when we do. require.Empty(t, posts2.Order, "Wrong number of posts") posts, resp = Client.SearchPosts(th.BasicTeam.Id, "search", false) CheckNoError(t, resp) require.Len(t, posts.Order, 3, "wrong search") posts, resp = Client.SearchPosts(th.BasicTeam.Id, "post2", false) CheckNoError(t, resp) require.Len(t, posts.Order, 1, "wrong number of posts") require.Equal(t, post2.Id, posts.Order[0], "wrong search") posts, resp = Client.SearchPosts(th.BasicTeam.Id, "#hashtag", false) CheckNoError(t, resp) require.Len(t, posts.Order, 1, "wrong number of posts") require.Equal(t, post3.Id, posts.Order[0], "wrong search") terms = "#hashtag" includeDeletedChannels := true searchParams = model.SearchParameter{ Terms: &terms, IsOrSearch: &isOrSearch, TimeZoneOffset: &timezoneOffset, IncludeDeletedChannels: &includeDeletedChannels, } posts, resp = Client.SearchPostsWithParams(th.BasicTeam.Id, &searchParams) CheckNoError(t, resp) require.Len(t, posts.Order, 2, "wrong search") th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.ExperimentalViewArchivedChannels = false }) posts, resp = Client.SearchPostsWithParams(th.BasicTeam.Id, &searchParams) CheckNoError(t, resp) require.Len(t, posts.Order, 1, "wrong search") posts, _ = Client.SearchPosts(th.BasicTeam.Id, "*", false) require.Empty(t, posts.Order, "searching for just * shouldn't return any results") posts, resp = Client.SearchPosts(th.BasicTeam.Id, "post1 post2", true) CheckNoError(t, resp) require.Len(t, posts.Order, 2, "wrong search results") _, resp = 
Client.SearchPosts("junk", "#sgtitlereview", false) CheckBadRequestStatus(t, resp) _, resp = Client.SearchPosts(model.NewId(), "#sgtitlereview", false) CheckForbiddenStatus(t, resp) _, resp = Client.SearchPosts(th.BasicTeam.Id, "", false) CheckBadRequestStatus(t, resp) Client.Logout() _, resp = Client.SearchPosts(th.BasicTeam.Id, "#sgtitlereview", false) CheckUnauthorizedStatus(t, resp) }
explode_data.jsonl/5257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1586 }
[ 2830, 3393, 5890, 19631, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 8122, 25513, 851, 18727, 2221, 35925, 1669, 353, 339, 5105, 10753, 1005, 14597, 6086, 90320, 851, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAppMergePatch(t *testing.T) { testApp := newTestApp() ctx := context.Background() ctx = context.WithValue(ctx, "claims", &jwt.StandardClaims{Subject: "admin"}) appServer := newTestAppServer(testApp) appServer.enf.SetDefaultRole("") app, err := appServer.Patch(ctx, &application.ApplicationPatchRequest{ Name: &testApp.Name, Patch: `{"spec": { "source": { "path": "foo" } }}`, PatchType: "merge"}) assert.NoError(t, err) assert.Equal(t, "foo", app.Spec.Source.Path) }
explode_data.jsonl/46622
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 2164, 52096, 43622, 1155, 353, 8840, 836, 8, 341, 18185, 2164, 1669, 501, 2271, 2164, 741, 20985, 1669, 2266, 19047, 741, 20985, 284, 2266, 26124, 1130, 7502, 11, 330, 48561, 497, 609, 41592, 53615, 51133, 90, 13019, 25, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExportClientList(t *testing.T) { for _, isExportClientList := range []bool{true, false} { e := getTestExporter() e.options.ExportClientList = isExportClientList chM := make(chan prometheus.Metric) go func() { e.Collect(chM) close(chM) }() found := false for m := range chM { if strings.Contains(m.Desc().String(), "connected_clients_details") { found = true } } if isExportClientList && !found { t.Errorf("connected_clients_details was *not* found in isExportClientList metrics but expected") } else if !isExportClientList && found { t.Errorf("connected_clients_details was *found* in isExportClientList metrics but *not* expected") } } }
explode_data.jsonl/46981
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 16894, 2959, 852, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 374, 16894, 2959, 852, 1669, 2088, 3056, 2641, 90, 1866, 11, 895, 92, 341, 197, 7727, 1669, 633, 2271, 88025, 741, 197, 7727, 10912, 81077, 2959, 852, 284, 374, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVersionSerialization(t *testing.T) { h1 := NewHeight(10, 100) b := h1.ToBytes() h2, n := NewHeightFromBytes(b) testutil.AssertEquals(t, h2, h1) testutil.AssertEquals(t, n, len(b)) }
explode_data.jsonl/20997
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 5637, 35865, 1155, 353, 8840, 836, 8, 341, 9598, 16, 1669, 1532, 3640, 7, 16, 15, 11, 220, 16, 15, 15, 340, 2233, 1669, 305, 16, 3274, 7078, 741, 9598, 17, 11, 308, 1669, 1532, 3640, 3830, 7078, 1883, 340, 18185, 1314,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBlockUndoDataDeserializingErrors(t *testing.T) { t.Parallel() tests := []struct { name string serialized []byte errCode ErrorCode }{ { name: "short read", serialized: hexToBytes("00"), errCode: ErrUndoDataShortRead, }, { name: "bad size", serialized: hexToBytes("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"), errCode: ErrUndoDataCorrupt, }, } for _, test := range tests { // Ensure the expected error type is returned. _, err := deserializeBlockUndoData(test.serialized) ticketDBErr, ok := err.(DBError) if !ok { t.Errorf("couldn't convert deserializeBlockUndoData error "+ "to ticket db error (err: %v)", err) continue } if ticketDBErr.GetCode() != test.errCode { t.Errorf("deserializeBlockUndoData (%s): expected error type "+ "does not match - got %v, want %v", test.name, ticketDBErr.ErrorCode, test.errCode) continue } } }
explode_data.jsonl/18322
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 389 }
[ 2830, 3393, 4713, 59815, 1043, 4896, 2848, 4849, 13877, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 197, 75277, 3056, 3782, 198, 197, 9859, 2078, 262, 60084, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestClone(t *testing.T) { t1 := manager.Get("t1") t2 := t1.Clone() if t1 == t2 { t.Fatal("error! get clone not working") } }
explode_data.jsonl/19168
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 37677, 1155, 353, 8840, 836, 8, 341, 3244, 16, 1669, 6645, 2234, 445, 83, 16, 1138, 3244, 17, 1669, 259, 16, 64463, 2822, 743, 259, 16, 621, 259, 17, 341, 197, 3244, 26133, 445, 841, 0, 633, 14715, 537, 3238, 1138, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestReceiverHasName(t *testing.T) { in := ` route: receivers: - name: '' ` _, err := Load(in) expected := "missing name in receiver" if err == nil { t.Fatalf("no error returned, expected:\n%q", expected) } if err.Error() != expected { t.Errorf("\nexpected:\n%q\ngot:\n%q", expected, err.Error()) } }
explode_data.jsonl/72897
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 25436, 10281, 675, 1155, 353, 8840, 836, 8, 341, 17430, 1669, 22074, 8966, 1447, 53387, 1945, 510, 12, 829, 25, 11776, 3989, 197, 6878, 1848, 1669, 8893, 5900, 692, 42400, 1669, 330, 30616, 829, 304, 13964, 1837, 743, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrintGoRushVersion(t *testing.T) { SetVersion("3.0.0") ver := GetVersion() PrintGoRushVersion() assert.Equal(t, "3.0.0", ver) }
explode_data.jsonl/67599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 59 }
[ 2830, 3393, 8994, 10850, 49, 1116, 5637, 1155, 353, 8840, 836, 8, 341, 22212, 5637, 445, 18, 13, 15, 13, 15, 1138, 197, 423, 1669, 2126, 5637, 741, 58702, 10850, 49, 1116, 5637, 2822, 6948, 12808, 1155, 11, 330, 18, 13, 15, 13, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGenerateManifestWithAnnotatedAndRegularGitTagHashes(t *testing.T) { regularGitTagHash := "632039659e542ed7de0c170a4fcc1c571b288fc0" annotatedGitTaghash := "95249be61b028d566c29d47b19e65c5603388a41" invalidGitTaghash := "invalid-tag" actualCommitSHA := "632039659e542ed7de0c170a4fcc1c571b288fc0" tests := []struct { name string ctx context.Context manifestRequest *apiclient.ManifestRequest wantError bool service *Service }{ { name: "Case: Git tag hash matches latest commit SHA (regular tag)", ctx: context.Background(), manifestRequest: &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, ApplicationSource: &argoappv1.ApplicationSource{ TargetRevision: regularGitTagHash, }, NoCache: true, }, wantError: false, service: newServiceWithCommitSHA(".", regularGitTagHash), }, { name: "Case: Git tag hash does not match latest commit SHA (annotated tag)", ctx: context.Background(), manifestRequest: &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, ApplicationSource: &argoappv1.ApplicationSource{ TargetRevision: annotatedGitTaghash, }, NoCache: true, }, wantError: false, service: newServiceWithCommitSHA(".", annotatedGitTaghash), }, { name: "Case: Git tag hash is invalid", ctx: context.Background(), manifestRequest: &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, ApplicationSource: &argoappv1.ApplicationSource{ TargetRevision: invalidGitTaghash, }, NoCache: true, }, wantError: true, service: newServiceWithCommitSHA(".", invalidGitTaghash), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { manifestResponse, err := tt.service.GenerateManifest(tt.ctx, tt.manifestRequest) if !tt.wantError { if err == nil { assert.Equal(t, manifestResponse.Revision, actualCommitSHA) } else { t.Errorf("unexpected error") } } else { if err == nil { t.Errorf("expected an error but did not throw one") } } }) } }
explode_data.jsonl/5699
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 909 }
[ 2830, 3393, 31115, 38495, 2354, 2082, 87029, 3036, 30404, 46562, 5668, 6370, 288, 1155, 353, 8840, 836, 8, 341, 197, 22308, 46562, 5668, 6370, 1669, 330, 21, 18, 17, 15, 18, 24, 21, 20, 24, 68, 20, 19, 17, 291, 22, 450, 15, 66, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_Exec(t *testing.T) { ctx := context.Background() conn, err := sql.Open("pgx", "postgres://postgres:postgres@localhost:5432/postgres") require.NoError(t, err) defer conn.Close() teardown := func() { _, err := conn.Exec("delete from sample_table where id IN (2)") require.NoError(t, err) } t.Run("table not found", func(t *testing.T) { err := builder.With(conn).InsertTable("not_found").Values(1, "Joe").Exec(ctx) require.EqualError(t, err, "ERROR: relation \"not_found\" does not exist (SQLSTATE 42P01)") }) t.Run("insert successfully", func(t *testing.T) { defer teardown() var affectedRows int64 err := builder.With(conn). InsertTable("sample_table"). Columns("id", "name"). Values(2, "Joe Two"). AffectedRows(&affectedRows). Exec(ctx) require.NoError(t, err) require.EqualValues(t, 1, affectedRows) }) }
explode_data.jsonl/68955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 62, 10216, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 32917, 11, 1848, 1669, 5704, 12953, 445, 3517, 87, 497, 330, 43070, 1110, 43070, 25, 43070, 31, 8301, 25, 20, 19, 18, 17, 29996, 17818, 1138, 17957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1