text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestSolveSudoku(t *testing.T) { board := [][]byte{ {'5', '3', '.', '.', '7', '.', '.', '.', '.'}, {'6', '.', '.', '1', '9', '5', '.', '.', '.'}, {'.', '9', '8', '.', '.', '.', '.', '6', '.'}, {'8', '.', '.', '.', '6', '.', '.', '.', '3'}, {'4', '.', '.', '8', '.', '3', '.', '.', '1'}, {'7', '.', '.', '.', '2', '.', '.', '.', '6'}, {'.', '6', '.', '.', '.', '.', '2', '8', '.'}, {'.', '.', '.', '4', '1', '9', '.', '.', '5'}, {'.', '.', '.', '.', '8', '.', '.', '7', '9'}, } board2 := [][]byte{ {'5', '3', '4', '6', '7', '8', '9', '1', '2'}, {'6', '7', '2', '1', '9', '5', '3', '4', '8'}, {'1', '9', '8', '3', '4', '2', '5', '6', '7'}, {'8', '5', '9', '7', '6', '1', '4', '2', '3'}, {'4', '2', '6', '8', '5', '3', '7', '9', '1'}, {'7', '1', '3', '9', '2', '4', '8', '5', '6'}, {'9', '6', '1', '5', '3', '7', '2', '8', '4'}, {'2', '8', '7', '4', '1', '9', '6', '3', '5'}, {'3', '4', '5', '2', '8', '6', '1', '7', '9'}, } solveSudoku(board) assert.EqualValues(t, board2, board) }
explode_data.jsonl/25179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 639 }
[ 2830, 3393, 50, 3948, 50, 68302, 1155, 353, 8840, 836, 8, 341, 59868, 1669, 52931, 3782, 515, 197, 197, 13608, 20, 516, 364, 18, 516, 45669, 45669, 364, 22, 516, 45669, 45669, 45669, 24361, 1583, 197, 197, 13608, 21, 516, 45669, 45669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIndexOf(t *testing.T) { type args struct { s []uint64 x uint64 } tests := []struct { name string args args want int }{ { name: "contains", args: args{ s: []uint64{1, 2, 3, 4}, x: 3, }, want: 2, }, { name: "not_contains", args: args{ s: []uint64{1, 1, 2, 1, 3, 5, 4}, x: 0, }, want: -1, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var got int = uint64s.IndexOf(tt.args.s, tt.args.x) assert.Equal(t, tt.want, got) got = uint64s.New(tt.args.s).IndexOf(tt.args.x) assert.Equal(t, tt.want, got) }) } }
explode_data.jsonl/55619
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 333 }
[ 2830, 3393, 27376, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 3056, 2496, 21, 19, 198, 197, 10225, 2622, 21, 19, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCRUDTenantOps(t *testing.T) { ast := assert.New(t) initTntTest(ast) ast.False(dao.HasTenant(tenant)) err := dao.AddTenant(tenant) ast.Nil(err) time.Sleep(1 * time.Second) tenants := make([]string, 0) err = dao.GetTenants(func(t string) bool { tenants = append(tenants, t) return true }) ast.Nil(err) ast.Equal(1, len(tenants)) ast.True(dao.HasTenant(tenant)) ast.Equal(int64(0), dao.GetSize(tenant)) _, err = dao.RemoveTenant(tenant) ast.Nil(err) ast.False(dao.HasTenant(tenant)) tenants = make([]string, 0) err = dao.GetTenants(func(t string) bool { tenants = append(tenants, t) return true }) ast.Nil(err) ast.Equal(0, len(tenants)) closeTntTest(ast) }
explode_data.jsonl/67147
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 369 }
[ 2830, 3393, 8973, 4656, 71252, 38904, 1155, 353, 8840, 836, 8, 972, 88836, 1669, 2060, 7121, 1155, 1218, 28248, 51, 406, 2271, 52574, 7229, 88836, 50757, 1500, 3441, 16152, 71252, 1155, 25121, 30991, 9859, 1669, 24775, 1904, 71252, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceRespectsClientIPSessionAffinity(t *testing.T) { az := getTestCloud() svc := getTestService("service-sa-clientip", v1.ProtocolTCP, 7170) svc.Spec.SessionAffinity = v1.ServiceAffinityClientIP clusterResources := getClusterResources(az, 1, 1) lb, err := az.reconcileLoadBalancer(testClusterName, &svc, clusterResources.nodes, true /* wantLb */) if err != nil { t.Errorf("Unexpected error reconciling svc1: %q", err) } validateLoadBalancer(t, lb, svc) lbRule, err := findLBRuleForPort(*lb.LoadBalancingRules, 7170) if err != nil { t.Error(err) } if lbRule.LoadDistribution != network.SourceIP { t.Errorf("Expected LB rule to have SourceIP load distribution but was %s", lbRule.LoadDistribution) } }
explode_data.jsonl/50398
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 1860, 1061, 7973, 2959, 3298, 5283, 25841, 13489, 1155, 353, 8840, 836, 8, 341, 197, 1370, 1669, 633, 2271, 16055, 741, 1903, 7362, 1669, 633, 2271, 1860, 445, 7936, 1331, 64, 30011, 573, 497, 348, 16, 54096, 49896, 11, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAgent_RegisterService_ManagedConnectProxy(t *testing.T) { t.Parallel() assert := assert.New(t) require := require.New(t) a := NewTestAgent(t.Name(), ` connect { proxy { allow_managed_api_registration = true } } `) defer a.Shutdown() // Register a proxy. Note that the destination doesn't exist here on // this agent or in the catalog at all. This is intended and part // of the design. args := &api.AgentServiceRegistration{ Name: "web", Port: 8000, Connect: &api.AgentServiceConnect{ Proxy: &api.AgentServiceConnectProxy{ ExecMode: "script", Command: []string{"proxy.sh"}, Config: map[string]interface{}{ "foo": "bar", }, }, }, } req, _ := http.NewRequest("PUT", "/v1/agent/service/register?token=abc123", jsonReader(args)) resp := httptest.NewRecorder() obj, err := a.srv.AgentRegisterService(resp, req) assert.NoError(err) assert.Nil(obj) require.Equal(200, resp.Code, "request failed with body: %s", resp.Body.String()) // Ensure the target service _, ok := a.State.Services()["web"] assert.True(ok, "has service") // Ensure the proxy service was registered proxySvc, ok := a.State.Services()["web-proxy"] require.True(ok, "has proxy service") assert.Equal(structs.ServiceKindConnectProxy, proxySvc.Kind) assert.Equal("web", proxySvc.ProxyDestination) assert.NotEmpty(proxySvc.Port, "a port should have been assigned") // Ensure proxy itself was registered proxy := a.State.Proxy("web-proxy") require.NotNil(proxy) assert.Equal(structs.ProxyExecModeScript, proxy.Proxy.ExecMode) assert.Equal([]string{"proxy.sh"}, proxy.Proxy.Command) assert.Equal(args.Connect.Proxy.Config, proxy.Proxy.Config) // Ensure the token was configured assert.Equal("abc123", a.State.ServiceToken("web")) assert.Equal("abc123", a.State.ServiceToken("web-proxy")) }
explode_data.jsonl/33629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 661 }
[ 2830, 3393, 16810, 73124, 1860, 71628, 3279, 14611, 16219, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 6948, 1669, 2060, 7121, 1155, 340, 17957, 1669, 1373, 7121, 1155, 340, 11323, 1669, 1532, 2271, 16810, 1155, 2967, 1507, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOuterLinkV2WithMetadataEmbedderDecode(t *testing.T) { var o outerLinkV2WithMetadataEmbedder err := MsgpackDecode(&o, []byte{0x1, 0x2}) requireErrorHasSuffix(t, errCodecDecodeSelf, err) }
explode_data.jsonl/72241
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 51322, 3939, 53, 17, 2354, 14610, 25486, 1107, 32564, 1155, 353, 8840, 836, 8, 341, 2405, 297, 15955, 3939, 53, 17, 2354, 14610, 25486, 1107, 198, 9859, 1669, 24205, 4748, 32564, 2099, 78, 11, 3056, 3782, 90, 15, 87, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestMarshalGoLine(t *testing.T) { buf := make([]byte, DefaultMaxSmallPacketSize) a := assert.New(t) n := marshalGoLine(buf, *goLine) buf = buf[:n] a.Equal(goLineBytes, buf) }
explode_data.jsonl/72611
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 55438, 10850, 2460, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 1281, 10556, 3782, 11, 7899, 5974, 25307, 16679, 1695, 340, 11323, 1669, 2060, 7121, 1155, 692, 9038, 1669, 60771, 10850, 2460, 10731, 11, 353, 3346, 2460, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestStatefulPodControlUpdatesPodStorage(t *testing.T) { recorder := record.NewFakeRecorder(10) set := newStatefulSet(3) pod := newStatefulSetPod(set, 0) fakeClient := &fake.Clientset{} pvcIndexer := cache.NewIndexer(cache.MetaNamespaceKeyFunc, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}) pvcLister := corelisters.NewPersistentVolumeClaimLister(pvcIndexer) control := NewRealStatefulPodControl(fakeClient, nil, nil, pvcLister, recorder) pvcs := getPersistentVolumeClaims(set, pod) volumes := make([]v1.Volume, 0, len(pod.Spec.Volumes)) for i := range pod.Spec.Volumes { if _, contains := pvcs[pod.Spec.Volumes[i].Name]; !contains { volumes = append(volumes, pod.Spec.Volumes[i]) } } pod.Spec.Volumes = volumes fakeClient.AddReactor("update", "pods", func(action core.Action) (bool, runtime.Object, error) { update := action.(core.UpdateAction) return true, update.GetObject(), nil }) fakeClient.AddReactor("create", "persistentvolumeclaims", func(action core.Action) (bool, runtime.Object, error) { update := action.(core.UpdateAction) return true, update.GetObject(), nil }) var updated *v1.Pod fakeClient.PrependReactor("update", "pods", func(action core.Action) (bool, runtime.Object, error) { update := action.(core.UpdateAction) updated = update.GetObject().(*v1.Pod) return true, update.GetObject(), nil }) if err := control.UpdateStatefulPod(set, pod); err != nil { t.Errorf("Successful update returned an error: %s", err) } events := collectEvents(recorder.Events) if eventCount := len(events); eventCount != 2 { t.Errorf("Pod storage update successful: got %d events, but want 2", eventCount) } for i := range events { if !strings.Contains(events[i], v1.EventTypeNormal) { t.Errorf("Found unexpected non-normal event %s", events[i]) } } if !storageMatches(set, updated) { t.Error("Name update failed identity does not match") } }
explode_data.jsonl/17905
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 676 }
[ 2830, 3393, 1397, 1262, 23527, 3273, 37091, 23527, 5793, 1155, 353, 8840, 836, 8, 341, 67904, 1358, 1669, 3255, 7121, 52317, 47023, 7, 16, 15, 340, 8196, 1669, 36848, 1262, 1649, 7, 18, 340, 3223, 347, 1669, 36848, 1262, 1649, 23527, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetFabricData(t *testing.T) { config.SetUpMockConfig(t) defer func() { err := common.TruncateDB(common.InMemory) if err != nil { t.Fatalf("error: %v", err) } err = common.TruncateDB(common.OnDisk) if err != nil { t.Fatalf("error: %v", err) } }() fabuuid := "6d4a0a66-7efa-578e-83cf-44dc68d2874e" mockFabricData(t, fabuuid, "CFM") fabric, err := GetFabricData(fabuuid) assert.Nil(t, err, "Error Should be nil") assert.Equal(t, fabuuid, fabric.FabricUUID, "Fabric uuid should be same") assert.Equal(t, "CFM", fabric.PluginID, "PluginID should be CFM") // Negative Test case // Invalid fabric uuid _, err = GetFabricData("uuid") assert.NotNil(t, err, "Error Should not be nil") }
explode_data.jsonl/49407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 1949, 81731, 1043, 1155, 353, 8840, 836, 8, 341, 25873, 4202, 2324, 11571, 2648, 1155, 340, 16867, 2915, 368, 341, 197, 9859, 1669, 4185, 8240, 26900, 3506, 57802, 5337, 10642, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestServicePendantRec(t *testing.T) { convey.Convey("PendantRec", t, func() { err := s.PendantRec(context.Background(), nil) convey.So(err, convey.ShouldBeNil) }) }
explode_data.jsonl/21137
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 1860, 47, 20372, 3820, 1155, 353, 8840, 836, 8, 341, 37203, 5617, 4801, 5617, 445, 47, 20372, 3820, 497, 259, 11, 2915, 368, 341, 197, 9859, 1669, 274, 1069, 20372, 3820, 5378, 19047, 1507, 2092, 340, 197, 37203, 5617, 463...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestRepoGC(t *testing.T) { clusters, mock := createClusters(t) defer shutdownClusters(t, clusters, mock) f := func(t *testing.T, c *Cluster) { gRepoGC, err := c.RepoGC(context.Background()) if err != nil { t.Fatal("gc should have worked:", err) } if gRepoGC.PeerMap == nil { t.Fatal("expected a non-nil peer map") } if len(gRepoGC.PeerMap) != nClusters { t.Errorf("expected repo gc information for %d peer", nClusters) } for _, repoGC := range gRepoGC.PeerMap { testRepoGC(t, repoGC) } } runF(t, clusters, f) }
explode_data.jsonl/66624
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 25243, 22863, 1155, 353, 8840, 836, 8, 341, 39407, 14605, 11, 7860, 1669, 1855, 94992, 1155, 340, 16867, 23766, 94992, 1155, 11, 26968, 11, 7860, 340, 1166, 1669, 2915, 1155, 353, 8840, 836, 11, 272, 353, 28678, 8, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestProjects(t *testing.T) { cliPath, err := e2e.Bin() if err != nil { t.Fatalf("unexpected error: %v", err) } n, err := e2e.RandInt(1000) if err != nil { t.Fatalf("unexpected error: %v", err) } projectName := fmt.Sprintf("e2e-proj-%v", n) var projectID string t.Run("Create", func(t *testing.T) { // This depends on a ORG_ID ENV cmd := exec.Command(cliPath, iamEntity, projectsEntity, "create", projectName, "-o=json") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() assert.NoError(t, err, string(resp)) var project mongodbatlas.Project if err = json.Unmarshal(resp, &project); err != nil { t.Fatalf("unexpected error: %v", err) } if project.Name != projectName { t.Errorf("got=%#v\nwant=%#v\n", project.Name, projectName) } projectID = project.ID }) t.Run("List", func(t *testing.T) { cmd := exec.Command(cliPath, iamEntity, projectsEntity, "ls", "-o=json") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() assert.NoError(t, err, string(resp)) }) t.Run("Describe", func(t *testing.T) { cmd := exec.Command(cliPath, iamEntity, projectsEntity, "describe", projectID, "-o=json") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() assert.NoError(t, err, string(resp)) }) t.Run("Users", func(t *testing.T) { cmd := exec.Command(cliPath, iamEntity, projectsEntity, usersEntity, "ls", "--projectId", projectID, "-o=json") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() assert.NoError(t, err, string(resp)) }) t.Run("Delete", func(t *testing.T) { cmd := exec.Command(cliPath, iamEntity, projectsEntity, "delete", projectID, "--force") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() assert.NoError(t, err, string(resp)) expected := fmt.Sprintf("Project '%s' deleted\n", projectID) if string(resp) != expected { t.Errorf("got=%#v\nwant=%#v\n", string(resp), expected) } }) }
explode_data.jsonl/46058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 882 }
[ 2830, 3393, 29958, 1155, 353, 8840, 836, 8, 341, 86448, 1820, 11, 1848, 1669, 384, 17, 68, 1785, 258, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 53859, 1465, 25, 1018, 85, 497, 1848, 340, 197, 630, 9038, 11, 1848, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGenerateChildSpan(t *testing.T) { random := rand.Reader traceID := generateTraceID(random) parentID := generateSpanID(random) spanInputs := &PICTSpanInputs{ Parent: SpanParentChild, Tracestate: TraceStateEmpty, Kind: SpanKindClient, Attributes: SpanAttrDatabaseSQL, Events: SpanChildCountEmpty, Links: SpanChildCountNil, Status: SpanStatusOk, } span := GenerateSpan(traceID, parentID, "get_test_info", spanInputs, random) assert.Equal(t, traceID, span.TraceId) assert.Equal(t, parentID, span.ParentSpanId) assert.Equal(t, 12, len(span.Attributes)) assert.Equal(t, otlptrace.Status_STATUS_CODE_OK, span.Status.Code) }
explode_data.jsonl/34132
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 270 }
[ 2830, 3393, 31115, 3652, 12485, 1155, 353, 8840, 836, 8, 341, 83628, 1669, 10382, 47431, 198, 65058, 915, 1669, 6923, 6550, 915, 25110, 340, 24804, 915, 1669, 6923, 12485, 915, 25110, 340, 197, 1480, 31946, 1669, 609, 1893, 1162, 12485, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrivateDataPutAndGet(t *testing.T) { sdk := mainSDK orgsContext := setupMultiOrgContext(t, sdk) err := integration.EnsureChannelCreatedAndPeersJoined(t, sdk, orgChannelID, "orgchannel.tx", orgsContext) require.NoError(t, err) coll1 := "collection1" ccID := integration.GenerateExamplePvtID(true) collConfig, err := newCollectionConfig(coll1, "OR('Org1MSP.member','Org2MSP.member')", 0, 2, 1000) require.NoError(t, err) err = integration.InstallExamplePvtChaincode(orgsContext, ccID) require.NoError(t, err) err = integration.InstantiateExamplePvtChaincode(orgsContext, orgChannelID, ccID, "OR('Org1MSP.member','Org2MSP.member')", collConfig) require.NoError(t, err) ctxProvider := sdk.ChannelContext(orgChannelID, fabsdk.WithUser(org1User), fabsdk.WithOrg(org1Name)) chClient, err := channel.New(ctxProvider) require.NoError(t, err) key1 := "key1" key2 := "key2" key3 := "key3" value1 := "pvtValue1" value2 := "pvtValue2" value3 := "pvtValue3" response, err := chClient.Query( channel.Request{ ChaincodeID: ccID, Fcn: "getprivate", Args: [][]byte{[]byte(coll1), []byte(key1)}, }, channel.WithRetry(retry.DefaultChannelOpts), ) require.NoError(t, err) t.Logf("Got response payload: [%s]", string(response.Payload)) require.Nil(t, response.Payload) response, err = chClient.Query( channel.Request{ ChaincodeID: ccID, Fcn: "getprivatebyrange", Args: [][]byte{[]byte(coll1), []byte(key1), []byte(key3)}, }, channel.WithRetry(retry.DefaultChannelOpts), ) require.NoError(t, err) t.Logf("Got response payload: [%s]", string(response.Payload)) require.Empty(t, string(response.Payload)) response, err = chClient.Execute( channel.Request{ ChaincodeID: ccID, Fcn: "putprivate", Args: [][]byte{[]byte(coll1), []byte(key1), []byte(value1)}, }, channel.WithRetry(retry.DefaultChannelOpts), ) require.NoError(t, err) require.NotEmptyf(t, response.Responses, "expecting at least one response") response, err = chClient.Execute( channel.Request{ ChaincodeID: ccID, Fcn: "putprivate", Args: 
[][]byte{[]byte(coll1), []byte(key2), []byte(value2)}, }, channel.WithRetry(retry.DefaultChannelOpts), ) require.NoError(t, err) require.NotEmptyf(t, response.Responses, "expecting at least one response") response, err = chClient.Execute( channel.Request{ ChaincodeID: ccID, Fcn: "putprivate", Args: [][]byte{[]byte(coll1), []byte(key3), []byte(value3)}, }, channel.WithRetry(retry.TestRetryOpts), ) require.NoError(t, err) require.NotEmptyf(t, response.Responses, "expecting at least one response") response, err = chClient.Query( channel.Request{ ChaincodeID: ccID, Fcn: "getprivate", Args: [][]byte{[]byte(coll1), []byte(key1)}, }, channel.WithRetry(retry.TestRetryOpts), ) require.NoError(t, err) t.Logf("Got response payload: %s", string(response.Payload)) require.Equal(t, value1, string(response.Payload)) response, err = chClient.Query( channel.Request{ ChaincodeID: ccID, Fcn: "getprivatebyrange", Args: [][]byte{[]byte(coll1), []byte(key1), []byte(key3)}, }, channel.WithRetry(retry.DefaultChannelOpts), ) require.NoError(t, err) t.Logf("Got response payload: [%s]", string(response.Payload)) require.NotEmpty(t, string(response.Payload)) }
explode_data.jsonl/5045
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1433 }
[ 2830, 3393, 16787, 1043, 19103, 97726, 1155, 353, 8840, 836, 8, 341, 1903, 7584, 1669, 1887, 31534, 271, 87625, 82, 1972, 1669, 6505, 20358, 42437, 1972, 1155, 11, 45402, 340, 9859, 1669, 17590, 22834, 19098, 9629, 11694, 3036, 10197, 388...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSummaryDataPointSlice_RemoveIf(t *testing.T) { // Test RemoveIf on empty slice emptySlice := NewSummaryDataPointSlice() emptySlice.RemoveIf(func(el SummaryDataPoint) bool { t.Fail() return false }) // Test RemoveIf filtered := generateTestSummaryDataPointSlice() pos := 0 filtered.RemoveIf(func(el SummaryDataPoint) bool { pos++ return pos%3 == 0 }) assert.Equal(t, 5, filtered.Len()) }
explode_data.jsonl/32746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 19237, 1043, 2609, 33236, 66843, 2679, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 10783, 2679, 389, 4287, 15983, 198, 197, 3194, 33236, 1669, 1532, 19237, 1043, 2609, 33236, 741, 197, 3194, 33236, 13270, 2679, 18552, 18584,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseKeyValue(t *testing.T) { k, v := parseKeyValue("foo=bar") if k != "foo" { t.Errorf("Expected %s, got %s", "foo", k) } if v != "bar" { t.Errorf("Expected %s, got %s", "bar", v) } k2, v2 := parseKeyValue("baz") if k2 != "" { t.Errorf("Expected %s, got %s", "", k2) } if v2 != "baz" { t.Errorf("Expected %s, got %s", "baz", v2) } }
explode_data.jsonl/14387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 14463, 72082, 1155, 353, 8840, 836, 8, 341, 16463, 11, 348, 1669, 4715, 72082, 445, 7975, 28, 2257, 1138, 743, 595, 961, 330, 7975, 1, 341, 197, 3244, 13080, 445, 18896, 1018, 82, 11, 2684, 1018, 82, 497, 330, 7975, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAdmin_Block(t *testing.T) { srv, ts := prep(t) assert.NotNil(t, srv) defer cleanup(ts) c1 := store.Comment{Text: "test test #1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, User: store.User{Name: "user1 name", ID: "user1"}} c2 := store.Comment{Text: "test test #2", ParentID: "p1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}, User: store.User{Name: "user2", ID: "user2"}} _, err := srv.DataService.Create(c1) assert.Nil(t, err) _, err = srv.DataService.Create(c2) assert.Nil(t, err) block := func(val int) (code int, body []byte) { client := http.Client{} req, e := http.NewRequest(http.MethodPut, fmt.Sprintf("%s/api/v1/admin/user/%s?site=radio-t&block=%d", ts.URL, "user1", val), nil) assert.Nil(t, e) req.SetBasicAuth("dev", "password") resp, e := client.Do(req) require.Nil(t, e) body, e = ioutil.ReadAll(resp.Body) assert.Nil(t, e) resp.Body.Close() return resp.StatusCode, body } code, body := block(1) require.Equal(t, 200, code) j := JSON{} err = json.Unmarshal(body, &j) assert.Nil(t, err) assert.Equal(t, "user1", j["user_id"]) assert.Equal(t, true, j["block"]) assert.Equal(t, "radio-t", j["site_id"]) res, code := get(t, ts.URL+"/api/v1/find?site=radio-t&url=https://radio-t.com/blah&sort=+time") assert.Equal(t, 200, code) comments := commentsWithInfo{} err = json.Unmarshal([]byte(res), &comments) assert.Nil(t, err) assert.Equal(t, 2, len(comments.Comments), "should have 2 comments") assert.Equal(t, "", comments.Comments[0].Text) assert.True(t, comments.Comments[0].Deleted) code, body = block(-1) require.Equal(t, 200, code) err = json.Unmarshal(body, &j) assert.Nil(t, err) assert.Equal(t, false, j["block"]) }
explode_data.jsonl/70319
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 783 }
[ 2830, 3393, 7210, 51779, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 11, 10591, 1669, 21327, 1155, 340, 6948, 93882, 1155, 11, 43578, 340, 16867, 21290, 35864, 692, 1444, 16, 1669, 3553, 56730, 90, 1178, 25, 330, 1944, 1273, 671, 16, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWindowLogicalPlanAmbiguous(t *testing.T) { sql := "select a, max(a) over(), sum(a) over() from t" var planString string // The ambiguous logical plan which contains window function can usually be found in 100 iterations. iterations := 100 s := createPlannerSuite() for i := 0; i < iterations; i++ { stmt, err := s.p.ParseOneStmt(sql, "", "") require.NoError(t, err) p, _, err := BuildLogicalPlanForTest(context.Background(), s.ctx, stmt, s.is) require.NoError(t, err) if planString == "" { planString = ToString(p) } else { require.Equal(t, ToString(p), planString) } } }
explode_data.jsonl/50239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 4267, 64312, 20485, 54032, 27029, 1155, 353, 8840, 836, 8, 341, 30633, 1669, 330, 1742, 264, 11, 1932, 2877, 8, 916, 1507, 2629, 2877, 8, 916, 368, 504, 259, 698, 2405, 3119, 703, 914, 198, 197, 322, 576, 54761, 19819, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddAttachedChildTopic(t *testing.T) { rootTopic := &XMLTopic{ Title: "测试用例", } topic1 := rootTopic for _, dir := range strutil.Split("/d1", "/", true) { topic1 = topic1.AddAttachedChildTopic(dir, true) } topic2 := rootTopic for _, dir := range strutil.Split("/d1/d4", "/", true) { topic2 = topic2.AddAttachedChildTopic(dir, true) } topic3 := topic2 for _, dir := range strutil.Split("/d5/d6", "/", true) { topic3 = topic3.AddAttachedChildTopic(dir, true) } b, err := yaml.Marshal(rootTopic) assert.NoError(t, err) fmt.Println(string(b)) }
explode_data.jsonl/30059
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 2212, 65987, 3652, 26406, 1155, 353, 8840, 836, 8, 341, 33698, 26406, 1669, 609, 10609, 26406, 515, 197, 92233, 25, 330, 81705, 11622, 26355, 756, 197, 630, 3244, 24810, 16, 1669, 3704, 26406, 198, 2023, 8358, 5419, 1669, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetBackendPoolName(t *testing.T) { testcases := []struct { name string service v1.Service clusterName string expectedPoolName string }{ { name: "GetBackendPoolName should return <clusterName>-IPv6", service: getTestService("test1", v1.ProtocolTCP, nil, true, 80), clusterName: "azure", expectedPoolName: "azure-IPv6", }, { name: "GetBackendPoolName should return <clusterName>", service: getTestService("test1", v1.ProtocolTCP, nil, false, 80), clusterName: "azure", expectedPoolName: "azure", }, } for _, test := range testcases { backPoolName := getBackendPoolName(test.clusterName, &test.service) assert.Equal(t, test.expectedPoolName, backPoolName, test.name) } }
explode_data.jsonl/7464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 1949, 29699, 10551, 675, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 11609, 1797, 914, 198, 197, 52934, 688, 348, 16, 13860, 198, 197, 197, 18855, 675, 414, 914, 198, 197, 42400, 10551, 675, 914...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInitConfigFromRawWrongType(t *testing.T) { // get a config byte for testing configPath := filepath.Join(getConfigPath(), configPemTestFile) cBytes, err := loadConfigBytesFromFile(t, configPath) if err != nil { t.Fatalf("Failed to load sample bytes from File. Error: %s", err) } // test init config with empty type _, err = config.FromRaw(cBytes, "")() if err == nil { t.Fatal("Expected error when initializing config with wrong config type but got no error.") } // test init config with wrong type _, err = config.FromRaw(cBytes, "json")() if err == nil { t.Fatal("FromRaw didn't fail when config type is wrong") } }
explode_data.jsonl/34090
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 3803, 2648, 3830, 20015, 29185, 929, 1155, 353, 8840, 836, 8, 341, 197, 322, 633, 264, 2193, 4922, 369, 7497, 198, 25873, 1820, 1669, 26054, 22363, 5433, 2648, 1820, 1507, 2193, 47, 336, 2271, 1703, 340, 1444, 7078, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestDeviceFilter_Privileged checks that a single wildcard allow-all device
// rule (type 'a', any major/minor, "rwm") compiles to an eBPF program that
// loads the request parameters and unconditionally returns 1 (accept).
// NOTE(review): the `expected` raw-string literal is whitespace-sensitive
// disassembly output; its layout here is assumed faithful to the original
// source — confirm against testDeviceFilter's formatting before reflowing.
func TestDeviceFilter_Privileged(t *testing.T) { devices := []*devices.Rule{ { Type: 'a', Major: -1, Minor: -1, Permissions: "rwm", Allow: true, }, } expected := ` // load parameters into registers 0: LdXMemW dst: r2 src: r1 off: 0 imm: 0 1: And32Imm dst: r2 imm: 65535 2: LdXMemW dst: r3 src: r1 off: 0 imm: 0 3: RSh32Imm dst: r3 imm: 16 4: LdXMemW dst: r4 src: r1 off: 4 imm: 0 5: LdXMemW dst: r5 src: r1 off: 8 imm: 0 block-0: // return 1 (accept) 6: Mov32Imm dst: r0 imm: 1 7: Exit ` testDeviceFilter(t, devices, expected) }
explode_data.jsonl/2943
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 6985, 5632, 32716, 344, 68431, 1155, 353, 8840, 836, 8, 341, 27302, 1216, 1669, 29838, 46966, 63961, 515, 197, 197, 515, 298, 27725, 25, 286, 364, 64, 751, 298, 9209, 3035, 25, 981, 481, 16, 345, 298, 197, 57024, 25, 981...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test(t *testing.T) { mt.ParallelTest(t, versions, isReady, func(t *testing.T, i mt.Instance) { p := &Postgres{} addr := pgConnectionString(i.Host(), i.Port()) d, err := p.Open(addr) if err != nil { t.Fatalf("%v", err) } defer d.Close() dt.Test(t, d, []byte("SELECT 1")) }) }
explode_data.jsonl/70105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 1155, 353, 8840, 836, 8, 341, 2109, 83, 41288, 7957, 2271, 1155, 11, 10795, 11, 374, 19202, 345, 197, 29244, 1155, 353, 8840, 836, 11, 600, 11965, 12688, 8, 341, 298, 3223, 1669, 609, 4133, 17818, 16094, 298, 53183, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestUnexpectedArgValue_GotFormatter verifies that a custom GotFormatter
// attached via GotFormatterAdapter is used to render the received argument
// ("Got: 03") in the mismatch message when a call's argument fails to match.
func TestUnexpectedArgValue_GotFormatter(t *testing.T) {
	reporter, ctrl := createFixtures(t)
	defer reporter.recoverUnexpectedFatal()
	subject := new(Subject)

	expectedArg0 := TestStruct{Number: 123, Message: "hello"}
	ctrl.RecordCall(
		subject,
		"ActOnTestStructMethod",
		expectedArg0,
		gomock.GotFormatterAdapter(
			gomock.GotFormatterFunc(func(i interface{}) string {
				// Leading 0s
				return fmt.Sprintf("%02d", i)
			}),
			gomock.Eq(15),
		),
	)

	// The second argument (3) does not match Eq(15); the fatal message must
	// show the formatter's rendering of the received value.
	reporter.assertFatal(func() {
		ctrl.Call(subject, "ActOnTestStructMethod", TestStruct{Number: 123, Message: "hello"}, 3)
	}, "Unexpected call to", "doesn't match the argument at index 1", "Got: 03\nWant: is equal to 15")

	reporter.assertFatal(func() {
		// The expected call wasn't made.
		ctrl.Finish()
	})
}
explode_data.jsonl/17277
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 29430, 2735, 1130, 2646, 354, 14183, 1155, 353, 8840, 836, 8, 341, 69931, 261, 11, 23743, 1669, 1855, 25958, 18513, 1155, 340, 16867, 18960, 1327, 3688, 29430, 62396, 741, 28624, 583, 1669, 501, 7, 13019, 692, 42400, 2735, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUsername exercises Username against URLs with and without embedded
// credentials, including scheme-less forms.
func TestUsername(t *testing.T) {
	cases := []struct{ URL, Expected string }{
		{"user:pass@google.com", "user"},
		{"https://user:pass@google.com", "user"},
		{"https://user@google.com", "user"},
		{"https://google.com", ""},
		{"google.com", ""},
	}
	for _, tc := range cases {
		got := Username(tc.URL)
		if got != tc.Expected {
			t.Errorf(`Url (%q) returned %q for Username(), but %q was expected`, tc.URL, got, tc.Expected)
		}
	}
}
explode_data.jsonl/30836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 11115, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 54452, 1669, 2088, 3056, 1235, 90, 5548, 11, 31021, 914, 335, 515, 197, 197, 4913, 872, 25, 6385, 9781, 2671, 905, 497, 330, 872, 7115, 197, 197, 4913, 2428, 1110, 872, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConfigureQuarkusTraitBuildSubmitted(t *testing.T) { quarkusTrait, environment := createNominalQuarkusTest() environment.IntegrationKit.Status.Phase = v1.IntegrationKitPhaseBuildSubmitted configured, err := quarkusTrait.Configure(environment) assert.True(t, configured) assert.Nil(t, err) err = quarkusTrait.Apply(environment) assert.Nil(t, err) build := getBuilderTask(environment.BuildTasks) assert.NotNil(t, t, build) assert.Len(t, build.Steps, len(builder.Quarkus.CommonSteps)+3) }
explode_data.jsonl/28417
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 28560, 2183, 838, 355, 49257, 11066, 46541, 1155, 353, 8840, 836, 8, 341, 197, 446, 838, 355, 49257, 11, 4573, 1669, 1855, 36312, 977, 2183, 838, 355, 2271, 741, 197, 23294, 7371, 17376, 7695, 10538, 35989, 519, 284, 348, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewBinaryMessage(t *testing.T) { b := []byte("TEST") message := wspubsub.NewBinaryMessage(b) require.Equal(t, wspubsub.MessageTypeBinary, message.Type) require.Equal(t, b, message.Payload) s := "TEST" message = wspubsub.NewBinaryMessageFromString(s) require.Equal(t, wspubsub.MessageTypeBinary, message.Type) require.Equal(t, []byte(s), message.Payload) }
explode_data.jsonl/37742
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 3564, 21338, 2052, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 3056, 3782, 445, 10033, 1138, 24753, 1669, 62507, 392, 1966, 7121, 21338, 2052, 1883, 340, 17957, 12808, 1155, 11, 62507, 392, 1966, 8472, 929, 21338, 11, 1943, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJsonDecodeNonStringScalarInStringContext(t *testing.T) { testOnce.Do(testInitAll) var b = `{"s.true": "true", "b.true": true, "s.false": "false", "b.false": false, "s.10": "10", "i.10": 10, "i.-10": -10}` var golden = map[string]string{"s.true": "true", "b.true": "true", "s.false": "false", "b.false": "false", "s.10": "10", "i.10": "10", "i.-10": "-10"} var m map[string]string d := NewDecoderBytes([]byte(b), testJsonH) d.MustDecode(&m) if err := deepEqual(golden, m); err == nil { logT(t, "++++ match: decoded: %#v", m) } else { logT(t, "---- mismatch: %v ==> golden: %#v, decoded: %#v", err, golden, m) failT(t) } }
explode_data.jsonl/21001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 5014, 32564, 8121, 703, 20639, 641, 703, 1972, 1155, 353, 8840, 836, 8, 341, 18185, 12522, 33596, 8623, 3803, 2403, 340, 2405, 293, 284, 1565, 4913, 82, 54138, 788, 330, 1866, 497, 330, 65, 54138, 788, 830, 11, 330, 82, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestConfigTxFlags runs the configtxgen main() end-to-end with a full flag
// set and asserts that both output artifacts (the channel-creation tx and
// the anchor-peer update) get written to disk.
func TestConfigTxFlags(t *testing.T) {
	configTxDest := filepath.Join(tmpDir, "configtx")
	configTxDestAnchorPeers := filepath.Join(tmpDir, "configtxAnchorPeers")

	// main() parses flags from the process-global os.Args; save and restore
	// both os.Args and the global FlagSet so other tests are unaffected.
	oldArgs := os.Args
	defer func() {
		os.Args = oldArgs
		flag.CommandLine = flag.NewFlagSet(os.Args[0], flag.ExitOnError)
	}()

	cleanup := configtest.SetDevFabricConfigPath(t)
	defer cleanup()
	devConfigDir, err := configtest.GetDevConfigDir()
	assert.NoError(t, err, "failed to get dev config dir")

	os.Args = []string{
		"cmd",
		"-outputCreateChannelTx=" + configTxDest,
		"-profile=" + genesisconfig.SampleSingleMSPChannelProfile,
		"-configPath=" + devConfigDir,
		"-inspectChannelCreateTx=" + configTxDest,
		"-outputAnchorPeersUpdate=" + configTxDestAnchorPeers,
		"-asOrg=" + genesisconfig.SampleOrgName,
	}
	main()

	_, err = os.Stat(configTxDest)
	assert.NoError(t, err, "Configtx file is written successfully")
	_, err = os.Stat(configTxDestAnchorPeers)
	assert.NoError(t, err, "Configtx anchor peers file is written successfully")
}
explode_data.jsonl/14593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 2648, 31584, 9195, 1155, 353, 8840, 836, 8, 341, 25873, 51, 15764, 477, 1669, 26054, 22363, 10368, 6184, 11, 330, 1676, 3998, 1138, 25873, 51, 15764, 477, 14677, 10197, 388, 1669, 26054, 22363, 10368, 6184, 11, 330, 1676, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseLocation(t *testing.T) { tests := []struct { name string value string want time.Time }{ { "empty value", "", time.Time{}}, { "2012-11-22 21:28:10", "2012-11-22 21:28:10", time.Date(2012, 11, 22, 21, 28, 10, 0, time.Local), }, { "2012-11-22", "2012-11-22", time.Date(2012, 11, 22, 0, 0, 0, 0, time.Local), }, { "2012-11-22 21:28:10", "2012-11-22 21:28:10", time.Date(2012, 11, 22, 21, 28, 10, 0, time.Local), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := ParseLocation(tt.value); !got.Equal(tt.want) { t.Errorf("Parse() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/45313
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 360 }
[ 2830, 3393, 14463, 4707, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 16309, 914, 198, 197, 50780, 220, 882, 16299, 198, 197, 59403, 197, 197, 515, 298, 197, 1, 3194, 897, 756, 298, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEspWiFi_MacAddress(t *testing.T) { tests := []struct { name string file string want string }{ {name: "esp-mega-20190301", file: "esp-mega-20190301.json", want: "12:34:56:78:90:AB"}, {name: "esp-mega-20190903", file: "esp-mega-20190903.json", want: "12:34:56:78:90:AB"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ed, err := testReadEspData(tt.file) require.NoError(t, err) got := ed.WiFi.MacAddress() require.Equal(t, tt.want, got) }) } }
explode_data.jsonl/43006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 76954, 95551, 1245, 580, 4286, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 17661, 914, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 47006, 25, 330, 24610, 1448, 11188, 12, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCodeResult_String(t *testing.T) { v := CodeResult{ Name: String(""), Path: String(""), SHA: String(""), HTMLURL: String(""), Repository: &Repository{}, } want := `github.CodeResult{Name:"", Path:"", SHA:"", HTMLURL:"", Repository:github.Repository{}}` if got := v.String(); got != want { t.Errorf("CodeResult.String = %v, want %v", got, want) } }
explode_data.jsonl/33223
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 2078, 2077, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 6119, 2077, 515, 197, 21297, 25, 981, 923, 445, 4461, 197, 69640, 25, 981, 923, 445, 4461, 197, 7568, 17020, 25, 286, 923, 445, 4461, 197, 197, 5835, 3144, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBodyer(t *testing.T) { //t.SkipNow() for i, tt := range bodyerTests { if got, want := tt.bodyer.Type(), tt.wantType; got != want { t.Errorf("#%d: type got %q, want %q", i, got, want) } r, err := tt.bodyer.Body() if err != tt.err { t.Errorf("#%d: err got %v, want %v", i, err, tt.err) } b, err := ioutil.ReadAll(r) if err != nil { t.Errorf("#%d: ioutil.ReadAll got err %v, want <nil>", i, err) } if got, want := string(b), tt.wantBody; got != want { t.Errorf("#%d: body got %q, want %q", i, got, want) } } }
explode_data.jsonl/777
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 273 }
[ 2830, 3393, 5444, 261, 1155, 353, 8840, 836, 8, 341, 197, 322, 83, 57776, 7039, 741, 2023, 600, 11, 17853, 1669, 2088, 2487, 261, 18200, 341, 197, 743, 2684, 11, 1366, 1669, 17853, 5079, 261, 10184, 1507, 17853, 70212, 929, 26, 2684, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_Properties_ValidateNetworkPolicy(t *testing.T) { p := &Properties{} p.OrchestratorProfile = &OrchestratorProfile{} p.OrchestratorProfile.OrchestratorType = Kubernetes k8sVersion := "1.8.0" for _, policy := range NetworkPolicyValues { p.OrchestratorProfile.KubernetesConfig = &KubernetesConfig{} p.OrchestratorProfile.KubernetesConfig.NetworkPolicy = policy if err := p.OrchestratorProfile.KubernetesConfig.validateNetworkPolicy(k8sVersion, false); err != nil { t.Errorf( "should not error on networkPolicy=\"%s\" on k8sVersion=\"%s\"", policy, k8sVersion, ) } } p.OrchestratorProfile.KubernetesConfig.NetworkPolicy = "not-existing" if err := p.OrchestratorProfile.KubernetesConfig.validateNetworkPolicy(k8sVersion, false); err == nil { t.Errorf( "should error on invalid networkPolicy", ) } k8sVersion = "1.7.9" p.OrchestratorProfile.KubernetesConfig.NetworkPolicy = "azure" p.OrchestratorProfile.KubernetesConfig.NetworkPlugin = "azure" if err := p.OrchestratorProfile.KubernetesConfig.validateNetworkPolicy(k8sVersion, false); err == nil { t.Errorf( "should error on azure networkPolicy + azure networkPlugin with k8s version < 1.8.0", ) } p.OrchestratorProfile.KubernetesConfig.NetworkPolicy = "calico" if err := p.OrchestratorProfile.KubernetesConfig.validateNetworkPolicy(k8sVersion, true); err == nil { t.Errorf( "should error on calico for windows clusters", ) } p.OrchestratorProfile.KubernetesConfig.NetworkPolicy = NetworkPolicyCilium if err := p.OrchestratorProfile.KubernetesConfig.validateNetworkPolicy(k8sVersion, true); err == nil { t.Errorf( "should error on cilium for windows clusters", ) } p.OrchestratorProfile.KubernetesConfig.NetworkPolicy = "flannel" if err := p.OrchestratorProfile.KubernetesConfig.validateNetworkPolicy(k8sVersion, true); err == nil { t.Errorf( "should error on flannel for windows clusters", ) } }
explode_data.jsonl/17864
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 727 }
[ 2830, 3393, 1088, 9249, 62, 17926, 12320, 13825, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 609, 7903, 16094, 3223, 90449, 331, 15111, 850, 8526, 284, 609, 2195, 331, 15111, 850, 8526, 16094, 3223, 90449, 331, 15111, 850, 8526, 90449, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestClient_DeleteTLSActivation_validation(t *testing.T) { t.Parallel() var err error record(t, "custom_tls_activation/delete", func(c *Client) { err = c.DeleteTLSActivation(&DeleteTLSActivationInput{ ID: "ACTIVATION_ID", }) }) if err != nil { t.Fatal(err) } err = testClient.DeleteTLSActivation(&DeleteTLSActivationInput{}) if err != ErrMissingID { t.Errorf("bad error: %s", err) } }
explode_data.jsonl/2981
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 2959, 57418, 45439, 61460, 19416, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2405, 1848, 1465, 198, 71952, 1155, 11, 330, 9163, 71262, 52404, 32275, 497, 2915, 1337, 353, 2959, 8, 341, 197, 9859, 284, 272, 18872,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnscopedVariableNames(t *testing.T) { workflow, _ := fixture(t, "valid/no-interpolation.workflow") assert.Equal(t, []string{"${value}"}, workflow.Actions[0].Runs.Split()) }
explode_data.jsonl/55665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 1806, 78740, 7827, 7980, 1155, 353, 8840, 836, 8, 341, 197, 56249, 11, 716, 1669, 12507, 1155, 11, 330, 1891, 33100, 44894, 44686, 72774, 1138, 6948, 12808, 1155, 11, 3056, 917, 4913, 2365, 957, 9863, 2137, 28288, 72044, 58,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestExpandUsernamesToEmails(t *testing.T) { resetMocks() database.Mocks.Users.GetByUsername = func(ctx context.Context, username string) (*types.User, error) { if want := "alice"; username != want { t.Errorf("got %q, want %q", username, want) } return &types.User{ID: 123}, nil } database.Mocks.UserEmails.ListByUser = func(_ context.Context, opt database.UserEmailsListOptions) ([]*database.UserEmail, error) { if want := int32(123); opt.UserID != want { t.Errorf("got %v, want %v", opt.UserID, want) } t := time.Now() return []*database.UserEmail{ {Email: "alice@example.com", VerifiedAt: &t}, {Email: "alice@example.org", VerifiedAt: &t}, }, nil } x, err := expandUsernamesToEmails(context.Background(), []string{"foo", "@alice"}) if err != nil { t.Fatal(err) } if want := []string{"foo", `alice@example\.com`, `alice@example\.org`}; !reflect.DeepEqual(x, want) { t.Errorf("got %q, want %q", x, want) } }
explode_data.jsonl/37797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 38946, 1474, 11400, 1249, 4781, 82, 1155, 353, 8840, 836, 8, 341, 70343, 72577, 741, 2698, 2211, 24664, 82, 36782, 2234, 91519, 284, 2915, 7502, 2266, 9328, 11, 5934, 914, 8, 4609, 9242, 7344, 11, 1465, 8, 341, 197, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTrackUndelegationPeriodicVestingAcc exercises TrackUndelegation on a
// periodic vesting account (3 periods: 12h, 6h, 6h over a 24h schedule)
// across full, partial, zero-amount, and slashed-validator scenarios.
func TestTrackUndelegationPeriodicVestingAcc(t *testing.T) {
	now := osttime.Now()
	endTime := now.Add(24 * time.Hour)
	periods := types.Periods{
		types.Period{Length: int64(12 * 60 * 60), Amount: sdk.Coins{sdk.NewInt64Coin(feeDenom, 500), sdk.NewInt64Coin(stakeDenom, 50)}},
		types.Period{Length: int64(6 * 60 * 60), Amount: sdk.Coins{sdk.NewInt64Coin(feeDenom, 250), sdk.NewInt64Coin(stakeDenom, 25)}},
		types.Period{Length: int64(6 * 60 * 60), Amount: sdk.Coins{sdk.NewInt64Coin(feeDenom, 250), sdk.NewInt64Coin(stakeDenom, 25)}},
	}

	_, _, addr := testdata.KeyTestPubAddr()
	origCoins := sdk.Coins{sdk.NewInt64Coin(feeDenom, 1000), sdk.NewInt64Coin(stakeDenom, 100)}
	bacc := authtypes.NewBaseAccountWithAddress(addr)

	// require the ability to undelegate all vesting coins at the beginning of vesting
	pva := types.NewPeriodicVestingAccount(bacc, origCoins, now.Unix(), periods)
	pva.TrackDelegation(now, origCoins, origCoins)
	pva.TrackUndelegation(origCoins)
	require.Nil(t, pva.DelegatedFree)
	require.Nil(t, pva.DelegatedVesting)

	// require the ability to undelegate all vested coins at the end of vesting
	pva = types.NewPeriodicVestingAccount(bacc, origCoins, now.Unix(), periods)
	pva.TrackDelegation(endTime, origCoins, origCoins)
	pva.TrackUndelegation(origCoins)
	require.Nil(t, pva.DelegatedFree)
	require.Nil(t, pva.DelegatedVesting)

	// require the ability to undelegate half of coins
	pva = types.NewPeriodicVestingAccount(bacc, origCoins, now.Unix(), periods)
	pva.TrackDelegation(endTime, origCoins, periods[0].Amount)
	pva.TrackUndelegation(periods[0].Amount)
	require.Nil(t, pva.DelegatedFree)
	require.Nil(t, pva.DelegatedVesting)

	// require no modifications when the undelegation amount is zero
	pva = types.NewPeriodicVestingAccount(bacc, origCoins, now.Unix(), periods)
	require.Panics(t, func() {
		pva.TrackUndelegation(sdk.Coins{sdk.NewInt64Coin(stakeDenom, 0)})
	})
	require.Nil(t, pva.DelegatedFree)
	require.Nil(t, pva.DelegatedVesting)

	// vest 50% and delegate to two validators
	pva = types.NewPeriodicVestingAccount(bacc, origCoins, now.Unix(), periods)
	pva.TrackDelegation(now.Add(12*time.Hour), origCoins, sdk.Coins{sdk.NewInt64Coin(stakeDenom, 50)})
	pva.TrackDelegation(now.Add(12*time.Hour), origCoins, sdk.Coins{sdk.NewInt64Coin(stakeDenom, 50)})

	// undelegate from one validator that got slashed 50%
	pva.TrackUndelegation(sdk.Coins{sdk.NewInt64Coin(stakeDenom, 25)})
	require.Equal(t, sdk.Coins{sdk.NewInt64Coin(stakeDenom, 25)}, pva.DelegatedFree)
	require.Equal(t, sdk.Coins{sdk.NewInt64Coin(stakeDenom, 50)}, pva.DelegatedVesting)

	// undelegate from the other validator that did not get slashed
	pva.TrackUndelegation(sdk.Coins{sdk.NewInt64Coin(stakeDenom, 50)})
	require.Nil(t, pva.DelegatedFree)
	require.Equal(t, sdk.Coins{sdk.NewInt64Coin(stakeDenom, 25)}, pva.DelegatedVesting)
}
explode_data.jsonl/31295
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1145 }
[ 2830, 3393, 15667, 19957, 68, 87566, 23750, 292, 53, 59855, 14603, 1155, 353, 8840, 836, 8, 341, 80922, 1669, 36896, 1678, 13244, 741, 6246, 1462, 1669, 1431, 1904, 7, 17, 19, 353, 882, 73550, 340, 197, 19304, 82, 1669, 4494, 14834, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDriver_checkDiskExists_V1(t *testing.T) { d, _ := NewFakeDriver(t) d.setDiskThrottlingCache(consts.ThrottlingKey, "") _, err := d.checkDiskExists(context.TODO(), "testurl/subscriptions/12/resourceGroups/23/providers/Microsoft.Compute/disks/name") assert.Equal(t, err, nil) }
explode_data.jsonl/16678
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 11349, 7200, 47583, 15575, 2334, 16, 1155, 353, 8840, 836, 8, 341, 2698, 11, 716, 1669, 1532, 52317, 11349, 1155, 340, 2698, 980, 47583, 1001, 46689, 2718, 8233, 2741, 82, 5111, 46689, 2718, 1592, 11, 14676, 197, 6878, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestComplaints(t *testing.T) { db := openTestDb() store := From(db) now := time.Now().UTC() // Test get empty complaints totalComplaints, err := store.GetTotalComplaints(1, 1) assert.Nil(t, err) assert.Equal(t, int64(0), totalComplaints) complaints := []entities.Complaint{ { ID: 1, UserID: 1, CampaignID: 1, Recipient: "jhon@doe.com", UserAgent: "android", Type: "bla", FeedbackID: "bla", CreatedAt: now, }, { ID: 2, UserID: 1, CampaignID: 1, Recipient: "jhon@email.com", UserAgent: "windows", Type: "bla", FeedbackID: "bla", CreatedAt: now, }, } // test insert opens for i := range complaints { err = store.CreateComplaint(&complaints[i]) assert.Nil(t, err) } // test get total complaints totalComplaints, err = store.GetTotalComplaints(1, 1) assert.Nil(t, err) assert.Equal(t, int64(2), totalComplaints) }
explode_data.jsonl/49926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 1092, 31297, 82, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 1787, 2271, 7994, 2822, 57279, 1669, 5542, 9791, 340, 80922, 1669, 882, 13244, 1005, 21183, 2822, 197, 322, 3393, 633, 4287, 21171, 198, 34493, 1092, 31297, 82, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestStateEnterProposeYesPrivValidator checks that when this node is the
// round's proposer (single-validator state), entering a new round produces a
// complete proposal — Proposal, ProposalBlock and ProposalBlockParts all set
// — and that no propose timeout fires.
func TestStateEnterProposeYesPrivValidator(t *testing.T) {
	config := configSetup(t)
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	cs, _ := makeState(ctx, t, makeStateArgs{config: config, validators: 1})
	height, round := cs.Height, cs.Round

	// Listen for propose timeout event
	timeoutCh := subscribe(ctx, t, cs.eventBus, types.EventQueryTimeoutPropose)
	proposalCh := subscribe(ctx, t, cs.eventBus, types.EventQueryCompleteProposal)

	cs.enterNewRound(ctx, height, round)
	cs.startRoutines(ctx, 3)

	ensureNewProposal(t, proposalCh, height, round)

	// Check that Proposal, ProposalBlock, ProposalBlockParts are set.
	rs := cs.GetRoundState()
	if rs.Proposal == nil {
		t.Error("rs.Proposal should be set")
	}
	if rs.ProposalBlock == nil {
		t.Error("rs.ProposalBlock should be set")
	}
	if rs.ProposalBlockParts.Total() == 0 {
		t.Error("rs.ProposalBlockParts should be set")
	}

	// if we're a validator, enterPropose should not timeout
	ensureNoNewTimeout(t, timeoutCh, cs.state.ConsensusParams.Timeout.ProposeTimeout(round).Nanoseconds())
}
explode_data.jsonl/54258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 365 }
[ 2830, 3393, 1397, 6269, 2008, 960, 9454, 32124, 14256, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 2193, 21821, 1155, 340, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 71899, 11, 716, 1669, 1281, 1397, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestAppend feeds a two-series test set through Append with
// 2*GOMAXPROCS workers and asserts the exact samples recorded per series
// hash. The expected floats must match exactly — presumably the series
// generator is deterministic; confirm against testSet's implementation.
func TestAppend(t *testing.T) {
	s := &testSet{
		count: 2,
	}
	a := &testAppendable{samples: map[uint64][]sample{}}
	testutil.Ok(t, Append(context.Background(), 2*runtime.GOMAXPROCS(0), a, s))

	// Keys are the two series hashes; values are the (timestamp, value)
	// samples expected for each.
	testutil.Equals(t, map[uint64][]sample{
		0x6577cd4df75e4415: {
			{T: 10000, V: 140.13863149001767},
			{T: 20000, V: 106.88960028354377},
			{T: 30000, V: 134.20855473136945},
			{T: 40000, V: 156.66629546848895},
			{T: 50000, V: 157.9608877899447},
		},
		0xc552620224fd8b78: {
			{T: 10000, V: 106.42558121988247},
			{T: 20000, V: 175.7484565559158},
			{T: 30000, V: 135.06032974565488},
			{T: 40000, V: 194.61995987962968},
			{T: 50000, V: 163.6088665433866},
		},
	}, a.samples)
}
explode_data.jsonl/12055
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 23877, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 1944, 1649, 515, 197, 18032, 25, 220, 17, 345, 197, 630, 11323, 1669, 609, 1944, 23877, 480, 90, 41118, 25, 2415, 58, 2496, 21, 19, 45725, 13611, 6257, 532, 18185, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetCellStyleCustomNumberFormat(t *testing.T) { f := NewFile() f.SetCellValue("Sheet1", "A1", 42920.5) f.SetCellValue("Sheet1", "A2", 42920.5) style, err := f.NewStyle(`{"custom_number_format": "[$-380A]dddd\\,\\ dd\" de \"mmmm\" de \"yyyy;@"}`) if err != nil { t.Log(err) } assert.NoError(t, f.SetCellStyle("Sheet1", "A1", "A1", style)) style, err = f.NewStyle(`{"custom_number_format": "[$-380A]dddd\\,\\ dd\" de \"mmmm\" de \"yyyy;@"}`) if err != nil { t.Log(err) } assert.NoError(t, f.SetCellStyle("Sheet1", "A2", "A2", style)) assert.NoError(t, f.SaveAs(filepath.Join("test", "TestSetCellStyleCustomNumberFormat.xlsx"))) }
explode_data.jsonl/36973
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 1649, 15171, 10268, 2833, 4061, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 1703, 741, 1166, 4202, 23885, 445, 10541, 16, 497, 330, 32, 16, 497, 220, 19, 17, 24, 17, 15, 13, 20, 340, 1166, 4202, 23885, 445, 10541, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSaveMeta(t *testing.T) { got, err := saveDoc(&FieldListWithMeta{ Meta: searchMeta, Fields: searchFields, }) if err != nil { t.Fatalf("saveDoc: %v", err) } want := &pb.Document{ Field: protoFields, OrderId: proto.Int32(42), } if !proto.Equal(got, want) { t.Errorf("\ngot %v\nwant %v", got, want) } }
explode_data.jsonl/27952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 8784, 12175, 1155, 353, 8840, 836, 8, 341, 3174, 354, 11, 1848, 1669, 3581, 9550, 2099, 1877, 852, 2354, 12175, 515, 197, 9209, 1915, 25, 256, 2711, 12175, 345, 197, 197, 8941, 25, 2711, 8941, 345, 197, 3518, 743, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSugarConcatenatingLogging(t *testing.T) { tests := []struct { args []interface{} expect string }{ {[]interface{}{nil}, "<nil>"}, } // Common to all test cases. context := []interface{}{"foo", "bar"} expectedFields := []Field{String("foo", "bar")} for _, tt := range tests { withSugar(t, TraceLevel, nil, func(logger *SugaredLogger, logs *observer.ObservedLogs) { logger.With(context...).Trace(tt.args) logger.With(context...).Debug(tt.args...) logger.With(context...).Info(tt.args...) logger.With(context...).Warn(tt.args...) logger.With(context...).Error(tt.args...) logger.With(context...).DPanic(tt.args...) expected := make([]observer.LoggedEntry, 6) for i, lvl := range []zapcore.Level{TraceLevel, DebugLevel, InfoLevel, WarnLevel, ErrorLevel, DPanicLevel} { expected[i] = observer.LoggedEntry{ Entry: zapcore.Entry{Message: tt.expect, Level: lvl}, Context: expectedFields, } } assert.Equal(t, expected, logs.AllUntimed(), "Unexpected log output.") }) } }
explode_data.jsonl/5024
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 83414, 78440, 268, 1095, 34575, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 31215, 256, 3056, 4970, 16094, 197, 24952, 914, 198, 197, 59403, 197, 197, 90, 1294, 4970, 6257, 90, 8385, 2137, 4055, 8385, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVaribaleRefDifferentPrefix(t *testing.T) { th := kusttest_test.NewKustTestHarness(t, "/app/base") th.WriteK("/app/base", ` namePrefix: base- resources: - dev - test `) th.WriteK("/app/base/dev", ` namePrefix: dev- resources: - elasticsearch-dev-service.yml vars: - name: elasticsearch-dev-service-name objref: kind: Service name: elasticsearch apiVersion: v1 fieldref: fieldpath: metadata.name `) th.WriteF("/app/base/dev/elasticsearch-dev-service.yml", ` apiVersion: apps/v1 kind: StatefulSet metadata: name: elasticsearch spec: template: spec: containers: - name: elasticsearch env: - name: DISCOVERY_SERVICE value: "$(elasticsearch-dev-service-name).monitoring.svc.cluster.local" --- apiVersion: v1 kind: Service metadata: name: elasticsearch spec: ports: - name: transport port: 9300 protocol: TCP clusterIP: None `) th.WriteK("/app/base/test", ` namePrefix: test- resources: - elasticsearch-test-service.yml vars: - name: elasticsearch-test-service-name objref: kind: Service name: elasticsearch apiVersion: v1 fieldref: fieldpath: metadata.name `) th.WriteF("/app/base/test/elasticsearch-test-service.yml", ` apiVersion: apps/v1 kind: StatefulSet metadata: name: elasticsearch spec: template: spec: containers: - name: elasticsearch env: - name: DISCOVERY_SERVICE value: "$(elasticsearch-test-service-name).monitoring.svc.cluster.local" --- apiVersion: v1 kind: Service metadata: name: elasticsearch spec: ports: - name: transport port: 9300 protocol: TCP clusterIP: None `) m, err := th.MakeKustTarget().MakeCustomizedResMap() if err != nil { t.Fatalf("Err: %v", err) } th.AssertActualEqualsExpected(m, ` apiVersion: apps/v1 kind: StatefulSet metadata: name: base-dev-elasticsearch spec: template: spec: containers: - env: - name: DISCOVERY_SERVICE value: base-dev-elasticsearch.monitoring.svc.cluster.local name: elasticsearch --- apiVersion: v1 kind: Service metadata: name: base-dev-elasticsearch spec: clusterIP: None ports: - name: transport port: 9300 protocol: TCP --- 
apiVersion: apps/v1 kind: StatefulSet metadata: name: base-test-elasticsearch spec: template: spec: containers: - env: - name: DISCOVERY_SERVICE value: base-test-elasticsearch.monitoring.svc.cluster.local name: elasticsearch --- apiVersion: v1 kind: Service metadata: name: base-test-elasticsearch spec: clusterIP: None ports: - name: transport port: 9300 protocol: TCP `) }
explode_data.jsonl/53709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1136 }
[ 2830, 3393, 3962, 579, 1574, 3945, 69123, 14335, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 595, 590, 1944, 4452, 7121, 42, 590, 2271, 74248, 1155, 11, 3521, 676, 26090, 1138, 70479, 4073, 42, 4283, 676, 26090, 497, 22074, 606, 14335, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDoNotAdoptOrCreateIfBeingDeletedRace(t *testing.T) { labelMap := map[string]string{"foo": "bar"} // Bare client says it IS deleted. rs := newReplicaSet(2, labelMap) now := metav1.Now() rs.DeletionTimestamp = &now stopCh := make(chan struct{}) defer close(stopCh) manager, fakePodControl, informers := setupManagerWithGCEnabled(stopCh, rs) // Lister (cache) says it's NOT deleted. rs2 := *rs rs2.DeletionTimestamp = nil informers.Apps().V1().ReplicaSets().Informer().GetIndexer().Add(&rs2) // Recheck occurs if a matching orphan is present. pod1 := newPod("pod1", rs, v1.PodRunning, nil, false) informers.Core().V1().Pods().Informer().GetIndexer().Add(pod1) // sync should abort. err := manager.syncReplicaSet(GetKey(rs, t)) if err == nil { t.Error("syncReplicaSet() err = nil, expected non-nil") } // no patch, no create. validateSyncReplicaSet(t, fakePodControl, 0, 0, 0) }
explode_data.jsonl/7987
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 5404, 2623, 2589, 2912, 57111, 2679, 33142, 26039, 55991, 1155, 353, 8840, 836, 8, 341, 29277, 2227, 1669, 2415, 14032, 30953, 4913, 7975, 788, 330, 2257, 16707, 197, 322, 60792, 2943, 2727, 432, 3424, 11062, 624, 41231, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetContainerImage(t *testing.T) { clientset := test.New(3) os.Setenv(k8sutil.PodNamespaceEnvVar, "Default") defer os.Unsetenv(k8sutil.PodNamespaceEnvVar) os.Setenv(k8sutil.PodNameEnvVar, "mypod") defer os.Unsetenv(k8sutil.PodNameEnvVar) pod := v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "mypod", Namespace: "Default", }, Spec: v1.PodSpec{ Containers: []v1.Container{ { Name: "mypodContainer", Image: "rook/test", }, }, }, } clientset.CoreV1().Pods("Default").Create(&pod) // start a basic cluster returnPod, err := k8sutil.GetRunningPod(clientset) assert.Nil(t, err) assert.Equal(t, "mypod", returnPod.Name) }
explode_data.jsonl/35521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 316 }
[ 2830, 3393, 1949, 4502, 1906, 1155, 353, 8840, 836, 8, 341, 25291, 746, 1669, 1273, 7121, 7, 18, 692, 25078, 4202, 3160, 5969, 23, 82, 1314, 88823, 22699, 14359, 3962, 11, 330, 3675, 1138, 16867, 2643, 10616, 746, 3160, 5969, 23, 82, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequestHandlerSubscribeToEvents(t *testing.T) { deviceID := test.MustFindDeviceByName(test.TestDeviceName) ctx, cancel := context.WithTimeout(context.Background(), config.TEST_TIMEOUT) defer cancel() tearDown := service.SetUp(ctx, t) defer tearDown() // log.Setup(log.Config{Debug: true}) token := oauthTest.GetDefaultAccessToken(t) ctx = kitNetGrpc.CtxWithIncomingToken(kitNetGrpc.CtxWithToken(ctx, token), token) rdConn, err := grpcClient.New(config.MakeGrpcClientConfig(config.RESOURCE_DIRECTORY_HOST), log.Get()) require.NoError(t, err) defer func() { _ = rdConn.Close() }() rdc := pb.NewGrpcGatewayClient(rdConn.GRPC()) raConn, err := grpcClient.New(config.MakeGrpcClientConfig(config.RESOURCE_AGGREGATE_HOST), log.Get()) require.NoError(t, err) defer func() { _ = raConn.Close() }() rac := raservice.NewResourceAggregateClient(raConn.GRPC()) pool, err := ants.NewPool(1) require.NoError(t, err) natsConn, resourceSubscriber, err := natsTest.NewClientAndSubscriber(config.MakeSubscriberConfig(), log.Get(), subscriber.WithGoPool(pool.Submit), subscriber.WithUnmarshaler(utils.Unmarshal)) require.NoError(t, err) defer natsConn.Close() defer resourceSubscriber.Close() owner, err := kitNetGrpc.OwnerFromTokenMD(ctx, config.OWNER_CLAIM) require.NoError(t, err) subCache := subscription.NewSubscriptionsCache(resourceSubscriber.Conn(), func(err error) { t.Log(err) }) correlationID := "testToken" recvChan := make(chan *pb.Event, 1) s := subscription.New(func(e *pb.Event) error { select { case recvChan <- e: case <-ctx.Done(): } return nil }, correlationID, &pb.SubscribeToEvents_CreateSubscription{}) err = s.Init(owner, subCache) require.NoError(t, err) defer func() { err := s.Close() require.NoError(t, err) }() deviceID, shutdownDevSim := test.OnboardDevSim(ctx, t, rdc, deviceID, config.GW_HOST, nil) check(t, waitForEvent(ctx, t, recvChan), &pb.Event{ SubscriptionId: s.Id(), Type: &pb.Event_DeviceRegistered_{ DeviceRegistered: &pb.Event_DeviceRegistered{ DeviceIds: 
[]string{deviceID}, }, }, CorrelationId: correlationID, }) check(t, waitForEvent(ctx, t, recvChan), &pb.Event{ SubscriptionId: s.Id(), Type: &pb.Event_DeviceMetadataUpdated{ DeviceMetadataUpdated: pbTest.MakeDeviceMetadataUpdated(deviceID, commands.ShadowSynchronization_UNSET, ""), }, CorrelationId: correlationID, }) check(t, waitForEvent(ctx, t, recvChan), pbTest.ResourceLinkToPublishEvent(deviceID, correlationID, test.GetAllBackendResourceLinks())) expectedEvents := getResourceChangedEvents(t, deviceID, correlationID, s.Id()) for range expectedEvents { checkResourceChanged(t, waitForEvent(ctx, t, recvChan), expectedEvents) } checkAndValidateUpdate(ctx, t, rac, s, recvChan, correlationID, deviceID, 99) checkAndValidateUpdate(ctx, t, rac, s, recvChan, correlationID, deviceID, 0) checkAndValidateRetrieve(ctx, t, rac, s, recvChan, correlationID, deviceID) shutdownDevSim() run := true for run { ev := waitForEvent(ctx, t, recvChan) switch { case ev.GetDeviceMetadataUpdated() != nil: check(t, ev, &pb.Event{ SubscriptionId: s.Id(), Type: &pb.Event_DeviceMetadataUpdated{ DeviceMetadataUpdated: &events.DeviceMetadataUpdated{ DeviceId: deviceID, Status: &commands.ConnectionStatus{ Value: commands.ConnectionStatus_OFFLINE, }, AuditContext: commands.NewAuditContext(oauthService.DeviceUserID, ""), }, }, CorrelationId: correlationID, }) case ev.GetDeviceUnregistered() != nil: check(t, ev, &pb.Event{ SubscriptionId: s.Id(), Type: &pb.Event_DeviceUnregistered_{ DeviceUnregistered: &pb.Event_DeviceUnregistered{ DeviceIds: []string{deviceID}, }, }, CorrelationId: correlationID, }) run = false case ctx.Err() != nil: require.NoError(t, err) } } }
explode_data.jsonl/63372
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1482 }
[ 2830, 3393, 1900, 3050, 28573, 1249, 7900, 1155, 353, 8840, 836, 8, 341, 54719, 915, 1669, 1273, 50463, 9885, 6985, 16898, 8623, 8787, 6985, 675, 340, 20985, 11, 9121, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 2193, 73501, 23412, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestABCIPubKey(t *testing.T) { pkBLS := bls12381.GenPrivKey().PubKey() err := testABCIPubKey(t, pkBLS, ABCIPubKeyTypeEd25519) assert.NoError(t, err) }
explode_data.jsonl/65073
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 25411, 3298, 392, 1592, 1155, 353, 8840, 836, 8, 341, 3223, 81524, 7268, 1669, 1501, 82, 16, 17, 18, 23, 16, 65384, 32124, 1592, 1005, 29162, 1592, 741, 9859, 1669, 1273, 25411, 3298, 392, 1592, 1155, 11, 22458, 33, 7268, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestInsertStatementParsing(t *testing.T) { t.Run("Test valid select parsing", func(t *testing.T) { inputs := []string{ "INsert into test values (1,2,3);", } expectedOutputs := []*InsertStatement{ { Table: tokenizer.Token{ Value: "test", Kind: tokenizer.IdentifierKind, }, Values: []*tokenizer.Token{ {Value: "1", Kind: tokenizer.NumericKind}, {Value: "2", Kind: tokenizer.NumericKind}, {Value: "3", Kind: tokenizer.NumericKind}, }, ColumnNames: nil, }, } for testCase := range inputs { tokenList := *tokenizer.ParseTokenSequence(inputs[testCase]) actualResult, err := parseInsertIntoStatement(tokenList) if err != nil { t.Errorf("Parsing failed on set #%d: %v", testCase, err) } if !actualResult.Equals(expectedOutputs[testCase]) { t.Errorf("Assertion failed. Expected: %s, got: %s", actualResult.String(), expectedOutputs[testCase].String()) } } }) // t.Run("Test invalid select parsing", func(t *testing.T) { // inputs := []string{ // "Select 1,b,c from test;", // "INsert into test values (1,2,3);", // "Select from test;", // "Select from test", // "Select a, b, c from", // } // for testCase := range inputs { // tokenList := *tokenizer.ParseTokenSequence(inputs[testCase]) // actualResult, err := parseSelectStatement(tokenList) // if err == nil { // t.Errorf("Expected error on set #%d. Values got: %v", // testCase, actualResult) // } // } // }) }
explode_data.jsonl/55223
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 701 }
[ 2830, 3393, 13780, 8636, 68839, 1155, 353, 8840, 836, 8, 972, 3244, 16708, 445, 2271, 2697, 3293, 22314, 497, 2915, 1155, 353, 8840, 836, 8, 972, 197, 22427, 82, 1669, 3056, 917, 1666, 298, 197, 1, 687, 6047, 1119, 1273, 2750, 320, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInvalidRequest(t *testing.T) { tests := []struct { name string req *safehttp.IncomingRequest wantStatus safehttp.StatusCode wantHeaders map[string][]string wantBody string }{ { name: "Method", req: safehttptest.NewRequest(safehttp.MethodGet, "/collector", nil), wantStatus: safehttp.StatusMethodNotAllowed, wantHeaders: map[string][]string{ "Content-Type": {"text/plain; charset=utf-8"}, "X-Content-Type-Options": {"nosniff"}, }, wantBody: "Method Not Allowed\n", }, { name: "Content-Type", req: func() *safehttp.IncomingRequest { r := safehttptest.NewRequest(safehttp.MethodPost, "/collector", nil) r.Header.Set("Content-Type", "text/plain") return r }(), wantStatus: safehttp.StatusUnsupportedMediaType, wantHeaders: map[string][]string{ "Content-Type": {"text/plain; charset=utf-8"}, "X-Content-Type-Options": {"nosniff"}, }, wantBody: "Unsupported Media Type\n", }, { name: "csp-report, invalid json", req: func() *safehttp.IncomingRequest { r := safehttptest.NewRequest(safehttp.MethodPost, "/collector", strings.NewReader(`{"a:"b"}`)) r.Header.Set("Content-Type", "application/csp-report") return r }(), wantStatus: safehttp.StatusBadRequest, wantHeaders: map[string][]string{ "Content-Type": {"text/plain; charset=utf-8"}, "X-Content-Type-Options": {"nosniff"}, }, wantBody: "Bad Request\n", }, { name: "reports+json, invalid json", req: func() *safehttp.IncomingRequest { r := safehttptest.NewRequest(safehttp.MethodPost, "/collector", strings.NewReader(`[{"a:"b"}]`)) r.Header.Set("Content-Type", "application/reports+json") return r }(), wantStatus: safehttp.StatusBadRequest, wantHeaders: map[string][]string{ "Content-Type": {"text/plain; charset=utf-8"}, "X-Content-Type-Options": {"nosniff"}, }, wantBody: "Bad Request\n", }, { name: "csp-report, valid json, csp-report is not an object", req: func() *safehttp.IncomingRequest { r := safehttptest.NewRequest(safehttp.MethodPost, "/collector", strings.NewReader(`{"csp-report":"b"}`)) r.Header.Set("Content-Type", 
"application/csp-report") return r }(), wantStatus: safehttp.StatusBadRequest, wantHeaders: map[string][]string{ "Content-Type": {"text/plain; charset=utf-8"}, "X-Content-Type-Options": {"nosniff"}, }, wantBody: "Bad Request\n", }, { name: "reports+json, valid json, body is not an object", req: func() *safehttp.IncomingRequest { r := safehttptest.NewRequest(safehttp.MethodPost, "/collector", strings.NewReader(`[{ "type": "xyz", "age": 10, "url": "https://example.com/", "userAgent": "chrome", "body": "not an object" }]`)) r.Header.Set("Content-Type", "application/reports+json") return r }(), wantStatus: safehttp.StatusBadRequest, wantHeaders: map[string][]string{ "Content-Type": {"text/plain; charset=utf-8"}, "X-Content-Type-Options": {"nosniff"}, }, wantBody: "Bad Request\n", }, { name: "Negative uints", req: func() *safehttp.IncomingRequest { r := safehttptest.NewRequest(safehttp.MethodPost, "/collector", strings.NewReader(`{ "csp-report": { "status-code": -1, "lineno": -1, "colno": -1, "line-number": -1, "column-number": -1 } }`)) r.Header.Set("Content-Type", "application/csp-report") return r }(), wantStatus: safehttp.StatusBadRequest, wantHeaders: map[string][]string{ "Content-Type": {"text/plain; charset=utf-8"}, "X-Content-Type-Options": {"nosniff"}, }, wantBody: "Bad Request\n", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { h := collector.Handler(func(r collector.Report) { t.Errorf("expected collector not to be called") }, func(r collector.CSPReport) { t.Errorf("expected collector not to be called") }) fakeRW, rr := safehttptest.NewFakeResponseWriter() h.ServeHTTP(fakeRW, tt.req) if got, want := rr.Code, int(tt.wantStatus); got != want { t.Errorf("rr.Code got: %v want: %v", got, want) } if diff := cmp.Diff(map[string][]string{}, map[string][]string(rr.Header())); diff != "" { t.Errorf("rr.Header() mismatch (-want +got):\n%s", diff) } }) } }
explode_data.jsonl/66397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1951 }
[ 2830, 3393, 7928, 1900, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 24395, 260, 353, 18675, 1254, 5337, 4959, 1900, 198, 197, 50780, 2522, 220, 6092, 1254, 37828, 198, 197, 50780, 10574, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCopyIncremental(t *testing.T) { env, cleanup := withTestEnvironment(t) defer cleanup() env2, cleanup2 := withTestEnvironment(t) defer cleanup2() testSetupBackupData(t, env) opts := BackupOptions{} testRunBackup(t, "", []string{filepath.Join(env.testdata, "0", "0", "9")}, opts, env.gopts) testRunBackup(t, "", []string{filepath.Join(env.testdata, "0", "0", "9", "2")}, opts, env.gopts) testRunCheck(t, env.gopts) testRunInit(t, env2.gopts) testRunCopy(t, env.gopts, env2.gopts) snapshotIDs := testRunList(t, "snapshots", env.gopts) copiedSnapshotIDs := testRunList(t, "snapshots", env2.gopts) // Check that the copies size seems reasonable testRunCheck(t, env2.gopts) rtest.Assert(t, len(snapshotIDs) == len(copiedSnapshotIDs), "expected %v snapshots, found %v", len(snapshotIDs), len(copiedSnapshotIDs)) // check that no snapshots are copied, as there are no new ones testRunCopy(t, env.gopts, env2.gopts) testRunCheck(t, env2.gopts) copiedSnapshotIDs = testRunList(t, "snapshots", env2.gopts) rtest.Assert(t, len(snapshotIDs) == len(copiedSnapshotIDs), "still expected %v snapshots, found %v", len(snapshotIDs), len(copiedSnapshotIDs)) // check that only new snapshots are copied testRunBackup(t, "", []string{filepath.Join(env.testdata, "0", "0", "9", "3")}, opts, env.gopts) testRunCopy(t, env.gopts, env2.gopts) testRunCheck(t, env2.gopts) snapshotIDs = testRunList(t, "snapshots", env.gopts) copiedSnapshotIDs = testRunList(t, "snapshots", env2.gopts) rtest.Assert(t, len(snapshotIDs) == len(copiedSnapshotIDs), "still expected %v snapshots, found %v", len(snapshotIDs), len(copiedSnapshotIDs)) // also test the reverse direction testRunCopy(t, env2.gopts, env.gopts) testRunCheck(t, env.gopts) snapshotIDs = testRunList(t, "snapshots", env.gopts) rtest.Assert(t, len(snapshotIDs) == len(copiedSnapshotIDs), "still expected %v snapshots, found %v", len(copiedSnapshotIDs), len(snapshotIDs)) }
explode_data.jsonl/43551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 778 }
[ 2830, 3393, 12106, 38311, 278, 1155, 353, 8840, 836, 8, 341, 57538, 11, 21290, 1669, 448, 2271, 12723, 1155, 340, 16867, 21290, 741, 57538, 17, 11, 21290, 17, 1669, 448, 2271, 12723, 1155, 340, 16867, 21290, 17, 2822, 18185, 21821, 5624...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetServiceOutput(t *testing.T) { credentialmanager.MockAuthCreds = true checkEndPointStatusMock = true cmd := fmt.Sprintf("get service carts --project=sockshop --output=error --mock") _, err := executeActionCommandC(cmd) if err == nil { t.Error("An error occurred: expect an error due to wrong output format") } }
explode_data.jsonl/50933
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 1949, 1860, 5097, 1155, 353, 8840, 836, 8, 1476, 197, 66799, 13297, 24664, 5087, 34, 53369, 284, 830, 198, 25157, 70218, 2522, 11571, 284, 830, 271, 25920, 1669, 8879, 17305, 445, 455, 2473, 65404, 220, 1177, 4987, 28, 13199...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdateCoreDNSCorefile(t *testing.T) { currentImageTag := "1.6.2" originalCorefile := "some-coredns-core-file" depl := &appsv1.Deployment{ ObjectMeta: v1.ObjectMeta{ Name: coreDNSKey, Namespace: metav1.NamespaceSystem, }, Spec: appsv1.DeploymentSpec{ Template: corev1.PodTemplateSpec{ ObjectMeta: v1.ObjectMeta{ Name: coreDNSKey, }, Spec: corev1.PodSpec{ Containers: []corev1.Container{{ Name: coreDNSKey, Image: "k8s.gcr.io/coredns:" + currentImageTag, }}, Volumes: []corev1.Volume{{ Name: "config-volume", VolumeSource: corev1.VolumeSource{ ConfigMap: &corev1.ConfigMapVolumeSource{ LocalObjectReference: corev1.LocalObjectReference{ Name: coreDNSKey, }, Items: []corev1.KeyToPath{{ Key: "Corefile", Path: "Corefile", }}, }, }, }}, }, }, }, } cm := &corev1.ConfigMap{ ObjectMeta: v1.ObjectMeta{ Name: coreDNSKey, Namespace: metav1.NamespaceSystem, }, Data: map[string]string{ "Corefile": originalCorefile, }, } t.Run("returns error if migrate failed to update corefile", func(t *testing.T) { g := NewWithT(t) objs := []runtime.Object{depl, cm} fakeClient := fake.NewFakeClientWithScheme(scheme.Scheme, objs...) 
fakeMigrator := &fakeMigrator{ migrateErr: errors.New("failed to migrate"), } w := &Workload{ Client: fakeClient, CoreDNSMigrator: fakeMigrator, } info := &coreDNSInfo{ Corefile: "updated-core-file", Deployment: depl, CurrentMajorMinorPatch: "1.6.2", TargetMajorMinorPatch: "1.7.2", } err := w.updateCoreDNSCorefile(context.TODO(), info) g.Expect(err).To(HaveOccurred()) g.Expect(fakeMigrator.migrateCalled).To(BeTrue()) var expectedConfigMap corev1.ConfigMap g.Expect(fakeClient.Get(context.TODO(), ctrlclient.ObjectKey{Name: coreDNSKey, Namespace: metav1.NamespaceSystem}, &expectedConfigMap)).To(Succeed()) g.Expect(expectedConfigMap.Data).To(HaveLen(1)) g.Expect(expectedConfigMap.Data).To(HaveKeyWithValue("Corefile", originalCorefile)) }) t.Run("creates a backup of the corefile", func(t *testing.T) { g := NewWithT(t) // Not including the deployment so as to fail early and verify that // the intermediate config map update occurred objs := []runtime.Object{cm} fakeClient := fake.NewFakeClientWithScheme(scheme.Scheme, objs...) 
fakeMigrator := &fakeMigrator{ migratedCorefile: "updated-core-file", } w := &Workload{ Client: fakeClient, CoreDNSMigrator: fakeMigrator, } info := &coreDNSInfo{ Corefile: originalCorefile, Deployment: depl, CurrentMajorMinorPatch: currentImageTag, TargetMajorMinorPatch: "1.7.2", } err := w.updateCoreDNSCorefile(context.TODO(), info) g.Expect(err).To(HaveOccurred()) var expectedConfigMap corev1.ConfigMap g.Expect(fakeClient.Get(context.TODO(), ctrlclient.ObjectKey{Name: coreDNSKey, Namespace: metav1.NamespaceSystem}, &expectedConfigMap)).To(Succeed()) g.Expect(expectedConfigMap.Data).To(HaveLen(2)) g.Expect(expectedConfigMap.Data).To(HaveKeyWithValue("Corefile", originalCorefile)) g.Expect(expectedConfigMap.Data).To(HaveKeyWithValue("Corefile-backup", originalCorefile)) }) t.Run("patches the core dns deployment to point to the backup corefile before migration", func(t *testing.T) { g := NewWithT(t) objs := []runtime.Object{depl, cm} fakeClient := fake.NewFakeClientWithScheme(scheme.Scheme, objs...) 
fakeMigrator := &fakeMigrator{ migratedCorefile: "updated-core-file", } w := &Workload{ Client: fakeClient, CoreDNSMigrator: fakeMigrator, } info := &coreDNSInfo{ Corefile: originalCorefile, Deployment: depl, CurrentMajorMinorPatch: currentImageTag, TargetMajorMinorPatch: "1.7.2", } err := w.updateCoreDNSCorefile(context.TODO(), info) g.Expect(err).ToNot(HaveOccurred()) expectedVolume := corev1.Volume{ Name: coreDNSVolumeKey, VolumeSource: corev1.VolumeSource{ ConfigMap: &corev1.ConfigMapVolumeSource{ LocalObjectReference: corev1.LocalObjectReference{ Name: coreDNSKey, }, Items: []corev1.KeyToPath{{ Key: "Corefile-backup", Path: "Corefile", }}, }, }, } var actualDeployment appsv1.Deployment g.Expect(fakeClient.Get(context.TODO(), ctrlclient.ObjectKey{Name: coreDNSKey, Namespace: metav1.NamespaceSystem}, &actualDeployment)).To(Succeed()) g.Expect(actualDeployment.Spec.Template.Spec.Volumes).To(ConsistOf(expectedVolume)) var expectedConfigMap corev1.ConfigMap g.Expect(fakeClient.Get(context.TODO(), ctrlclient.ObjectKey{Name: coreDNSKey, Namespace: metav1.NamespaceSystem}, &expectedConfigMap)).To(Succeed()) g.Expect(expectedConfigMap.Data).To(HaveLen(2)) g.Expect(expectedConfigMap.Data).To(HaveKeyWithValue("Corefile", "updated-core-file")) g.Expect(expectedConfigMap.Data).To(HaveKeyWithValue("Corefile-backup", originalCorefile)) }) }
explode_data.jsonl/22074
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2168 }
[ 2830, 3393, 4289, 5386, 61088, 5386, 1192, 1155, 353, 8840, 836, 8, 341, 20121, 1906, 5668, 1669, 330, 16, 13, 21, 13, 17, 698, 197, 9889, 5386, 1192, 1669, 330, 14689, 1786, 3018, 4412, 23460, 14203, 698, 58351, 500, 1669, 609, 676, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoneAssignmentsGetAssignments(t *testing.T) { a := assert.New(t) service := assignFleetServiceMock{} assignments := []assignmentMock{} service.On("GetAssignments", mock.Anything, mock.Anything).Return(assignments, nil) app := app.Application{ Services: app.Services{ AssignFleet: &service, }, } grpc := NewGrpcServer(app) request := &skysign_proto.GetAssignmentsRequest{ Id: DefaultFleetID, } response, err := grpc.GetAssignments( nil, request, ) expectResponse := &skysign_proto.GetAssignmentsResponse{ Id: DefaultFleetID, } a.Nil(err) a.Equal(response, expectResponse) }
explode_data.jsonl/58072
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 4064, 28933, 1368, 1949, 28933, 1368, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 692, 52934, 1669, 9793, 37, 18973, 1860, 11571, 31483, 197, 96310, 1669, 3056, 29951, 11571, 31483, 52934, 8071, 445, 1949, 289...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRomanNumerals(t *testing.T) { for _, test := range cases { t.Run(fmt.Sprintf("%d gets converted to %v", test.Arabic, test.Roman), func(t *testing.T) { got := ConvertToRoman(test.Arabic) want := test.Roman if got != want { t.Errorf("got %v, want %v", got, want) } }) } }
explode_data.jsonl/27664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 60980, 67737, 1127, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 5048, 341, 197, 3244, 16708, 28197, 17305, 4430, 67, 5221, 16099, 311, 1018, 85, 497, 1273, 875, 50105, 292, 11, 1273, 2013, 6908, 701, 2915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestControlPlaneNotAllowedInOperatorNamespace(t *testing.T) { test.PanicOnError(os.Setenv("POD_NAMESPACE", "openshift-operators")) // TODO: make it easier to set the namespace in tests controlPlane := newControlPlaneWithVersion("my-smcp", "openshift-operators", versions.V2_2.String()) validator, _, _ := createControlPlaneValidatorTestFixture() response := validator.Handle(ctx, createCreateRequest(controlPlane)) assert.False(response.Allowed, "Expected validator to reject ServiceMeshControlPlane in operator's namespace", t) }
explode_data.jsonl/10233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 3273, 34570, 97634, 641, 18461, 22699, 1155, 353, 8840, 836, 8, 341, 18185, 1069, 31270, 74945, 9638, 4202, 3160, 445, 2045, 35, 34552, 497, 330, 24175, 47833, 12, 32838, 2761, 442, 5343, 25, 1281, 432, 8661, 311, 738, 279, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIssue3(t *testing.T) { // https://github.com/microcosm-cc/bluemonday/issues/3 p := UGCPolicy() p.AllowStyling() tests := []test{ { in: `Hello <span class="foo bar bash">there</span> world.`, expected: `Hello <span class="foo bar bash">there</span> world.`, }, { in: `Hello <span class="javascript:alert(123)">there</span> world.`, expected: `Hello <span>there</span> world.`, }, { in: `Hello <span class="><script src="http://hackers.org/XSS.js"></script>">there</span> world.`, expected: `Hello <span>&#34;&gt;there</span> world.`, }, { in: `Hello <span class="><script src='http://hackers.org/XSS.js'></script>">there</span> world.`, expected: `Hello <span>there</span> world.`, }, } wg := sync.WaitGroup{} wg.Add(len(tests)) for ii, tt := range tests { go func(ii int, tt test) { out := p.Sanitize(tt.in) if out != tt.expected { t.Errorf( "test %d failed;\ninput : %s\noutput : %s\nexpected: %s", ii, tt.in, out, tt.expected, ) } wg.Done() }(ii, tt) } wg.Wait() }
explode_data.jsonl/28798
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 524 }
[ 2830, 3393, 42006, 18, 1155, 353, 8840, 836, 8, 341, 197, 322, 3703, 1110, 5204, 905, 3183, 2754, 9407, 76, 12, 638, 89193, 84, 7291, 1292, 38745, 14, 18, 271, 3223, 1669, 547, 38, 7123, 8018, 741, 3223, 29081, 623, 98607, 2822, 782...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRouterNestedGroupMatch(t *testing.T) { // Create empty handler h := new(Handler) // Create empty context c := new(Context) c.Params = Params{} // Create groups l1 := RouteGroup("/level1") l2 := RouteGroup("/level2") l3 := RouteGroup("/level3") // Add one route l3.Add("/test/:param", h) // Neste into: // - /level1/level2/level3/test/:param // - /level2/level3/test/:param // - /level3/test/:param l2.AddGroup(l3) l1.AddGroup(l2) // Level 3 matching routes rs := []string{"/level3/test/test", "/level3/test/:param/"} // Check for _, s := range rs { if !l3.Match(s, c) { t.Errorf("'%s' should match", s) } } // Level 2 matching routes rs = []string{"/level2/level3/test/test", "/level2/level3/test/:param/"} // Check for _, s := range rs { if !l2.Match(s, c) { t.Errorf("'%s' should match", s) } } // Level 1 matching routes rs = []string{"/level1/level2/level3/test/test", "/level1/level2/level3/test/:param/"} // Check for _, s := range rs { if !l1.Match(s, c) { t.Errorf("'%s' should match", s) } } }
explode_data.jsonl/35814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 469 }
[ 2830, 3393, 9523, 71986, 2808, 8331, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 4287, 7013, 198, 9598, 1669, 501, 7, 3050, 692, 197, 322, 4230, 4287, 2266, 198, 1444, 1669, 501, 14001, 340, 1444, 58268, 284, 34352, 31483, 197, 322,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func Test(t *testing.T) { m1 := map[int]int{1:1, 2:4, 3:9} t.Log(m1[2], len(m1)) m2 := map[int]int{} t.Log(len(m2)) m2[4] = 17 t.Log(len(m2)) m3 := make(map[int]int, 10) t.Log(len(m3)) }
explode_data.jsonl/30660
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 1155, 353, 8840, 836, 8, 341, 2109, 16, 1669, 2415, 18640, 63025, 90, 16, 25, 16, 11, 220, 17, 25, 19, 11, 220, 18, 25, 24, 532, 3244, 5247, 1255, 16, 58, 17, 1125, 2422, 1255, 16, 1171, 2109, 17, 1669, 2415, 18640, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_subtractInitialRestartCounts(t *testing.T) { tests := []struct { name string metrics *systemPodsMetrics initMetrics *systemPodsMetrics want *systemPodsMetrics }{ { name: "same-pods-and-containers", metrics: generatePodMetrics("p1", "c1", 5), initMetrics: generatePodMetrics("p1", "c1", 4), want: generatePodMetrics("p1", "c1", 1), }, { name: "different-container-names", metrics: generatePodMetrics("p1", "c1", 5), initMetrics: generatePodMetrics("p1", "c2", 4), want: generatePodMetrics("p1", "c1", 5), }, { name: "different-pod-names", metrics: generatePodMetrics("p1", "c1", 5), initMetrics: generatePodMetrics("p2", "c1", 4), want: generatePodMetrics("p1", "c1", 5), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { subtractInitialRestartCounts(tt.metrics, tt.initMetrics) if !reflect.DeepEqual(*tt.metrics, *tt.want) { t.Errorf("want %v, got %v", *tt.want, *tt.metrics) } }) } }
explode_data.jsonl/18530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 520 }
[ 2830, 3393, 5228, 2144, 6341, 59354, 63731, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 2109, 13468, 257, 353, 8948, 23527, 82, 27328, 198, 197, 28248, 27328, 353, 8948, 23527, 82, 27328, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCacheConcurrency(t *testing.T) { // Clear out anything in the existing cache encoderCache.m = nil structCache.m = nil var wg sync.WaitGroup n := 10 val := &testStruct{String: "123"} for i := 0; i < n; i++ { wg.Add(1) go func() { form := &Values{} AppendTo(form, val) wg.Done() }() } wg.Wait() }
explode_data.jsonl/30326
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 8233, 79611, 1155, 353, 8840, 836, 8, 341, 197, 322, 12023, 700, 4113, 304, 279, 6350, 6500, 198, 197, 27008, 8233, 744, 284, 2092, 198, 6472, 8233, 744, 284, 2092, 271, 2405, 63581, 12811, 28384, 2808, 198, 9038, 1669, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAccDataSourceGoogleSourceRepoRepository_basic(t *testing.T) { t.Parallel() name := "tf-repository-" + randString(t, 10) vcrTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckSourceRepoRepositoryDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccDataSourceGoogleSourceRepoRepositoryConfig(name), Check: resource.ComposeTestCheckFunc( checkDataSourceStateMatchesResourceState("data.google_sourcerepo_repository.bar", "google_sourcerepo_repository.foo"), ), }, }, }) }
explode_data.jsonl/71711
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 14603, 17173, 14444, 3608, 25243, 4624, 34729, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 11609, 1669, 330, 8935, 5504, 3099, 27651, 488, 10382, 703, 1155, 11, 220, 16, 15, 692, 5195, 5082, 2271, 1155, 11, 5101, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewFsSource_WrongDirectory(t *testing.T) { fsys := os.DirFS("test_nodir") _, err := NewFsSource(fsys, "sample-migrations") if err == nil { t.Fatal("expected an error") } }
explode_data.jsonl/81878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 3564, 48300, 3608, 2763, 14347, 9310, 1155, 353, 8840, 836, 8, 341, 1166, 7791, 1669, 2643, 83757, 8485, 445, 1944, 1089, 347, 404, 1138, 197, 6878, 1848, 1669, 1532, 48300, 3608, 955, 7791, 11, 330, 13611, 1448, 17824, 1138...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestValid(t *testing.T) { config := New() if config.Valid() { t.Fatal("empty config shouldn't be valid") } // Certs and no keys is an invalid config. config.Server.CertPaths = "testdata/server.pem" if config.Valid() { t.Fatal("config shouldn't be valid") } // Keys and no certs is an invalid config. config.Server.CertPaths = "" config.Server.KeyPaths = "testdata/server.key" if config.Valid() { t.Fatal("config shouldn't be valid") } // Key pairs but no address information is an invalid config. config.Server.CertPaths = "testdata/server.pem" if config.Valid() { t.Fatal("config shouldn't be valid") } config.Server.Addr = "localhost:8080" if !config.Valid() { t.Fatal("config should be valid") } config.Server.Addr = "" config.Server.Systemd = true if !config.Valid() { t.Fatal("config should be valid") } }
explode_data.jsonl/57696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 4088, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 1532, 2822, 743, 2193, 47156, 368, 341, 197, 3244, 26133, 445, 3194, 2193, 13133, 944, 387, 2697, 1138, 197, 630, 197, 322, 15304, 82, 323, 902, 6894, 374, 458, 8318, 2193, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestHybiControlFrame(t *testing.T) { frameHeader := &hybiFrameHeader{Fin: true, OpCode: PingFrame} payload := []byte("hello") testHybiFrame(t, []byte{0x89, 0x05}, payload, payload, frameHeader) frameHeader = &hybiFrameHeader{Fin: true, OpCode: PongFrame} testHybiFrame(t, []byte{0x8A, 0x05}, payload, payload, frameHeader) frameHeader = &hybiFrameHeader{Fin: true, OpCode: CloseFrame} payload = []byte{0x03, 0xe8} // 1000 testHybiFrame(t, []byte{0x88, 0x02}, payload, payload, frameHeader) }
explode_data.jsonl/53443
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 30816, 8221, 3273, 4369, 1155, 353, 8840, 836, 8, 341, 34465, 4047, 1669, 609, 8503, 8221, 4369, 4047, 90, 9134, 25, 830, 11, 97779, 25, 48657, 4369, 532, 76272, 1669, 3056, 3782, 445, 14990, 1138, 18185, 30816, 8221, 4369, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPerfReader(t *testing.T) { prog, events := mustOutputSamplesProg(t, 5) defer prog.Close() defer events.Close() rd, err := NewReader(events, 4096) if err != nil { t.Fatal(err) } defer rd.Close() ret, _, err := prog.Test(make([]byte, 14)) testutils.SkipIfNotSupported(t, err) if err != nil { t.Fatal(err) } if errno := syscall.Errno(-int32(ret)); errno != 0 { t.Fatal("Expected 0 as return value, got", errno) } record, err := rd.Read() if err != nil { t.Fatal("Can't read samples:", err) } want := []byte{1, 2, 3, 4, 4, 0, 0, 0, 0, 0, 0, 0} if !bytes.Equal(record.RawSample, want) { t.Log(record.RawSample) t.Error("Sample doesn't match expected output") } if record.CPU < 0 { t.Error("Record has invalid CPU number") } }
explode_data.jsonl/18692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 3889, 69, 5062, 1155, 353, 8840, 836, 8, 341, 197, 32992, 11, 4357, 1669, 1969, 5097, 39571, 89535, 1155, 11, 220, 20, 340, 16867, 29271, 10421, 741, 16867, 4357, 10421, 2822, 92356, 11, 1848, 1669, 1532, 5062, 50496, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestUAccountIncomeHistory(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() { t.Skip("skipping test: api keys not set") } _, err := b.UAccountIncomeHistory(context.Background(), currency.EMPTYPAIR, "", 5, time.Now().Add(-time.Hour*48), time.Now()) if err != nil { t.Error(err) } }
explode_data.jsonl/76588
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 52, 7365, 58898, 13424, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 25, 6330, 6894, 537, 738, 1138, 197, 532, 197, 6878, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFmtTimeFull(t *testing.T) { // loc, err := time.LoadLocation("America/Toronto") // if err != nil { // t.Errorf("Expected '<nil>' Got '%s'", err) // } // fixed := time.FixedZone("OTHER", -4) tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, loc), // expected: "9:05:01 am Eastern Standard Time", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, fixed), // expected: "8:05:01 pm OTHER", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }
explode_data.jsonl/1289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 321 }
[ 2830, 3393, 93322, 1462, 9432, 1155, 353, 8840, 836, 8, 1476, 197, 322, 1329, 11, 1848, 1669, 882, 13969, 4707, 445, 31032, 16731, 269, 9935, 1138, 197, 322, 421, 1848, 961, 2092, 341, 197, 322, 220, 3244, 13080, 445, 18896, 3857, 838...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCheck(t *testing.T) { onecrl := loadRevokedList(t) revoked := parseCertPEM(t) entry := onecrl.Check(revoked) if entry == nil { // this should provide an entry, since cert is revoked and in the provided sst file t.Fail() } if entry.SerialNumber.Cmp(revoked.SerialNumber) != 0 { t.Fail() } }
explode_data.jsonl/82367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 3973, 1155, 353, 8840, 836, 8, 341, 197, 603, 66, 2381, 1669, 2795, 36184, 10823, 852, 1155, 340, 197, 7282, 10823, 1669, 4715, 36934, 1740, 44, 1155, 340, 48344, 1669, 389, 757, 2381, 10600, 5801, 85, 10823, 340, 743, 434...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestListScalesets(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() manager := newTestAzureManager(t) vmssTag := "fake-tag" vmssTagValue := "fake-value" vmssName := "test-vmss" ngdo := cloudprovider.NodeGroupDiscoveryOptions{ NodeGroupAutoDiscoverySpecs: []string{fmt.Sprintf("label:%s=%s", vmssTag, vmssTagValue)}, } specs, err := parseLabelAutoDiscoverySpecs(ngdo) assert.NoError(t, err) testCases := []struct { name string specs map[string]string isListVMSSFail bool expected []cloudprovider.NodeGroup expectedErrString string }{ { name: "ValidMinMax", specs: map[string]string{"min": "5", "max": "50"}, expected: []cloudprovider.NodeGroup{&ScaleSet{ azureRef: azureRef{ Name: vmssName, }, minSize: 5, maxSize: 50, manager: manager, curSize: -1, sizeRefreshPeriod: defaultVmssSizeRefreshPeriod, }}, }, { name: "InvalidMin", specs: map[string]string{"min": "some-invalid-string"}, expectedErrString: "invalid minimum size specified for vmss:", }, { name: "NoMin", specs: map[string]string{"max": "50"}, expectedErrString: fmt.Sprintf("no minimum size specified for vmss: %s", vmssName), }, { name: "InvalidMax", specs: map[string]string{"min": "5", "max": "some-invalid-string"}, expectedErrString: "invalid maximum size specified for vmss:", }, { name: "NoMax", specs: map[string]string{"min": "5"}, expectedErrString: fmt.Sprintf("no maximum size specified for vmss: %s", vmssName), }, { name: "MinLessThanZero", specs: map[string]string{"min": "-4", "max": "20"}, expectedErrString: fmt.Sprintf("minimum size must be a non-negative number of nodes"), }, { name: "MinGreaterThanMax", specs: map[string]string{"min": "50", "max": "5"}, expectedErrString: "maximum size must be greater than minimum size", }, { name: "ListVMSSFail", specs: map[string]string{"min": "5", "max": "50"}, isListVMSSFail: true, expectedErrString: "List VMSS failed", }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { tags := make(map[string]*string) tags[vmssTag] = 
&vmssTagValue if val, ok := tc.specs["min"]; ok { tags["min"] = &val } if val, ok := tc.specs["max"]; ok { tags["max"] = &val } expectedScaleSets := []compute.VirtualMachineScaleSet{fakeVMSSWithTags(vmssName, tags)} mockVMSSClient := mockvmssclient.NewMockInterface(ctrl) if tc.isListVMSSFail { mockVMSSClient.EXPECT().List(gomock.Any(), manager.config.ResourceGroup).Return(nil, &retry.Error{RawError: fmt.Errorf("List VMSS failed")}).AnyTimes() } else { mockVMSSClient.EXPECT().List(gomock.Any(), manager.config.ResourceGroup).Return(expectedScaleSets, nil).AnyTimes() } manager.azClient.virtualMachineScaleSetsClient = mockVMSSClient asgs, err := manager.listScaleSets(specs) if tc.expectedErrString != "" { assert.Error(t, err) assert.Contains(t, err.Error(), tc.expectedErrString) return } assert.NoError(t, err) assert.True(t, assert.ObjectsAreEqualValues(tc.expected, asgs), "expected %#v, but found: %#v", tc.expected, asgs) }) } }
explode_data.jsonl/12794
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1582 }
[ 2830, 3393, 852, 50, 30196, 1415, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 92272, 1669, 501, 2271, 78107, 2043, 1155, 340, 54879, 778, 5668, 1669, 330, 30570, 3820...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestReadBytes(t *testing.T) { for _, tt := range readBytesTests { buff := bytes.NewBuffer(tt.in) actual := readBytes(buff, tt.len) if !bytes.Equal(actual, tt.expected) { t.Errorf("actual %#v\nwant %#v", actual, tt.expected) } } }
explode_data.jsonl/10723
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 4418, 7078, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 1349, 7078, 18200, 341, 197, 85696, 1669, 5820, 7121, 4095, 47152, 1858, 340, 197, 88814, 1669, 1349, 7078, 39729, 11, 17853, 19406, 340, 197, 743, 753...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPodControllerLookup(t *testing.T) { manager := NewReplicationManager(clientset.NewForConfigOrDie(&restclient.Config{Host: "", ContentConfig: restclient.ContentConfig{GroupVersion: testapi.Default.GroupVersion()}}), controller.NoResyncPeriodFunc, BurstReplicas, 0) manager.podStoreSynced = alwaysReady testCases := []struct { inRCs []*api.ReplicationController pod *api.Pod outRCName string }{ // pods without labels don't match any rcs { inRCs: []*api.ReplicationController{ {ObjectMeta: api.ObjectMeta{Name: "basic"}}}, pod: &api.Pod{ObjectMeta: api.ObjectMeta{Name: "foo1", Namespace: api.NamespaceAll}}, outRCName: "", }, // Matching labels, not namespace { inRCs: []*api.ReplicationController{ { ObjectMeta: api.ObjectMeta{Name: "foo"}, Spec: api.ReplicationControllerSpec{ Selector: map[string]string{"foo": "bar"}, }, }, }, pod: &api.Pod{ ObjectMeta: api.ObjectMeta{ Name: "foo2", Namespace: "ns", Labels: map[string]string{"foo": "bar"}}}, outRCName: "", }, // Matching ns and labels returns the key to the rc, not the rc name { inRCs: []*api.ReplicationController{ { ObjectMeta: api.ObjectMeta{Name: "bar", Namespace: "ns"}, Spec: api.ReplicationControllerSpec{ Selector: map[string]string{"foo": "bar"}, }, }, }, pod: &api.Pod{ ObjectMeta: api.ObjectMeta{ Name: "foo3", Namespace: "ns", Labels: map[string]string{"foo": "bar"}}}, outRCName: "bar", }, } for _, c := range testCases { for _, r := range c.inRCs { manager.rcStore.Add(r) } if rc := manager.getPodController(c.pod); rc != nil { if c.outRCName != rc.Name { t.Errorf("Got controller %+v expected %+v", rc.Name, c.outRCName) } } else if c.outRCName != "" { t.Errorf("Expected a controller %v pod %v, found none", c.outRCName, c.pod.Name) } } }
explode_data.jsonl/37487
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 803 }
[ 2830, 3393, 23527, 2051, 34247, 1155, 353, 8840, 836, 8, 341, 92272, 1669, 1532, 18327, 1693, 2043, 12805, 746, 7121, 2461, 2648, 2195, 18175, 2099, 3927, 2972, 10753, 90, 9296, 25, 7342, 8883, 2648, 25, 2732, 2972, 12614, 2648, 90, 280...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestReceiveEventsAndMetadata(t *testing.T) { expectedMessages := generateMessages(5, 100) largeMessages := generateMessages(10, 4096) extraLargeMessages := generateMessages(2, 65*1024) randomGeneratedText := randomString(900000) tests := []struct { name string cfg map[string]interface{} splitFunc bufio.SplitFunc expectedMessages []string messageSent string }{ { name: "NewLine", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte("\n")), expectedMessages: expectedMessages, messageSent: strings.Join(expectedMessages, "\n"), }, { name: "NewLineWithCR", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte("\r\n")), expectedMessages: expectedMessages, messageSent: strings.Join(expectedMessages, "\r\n"), }, { name: "CustomDelimiter", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte(";")), expectedMessages: expectedMessages, messageSent: strings.Join(expectedMessages, ";"), }, { name: "MultipleCharsCustomDelimiter", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte("<END>")), expectedMessages: expectedMessages, messageSent: strings.Join(expectedMessages, "<END>"), }, { name: "SingleCharCustomDelimiterMessageWithoutBoundaries", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte(";")), expectedMessages: []string{"hello"}, messageSent: "hello", }, { name: "MultipleCharCustomDelimiterMessageWithoutBoundaries", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte("<END>")), expectedMessages: []string{"hello"}, messageSent: "hello", }, { name: "NewLineMessageWithoutBoundaries", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte("\n")), expectedMessages: []string{"hello"}, messageSent: "hello", }, { name: "NewLineLargeMessagePayload", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte("\n")), expectedMessages: largeMessages, messageSent: strings.Join(largeMessages, "\n"), }, { name: "CustomLargeMessagePayload", cfg: 
map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte(";")), expectedMessages: largeMessages, messageSent: strings.Join(largeMessages, ";"), }, { name: "ReadRandomLargePayload", cfg: map[string]interface{}{}, splitFunc: netcommon.SplitFunc([]byte("\n")), expectedMessages: []string{randomGeneratedText}, messageSent: randomGeneratedText, }, { name: "MaxReadBufferReachedUserConfigured", splitFunc: netcommon.SplitFunc([]byte("\n")), cfg: map[string]interface{}{ "max_message_size": 50000, }, expectedMessages: []string{}, messageSent: randomGeneratedText, }, { name: "MaxBufferSizeSet", splitFunc: netcommon.SplitFunc([]byte("\n")), cfg: map[string]interface{}{ "max_message_size": 66 * 1024, }, expectedMessages: extraLargeMessages, messageSent: strings.Join(extraLargeMessages, "\n"), }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { ch := make(chan *info, len(test.expectedMessages)) defer close(ch) to := func(message []byte, mt inputsource.NetworkMetadata) { ch <- &info{message: string(message), mt: mt} } test.cfg["host"] = "localhost:0" cfg, _ := common.NewConfigFrom(test.cfg) config := defaultConfig err := cfg.Unpack(&config) if !assert.NoError(t, err) { return } factory := netcommon.SplitHandlerFactory(netcommon.FamilyTCP, logp.NewLogger("test"), MetadataCallback, to, test.splitFunc) server, err := New(&config, factory) if !assert.NoError(t, err) { return } err = server.Start() if !assert.NoError(t, err) { return } defer server.Stop() conn, err := net.Dial("tcp", server.Listener.Listener.Addr().String()) require.NoError(t, err) fmt.Fprint(conn, test.messageSent) conn.Close() var events []*info for len(events) < len(test.expectedMessages) { select { case event := <-ch: events = append(events, event) default: } } for idx, e := range events { assert.Equal(t, test.expectedMessages[idx], e.message) assert.NotNil(t, e.mt.RemoteAddr) } }) } }
explode_data.jsonl/78877
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2161 }
[ 2830, 3393, 14742, 7900, 3036, 14610, 1155, 353, 8840, 836, 8, 341, 42400, 15820, 1669, 6923, 15820, 7, 20, 11, 220, 16, 15, 15, 340, 8810, 2744, 15820, 1669, 6923, 15820, 7, 16, 15, 11, 220, 19, 15, 24, 21, 340, 8122, 2172, 34253...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStoreToMinionLister(t *testing.T) { store := NewStore(MetaNamespaceKeyFunc) ids := sets.NewString("foo", "bar", "baz") for id := range ids { store.Add(&api.Node{ObjectMeta: api.ObjectMeta{Name: id}}) } sml := StoreToNodeLister{store} gotNodes, err := sml.List() if err != nil { t.Fatalf("Unexpected error: %v", err) } got := make([]string, len(gotNodes.Items)) for ix := range gotNodes.Items { got[ix] = gotNodes.Items[ix].Name } if !ids.HasAll(got...) || len(got) != len(ids) { t.Errorf("Expected %v, got %v", ids, got) } }
explode_data.jsonl/38225
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 6093, 1249, 6217, 290, 852, 261, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 6093, 3189, 1915, 22699, 1592, 9626, 340, 197, 3365, 1669, 7289, 7121, 703, 445, 7975, 497, 330, 2257, 497, 330, 42573, 1138, 2023, 877, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetMcLinks(t *testing.T) { mcLink := link.Link{ TypeMeta: metav1.TypeMeta{ APIVersion: link.SchemeGroupVersion.Identifier(), Kind: l5dk8s.LinkKind, }, ObjectMeta: metav1.ObjectMeta{ Name: "linkname", Namespace: "linkns", }, Spec: link.LinkSpec{ TargetClusterName: "tcn", TargetClusterDomain: "tcd", TargetClusterLinkerdNamespace: "tcln", ClusterCredentialsSecret: "ccs", GatewayAddress: "ga", GatewayPort: "555", GatewayIdentity: "identity", ProbeSpec: link.ProbeSpec{ Path: "pth", Port: "80", Period: "8s", }, Selector: *metav1.SetAsLabelSelector(labels.Set(map[string]string{"l": "v"})), }, } client := fakeClient(&mcLink) result, err := client.GetMulticlusterLinks(context.Background()) if err != nil { t.Error(err) } var buf bytes.Buffer jsonSerializer := scheme.DefaultJSONEncoder() if err := jsonSerializer.Encode(&mcLink, &buf); err != nil { t.Error(err) } expected := buf.String() actual := string(result[0].MulticlusterLink) if expected != actual { t.Fatalf("exepected %s, got %s", expected, actual) } }
explode_data.jsonl/8388
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 553 }
[ 2830, 3393, 1949, 25286, 24089, 1155, 353, 8840, 836, 8, 341, 97662, 3939, 1669, 2656, 22534, 515, 197, 27725, 12175, 25, 77520, 16, 10184, 12175, 515, 298, 197, 7082, 5637, 25, 2656, 92719, 2808, 5637, 70520, 3148, 298, 197, 10629, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestWriteTheLineInTheFile(t *testing.T) { logFile := outputs.NewSimpleLogFileWithFolderPath("./", "test2") _, writeErr := logFile.Write([]byte("Dumb log line")) if writeErr != nil { t.Errorf("Could not write in the log file : %v", writeErr) } content, readErr := ioutil.ReadFile(logFile.Path()) if readErr != nil { t.Errorf("Could not read the log file : %v", readErr) } if string(content) != "Dumb log line\n" { t.Errorf("The log file content does not match with what was written: %v", string(content)) } removeFile(logFile.Path()) }
explode_data.jsonl/22139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 7985, 785, 2460, 86093, 1703, 1155, 353, 8840, 836, 8, 341, 6725, 1703, 1669, 16275, 7121, 16374, 98857, 2354, 90597, 13988, 497, 330, 1944, 17, 5130, 197, 6878, 3270, 7747, 1669, 1487, 1703, 4073, 10556, 3782, 445, 35, 3551...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSocketAddress(t *testing.T) { const ( addr = "foo.example.com" port = 8123 ) got := SocketAddress(addr, port) want := &envoy_api_v2_core.Address{ Address: &envoy_api_v2_core.Address_SocketAddress{ SocketAddress: &envoy_api_v2_core.SocketAddress{ Protocol: envoy_api_v2_core.SocketAddress_TCP, Address: addr, PortSpecifier: &envoy_api_v2_core.SocketAddress_PortValue{ PortValue: port, }, }, }, } if diff := cmp.Diff(want, got); diff != "" { t.Fatal(diff) } got = SocketAddress("::", port) want = &envoy_api_v2_core.Address{ Address: &envoy_api_v2_core.Address_SocketAddress{ SocketAddress: &envoy_api_v2_core.SocketAddress{ Protocol: envoy_api_v2_core.SocketAddress_TCP, Address: "::", Ipv4Compat: true, // Set only for ipv6-any "::" PortSpecifier: &envoy_api_v2_core.SocketAddress_PortValue{ PortValue: port, }, }, }, } assert.Equal(t, want, got) }
explode_data.jsonl/74894
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 431 }
[ 2830, 3393, 69455, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 53183, 284, 330, 7975, 7724, 905, 698, 197, 52257, 284, 220, 23, 16, 17, 18, 198, 197, 692, 3174, 354, 1669, 20954, 4286, 24497, 11, 2635, 340, 50780, 1669, 609, 3160...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStandardizeWeight(t *testing.T) { tests := []struct { name string input []int output []int }{ {"single", []int{1}, []int{0}}, {"double", []int{1, 1}, []int{50, 50}}, {"zero", []int{1, 0}, []int{100, 0}}, {"all zero", []int{0, 0}, []int{50, 50}}, {"overflow", []int{1, 1, 1}, []int{34, 33, 33}}, {"skewed", []int{9, 1}, []int{90, 10}}, {"multiple overflow", []int{1, 1, 1, 1, 1, 1}, []int{17, 17, 17, 17, 16, 16}}, {"skewed overflow", []int{1, 1, 1, 3}, []int{17, 17, 16, 50}}, {"skewed overflow 2", []int{1, 1, 1, 1, 2}, []int{17, 17, 17, 16, 33}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := standardizeWeights(tt.input) if !reflect.DeepEqual(tt.output, got) { t.Errorf("standardizeWeights() = %v, want %v", got, tt.output) } if len(tt.output) > 1 && intSum(tt.output) != 100 { t.Errorf("invalid weights, should sum to 100: %v", got) } }) } }
explode_data.jsonl/73941
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 445 }
[ 2830, 3393, 19781, 551, 8295, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 22427, 220, 3056, 396, 198, 197, 21170, 3056, 396, 198, 197, 59403, 197, 197, 4913, 15338, 497, 3056, 396, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNewGarbageCollector(t *testing.T) { config := &restclient.Config{} config.ContentConfig.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: metaonly.NewMetadataCodecFactory()} metaOnlyClientPool := dynamic.NewClientPool(config, api.Registry.RESTMapper(), dynamic.LegacyAPIPathResolverFunc) config.ContentConfig.NegotiatedSerializer = nil clientPool := dynamic.NewClientPool(config, api.Registry.RESTMapper(), dynamic.LegacyAPIPathResolverFunc) podResource := map[schema.GroupVersionResource]struct{}{ {Version: "v1", Resource: "pods"}: {}, // no monitor will be constructed for non-core resource, the GC construction will not fail. {Group: "tpr.io", Version: "v1", Resource: "unknown"}: {}, } client := fake.NewSimpleClientset() sharedInformers := informers.NewSharedInformerFactory(client, 0) alwaysStarted := make(chan struct{}) close(alwaysStarted) gc, err := NewGarbageCollector(metaOnlyClientPool, clientPool, api.Registry.RESTMapper(), podResource, ignoredResources, sharedInformers, alwaysStarted) if err != nil { t.Fatal(err) } assert.Equal(t, 1, len(gc.dependencyGraphBuilder.monitors)) }
explode_data.jsonl/1187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 3393, 3564, 43930, 20652, 53694, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 609, 3927, 2972, 10753, 16094, 25873, 12614, 2648, 2067, 65978, 10029, 13909, 284, 21759, 89592, 36913, 4153, 90, 36913, 4153, 25, 8823, 3243, 7121, 14610, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRollupWindowPartialPoints(t *testing.T) { t.Run("beforeStart", func(t *testing.T) { rc := rollupConfig{ Func: rollupLast, Start: 0, End: 20, Step: 5, Window: 8, } rc.Timestamps = getTimestamps(rc.Start, rc.End, rc.Step) values := rc.Do(nil, testValues, testTimestamps) valuesExpected := []float64{nan, 123, 123, 34, 34} timestampsExpected := []int64{0, 5, 10, 15, 20} testRowsEqual(t, values, rc.Timestamps, valuesExpected, timestampsExpected) }) t.Run("afterEnd", func(t *testing.T) { rc := rollupConfig{ Func: rollupLast, Start: 100, End: 160, Step: 20, Window: 18, } rc.Timestamps = getTimestamps(rc.Start, rc.End, rc.Step) values := rc.Do(nil, testValues, testTimestamps) valuesExpected := []float64{44, 34, 34, nan} timestampsExpected := []int64{100, 120, 140, 160} testRowsEqual(t, values, rc.Timestamps, valuesExpected, timestampsExpected) }) t.Run("middle", func(t *testing.T) { rc := rollupConfig{ Func: rollupLast, Start: 0, End: 150, Step: 50, Window: 19, } rc.Timestamps = getTimestamps(rc.Start, rc.End, rc.Step) values := rc.Do(nil, testValues, testTimestamps) valuesExpected := []float64{nan, 54, 44, nan} timestampsExpected := []int64{0, 50, 100, 150} testRowsEqual(t, values, rc.Timestamps, valuesExpected, timestampsExpected) }) }
explode_data.jsonl/23122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 618 }
[ 2830, 3393, 32355, 454, 4267, 37314, 11411, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 14801, 3479, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 30295, 1669, 6502, 454, 2648, 515, 298, 197, 9626, 25, 256, 6502, 454, 5842, 345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGitCommandUsingGpg(t *testing.T) { type scenario struct { testName string getLocalGitConfig func(string) (string, error) getGlobalGitConfig func(string) (string, error) test func(bool) } scenarios := []scenario{ { "Option global and local config commit.gpgsign is not set", func(string) (string, error) { return "", nil }, func(string) (string, error) { return "", nil }, func(gpgEnabled bool) { assert.False(t, gpgEnabled) }, }, { "Option global config commit.gpgsign is not set, fallback on local config", func(string) (string, error) { return "", nil }, func(string) (string, error) { return "true", nil }, func(gpgEnabled bool) { assert.True(t, gpgEnabled) }, }, { "Option commit.gpgsign is true", func(string) (string, error) { return "True", nil }, func(string) (string, error) { return "", nil }, func(gpgEnabled bool) { assert.True(t, gpgEnabled) }, }, { "Option commit.gpgsign is on", func(string) (string, error) { return "ON", nil }, func(string) (string, error) { return "", nil }, func(gpgEnabled bool) { assert.True(t, gpgEnabled) }, }, { "Option commit.gpgsign is yes", func(string) (string, error) { return "YeS", nil }, func(string) (string, error) { return "", nil }, func(gpgEnabled bool) { assert.True(t, gpgEnabled) }, }, { "Option commit.gpgsign is 1", func(string) (string, error) { return "1", nil }, func(string) (string, error) { return "", nil }, func(gpgEnabled bool) { assert.True(t, gpgEnabled) }, }, } for _, s := range scenarios { t.Run(s.testName, func(t *testing.T) { gitCmd := newDummyGitCommand() gitCmd.getGlobalGitConfig = s.getGlobalGitConfig gitCmd.getLocalGitConfig = s.getLocalGitConfig s.test(gitCmd.usingGpg()) }) } }
explode_data.jsonl/38368
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 900 }
[ 2830, 3393, 46562, 4062, 16429, 38, 3517, 1155, 353, 8840, 836, 8, 341, 13158, 15048, 2036, 341, 197, 18185, 675, 1843, 914, 198, 197, 10366, 7319, 46562, 2648, 220, 2915, 3609, 8, 320, 917, 11, 1465, 340, 197, 10366, 11646, 46562, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFilterClearWire(t *testing.T) { msgFilterClear := NewMsgFilterClear() msgFilterClearEncoded := []byte{} tests := []struct { in *MsgFilterClear // Message to encode out *MsgFilterClear // Expected decoded message buf []byte // Wire encoding pver uint32 // Protocol version for wire encoding enc MessageEncoding // Message encoding format }{ // Latest protocol version. { msgFilterClear, msgFilterClear, msgFilterClearEncoded, ProtocolVersion, BaseEncoding, }, // Protocol version BIP0037Version + 1. { msgFilterClear, msgFilterClear, msgFilterClearEncoded, BIP0037Version + 1, BaseEncoding, }, // Protocol version BIP0037Version. { msgFilterClear, msgFilterClear, msgFilterClearEncoded, BIP0037Version, BaseEncoding, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Encode the message to wire format. var buf bytes.Buffer err := test.in.BtcEncode(&buf, test.pver, test.enc) if err != nil { t.Errorf("BtcEncode #%d error %v", i, err) continue } if !bytes.Equal(buf.Bytes(), test.buf) { t.Errorf("BtcEncode #%d\n got: %s want: %s", i, spew.Sdump(buf.Bytes()), spew.Sdump(test.buf)) continue } // Decode the message from wire format. var msg MsgFilterClear rbuf := bytes.NewReader(test.buf) err = msg.BtcDecode(rbuf, test.pver, test.enc) if err != nil { t.Errorf("BtcDecode #%d error %v", i, err) continue } if !reflect.DeepEqual(&msg, test.out) { t.Errorf("BtcDecode #%d\n got: %s want: %s", i, spew.Sdump(msg), spew.Sdump(test.out)) continue } } }
explode_data.jsonl/15537
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 697 }
[ 2830, 3393, 5632, 14008, 37845, 1155, 353, 8840, 836, 8, 341, 21169, 5632, 14008, 1669, 1532, 6611, 5632, 14008, 741, 21169, 5632, 14008, 46795, 1669, 3056, 3782, 31483, 78216, 1669, 3056, 1235, 341, 197, 17430, 256, 353, 6611, 5632, 1400...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestClient_UpdateTLSActivation_validation(t *testing.T) { t.Parallel() var err error record(t, "custom_tls_activation/update", func(c *Client) { _, err = c.UpdateTLSActivation(&UpdateTLSActivationInput{ ID: "ACTIVATION_ID", Certificate: &CustomTLSCertificate{ID: "CERTIFICATE_ID"}, }) }) if err != nil { t.Fatal(err) } _, err = testClient.UpdateTLSActivation(&UpdateTLSActivationInput{ ID: "ACTIVATION_ID", }) if err != ErrMissingTLSCertificate { t.Errorf("bad error: %s", err) } _, err = testClient.UpdateTLSActivation(&UpdateTLSActivationInput{ Certificate: &CustomTLSCertificate{ID: "CERTIFICATE_ID"}, }) if err != ErrMissingID { t.Errorf("bad error: %s", err) } }
explode_data.jsonl/2984
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 296 }
[ 2830, 3393, 2959, 47393, 45439, 61460, 19416, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2405, 1848, 1465, 198, 71952, 1155, 11, 330, 9163, 71262, 52404, 29832, 497, 2915, 1337, 353, 2959, 8, 341, 197, 197, 6878, 1848, 284, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLegacyReplicaCalcDisjointResourcesMetrics(t *testing.T) { tc := legacyReplicaCalcTestCase{ currentReplicas: 1, expectedError: fmt.Errorf("no metrics returned matched known pods"), resource: &resourceInfo{ name: v1.ResourceCPU, requests: []resource.Quantity{resource.MustParse("1.0")}, levels: makePodMetricLevels(100), podNames: []string{"an-older-pod-name"}, targetUtilization: 100, }, } tc.runTest(t) }
explode_data.jsonl/26676
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 77415, 18327, 15317, 47168, 4839, 32850, 11277, 27328, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 19588, 18327, 15317, 47168, 16458, 515, 197, 20121, 18327, 52210, 25, 220, 16, 345, 197, 42400, 1454, 25, 256, 8879, 13080, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshalNumber(t *testing.T) { input := []byte(`{ "N": "123.45"}`) var av DynamoDBAttributeValue err := json.Unmarshal(input, &av) assert.Nil(t, err) assert.Equal(t, DataTypeNumber, av.DataType()) assert.Equal(t, "123.45", av.Number()) }
explode_data.jsonl/61698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 1806, 27121, 2833, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 3056, 3782, 5809, 90, 330, 45, 788, 330, 16, 17, 18, 13, 19, 20, 1, 5541, 692, 2405, 1822, 71813, 3506, 78554, 198, 9859, 1669, 2951, 38097, 5384, 11, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckDelete(t *testing.T) { mocks, checkService := newCheckSvcStack() ch := mocks.pipingCoordinator.taskDeletedChan() mocks.checkSvc.FindCheckByIDFn = func(_ context.Context, id platform.ID) (influxdb.Check, error) { c := &check.Deadman{} c.SetID(id) c.SetTaskID(21) return c, nil } err := checkService.DeleteCheck(context.Background(), 1) if err != nil { t.Fatal(err) } select { case id := <-ch: if id != platform.ID(21) { t.Fatalf("task sent to coordinator doesn't match expected") } default: t.Fatal("didn't receive task") } }
explode_data.jsonl/72197
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 3973, 6435, 1155, 353, 8840, 836, 8, 341, 2109, 25183, 11, 1779, 1860, 1669, 501, 3973, 92766, 4336, 741, 23049, 1669, 68909, 556, 46095, 64304, 15034, 26039, 46019, 2822, 2109, 25183, 9093, 92766, 9998, 3973, 60572, 24911, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBranchName(t *testing.T) { assert.Equal(t, true, IsValidBranchName("t")) assert.Equal(t, true, IsValidBranchName("☃️")) assert.Equal(t, true, IsValidBranchName("user/in-progress/do-some-things")) assert.Equal(t, true, IsValidBranchName("user/in-progress/{}")) assert.Equal(t, true, IsValidBranchName("user/{/a.tt/}")) assert.Equal(t, false, IsValidBranchName("")) assert.Equal(t, false, IsValidBranchName("this-is-a-..-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-@{-test")) assert.Equal(t, false, IsValidBranchName("this-is-a- -test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\t-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-//-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-:-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-?-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-[-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\\-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-^-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-~-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-*-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x00-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x01-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x02-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x03-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x04-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x05-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x06-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x07-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x08-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x09-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x0a-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x0b-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x0c-test")) assert.Equal(t, false, 
IsValidBranchName("this-is-a-\x0d-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x0e-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x0f-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x10-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x11-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x12-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x13-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x14-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x15-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x16-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x17-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x18-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x19-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x1a-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x1b-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x1c-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x1d-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x1e-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x1f-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\x7f-test")) assert.Equal(t, false, IsValidBranchName("this-is-a-\n-test")) assert.Equal(t, false, IsValidBranchName("user/working/.tt")) assert.Equal(t, false, IsValidBranchName(".user/working/a.tt")) assert.Equal(t, false, IsValidBranchName("user/working/")) assert.Equal(t, false, IsValidBranchName("/user/working/")) assert.Equal(t, false, IsValidBranchName("user/working/mybranch.lock")) assert.Equal(t, false, IsValidBranchName("mybranch.lock")) assert.Equal(t, false, IsValidBranchName("user.lock/working/mybranch")) assert.Equal(t, false, IsValidBranchName("HEAD")) assert.Equal(t, false, IsValidBranchName("-")) }
explode_data.jsonl/21961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1685 }
[ 2830, 3393, 18197, 675, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 1155, 11, 830, 11, 70647, 18197, 675, 445, 83, 5455, 6948, 12808, 1155, 11, 830, 11, 70647, 18197, 675, 445, 146024, 30543, 5455, 6948, 12808, 1155, 11, 830, 11, 70647...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProcessExternalLabels(t *testing.T) { for _, tc := range []struct { labels tsdbLabels.Labels externalLabels labels.Labels expected labels.Labels }{ // Test adding labels at the end. { labels: tsdbLabels.Labels{{Name: "a", Value: "b"}}, externalLabels: labels.Labels{{Name: "c", Value: "d"}}, expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, }, // Test adding labels at the beginning. { labels: tsdbLabels.Labels{{Name: "c", Value: "d"}}, externalLabels: labels.Labels{{Name: "a", Value: "b"}}, expected: labels.Labels{{Name: "a", Value: "b"}, {Name: "c", Value: "d"}}, }, // Test we don't override existing labels. { labels: tsdbLabels.Labels{{Name: "a", Value: "b"}}, externalLabels: labels.Labels{{Name: "a", Value: "c"}}, expected: labels.Labels{{Name: "a", Value: "b"}}, }, } { require.Equal(t, tc.expected, processExternalLabels(tc.labels, tc.externalLabels)) } }
explode_data.jsonl/1312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 449 }
[ 2830, 3393, 7423, 25913, 23674, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 95143, 260, 10591, 1999, 23674, 4679, 82, 198, 197, 197, 20921, 23674, 9201, 4679, 82, 198, 197, 42400, 981, 9201, 4679, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestManifestGenerateHelmValues(t *testing.T) { runTestGroup(t, testGroup{ { desc: "helm_values_enablement", diffSelect: "Deployment:*:istio-egressgateway, Service:*:istio-egressgateway," + " Deployment:*:kiali, Service:*:kiali, Deployment:*:prometheus, Service:*:prometheus", }, }) }
explode_data.jsonl/48767
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 38495, 31115, 39, 23162, 6227, 1155, 353, 8840, 836, 8, 341, 56742, 2271, 2808, 1155, 11, 1273, 2808, 515, 197, 197, 515, 298, 41653, 25, 330, 51899, 9146, 18988, 478, 756, 298, 80564, 3379, 25, 330, 75286, 53386, 25, 380,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew(t *testing.T) { type args struct { address string nextConsumer consumer.TraceConsumerOld } tests := []struct { name string args args wantErr error }{ { name: "nil nextConsumer", args: args{}, wantErr: componenterror.ErrNilNextConsumer, }, { name: "happy path", args: args{ nextConsumer: exportertest.NewNopTraceExporterOld(), }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { cfg := &Config{ ReceiverSettings: configmodels.ReceiverSettings{ NameVal: zipkinReceiver, }, HTTPServerSettings: confighttp.HTTPServerSettings{ Endpoint: tt.args.address, }, } got, err := New(cfg, tt.args.nextConsumer) require.Equal(t, tt.wantErr, err) if tt.wantErr == nil { require.NotNil(t, got) } else { require.Nil(t, got) } }) } }
explode_data.jsonl/53598
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 63202, 414, 914, 198, 197, 28144, 29968, 11502, 46920, 29968, 18284, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMergesort(t *testing.T) { var a0 = goutil.RandIntArray(10, 1000000) // fmt.Println(a0) Mergesort(a0) // fmt.Println(a0) }
explode_data.jsonl/20290
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 65 }
[ 2830, 3393, 44, 2375, 288, 371, 1155, 353, 8840, 836, 8, 341, 2405, 264, 15, 284, 342, 30158, 2013, 437, 95338, 7, 16, 15, 11, 220, 16, 15, 15, 15, 15, 15, 15, 692, 197, 322, 8879, 12419, 2877, 15, 340, 9209, 2375, 288, 371, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDaoReportCount(t *testing.T) { var ( c = context.TODO() sqlStr = []string{} order = "rpt.id" state = int32(0) ) convey.Convey("ReportCount", t, func(ctx convey.C) { count, err := d.ReportCount(c, sqlStr, order, state) ctx.Convey("Then err should be nil.count should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(count, convey.ShouldNotBeNil) }) }) }
explode_data.jsonl/51306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 12197, 10361, 2507, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 414, 284, 2266, 90988, 741, 197, 30633, 2580, 284, 3056, 917, 16094, 197, 42245, 220, 284, 330, 81, 417, 1764, 698, 197, 24291, 220, 284, 526, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSub1(t *testing.T) { bA1, _ := randBitArray(31) bA2, _ := randBitArray(51) bA3 := bA1.Sub(bA2) if bA3.Bits != bA1.Bits { t.Error("Expected bA1 bits") } if len(bA3.Elems) != len(bA1.Elems) { t.Error("Expected bA1 elems length") } for i := 0; i < bA3.Bits; i++ { expected := bA1.GetIndex(i) if bA2.GetIndex(i) { expected = false } if bA3.GetIndex(i) != expected { t.Error("Wrong bit from bA3", i, bA1.GetIndex(i), bA2.GetIndex(i), bA3.GetIndex(i)) } } }
explode_data.jsonl/77648
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 3136, 16, 1155, 353, 8840, 836, 8, 1476, 2233, 32, 16, 11, 716, 1669, 10382, 8344, 1857, 7, 18, 16, 340, 2233, 32, 17, 11, 716, 1669, 10382, 8344, 1857, 7, 20, 16, 340, 2233, 32, 18, 1669, 293, 32, 16, 12391, 1883, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestPOSDishonoredReturnWrite(t *testing.T) { file := NewFile().SetHeader(mockFileHeader()) entry := NewEntryDetail() entry.TransactionCode = CheckingDebit entry.SetRDFI("121042882") entry.DFIAccountNumber = "744-5678-99" entry.Amount = 25000 entry.IdentificationNumber = "45689033" entry.IndividualName = "Wade Arnold" entry.SetTraceNumber(mockBatchPOSHeader().ODFIIdentification, 1) entry.DiscretionaryData = "01" entry.AddendaRecordIndicator = 1 entry.Category = CategoryDishonoredReturn addenda99 := mockAddenda99() addenda99.ReturnCode = "R68" addenda99.AddendaInformation = "Untimely Return" entry.Addenda99 = addenda99 posHeader := NewBatchHeader() posHeader.ServiceClassCode = DebitsOnly posHeader.StandardEntryClassCode = POS posHeader.CompanyName = "Payee Name" posHeader.CompanyIdentification = "231380104" posHeader.CompanyEntryDescription = "ACH POS" posHeader.ODFIIdentification = "23138010" batch := NewBatchPOS(posHeader) batch.SetHeader(posHeader) batch.AddEntry(entry) if err := batch.Create(); err != nil { t.Fatal(err) } file.AddBatch(batch) if err := file.Create(); err != nil { t.Errorf("%T: %s", err, err) } if err := file.Validate(); err != nil { t.Errorf("%T: %s", err, err) } b := &bytes.Buffer{} f := NewWriter(b) if err := f.Write(file); err != nil { t.Errorf("%T: %s", err, err) } r := NewReader(strings.NewReader(b.String())) _, err := r.Read() if err != nil { t.Errorf("%T: %s", err, err) } if err = r.File.Validate(); err != nil { t.Errorf("%T: %s", err, err) } }
explode_data.jsonl/68681
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 605 }
[ 2830, 3393, 2045, 5491, 812, 263, 3018, 5598, 7985, 1155, 353, 8840, 836, 8, 341, 17661, 1669, 1532, 1703, 1005, 1649, 4047, 30389, 1703, 4047, 2398, 48344, 1669, 1532, 5874, 10649, 741, 48344, 29284, 2078, 284, 46093, 1912, 4489, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestExportTraceDataMinimum(t *testing.T) { spans := []*tracepb.Span{ { TraceId: []byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}, SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1}, Name: &tracepb.TruncatableString{Value: "root"}, }, } expected := []Span{ { ID: "0000000000000001", TraceID: "01010101010101010101010101010101", Attributes: map[string]interface{}{ "collector.name": name, "collector.version": version, "name": "root", }, }, } testTraceData(t, expected, nil, spans) }
explode_data.jsonl/16227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 16894, 6550, 1043, 28695, 1155, 353, 8840, 836, 8, 341, 41378, 596, 1669, 29838, 15067, 16650, 85309, 515, 197, 197, 515, 298, 197, 6550, 764, 25, 3056, 3782, 90, 16, 11, 220, 16, 11, 220, 16, 11, 220, 16, 11, 220, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServerlessDistributedTracingConfigAbsent(t *testing.T) { // Test that payloads do not get created or accepted when distributed // tracing configuration is not present. cfgFn := func(cfg *Config) { cfg.ServerlessMode.Enabled = true cfg.DistributedTracer.Enabled = true } app := testApp(nil, cfgFn, t) txn := app.StartTransaction("hello") emptyHdrs := http.Header{} txn.InsertDistributedTraceHeaders(emptyHdrs) if len(emptyHdrs) != 0 { t.Error(emptyHdrs) } nonEmptyHdrs := http.Header{} app2 := testApp(nil, func(cfg *Config) { cfgFn(cfg) cfg.ServerlessMode.AccountID = "123" cfg.ServerlessMode.TrustedAccountKey = "trustkey" cfg.ServerlessMode.PrimaryAppID = "456" }, t) app2.StartTransaction("hello").InsertDistributedTraceHeaders(nonEmptyHdrs) if len(nonEmptyHdrs) == 0 { t.Error(nonEmptyHdrs) } txn.AcceptDistributedTraceHeaders(TransportHTTP, nonEmptyHdrs) app.expectNoLoggedErrors(t) txn.End() app.ExpectMetrics(t, []internal.WantMetric{ {Name: "OtherTransaction/Go/hello", Scope: "", Forced: true, Data: nil}, {Name: "OtherTransaction/all", Scope: "", Forced: true, Data: nil}, {Name: "OtherTransactionTotalTime/Go/hello", Scope: "", Forced: false, Data: nil}, {Name: "OtherTransactionTotalTime", Scope: "", Forced: true, Data: nil}, {Name: "DurationByCaller/Unknown/Unknown/Unknown/Unknown/all", Scope: "", Forced: false, Data: nil}, {Name: "DurationByCaller/Unknown/Unknown/Unknown/Unknown/allOther", Scope: "", Forced: false, Data: nil}, }) }
explode_data.jsonl/52864
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 555 }
[ 2830, 3393, 5475, 1717, 35, 25146, 1282, 4527, 2648, 80251, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 429, 89523, 653, 537, 633, 3465, 476, 11666, 979, 4237, 198, 197, 322, 45415, 6546, 374, 537, 3042, 624, 50286, 24911, 1669, 291...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddOrgUser(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) testSuccess := func(orgID, userID int64, isPublic bool) { org := AssertExistsAndLoadBean(t, &User{ID: orgID}).(*User) expectedNumMembers := org.NumMembers if !BeanExists(t, &OrgUser{OrgID: orgID, UID: userID}) { expectedNumMembers++ } assert.NoError(t, AddOrgUser(orgID, userID)) ou := &OrgUser{OrgID: orgID, UID: userID} AssertExistsAndLoadBean(t, ou) assert.Equal(t, ou.IsPublic, isPublic) org = AssertExistsAndLoadBean(t, &User{ID: orgID}).(*User) assert.EqualValues(t, expectedNumMembers, org.NumMembers) } setting.Service.DefaultOrgMemberVisible = false testSuccess(3, 5, false) testSuccess(3, 5, false) testSuccess(6, 2, false) setting.Service.DefaultOrgMemberVisible = true testSuccess(6, 3, true) CheckConsistencyFor(t, &User{}, &Team{}) }
explode_data.jsonl/71070
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 342 }
[ 2830, 3393, 2212, 42437, 1474, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 2398, 18185, 7188, 1669, 2915, 36246, 915, 11, 35204, 526, 21, 19, 11, 374, 12676, 1807, 8, 341, 197, 87625, 1669, 5319, 15575, 303...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestKeeperTestSuite(t *testing.T) { s = new(KeeperTestSuite) suite.Run(t, s) // Run Ginkgo integration tests RegisterFailHandler(Fail) RunSpecs(t, "Keeper Suite") }
explode_data.jsonl/60512
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 77233, 2271, 28000, 1155, 353, 8840, 836, 8, 341, 1903, 284, 501, 16738, 43031, 2271, 28000, 340, 96572, 16708, 1155, 11, 274, 692, 197, 322, 6452, 479, 766, 3346, 17590, 7032, 198, 79096, 19524, 3050, 7832, 604, 340, 85952,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1