text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestLocalStorageEnvWithFeatureGate(t *testing.T) { testCases := []core.EnvVar{ { Name: "ephemeral-storage-limits", ValueFrom: &core.EnvVarSource{ ResourceFieldRef: &core.ResourceFieldSelector{ ContainerName: "test-container", Resource: "limits.ephemeral-storage", }, }, }, { Name: "ephemeral-storage-requests", ValueFrom: &core.EnvVarSource{ ResourceFieldRef: &core.ResourceFieldSelector{ ContainerName: "test-container", Resource: "requests.ephemeral-storage", }, }, }, } // Enable feature LocalStorageCapacityIsolation defer utilfeaturetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.LocalStorageCapacityIsolation, true)() for _, testCase := range testCases { if errs := validateEnvVarValueFrom(testCase, field.NewPath("field")); len(errs) != 0 { t.Errorf("expected success, got: %v", errs) } } // Disable feature LocalStorageCapacityIsolation defer utilfeaturetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.LocalStorageCapacityIsolation, false)() for _, testCase := range testCases { if errs := validateEnvVarValueFrom(testCase, field.NewPath("field")); len(errs) == 0 { t.Errorf("expected failure for %v", testCase.Name) } } }
explode_data.jsonl/1012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 479 }
[ 2830, 3393, 90464, 14359, 2354, 13859, 42318, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 2153, 81214, 3962, 515, 197, 197, 515, 298, 21297, 25, 330, 23544, 336, 3253, 62795, 2852, 22866, 756, 298, 47399, 3830, 25, 609, 2153...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAgent_RegisterService_InvalidAddress(t *testing.T) { t.Parallel() a := NewTestAgent(t.Name(), "") defer a.Shutdown() for _, addr := range []string{"0.0.0.0", "::", "[::]"} { t.Run("addr "+addr, func(t *testing.T) { args := &structs.ServiceDefinition{ Name: "test", Address: addr, Port: 8000, } req, _ := http.NewRequest("PUT", "/v1/agent/service/register?token=abc123", jsonReader(args)) resp := httptest.NewRecorder() _, err := a.srv.AgentRegisterService(resp, req) if err != nil { t.Fatalf("got error %v want nil", err) } if got, want := resp.Code, 400; got != want { t.Fatalf("got code %d want %d", got, want) } if got, want := resp.Body.String(), "Invalid service address"; got != want { t.Fatalf("got body %q want %q", got, want) } }) } }
explode_data.jsonl/33628
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 355 }
[ 2830, 3393, 16810, 73124, 1860, 62, 7928, 4286, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 11323, 1669, 1532, 2271, 16810, 1155, 2967, 1507, 14676, 16867, 264, 10849, 18452, 2822, 2023, 8358, 10789, 1669, 2088, 3056, 917, 4913,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTaskSortError(t *testing.T) { testTasklist.LoadFromPath(testInputSort) if err := testTasklist.Sort(123); err == nil { t.Errorf("Expected Sort() to fail because of unrecognized sort option, but it didn't!") } else if err.Error() != "unrecognized sort option" { t.Error(err) } }
explode_data.jsonl/24870
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 6262, 10231, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 6262, 1607, 13969, 3830, 1820, 8623, 2505, 10231, 692, 743, 1848, 1669, 1273, 6262, 1607, 35976, 7, 16, 17, 18, 1215, 1848, 621, 2092, 341, 197, 3244, 13080, 445, 188...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDownloadMgr(t *testing.T) { tasksChan := make(chan int) mgr := NewDownloadMgr(2) mgr.AddTask("https://content.backcountry.com/images/items/large/TNF/TNF05JY/TNFBK.jpg", func(url string, buf []byte, err error) { if err != nil { t.Fatalf("TestDownloadMgr AddTask %v", err) } tasksChan <- 1 }) mgr.AddTask("https://content.backcountry.com/images/items/large/TNF/TNF05JY/VNWH_D2.jpg", func(url string, buf []byte, err error) { if err != nil { t.Fatalf("TestDownloadMgr AddTask %v", err) } tasksChan <- 1 }) mgr.AddTask("https://content.backcountry.com/images/items/small/TNF/TNF05JY/TNFBK_D2.jpg", func(url string, buf []byte, err error) { if err != nil { t.Fatalf("TestDownloadMgr AddTask %v", err) } tasksChan <- 1 }) mgr.AddTask("https://content.backcountry.com/images/items/900/TNF/TNF05JY/TNFBK_D3.jpg", func(url string, buf []byte, err error) { if err != nil { t.Fatalf("TestDownloadMgr AddTask %v", err) } tasksChan <- 1 }) mgr.AddTask("https://content.backcountry.com/images/items/small/TNF/TNF05JY/VNWH_D3.jpg", func(url string, buf []byte, err error) { if err != nil { t.Fatalf("TestDownloadMgr AddTask %v", err) } tasksChan <- 1 }) tasknums := 0 for { r := <-tasksChan tasknums = tasknums + r if tasknums >= 5 { break } } t.Logf("TestDownloadMgr OK") }
explode_data.jsonl/1809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 630 }
[ 2830, 3393, 11377, 25567, 1155, 353, 8840, 836, 8, 341, 3244, 4604, 46019, 1669, 1281, 35190, 526, 340, 2109, 901, 1669, 1532, 11377, 25567, 7, 17, 692, 2109, 901, 1904, 6262, 445, 2428, 1110, 1796, 7335, 11141, 905, 9737, 61259, 13328,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBoolArrayValue(t *testing.T) { result, err := BoolArray(nil).Value() if err != nil { t.Fatalf("Expected no error for nil, got %v", err) } if result != nil { t.Errorf("Expected nil, got %q", result) } result, err = BoolArray([]bool{}).Value() if err != nil { t.Fatalf("Expected no error for empty, got %v", err) } if expected := `{}`; !reflect.DeepEqual(result, expected) { t.Errorf("Expected empty, got %q", result) } result, err = BoolArray([]bool{false, true, false}).Value() if err != nil { t.Fatalf("Expected no error, got %v", err) } if expected := `{f,t,f}`; !reflect.DeepEqual(result, expected) { t.Errorf("Expected %q, got %q", expected, result) } }
explode_data.jsonl/5310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 11233, 1857, 1130, 1155, 353, 8840, 836, 8, 341, 9559, 11, 1848, 1669, 12608, 1857, 27907, 568, 1130, 2822, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 18896, 902, 1465, 369, 2092, 11, 2684, 1018, 85, 497, 1848, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestComputeSplitKeySystemRanges(t *testing.T) { defer leaktest.AfterTest(t)() testCases := []struct { start, end roachpb.RKey split roachpb.Key }{ {roachpb.RKeyMin, roachpb.RKeyMax, keys.NodeLivenessPrefix}, {roachpb.RKeyMin, tkey(1), keys.NodeLivenessPrefix}, {roachpb.RKeyMin, roachpb.RKey(keys.TimeseriesPrefix), keys.NodeLivenessPrefix}, {roachpb.RKeyMin, roachpb.RKey(keys.SystemPrefix.Next()), nil}, {roachpb.RKeyMin, roachpb.RKey(keys.SystemPrefix), nil}, {roachpb.RKeyMin, roachpb.RKey(keys.MetaMax), nil}, {roachpb.RKeyMin, roachpb.RKey(keys.Meta2KeyMax), nil}, {roachpb.RKeyMin, roachpb.RKey(keys.Meta1KeyMax), nil}, {roachpb.RKey(keys.Meta1KeyMax), roachpb.RKey(keys.SystemPrefix), nil}, {roachpb.RKey(keys.Meta1KeyMax), roachpb.RKey(keys.SystemPrefix.Next()), nil}, {roachpb.RKey(keys.Meta1KeyMax), roachpb.RKey(keys.NodeLivenessPrefix), nil}, {roachpb.RKey(keys.Meta1KeyMax), roachpb.RKey(keys.NodeLivenessPrefix.Next()), keys.NodeLivenessPrefix}, {roachpb.RKey(keys.Meta1KeyMax), roachpb.RKeyMax, keys.NodeLivenessPrefix}, {roachpb.RKey(keys.SystemPrefix), roachpb.RKey(keys.SystemPrefix), nil}, {roachpb.RKey(keys.SystemPrefix), roachpb.RKey(keys.SystemPrefix.Next()), nil}, {roachpb.RKey(keys.SystemPrefix), roachpb.RKeyMax, keys.NodeLivenessPrefix}, {roachpb.RKey(keys.NodeLivenessPrefix), roachpb.RKey(keys.NodeLivenessPrefix.Next()), nil}, {roachpb.RKey(keys.NodeLivenessPrefix), roachpb.RKey(keys.NodeLivenessKeyMax), nil}, {roachpb.RKey(keys.NodeLivenessPrefix), roachpb.RKeyMax, keys.NodeLivenessKeyMax}, {roachpb.RKey(keys.NodeLivenessKeyMax), roachpb.RKeyMax, keys.TimeseriesPrefix}, {roachpb.RKey(keys.MigrationPrefix), roachpb.RKey(keys.NodeLivenessPrefix), nil}, {roachpb.RKey(keys.MigrationPrefix), roachpb.RKey(keys.NodeLivenessKeyMax), nil}, {roachpb.RKey(keys.MigrationPrefix), roachpb.RKey(keys.StoreIDGenerator), nil}, {roachpb.RKey(keys.MigrationPrefix), roachpb.RKey(keys.TimeseriesPrefix), nil}, {roachpb.RKey(keys.MigrationPrefix), 
roachpb.RKey(keys.TimeseriesPrefix.Next()), keys.TimeseriesPrefix}, {roachpb.RKey(keys.MigrationPrefix), roachpb.RKeyMax, keys.TimeseriesPrefix}, {roachpb.RKey(keys.TimeseriesPrefix), roachpb.RKey(keys.TimeseriesPrefix.Next()), nil}, {roachpb.RKey(keys.TimeseriesPrefix), roachpb.RKey(keys.TimeseriesPrefix.PrefixEnd()), nil}, {roachpb.RKey(keys.TimeseriesPrefix), roachpb.RKeyMax, keys.TimeseriesPrefix.PrefixEnd()}, {roachpb.RKey(keys.TimeseriesPrefix.PrefixEnd()), roachpb.RKey(keys.TimeseriesPrefix.PrefixEnd()), nil}, {roachpb.RKey(keys.TimeseriesPrefix.PrefixEnd()), roachpb.RKeyMax, keys.SystemConfigSplitKey}, } cfg := config.NewSystemConfig(config.DefaultZoneConfigRef()) kvs, _ /* splits */ := sqlbase.MakeMetadataSchema(cfg.DefaultZoneConfig, config.DefaultSystemZoneConfigRef()).GetInitialValues(cluster.TestingClusterVersion) cfg.SystemConfigEntries = config.SystemConfigEntries{ Values: kvs, } for tcNum, tc := range testCases { splitKey := cfg.ComputeSplitKey(tc.start, tc.end) expected := roachpb.RKey(tc.split) if !splitKey.Equal(expected) { t.Errorf("#%d: bad split:\ngot: %v\nexpected: %v", tcNum, splitKey, expected) } } }
explode_data.jsonl/25323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1300 }
[ 2830, 3393, 46254, 20193, 1592, 2320, 74902, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 18185, 37302, 1669, 3056, 1235, 341, 197, 21375, 11, 835, 926, 610, 16650, 2013, 1592, 198, 197, 1903, 2292, 414,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddSnapshotRecovery(t *testing.T) { const ( backendName = "addSnapshotRecoveryBackend" scName = "addSnapshotRecoveryBackendSC" volumeName = "addSnapshotRecoveryVolume" fullSnapshotName = "addSnapshotRecoverySnapshotFull" txOnlySnapshotName = "addSnapshotRecoverySnapshotTxOnly" ) orchestrator := getOrchestrator(t) prepRecoveryTest(t, orchestrator, backendName, scName) // It's easier to add the volume/snapshot and then reinject the transaction again afterwards. volumeConfig := tu.GenerateVolumeConfig(volumeName, 50, scName, config.File) if _, err := orchestrator.AddVolume(ctx(), volumeConfig); err != nil { t.Fatal("Unable to add volume: ", err) } // For the full test, we create everything and recreate the AddSnapshot transaction. fullSnapshotConfig := generateSnapshotConfig(fullSnapshotName, volumeName, volumeName) if _, err := orchestrator.CreateSnapshot(ctx(), fullSnapshotConfig); err != nil { t.Fatal("Unable to add snapshot: ", err) } // For the partial test, we add only the AddSnapshot transaction. txOnlySnapshotConfig := generateSnapshotConfig(txOnlySnapshotName, volumeName, volumeName) // BEGIN actual test. Note that the delete idempotency is handled at the backend layer // (above the driver), so if the snapshot doesn't exist after bootstrapping, the driver // will not be called to delete the snapshot. runSnapshotRecoveryTests( t, orchestrator, backendName, storage.AddSnapshot, []recoveryTest{ {name: "full", volumeConfig: volumeConfig, snapshotConfig: fullSnapshotConfig, expectDestroy: true}, {name: "txOnly", volumeConfig: volumeConfig, snapshotConfig: txOnlySnapshotConfig, expectDestroy: false}, }, ) cleanup(t, orchestrator) }
explode_data.jsonl/62729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 2212, 15009, 693, 7449, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 197, 20942, 675, 286, 284, 330, 718, 15009, 693, 7449, 29699, 698, 197, 29928, 675, 1797, 284, 330, 718, 15009, 693, 7449, 29699, 3540, 698, 197, 5195,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMax(t *testing.T) { type testData struct { in []float64 out float64 } tests := []testData{ {in: []float64{18.2, 12.4, 3.5}, out: 18.2}, {in: []float64{}, out: math.Inf(-1)}, {in: nil, out: math.Inf(-1)}, {in: []float64{1.2, -12.4, 3.5}, out: 3.5}, {in: []float64{3.75}, out: 3.75}, {in: []float64{3.75, math.Inf(-1)}, out: 3.75}, {in: []float64{3.75, math.Inf(1)}, out: math.Inf(5)}, {in: []float64{-3.75, math.Inf(-1)}, out: -3.75}, {in: []float64{math.Inf(1), math.Inf(-1)}, out: math.Inf(0)}, } for i, test := range tests { if err := a.AssertDeepEqual(test.out, Max(test.in...)); err != nil { t.Error(m.ErrorMessageTestCount(i+1, err)) } } }
explode_data.jsonl/9014
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 5974, 1155, 353, 8840, 836, 8, 341, 13158, 67348, 2036, 341, 197, 17430, 220, 3056, 3649, 21, 19, 198, 197, 13967, 2224, 21, 19, 198, 197, 630, 78216, 1669, 3056, 1944, 1043, 515, 197, 197, 90, 258, 25, 3056, 3649, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFirestoreBeginTransaction(t *testing.T) { var transaction []byte = []byte("-34") var expectedResponse = &firestorepb.BeginTransactionResponse{ Transaction: transaction, } mockFirestore.err = nil mockFirestore.reqs = nil mockFirestore.resps = append(mockFirestore.resps[:0], expectedResponse) var formattedDatabase string = fmt.Sprintf("projects/%s/databases/%s", "[PROJECT]", "[DATABASE]") var request = &firestorepb.BeginTransactionRequest{ Database: formattedDatabase, } c, err := NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } resp, err := c.BeginTransaction(context.Background(), request) if err != nil { t.Fatal(err) } if want, got := request, mockFirestore.reqs[0]; !proto.Equal(want, got) { t.Errorf("wrong request %q, want %q", got, want) } if want, got := expectedResponse, resp; !proto.Equal(want, got) { t.Errorf("wrong response %q, want %q)", got, want) } }
explode_data.jsonl/27382
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 48513, 11135, 8070, 1155, 353, 8840, 836, 8, 341, 2405, 7745, 3056, 3782, 284, 3056, 3782, 13645, 18, 19, 1138, 2405, 3601, 2582, 284, 609, 10796, 4314, 16650, 28467, 8070, 2582, 515, 197, 197, 8070, 25, 7745, 345, 197, 63...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestByteSlice(t *testing.T) { m := &HashMap{} k := []byte(`Well this is a fine mess`) i := 123 m.Set(k, i) j, ok := m.Get(k) if !ok { t.Fail() } if i != j { t.Fail() } }
explode_data.jsonl/24437
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 7153, 33236, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 609, 18497, 16094, 16463, 1669, 3056, 3782, 5809, 11395, 419, 374, 264, 6915, 9435, 24183, 8230, 1669, 220, 16, 17, 18, 198, 2109, 4202, 5969, 11, 600, 692, 12428, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConfig_GetFloatValue(t *testing.T) { client := createMockApolloConfig(120) defaultValue := 100000.1 config := client.GetConfig(testDefaultNamespace) //test default v := config.GetFloatValue("joe", defaultValue) Assert(t, defaultValue, Equal(v)) //normal value v = config.GetFloatValue("float", defaultValue) Assert(t, 190.3, Equal(v)) //error type v = config.GetFloatValue("int", defaultValue) Assert(t, float64(1), Equal(v)) }
explode_data.jsonl/17971
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 2648, 13614, 5442, 1130, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 1855, 11571, 95909, 2648, 7, 16, 17, 15, 340, 11940, 1130, 1669, 220, 16, 15, 15, 15, 15, 15, 13, 16, 198, 25873, 1669, 2943, 2234, 2648, 8623, 3675, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestElem(t *testing.T) { type I interface{} var i I u := ValueFrom(reflect.ValueOf(i)) type X struct { A int16 B string } x := &X{A: 12345, B: "test"} xx := &x var elemPtr uintptr for _, v := range []interface{}{&xx, xx, x, *x} { val := ValueOf(v).UnderlyingElem() elemPtr = val.Pointer() a := *(*int16)(unsafe.Pointer(elemPtr)) if !assert.Equal(t, x.A, a) { t.FailNow() } b := *(*string)(unsafe.Pointer(elemPtr + unsafe.Offsetof(x.B))) if !assert.Equal(t, x.B, b) { t.FailNow() } } var y *X u = ValueOf(&y) if !assert.False(t, u.IsNil()) { t.FailNow() } u = u.UnderlyingElem() if !assert.Equal(t, reflect.Struct, u.Kind()) { t.FailNow() } if !assert.True(t, u.IsNil()) { t.FailNow() } }
explode_data.jsonl/29656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 369 }
[ 2830, 3393, 25586, 1155, 353, 8840, 836, 8, 341, 13158, 358, 3749, 16094, 2405, 600, 358, 198, 10676, 1669, 5162, 3830, 13321, 767, 6167, 2124, 1956, 1171, 13158, 1599, 2036, 341, 197, 22985, 526, 16, 21, 198, 197, 12791, 914, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestServer_Push_RejectForbiddenHeader(t *testing.T) { testServer_Push_RejectSingleRequest(t, func(p http.Pusher, r *http.Request) error { header := http.Header{ "Content-Length": {"10"}, "Content-Encoding": {"gzip"}, "Trailer": {"Foo"}, "Te": {"trailers"}, "Host": {"test.com"}, ":authority": {"test.com"}, } if err := p.Push("https://"+r.Host+"/pushed", &http.PushOptions{Header: header}); err == nil { return errors.New("Push() should have failed (forbidden headers)") } return nil }) }
explode_data.jsonl/1977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 5475, 1088, 1116, 50693, 583, 69115, 4047, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 1088, 1116, 50693, 583, 10888, 1900, 1155, 345, 197, 29244, 1295, 1758, 34981, 261, 11, 435, 353, 1254, 9659, 8, 1465, 341, 298, 20883, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNormalizeWindowsPath(t *testing.T) { path := `/var/lib/kubelet/pods/146f8428-83e7-11e7-8dd4-000d3a31dac4/volumes/kubernetes.io~azure-disk` normalizedPath := normalizeWindowsPath(path) if normalizedPath != `c:\var\lib\kubelet\pods\146f8428-83e7-11e7-8dd4-000d3a31dac4\volumes\kubernetes.io~azure-disk` { t.Errorf("normizeWindowsPath test failed, normalizedPath : %q", normalizedPath) } path = `/var/lib/kubelet/pods/146f8428-83e7-11e7-8dd4-000d3a31dac4\volumes\kubernetes.io~azure-disk` normalizedPath = normalizeWindowsPath(path) if normalizedPath != `c:\var\lib\kubelet\pods\146f8428-83e7-11e7-8dd4-000d3a31dac4\volumes\kubernetes.io~azure-disk` { t.Errorf("normizeWindowsPath test failed, normalizedPath : %q", normalizedPath) } path = `/` normalizedPath = normalizeWindowsPath(path) if normalizedPath != `c:\` { t.Errorf("normizeWindowsPath test failed, normalizedPath : %q", normalizedPath) } }
explode_data.jsonl/48681
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 394 }
[ 2830, 3393, 87824, 13164, 1820, 1155, 353, 8840, 836, 8, 341, 26781, 1669, 37301, 947, 8194, 14109, 3760, 1149, 4322, 29697, 14, 16, 19, 21, 69, 23, 19, 17, 23, 12, 23, 18, 68, 22, 12, 16, 16, 68, 22, 12, 23, 631, 19, 12, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIntegration_GasUpdater(t *testing.T) { t.Parallel() c, cfgCleanup := cltest.NewConfig(t) defer cfgCleanup() c.Set("ETH_GAS_PRICE_DEFAULT", 5000000000) c.Set("GAS_UPDATER_ENABLED", true) c.Set("GAS_UPDATER_BLOCK_DELAY", 0) c.Set("GAS_UPDATER_BLOCK_HISTORY_SIZE", 2) // Limit the headtracker backfill depth just so we aren't here all week c.Set("ETH_FINALITY_DEPTH", 3) rpcClient, gethClient, sub, assertMocksCalled := cltest.NewEthMocks(t) defer assertMocksCalled() chchNewHeads := make(chan chan<- *models.Head, 1) app, cleanup := cltest.NewApplicationWithConfigAndKey(t, c, eth.NewClientWith(rpcClient, gethClient), ) defer cleanup() b41 := models.Block{ Number: 41, Hash: cltest.NewHash(), Transactions: cltest.TransactionsFromGasPrices(41000000000, 41500000000), } b42 := models.Block{ Number: 42, Hash: cltest.NewHash(), Transactions: cltest.TransactionsFromGasPrices(44000000000, 45000000000), } b43 := models.Block{ Number: 43, Hash: cltest.NewHash(), Transactions: cltest.TransactionsFromGasPrices(48000000000, 49000000000, 31000000000), } h40 := models.Head{Hash: cltest.NewHash(), Number: 40} h41 := models.Head{Hash: b41.Hash, ParentHash: h40.Hash, Number: 41} h42 := models.Head{Hash: b42.Hash, ParentHash: h41.Hash, Number: 42} sub.On("Err").Return(nil) sub.On("Unsubscribe").Return(nil).Maybe() rpcClient.On("EthSubscribe", mock.Anything, mock.Anything, "newHeads"). Run(func(args mock.Arguments) { chchNewHeads <- args.Get(1).(chan<- *models.Head) }). 
Return(sub, nil) // Nonce syncer gethClient.On("PendingNonceAt", mock.Anything, mock.Anything).Maybe().Return(uint64(0), nil) // GasUpdater boot calls rpcClient.On("CallContext", mock.Anything, mock.AnythingOfType("**models.Head"), "eth_getBlockByNumber", "latest", false).Return(nil).Run(func(args mock.Arguments) { arg := args.Get(1).(**models.Head) *arg = &h42 }) rpcClient.On("BatchCallContext", mock.Anything, mock.MatchedBy(func(b []rpc.BatchElem) bool { return len(b) == 2 && b[0].Method == "eth_getBlockByNumber" && b[0].Args[0] == "0x29" && b[1].Method == "eth_getBlockByNumber" && b[1].Args[0] == "0x2a" })).Return(nil).Run(func(args mock.Arguments) { elems := args.Get(1).([]rpc.BatchElem) elems[0].Result = &b41 elems[1].Result = &b42 }) gethClient.On("ChainID", mock.Anything).Return(c.ChainID(), nil) gethClient.On("BalanceAt", mock.Anything, mock.Anything, mock.Anything).Maybe().Return(oneETH.ToInt(), nil) require.NoError(t, app.Start()) var newHeads chan<- *models.Head select { case newHeads = <-chchNewHeads: case <-time.After(10 * time.Second): t.Fatal("timed out waiting for app to subscribe") } assert.Equal(t, "41500000000", app.Config.EthGasPriceDefault().String()) // GasUpdater new blocks rpcClient.On("BatchCallContext", mock.Anything, mock.MatchedBy(func(b []rpc.BatchElem) bool { return len(b) == 2 && b[0].Method == "eth_getBlockByNumber" && b[0].Args[0] == "0x2a" && b[1].Method == "eth_getBlockByNumber" && b[1].Args[0] == "0x2b" })).Return(nil).Run(func(args mock.Arguments) { elems := args.Get(1).([]rpc.BatchElem) elems[0].Result = &b43 elems[1].Result = &b42 }) // HeadTracker backfill rpcClient.On("CallContext", mock.Anything, mock.AnythingOfType("**models.Head"), "eth_getBlockByNumber", "0x2a", false).Return(nil).Run(func(args mock.Arguments) { arg := args.Get(1).(**models.Head) *arg = &h42 }) rpcClient.On("CallContext", mock.Anything, mock.AnythingOfType("**models.Head"), "eth_getBlockByNumber", "0x29", false).Return(nil).Run(func(args mock.Arguments) 
{ arg := args.Get(1).(**models.Head) *arg = &h41 }) // Simulate one new head and check the gas price got updated newHeads <- cltest.Head(43) gomega.NewGomegaWithT(t).Eventually(func() string { return c.EthGasPriceDefault().String() }, cltest.DBWaitTimeout, cltest.DBPollingInterval).Should(gomega.Equal("45000000000")) }
explode_data.jsonl/75910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1610 }
[ 2830, 3393, 52464, 2646, 300, 79854, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1444, 11, 13286, 67335, 1669, 1185, 1944, 7121, 2648, 1155, 340, 16867, 13286, 67335, 741, 1444, 4202, 445, 7625, 2646, 1911, 51900, 13811, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTraceExporter_WithShutdown(t *testing.T) { shutdownCalled := false shutdown := func() error { shutdownCalled = true; return nil } te, err := NewTraceExporter(fakeTraceExporterConfig, newPushTraceData(0, nil), WithShutdown(shutdown)) assert.NotNil(t, te) assert.Nil(t, err) assert.Nil(t, te.Shutdown()) assert.True(t, shutdownCalled) }
explode_data.jsonl/1576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 6550, 88025, 62, 2354, 62004, 1155, 353, 8840, 836, 8, 341, 36196, 18452, 20960, 1669, 895, 198, 36196, 18452, 1669, 2915, 368, 1465, 314, 23766, 20960, 284, 830, 26, 470, 2092, 555, 197, 665, 11, 1848, 1669, 1532, 6550, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetMap(t *testing.T) { set := MustParseTerm(`{"foo", "bar", "baz", "qux"}`).Value.(Set) result, err := set.Map(func(term *Term) (*Term, error) { s := string(term.Value.(String)) if strings.Contains(s, "a") { return &Term{Value: String(strings.ToUpper(s))}, nil } return term, nil }) if err != nil { t.Fatalf("Unexpected error: %v", err) } expected := MustParseTerm(`{"foo", "BAR", "BAZ", "qux"}`).Value if result.Compare(expected) != 0 { t.Fatalf("Expected map result to be %v but got: %v", expected, result) } result, err = set.Map(func(*Term) (*Term, error) { return nil, fmt.Errorf("oops") }) if !reflect.DeepEqual(err, fmt.Errorf("oops")) { t.Fatalf("Expected oops to be returned but got: %v, %v", result, err) } }
explode_data.jsonl/2926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 1649, 2227, 1155, 353, 8840, 836, 8, 1476, 8196, 1669, 15465, 14463, 17249, 5809, 4913, 7975, 497, 330, 2257, 497, 330, 42573, 497, 330, 446, 87, 1, 59979, 1130, 12832, 1649, 692, 9559, 11, 1848, 1669, 738, 10104, 18552, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIsParameterizableValue(t *testing.T) { tests := []struct { val string expectedReturn bool }{ // Note, parens are also allowable here. These tests // are set up with braces and parens are substituted in the loop // to test both cases since they are handled the same. {"foo", false}, {"{foo}", false}, {"$foo}", false}, {"foo}", false}, {"{foo", false}, {"${foo", true}, {"${foo}", true}, } for _, test := range tests { if retVal := IsParameterizableValue(test.val); retVal != test.expectedReturn { t.Errorf("IsParameterizableValue with %s expected %t", test.val, test.expectedReturn) } // sub in parens and run again replaced := strings.Replace(test.val, "{", "(", -1) replaced = strings.Replace(replaced, "}", ")", -1) if retVal := IsParameterizableValue(replaced); retVal != test.expectedReturn { t.Errorf("IsParameterizableValue with %s expected %t", replaced, test.expectedReturn) } } }
explode_data.jsonl/17589
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 3872, 4971, 8335, 1130, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 19302, 310, 914, 198, 197, 42400, 5598, 1807, 198, 197, 59403, 197, 197, 322, 7036, 11, 23191, 4412, 525, 1083, 84752, 1588, 13, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHexToBytes(t *testing.T) { test := "abcdef1234567890" bytes := hexToBytes(test) bs := fmt.Sprintf("%x", bytes) if bs != test { t.Error("hex to string didn't work") } testHex := func(hex string) (gotError bool) { defer func() { gotError = recover() != nil }() _ = hexToBytes(hex) return false } if !testHex("abcdefg") { t.Error("no error on odd length hex string") } if testHex("abcdef") { t.Error("error on an even length hex string") } if !testHex("abcdefhi") { t.Error("didn't error on invalid characters") } if testHex("abcdef") { t.Error("error on valid characters") } }
explode_data.jsonl/35721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 253 }
[ 2830, 3393, 20335, 1249, 7078, 1155, 353, 8840, 836, 8, 341, 18185, 1669, 330, 41202, 16, 17, 18, 19, 20, 21, 22, 23, 24, 15, 698, 70326, 1669, 12371, 1249, 7078, 8623, 340, 93801, 1669, 8879, 17305, 4430, 87, 497, 5820, 340, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSelectStatement_Substatement(t *testing.T) { var tests = []struct { stmt string expr *influxql.VarRef sub string err string }{ // 0. Single series { stmt: `SELECT value FROM myseries WHERE value > 1`, expr: &influxql.VarRef{Val: "value"}, sub: `SELECT value FROM myseries WHERE value > 1`, }, // 1. Simple join { stmt: `SELECT sum(aa.value) + sum(bb.value) FROM aa, bb`, expr: &influxql.VarRef{Val: "aa.value"}, sub: `SELECT "aa.value" FROM aa`, }, // 2. Simple merge { stmt: `SELECT sum(aa.value) + sum(bb.value) FROM aa, bb`, expr: &influxql.VarRef{Val: "bb.value"}, sub: `SELECT "bb.value" FROM bb`, }, // 3. Join with condition { stmt: `SELECT sum(aa.value) + sum(bb.value) FROM aa, bb WHERE aa.host = 'servera' AND bb.host = 'serverb'`, expr: &influxql.VarRef{Val: "bb.value"}, sub: `SELECT "bb.value" FROM bb WHERE "bb.host" = 'serverb'`, }, // 4. Join with complex condition { stmt: `SELECT sum(aa.value) + sum(bb.value) FROM aa, bb WHERE aa.host = 'servera' AND (bb.host = 'serverb' OR bb.host = 'serverc') AND 1 = 2`, expr: &influxql.VarRef{Val: "bb.value"}, sub: `SELECT "bb.value" FROM bb WHERE ("bb.host" = 'serverb' OR "bb.host" = 'serverc') AND 1 = 2`, }, // 5. 4 with different condition order { stmt: `SELECT sum(aa.value) + sum(bb.value) FROM aa, bb WHERE ((bb.host = 'serverb' OR bb.host = 'serverc') AND aa.host = 'servera') AND 1 = 2`, expr: &influxql.VarRef{Val: "bb.value"}, sub: `SELECT "bb.value" FROM bb WHERE (("bb.host" = 'serverb' OR "bb.host" = 'serverc')) AND 1 = 2`, }, } for i, tt := range tests { // Parse statement. stmt, err := influxql.NewParser(strings.NewReader(tt.stmt)).ParseStatement() if err != nil { t.Fatalf("invalid statement: %q: %s", tt.stmt, err) } // Extract substatement. sub, err := stmt.(*influxql.SelectStatement).Substatement(tt.expr) if err != nil { t.Errorf("%d. %q: unexpected error: %s", i, tt.stmt, err) continue } if substr := sub.String(); tt.sub != substr { t.Errorf("%d. 
%q: unexpected substatement:\n\nexp=%s\n\ngot=%s\n\n", i, tt.stmt, tt.sub, substr) continue } } }
explode_data.jsonl/24803
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 962 }
[ 2830, 3393, 3379, 8636, 36359, 24184, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 55822, 914, 198, 197, 8122, 649, 353, 258, 36706, 1470, 87968, 3945, 198, 197, 28624, 220, 914, 198, 197, 9859, 220, 914, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestParseWithFuncsInvalidType(t *testing.T) { var c int err := env.ParseWithFuncs(&c, nil) assert.Error(t, err) assert.Equal(t, err, env.ErrNotAStructPtr) }
explode_data.jsonl/7497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 14463, 2354, 9626, 82, 7928, 929, 1155, 353, 8840, 836, 8, 341, 2405, 272, 526, 198, 9859, 1669, 6105, 8937, 2354, 9626, 82, 2099, 66, 11, 2092, 340, 6948, 6141, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, 1848, 11, 6105...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNewWithStackf(t *testing.T) { t.Parallel() expect := "abc def" actual := NewWithStackf("abc %s", "def") //goland:noinspection GoNilness assert.Equal(t, expect, actual.Error()) }
explode_data.jsonl/80869
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 3564, 2354, 4336, 69, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 24952, 1669, 330, 13683, 707, 1837, 88814, 1669, 1532, 2354, 4336, 69, 445, 13683, 1018, 82, 497, 330, 750, 5130, 197, 322, 70, 96440, 66479, 533...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestProposalResponseError(t *testing.T) { testError := fmt.Errorf("Test Error") user := mspmocks.NewMockSigningIdentity("test", "1234") ctx := mocks.NewMockContext(user) mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() proc := mock_context.NewMockProposalProcessor(mockCtrl) proc2 := mock_context.NewMockProposalProcessor(mockCtrl) stp, err := signProposal(ctx, &pb.Proposal{}) if err != nil { t.Fatalf("signProposal returned error: %s", err) } tp := fab.ProcessProposalRequest{ SignedProposal: stp, } // Test with error from lower layer tpr := fab.TransactionProposalResponse{Endorser: "example.com", Status: 200} proc.EXPECT().ProcessTransactionProposal(gomock.Any(), tp).Return(&tpr, testError) proc2.EXPECT().ProcessTransactionProposal(gomock.Any(), tp).Return(&tpr, testError) reqCtx, cancel := context.NewRequest(ctx, context.WithTimeout(10*time.Second)) defer cancel() targets := []fab.ProposalProcessor{proc, proc2} _, err = SendProposal(reqCtx, &fab.TransactionProposal{ Proposal: &pb.Proposal{}, }, targets) errs, ok := err.(multi.Errors) assert.True(t, ok, "expected multi errors object") assert.Equal(t, testError, errs[0]) }
explode_data.jsonl/26400
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 98637, 2582, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 1454, 1669, 8879, 13080, 445, 2271, 4600, 5130, 19060, 1669, 296, 2154, 16712, 82, 7121, 11571, 93358, 18558, 445, 1944, 497, 330, 16, 17, 18, 19, 1138, 20985, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAssertJSON(t *testing.T) { cases := []struct { e, a string asserts bool }{ { e: `{"RecursiveStruct":{"RecursiveMap":{"foo":{"NoRecurse":"foo"},"bar":{"NoRecurse":"bar"}}}}`, a: `{"RecursiveStruct":{"RecursiveMap":{"bar":{"NoRecurse":"bar"},"foo":{"NoRecurse":"foo"}}}}`, asserts: true, }, } for i, c := range cases { mockT := &testing.T{} if awstesting.AssertJSON(mockT, c.e, c.a) != c.asserts { t.Error("Assert JSON result was not expected.", i) } } }
explode_data.jsonl/6219
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 8534, 5370, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 7727, 11, 264, 262, 914, 198, 197, 6948, 82, 1807, 198, 197, 59403, 197, 197, 515, 298, 7727, 25, 981, 1565, 4913, 78542, 9422, 22317, 785...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBeacon(t *testing.T) { c := NewClient(time.Second) conn, err := net.ListenUDP("udp6", &net.UDPAddr{ IP: net.IPv6zero, Port: c.AdvertPort, }) if err != nil { t.Fatalf("unable to listen UDP: %v", err) } defer conn.Close() _, err = c.Beacon() if err == nil { t.Errorf("Expected error for multiple Beacon() calls") } }
explode_data.jsonl/78495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 3430, 22379, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 2959, 9730, 32435, 340, 32917, 11, 1848, 1669, 4179, 68334, 41648, 445, 31101, 21, 497, 609, 4711, 13, 41648, 13986, 515, 197, 197, 3298, 25, 256, 4179, 46917, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNoShowMessage_Issue37279(t *testing.T) { const noModule = ` -- a.go -- package foo func f() { } ` runner.Run(t, noModule, func(t *testing.T, env *Env) { env.OpenFile("a.go") env.Await( OnceMet( CompletedWork(lsp.DiagnosticWorkTitle(lsp.FromDidOpen), 1), NoDiagnostics("a.go"), ), NoShowMessage(), ) // introduce an error, expect no Show Message env.RegexpReplace("a.go", "func", "fun") env.Await(env.DiagnosticAtRegexp("a.go", "fun"), NoShowMessage()) }) }
explode_data.jsonl/38918
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 2753, 7812, 2052, 7959, 83890, 18, 22, 17, 22, 24, 1155, 353, 8840, 836, 8, 341, 4777, 902, 3332, 284, 22074, 313, 264, 18002, 39514, 1722, 15229, 271, 2830, 282, 368, 341, 532, 3989, 197, 41736, 16708, 1155, 11, 902, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRuncStarted(t *testing.T) { ctx, timeout := context.WithTimeout(context.Background(), 10*time.Second) defer timeout() dummyCommand, err := dummySleepRunc() if err != nil { t.Fatalf("Failed to create dummy sleep runc: %s", err) } defer os.Remove(dummyCommand) sleepRunc := &Runc{ Command: dummyCommand, } var wg sync.WaitGroup defer wg.Wait() started := make(chan int) wg.Add(1) go func() { defer wg.Done() interrupt(ctx, t, started) }() status, err := sleepRunc.Run(ctx, "fake-id", "fake-bundle", &CreateOpts{ Started: started, }) if err == nil { t.Fatal("Expected error from Run, but got nil") } if status != -1 { t.Fatalf("Expected exit status 0 from Run, got %d", status) } started = make(chan int) wg.Add(1) go func() { defer wg.Done() interrupt(ctx, t, started) }() err = sleepRunc.Exec(ctx, "fake-id", specs.Process{}, &ExecOpts{ Started: started, }) if err == nil { t.Fatal("Expected error from Exec, but got nil") } status = extractStatus(err) if status != -1 { t.Fatalf("Expected exit status -1 from Exec, got %d", status) } started = make(chan int) wg.Add(1) go func() { defer wg.Done() interrupt(ctx, t, started) }() io, err := NewSTDIO() if err != nil { t.Fatalf("Unexpected error from NewSTDIO: %s", err) } err = sleepRunc.Exec(ctx, "fake-id", specs.Process{}, &ExecOpts{ IO: io, Started: started, }) if err == nil { t.Fatal("Expected error from Exec, but got nil") } status = extractStatus(err) if status != -1 { t.Fatalf("Expected exit status 1 from Exec, got %d", status) } }
explode_data.jsonl/29503
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 661 }
[ 2830, 3393, 49, 1347, 32527, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9632, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 220, 16, 15, 77053, 32435, 340, 16867, 9632, 2822, 2698, 8574, 4062, 11, 1848, 1669, 17292, 41745, 49, 1347, 741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMotorDriverDigital(t *testing.T) { d := initTestMotorDriver() d.SpeedPin = "" // Disable speed d.CurrentMode = "digital" d.ForwardPin = "2" d.BackwardPin = "3" d.On() gobottest.Assert(t, d.CurrentState, uint8(1)) d.Off() gobottest.Assert(t, d.CurrentState, uint8(0)) }
explode_data.jsonl/23140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 33577, 11349, 38112, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 2930, 2271, 33577, 11349, 741, 2698, 84461, 19861, 284, 1591, 442, 28027, 4628, 198, 2698, 11517, 3636, 284, 330, 57269, 698, 2698, 26676, 1606, 19861, 284, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatExp(t *testing.T) { src := NewMatWithSize(10, 10, MatTypeCV32F) dst := NewMat() Exp(src, &dst) if dst.Empty() { t.Error("TestExp dst should not be empty.") } src.Close() dst.Close() }
explode_data.jsonl/81739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 11575, 8033, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 1532, 11575, 2354, 1695, 7, 16, 15, 11, 220, 16, 15, 11, 6867, 929, 19589, 18, 17, 37, 340, 52051, 1669, 1532, 11575, 741, 197, 8033, 14705, 11, 609, 15658, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSysStatsHandler(t *testing.T) { cfg := initTest() r := gofight.New() r.GET("/sys/stats"). Run(routerEngine(cfg, q), func(r gofight.HTTPResponse, rq gofight.HTTPRequest) { assert.Equal(t, http.StatusOK, r.Code) }) }
explode_data.jsonl/67615
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 32792, 16635, 3050, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 2930, 2271, 2822, 7000, 1669, 728, 21143, 7121, 2822, 7000, 17410, 4283, 7791, 94933, 38609, 197, 85952, 61210, 4571, 28272, 11, 2804, 701, 2915, 2601, 728, 21143,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetPeerIDs(t *testing.T) { ApplyDeadline = false id1 := tnet.RandIdentityOrFatal(t) mn := mocknet.New(context.Background()) // add peers to mock net a1 := tnet.RandLocalTCPAddress() h1, err := mn.AddPeer(id1.PrivateKey(), a1) if err != nil { t.Fatal(err) } p1 := h1.ID() timeout := time.Second * 2 pc := NewPartyCoordinator(h1, timeout) r, err := pc.getPeerIDs([]string{}) assert.Nil(t, err) assert.Len(t, r, 0) input := []string{ p1.String(), } r1, err := pc.getPeerIDs(input) assert.Nil(t, err) assert.Len(t, r1, 1) assert.Equal(t, r1[0], p1) input = append(input, "whatever") r2, err := pc.getPeerIDs(input) assert.NotNil(t, err) assert.Len(t, r2, 0) }
explode_data.jsonl/10310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 1949, 30888, 30466, 1155, 353, 8840, 836, 8, 341, 197, 28497, 83593, 284, 895, 198, 15710, 16, 1669, 259, 4711, 2013, 437, 18558, 2195, 62396, 1155, 340, 2109, 77, 1669, 7860, 4711, 7121, 5378, 19047, 2398, 197, 322, 912, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBase58Check(t *testing.T) { for x, test := range checkEncodingStringTests { // test encoding if res := base58.CheckEncode([]byte(test.in), test.version); res != test.out { t.Errorf("CheckEncode test #%d failed: got %s, want: %s", x, res, test.out) } // test decoding res, version, err := base58.CheckDecode(test.out) if err != nil { t.Errorf("CheckDecode test #%d failed with err: %v", x, err) } else if version != test.version { t.Errorf("CheckDecode test #%d failed: got version: %d want: %d", x, version, test.version) } else if string(res) != test.in { t.Errorf("CheckDecode test #%d failed: got: %s want: %s", x, res, test.in) } } // test the two decoding failure cases // case 1: checksum error _, _, err := base58.CheckDecode("3MNQE1Y") if err != base58.ErrChecksum { t.Error("Checkdecode test failed, expected ErrChecksum") } // case 2: invalid formats (string lengths below 5 mean the version byte and/or the checksum // bytes are missing). testString := "" for len := 0; len < 4; len++ { // make a string of length `len` _, _, err = base58.CheckDecode(testString) if err != base58.ErrInvalidFormat { t.Error("Checkdecode test failed, expected ErrInvalidFormat") } } }
explode_data.jsonl/76545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 469 }
[ 2830, 3393, 3978, 20, 23, 3973, 1155, 353, 8840, 836, 8, 341, 2023, 856, 11, 1273, 1669, 2088, 1779, 14690, 703, 18200, 341, 197, 197, 322, 1273, 11170, 198, 197, 743, 592, 1669, 2331, 20, 23, 10600, 32535, 10556, 3782, 8623, 1858, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestConcurrentAdd(t *testing.T) { _, c := New(0, 3) wg := sync.WaitGroup{} wg.Add(10) for i := 0; i < 10; i++ { go func() { defer wg.Done() c.AddTicks(1) }() } wg.Wait() now := c.Now() if now.GetTicks() < 10 { t.Fatal("There must be exactly 10 ticks") } }
explode_data.jsonl/76176
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 1109, 3231, 2212, 1155, 353, 8840, 836, 8, 341, 197, 6878, 272, 1669, 1532, 7, 15, 11, 220, 18, 340, 72079, 1669, 12811, 28384, 2808, 16094, 72079, 1904, 7, 16, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunWithBuildArgs(t *testing.T) { b := newBuilderWithMockBackend() args := newBuildArgs(make(map[string]*string)) args.argsFromOptions["HTTP_PROXY"] = strPtr("FOO") b.disableCommit = false sb := newDispatchRequest(b, '`', nil, args, newStagesBuildResults()) runConfig := &container.Config{} origCmd := strslice.StrSlice([]string{"cmd", "in", "from", "image"}) cmdWithShell := strslice.StrSlice(append(getShell(runConfig, runtime.GOOS), "echo foo")) envVars := []string{"|1", "one=two"} cachedCmd := strslice.StrSlice(append(envVars, cmdWithShell...)) imageCache := &mockImageCache{ getCacheFunc: func(parentID string, cfg *container.Config) (string, error) { // Check the runConfig.Cmd sent to probeCache() assert.Equal(t, cachedCmd, cfg.Cmd) assert.Equal(t, strslice.StrSlice(nil), cfg.Entrypoint) return "", nil }, } mockBackend := b.docker.(*MockBackend) mockBackend.makeImageCacheFunc = func(_ []string, _ string) builder.ImageCache { return imageCache } b.imageProber = newImageProber(mockBackend, nil, runtime.GOOS, false) mockBackend.getImageFunc = func(_ string) (builder.Image, builder.ReleaseableLayer, error) { return &mockImage{ id: "abcdef", config: &container.Config{Cmd: origCmd}, }, nil, nil } mockBackend.containerCreateFunc = func(config types.ContainerCreateConfig) (container.ContainerCreateCreatedBody, error) { // Check the runConfig.Cmd sent to create() assert.Equal(t, cmdWithShell, config.Config.Cmd) assert.Contains(t, config.Config.Env, "one=two") assert.Equal(t, strslice.StrSlice{""}, config.Config.Entrypoint) return container.ContainerCreateCreatedBody{ID: "12345"}, nil } mockBackend.commitFunc = func(cID string, cfg *backend.ContainerCommitConfig) (string, error) { // Check the runConfig.Cmd sent to commit() assert.Equal(t, origCmd, cfg.Config.Cmd) assert.Equal(t, cachedCmd, cfg.ContainerConfig.Cmd) assert.Equal(t, strslice.StrSlice(nil), cfg.Config.Entrypoint) return "", nil } from := &instructions.Stage{BaseName: "abcdef"} err := initializeStage(sb, 
from) require.NoError(t, err) sb.state.buildArgs.AddArg("one", strPtr("two")) run := &instructions.RunCommand{ ShellDependantCmdLine: instructions.ShellDependantCmdLine{ CmdLine: strslice.StrSlice{"echo foo"}, PrependShell: true, }, } require.NoError(t, dispatch(sb, run)) // Check that runConfig.Cmd has not been modified by run assert.Equal(t, origCmd, sb.state.runConfig.Cmd) }
explode_data.jsonl/34880
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 919 }
[ 2830, 3393, 51918, 11066, 4117, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 501, 3297, 2354, 11571, 29699, 741, 31215, 1669, 501, 11066, 4117, 36944, 9147, 14032, 8465, 917, 1171, 31215, 16365, 3830, 3798, 1183, 9230, 59065, 1341, 284, 607,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProxyTracingDefaultOptions(t *testing.T) { t1 := newProxyTracing(nil) if t1.tracer == nil || t1.initialOperationName == "" { t.Errorf("did not set default options") } t2 := newProxyTracing(&OpenTracingParams{}) if t2.tracer == nil || t2.initialOperationName == "" { t.Errorf("did not set default options") } }
explode_data.jsonl/50651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 16219, 1282, 4527, 3675, 3798, 1155, 353, 8840, 836, 8, 341, 3244, 16, 1669, 501, 16219, 1282, 4527, 27907, 340, 743, 259, 16, 5427, 9584, 621, 2092, 1369, 259, 16, 17793, 8432, 675, 621, 1591, 341, 197, 3244, 13080, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCheckRetryHandles429And5xx(t *testing.T) { count := 0 ch := make(chan struct{}) testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { data, err := ioutil.ReadAll(req.Body) if err != nil { t.Fatalf("unable to read request body: %v", err) } if !bytes.Equal(data, []byte(strings.Repeat("abcd", 1000))) { t.Fatalf("retry did not send a complete body: %s", data) } t.Logf("attempt %d", count) if count >= 4 { w.WriteHeader(http.StatusOK) close(ch) return } w.Header().Set("Retry-After", "0") w.WriteHeader([]int{http.StatusTooManyRequests, 500, 501, 504}[count]) count++ })) defer testServer.Close() c := testRESTClient(t, testServer) _, err := c.Verb("POST"). Prefix("foo", "bar"). Suffix("baz"). Timeout(time.Second). Body([]byte(strings.Repeat("abcd", 1000))). DoRaw() if err != nil { t.Fatalf("Unexpected error: %v %#v", err, err) } <-ch if count != 4 { t.Errorf("unexpected retries: %d", count) } }
explode_data.jsonl/13274
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 3973, 51560, 65928, 19, 17, 24, 3036, 20, 4146, 1155, 353, 8840, 836, 8, 341, 18032, 1669, 220, 15, 198, 23049, 1669, 1281, 35190, 2036, 37790, 18185, 5475, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestExportAppNonAdminTenant(t *testing.T) { subscriberUserName := subscriber.UserName + "@" + TENANT1 subscriberPassword := subscriber.Password dev := apimClients[0] app := addApp(t, dev, subscriberUserName, subscriberPassword) args := &appImportExportTestArgs{ appOwner: credentials{username: subscriberUserName, password: subscriberPassword}, ctlUser: credentials{username: subscriberUserName, password: subscriberPassword}, application: app, srcAPIM: dev, } validateAppExportFailure(t, args) }
explode_data.jsonl/42431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 16894, 2164, 8121, 7210, 71252, 1155, 353, 8840, 836, 8, 341, 28624, 20351, 18856, 1669, 32115, 43672, 488, 96848, 488, 74266, 2821, 16, 198, 28624, 20351, 4876, 1669, 32115, 25690, 271, 27302, 1669, 1443, 318, 47174, 58, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPathPrefixOrEqual(t *testing.T) { if !isPathPrefixOrEqual("foo", "foo") { t.Error("Same path should return true") } if isPathPrefixOrEqual("foo", "fooer") { t.Error("foo is not a path-type prefix of fooer") } if !isPathPrefixOrEqual("foo", "foo/bar") { t.Error("foo is a path prefix of foo/bar") } if isPathPrefixOrEqual("foo", "foo/") { t.Error("special case - foo is not a path prefix of foo/") } }
explode_data.jsonl/6214
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 1820, 14335, 2195, 2993, 1155, 353, 8840, 836, 8, 972, 743, 753, 285, 1820, 14335, 2195, 2993, 445, 7975, 497, 330, 7975, 899, 972, 197, 3244, 6141, 445, 19198, 1815, 1265, 470, 830, 6060, 197, 2570, 743, 374, 1820, 14335,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSetState(t *testing.T) { l := NewList("A list") li1 := l.Add("First level item") li2 := li1.Add("Second level item") if li1.state != TODO { t.Fatalf("Set state: First test, expected TODO, got %v", li1.state) } if li2.state != TODO { t.Fatalf("Set state: Second test, expected TODO, got %v", li2.state) } li2.state = DONE if li2.state != DONE { t.Fatalf("Set state: Second test, expected DONE, got %v", li2.state) } }
explode_data.jsonl/69496
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 1649, 1397, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 1532, 852, 445, 32, 1140, 1138, 197, 742, 16, 1669, 326, 1904, 445, 5338, 2188, 1509, 1138, 197, 742, 17, 1669, 898, 16, 1904, 445, 15666, 2188, 1509, 1138, 743, 898,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTLSCodec_10(t *testing.T) { if gtest.RunProcess(t, func() { x509SystemCertPool = func() (*x509.CertPool, error) { return nil, fmt.Errorf("x509SystemCertPool_Error") } codec1 := NewTLSClientCodec() codec1.LoadSystemCas() l, _ := net.Listen("tcp", ":0") _, p, _ := net.SplitHostPort(l.Addr().String()) c, _ := Dial("127.0.0.1:"+p, time.Second) c.AddCodec(codec1) _, err := WriteTo(c, "") t.Log(err) }) { return } out, _, _ := gtest.NewProcess(t).Wait() assert.True(t, strings.Contains(out, "x509SystemCertPool_Error")) }
explode_data.jsonl/34698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 45439, 36913, 62, 16, 15, 1155, 353, 8840, 836, 8, 341, 743, 342, 1944, 16708, 7423, 1155, 11, 2915, 368, 341, 197, 10225, 20, 15, 24, 2320, 36934, 10551, 284, 2915, 368, 4609, 87, 20, 15, 24, 727, 529, 10551, 11, 1465...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunner_Processor_Signal_Observer_OK(t *testing.T) { t.Parallel() repo := &RepositoryMock{} publisher := &PublisherMock{} r := New(repo, WithPublisher(100, publisher)) ctx := context.Background() ctx, cancel := context.WithCancel(ctx) repo.GetLastEventsFunc = func(ctx context.Context, limit uint64) ([]Event, error) { return nil, nil } repo.GetLastSequenceFunc = func(ctx context.Context, id PublisherID) (uint64, error) { return 0, nil } repo.GetUnprocessedEventsFunc = func(ctx context.Context, limit uint64) ([]Event, error) { return []Event{ {ID: 100}, {ID: 99}, {ID: 101}, {ID: 120}, {ID: 130}, }, nil } repo.UpdateSequencesFunc = func(ctx context.Context, events []Event) error { return nil } publisher.PublishFunc = func(ctx context.Context, events []Event) error { return nil } repo.SaveLastSequenceFunc = func(ctx context.Context, id PublisherID, seq uint64) error { return nil } var wg sync.WaitGroup wg.Add(1) go func() { defer wg.Done() r.Run(ctx) }() r.Signal() obs := r.NewObserver(1, 3) events := obs.GetNextEvents(ctx, nil) assert.Equal(t, []Event{ {ID: 100, Sequence: 1}, {ID: 99, Sequence: 2}, {ID: 101, Sequence: 3}, }, events) time.Sleep(40 * time.Millisecond) cancel() wg.Wait() assert.Equal(t, 1, len(repo.GetLastEventsCalls())) assert.Equal(t, 1, len(repo.GetUnprocessedEventsCalls())) publishCalls := publisher.PublishCalls() assert.Equal(t, 1, len(publishCalls)) assert.Equal(t, []Event{ {ID: 100, Sequence: 1}, {ID: 99, Sequence: 2}, {ID: 101, Sequence: 3}, {ID: 120, Sequence: 4}, {ID: 130, Sequence: 5}, }, publishCalls[0].Events) saveCalls := repo.SaveLastSequenceCalls() assert.Equal(t, 1, len(saveCalls)) assert.Equal(t, PublisherID(100), saveCalls[0].ID) assert.Equal(t, uint64(5), saveCalls[0].Seq) }
explode_data.jsonl/21369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 763 }
[ 2830, 3393, 19486, 70241, 269, 1098, 25719, 2232, 65, 4030, 8375, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 17200, 5368, 1669, 609, 4624, 11571, 16094, 3223, 15182, 1669, 609, 34550, 11571, 31483, 7000, 1669, 1532, 50608, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestL1Instructions(t *testing.T) { _, err := L1Instructions( unix.PERF_COUNT_HW_CACHE_OP_READ, unix.PERF_COUNT_HW_CACHE_RESULT_MISS, func() error { return nil }, ) if err != nil { t.Fatal(err) } }
explode_data.jsonl/33123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 43, 16, 55291, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 444, 16, 55291, 1006, 197, 20479, 941, 47320, 37, 14672, 44013, 29138, 13908, 13117, 345, 197, 20479, 941, 47320, 37, 14672, 44013, 29138, 21181, 63608, 345...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetProfileImage(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() user := th.BasicUser data, err := testutils.ReadTestFile("test.png") require.NoError(t, err) _, err = th.Client.SetProfileImage(user.Id, data) require.NoError(t, err) resp, err := th.Client.SetProfileImage(model.NewId(), data) require.Error(t, err) CheckForbiddenStatus(t, resp) // status code returns either forbidden or unauthorized // note: forbidden is set as default at Client4.SetProfileImage when request is terminated early by server th.Client.Logout() resp, err = th.Client.SetProfileImage(user.Id, data) require.Error(t, err) if resp.StatusCode == http.StatusForbidden { CheckForbiddenStatus(t, resp) } else if resp.StatusCode == http.StatusUnauthorized { CheckUnauthorizedStatus(t, resp) } else { require.Fail(t, "Should have failed either forbidden or unauthorized") } buser, appErr := th.App.GetUser(user.Id) require.Nil(t, appErr) _, err = th.SystemAdminClient.SetProfileImage(user.Id, data) require.NoError(t, err) ruser, appErr := th.App.GetUser(user.Id) require.Nil(t, appErr) assert.True(t, buser.LastPictureUpdate == ruser.LastPictureUpdate, "Same picture should not have updated") data2, err := testutils.ReadTestFile("testjpg.jpg") require.NoError(t, err) _, err = th.SystemAdminClient.SetProfileImage(user.Id, data2) require.NoError(t, err) ruser, appErr = th.App.GetUser(user.Id) require.Nil(t, appErr) assert.True(t, buser.LastPictureUpdate < ruser.LastPictureUpdate, "Picture should have updated for user") info := &model.FileInfo{Path: "users/" + user.Id + "/profile.png"} err = th.cleanupTestFile(info) require.NoError(t, err) }
explode_data.jsonl/47538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 607 }
[ 2830, 3393, 1649, 8526, 1906, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 19060, 1669, 270, 48868, 1474, 271, 8924, 11, 1848, 1669, 1273, 6031, 6503, 2271, 1703, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGenerateSSGenBlockRef(t *testing.T) { var tests = []struct { blockHash string height uint32 expected []byte }{ { "0000000000004740ad140c86753f9295e09f9cc81b1bb75d7f5552aeeedb7012", 1000, hexToBytes("6a241270dbeeae52557f5db71b1bc89c9fe095923" + "f75860c14ad4047000000000000e8030000"), }, { "000000000000000033eafc268a67c8d1f02343d7a96cf3fe2a4915ef779b52f9", 290000, hexToBytes("6a24f9529b77ef15492afef36ca9d74323f0d1c86" + "78a26fcea330000000000000000d06c0400"), }, } for _, test := range tests { h, err := chainhash.NewHashFromStr(test.blockHash) if err != nil { t.Errorf("NewHashFromStr failed: %v", err) continue } s, err := GenerateSSGenBlockRef(*h, test.height) if err != nil { t.Errorf("GenerateSSGenBlockRef failed: %v", err) continue } if !bytes.Equal(s, test.expected) { t.Errorf("GenerateSSGenBlockRef: unexpected script:\n"+ " got %x\nwant %x", s, test.expected) } } }
explode_data.jsonl/29689
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 463 }
[ 2830, 3393, 31115, 1220, 9967, 4713, 3945, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 47996, 6370, 914, 198, 197, 30500, 262, 2622, 18, 17, 198, 197, 42400, 220, 3056, 3782, 198, 197, 59403, 197, 197, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPodStoreError1(t *testing.T) { t.Parallel() ms, ctrl, pod, executor := setup(t) defer ctrl.Finish() // vars genericError := errors.New("some error") // Expectations ms.EXPECT().GetPodFlag(pod, events.PodCreatePodResponse).Return(nil, genericError) // Run code under test out, err := executor(events.PodCreatePodResponse) // Assert assert.Error(t, err) assert.False(t, IsExpected(err)) assert.Nil(t, out) }
explode_data.jsonl/35139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 23527, 6093, 1454, 16, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 47691, 11, 23743, 11, 7509, 11, 31558, 1669, 6505, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 322, 19942, 198, 3174, 3469, 1454, 1669, 5975, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStartMGR(t *testing.T) { executor := &exectest.MockExecutor{ MockExecuteCommandWithOutputFile: func(debug bool, actionName string, command string, outFileArg string, args ...string) (string, error) { return "{\"key\":\"mysecurekey\"}", nil }, } configDir, _ := ioutil.TempDir("", "") defer os.RemoveAll(configDir) context := &clusterd.Context{ Executor: executor, ConfigDir: configDir, Clientset: testop.New(3)} volSize := resource.NewQuantity(100000.0, resource.BinarySI) c := New(context, "ns", "myversion", "", "", *volSize, rookalpha.Annotations{}, rookalpha.Placement{}, edgefsv1beta1.NetworkSpec{}, edgefsv1beta1.DashboardSpec{}, v1.ResourceRequirements{}, "", metav1.OwnerReference{}, false) // start a basic service err := c.Start("edgefs") assert.Nil(t, err) validateStart(t, c) }
explode_data.jsonl/4575
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 308 }
[ 2830, 3393, 3479, 44, 8626, 1155, 353, 8840, 836, 8, 341, 67328, 4831, 1669, 609, 327, 439, 477, 24664, 25255, 515, 197, 9209, 1176, 17174, 4062, 2354, 5097, 1703, 25, 2915, 42154, 1807, 11, 1917, 675, 914, 11, 3210, 914, 11, 72809, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSpanPropagation(t *testing.T) { var rec tracetest.SpanRecorder tracer, err := opentelemetry.NewTracer(opentelemetry.TracerOptions{ Tracer: sdktrace.NewTracerProvider(sdktrace.WithSpanProcessor(&rec)).Tracer(""), }) require.NoError(t, err) interceptortest.AssertSpanPropagation(t, &testTracer{Tracer: tracer, rec: &rec}) }
explode_data.jsonl/72250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 12485, 35172, 1155, 353, 8840, 836, 8, 341, 2405, 1395, 489, 580, 57824, 85309, 47023, 198, 25583, 9584, 11, 1848, 1669, 1179, 6817, 35958, 7121, 1282, 9584, 17096, 6817, 35958, 8240, 9584, 3798, 515, 197, 197, 1282, 9584, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntegration(t *testing.T) { // Do not truncate Gomega matcher output // The buildpack output text can be large and we often want to see all of it. format.MaxLength = 0 Expect := NewWithT(t).Expect file, err := os.Open("../integration.json") Expect(err).NotTo(HaveOccurred()) Expect(json.NewDecoder(file).Decode(&settings.Config)).To(Succeed()) Expect(file.Close()).To(Succeed()) file, err = os.Open("../buildpack.toml") Expect(err).NotTo(HaveOccurred()) _, err = toml.NewDecoder(file).Decode(&buildpackInfo) Expect(err).NotTo(HaveOccurred()) root, err := filepath.Abs("./..") Expect(err).ToNot(HaveOccurred()) buildpackStore := occam.NewBuildpackStore() settings.Buildpacks.Poetry.Online, err = buildpackStore.Get. WithVersion("1.2.3"). Execute(root) Expect(err).NotTo(HaveOccurred()) settings.Buildpacks.Poetry.Offline, err = buildpackStore.Get. WithVersion("1.2.3"). WithOfflineDependencies(). Execute(root) Expect(err).NotTo(HaveOccurred()) settings.Buildpacks.Pip.Online, err = buildpackStore.Get. Execute(settings.Config.Pip) Expect(err).NotTo(HaveOccurred()) settings.Buildpacks.Pip.Offline, err = buildpackStore.Get. WithOfflineDependencies(). Execute(settings.Config.Pip) Expect(err).NotTo(HaveOccurred()) settings.Buildpacks.CPython.Online, err = buildpackStore.Get. Execute(settings.Config.CPython) Expect(err).NotTo(HaveOccurred()) settings.Buildpacks.CPython.Offline, err = buildpackStore.Get. WithOfflineDependencies(). Execute(settings.Config.CPython) Expect(err).NotTo(HaveOccurred()) settings.Buildpacks.BuildPlan.Online, err = buildpackStore.Get. Execute(settings.Config.BuildPlan) Expect(err).NotTo(HaveOccurred()) SetDefaultEventuallyTimeout(5 * time.Second) suite := spec.New("Integration", spec.Report(report.Terminal{})) suite("Default", testDefault, spec.Parallel()) suite("LayerReuse", testLayerReuse, spec.Parallel()) suite.Run(t) }
explode_data.jsonl/81043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 700 }
[ 2830, 3393, 52464, 1155, 353, 8840, 836, 8, 341, 197, 322, 3155, 537, 56772, 479, 32696, 36052, 2550, 198, 197, 322, 576, 1936, 4748, 2550, 1467, 646, 387, 3460, 323, 582, 3545, 1366, 311, 1490, 678, 315, 432, 624, 59416, 70092, 284, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_underscore_arrays_13(t *testing.T) { tt(t, func() { test, _ := test_() test(` test("indexOf", function() { var numbers = [1, 2, 3]; numbers.indexOf = null; equal(_.indexOf(numbers, 2), 1, 'can compute indexOf, even without the native function'); var result = (function(){ return _.indexOf(arguments, 2); })(1, 2, 3); equal(result, 1, 'works on an arguments object'); equal(_.indexOf(null, 2), -1, 'handles nulls properly'); var numbers = [10, 20, 30, 40, 50], num = 35; var index = _.indexOf(numbers, num, true); equal(index, -1, '35 is not in the list'); numbers = [10, 20, 30, 40, 50]; num = 40; index = _.indexOf(numbers, num, true); equal(index, 3, '40 is in the list'); numbers = [1, 40, 40, 40, 40, 40, 40, 40, 50, 60, 70]; num = 40; index = _.indexOf(numbers, num, true); equal(index, 1, '40 is in the list'); numbers = [1, 2, 3, 1, 2, 3, 1, 2, 3]; index = _.indexOf(numbers, 2, 5); equal(index, 7, 'supports the fromIndex argument'); }); `) }) }
explode_data.jsonl/68907
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 62, 53933, 68983, 62, 16, 18, 1155, 353, 8840, 836, 8, 972, 3244, 83, 1155, 11, 2915, 368, 972, 197, 18185, 11, 716, 1669, 1273, 62, 18005, 197, 18185, 5809, 319, 220, 1273, 445, 29849, 497, 729, 368, 972, 262, 762, 51...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileExistenceCheckPropagatesError(t *testing.T) { check := createFileCheckBatch(t, "id", []*ipb.FileCheck{&ipb.FileCheck{ FilesToCheck: []*ipb.FileSet{testconfigcreator.SingleFileWithPath(unreadableFilePath)}, CheckType: &ipb.FileCheck_Existence{Existence: &ipb.ExistenceCheck{ShouldExist: true}}, }}, newFakeAPI()) if _, err := check.Exec(); err == nil { t.Errorf("check.Exec() didn't return an error") } }
explode_data.jsonl/24471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 1703, 25613, 763, 3973, 2008, 351, 973, 1454, 1155, 353, 8840, 836, 8, 341, 25157, 1669, 1855, 1703, 3973, 21074, 1155, 11, 330, 307, 497, 29838, 573, 65, 8576, 3973, 90, 5, 573, 65, 8576, 3973, 515, 197, 197, 10809, 124...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetQueryOptions_InvalidInitialPageIsChangedToDefault(t *testing.T) { queryOptions := getQueryOptions([]spi.QueryOption{spi.WithInitialPageNum(-1)}) require.Equal(t, 0, queryOptions.InitialPageNum) }
explode_data.jsonl/72593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 1949, 2859, 3798, 62, 7928, 6341, 2665, 3872, 5389, 1249, 3675, 1155, 353, 8840, 836, 8, 341, 27274, 3798, 1669, 633, 2859, 3798, 10556, 39157, 15685, 5341, 90, 39157, 26124, 6341, 2665, 4651, 4080, 16, 59209, 17957, 12808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSnakeToCamel(t *testing.T) { var testCases = []struct { line int input string expected string }{ { line: line(), input: "FOO_BAR", expected: "FooBar", }, { line: line(), input: "foo_bar", expected: "FooBar", }, } _, testFile, _, _ := runtime.Caller(0) for _, tc := range testCases { tc := tc t.Run(fmt.Sprintf("marshal %d", tc.line), func(t *testing.T) { t.Parallel() linkToExample := fmt.Sprintf("%s:%d", testFile, tc.line) received := dotenvgen.SnakeToCamel(tc.input) if received != tc.expected { t.Errorf("\nexpected: snakeToCamel(%q) == %q\nreceived: snakeToCamel(%q) == %q\ncase: %s", tc.input, tc.expected, tc.input, received, linkToExample) } }) } }
explode_data.jsonl/62010
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 355 }
[ 2830, 3393, 83420, 1249, 25406, 301, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 37302, 284, 3056, 1235, 341, 197, 27109, 257, 526, 198, 197, 22427, 262, 914, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 515, 298, 27109, 25, 257, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRenderKubeProxy(t *testing.T) { g := NewGomegaWithT(t) c := &operv1.NetworkSpec{ ClusterNetwork: []operv1.ClusterNetworkEntry{ { CIDR: "192.168.0.0/14", HostPrefix: 23, }, }, DefaultNetwork: operv1.DefaultNetworkDefinition{Type: "Flannel"}, KubeProxyConfig: &operv1.ProxyConfig{ IptablesSyncPeriod: "42s", }, } fillKubeProxyDefaults(c, nil) objs, err := renderStandaloneKubeProxy(c, &FakeKubeProxyBootstrapResult, manifestDir) g.Expect(err).NotTo(HaveOccurred()) g.Expect(objs).To(HaveLen(10)) // Make sure the arguments to kube-proxy are reasonable found := false for _, obj := range objs { if obj.GetAPIVersion() == "v1" && obj.GetKind() == "ConfigMap" && obj.GetName() == "proxy-config" { if found == true { t.Fatal("Two kube-proxy configmaps!?") } found = true val, ok, err := uns.NestedString(obj.Object, "data", "kube-proxy-config.yaml") g.Expect(ok).To(BeTrue()) g.Expect(err).NotTo(HaveOccurred()) g.Expect(val).To(MatchYAML(` apiVersion: kubeproxy.config.k8s.io/v1alpha1 bindAddress: 0.0.0.0 bindAddressHardFail: false clientConnection: acceptContentTypes: "" burst: 0 contentType: "" kubeconfig: "" qps: 0 clusterCIDR: 192.168.0.0/14 configSyncPeriod: 0s conntrack: maxPerCore: null min: null tcpCloseWaitTimeout: null tcpEstablishedTimeout: null detectLocalMode: "" enableProfiling: false healthzBindAddress: 0.0.0.0:10255 hostnameOverride: "" iptables: masqueradeAll: false masqueradeBit: null minSyncPeriod: 0s syncPeriod: 42s ipvs: excludeCIDRs: null minSyncPeriod: 0s scheduler: "" strictARP: false syncPeriod: 0s tcpFinTimeout: 0s tcpTimeout: 0s udpTimeout: 0s kind: KubeProxyConfiguration metricsBindAddress: 0.0.0.0:29102 mode: iptables nodePortAddresses: null oomScoreAdj: null portRange: "" showHiddenMetricsForVersion: "" udpIdleTimeout: 0s winkernel: enableDSR: false networkName: "" sourceVip: "" `)) } } g.Expect(found).To(BeTrue()) }
explode_data.jsonl/57157
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 821 }
[ 2830, 3393, 6750, 42, 3760, 16219, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 692, 1444, 1669, 609, 453, 648, 16, 30149, 8327, 515, 197, 197, 28678, 12320, 25, 3056, 453, 648, 16, 72883, 12320, 5874, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetReplacementChain(t *testing.T) { CSV := func(name, replaces string) *v1alpha1.ClusterServiceVersion { return &v1alpha1.ClusterServiceVersion{ ObjectMeta: metav1.ObjectMeta{ Name: name, }, Spec: v1alpha1.ClusterServiceVersionSpec{ Replaces: replaces, }, } } for _, tc := range []struct { Name string From *v1alpha1.ClusterServiceVersion All map[string]*v1alpha1.ClusterServiceVersion Expected []string }{ { Name: "csv replaces itself", From: CSV("itself", "itself"), All: map[string]*v1alpha1.ClusterServiceVersion{ "itself": CSV("itself", "itself"), }, Expected: []string{"itself"}, }, { Name: "two csvs replace each other", From: CSV("a", "b"), All: map[string]*v1alpha1.ClusterServiceVersion{ "a": CSV("a", "b"), "b": CSV("b", "a"), }, Expected: []string{"a", "b"}, }, { Name: "starting from head of chain without cycles", From: CSV("a", "b"), All: map[string]*v1alpha1.ClusterServiceVersion{ "a": CSV("a", "b"), "b": CSV("b", "c"), "c": CSV("c", ""), }, Expected: []string{"a", "b", "c"}, }, } { t.Run(tc.Name, func(t *testing.T) { assert := assert.New(t) var actual []string for name := range (&Operator{}).getReplacementChain(tc.From, tc.All) { actual = append(actual, name) } assert.ElementsMatch(tc.Expected, actual) }) } }
explode_data.jsonl/31217
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 627 }
[ 2830, 3393, 1949, 68569, 18837, 1155, 353, 8840, 836, 8, 341, 6258, 17803, 1669, 2915, 3153, 11, 40700, 914, 8, 353, 85, 16, 7141, 16, 72883, 1860, 5637, 341, 197, 853, 609, 85, 16, 7141, 16, 72883, 1860, 5637, 515, 298, 23816, 1217...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateAzureManagerInvalidConfig(t *testing.T) { _, err := CreateAzureManager(strings.NewReader(invalidAzureCfg), cloudprovider.NodeGroupDiscoveryOptions{}) assert.Error(t, err, "failed to unmarshal config body") }
explode_data.jsonl/12791
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 4021, 78107, 2043, 7928, 2648, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 4230, 78107, 2043, 51442, 68587, 5900, 1891, 78107, 42467, 701, 9437, 19979, 21714, 2808, 67400, 3798, 37790, 6948, 6141, 1155, 11, 1848, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestUpdateUserActive(t *testing.T) { t.Run("not activating more users when cloud license users at limit", func(t *testing.T) { // create 5 active users th := Setup(t).InitBasic() defer th.TearDown() cloudMock := &mocks.CloudInterface{} cloudMock.Mock.On( "GetSubscription", mock.Anything, ).Return(&model.Subscription{ ID: "MySubscriptionID", CustomerID: "MyCustomer", ProductID: "SomeProductId", AddOns: []string{}, StartAt: 1000000000, EndAt: 2000000000, CreateAt: 1000000000, Seats: 100, DNS: "some.dns.server", IsPaidTier: "false", }, nil) th.App.Srv().SetLicense(model.NewTestLicense("cloud")) th.App.Srv().Cloud = cloudMock user := th.BasicUser th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableUserDeactivation = true *cfg.ExperimentalSettings.CloudUserLimit = 4 }) // deactivate 5th user, now we have 4 active users and are at limit _, err := th.SystemAdminClient.UpdateUserActive(user.Id, false) require.NoError(t, err) // try and reactivate 5th user, not allowed because it exceeds the set cloud user limit resp, err := th.SystemAdminClient.UpdateUserActive(user.Id, true) CheckErrorMessage(t, err, "Unable to activate more users as the cloud account is over capacity.") CheckBadRequestStatus(t, resp) }) t.Run("basic tests", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() user := th.BasicUser th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableUserDeactivation = true }) _, err := th.Client.UpdateUserActive(user.Id, false) require.NoError(t, err) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableUserDeactivation = false }) resp, err := th.Client.UpdateUserActive(user.Id, false) require.Error(t, err) CheckUnauthorizedStatus(t, resp) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableUserDeactivation = true }) resp, err = th.Client.UpdateUserActive(user.Id, false) require.Error(t, err) CheckUnauthorizedStatus(t, resp) th.LoginBasic2() resp, err = 
th.Client.UpdateUserActive(user.Id, true) require.Error(t, err) CheckForbiddenStatus(t, resp) resp, err = th.Client.UpdateUserActive(GenerateTestId(), true) require.Error(t, err) CheckForbiddenStatus(t, resp) resp, err = th.Client.UpdateUserActive("junk", true) require.Error(t, err) CheckBadRequestStatus(t, resp) th.Client.Logout() resp, err = th.Client.UpdateUserActive(user.Id, true) require.Error(t, err) CheckUnauthorizedStatus(t, resp) th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) { _, err = client.UpdateUserActive(user.Id, true) require.NoError(t, err) _, err = client.UpdateUserActive(user.Id, false) require.NoError(t, err) authData := model.NewId() _, err := th.App.Srv().Store.User().UpdateAuthData(user.Id, "random", &authData, "", true) require.NoError(t, err) _, err = client.UpdateUserActive(user.Id, false) require.NoError(t, err) }) }) t.Run("websocket events", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() user := th.BasicUser2 th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableUserDeactivation = true }) webSocketClient, err := th.CreateWebSocketClient() assert.NoError(t, err) defer webSocketClient.Close() webSocketClient.Listen() time.Sleep(300 * time.Millisecond) resp := <-webSocketClient.ResponseChannel require.Equal(t, model.StatusOk, resp.Status) adminWebSocketClient, err := th.CreateWebSocketSystemAdminClient() assert.NoError(t, err) defer adminWebSocketClient.Close() adminWebSocketClient.Listen() time.Sleep(300 * time.Millisecond) resp = <-adminWebSocketClient.ResponseChannel require.Equal(t, model.StatusOk, resp.Status) // Verify that both admins and regular users see the email when privacy settings allow same, // and confirm event is fired for SystemAdmin and Local mode th.App.UpdateConfig(func(cfg *model.Config) { *cfg.PrivacySettings.ShowEmailAddress = true }) th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) { _, err := client.UpdateUserActive(user.Id, 
false) require.NoError(t, err) assertWebsocketEventUserUpdatedWithEmail(t, webSocketClient, user.Email) assertWebsocketEventUserUpdatedWithEmail(t, adminWebSocketClient, user.Email) }) // Verify that only admins see the email when privacy settings hide emails, // and confirm event is fired for SystemAdmin and Local mode th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) { th.App.UpdateConfig(func(cfg *model.Config) { *cfg.PrivacySettings.ShowEmailAddress = false }) _, err := client.UpdateUserActive(user.Id, true) require.NoError(t, err) assertWebsocketEventUserUpdatedWithEmail(t, webSocketClient, "") assertWebsocketEventUserUpdatedWithEmail(t, adminWebSocketClient, user.Email) }) }) t.Run("activate guest should fail when guests feature is disable", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() id := model.NewId() guest := &model.User{ Email: "success+" + id + "@simulator.amazonses.com", Username: "un_" + id, Nickname: "nn_" + id, Password: "Password1", EmailVerified: true, } user, err := th.App.CreateGuest(th.Context, guest) require.Nil(t, err) th.App.UpdateActive(th.Context, user, false) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GuestAccountsSettings.Enable = false }) defer th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GuestAccountsSettings.Enable = true }) th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) { resp, err := client.UpdateUserActive(user.Id, true) require.Error(t, err) CheckUnauthorizedStatus(t, resp) }) }) t.Run("activate guest should work when guests feature is enabled", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() id := model.NewId() guest := &model.User{ Email: "success+" + id + "@simulator.amazonses.com", Username: "un_" + id, Nickname: "nn_" + id, Password: "Password1", EmailVerified: true, } user, appErr := th.App.CreateGuest(th.Context, guest) require.Nil(t, appErr) th.App.UpdateActive(th.Context, user, false) th.App.UpdateConfig(func(cfg 
*model.Config) { *cfg.GuestAccountsSettings.Enable = true }) th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) { _, err := client.UpdateUserActive(user.Id, true) require.NoError(t, err) }) }) }
explode_data.jsonl/47513
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2518 }
[ 2830, 3393, 4289, 1474, 5728, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1921, 71092, 803, 3847, 979, 9437, 5723, 3847, 518, 3930, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 1855, 220, 20, 4541, 3847, 198, 197, 704...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequestHandler_SignCertificate(t *testing.T) { type args struct { req *pb.SignCertificateRequest } tests := []struct { name string args args want *pb.SignCertificateResponse wantErr bool }{ { name: "invalid auth", args: args{ req: &pb.SignCertificateRequest{}, }, wantErr: true, }, { name: "valid", args: args{ req: &pb.SignCertificateRequest{ CertificateSigningRequest: testCSR, }, }, wantErr: false, }, } r := newRequestHandler(t) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := r.SignCertificate(context.Background(), tt.args.req) if tt.wantErr { require.Error(t, err) return } require.NoError(t, err) require.NotEmpty(t, got) }) } }
explode_data.jsonl/81812
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 1900, 3050, 1098, 622, 33202, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 24395, 353, 16650, 41152, 33202, 1900, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 2827, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStyledString(t *testing.T) { Test(t, That("print (styled abc hopefully-never-exists)").Throws(ErrorWithMessage( "hopefully-never-exists is not a valid style transformer")), That("print (styled abc bold)").Prints("\033[1mabc\033[m"), That("print (styled abc red cyan)").Prints("\033[36mabc\033[m"), That("print (styled abc bg-green)").Prints("\033[42mabc\033[m"), That("print (styled abc no-dim)").Prints("abc"), ) }
explode_data.jsonl/6981
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 34180, 703, 1155, 353, 8840, 836, 8, 341, 73866, 1155, 345, 197, 197, 4792, 445, 1350, 320, 22849, 39022, 22326, 12, 36493, 10187, 1671, 63554, 44737, 37396, 2354, 2052, 1006, 298, 197, 1, 96425, 12, 36493, 10187, 1671, 374,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWebsocketUtilOpenCloseConnection(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(handlerToBeTested)) u, _ := url.Parse(srv.URL) u.Scheme = "ws" var log = log.NewMockLog() var ws = NewWebsocketUtil(log, nil) conn, _ := ws.OpenConnection(u.String()) assert.NotNil(t, conn, "Open connection failed.") err := ws.CloseConnection(conn) assert.Nil(t, err, "Error closing the websocket connection.") }
explode_data.jsonl/58791
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 5981, 9556, 2742, 5002, 7925, 4526, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 36514, 1249, 3430, 2271, 291, 1171, 10676, 11, 716, 1669, 2515, 8937, 1141, 10553, 20893, 340, 10676,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSliceLit(t *testing.T) { gopClTest(t, ` x := [1, 3.4, 5] y := [1] z := [] `, `package main func main() { x := []float64{1, 3.4, 5} y := []int{1} z := []interface { }{} } `) }
explode_data.jsonl/73632
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 33236, 68954, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 87, 1669, 508, 16, 11, 220, 18, 13, 19, 11, 220, 20, 921, 88, 1669, 508, 16, 921, 89, 1669, 4167, 7808, 1565, 1722, 1887, 271, 2830, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKeysManualSerializationService(t *testing.T) { private, err := jwt.ParseECPrivateKeyFromPEM(privateKeyPEM) if err != nil { fmt.Println(string(privateKeyPEM)) t.Fatalf("Error parsing key from PEM: %v", err) } public, err := jwt.ParseECPublicKeyFromPEM(publicKeyPEM) if err != nil { t.Fatalf("Error parsing key from PEM: %v", err) } generatedPublic := private.Public() if !public.Equal(generatedPublic) { t.Fatalf("Generated public key and reference keys are not equal") } genPEM, err := x509.MarshalPKIXPublicKey(generatedPublic) if err != nil { t.Fatalf("Error creating PEM: %v", err) } // creating the same certificate as ssl tool is doing with the following command: // openssl ec -in private.pem -pubout -out public.pem pemString := fmt.Sprintf("-----BEGIN PUBLIC KEY-----\n%s\n%s\n-----END PUBLIC KEY-----\n", base64.StdEncoding.EncodeToString(genPEM)[0:64], base64.StdEncoding.EncodeToString(genPEM)[64:], ) if pemString != string(publicKeyPEM) { fmt.Printf("%s\n", pemString) t.Fatalf("generated public key PEM and referenced are not equal.") } genPEMPrivate, err := x509.MarshalPKCS8PrivateKey(private) // genPEMPrivate, err := x509.MarshalECPrivateKey(private) if err != nil { t.Fatalf("Error creating private PEM: %v", err) } pemStringPrivate := fmt.Sprintf("-----BEGIN PRIVATE KEY-----\n%s\n%s\n%s\n-----END PRIVATE KEY-----\n", base64.StdEncoding.EncodeToString(genPEMPrivate)[0:64], base64.StdEncoding.EncodeToString(genPEMPrivate)[64:128], base64.StdEncoding.EncodeToString(genPEMPrivate)[128:], ) if pemStringPrivate != string(privateKeyPEM) { fmt.Printf("%s\n", pemStringPrivate) t.Fatalf("generated private key PEM and referenced are not equal.") } }
explode_data.jsonl/50749
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 663 }
[ 2830, 3393, 8850, 52092, 35865, 1860, 1155, 353, 8840, 836, 8, 341, 2455, 11, 1848, 1669, 24589, 8937, 7498, 75981, 3830, 1740, 44, 17550, 1592, 1740, 44, 340, 743, 1848, 961, 2092, 341, 197, 11009, 12419, 3609, 17550, 1592, 1740, 44, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestTerragruntGenerateBlockSameNameFail(t *testing.T) { t.Parallel() generateTestCase := filepath.Join(TEST_FIXTURE_CODEGEN_PATH, "generate-block", "same_name_error") cleanupTerraformFolder(t, generateTestCase) cleanupTerragruntFolder(t, generateTestCase) stdout := bytes.Buffer{} stderr := bytes.Buffer{} err := runTerragruntCommand(t, fmt.Sprintf("terragrunt init --terragrunt-working-dir %s", generateTestCase), &stdout, &stderr) require.Error(t, err) parsedError, ok := errors.Unwrap(err).(config.DuplicatedGenerateBlocks) assert.True(t, ok) assert.True(t, len(parsedError.BlockName) == 1) assert.Contains(t, parsedError.BlockName, "backend") }
explode_data.jsonl/10165
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 51402, 68305, 3850, 31115, 4713, 19198, 675, 19524, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3174, 13220, 16458, 1669, 26054, 22363, 50320, 42635, 41486, 10020, 11085, 7944, 11, 330, 19366, 9425, 497, 330, 24063, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProcessProposal(t *testing.T) { const height = 2 txs := factory.MakeNTxs(height, 10) ctx, cancel := context.WithCancel(context.Background()) defer cancel() app := abcimocks.NewApplication(t) logger := log.NewNopLogger() cc := abciclient.NewLocalClient(logger, app) proxyApp := proxy.New(cc, logger, proxy.NopMetrics()) err := proxyApp.Start(ctx) require.NoError(t, err) state, stateDB, privVals := makeState(t, 1, height) stateStore := sm.NewStore(stateDB) blockStore := store.NewBlockStore(dbm.NewMemDB()) eventBus := eventbus.NewDefault(logger) require.NoError(t, eventBus.Start(ctx)) blockExec := sm.NewBlockExecutor( stateStore, logger, proxyApp, new(mpmocks.Mempool), sm.EmptyEvidencePool{}, blockStore, eventBus, sm.NopMetrics(), ) block0 := sf.MakeBlock(state, height-1, new(types.Commit)) lastCommitSig := []types.CommitSig{} partSet, err := block0.MakePartSet(types.BlockPartSizeBytes) require.NoError(t, err) blockID := types.BlockID{Hash: block0.Hash(), PartSetHeader: partSet.Header()} voteInfos := []abci.VoteInfo{} for _, privVal := range privVals { vote, err := factory.MakeVote(ctx, privVal, block0.Header.ChainID, 0, 0, 0, 2, blockID, time.Now()) require.NoError(t, err) pk, err := privVal.GetPubKey(ctx) require.NoError(t, err) addr := pk.Address() voteInfos = append(voteInfos, abci.VoteInfo{ SignedLastBlock: true, Validator: abci.Validator{ Address: addr, Power: 1000, }, }) lastCommitSig = append(lastCommitSig, vote.CommitSig()) } block1 := sf.MakeBlock(state, height, &types.Commit{ Height: height - 1, Signatures: lastCommitSig, }) block1.Txs = txs expectedRpp := &abci.RequestProcessProposal{ Txs: block1.Txs.ToSliceOfBytes(), Hash: block1.Hash(), Height: block1.Header.Height, Time: block1.Header.Time, ByzantineValidators: block1.Evidence.ToABCI(), ProposedLastCommit: abci.CommitInfo{ Round: 0, Votes: voteInfos, }, NextValidatorsHash: block1.NextValidatorsHash, ProposerAddress: block1.ProposerAddress, } app.On("ProcessProposal", mock.Anything, 
mock.Anything).Return(&abci.ResponseProcessProposal{Status: abci.ResponseProcessProposal_ACCEPT}, nil) acceptBlock, err := blockExec.ProcessProposal(ctx, block1, state) require.NoError(t, err) require.True(t, acceptBlock) app.AssertExpectations(t) app.AssertCalled(t, "ProcessProposal", ctx, expectedRpp) }
explode_data.jsonl/49114
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1030 }
[ 2830, 3393, 7423, 98637, 1155, 353, 8840, 836, 8, 341, 4777, 2608, 284, 220, 17, 198, 3244, 18561, 1669, 8633, 50133, 6408, 18561, 23052, 11, 220, 16, 15, 340, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLayer3ExternalRouterCreateDelete(t *testing.T) { clients.RequireAdmin(t) client, err := clients.NewNetworkV2Client() th.AssertNoErr(t, err) router, err := CreateExternalRouter(t, client) th.AssertNoErr(t, err) defer DeleteRouter(t, client, router.ID) tools.PrintResource(t, router) newName := tools.RandomString("TESTACC-", 8) newDescription := "" updateOpts := routers.UpdateOpts{ Name: newName, Description: &newDescription, } _, err = routers.Update(client, router.ID, updateOpts).Extract() th.AssertNoErr(t, err) newRouter, err := routers.Get(client, router.ID).Extract() th.AssertNoErr(t, err) tools.PrintResource(t, newRouter) th.AssertEquals(t, newRouter.Name, newName) th.AssertEquals(t, newRouter.Description, newDescription) }
explode_data.jsonl/22105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 9188, 18, 25913, 9523, 4021, 6435, 1155, 353, 8840, 836, 8, 341, 197, 31869, 81288, 7210, 1155, 692, 25291, 11, 1848, 1669, 8239, 7121, 12320, 53, 17, 2959, 741, 70479, 11711, 2753, 7747, 1155, 11, 1848, 692, 67009, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPublishVersion(t *testing.T) { const ( testVersion = "v1.20.0-alpha.1.66+d19aec8bf1c8ca" olderTestVersion = "v1.20.0-alpha.0.22+00000000000000" ) mockVersioMarkers := func(mock *releasefakes.FakePublisherClient) { mock.GSUtilOutputReturnsOnCall(0, olderTestVersion, nil) mock.GSUtilOutputReturnsOnCall(1, testVersion, nil) mock.GSUtilOutputReturnsOnCall(2, olderTestVersion, nil) mock.GSUtilOutputReturnsOnCall(3, testVersion, nil) mock.GSUtilOutputReturnsOnCall(4, olderTestVersion, nil) } for _, tc := range []struct { bucket string version string prepare func( *releasefakes.FakePublisherClient, ) (buildDir string, cleanup func()) privateBucket bool fast bool shouldError bool }{ { // success update fast bucket: release.ProductionBucket, version: testVersion, fast: true, prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", "publish-version-test-") require.Nil(t, err) mock.GSUtilOutputReturnsOnCall(0, olderTestVersion, nil) mock.GSUtilOutputReturnsOnCall(1, testVersion, nil) mock.GetURLResponseReturns(testVersion, nil) return tempDir, func() { require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: false, }, { // success update on private bucket bucket: release.ProductionBucket, version: testVersion, privateBucket: true, prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", "publish-version-test-") require.Nil(t, err) mockVersioMarkers(mock) mock.GSUtilOutputReturnsOnCall(5, testVersion, nil) return tempDir, func() { require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: false, }, { // failure update on private bucket bucket: release.ProductionBucket, version: testVersion, privateBucket: true, prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", "publish-version-test-") require.Nil(t, err) mockVersioMarkers(mock) mock.GSUtilOutputReturnsOnCall(5, "", errors.New("")) return tempDir, func() { 
require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: true, }, { // failure update on private bucket wrong content bucket: release.ProductionBucket, version: testVersion, privateBucket: true, prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", "publish-version-test-") require.Nil(t, err) mockVersioMarkers(mock) mock.GSUtilOutputReturnsOnCall(5, "wrong", nil) return tempDir, func() { require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: true, }, { // success update non private bucket bucket: "k8s-another-bucket", version: testVersion, privateBucket: false, prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", "publish-version-test-") require.Nil(t, err) mockVersioMarkers(mock) mock.GetURLResponseReturns(testVersion, nil) return tempDir, func() { require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: false, }, { // failure update non private bucket url response failed bucket: "k8s-another-bucket", version: testVersion, privateBucket: false, prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", "publish-version-test-") require.Nil(t, err) mockVersioMarkers(mock) mock.GetURLResponseReturns("", errors.New("")) return tempDir, func() { require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: true, }, { // failure release files do not exist bucket: release.ProductionBucket, version: testVersion, privateBucket: false, prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", "publish-version-test-") require.Nil(t, err) mock.GSUtilReturnsOnCall(0, errors.New("")) return tempDir, func() { require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: true, }, { // failure no semver version bucket: release.ProductionBucket, version: "wrong", prepare: func(mock *releasefakes.FakePublisherClient) (string, func()) { tempDir, err := ioutil.TempDir("", 
"publish-version-test-") require.Nil(t, err) return tempDir, func() { require.Nil(t, os.RemoveAll(tempDir)) } }, shouldError: true, }, } { sut := release.NewPublisher() clientMock := &releasefakes.FakePublisherClient{} sut.SetClient(clientMock) buildDir, cleanup := tc.prepare(clientMock) err := sut.PublishVersion( "release", tc.version, buildDir, tc.bucket, nil, tc.privateBucket, tc.fast, ) if tc.shouldError { require.NotNil(t, err) } else { require.Nil(t, err) } cleanup() } }
explode_data.jsonl/55163
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2152 }
[ 2830, 3393, 50145, 5637, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 18185, 5637, 414, 284, 330, 85, 16, 13, 17, 15, 13, 15, 64538, 13, 16, 13, 21, 21, 51412, 16, 24, 71221, 23, 13233, 16, 66, 23, 924, 698, 197, 197, 2018, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidHostname(t *testing.T) { tests := []struct { host string validInput, validPattern bool }{ {host: "example.com", validInput: true, validPattern: true}, {host: "eXample123-.com", validInput: true, validPattern: true}, {host: "-eXample123-.com"}, {host: ""}, {host: "."}, {host: "example..com"}, {host: ".example.com"}, {host: "example.com.", validInput: true}, {host: "*.example.com."}, {host: "*.example.com", validPattern: true}, {host: "*foo.example.com"}, {host: "foo.*.example.com"}, {host: "exa_mple.com", validInput: true, validPattern: true}, {host: "foo,bar"}, {host: "project-dev:us-central1:main"}, } for _, tt := range tests { if got := validHostnamePattern(tt.host); got != tt.validPattern { t.Errorf("validHostnamePattern(%q) = %v, want %v", tt.host, got, tt.validPattern) } if got := validHostnameInput(tt.host); got != tt.validInput { t.Errorf("validHostnameInput(%q) = %v, want %v", tt.host, got, tt.validInput) } } }
explode_data.jsonl/29004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 432 }
[ 2830, 3393, 4088, 88839, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 63104, 3824, 914, 198, 197, 56322, 2505, 11, 2697, 15760, 1807, 198, 197, 59403, 197, 197, 90, 3790, 25, 330, 8687, 905, 497, 2697, 2505, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCount(t *testing.T) { err := Connect("./db/mstat.db") if err != nil { t.Fatal(err) } else { Count("stat1", "1PTM", 1) Count("stat1", "1PTM", 1) Count("stat1", "1PTM", 1) Count("stat1", "1PTM", -1) m, err := GetCounter("stat1") if err != nil { t.Fatal(err) } else { if m["1PTM"] != 2 { t.Fatal("count should be 2, but received: ", m["1PTM"]) } } Disconnect() } }
explode_data.jsonl/1820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 2507, 1155, 353, 8840, 836, 8, 1476, 9859, 1669, 13015, 13988, 1999, 3183, 9878, 7076, 5130, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 92, 770, 1476, 197, 197, 2507, 445, 9878, 16, 497, 330, 16, 2828, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSubscription(t *testing.T) { server := newTestServer(t) server.start() defer server.stop() client := newTestClient(t) client.start() defer client.stop() err := retry.Do(func() error { client.handler.Lock() defer client.handler.Unlock() server.handler.Lock() defer server.handler.Unlock() if err := client.test(); err != nil { return err } if err := server.test(1); err != nil { return err } return nil }, retry.Delay(10*time.Millisecond)) if err != nil { t.Error(err.Error()) } }
explode_data.jsonl/32896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 33402, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 501, 2271, 5475, 1155, 340, 41057, 4962, 741, 16867, 3538, 13227, 2822, 25291, 1669, 501, 2271, 2959, 1155, 340, 25291, 4962, 741, 16867, 2943, 13227, 2822, 9859, 1669, 22683, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_ChunkReader_TooLongLine(t *testing.T) { cr := newChunkReaderWithSize(strings.NewReader(influxText), 16) assert.True(t, cr.HasNext()) assert.Equal(t, "# comment", string(cr.Next())) assert.Nil(t, cr.Error()) assert.False(t, cr.HasNext()) assert.Equal(t, "a1,location=us-", string(cr.Next())) assert.NotNil(t, cr.Error()) }
explode_data.jsonl/74834
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 27588, 3122, 5062, 1139, 2624, 6583, 2460, 1155, 353, 8840, 836, 8, 341, 91492, 1669, 501, 28304, 5062, 2354, 1695, 51442, 68587, 5900, 36706, 1178, 701, 220, 16, 21, 692, 6948, 32443, 1155, 11, 1560, 16152, 5847, 2398, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetch(t *testing.T) { mockRunner := NewMockRunner("") git := NewCustomGit(mockRunner) repo := &Repo{Path: "/test/"} git.Fetch(repo) if mockRunner.folder != "/test/" { t.Errorf("Folder should be /test/") } if mockRunner.command!= "git" { t.Errorf("Command should be git") } if len(mockRunner.args) != 2 { t.Errorf("Args size should be 2") } if mockRunner.args[0] != "fetch" { t.Errorf("Args 0 should be fetch") } if mockRunner.args[1] != "--all" { t.Errorf("Args 1 should be --all") } }
explode_data.jsonl/14058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 20714, 1155, 353, 8840, 836, 8, 341, 77333, 19486, 1669, 1532, 11571, 19486, 31764, 90731, 1669, 1532, 10268, 46562, 30389, 19486, 692, 17200, 5368, 1669, 609, 25243, 90, 1820, 25, 3521, 1944, 11225, 532, 90731, 78506, 50608, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTransfersController_CreateSuccess_From(t *testing.T) { t.Parallel() config, _ := cltest.NewConfig(t) rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplicationWithConfigAndKey(t, config, eth.NewClientWith(rpcClient, gethClient), ) defer cleanup() client := app.NewHTTPClient() require.NoError(t, app.StartAndConnect()) store := app.Store _, from := cltest.MustAddRandomKeyToKeystore(t, store, 0) request := models.SendEtherRequest{ DestinationAddress: common.HexToAddress("0xFA01FA015C8A5332987319823728982379128371"), FromAddress: from, Amount: *assets.NewEth(100), } body, err := json.Marshal(&request) assert.NoError(t, err) resp, cleanup := client.Post("/v2/transfers", bytes.NewBuffer(body)) defer cleanup() errors := cltest.ParseJSONAPIErrors(t, resp.Body) assert.Equal(t, http.StatusOK, resp.StatusCode) assert.Len(t, errors.Errors, 0) count, err := app.GetStore().CountOf(models.EthTx{}) require.NoError(t, err) assert.Equal(t, 1, count) }
explode_data.jsonl/41298
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 429 }
[ 2830, 3393, 3167, 49793, 2051, 34325, 7188, 53157, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 25873, 11, 716, 1669, 1185, 1944, 7121, 2648, 1155, 340, 7000, 3992, 2959, 11, 633, 71, 2959, 11, 8358, 2060, 72577, 20960, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAPIUserSearchAdminLoggedInUserHidden(t *testing.T) { defer prepareTestEnv(t)() adminUsername := "user1" session := loginUser(t, adminUsername) token := getTokenForLoggedInUser(t, session) query := "user31" req := NewRequestf(t, "GET", "/api/v1/users/search?token=%s&q=%s", token, query) req.SetBasicAuth(token, "x-oauth-basic") resp := session.MakeRequest(t, req, http.StatusOK) var results SearchResults DecodeJSON(t, resp, &results) assert.NotEmpty(t, results.Data) for _, user := range results.Data { assert.Contains(t, user.UserName, query) assert.NotEmpty(t, user.Email) assert.EqualValues(t, "private", user.Visibility) } }
explode_data.jsonl/7510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 7082, 1474, 5890, 7210, 28559, 1474, 17506, 1155, 353, 8840, 836, 8, 341, 16867, 10549, 2271, 14359, 1155, 8, 741, 64394, 11115, 1669, 330, 872, 16, 698, 25054, 1669, 87169, 1155, 11, 3986, 11115, 340, 43947, 1669, 54111, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLogsReceiverStartConsume(t *testing.T) { c := kafkaLogsConsumer{ nextConsumer: consumertest.NewNop(), logger: zap.NewNop(), consumerGroup: &testConsumerGroup{}, } ctx, cancelFunc := context.WithCancel(context.Background()) c.cancelConsumeLoop = cancelFunc require.NoError(t, c.Shutdown(context.Background())) err := c.consumeLoop(ctx, &logsConsumerGroupHandler{ ready: make(chan bool), }) assert.EqualError(t, err, context.Canceled.Error()) }
explode_data.jsonl/79401
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 51053, 25436, 3479, 1109, 31323, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 67852, 51053, 29968, 515, 197, 28144, 29968, 25, 220, 4662, 83386, 7121, 45, 453, 3148, 197, 17060, 25, 286, 32978, 7121, 45, 453, 3148, 197, 37203, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLintMissing(t *testing.T) { ctx := context.TODO() errs := eclint.Lint(ctx, "testdata/missing/file") if len(errs) == 0 { t.Error("an error was expected, got none") } for _, err := range errs { if err == nil { t.Error("an error was expected") } } }
explode_data.jsonl/82388
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 47556, 25080, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 90988, 2822, 9859, 82, 1669, 59958, 396, 1214, 396, 7502, 11, 330, 92425, 14, 30616, 23903, 1138, 743, 2422, 3964, 82, 8, 621, 220, 15, 341, 197, 3244, 6141, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTerminateInstances(t *testing.T) { m := &mockEC2{} c := &client{ clients: map[string]*regionalClient{"us-east-1": {region: "us-east-1", ec2: m}}, } err := c.TerminateInstances(context.Background(), "us-east-1", nil) assert.NoError(t, err) m.terminateErr = errors.New("yikes") err = c.TerminateInstances(context.Background(), "us-east-1", nil) assert.EqualError(t, err, "yikes") }
explode_data.jsonl/3405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 62519, 42725, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 609, 16712, 7498, 17, 16094, 1444, 1669, 609, 2972, 515, 197, 197, 31869, 25, 2415, 14032, 8465, 1580, 3914, 2959, 4913, 355, 39507, 12, 16, 788, 314, 3943, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequestVersionedParams(t *testing.T) { r := (&Request{content: ContentConfig{GroupVersion: &v1.SchemeGroupVersion}}).Param("foo", "a") if !reflect.DeepEqual(r.params, url.Values{"foo": []string{"a"}}) { t.Errorf("should have set a param: %#v", r) } r.VersionedParams(&v1.PodLogOptions{Follow: true, Container: "bar"}, scheme.ParameterCodec) if !reflect.DeepEqual(r.params, url.Values{ "foo": []string{"a"}, "container": []string{"bar"}, "follow": []string{"true"}, }) { t.Errorf("should have set a param: %#v", r) } }
explode_data.jsonl/13257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 1900, 5637, 291, 4870, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 15899, 1900, 90, 1796, 25, 8883, 2648, 90, 2808, 5637, 25, 609, 85, 16, 92719, 2808, 5637, 3417, 568, 2001, 445, 7975, 497, 330, 64, 1138, 743, 753, 34913,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEncodeDecode_CloseAccount(t *testing.T) { fu := ag_gofuzz.New().NilChance(0) for i := 0; i < 1; i++ { t.Run("CloseAccount"+strconv.Itoa(i), func(t *testing.T) { { params := new(CloseAccount) fu.Fuzz(params) params.Accounts = nil params.Signers = nil buf := new(bytes.Buffer) err := encodeT(*params, buf) ag_require.NoError(t, err) // got := new(CloseAccount) err = decodeT(got, buf.Bytes()) params.Accounts = nil params.Signers = nil ag_require.NoError(t, err) ag_require.Equal(t, params, got) } }) } }
explode_data.jsonl/76218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 32535, 32564, 68185, 7365, 1155, 353, 8840, 836, 8, 341, 1166, 84, 1669, 933, 1889, 1055, 8889, 7121, 1005, 19064, 76070, 7, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 26, 600, 1027, 341, 197, 3244, 16708,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDevfile200_GetMetadata(t *testing.T) { type args struct { name string } tests := []struct { name string devfilev2 *DevfileV2 expectedName string expectedVersion string expectedAttribute string expectedDockerfilePath string }{ { name: "case 1: Get the metadata", devfilev2: &DevfileV2{ v1.Devfile{ DevfileHeader: devfilepkg.DevfileHeader{ Metadata: devfilepkg.DevfileMetadata{ Name: "nodejs", Version: "1.2.3", Attributes: attributes.Attributes{}.FromStringMap(map[string]string{ "alpha.build-dockerfile": "/relative/path/to/Dockerfile", }), }, }, }, }, expectedName: "nodejs", expectedVersion: "1.2.3", expectedDockerfilePath: "/relative/path/to/Dockerfile", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { metadata := tt.devfilev2.GetMetadata() if metadata.Name != tt.expectedName { t.Errorf("TestDevfile200_GetMetadata() expected %v, got %v", tt.expectedName, metadata.Name) } if metadata.Version != tt.expectedVersion { t.Errorf("TestDevfile200_GetMetadata() expected %v, got %v", tt.expectedVersion, metadata.Version) } if metadata.Attributes.GetString("alpha.build-dockerfile", nil) != tt.expectedDockerfilePath { t.Errorf("TestDevfile200_GetMetadata() expected %v, got %v", tt.expectedDockerfilePath, metadata.Attributes.GetString("alpha.build-dockerfile", nil)) } }) } }
explode_data.jsonl/32516
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 672 }
[ 2830, 3393, 14592, 1192, 17, 15, 15, 13614, 14610, 1155, 353, 8840, 836, 8, 1476, 13158, 2827, 2036, 341, 197, 11609, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 4293, 914, 198, 197, 27302, 1192, 85, 17, 1060, 353, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIsConfiguration(t *testing.T) { tests := []struct { name string path string expected bool }{ {name: "TestIsConfiguration_1", path: "foo", expected: false}, {name: "TestIsConfiguration_2", path: "foo.ini", expected: true}, {name: "TestIsConfiguration_3", path: "/test/path/foo.json", expected: true}, } for _, test := range tests { is := IsConfiguration(test.path) assert.Equal(t, is, test.expected, fmt.Sprintf("%v: is = %v, expected: %v", test.name, is, test.expected)) } }
explode_data.jsonl/20383
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 3872, 7688, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 26781, 257, 914, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 47006, 25, 330, 2271, 3872, 7688, 62, 16, 497, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPullFind(t *testing.T) { defer gock.Off() gock.New("https://gitlab.com"). Get("/api/v4/projects/diaspora/diaspora/merge_requests/1347"). Reply(200). Type("application/json"). SetHeaders(mockHeaders). File("testdata/merge.json") client := NewDefault() got, res, err := client.PullRequests.Find(context.Background(), "diaspora/diaspora", 1347) if err != nil { t.Error(err) return } want := new(scm.PullRequest) raw, _ := ioutil.ReadFile("testdata/merge.json.golden") json.Unmarshal(raw, want) if diff := cmp.Diff(got, want); diff != "" { t.Errorf("Unexpected Results") t.Log(diff) } t.Run("Request", testRequest(res)) t.Run("Rate", testRate(res)) }
explode_data.jsonl/78651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 36068, 9885, 1155, 353, 8840, 836, 8, 341, 16867, 728, 377, 13, 4596, 2822, 3174, 1176, 7121, 445, 2428, 1110, 12882, 14380, 905, 38609, 197, 37654, 4283, 2068, 5457, 19, 39606, 3446, 3473, 70664, 3446, 3473, 70664, 14, 1905...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGodbcManager_Readme(t *testing.T) { // 構造体の定義 type Hoge struct { Id int Name string Flg bool } // データベースへの接続 manager, _ := Connect("postgres", "dbname=test host=localhost user=postgres") // テーブルの作成 manager.Create(Hoge{}).Execute() // データの挿入 manager.Insert(Hoge{1, "name1", true}).Execute() manager.Insert(Hoge{2, "name2", false}).Execute() // データの取得(リスト) manager.From(&Hoge{}).List() manager.From(&Hoge{}).Where(Where{"name", "name", LIKE}).List() // データの取得(一意) manager.From(&Hoge{}).Where(Where{"Id", 1, EQUAL}).SingleResult() // データの削除 manager.From(&Hoge{}).Where(Where{"Id", 1, EQUAL}).Delete().Execute() // テーブルの削除 manager.Drop(Hoge{}).Execute() // SQLの取得 manager.Create(Hoge{}).GetSQL() manager.Insert(Hoge{1, "name1", true}).GetSQL() manager.From(&Hoge{}).Where(Where{"Id", 1, EQUAL}).Delete().GetSQL() manager.Drop(Hoge{}).GetSQL() }
explode_data.jsonl/44392
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 27522, 8904, 2043, 38381, 2660, 1155, 353, 8840, 836, 8, 341, 197, 322, 6567, 100, 233, 66078, 31914, 15767, 22382, 100942, 198, 13158, 472, 40532, 2036, 341, 197, 67211, 526, 198, 197, 21297, 914, 198, 197, 197, 3882, 70, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApp_Float64Flag(t *testing.T) { var meters float64 app := &App{ Flags: []Flag{ &Float64Flag{Name: "height", Value: 1.5, Usage: "Set the height, in meters"}, }, Action: func(c *Context) error { meters = c.Float64("height") return nil }, } app.Run([]string{"", "--height", "1.93"}) expect(t, meters, 1.93) }
explode_data.jsonl/52571
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 2164, 89067, 21, 19, 12135, 1155, 353, 8840, 836, 8, 341, 2405, 20044, 2224, 21, 19, 271, 28236, 1669, 609, 2164, 515, 197, 197, 9195, 25, 3056, 12135, 515, 298, 197, 5, 5442, 21, 19, 12135, 63121, 25, 330, 2563, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileHelper_IsAbsolutePath_01(t *testing.T) { fh := FileHelper{} commonDir := fh.AdjustPathSlash("../../filesfortest/levelfilesfortest/level_01_dir/level_02_dir/" + "level_03_dir/level_3_1_test.txt") result := fh.IsAbsolutePath(commonDir) if result == true { t.Error("IsAbsolutePath result is INVALID. Relative path classified as Absolute path!") } }
explode_data.jsonl/14487
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 1703, 5511, 31879, 39211, 62, 15, 16, 1155, 353, 8840, 836, 8, 1476, 220, 36075, 1669, 2887, 5511, 16094, 220, 4185, 6184, 1669, 36075, 17865, 4250, 1820, 88004, 36800, 7198, 3969, 477, 14, 3449, 490, 3658, 3969, 477, 14, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTicketBundleSubdigestsCanonicalTicketL1Digest(t *testing.T) { m := TicketBundleSubdigests{ TicketL1Digest: [][]byte{ {1, 2, 3, 4, 5, 6, 7, 8, 9, 0}, {4, 5, 6, 7, 8, 9, 0, 1, 2, 3}, {7, 8, 9, 0, 1, 2, 3, 4, 5, 6}, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, }, } digest := m.canonicalTicketL1Digest() expected := []byte{0x71, 0xc2, 0x5a, 0x8c, 0x90, 0x70, 0x77, 0x38, 0x97, 0x2a, 0x6f, 0x87, 0xc2, 0x73, 0x33, 0x61, 0x56, 0x26, 0xa, 0xb, 0x80, 0xcb, 0xcd, 0x9e, 0xf8, 0x1a, 0x1a, 0xc1, 0xcd, 0x74, 0x94, 0x5d, 0xd9, 0x25, 0x43, 0xba, 0x77, 0x44, 0x82, 0x19, 0x10, 0x89, 0x67, 0xe4, 0x73, 0x92, 0x1e, 0xb2} assert.Equal(t, expected, digest) }
explode_data.jsonl/45219
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 396 }
[ 2830, 3393, 34058, 8409, 3136, 36339, 82, 70914, 34058, 43, 16, 45217, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 28397, 8409, 3136, 36339, 82, 515, 197, 10261, 5897, 43, 16, 45217, 25, 52931, 3782, 515, 298, 197, 90, 16, 11, 220, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGraph_AddTarget(t *testing.T) { var target = Target{ RefID: "A", Datasource: "Sample Source", Expr: "sample request"} graph := NewGraph("") graph.AddTarget(&target) if len(graph.GraphPanel.Targets) != 1 { t.Errorf("should be 1 but %d", len(graph.GraphPanel.Targets)) } if graph.GraphPanel.Targets[0].RefID != "A" { t.Errorf("should be equal A but %s", graph.GraphPanel.Targets[0].RefID) } }
explode_data.jsonl/31047
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 11212, 21346, 6397, 1155, 353, 8840, 836, 8, 341, 2405, 2169, 284, 13483, 515, 197, 197, 3945, 915, 25, 414, 330, 32, 756, 197, 10957, 19346, 919, 25, 330, 17571, 8748, 756, 197, 197, 16041, 25, 981, 330, 13611, 1681, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestNoImageIDChange verifies that when the image stream's tagged image ID
// already matches the build config's LastTriggeredImageID, handling the image
// repo neither instantiates a new build nor updates the build config.
func TestNoImageIDChange(t *testing.T) {
	// this buildConfig has up to date configuration, but is checked eg. during
	// startup when we're checking all the imageRepos
	buildcfg := mockBuildConfig("registry.com/namespace/imagename", "registry.com/namespace/imagename", "testImageStream", "testTag")
	// Pre-set the trigger to the same image ID the stream maps "testTag" to,
	// so no change should be detected below.
	buildcfg.Triggers[0].ImageChange.LastTriggeredImageID = "registry.com/namespace/imagename:imageID123"
	imageStream := mockImageStream("testImageStream", "registry.com/namespace/imagename", map[string]string{"testTag": "imageID123"})
	image := mockImage("testImage@id", "registry.com/namespace/imagename@id")
	controller := mockImageChangeController(buildcfg, imageStream, image)
	// Reach into the controller's fakes to observe what it did.
	bcInstantiator := controller.BuildConfigInstantiator.(*buildConfigInstantiator)
	bcUpdater := bcInstantiator.buildConfigUpdater

	err := controller.HandleImageRepo(imageStream)
	if err != nil {
		t.Errorf("Unexpected error %v from HandleImageRepo", err)
	}
	// A non-empty instantiator name would mean a build was generated.
	if len(bcInstantiator.name) != 0 {
		t.Error("New build generated when no change happened!")
	}
	// A non-nil updater buildcfg would mean the config was rewritten.
	if bcUpdater.buildcfg != nil {
		t.Error("BuildConfig was updated when no change happened!")
	}
}
explode_data.jsonl/69176
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 2753, 1906, 915, 4072, 1155, 353, 8840, 836, 8, 341, 197, 322, 419, 1936, 2648, 702, 705, 311, 2400, 6546, 11, 714, 374, 10067, 8695, 13, 2337, 198, 197, 322, 20567, 979, 582, 2299, 13295, 678, 279, 2168, 693, 966, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_shouldBeAbleToSetUpServiceTypeAsLoadBalancer(t *testing.T) { helmChartParser := NewHelmConfigParser( NewHelmTest(t, helmChartRelativePath, map[string]string{ "traefik.enabled": "true", "traefik.service.type": "LoadBalancer", }), ) var d DeploymentMetadata var list string for _, slice := range helmChartParser.SlicedResource { helm.UnmarshalK8SYaml(helmChartParser.T, slice, &d) if d.Kind == "List" { list = slice break } } require.True(t, len(list) != 0) require.Contains(t, list, "LoadBalancer") }
explode_data.jsonl/14598
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 43378, 3430, 32, 891, 1249, 79831, 1860, 929, 2121, 5879, 93825, 1155, 353, 8840, 836, 8, 341, 9598, 23162, 14488, 6570, 1669, 1532, 39, 23162, 2648, 6570, 1006, 197, 197, 3564, 39, 23162, 2271, 1155, 11, 33765, 14488, 28442...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestSubquery2 builds a query with a correlated aliased subquery that counts
// the projects each person manages, restricted to the person whose last name
// is "Wolfe", and asserts the expected count.
func TestSubquery2(t *testing.T) {
	ctx := getContext()
	// "manager_count" aliases a subquery counting projects whose ManagerID
	// matches the outer person's ID.
	people := model.QueryPeople(ctx).
		Alias("manager_count", model.QueryProjects(ctx).
			Alias("", Count(node.Project().ManagerID())).
			Where(Equal(node.Project().ManagerID(), node.Person().ID())).
			Subquery()).
		Where(Equal(node.Person().LastName(), "Wolfe")).
		Get()

	assert.Equal(t, 2, people.GetAlias("manager_count").Int(), "Karen Wolfe manages 2 projects.")
}
explode_data.jsonl/26328
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 3136, 1631, 17, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 66175, 741, 197, 16069, 1669, 1614, 15685, 15919, 7502, 4292, 197, 197, 22720, 445, 13297, 3180, 756, 298, 19727, 15685, 29958, 7502, 4292, 571, 197, 22720, 19814, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestProductSectionList_SortByPropertyKeyName checks SortByPropertyKeyName
// on three states: a section with a nil Property slice, an empty section, and
// an unordered property list that must come back sorted by property Key.
func TestProductSectionList_SortByPropertyKeyName(t *testing.T) {
	// Section with a nil Property slice — sorting must be a no-op.
	sliceProductSection := &types.ProductSectionList{
		ProductSection: &types.ProductSection{},
	}
	// Section with metadata but no properties — also a no-op.
	emptyProductSection := &types.ProductSectionList{
		ProductSection: &types.ProductSection{
			Info: "Custom properties",
		},
	}
	// unordered list for test
	sortOrder := &types.ProductSectionList{
		ProductSection: &types.ProductSection{
			Info: "Custom properties",
			Property: []*types.Property{
				&types.Property{
					UserConfigurable: false,
					Key:              "sys_owner",
					Label:            "sys_owner_label",
					Type:             "string",
					DefaultValue:     "sys_owner_default",
					Value:            &types.Value{Value: "test"},
				},
				&types.Property{
					UserConfigurable: true,
					Key:              "asset_tag",
					Label:            "asset_tag_label",
					Type:             "string",
					DefaultValue:     "asset_tag_default",
					Value:            &types.Value{Value: "xxxyyy"},
				},
				&types.Property{
					UserConfigurable: true,
					Key:              "guestinfo.config.bootstrap.ip",
					Label:            "guestinfo.config.bootstrap.ip_label",
					Type:             "string",
					DefaultValue:     "default_ip",
					Value:            &types.Value{Value: "192.168.12.180"},
				},
			},
		},
	}
	// correct state after ordering (keys ascending: asset_tag, guestinfo..., sys_owner)
	expectedSortedOrder := &types.ProductSectionList{
		ProductSection: &types.ProductSection{
			Info: "Custom properties",
			Property: []*types.Property{
				&types.Property{
					UserConfigurable: true,
					Key:              "asset_tag",
					Label:            "asset_tag_label",
					Type:             "string",
					DefaultValue:     "asset_tag_default",
					Value:            &types.Value{Value: "xxxyyy"},
				},
				&types.Property{
					UserConfigurable: true,
					Key:              "guestinfo.config.bootstrap.ip",
					Label:            "guestinfo.config.bootstrap.ip_label",
					Type:             "string",
					DefaultValue:     "default_ip",
					Value:            &types.Value{Value: "192.168.12.180"},
				},
				&types.Property{
					UserConfigurable: false,
					Key:              "sys_owner",
					Label:            "sys_owner_label",
					Type:             "string",
					DefaultValue:     "sys_owner_default",
					Value:            &types.Value{Value: "test"},
				},
			},
		},
	}
	tests := []struct {
		name          string
		setValue      *types.ProductSectionList
		expectedValue *types.ProductSectionList
	}{
		{name: "Empty", setValue: emptyProductSection, expectedValue: emptyProductSection},
		{name: "Slice", setValue: sliceProductSection, expectedValue: sliceProductSection},
		{name: "SortOrder", setValue: sortOrder, expectedValue: expectedSortedOrder},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// SortByPropertyKeyName sorts in place, so compare the mutated
			// value against the expected structure.
			p := tt.setValue
			p.SortByPropertyKeyName()
			if !reflect.DeepEqual(p, tt.expectedValue) {
				t.Errorf("Objects were not deeply equal: \n%#+v\n, got:\n %#+v\n", tt.expectedValue, p)
			}
		})
	}
}
explode_data.jsonl/68073
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1439 }
[ 2830, 3393, 4816, 9620, 852, 1098, 371, 1359, 3052, 58660, 1155, 353, 8840, 836, 8, 341, 1903, 4754, 4816, 9620, 1669, 609, 9242, 20592, 9620, 852, 515, 197, 197, 4816, 9620, 25, 609, 9242, 20592, 9620, 38837, 197, 630, 197, 3194, 481...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMatchKeyTypeScheme(t *testing.T) { tables := []struct { name string key Key err error }{ {name: "test for unsupported key type", key: Key{ KeyID: "", KeyIDHashAlgorithms: nil, KeyType: "invalid", KeyVal: KeyVal{}, Scheme: "", }, err: ErrUnsupportedKeyType, }, { name: "test for scheme key type mismatch", key: Key{ KeyID: "", KeyIDHashAlgorithms: nil, KeyType: "rsa", KeyVal: KeyVal{}, Scheme: "ed25519", }, err: ErrSchemeKeyTypeMismatch, }, } for _, table := range tables { err := matchKeyTypeScheme(table.key) if !errors.Is(err, table.err) { t.Errorf("%s returned wrong error. We got: %s, we should have got: %s", table.name, err, table.err) } } }
explode_data.jsonl/51774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 450 }
[ 2830, 3393, 8331, 97964, 28906, 1155, 353, 8840, 836, 8, 341, 26481, 82, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 23634, 220, 5309, 198, 197, 9859, 220, 1465, 198, 197, 59403, 197, 197, 47006, 25, 330, 1944, 369, 40409, 1376,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFromObject(t *testing.T) { table := []struct { obj runtime.Object message string }{ {&api.Status{Message: "foobar"}, "foobar"}, {&TestType{}, "unexpected object: &{}"}, } for _, item := range table { if e, a := item.message, FromObject(item.obj).Error(); e != a { t.Errorf("Expected %v, got %v", e, a) } } }
explode_data.jsonl/8868
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 3830, 1190, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 3056, 1235, 341, 197, 22671, 257, 15592, 8348, 198, 197, 24753, 914, 198, 197, 59403, 197, 197, 90, 5, 2068, 10538, 90, 2052, 25, 330, 50267, 14345, 330, 50267, 7115, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestAuthService_GetUsers verifies that GetUsers, given a valid token,
// returns the user-info projection of every user the DAO reports.
func TestAuthService_GetUsers(t *testing.T) {
	// Fixtures: two users and a token issued for the first one.
	user1 := createTestUser()
	user2 := createAnotherTestUser()
	token := issueTestToken(user1.ID, user1.Username, createTestConfig().PrivKeyPath)
	// Stub the DAO: GetAll returns both users; GetByUsername resolves the
	// token's owner.
	dao := dao.MockUserDao{}
	dao.On("GetAll").Return(&[]st.User{user1, user2}, nil)
	dao.On("GetByUsername", user1.Username).Return(&user1, nil)
	s := AuthService{&mailer, &dao, createTestConfig()}
	// Expected projection, in the same order the DAO returns users.
	userInfo := createTestUserInfo()
	userInfo2 := createAdditionalTestUserInfo()
	expected := &[]st.UserInfo{userInfo, userInfo2}

	us, err := s.GetUsers(token)

	assert.Nil(t, err)
	assert.Equal(t, expected, us)
}
explode_data.jsonl/18894
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 90466, 13614, 7137, 1155, 353, 8840, 836, 8, 341, 19060, 16, 1669, 1855, 2271, 1474, 741, 19060, 17, 1669, 1855, 14037, 2271, 1474, 741, 43947, 1669, 4265, 2271, 3323, 4277, 16, 9910, 11, 1196, 16, 42777, 11, 1855, 2271, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetLatestDaysBaseNow(t *testing.T) { fn := func(days []time.Time) { for i := range days { t.Log(days[i].Format(Y_M_D)) } } t.Log("-7 days asc") fn(GetLatestDaysBaseNow(-7)) t.Log("-7 days desc") fn(GetLatestDaysBaseNow(-7, true)) t.Log(" 0 days") fn(GetLatestDaysBaseNow(0)) fn(GetLatestDaysBaseNow(0, true)) t.Log("+7 days asc") fn(GetLatestDaysBaseNow(+7)) t.Log("+7 days desc") fn(GetLatestDaysBaseNow(+7, true)) }
explode_data.jsonl/47979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 1949, 31992, 20557, 3978, 7039, 1155, 353, 8840, 836, 8, 341, 40095, 1669, 2915, 42595, 3056, 1678, 16299, 8, 341, 197, 2023, 600, 1669, 2088, 2849, 341, 298, 3244, 5247, 42595, 989, 936, 4061, 20206, 1245, 1557, 1171, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestRunParallelFail ensures that logging and reporting an error from inside
// RunParallel neither crashes nor deadlocks the benchmark machinery.
func TestRunParallelFail(t *testing.T) {
	testing.Benchmark(func(b *testing.B) {
		b.RunParallel(func(_ *testing.PB) {
			// Logging and failing from a parallel body must be safe:
			// the benchmark as a whole should still terminate cleanly.
			b.Log("log")
			b.Error("error")
		})
	})
}
explode_data.jsonl/33938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 6727, 16547, 19524, 1155, 353, 8840, 836, 8, 341, 197, 8840, 1785, 39381, 18552, 1883, 353, 8840, 1785, 8, 341, 197, 2233, 16708, 16547, 18552, 76878, 353, 8840, 1069, 33, 8, 341, 298, 197, 322, 576, 729, 1969, 387, 2952, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPluralsCardinal checks the cardinal plural rules of this locale.
// NOTE(review): this is an auto-generated locale test whose expected-rules
// table is entirely commented out, so `tests` is empty, the verification loop
// below never executes, and the test only confirms PluralsCardinal() can be
// called without panicking.
func TestPluralsCardinal(t *testing.T) {
	trans := New()

	tests := []struct {
		expected locales.PluralRule
	}{
		// {
		//     expected: locales.PluralRuleOne,
		// },
		// {
		//     expected: locales.PluralRuleOther,
		// },
	}

	rules := trans.PluralsCardinal()
	// expected := 2
	// if len(rules) != expected {
	// 	t.Errorf("Expected '%d' Got '%d'", expected, len(rules))
	// }

	for _, tt := range tests {
		// Pass if the expected rule appears anywhere in the returned set.
		r := locales.PluralRuleUnknown
		for i := 0; i < len(rules); i++ {
			if rules[i] == tt.expected {
				r = rules[i]
				break
			}
		}
		if r == locales.PluralRuleUnknown {
			t.Errorf("Expected '%s' Got '%s'", tt.expected, r)
		}
	}
}
explode_data.jsonl/1278
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 2120, 324, 1127, 5770, 977, 1155, 353, 8840, 836, 8, 1476, 72453, 1669, 1532, 2822, 78216, 1669, 3056, 1235, 341, 197, 42400, 52297, 21368, 4176, 11337, 198, 197, 59403, 197, 197, 322, 341, 197, 197, 322, 220, 42400, 25, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestReader(t *testing.T) { data := []byte(`{"foo": "bar", "baz": {"bar": "cat"}}`) testData := []struct { path []string value string }{ { []string{"foo"}, "bar", }, { []string{"baz", "bar"}, "cat", }, } r := NewReader() c, err := r.Merge(&source.ChangeSet{Data: data}, &source.ChangeSet{}) if err != nil { t.Fatal(err) } values, err := r.Values(c) if err != nil { t.Fatal(err) } for _, test := range testData { if v := values.Get(test.path...).String(""); v != test.value { t.Fatalf("Expected %s got %s for path %v", test.value, v, test.path) } } }
explode_data.jsonl/76874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 277 }
[ 2830, 3393, 5062, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 5809, 4913, 7975, 788, 330, 2257, 497, 330, 42573, 788, 5212, 2257, 788, 330, 4616, 30975, 63, 692, 18185, 1043, 1669, 3056, 1235, 341, 197, 26781, 220, 3056, 917...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFiles__GetFile(t *testing.T) { achClient, _, server := MockClientServer("fileDelete", func(r *mux.Router) { AddGetFileRoute(r) }) defer server.Close() file, err := achClient.GetFile("fileId") if err != nil || file == nil { t.Fatalf("file=%v err=%v", file, err) } if file.Header.ImmediateOrigin == "" { t.Error("empty file") } }
explode_data.jsonl/73111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 10809, 563, 1949, 1703, 1155, 353, 8840, 836, 8, 341, 197, 610, 2959, 11, 8358, 3538, 1669, 14563, 2959, 5475, 445, 1192, 6435, 497, 2915, 2601, 353, 75066, 31413, 8, 341, 197, 37972, 1949, 1703, 4899, 2601, 340, 197, 3518...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIncrementAfterDeleteValueInt32 ensures that the ++ operator on a
// missing int32 map key (after a neighboring key was deleted) starts from the
// zero value rather than stale storage.
func TestIncrementAfterDeleteValueInt32(t *testing.T) {
	const (
		key1 = 12
		key2 = 13
	)

	counts := make(map[int]int32)
	counts[key1] = 99
	delete(counts, key1)
	counts[key2]++

	if got := counts[key2]; got != 1 {
		t.Errorf("incremented 0 to %d", got)
	}
}
explode_data.jsonl/19929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 38311, 6025, 6435, 1130, 1072, 18, 17, 1155, 353, 8840, 836, 8, 341, 4777, 1376, 16, 284, 220, 16, 17, 198, 4777, 1376, 17, 284, 220, 16, 18, 271, 2109, 1669, 1281, 9147, 18640, 63025, 18, 17, 340, 2109, 8157, 16, 60, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWriteErrors(t *testing.T) { for _, w := range errorWriterTests { buf := NewWriter(w) _, e := buf.Write([]byte("hello world")) if e != nil { t.Errorf("Write hello to %v: %v", w, e) continue } // Two flushes, to verify the error is sticky. for i := 0; i < 2; i++ { e = buf.Flush() if e != w.expect { t.Errorf("Flush %d/2 %v: got %v, wanted %v", i+1, w, e, w.expect) } } } }
explode_data.jsonl/2878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 7985, 13877, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 289, 1669, 2088, 1465, 6492, 18200, 341, 197, 26398, 1669, 1532, 6492, 3622, 340, 197, 197, 6878, 384, 1669, 6607, 4073, 10556, 3782, 445, 14990, 1879, 5455, 197, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestISBNValidation(t *testing.T) { tests := []struct { param string expected bool }{ {"", false}, {"foo", false}, {"3836221195", true}, {"1-61729-085-8", true}, {"3 423 21412 0", true}, {"3 401 01319 X", true}, {"9784873113685", true}, {"978-4-87311-368-5", true}, {"978 3401013190", true}, {"978-3-8362-2119-1", true}, } validate := New() for i, test := range tests { errs := validate.Var(test.param, "isbn") if test.expected { if !IsEqual(errs, nil) { t.Fatalf("Index: %d ISBN failed Error: %s", i, errs) } } else { if IsEqual(errs, nil) { t.Fatalf("Index: %d ISBN failed Error: %s", i, errs) } else { val := getError(errs, "", "") if val.Tag() != "isbn" { t.Fatalf("Index: %d ISBN failed Error: %s", i, errs) } } } } }
explode_data.jsonl/77275
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 45185, 13799, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 36037, 262, 914, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 4913, 497, 895, 1583, 197, 197, 4913, 7975, 497, 895, 1583, 197, 197, 4913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6