text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestLog_pipelinerun_status_done_v1beta1(t *testing.T) { var ( pipelineName = "done-pipeline" prName = "done-run" ns = "namespace" taskName = "done-task" ) nsList := []*corev1.Namespace{ { ObjectMeta: metav1.ObjectMeta{ Name: ns, }, }, } prs := []*v1beta1.PipelineRun{ { ObjectMeta: metav1.ObjectMeta{ Name: prName, Namespace: ns, Labels: map[string]string{"tekton.dev/pipeline": prName}, }, Spec: v1beta1.PipelineRunSpec{ PipelineRef: &v1beta1.PipelineRef{ Name: pipelineName, }, }, Status: v1beta1.PipelineRunStatus{ Status: duckv1beta1.Status{ Conditions: duckv1beta1.Conditions{ { Type: apis.ConditionSucceeded, Status: corev1.ConditionUnknown, Message: "Running", }, }, }, }, }, } ps := []*v1beta1.Pipeline{ { ObjectMeta: metav1.ObjectMeta{ Name: pipelineName, Namespace: ns, }, Spec: v1beta1.PipelineSpec{ Tasks: []v1beta1.PipelineTask{ { Name: taskName, TaskRef: &v1beta1.TaskRef{ Name: taskName, }, }, }, }, }, } cs, _ := test.SeedV1beta1TestData(t, pipelinev1beta1test.Data{PipelineRuns: prs, Pipelines: ps, Namespaces: nsList}) cs.Pipeline.Resources = cb.APIResourceList(versionB1, []string{"pipeline", "pipelinerun"}) watcher := watch.NewFake() tdc := testDynamic.Options{WatchResource: "pipelineruns", Watcher: watcher} dc, err := tdc.Client( cb.UnstructuredV1beta1P(ps[0], versionB1), cb.UnstructuredV1beta1PR(prs[0], versionB1), ) if err != nil { t.Errorf("unable to create dynamic client: %v", err) } prlo := logOptsv1beta1(prName, ns, cs, dc, fake.Streamer([]fake.Log{}), false, false) go func() { time.Sleep(time.Second * 1) for _, pr := range prs { pr.Status.Conditions[0].Status = corev1.ConditionTrue pr.Status.Conditions[0].Message = "completed" watcher.Modify(pr) } }() start := time.Now() output, err := fetchLogs(prlo) elapsed := time.Since(start).Seconds() if err != nil { t.Errorf("Unexpected error: %v", err) } if elapsed > 10 { t.Errorf("Timed out") } test.AssertOutput(t, "", output) }
explode_data.jsonl/14874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1067 }
[ 2830, 3393, 2201, 620, 81079, 10453, 359, 4773, 24390, 2273, 16, 19127, 16, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 3223, 8790, 675, 284, 330, 10438, 2268, 8790, 698, 197, 25653, 675, 981, 284, 330, 10438, 22973, 698, 197, 8404...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConsensusParameters(t *testing.T) { require := require.New(t) // Default consensus parameters. var emptyParams ConsensusParameters require.Error(emptyParams.SanityCheck(), "default consensus parameters should be invalid") // Valid thresholds. validThresholds := map[ThresholdKind]quantity.Quantity{ KindEntity: *quantity.NewQuantity(), KindNodeValidator: *quantity.NewQuantity(), KindNodeCompute: *quantity.NewQuantity(), KindNodeKeyManager: *quantity.NewQuantity(), KindRuntimeCompute: *quantity.NewQuantity(), KindRuntimeKeyManager: *quantity.NewQuantity(), } validThresholdsParams := ConsensusParameters{ Thresholds: validThresholds, FeeSplitWeightVote: mustInitQuantity(t, 1), } require.NoError(validThresholdsParams.SanityCheck(), "consensus parameters with valid thresholds should be valid") // NOTE: There is currently no way to construct invalid thresholds. // Degenerate fee split. degenerateFeeSplit := ConsensusParameters{ Thresholds: validThresholds, FeeSplitWeightPropose: mustInitQuantity(t, 0), FeeSplitWeightVote: mustInitQuantity(t, 0), FeeSplitWeightNextPropose: mustInitQuantity(t, 0), } require.Error(degenerateFeeSplit.SanityCheck(), "consensus parameters with degenerate fee split should be invalid") }
explode_data.jsonl/36019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 449 }
[ 2830, 3393, 15220, 13626, 9706, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 197, 322, 7899, 23869, 5029, 624, 2405, 4287, 4870, 7292, 13626, 9706, 198, 17957, 6141, 24216, 4870, 808, 38270, 3973, 1507, 330, 2258, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBadRandomSource(t *testing.T) { r := strings.NewReader("") _, err := New(nil, "", &Config{RandomSource: r, Logger: log.Discard}) require.Error(t, err) }
explode_data.jsonl/46859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 17082, 13999, 3608, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 9069, 68587, 31764, 197, 6878, 1848, 1669, 1532, 27907, 11, 7342, 609, 2648, 90, 13999, 3608, 25, 435, 11, 9514, 25, 1487, 909, 47560, 3518, 17957, 6141, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIsValidRegion(t *testing.T) { testCases := []struct { inputReqRegion string inputConfRegion string expectedResult bool }{ {"", "", false}, {"us-east-1", "", true}, {"us-east-1", "US", true}, {"us-west-1", "US", false}, {"us-west-1", "us-west-1", true}, } for i, testCase := range testCases { actualResult := isValidRegion(testCase.inputReqRegion, testCase.inputConfRegion) if testCase.expectedResult != actualResult { t.Errorf("Test %d: Expected the result to `%v`, but instead got `%v`", i+1, testCase.expectedResult, actualResult) } } }
explode_data.jsonl/81626
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 55470, 14091, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 22427, 27234, 14091, 220, 914, 198, 197, 22427, 15578, 14091, 914, 271, 197, 42400, 2077, 1807, 198, 197, 92, 4257, 197, 197, 4913, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTopologyBuilder_BuildWithTrafficTargetAndTrafficSplit(t *testing.T) { selectorAppA := map[string]string{"app": "app-a"} selectorAppB := map[string]string{"app": "app-b"} selectorAppC := map[string]string{"app": "app-c"} selectorAppD := map[string]string{"app": "app-d"} annotations := map[string]string{ "maesh.containo.us/traffic-type": "http", "maesh.containo.us/ratelimit-average": "100", "maesh.containo.us/ratelimit-burst": "200", } svcPorts := []corev1.ServicePort{svcPort("port-8080", 8080, 8080)} saA := createServiceAccount("my-ns", "service-account-a") podA := createPod("my-ns", "app-a", saA, selectorAppA, "10.10.1.1") saB := createServiceAccount("my-ns", "service-account-b") svcB := createService("my-ns", "svc-b", annotations, svcPorts, selectorAppB, "10.10.1.16") podB := createPod("my-ns", "app-b", saB, svcB.Spec.Selector, "10.10.2.1") saC := createServiceAccount("my-ns", "service-account-c") svcC := createService("my-ns", "svc-c", annotations, svcPorts, selectorAppC, "10.10.1.17") podC := createPod("my-ns", "app-c", saC, svcC.Spec.Selector, "10.10.2.2") saD := createServiceAccount("my-ns", "service-account-d") svcD := createService("my-ns", "svc-d", annotations, svcPorts, selectorAppD, "10.10.1.18") podD := createPod("my-ns", "app-d", saD, svcD.Spec.Selector, "10.10.2.3") epB := createEndpoints(svcB, []*corev1.Pod{podB}) epC := createEndpoints(svcC, []*corev1.Pod{podC}) epD := createEndpoints(svcD, []*corev1.Pod{podD}) apiMatch := createHTTPMatch("api", []string{"GET", "POST"}, "/api") metricMatch := createHTTPMatch("metric", []string{"GET"}, "/metric") rtGrp := createHTTPRouteGroup("my-ns", "http-rt-grp", []spec.HTTPMatch{apiMatch, metricMatch}) ttMatch := []string{apiMatch.Name} tt := createTrafficTarget("my-ns", "tt", saB, "8080", []*corev1.ServiceAccount{saA}, rtGrp, ttMatch) ts := createTrafficSplit("my-ns", "ts", svcB, svcC, svcD) k8sClient := fake.NewSimpleClientset(saA, saB, saC, saD, podA, podB, podC, podD, svcB, svcC, svcD, epB, epC, epD) 
smiAccessClient := accessfake.NewSimpleClientset(tt) smiSplitClient := splitfake.NewSimpleClientset(ts) smiSpecClient := specfake.NewSimpleClientset(rtGrp) builder, err := createBuilder(k8sClient, smiAccessClient, smiSpecClient, smiSplitClient) require.NoError(t, err) ignoredResources := mk8s.NewIgnored() got, err := builder.Build(ignoredResources) require.NoError(t, err) assertTopology(t, "fixtures/topology-basic.json", got) }
explode_data.jsonl/48677
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1032 }
[ 2830, 3393, 60954, 3297, 96686, 2354, 87229, 6397, 3036, 87229, 20193, 1155, 353, 8840, 836, 8, 341, 197, 8925, 2164, 32, 1669, 2415, 14032, 30953, 4913, 676, 788, 330, 676, 7409, 16707, 197, 8925, 2164, 33, 1669, 2415, 14032, 30953, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConnUDP_WriteWithContext(t *testing.T) { peerAddr := "127.0.0.1:2154" b, err := net.ResolveUDPAddr("udp", peerAddr) assert.NoError(t, err) ctxCanceled, ctxCancel := context.WithCancel(context.Background()) ctxCancel() type args struct { ctx context.Context udpCtx *ConnUDPContext buffer []byte } tests := []struct { name string args args wantErr bool }{ { name: "valid", args: args{ ctx: context.Background(), udpCtx: NewConnUDPContext(b, nil), buffer: []byte("hello world"), }, }, { name: "cancelled", args: args{ ctx: ctxCanceled, buffer: []byte("hello world"), }, wantErr: true, }, } a, err := net.ResolveUDPAddr("udp", "127.0.0.1:") assert.NoError(t, err) l1, err := net.ListenUDP("udp", a) assert.NoError(t, err) err = SetUDPSocketOptions(l1) assert.NoError(t, err) c1 := NewConnUDP(l1, time.Millisecond*100, 0) defer c1.Close() ctx, cancel := context.WithCancel(context.Background()) defer cancel() l2, err := net.ListenUDP("udp", b) err = SetUDPSocketOptions(l2) assert.NoError(t, err) c2 := NewConnUDP(l2, time.Millisecond*100, 0) defer c2.Close() go func() { b := make([]byte, 1024) _, udpCtx, err := c2.ReadWithContext(ctx, b) if err != nil { return } correctSource(udpCtx.context) }() for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { err = c1.WriteWithContext(tt.args.ctx, tt.args.udpCtx, tt.args.buffer) c1.LocalAddr() c1.RemoteAddr() if tt.wantErr { assert.Error(t, err) } else { assert.NoError(t, err) } }) } }
explode_data.jsonl/68522
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 761 }
[ 2830, 3393, 9701, 41648, 31825, 91101, 1155, 353, 8840, 836, 8, 341, 197, 16537, 13986, 1669, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 17, 16, 20, 19, 698, 2233, 11, 1848, 1669, 4179, 57875, 41648, 13986, 445, 31101, 497, 1439...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGenerateFromUTF16(t *testing.T) { q := apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, ApplicationSource: &argoappv1.ApplicationSource{}, } res1, err := GenerateManifests("./testdata/utf-16", "/", "", &q, false) assert.Nil(t, err) assert.Equal(t, 2, len(res1.Manifests)) }
explode_data.jsonl/58035
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 31115, 3830, 8561, 16, 21, 1155, 353, 8840, 836, 8, 341, 18534, 1669, 1443, 292, 1451, 72272, 1900, 515, 197, 197, 25243, 25, 1060, 609, 12088, 676, 85, 16, 25170, 38837, 197, 78329, 3608, 25, 609, 12088, 676, 85, 16, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshalNumberSet(t *testing.T) { input := []byte(`{ "NS": ["1234", "567.8"] }`) var av DynamoDBAttributeValue err := json.Unmarshal(input, &av) assert.Nil(t, err) assert.Equal(t, DataTypeNumberSet, av.DataType()) assert.Equal(t, 2, len(av.NumberSet())) assert.Equal(t, "1234", av.NumberSet()[0]) assert.Equal(t, "567.8", av.NumberSet()[1]) }
explode_data.jsonl/61702
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 1806, 27121, 2833, 1649, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 3056, 3782, 5809, 90, 330, 2448, 788, 4383, 16, 17, 18, 19, 497, 330, 20, 21, 22, 13, 23, 1341, 335, 63, 692, 2405, 1822, 71813, 3506, 78554, 198, 985...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStacktraceLocationThrowFromCatch(t *testing.T) { vm := New() _, err := vm.RunString(` function main(arg) { try { if (arg === 1) { return f1(); } if (arg === 2) { return f2(); } if (arg === 3) { return f3(); } } catch (e) { throw e; } } function f1() {} function f2() { throw new Error(); } function f3() {} main(2); `) if err == nil { t.Fatal("Expected error") } stack := err.(*Exception).stack if len(stack) != 2 { t.Fatalf("Unexpected stack len: %v", stack) } if frame := stack[0]; frame.funcName != "main" || frame.pc != 30 { t.Fatalf("Unexpected stack frame 0: %#v", frame) } if frame := stack[1]; frame.funcName != "" || frame.pc != 7 { t.Fatalf("Unexpected stack frame 1: %#v", frame) } }
explode_data.jsonl/10528
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 4336, 15067, 4707, 23079, 3830, 57760, 1155, 353, 8840, 836, 8, 341, 54879, 1669, 1532, 741, 197, 6878, 1848, 1669, 10995, 16708, 703, 61528, 7527, 1887, 9404, 8, 341, 197, 6799, 341, 298, 743, 320, 858, 2049, 220, 16, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestReactorVotingPowerChange(t *testing.T) { nVals := 4 logger := log.TestingLogger() css, cleanup := randConsensusNet(nVals, "consensus_voting_power_changes_test", newMockTickerFunc(true), newPersistentKVStore) defer cleanup() reactors, blocksSubs, eventBuses := startConsensusNet(t, css, nVals) defer stopConsensusNet(logger, reactors, eventBuses) // map of active validators activeVals := make(map[string]struct{}) for i := 0; i < nVals; i++ { addr := css[i].privValidator.GetPubKey().Address() activeVals[string(addr)] = struct{}{} } // wait till everyone makes block 1 timeoutWaitGroup(t, nVals, func(j int) { <-blocksSubs[j].Out() }, css) //--------------------------------------------------------------------------- logger.Debug("---------------------------- Testing changing the voting power of one validator a few times") val1PubKey := css[0].privValidator.GetPubKey() val1PubKeyABCI := types.TM2PB.PubKey(val1PubKey) updateValidatorTx := kvstore.MakeValSetChangeTx(val1PubKeyABCI, 25) previousTotalVotingPower := css[0].GetRoundState().LastValidators.TotalVotingPower() waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css, updateValidatorTx) waitForAndValidateBlockWithTx(t, nVals, activeVals, blocksSubs, css, updateValidatorTx) waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css) waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css) if css[0].GetRoundState().LastValidators.TotalVotingPower() == previousTotalVotingPower { t.Fatalf("expected voting power to change (before: %d, after: %d)", previousTotalVotingPower, css[0].GetRoundState().LastValidators.TotalVotingPower()) } updateValidatorTx = kvstore.MakeValSetChangeTx(val1PubKeyABCI, 2) previousTotalVotingPower = css[0].GetRoundState().LastValidators.TotalVotingPower() waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css, updateValidatorTx) waitForAndValidateBlockWithTx(t, nVals, activeVals, blocksSubs, css, updateValidatorTx) waitForAndValidateBlock(t, nVals, activeVals, 
blocksSubs, css) waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css) if css[0].GetRoundState().LastValidators.TotalVotingPower() == previousTotalVotingPower { t.Fatalf("expected voting power to change (before: %d, after: %d)", previousTotalVotingPower, css[0].GetRoundState().LastValidators.TotalVotingPower()) } updateValidatorTx = kvstore.MakeValSetChangeTx(val1PubKeyABCI, 26) previousTotalVotingPower = css[0].GetRoundState().LastValidators.TotalVotingPower() waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css, updateValidatorTx) waitForAndValidateBlockWithTx(t, nVals, activeVals, blocksSubs, css, updateValidatorTx) waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css) waitForAndValidateBlock(t, nVals, activeVals, blocksSubs, css) if css[0].GetRoundState().LastValidators.TotalVotingPower() == previousTotalVotingPower { t.Fatalf("expected voting power to change (before: %d, after: %d)", previousTotalVotingPower, css[0].GetRoundState().LastValidators.TotalVotingPower()) } }
explode_data.jsonl/7894
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1070 }
[ 2830, 3393, 693, 5621, 53, 11519, 14986, 4072, 1155, 353, 8840, 836, 8, 341, 9038, 52452, 1669, 220, 19, 198, 17060, 1669, 1487, 8787, 287, 7395, 741, 1444, 778, 11, 21290, 1669, 10382, 15220, 13626, 6954, 1445, 52452, 11, 330, 6254, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSidecar(t *testing.T) { var testCases = []struct { name string config *prowapi.DecorationConfig gcsOptions gcsupload.Options blobStorageMounts []coreapi.VolumeMount logMount coreapi.VolumeMount outputMount *coreapi.VolumeMount encodedJobSpec string requirePassingEntries, ignoreInterrupts bool secretVolumeMounts []coreapi.VolumeMount wrappers []wrapper.Options }{ { name: "basic case", config: &prowapi.DecorationConfig{ UtilityImages: &prowapi.UtilityImages{Sidecar: "sidecar-image"}, }, gcsOptions: gcsupload.Options{ Items: []string{"first", "second"}, GCSConfiguration: &prowapi.GCSConfiguration{Bucket: "bucket"}, }, blobStorageMounts: []coreapi.VolumeMount{{Name: "blob", MountPath: "/blob"}}, logMount: coreapi.VolumeMount{Name: "logs", MountPath: "/logs"}, outputMount: &coreapi.VolumeMount{Name: "outputs", MountPath: "/outputs"}, encodedJobSpec: "spec", requirePassingEntries: true, ignoreInterrupts: true, wrappers: []wrapper.Options{{Args: []string{"yes"}}}, }, { name: "with secrets", config: &prowapi.DecorationConfig{ UtilityImages: &prowapi.UtilityImages{Sidecar: "sidecar-image"}, }, gcsOptions: gcsupload.Options{ Items: []string{"first", "second"}, GCSConfiguration: &prowapi.GCSConfiguration{Bucket: "bucket"}, }, blobStorageMounts: []coreapi.VolumeMount{{Name: "blob", MountPath: "/blob"}}, logMount: coreapi.VolumeMount{Name: "logs", MountPath: "/logs"}, outputMount: &coreapi.VolumeMount{Name: "outputs", MountPath: "/outputs"}, encodedJobSpec: "spec", requirePassingEntries: true, ignoreInterrupts: true, secretVolumeMounts: []coreapi.VolumeMount{ {Name: "very", MountPath: "/very"}, {Name: "secret", MountPath: "/secret"}, {Name: "stuff", MountPath: "/stuff"}, }, wrappers: []wrapper.Options{{Args: []string{"yes"}}}, }, } for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { container, err := Sidecar( testCase.config, testCase.gcsOptions, testCase.blobStorageMounts, testCase.logMount, testCase.outputMount, testCase.encodedJobSpec, 
testCase.requirePassingEntries, testCase.ignoreInterrupts, testCase.secretVolumeMounts, testCase.wrappers..., ) if err != nil { t.Fatalf("%s: got an error from Sidecar(): %v", testCase.name, err) } testutil.CompareWithSerializedFixture(t, container) }) } }
explode_data.jsonl/79316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1325 }
[ 2830, 3393, 16384, 6918, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 37302, 284, 3056, 1235, 341, 197, 11609, 1920, 914, 198, 197, 25873, 6656, 353, 79, 651, 2068, 22442, 7614, 2648, 198, 197, 3174, 4837, 3798, 4597, 342, 4837, 6120, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSpanEvent_MoveTo(t *testing.T) { ms := generateTestSpanEvent() dest := NewSpanEvent() ms.MoveTo(dest) assert.EqualValues(t, NewSpanEvent(), ms) assert.EqualValues(t, generateTestSpanEvent(), dest) }
explode_data.jsonl/63283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 12485, 1556, 66352, 1249, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 6923, 2271, 12485, 1556, 741, 49616, 1669, 1532, 12485, 1556, 741, 47691, 31195, 1249, 27010, 340, 6948, 12808, 6227, 1155, 11, 1532, 12485, 1556, 1507, 9829...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestJoin(t *testing.T) { if runtime.GOOS == "windows" { jointests = append(jointests, winjointests...) } for _, test := range jointests { expected := filepath.FromSlash(test.path) if p := filepath.Join(test.elem...); p != expected { t.Errorf("join(%q) = %q, want %q", test.elem, p, expected) } } }
explode_data.jsonl/1657
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 12292, 1155, 353, 8840, 836, 8, 341, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 341, 197, 12428, 781, 17966, 284, 8737, 3325, 781, 17966, 11, 3164, 32850, 17966, 31218, 197, 532, 2023, 8358, 1273, 1669, 2088, 10284, 17966,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestLeakPrime(t *testing.T) { ch := GenNatural() for i:=0; i<100; i++ { prime := <-ch fmt.Printf("%v : %v\n", i+1, prime) ch = PrimeFilter(ch, prime) } go func() { for { select { case value := <- ch: fmt.Println("v ",value) } } }() time.Sleep(time.Second) // 这里会有 goroutine的泄漏问题,生成了100个goroutine,并没有管控goroutine的生命周期,会导致goroutine的泄漏 fmt.Println("goroutine nums ", runtime.NumGoroutine()) }
explode_data.jsonl/70691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 2304, 585, 32306, 1155, 353, 8840, 836, 8, 341, 23049, 1669, 9316, 54281, 741, 2023, 600, 14209, 15, 26, 600, 27, 16, 15, 15, 26, 600, 1027, 341, 197, 25653, 545, 1669, 9119, 331, 198, 197, 11009, 19367, 4430, 85, 549, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestListCommandEnvHostname(t *testing.T) { var s state r := strings.NewReader(exampleStateFileEnvHostname) err := s.read(r) assert.NoError(t, err) // Decode expectation as JSON var exp interface{} err = json.Unmarshal([]byte(expectedListOutputEnvHostname), &exp) assert.NoError(t, err) // Run the command, capture the output var stdout, stderr bytes.Buffer os.Setenv("TF_HOSTNAME_KEY_NAME", "name") exitCode := cmdList(&stdout, &stderr, &s) os.Unsetenv("TF_HOSTNAME_KEY_NAME") assert.Equal(t, 0, exitCode) assert.Equal(t, "", stderr.String()) // Decode the output to compare var act interface{} err = json.Unmarshal([]byte(stdout.String()), &act) assert.NoError(t, err) assert.Equal(t, exp, act) }
explode_data.jsonl/58807
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 852, 4062, 14359, 88839, 1155, 353, 8840, 836, 8, 341, 2405, 274, 1584, 198, 7000, 1669, 9069, 68587, 66203, 1397, 1703, 14359, 88839, 340, 9859, 1669, 274, 4125, 2601, 340, 6948, 35699, 1155, 11, 1848, 692, 197, 322, 50194,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRateLimiter(t *testing.T) { e := echo.New() handler := func(c echo.Context) error { return c.String(http.StatusOK, "test") } var inMemoryStore = NewRateLimiterMemoryStoreWithConfig(RateLimiterMemoryStoreConfig{Rate: 1, Burst: 3}) mw := RateLimiter(inMemoryStore) testCases := []struct { id string code int }{ {"127.0.0.1", http.StatusOK}, {"127.0.0.1", http.StatusOK}, {"127.0.0.1", http.StatusOK}, {"127.0.0.1", http.StatusTooManyRequests}, {"127.0.0.1", http.StatusTooManyRequests}, {"127.0.0.1", http.StatusTooManyRequests}, {"127.0.0.1", http.StatusTooManyRequests}, } for _, tc := range testCases { req := httptest.NewRequest(http.MethodGet, "/", nil) req.Header.Add(echo.HeaderXRealIP, tc.id) rec := httptest.NewRecorder() c := e.NewContext(req, rec) _ = mw(handler)(c) assert.Equal(t, tc.code, rec.Code) } }
explode_data.jsonl/33961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 370 }
[ 2830, 3393, 11564, 43, 17700, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1687, 7121, 2822, 53326, 1669, 2915, 1337, 1687, 9328, 8, 1465, 341, 197, 853, 272, 6431, 19886, 52989, 11, 330, 1944, 1138, 197, 630, 2405, 304, 10642, 6093, 284...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestErrorsIsAnErrorAndFormatsErrors(t *testing.T) { errs := error(Errors{ fmt.Errorf("some error: foo=2, bar=baz"), fmt.Errorf("some other error: foo=42, bar=qux"), }) assert.Equal(t, "[<some error: foo=2, bar=baz>, "+ "<some other error: foo=42, bar=qux>]", errs.Error()) }
explode_data.jsonl/27155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 13877, 3872, 2082, 1454, 3036, 44599, 13877, 1155, 353, 8840, 836, 8, 341, 9859, 82, 1669, 1465, 7, 13877, 515, 197, 11009, 13080, 445, 14689, 1465, 25, 15229, 28, 17, 11, 3619, 22086, 1370, 4461, 197, 11009, 13080, 445, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPluginSinglePoolWithStatus(t *testing.T) { t.Parallel() s := &IllumosZpool{ Fields: []string{"alloc"}, Status: true, } zpoolOutput = func() string { return sampleSinglePoolOutput } zpoolStatusOutput = func(pool string) string { return sampleStatusNormalOutput } timeSince = func(timestamp time.Time) float64 { return 10000 } acc := testutil.Accumulator{} require.NoError(t, s.Gather(&acc)) testutil.RequireMetricsEqual( t, testMetricsSelectedStatus, acc.GetTelegrafMetrics(), testutil.SortMetrics(), testutil.IgnoreTime()) }
explode_data.jsonl/18986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 11546, 10888, 10551, 2354, 2522, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1903, 1669, 609, 40, 5448, 436, 57, 10285, 515, 197, 197, 8941, 25, 3056, 917, 4913, 4742, 7115, 197, 58321, 25, 830, 345, 197, 630, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTextGenerator_GenerateDataBySchema_MinAndMaxLength_LengthOfStringInRange(t *testing.T) { randomSource := rand.NewSource(time.Now().UnixNano()) textGeneratorInstance := &textGenerator{ generator: &rangedTextGenerator{ random: rand.New(randomSource), }, } schema := openapi3.NewSchema() var maxLength uint64 = 1000 schema.MinLength = 10 schema.MaxLength = &maxLength for i := 0; i < 1000; i++ { data, err := textGeneratorInstance.GenerateDataBySchema(context.Background(), schema) assert.NoError(t, err) assert.GreaterOrEqual(t, len(data.(string)), 10) assert.LessOrEqual(t, len(data.(string)), 1000) } }
explode_data.jsonl/45036
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 1178, 12561, 2646, 13220, 1043, 1359, 8632, 62122, 3036, 35601, 81620, 66952, 76059, 1155, 353, 8840, 836, 8, 341, 83628, 3608, 1669, 10382, 7121, 3608, 9730, 13244, 1005, 55832, 83819, 2398, 15425, 12561, 2523, 1669, 609, 1318,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPatternYears(t *testing.T) { f := newTestLister(t) entries, err := years(context.Background(), f, "potato/", nil) require.NoError(t, err) year := 2000 for _, entry := range entries { assert.Equal(t, "potato/"+fmt.Sprint(year), entry.Remote()) year++ } }
explode_data.jsonl/24368
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 15760, 54419, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 2271, 852, 261, 1155, 340, 197, 12940, 11, 1848, 1669, 1635, 5378, 19047, 1507, 282, 11, 330, 19099, 4330, 28105, 2092, 340, 17957, 35699, 1155, 11, 1848, 692, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRendersCaseElse(t *testing.T) { template, _ := ParseString("A-{% case other %}{% when 'abc' %}when1{% when 1 or 123 %}when2{% else %}else{% endcase%}-Z", nil) assertRender(t, template, nil, `A-else-Z`) }
explode_data.jsonl/42422
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 49, 14506, 4207, 22971, 1155, 353, 8840, 836, 8, 341, 22832, 11, 716, 1669, 14775, 703, 445, 32, 63347, 4, 1142, 1008, 1018, 15170, 4, 979, 364, 13683, 6, 1018, 92, 9309, 16, 66365, 979, 220, 16, 476, 220, 16, 17, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTrigger_ErrorCreate(t *testing.T) { t.Skip() // controller := gomock.NewController(t) // defer controller.Finish() // mockUsers := mock.NewMockUserStore(controller) // mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) // mockTriggers := mock.NewMockTriggerStore(controller) // mockTriggers.EXPECT().List(noContext, dummyRepo.ID).Return([]*core.Trigger{dummyTrigger}, nil) // mockRepos := mock.NewMockRepositoryStore(controller) // mockRepos.EXPECT().Increment(gomock.Any(), dummyRepo).Return(dummyRepo, nil) // mockContents := mock.NewMockContentService(controller) // mockContents.EXPECT().Find(gomock.Any(), dummyRepo.Slug, dummyTrigger.Path, dummyHook.After).Return(dummyYaml, nil, nil) // mockContents.EXPECT().Find(gomock.Any(), dummyRepo.Slug, dummySignature.Path, dummyHook.After).Return(dummySignature, nil, nil) // mockClient := new(scm.Client) // mockClient.Contents = mockContents // mockBuilds := mock.NewMockBuildStore(controller) // mockBuilds.EXPECT().Create(gomock.Any(), gomock.Any()).Return(sql.ErrNoRows) // triggerer := New( // mockClient, // mockBuilds, // nil, // mockRepos, // mockTriggers, // mockUsers, // ) // builds, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) // if err != sql.ErrNoRows { // t.Error("Expect error when persisting the build fails") // } // if got, want := len(builds), 0; got != want { // t.Errorf("Got build count %d, want %d", got, want) // } }
explode_data.jsonl/27000
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 618 }
[ 2830, 3393, 17939, 28651, 4021, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 741, 197, 322, 220, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 197, 322, 220, 16867, 6461, 991, 18176, 2822, 197, 322, 220, 77333, 7137, 1669, 7860, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInterfaces(t *testing.T) { var i interface{} i = &TORegion{} if _, ok := i.(api.Creator); !ok { t.Errorf("Region must be Creator") } if _, ok := i.(api.Reader); !ok { t.Errorf("Region must be Reader") } if _, ok := i.(api.Updater); !ok { t.Errorf("Region must be Updater") } if _, ok := i.(api.Deleter); !ok { t.Errorf("Region must be Deleter") } if _, ok := i.(api.Identifier); !ok { t.Errorf("Region must be Identifier") } }
explode_data.jsonl/46893
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 201 }
[ 2830, 3393, 41066, 1155, 353, 8840, 836, 8, 341, 2405, 600, 3749, 16094, 8230, 284, 609, 5207, 14091, 31483, 743, 8358, 5394, 1669, 600, 12832, 2068, 98564, 1215, 753, 562, 341, 197, 3244, 13080, 445, 14091, 1969, 387, 35678, 1138, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestArray_UnmarshalValue(t *testing.T) { type V struct { Name string Array *garray.Array } // JSON gtest.C(t, func(t *gtest.T) { var v *V err := gconv.Struct(g.Map{ "name": "john", "array": []byte(`[1,2,3]`), }, &v) t.Assert(err, nil) t.Assert(v.Name, "john") t.Assert(v.Array.Slice(), g.Slice{1, 2, 3}) }) // Map gtest.C(t, func(t *gtest.T) { var v *V err := gconv.Struct(g.Map{ "name": "john", "array": g.Slice{1, 2, 3}, }, &v) t.Assert(err, nil) t.Assert(v.Name, "john") t.Assert(v.Array.Slice(), g.Slice{1, 2, 3}) }) }
explode_data.jsonl/13919
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 1857, 40687, 27121, 1130, 1155, 353, 8840, 836, 8, 341, 13158, 647, 2036, 341, 197, 21297, 220, 914, 198, 197, 58743, 353, 70, 1653, 8114, 198, 197, 532, 197, 322, 4718, 198, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddPDBs(t *testing.T) { labels := map[string]string{"foo": "bar"} notFoundLabels := map[string]string{"bar": "foo"} pdbs := []*pv1beta1.PodDisruptionBudget{ { ObjectMeta: metav1.ObjectMeta{ Name: "pdb-1", Labels: ownerLabels, }, Spec: pv1beta1.PodDisruptionBudgetSpec{ Selector: &metav1.LabelSelector{ MatchLabels: labels, }, }, }, } noReplicas := int32(0) replicas := int32(2) deployments := []*appsv1.Deployment{ { ObjectMeta: metav1.ObjectMeta{ Name: "deployment-1", Labels: labels, }, Spec: appsv1.DeploymentSpec{ Replicas: &noReplicas, Selector: &metav1.LabelSelector{ MatchLabels: labels, }, Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: labels, }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "deployment-2", Labels: notFoundLabels, }, Spec: appsv1.DeploymentSpec{ Replicas: &replicas, Selector: &metav1.LabelSelector{ MatchLabels: notFoundLabels, }, Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: notFoundLabels, }, }, }, }, } statefulSets := []*appsv1.StatefulSet{ { ObjectMeta: metav1.ObjectMeta{ Name: "stateful-set-1", Labels: labels, }, Spec: appsv1.StatefulSetSpec{ Replicas: &noReplicas, Selector: &metav1.LabelSelector{ MatchLabels: labels, }, Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: labels, }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "stateful-set-2", Labels: labels, }, Spec: appsv1.StatefulSetSpec{ Replicas: &replicas, Selector: &metav1.LabelSelector{ MatchLabels: notFoundLabels, }, Template: v1.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: notFoundLabels, }, }, }, }, } namespaces := []*v1.Namespace{ { ObjectMeta: metav1.ObjectMeta{ Name: "default", }, }, } controller := &PDBController{ Interface: setupMockKubernetes(t, pdbs, deployments, statefulSets, namespaces, nil), } err := controller.addPDBs(namespaces[0]) if err != nil { t.Error(err) } }
explode_data.jsonl/53284
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1091 }
[ 2830, 3393, 2212, 47, 3506, 82, 1155, 353, 8840, 836, 8, 341, 95143, 1669, 2415, 14032, 30953, 4913, 7975, 788, 330, 2257, 16707, 97266, 6650, 23674, 1669, 2415, 14032, 30953, 4913, 2257, 788, 330, 7975, 16707, 3223, 67, 1279, 1669, 298...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIsoCanonicalHashes(t *testing.T) { seed := uint64(*origSeed) if *origSeed < 0 { seed = uint64(time.Now().UnixNano()) } defer func() { if t.Failed() && *origSeed < 0 { t.Logf("time based seed: %d", seed) } }() // Number of times to run IsoCanonicalHashes to check consistency. const retries = 5 // Share a global hash function to ensure that we // are resetting the function internally on each use. hash := md5.New() glob, err := filepath.Glob(filepath.Join("testdata", *tests)) if err != nil { t.Fatalf("Failed to open test suite: %v", err) } for _, path := range glob { name := filepath.Base(path) t.Run(name, func(t *testing.T) { src := rand.NewSource(seed) f, err := os.Open(path) if err != nil { t.Fatalf("Failed to open test suite in %q: %v", path, err) } var statements []*Statement dec := NewDecoder(f) for { s, err := dec.Unmarshal() if err != nil { if err == io.EOF { break } t.Fatalf("Unexpected error reading from %q: %v", path, err) } statements = append(statements, s) } f.Close() for _, decomp := range []bool{false, true} { t.Run(fmt.Sprintf("decomp=%t", decomp), func(t *testing.T) { var last map[string][]byte for i := 0; i < retries; i++ { curr, terms := IsoCanonicalHashes(statements, decomp, true, hash, make([]byte, 16)) if !hashesDisjoint(terms) { t.Errorf("IsoCanonicalHashes did not uniquely identify nodes %q with decomp=%t", name, decomp) } if last != nil { last := relabelStatements(statements, termsFor(last, hash)) sort.Sort(simpleLexicalStatements(last)) curr := relabelStatements(statements, termsFor(curr, hash)) sort.Sort(simpleLexicalStatements(curr)) if !reflect.DeepEqual(last, curr) { t.Errorf("IsoCanonicalHashes was not stable between runs on %q with decomp=%t", name, decomp) t.Log("Current run:") for _, s := range curr { t.Logf("\t%s", s) } t.Log("Previous run:") for _, s := range last { t.Logf("\t%s", s) } break } } last = curr } hashes := last ok := allUnique(hashes) if !ok { t.Errorf("Failed to get unique hashes for %q disjoint with 
decomp=%t", name, decomp) t.Logf("skipping %q decomp=%t", path, decomp) return } // Test that a graph is not isomorphic with one generated // by deleting the last statement. t.Run("isomorphic G != G-s", func(t *testing.T) { if len(statements) == 0 { return } if Isomorphic(statements, statements[:len(statements)-1], decomp, hash) { t.Error("Isomorphic(G, G-s)=true") } }) // Test that a graph is not isomorphic with one generated // by hashing the first grounded statement. t.Run("isomorphic G != Gμ(g)", func(t *testing.T) { mangled, mangTerms := mangleFirstIL(statements, hash) if mangTerms == nil { // All terms were blanks. return } if Isomorphic(statements, mangled, decomp, hash) { t.Error("Isomorphic(G, Gμ(g))=true") } }) // Test that a graph is not isomorphic with one generated // by merging the first two lexically sorted blank nodes // into one. t.Run("isomorphic G != G(b1∪b2)", func(t *testing.T) { mangled, mangTerms := mergeFirst2B(statements) if mangTerms == nil { // All terms were blanks. return } if Isomorphic(statements, mangled, decomp, hash) { t.Error("Isomorphic(G, G(b1∪b2))=true") } }) // Relabel a copy of the statements and then sort. 
orig := relabelStatements(statements, termsFor(hashes, hash)) sort.Sort(simpleLexicalStatements(orig)) for _, perm := range []struct { name string data func() ([]*Statement, map[string]string) }{ { name: "reverse statements", data: func() ([]*Statement, map[string]string) { return reverseStatements(statements) }, }, { name: "permute statements", data: func() ([]*Statement, map[string]string) { return permuteStatements(statements, src) }, }, { name: "permute blank labels", data: func() ([]*Statement, map[string]string) { return permuteBlanks(statements, src) }, }, { name: "hash blank labels", data: func() ([]*Statement, map[string]string) { return hashBlanks(statements, md5.New()) }, }, { name: "reverse statements and hash blank labels", data: func() ([]*Statement, map[string]string) { // Reordering must come first since it does not return // a non-nil terms map, but hashBlanks does. s, _ := reverseStatements(statements) return hashBlanks(s, md5.New()) }, }, { name: "permute statements and hash blank labels", data: func() ([]*Statement, map[string]string) { // Reordering must come first since it does not return // a non-nil terms map, but hashBlanks does. 
s, _ := permuteStatements(statements, src) return hashBlanks(s, md5.New()) }, }, } { t.Run(perm.name, func(t *testing.T) { if debug { fmt.Fprintf(os.Stderr, "\n%q %q decomp=%t:\n", path, perm.name, decomp) } altStatements, terms := perm.data() altHashes, altTerms := IsoCanonicalHashes(altStatements, decomp, true, hash, make([]byte, 16)) ok := allUnique(altHashes) && hashesDisjoint(altTerms) if !ok { t.Errorf("Failed to get unique hashes for %q alternative disjoint %q with decomp=%t", path, perm.name, decomp) } if debug { fmt.Fprintln(os.Stderr, "Name mappings from original dataset:") keys := make([]string, len(hashes)) var i int for k := range hashes { keys[i] = k i++ } sort.Strings(keys) w := tabwriter.NewWriter(os.Stderr, 0, 4, 8, ' ', 0) for _, k := range keys { fmt.Fprintf(w, "\t%s\t%s\n", k, translate(k, terms)) } w.Flush() fmt.Fprintln(os.Stderr) } // Relabel a copy of the alternative statements and then sort. alt := relabelStatements(altStatements, termsFor(altHashes, hash)) sort.Sort(simpleLexicalStatements(alt)) for i := range statements { if *orig[i] != *alt[i] { // Otherwise we have pointer inequality. t.Errorf("Unexpected statement in %q %q decomp=%t:\ngot: %#v\nwant:%#v", path, perm.name, decomp, orig[i], alt[i]) break } } if !Isomorphic(statements, altStatements, decomp, hash) { t.Errorf("Isomorphic(G, perm(G))=false in %q %q decomp=%t", path, perm.name, decomp) } }) } }) } }) } }
explode_data.jsonl/47332
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3241 }
[ 2830, 3393, 76275, 70914, 6370, 288, 1155, 353, 8840, 836, 8, 341, 197, 22602, 1669, 2622, 21, 19, 4071, 4670, 41471, 340, 743, 353, 4670, 41471, 366, 220, 15, 341, 197, 197, 22602, 284, 2622, 21, 19, 9730, 13244, 1005, 55832, 83819, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTLSPointFormats(t *testing.T) { // Test that a Server returns the ec_point_format extension when ECC is // negotiated, and not returned on RSA handshake. tests := []struct { name string cipherSuites []uint16 supportedCurves []CurveID supportedPoints []uint8 wantSupportedPoints bool }{ {"ECC", []uint16{TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA}, []CurveID{CurveP256}, []uint8{compressionNone}, true}, {"RSA", []uint16{TLS_RSA_WITH_AES_256_GCM_SHA384}, nil, nil, false}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { clientHello := &clientHelloMsg{ vers: VersionTLS12, random: make([]byte, 32), cipherSuites: tt.cipherSuites, compressionMethods: []uint8{compressionNone}, supportedCurves: tt.supportedCurves, supportedPoints: tt.supportedPoints, } c, s := localPipe(t) replyChan := make(chan interface{}) go func() { cli := Client(c, testConfig) cli.vers = clientHello.vers cli.writeRecord(recordTypeHandshake, clientHello.marshal()) reply, err := cli.readHandshake() c.Close() if err != nil { replyChan <- err } else { replyChan <- reply } }() config := testConfig.Clone() config.CipherSuites = clientHello.cipherSuites Server(s, config).Handshake() s.Close() reply := <-replyChan if err, ok := reply.(error); ok { t.Fatal(err) } serverHello, ok := reply.(*serverHelloMsg) if !ok { t.Fatalf("didn't get ServerHello message in reply. Got %v\n", reply) } if tt.wantSupportedPoints { if len(serverHello.supportedPoints) < 1 { t.Fatal("missing ec_point_format extension from server") } found := false for _, p := range serverHello.supportedPoints { if p == pointFormatUncompressed { found = true break } } if !found { t.Fatal("missing uncompressed format in ec_point_format extension from server") } } else { if len(serverHello.supportedPoints) != 0 { t.Fatalf("unexcpected ec_point_format extension from server: %v", serverHello.supportedPoints) } } }) } }
explode_data.jsonl/36317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 945 }
[ 2830, 3393, 45439, 2609, 44599, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 429, 264, 8422, 4675, 279, 11942, 6085, 8955, 8894, 979, 77316, 374, 198, 197, 322, 50478, 11, 323, 537, 5927, 389, 45641, 57020, 624, 78216, 1669, 3056, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdateExifGPS(t *testing.T) { r, err := os.Open("fixtures/walrus.jpg") if err != nil { t.Fatalf("Failed to open test image, %v", err) } defer r.Close() wr := io.Discard lat := 37.61799 lon := -122.384864 gps_lat, err := PrepareDecimalGPSLatitudeTag(lat) if err != nil { t.Fatalf("Failed to prepare GPSLatitudeTag, %v", err) } gps_lon, err := PrepareDecimalGPSLongitudeTag(lon) if err != nil { t.Fatalf("Failed to prepare GPSLatitudeTag, %v", err) } gps_lat_ref, err := PrepareDecimalGPSLatitudeRefTag(lat) if err != nil { t.Fatalf("Failed to prepare GPSLatitudeRefTag, %v", err) } gps_lon_ref, err := PrepareDecimalGPSLongitudeRefTag(lon) if err != nil { t.Fatalf("Failed to prepare GPSLatitudeRefTag, %v", err) } props := map[string]interface{}{ "GPSLatitude": gps_lat, "GPSLatitudeRef": gps_lat_ref, "GPSLongitude": gps_lon, "GPSLongitudeRef": gps_lon_ref, } err = UpdateExif(r, wr, props) if err != nil { t.Fatalf("Failed to update EXIF data, %v", err) } // TO DO: READ AND VALIDATE TAGS }
explode_data.jsonl/18065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 458 }
[ 2830, 3393, 4289, 840, 333, 63176, 1155, 353, 8840, 836, 8, 1476, 7000, 11, 1848, 1669, 2643, 12953, 445, 45247, 6324, 278, 20341, 4819, 5130, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 311, 1787, 1273, 2168, 11, 1018, 85...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestSummaryToGauge(t *testing.T) { originalFamily := tests.MakeTestMetricFamily(dto.MetricType_SUMMARY, 1, sampleLabels) convertedFams := summaryToGauges(originalFamily) assert.Equal(t, 3, len(convertedFams)) for _, family := range convertedFams { assert.Equal(t, dto.MetricType_GAUGE, *family.Type) name := family.GetName() for _, metric := range family.Metric { if name == tests.SummaryMetricName { assert.True(t, tests.HasLabelName(metric.Label, summaryQuantileLabelName)) } else if name == (tests.SummaryMetricName + sumPostfix) { assert.False(t, tests.HasLabelName(metric.Label, summaryQuantileLabelName)) } else if name == (tests.SummaryMetricName + countPostfix) { assert.False(t, tests.HasLabelName(metric.Label, summaryQuantileLabelName)) } else { // Unexpected family name t.Fail() } } } }
explode_data.jsonl/62482
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 323 }
[ 2830, 3393, 19237, 1249, 38, 19392, 1155, 353, 8840, 836, 8, 341, 197, 9889, 15192, 1669, 7032, 50133, 2271, 54310, 15192, 55237, 1321, 16340, 929, 50369, 48870, 11, 220, 16, 11, 6077, 23674, 340, 197, 76204, 37, 4122, 1669, 12126, 1249...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestRecreate_acceptorSuccess(t *testing.T) { var deployment *kapi.ReplicationController scaler := &cmdtest.FakeScaler{} strategy := &RecreateDeploymentStrategy{ out: &bytes.Buffer{}, errOut: &bytes.Buffer{}, eventClient: fake.NewSimpleClientset().Core(), decoder: kapi.Codecs.UniversalDecoder(), retryTimeout: 1 * time.Second, retryPeriod: 1 * time.Millisecond, scaler: scaler, } acceptorCalled := false acceptor := &testAcceptor{ acceptFn: func(deployment *kapi.ReplicationController) error { acceptorCalled = true return nil }, } oldDeployment, _ := deployutil.MakeDeployment(deploytest.OkDeploymentConfig(1), kapi.Codecs.LegacyCodec(registered.GroupOrDie(kapi.GroupName).GroupVersions[0])) deployment, _ = deployutil.MakeDeployment(deploytest.OkDeploymentConfig(2), kapi.Codecs.LegacyCodec(registered.GroupOrDie(kapi.GroupName).GroupVersions[0])) strategy.rcClient = &fakeControllerClient{deployment: deployment} err := strategy.DeployWithAcceptor(oldDeployment, deployment, 2, acceptor) if err != nil { t.Fatalf("unexpected deploy error: %#v", err) } if !acceptorCalled { t.Fatalf("expected acceptor to be called") } if e, a := 2, len(scaler.Events); e != a { t.Fatalf("expected %d scale calls, got %d", e, a) } if e, a := uint(1), scaler.Events[0].Size; e != a { t.Errorf("expected scale down to %d, got %d", e, a) } if e, a := uint(2), scaler.Events[1].Size; e != a { t.Errorf("expected scale up to %d, got %d", e, a) } }
explode_data.jsonl/19096
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 600 }
[ 2830, 3393, 693, 3182, 35728, 269, 7188, 1155, 353, 8840, 836, 8, 341, 2405, 23172, 353, 74, 2068, 2817, 79, 1693, 2051, 198, 1903, 63084, 1669, 609, 8710, 1944, 991, 726, 59553, 31483, 11355, 10228, 1669, 609, 693, 3182, 75286, 19816, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVitessHashMatchesVitessShards(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t;") tk.MustExec("create table t(customer_id bigint, id bigint, expected_shard bigint unsigned, computed_shard bigint unsigned null, primary key (customer_id, id));") tk.MustExec("insert into t (customer_id, id, expected_shard) values " + "(30370720100, 1, x'd6'), " + "(30370670010, 2, x'd6'), " + "(30370689320, 3, x'e1'), " + "(30370693008, 4, x'e0'), " + "(30370656005, 5, x'89'), " + "(30370702638, 6, x'89'), " + "(30370658809, 7, x'ce'), " + "(30370665369, 8, x'cf'), " + "(30370706138, 9, x'85'), " + "(30370708769, 10, x'85'), " + "(30370711915, 11, x'a3'), " + "(30370712595, 12, x'a3'), " + "(30370656340, 13, x'7d'), " + "(30370660143, 14, x'7c'), " + "(30371738450, 15, x'fc'), " + "(30371683979, 16, x'fd'), " + "(30370664597, 17, x'92'), " + "(30370667361, 18, x'93'), " + "(30370656406, 19, x'd2'), " + "(30370716959, 20, x'd3'), " + "(30375207698, 21, x'9a'), " + "(30375168766, 22, x'9a'), " + "(30370711813, 23, x'ca'), " + "(30370721803, 24, x'ca'), " + "(30370717957, 25, x'97'), " + "(30370734969, 26, x'96'), " + "(30375203572, 27, x'98'), " + "(30375292643, 28, x'99'); ") // Sanity check the shards being computed correctly tk.MustExec("update t set computed_shard = (vitess_hash(customer_id) >> 56);") tk.MustQuery("select customer_id, id, hex(expected_shard), hex(computed_shard) from t where expected_shard <> computed_shard"). Check(testkit.Rows()) }
explode_data.jsonl/65581
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 761 }
[ 2830, 3393, 53, 275, 433, 6370, 42470, 53, 275, 433, 2016, 2347, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTimeStampMillisLogicalTypeEncode(t *testing.T) { schema := `{"type": "long", "logicalType": "timestamp-millis"}` testBinaryDecodeFail(t, schema, []byte(""), "short buffer") testBinaryEncodeFail(t, schema, "test", "cannot transform binary timestamp-millis, expected time.Time") testBinaryCodecPass(t, schema, time.Date(2006, 1, 2, 15, 04, 05, 565000000, time.UTC), []byte("\xfa\x82\xac\xba\x91\x42")) }
explode_data.jsonl/12003
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 66146, 17897, 64312, 929, 32535, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 1565, 4913, 1313, 788, 330, 4825, 497, 330, 30256, 929, 788, 330, 13035, 1448, 56212, 9207, 3989, 18185, 21338, 32564, 19524, 1155, 11, 10802, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReturnsSTHPollination(t *testing.T) { s := createAndOpenStorage() defer closeAndDeleteStorage(s) v := mustCreateSignatureVerifiers(t) h := newHandlerWithClock(s, v, testStuckClock(stuckClockTimeMillis)) sentPollen := sthPollinationFromString(t, addSTHPollinationJSON) sentPollenJSON, err := json.Marshal(sentPollen) if err != nil { t.Fatalf("Failed to marshal pollen JSON: %v", err) } rr := httptest.NewRecorder() req, err := http.NewRequest("POST", "/.well-known/ct/v1/sth-pollination", bytes.NewReader(sentPollenJSON)) if err != nil { t.Fatalf("Failed to create request: %v", err) } h.HandleSTHPollination(rr, req) assert.Equal(t, http.StatusOK, rr.Code) // Make the request again because it seems there's a race inside (go-)sqlite3 // somewhere; occasionally the storage handler doesn't see any pollen // despite the fact that the transaction which wrote it committed before // the select was executed. h.HandleSTHPollination(rr, req) assert.Equal(t, http.StatusOK, rr.Code) // since this is an empty DB, we should get back all of the pollination we sent // TODO(alcutter): We probably shouldn't blindly return stuff we were just given really, that's kinda silly, but it'll do for now. recvPollen := sthPollinationFromString(t, rr.Body.String()) for _, sth := range sentPollen.STHs { assert.Contains(t, recvPollen.STHs, sth) } assert.Equal(t, len(sentPollen.STHs), len(recvPollen.STHs)) }
explode_data.jsonl/80088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 497 }
[ 2830, 3393, 16446, 784, 6610, 965, 2554, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1855, 3036, 5002, 5793, 741, 16867, 3265, 3036, 6435, 5793, 1141, 340, 5195, 1669, 1969, 4021, 25088, 10141, 11836, 1155, 340, 9598, 1669, 501, 3050, 235...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetSpace(t *testing.T) { testCases := []*spaceTestData{ defaultSpaceTestData, { testName: "Empty List", spaceName: "mySpace", cassetteName: "getspace-empty-bc", }, { testName: "Wrong List", spaceName: "mySpace", cassetteName: "getspace-wrong-list", shouldFail: true, }, { testName: "No Items", spaceName: "mySpace", cassetteName: "getspace-no-items", shouldFail: true, }, { testName: "Not Object", spaceName: "mySpace", cassetteName: "getspace-not-object", shouldFail: true, }, { testName: "No Metadata", spaceName: "mySpace", cassetteName: "getspace-no-metadata", shouldFail: true, }, { testName: "No Name", spaceName: "mySpace", cassetteName: "getspace-no-name", shouldFail: true, }, { testName: "Two Apps One Deployed", spaceName: "mySpace", // Test two BCs, but only one DC cassetteName: "getspace-two-apps-one-deploy", appTestData: map[string]*appTestData{ "myApp": defaultAppTestData, "myOtherApp": { spaceName: "mySpace", appName: "myOtherApp", }, }, }, { testName: "Two Apps Both Deployed", spaceName: "mySpace", // Test two deployed applications, with two environments cassetteName: "getspace-two-apps-two-deploy", appTestData: map[string]*appTestData{ "myApp": { spaceName: "mySpace", appName: "myApp", deployTestData: map[string]*deployTestData{ "run": { spaceName: "mySpace", appName: "myApp", envName: "run", expectVersion: "1.0.2", expectPodStatus: [][]string{ {"Running", "2"}, }, expectPodsTotal: 2, expectPodsQuotaCpucores: 0.976, expectPodsQuotaMemory: 524288000, expectConsoleURL: "http://console.myCluster/console/project/my-run", expectLogURL: "http://console.myCluster/console/project/my-run/browse/rc/myDeploy-1?tab=logs", expectAppURL: "http://myDeploy-my-run.example.com", }, "stage": { spaceName: "mySpace", appName: "myApp", envName: "stage", expectVersion: "1.0.3", expectPodStatus: [][]string{ {"Running", "1"}, {"Terminating", "1"}, }, expectPodsTotal: 2, expectPodsQuotaCpucores: 0.976, expectPodsQuotaMemory: 524288000, expectConsoleURL: 
"http://console.myCluster/console/project/my-stage", expectLogURL: "http://console.myCluster/console/project/my-stage/browse/rc/myDeploy-1?tab=logs", }, }, }, "myOtherApp": { spaceName: "mySpace", appName: "myOtherApp", deployTestData: map[string]*deployTestData{ "run": { spaceName: "mySpace", appName: "myOtherApp", envName: "run", expectVersion: "1.0.1", expectPodStatus: [][]string{ {"Running", "1"}, }, expectPodsTotal: 1, expectPodsQuotaCpucores: 0.488, expectPodsQuotaMemory: 262144000, expectConsoleURL: "http://console.myCluster/console/project/my-run", expectLogURL: "http://console.myCluster/console/project/my-run/browse/rc/myOtherDeploy-1?tab=logs", expectAppURL: "http://myOtherDeploy-my-run.example.com", }, }, }, }, }, { testName: "BC List Error", spaceName: "mySpace", cassetteName: "getspace-bc-error", shouldFail: true, errorChecker: errors.IsBadParameterError, }, } for _, testCase := range testCases { t.Run(testCase.testName, func(t *testing.T) { r, err := recorder.New(pathToTestJSON + testCase.cassetteName) require.NoError(t, err, "Failed to open cassette") defer r.Stop() fixture := &testFixture{} kc := getDefaultKubeClient(fixture, r.Transport, t) space, err := kc.GetSpace(testCase.spaceName) if testCase.shouldFail { require.Error(t, err, "Expected an error") if testCase.errorChecker != nil { matches, _ := testCase.errorChecker(err) require.True(t, matches, "Error or cause must be the expected type") } } else { require.NoError(t, err, "Unexpected error occurred") require.NotNil(t, space, "Space is nil") require.NotNil(t, space.Attributes, "Space attributes are nil") require.Equal(t, testCase.spaceName, space.Attributes.Name, "Space name is incorrect") require.NotNil(t, space.Attributes.Applications, "Applications are nil") require.Equal(t, len(testCase.appTestData), len(space.Attributes.Applications), "Wrong number of applications") for _, app := range space.Attributes.Applications { var appInput *appTestData if app != nil { appInput = 
testCase.appTestData[app.Attributes.Name] require.NotNil(t, appInput, "Unknown app: "+app.Attributes.Name) } verifyApplication(app, appInput, t) } } }) } }
explode_data.jsonl/41270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2433 }
[ 2830, 3393, 1949, 9914, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 29838, 8746, 83920, 515, 197, 11940, 9914, 83920, 345, 197, 197, 515, 298, 18185, 675, 25, 257, 330, 3522, 1759, 756, 298, 1903, 1306, 675, 25, 262, 330, 2408, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLRU_Get(t *testing.T) { t.Parallel() c := NewLRUCache(2000) type fields struct { Cache *lru.Cache Hit int64 Miss int64 } type args struct { key string } tests := []struct { name string fields fields args args want interface{} wantErr bool saveBefore bool }{ { name: "Test_LRU_Get_OK", fields: fields{ Cache: c.Cache, Hit: c.hit, Miss: c.miss, }, args: args{ key: "key", }, want: "value", wantErr: false, saveBefore: true, }, { name: "Test_LRU_Get_ERR", fields: fields{ Cache: c.Cache, Hit: c.hit, Miss: c.miss, }, args: args{ key: "key_with_err", }, want: "value", wantErr: true, saveBefore: false, }, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() c := &LRU{ Cache: tt.fields.Cache, hit: tt.fields.Hit, miss: tt.fields.Miss, } if tt.saveBefore { if err := c.Add(tt.args.key, tt.want); err != nil { t.Fatal(err) } } var hit, miss int64 = c.hit, c.miss got, err := c.Get(tt.args.key) if (err != nil) != tt.wantErr { t.Errorf("Get() error = %v, wantErr %v", err, tt.wantErr) } if !tt.wantErr && !reflect.DeepEqual(got, tt.want) { t.Errorf("Get() got = %v, want %v", got, tt.want) } if !tt.wantErr { if hit != c.hit-1 { t.Fatal("Expected incrementing hit, but it not happened") } } else { if miss != c.miss-1 { t.Fatal("Expected incrementing miss, but it not happened") } } }) } }
explode_data.jsonl/46947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 860 }
[ 2830, 3393, 20117, 52, 13614, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1444, 1669, 1532, 20117, 5459, 1777, 7, 17, 15, 15, 15, 692, 13158, 5043, 2036, 341, 197, 6258, 1777, 353, 75, 2672, 46130, 198, 197, 13292, 275, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func Test_DeletePlugin(t *testing.T) { assert := assert.New(t) defer setupLocalDistoForTesting()() // Try delete plugin when plugin is not installed err := DeletePlugin("", "login") assert.NotNil(err) assert.Contains(err.Error(), "could not get plugin path for plugin \"login\"") // Install login (standalone) package mockInstallPlugin(assert, "", "login", "v0.2.0") // Try delete plugin when plugin is installed err = DeletePlugin("mgmt", "cluster") assert.NotNil(err) assert.Contains(err.Error(), "could not get plugin path for plugin \"cluster\"") // Install cluster (context) package mockInstallPlugin(assert, "mgmt", "cluster", "v0.2.0") // Try describe plugin when plugin after installing plugin err = DeletePlugin("mgmt", "cluster") assert.Nil(err) }
explode_data.jsonl/71407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 57418, 11546, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 16867, 6505, 7319, 23356, 78, 2461, 16451, 368, 2822, 197, 322, 9735, 3698, 9006, 979, 9006, 374, 537, 10275, 198, 9859, 1669, 10428, 11546, 1981...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSpanStatus_MoveTo(t *testing.T) { ms := generateTestSpanStatus() dest := NewSpanStatus() ms.MoveTo(dest) assert.EqualValues(t, NewSpanStatus(), ms) assert.EqualValues(t, generateTestSpanStatus(), dest) }
explode_data.jsonl/63301
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 12485, 2522, 66352, 1249, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 6923, 2271, 12485, 2522, 741, 49616, 1669, 1532, 12485, 2522, 741, 47691, 31195, 1249, 27010, 340, 6948, 12808, 6227, 1155, 11, 1532, 12485, 2522, 1507, 9829...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestRunJob(t *testing.T) { ctx := context.Background() s, err := standard.New(ctx, standard.WithLogLevel(zerolog.Disabled), standard.WithMonitor(&nullmetrics.Service{})) require.NoError(t, err) require.NotNil(t, s) run := 0 runFunc := func(ctx context.Context, data interface{}) { run++ } require.NoError(t, s.ScheduleJob(ctx, "Test", "Test job", time.Now().Add(time.Second), runFunc, nil)) require.Len(t, s.ListJobs(ctx), 1) require.Equal(t, 0, run) require.NoError(t, s.RunJob(ctx, "Test job")) time.Sleep(time.Duration(100) * time.Millisecond) assert.Equal(t, 1, run) require.Len(t, s.ListJobs(ctx), 0) }
explode_data.jsonl/44213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 6727, 12245, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1903, 11, 1848, 1669, 5297, 7121, 7502, 11, 5297, 26124, 72676, 7, 7070, 1609, 89576, 701, 5297, 26124, 30098, 2099, 2921, 43262, 13860, 6257, 1171, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsValidOrigin(t *testing.T) { // don't use portocal(http/https) for ALLOWED_ORIGINS while testing, // as we trim them off while running the main function envstore.EnvInMemoryStoreObj.UpdateEnvVariable(constants.SliceStoreIdentifier, constants.EnvKeyAllowedOrigins, []string{"localhost:8080", "*.google.com", "*.google.in", "*abc.*"}) assert.False(t, utils.IsValidOrigin("http://myapp.com"), "it should be invalid origin") assert.False(t, utils.IsValidOrigin("http://appgoogle.com"), "it should be invalid origin") assert.True(t, utils.IsValidOrigin("http://app.google.com"), "it should be valid origin") assert.False(t, utils.IsValidOrigin("http://app.google.ind"), "it should be invalid origin") assert.True(t, utils.IsValidOrigin("http://app.google.in"), "it should be valid origin") assert.True(t, utils.IsValidOrigin("http://xyx.abc.com"), "it should be valid origin") assert.True(t, utils.IsValidOrigin("http://xyx.abc.in"), "it should be valid origin") assert.True(t, utils.IsValidOrigin("http://xyxabc.in"), "it should be valid origin") assert.True(t, utils.IsValidOrigin("http://localhost:8080"), "it should be valid origin") envstore.EnvInMemoryStoreObj.UpdateEnvVariable(constants.SliceStoreIdentifier, constants.EnvKeyAllowedOrigins, []string{"*"}) }
explode_data.jsonl/56490
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 425 }
[ 2830, 3393, 55470, 13298, 1155, 353, 8840, 836, 8, 341, 197, 322, 1513, 944, 990, 2635, 3683, 19886, 14, 2428, 8, 369, 72260, 1479, 19834, 1914, 9557, 1393, 7497, 345, 197, 322, 438, 582, 11013, 1105, 1007, 1393, 4303, 279, 1887, 729,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGoCSVNewWithInvalidDeployDir(t *testing.T) { cleanupFunc := chDirWithCleanup(t, testGoDataDir) defer cleanupFunc() cfg := gen.Config{ OperatorName: testProjectName, Inputs: map[string]string{ DeployDirKey: "notExist", APIsDirKey: filepath.Join("pkg", "apis"), CRDsDirKey: "notExist", }, OutputDir: "deploy", } g := NewBundle(cfg, notExistVersion, "", false, false).(bundleGenerator) _, err := g.generateCSV() if err == nil { t.Fatalf("Failed to get error for running CSV generator"+ "on non-existent manifests directory: %s", cfg.Inputs[DeployDirKey]) } }
explode_data.jsonl/21327
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 241 }
[ 2830, 3393, 10850, 44209, 3564, 2354, 7928, 69464, 6184, 1155, 353, 8840, 836, 8, 341, 1444, 60639, 9626, 1669, 521, 6184, 2354, 67335, 1155, 11, 1273, 10850, 1043, 6184, 340, 16867, 21290, 9626, 2822, 50286, 1669, 4081, 10753, 515, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRoaringPostingsListEqualWithOtherRoaring(t *testing.T) { first := NewPostingsList() first.Insert(42) first.Insert(44) first.Insert(51) second := NewPostingsList() second.Insert(42) second.Insert(44) second.Insert(51) require.True(t, first.Equal(second)) }
explode_data.jsonl/64022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 38872, 3249, 4133, 819, 852, 2993, 2354, 11409, 38872, 3249, 1155, 353, 8840, 836, 8, 341, 42190, 1669, 1532, 4133, 819, 852, 741, 42190, 23142, 7, 19, 17, 340, 42190, 23142, 7, 19, 19, 340, 42190, 23142, 7, 20, 16, 692,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExplainerToKnativeService(t *testing.T) { kfsvc := v1alpha2.KFService{ ObjectMeta: metav1.ObjectMeta{ Name: "mnist", Namespace: "default", }, Spec: v1alpha2.KFServiceSpec{ Default: v1alpha2.EndpointSpec{ Predictor: v1alpha2.PredictorSpec{ DeploymentSpec: v1alpha2.DeploymentSpec{ MinReplicas: 1, MaxReplicas: 3, ServiceAccountName: "testsvcacc", }, Tensorflow: &v1alpha2.TensorflowSpec{ StorageURI: "s3://test/mnist/export", RuntimeVersion: "1.13.0", }, }, Explainer: &v1alpha2.ExplainerSpec{ Alibi: &v1alpha2.AlibiExplainerSpec{ Type: v1alpha2.AlibiAnchorsTabularExplainer, RuntimeVersion: "latest", }, }, }, }, } kfsvcCanary := kfsvc.DeepCopy() kfsvcCanary.Spec.CanaryTrafficPercent = 20 kfsvcCanary.Spec.Canary = &v1alpha2.EndpointSpec{ Predictor: v1alpha2.PredictorSpec{ DeploymentSpec: v1alpha2.DeploymentSpec{ MinReplicas: 1, MaxReplicas: 3, ServiceAccountName: "testsvcacc", }, Tensorflow: &v1alpha2.TensorflowSpec{ StorageURI: "s3://test/mnist-2/export", RuntimeVersion: "1.13.0", }, }, Explainer: &v1alpha2.ExplainerSpec{ Alibi: &v1alpha2.AlibiExplainerSpec{ Type: v1alpha2.AlibiAnchorsTabularExplainer, RuntimeVersion: "latest", }, }, } var defaultService = &knservingv1alpha1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: constants.DefaultExplainerServiceName("mnist"), Namespace: "default", }, Spec: knservingv1alpha1.ServiceSpec{ ConfigurationSpec: knservingv1alpha1.ConfigurationSpec{ Template: &knservingv1alpha1.RevisionTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{"serving.kubeflow.org/kfservice": "mnist"}, Annotations: map[string]string{ "autoscaling.knative.dev/class": "kpa.autoscaling.knative.dev", "autoscaling.knative.dev/target": "1", }, }, Spec: knservingv1alpha1.RevisionSpec{ RevisionSpec: v1beta1.RevisionSpec{ TimeoutSeconds: &constants.DefaultTimeout, PodSpec: v1.PodSpec{ Containers: []v1.Container{ { Image: "alibi:latest", Args: []string{ constants.ArgumentModelName, kfsvc.Name, constants.ArgumentPredictorHost, 
constants.DefaultPredictorServiceName(kfsvc.Name) + "." + kfsvc.Namespace, string(v1alpha2.AlibiAnchorsTabularExplainer), }, }, }, }, }, }, }, }, }, } var canaryService = &knservingv1alpha1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: constants.CanaryTransformerServiceName("mnist"), Namespace: "default", }, Spec: knservingv1alpha1.ServiceSpec{ ConfigurationSpec: knservingv1alpha1.ConfigurationSpec{ Template: &knservingv1alpha1.RevisionTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{"serving.kubeflow.org/kfservice": "mnist"}, Annotations: map[string]string{ "autoscaling.knative.dev/class": "kpa.autoscaling.knative.dev", "autoscaling.knative.dev/target": "1", }, }, Spec: knservingv1alpha1.RevisionSpec{ RevisionSpec: v1beta1.RevisionSpec{ TimeoutSeconds: &constants.DefaultTimeout, PodSpec: v1.PodSpec{ Containers: []v1.Container{ { Image: "alibi:latest", Args: []string{ constants.ArgumentModelName, kfsvc.Name, constants.ArgumentPredictorHost, constants.CanaryPredictorServiceName(kfsvc.Name) + "." 
+ kfsvc.Namespace, string(v1alpha2.AlibiAnchorsTabularExplainer), }, }, }, }, }, }, }, }, }, } var configMapData = map[string]string{ "explainers": `{ "alibi" : { "image": "alibi" } }`, } scenarios := map[string]struct { configMapData map[string]string kfService v1alpha2.KFService expectedDefault *knservingv1alpha1.Service expectedCanary *knservingv1alpha1.Service }{ "RunLatestExplainer": { kfService: kfsvc, expectedDefault: defaultService, expectedCanary: nil, configMapData: configMapData, }, "RunCanaryExplainer": { kfService: *kfsvcCanary, expectedDefault: defaultService, expectedCanary: canaryService, configMapData: configMapData, }, } for name, scenario := range scenarios { serviceBuilder := NewServiceBuilder(c, &v1.ConfigMap{ Data: scenario.configMapData, }) actualDefaultService, err := serviceBuilder.CreateExplainerService( constants.DefaultExplainerServiceName(scenario.kfService.Name), scenario.kfService.ObjectMeta, scenario.kfService.Spec.Default.Explainer, constants.DefaultPredictorServiceName(scenario.kfService.Name)+"."+scenario.kfService.Namespace, false) if err != nil { t.Errorf("Test %q unexpected error %s", name, err.Error()) } if diff := cmp.Diff(scenario.expectedDefault, actualDefaultService); diff != "" { t.Errorf("Test %q unexpected default service (-want +got): %v", name, diff) } if scenario.kfService.Spec.Canary != nil { actualCanaryService, err := serviceBuilder.CreateExplainerService( constants.CanaryTransformerServiceName(kfsvc.Name), scenario.kfService.ObjectMeta, scenario.kfService.Spec.Canary.Explainer, constants.CanaryPredictorServiceName(scenario.kfService.Name)+"."+scenario.kfService.Namespace, true) if err != nil { t.Errorf("Test %q unexpected error %s", name, err.Error()) } if diff := cmp.Diff(scenario.expectedCanary, actualCanaryService); diff != "" { t.Errorf("Test %q unexpected canary service (-want +got): %v", name, diff) } } } }
explode_data.jsonl/63805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2680 }
[ 2830, 3393, 43953, 1743, 1249, 42, 29738, 1860, 1155, 353, 8840, 836, 8, 341, 16463, 69, 58094, 1669, 348, 16, 7141, 17, 11352, 37, 1860, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 414, 330, 21775, 380, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestIsLine(t *testing.T) { notLine := []byte("This is not a line") assert.False(t, isLine(notLine)) notLine = []byte("This is not a line\n\r") assert.False(t, isLine(notLine)) notLine = []byte("This is \n not a line") assert.False(t, isLine(notLine)) line := []byte("This is a line \n") assert.True(t, isLine(line)) line = []byte("This is a line\r\n") assert.True(t, isLine(line)) }
explode_data.jsonl/62360
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 3872, 2460, 1155, 353, 8840, 836, 8, 341, 97266, 2460, 1669, 3056, 3782, 445, 1986, 374, 537, 264, 1555, 1138, 6948, 50757, 1155, 11, 374, 2460, 24772, 2460, 4390, 97266, 2460, 284, 3056, 3782, 445, 1986, 374, 537, 264, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidArgsCompletionInGo(t *testing.T) { rootCmd := &Command{ Use: "root", ValidArgs: []string{"one", "two", "three"}, Args: MinimumNArgs(1), } // Test that validArgs are completed output, err := executeCommand(rootCmd, ShellCompNoDescRequestCmd, "") if err != nil { t.Errorf("Unexpected error: %v", err) } expected := strings.Join([]string{ "one", "two", "three", ":4", "Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } // Test that validArgs are completed with prefix output, err = executeCommand(rootCmd, ShellCompNoDescRequestCmd, "o") if err != nil { t.Errorf("Unexpected error: %v", err) } expected = strings.Join([]string{ "one", ":4", "Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } // Test that validArgs don't repeat output, err = executeCommand(rootCmd, ShellCompNoDescRequestCmd, "one", "") if err != nil { t.Errorf("Unexpected error: %v", err) } expected = strings.Join([]string{ ":0", "Completion ended with directive: ShellCompDirectiveDefault", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } }
explode_data.jsonl/43737
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 492 }
[ 2830, 3393, 4088, 4117, 33190, 641, 10850, 1155, 353, 8840, 836, 8, 341, 33698, 15613, 1669, 609, 4062, 515, 197, 95023, 25, 981, 330, 2888, 756, 197, 197, 4088, 4117, 25, 3056, 917, 4913, 603, 497, 330, 19789, 497, 330, 27856, 7115, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestMySQLExplainWarnings(t *testing.T) { expInfo, err := connTest.Explain("select 1", TraditionalExplainType, TraditionalFormatExplain) if err != nil { t.Error(err) } err = common.GoldenDiff(func() { MySQLExplainWarnings(expInfo) }, t.Name(), update) if err != nil { t.Error(err) } }
explode_data.jsonl/55050
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 59224, 840, 20772, 20140, 1155, 353, 8840, 836, 8, 341, 48558, 1731, 11, 1848, 1669, 4534, 2271, 5121, 20772, 445, 1742, 220, 16, 497, 45460, 840, 20772, 929, 11, 45460, 4061, 840, 20772, 340, 743, 1848, 961, 2092, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWeight_Encode(t *testing.T) { assertEncode(t, []encodingAssert{ {NewWeight(29), MustHexDecodeString("0x1d00000000000000")}, }) }
explode_data.jsonl/5199
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 57 }
[ 2830, 3393, 8295, 93529, 534, 1155, 353, 8840, 836, 8, 341, 6948, 32535, 1155, 11, 3056, 17159, 8534, 515, 197, 197, 90, 3564, 8295, 7, 17, 24, 701, 15465, 20335, 32564, 703, 445, 15, 87, 16, 67, 15, 15, 15, 15, 15, 15, 15, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFactory_NewMetricsAspect_Errs(t *testing.T) { err := fmt.Errorf("expected") b := &builder{createClient: clientFunc(err), mg: dummyMetadataGenerator} b.SetAdapterConfig(&config.Params{}) res, e := b.Build(context.Background(), test.NewEnv(t)) if e != nil && !strings.Contains(e.Error(), err.Error()) { t.Fatalf("Expected error from factory.createClient to be propagated, got %v, %v", res, e) } else if e == nil { t.Fatalf("Got no error") } }
explode_data.jsonl/54748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 4153, 39582, 27328, 37559, 93623, 82, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 8879, 13080, 445, 7325, 1138, 2233, 1669, 609, 17850, 90, 3182, 2959, 25, 2943, 9626, 3964, 701, 13742, 25, 17292, 14610, 12561, 532, 2233, 4202...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBaseReqValidateBasic(t *testing.T) { fromAddr := "cosmos1cq0sxam6x4l0sv9yz3a2vlqhdhvt2k6jtgcse0" tenstakes, err := types.ParseCoins("10stake") require.NoError(t, err) onestake, err := types.ParseDecCoins("1.0stake") require.NoError(t, err) req1 := NewBaseReq( fromAddr, "", "nonempty", "", "", 0, 0, tenstakes, nil, false, ) req2 := NewBaseReq( "", "", "nonempty", "", "", 0, 0, tenstakes, nil, false, ) req3 := NewBaseReq( fromAddr, "", "", "", "", 0, 0, tenstakes, nil, false, ) req4 := NewBaseReq( fromAddr, "", "nonempty", "", "", 0, 0, tenstakes, onestake, false, ) req5 := NewBaseReq( fromAddr, "", "nonempty", "", "", 0, 0, types.Coins{}, types.DecCoins{}, false, ) tests := []struct { name string req BaseReq w http.ResponseWriter want bool }{ {"ok", req1, httptest.NewRecorder(), true}, {"neither fees nor gasprices provided", req5, httptest.NewRecorder(), true}, {"empty from", req2, httptest.NewRecorder(), false}, {"empty chain-id", req3, httptest.NewRecorder(), false}, {"fees and gasprices provided", req4, httptest.NewRecorder(), false}, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { require.Equal(t, tt.want, tt.req.ValidateBasic(tt.w)) }) } }
explode_data.jsonl/3951
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 559 }
[ 2830, 3393, 3978, 27234, 17926, 15944, 1155, 353, 8840, 836, 8, 341, 42727, 13986, 1669, 330, 9407, 8631, 16, 95418, 15, 48612, 309, 21, 87, 19, 75, 15, 3492, 24, 18348, 18, 64, 17, 14536, 80, 15990, 71, 9708, 17, 74, 21, 57205, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRepository_GetFeaturesFail(t *testing.T) { assert := assert.New(t) featuresCalls := make(chan int, 10) var sendStatus200 int32 prevStatus := 0 srv := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { switch req.Method + " " + req.URL.Path { case "POST /client/register": case "GET /client/features": status200 := atomic.LoadInt32(&sendStatus200) == 1 status := 0 if status200 { status = 200 rw.WriteHeader(200) writeJSON(rw, api.FeatureResponse{}) } else { status = 400 rw.WriteHeader(400) } if status != prevStatus { featuresCalls <- status prevStatus = status } case "POST /client/metrics": default: t.Fatalf("Unexpected request: %+v", req) } })) defer srv.Close() ready := make(chan struct{}) mockListener := &MockedListener{} mockListener.On("OnReady").Run(func(args mock.Arguments) { close(ready) }).Return() mockListener.On("OnRegistered", mock.AnythingOfType("ClientData")) mockListener.On("OnError", mock.MatchedBy(func(e error) bool { return strings.HasSuffix(e.Error(), "/client/features returned status code 400") })).Return() mockListener.On("OnSent", mock.AnythingOfType("MetricsData")).Return() client, err := NewClient( WithUrl(srv.URL), WithAppName(mockAppName), WithInstanceId(mockInstanceId), WithListener(mockListener), WithRefreshInterval(time.Millisecond), ) assert.Nil(err, "client should not return an error") assert.Equal(400, <-featuresCalls) select { case <-ready: t.Fatal("client is ready but it shouldn't be") case <-time.NewTimer(time.Second).C: } atomic.StoreInt32(&sendStatus200, 1) assert.Equal(200, <-featuresCalls) select { case <-ready: case <-time.NewTimer(time.Second).C: t.Fatal("client isn't ready but should be") } client.Close() }
explode_data.jsonl/47721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 692 }
[ 2830, 3393, 4624, 13614, 21336, 19524, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 1166, 22462, 55292, 1669, 1281, 35190, 526, 11, 220, 16, 15, 340, 2405, 3624, 2522, 17, 15, 15, 526, 18, 17, 198, 50728, 2522, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestHTTPGetterTarDownload(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { f, _ := os.Open("testdata/empty-0.0.1.tgz") defer f.Close() b := make([]byte, 512) f.Read(b) //Get the file size FileStat, _ := f.Stat() FileSize := strconv.FormatInt(FileStat.Size(), 10) //Simulating improper header values from bitbucket w.Header().Set("Content-Type", "application/x-tar") w.Header().Set("Content-Encoding", "gzip") w.Header().Set("Content-Length", FileSize) f.Seek(0, 0) io.Copy(w, f) })) defer srv.Close() g, err := NewHTTPGetter(WithURL(srv.URL)) if err != nil { t.Fatal(err) } data, _ := g.Get(srv.URL) mimeType := http.DetectContentType(data.Bytes()) expectedMimeType := "application/x-gzip" if mimeType != expectedMimeType { t.Fatalf("Expected response with MIME type %s, but got %s", expectedMimeType, mimeType) } }
explode_data.jsonl/53170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 9230, 31485, 62733, 11377, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 1166, 11, 716, 1669, 2643, 12953, 445, 9242...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnicode(t *testing.T) { var jsonStr = `{"key":0,"的情况下解":{"key":1,"的情况":2}}` if Get(jsonStr, "的情况下解.key").Num != 1 { t.Fatal("fail") } if Get(jsonStr, "的情况下解.的情况").Num != 2 { t.Fatal("fail") } if Get(jsonStr, "的情况下解.的?况").Num != 2 { t.Fatal("fail") } if Get(jsonStr, "的情况下解.的?*").Num != 2 { t.Fatal("fail") } if Get(jsonStr, "的情况下解.*?况").Num != 2 { t.Fatal("fail") } if Get(jsonStr, "的情?下解.*?况").Num != 2 { t.Fatal("fail") } if Get(jsonStr, "的情下解.*?况").Num != 0 { t.Fatal("fail") } }
explode_data.jsonl/43433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 33920, 1155, 353, 8840, 836, 8, 341, 2405, 2951, 2580, 284, 1565, 4913, 792, 788, 15, 1335, 104248, 49238, 22317, 792, 788, 16, 1335, 102072, 788, 17, 3417, 3989, 743, 2126, 9304, 2580, 11, 330, 104248, 49238, 4735, 1827, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestTemplate_HasOutputs(t *testing.T) { t.Run("Default", func(t *testing.T) { x := &Template{} assert.False(t, x.HasOutput()) }) t.Run("Container", func(t *testing.T) { x := &Template{Container: &corev1.Container{}} assert.True(t, x.HasOutput()) }) t.Run("ContainerSet", func(t *testing.T) { t.Run("NoMain", func(t *testing.T) { x := &Template{ContainerSet: &ContainerSetTemplate{}} assert.False(t, x.HasOutput()) }) t.Run("Main", func(t *testing.T) { x := &Template{ContainerSet: &ContainerSetTemplate{Containers: []ContainerNode{{Container: corev1.Container{Name: "main"}}}}} assert.True(t, x.HasOutput()) }) }) t.Run("Script", func(t *testing.T) { x := &Template{Script: &ScriptTemplate{}} assert.True(t, x.HasOutput()) }) t.Run("Data", func(t *testing.T) { x := &Template{Data: &Data{}} assert.True(t, x.HasOutput()) }) t.Run("Resource", func(t *testing.T) { x := &Template{Resource: &ResourceTemplate{}} assert.False(t, x.HasOutput()) }) }
explode_data.jsonl/26056
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 7275, 2039, 300, 61438, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 3675, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 10225, 1669, 609, 7275, 16094, 197, 6948, 50757, 1155, 11, 856, 16152, 5097, 2398, 197, 3518, 3244...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFile_Empty(t *testing.T) { fs := testFs() fs.WriteFile(".halfpipe.io", []byte{}, 0777) err := CheckFile(fs, ".halfpipe.io", false) assert.Equal(t, linterrors.NewFileError(".halfpipe.io", "is empty"), err) }
explode_data.jsonl/71217
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 1703, 76060, 1595, 1155, 353, 8840, 836, 8, 341, 53584, 1669, 1273, 48300, 741, 53584, 4073, 1703, 5680, 37006, 13768, 4245, 497, 3056, 3782, 22655, 220, 15, 22, 22, 22, 692, 9859, 1669, 4248, 1703, 31856, 11, 5933, 37006, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfig_FieldNotDefined(t *testing.T) { c := NewConfig() err := c.LoadConfig("./testdata/invalid_field.toml") require.Error(t, err, "invalid field name") assert.Equal(t, "Error loading config file ./testdata/invalid_field.toml: plugin inputs.http_listener_v2: line 1: configuration specified the fields [\"not_a_field\"], but they weren't used", err.Error()) }
explode_data.jsonl/67105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 2648, 46272, 2623, 29361, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 2648, 741, 9859, 1669, 272, 13969, 2648, 13988, 92425, 14, 11808, 5013, 73494, 75, 1138, 17957, 6141, 1155, 11, 1848, 11, 330, 11808, 2070, 829, 1138,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIndexation_Serialize(t *testing.T) { type test struct { name string source *iotago.Indexation target []byte } tests := []test{ func() test { indexationPayload, indexationPayloadData := tpkg.RandIndexation() return test{"ok", indexationPayload, indexationPayloadData} }(), } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { edData, err := tt.source.Serialize(serializer.DeSeriModePerformValidation) assert.NoError(t, err) assert.Equal(t, tt.target, edData) }) } }
explode_data.jsonl/27822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 1552, 367, 1098, 9050, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 2036, 341, 197, 11609, 256, 914, 198, 197, 47418, 353, 11098, 6304, 18338, 367, 198, 197, 28861, 3056, 3782, 198, 197, 532, 78216, 1669, 3056, 1944, 515, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReaderFrom_Stream exercises the Streamer against two backing readers:
// a small fixed byte slice ("Simple") and a large synthetic stream ("Stream").
// Each sub-test checks both an offset read (StreamAt) and a full drain (Stream).
func TestReaderFrom_Stream(t *testing.T) {
	ctx := context.Background()
	t.Run("Simple", func(t *testing.T) {
		const chunkSize = 3
		s := NewStreamer(&BytesReader{
			Align: chunkSize,
			Data:  []byte{1, 2, 3, 4, 5, 6, 7, 8},
		}, chunkSize)
		t.Run("Equal", func(t *testing.T) {
			out := new(bytes.Buffer)
			// Streaming from offset 2 must yield exactly the tail of Data.
			require.NoError(t, s.StreamAt(ctx, 2, out))
			require.Equal(t, []byte{3, 4, 5, 6, 7, 8}, out.Bytes())
		})
		t.Run("Discard", func(t *testing.T) {
			// A full stream into io.Discard should complete without error.
			require.NoError(t, s.Stream(ctx, io.Discard))
		})
	})
	t.Run("Stream", func(t *testing.T) {
		const (
			chunkSize = 1024
			total     = chunkSize*100 + 56
		)
		s := NewStreamer(&StreamReader{
			Align: chunkSize,
			Total: total,
		}, chunkSize)
		t.Run("Equal", func(t *testing.T) {
			buf := new(bytes.Buffer)
			// Read the last chunkSize bytes of the stream; the assertions
			// below pin the first byte value and the read length.
			require.NoError(t, s.StreamAt(ctx, total-chunkSize, buf))
			require.Equal(t, byte(56), buf.Bytes()[0])
			require.Equal(t, 1024, buf.Len())
		})
		t.Run("Discard", func(t *testing.T) {
			require.NoError(t, s.Stream(ctx, io.Discard))
		})
	})
}
explode_data.jsonl/21775
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 473 }
[ 2830, 3393, 5062, 3830, 80631, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 3244, 16708, 445, 16374, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 4777, 11879, 1695, 284, 220, 18, 198, 197, 1903, 1669, 1532, 80063, 209...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTeamTxSweepMembers: bob is added to ann's team as a WRITER, then
// resets his account (getting a new user version) and is re-added as a
// READER through an AddMember transaction. The final assertions show only
// ann (owner) and the post-reset bob (reader) remain — the pre-reset
// writer entry has been swept.
func TestTeamTxSweepMembers(t *testing.T) {
	tt := newTeamTester(t)
	defer tt.cleanup()

	ann := tt.addUser("ann")
	t.Logf("Signed up user ann (%s)", ann.username)

	bob := tt.addUser("bob")
	t.Logf("Signed up user bob (%s)", bob.username)

	pat := tt.addPuklessUser("pat")
	t.Logf("Signed up PUKless user pat (%s)", pat.username)

	team := ann.createTeam()
	t.Logf("Team created (%s)", team)

	ann.addTeamMember(team, bob.username, keybase1.TeamRole_WRITER)

	// Bob loses his per-user key on reset and reprovisions with a new
	// user version.
	bob.reset()
	bob.loginAfterReset()
	t.Logf("Bob (%s) resets and reprovisions, he is now: %v", bob.username, bob.userVersion())

	// Wait for CLKR and RotateKey link.
	ann.waitForRotateByID(ann.loadTeam(team, false /* admin */).ID, keybase1.Seqno(3))

	teamObj := ann.loadTeam(team, true /* admin */)

	tx := teams.CreateAddMemberTx(teamObj)
	err := tx.AddMemberByUsername(context.Background(), bob.username, keybase1.TeamRole_READER)
	require.NoError(t, err)

	err = tx.Post(libkb.NewMetaContextForTest(*ann.tc))
	require.NoError(t, err)

	// Reload and verify membership: exactly one owner (ann) and one
	// reader (bob at his new user version); no other roles, no invites.
	teamObj = ann.loadTeam(team, true /* admin */)

	members, err := teamObj.Members()
	require.NoError(t, err)
	require.Equal(t, 1, len(members.Owners))
	require.Equal(t, 1, len(members.Readers))
	require.Equal(t, 0, len(members.Admins)+len(members.Writers)+len(members.RestrictedBots))
	require.EqualValues(t, ann.userVersion(), members.Owners[0])
	require.EqualValues(t, bob.userVersion(), members.Readers[0])
	require.Equal(t, 0, len(teamObj.GetActiveAndObsoleteInvites()))
}
explode_data.jsonl/27644
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 568 }
[ 2830, 3393, 14597, 31584, 50, 48542, 24371, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 501, 14597, 58699, 1155, 340, 16867, 17853, 87689, 2822, 197, 1020, 1669, 17853, 1364, 1474, 445, 1020, 1138, 3244, 98954, 445, 49312, 705, 1196, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertToArabic(t *testing.T) { for _, test := range cases { t.Run(fmt.Sprintf("%s gets converted to %d", test.Roman, test.Arabic), func(t *testing.T) { got := ConvertToArabic(test.Roman) want := test.Arabic if got != want { t.Errorf("got %v, want %v", got, want) } }) } }
explode_data.jsonl/27665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 12012, 1249, 6953, 68291, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 5048, 341, 197, 3244, 16708, 28197, 17305, 4430, 82, 5221, 16099, 311, 1018, 67, 497, 1273, 2013, 6908, 11, 1273, 875, 50105, 292, 701, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRedis_Hgetall(t *testing.T) { runOnRedis(t, func(client *Redis) { assert.Nil(t, client.Hset("a", "aa", "aaa")) assert.Nil(t, client.Hset("a", "bb", "bbb")) _, err := NewRedis(client.Addr, "").Hgetall("a") assert.NotNil(t, err) vals, err := client.Hgetall("a") assert.Nil(t, err) assert.EqualValues(t, map[string]string{ "aa": "aaa", "bb": "bbb", }, vals) }) }
explode_data.jsonl/39156
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 48137, 2039, 455, 541, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 6948, 59678, 1155, 11, 2943, 3839, 746, 445, 64, 497, 330, 5305, 497, 330, 32646, 5455, 197, 6948, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCrudSegmentsWithFailures runs each segment CRUD handler against a
// stubbed DB forced into error states (or against a missing segment) and
// asserts every call returns a non-empty default (error) payload.
func TestCrudSegmentsWithFailures(t *testing.T) {
	var res middleware.Responder
	db := entity.NewTestDB()
	c := &crud{}

	defer db.Close()
	defer gostub.StubFunc(&getDB, db).Reset()

	// Seed one flag so the segment operations have a parent FlagID 1.
	c.CreateFlag(flag.CreateFlagParams{
		Body: &models.CreateFlagRequest{
			Description: util.StringPtr("funny flag"),
		},
	})

	t.Run("FindSegments - db generic error", func(t *testing.T) {
		db.Error = fmt.Errorf("db generic error")
		res = c.FindSegments(segment.FindSegmentsParams{FlagID: int64(1)})
		assert.NotZero(t, res.(*segment.FindSegmentsDefault).Payload)
		db.Error = nil
	})

	t.Run("CreateSegments - db generic error", func(t *testing.T) {
		db.Error = fmt.Errorf("db generic error")
		res = c.CreateSegment(segment.CreateSegmentParams{
			FlagID: int64(1),
			Body: &models.CreateSegmentRequest{
				Description:    util.StringPtr("segment1"),
				RolloutPercent: util.Int64Ptr(int64(100)),
			},
		})
		assert.NotZero(t, res.(*segment.CreateSegmentDefault).Payload)
		db.Error = nil
	})

	// No injected DB error here: the failure comes from the bogus
	// SegmentID.
	t.Run("PutSegments - put on a non-existing segment", func(t *testing.T) {
		res = c.PutSegment(segment.PutSegmentParams{
			FlagID:    int64(1),
			SegmentID: int64(999999),
			Body: &models.PutSegmentRequest{
				Description:    util.StringPtr("segment1"),
				RolloutPercent: util.Int64Ptr(int64(0)),
			},
		})
		assert.NotZero(t, res.(*segment.PutSegmentDefault).Payload)
	})

	t.Run("PutSegmentsReorder - db generic error", func(t *testing.T) {
		db.Error = fmt.Errorf("db generic error")
		res = c.PutSegmentsReorder(segment.PutSegmentsReorderParams{
			FlagID: int64(1),
			Body: &models.PutSegmentReorderRequest{
				SegmentIDs: []int64{int64(999998), int64(1)},
			},
		})
		assert.NotZero(t, res.(*segment.PutSegmentsReorderDefault).Payload)
		db.Error = nil
	})

	t.Run("DeleteSegment - db generic error", func(t *testing.T) {
		db.Error = fmt.Errorf("db generic error")
		res = c.DeleteSegment(segment.DeleteSegmentParams{
			FlagID:    int64(1),
			SegmentID: int64(2),
		})
		assert.NotZero(t, res.(*segment.DeleteSegmentDefault).Payload)
		db.Error = nil
	})
}
explode_data.jsonl/19452
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 850 }
[ 2830, 3393, 92061, 64813, 2354, 19524, 1413, 1155, 353, 8840, 836, 8, 341, 2405, 592, 29679, 8377, 20328, 198, 20939, 1669, 5387, 7121, 2271, 3506, 741, 1444, 1669, 609, 53569, 31483, 16867, 2927, 10421, 741, 16867, 67934, 392, 7758, 392,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormatWithLandLine(t *testing.T) { for _, tt := range mobWithLLFormatTests { number := ParseWithLandLine(tt.input, tt.country) if number != tt.expected { t.Errorf("Parse(number=`%s`, country=`%s`): expected `%s`, actual `%s`", tt.input, tt.country, tt.expected, number) } } }
explode_data.jsonl/58213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 4061, 2354, 41957, 2460, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 12595, 2354, 4086, 4061, 18200, 341, 197, 57135, 1669, 14775, 2354, 41957, 2460, 47152, 10046, 11, 17853, 33324, 340, 197, 743, 1372, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestManifestGenerateComponentHubTag(t *testing.T) { g := NewWithT(t) objs, err := runManifestCommands("component_hub_tag", "", liveCharts) if err != nil { t.Fatal(err) } tests := []struct { deploymentName string containerName string want string }{ { deploymentName: "istio-ingressgateway", containerName: "istio-proxy", want: "istio-spec.hub/proxyv2:istio-spec.tag", }, { deploymentName: "istiod", containerName: "discovery", want: "component.pilot.hub/pilot:2", }, } for _, tt := range tests { for _, os := range objs { containerName := tt.deploymentName if tt.containerName != "" { containerName = tt.containerName } container := mustGetContainer(g, os, tt.deploymentName, containerName) g.Expect(container).Should(HavePathValueEqual(PathValue{"image", tt.want})) } } }
explode_data.jsonl/47902
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 384 }
[ 2830, 3393, 38495, 31115, 2189, 19316, 5668, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 22671, 82, 11, 1848, 1669, 1598, 38495, 30479, 445, 8571, 93996, 9372, 497, 7342, 3887, 64878, 340, 743, 1848, 961, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestValidateServerPermissions_ValidInput feeds several well-formed
// permission lists to validateServerPermissions and expects each one to
// validate (return true).
func TestValidateServerPermissions_ValidInput(t *testing.T) {
	var tests = []struct {
		name             string
		permissionsInput []models.Permission
	}{
		// NOTE(review): models.Permission is initialized positionally
		// here; the first value looks like the object name and the last
		// slice like a list of type values — confirm against the models
		// package before relying on this.
		{"Valid Permission info", []models.Permission{
			models.Permission{
				"Object", "", "", "", "", "", []string{}, models.Context{}, []string{},
			},
		},
		},
		{"Valid Permission info, 2 permissions", []models.Permission{
			models.Permission{
				"Object", "", "", "", "", "", []string{}, models.Context{}, []string{},
			},
			models.Permission{
				"Object2", "", "", "", "", "", []string{}, models.Context{}, []string{},
			},
		},
		},
		{"Valid Permission info, with 1 Type value", []models.Permission{
			models.Permission{
				"Object", "", "", "", "", "", []string{}, models.Context{}, []string{
					"file",
				},
			},
		},
		},
		{"Valid Permission info, with 2 Type values", []models.Permission{
			models.Permission{
				"Object", "", "", "", "", "", []string{}, models.Context{}, []string{
					"file",
					"directory",
				},
			},
		},
		},
	}
	for _, test := range tests {
		output := validateServerPermissions(test.permissionsInput)
		if output != true {
			t.Errorf("The validateServerPermissions failed validation for: %v", test)
		}
	}
}
explode_data.jsonl/71214
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 767 }
[ 2830, 3393, 17926, 5475, 23851, 97279, 2505, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 11609, 1797, 914, 198, 197, 197, 29900, 2505, 3056, 6507, 73409, 198, 197, 59403, 197, 197, 4913, 4088, 18135, 3546, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestICodeApi_ExecuteRequestList deploys the sample icode from GitHub
// and executes a two-request batch ("initA" then "incA"), expecting one
// error-free result per request.
func TestICodeApi_ExecuteRequestList(t *testing.T) {
	savePath := os.Getenv("GOPATH") + "/src/github.com/it-chain/engine/.tmp/"
	defer os.RemoveAll(savePath)

	// Generate a throwaway SSH key for the deploy; tearDown1 removes it.
	sshPath := "./id_rsa"
	err, tearDown1 := generatePriKey(sshPath)
	assert.NoError(t, err)
	defer tearDown1()

	api, _ := setUp(t)

	icode, err := api.Deploy(savePath, "github.com/junbeomlee/learn-icode", sshPath, "")
	defer api.UnDeploy(icode.ID)

	results := api.ExecuteRequestList([]ivm.Request{
		ivm.Request{
			ICodeID:  icode.ID,
			Function: "initA",
			Type:     "invoke",
			Args:     []string{},
		},
		ivm.Request{
			ICodeID:  icode.ID,
			Function: "incA",
			Type:     "invoke",
			Args:     []string{},
		},
	})

	// One result per submitted request, each with an empty error string.
	assert.Equal(t, len(results), 2)
	for _, result := range results {
		assert.Equal(t, result.Err, "")
	}
}
explode_data.jsonl/9677
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 1317, 534, 6563, 83453, 1900, 852, 1155, 353, 8840, 836, 8, 341, 49230, 1820, 1669, 2643, 64883, 445, 98733, 4827, 899, 488, 3521, 3548, 71120, 905, 14, 275, 65186, 82045, 11930, 5173, 29555, 16867, 2643, 84427, 33546, 1820, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMigration(t *testing.T) { _ = os.Remove(dbName) db, err := xorm.NewEngine("sqlite3", dbName) if err != nil { log.Fatal(err) } defer db.Close() if err = db.DB().Ping(); err != nil { log.Fatal(err) } m := New(db, DefaultOptions, migrations) err = m.Migrate() assert.NoError(t, err) exists, _ := db.IsTableExist(&Person{}) assert.True(t, exists) exists, _ = db.IsTableExist(&Pet{}) assert.True(t, exists) assert.Equal(t, 2, tableCount(db, "migrations")) err = m.RollbackLast() assert.NoError(t, err) exists, _ = db.IsTableExist(&Person{}) assert.True(t, exists) exists, _ = db.IsTableExist(&Pet{}) assert.False(t, exists) assert.Equal(t, 1, tableCount(db, "migrations")) err = m.RollbackLast() assert.NoError(t, err) exists, _ = db.IsTableExist(&Person{}) assert.False(t, exists) exists, _ = db.IsTableExist(&Pet{}) assert.False(t, exists) assert.Equal(t, 0, tableCount(db, "migrations")) }
explode_data.jsonl/58881
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 401 }
[ 2830, 3393, 20168, 1155, 353, 8840, 836, 8, 341, 197, 62, 284, 2643, 13270, 9791, 675, 692, 20939, 11, 1848, 1669, 856, 493, 7121, 4571, 445, 37042, 18, 497, 75564, 340, 743, 1848, 961, 2092, 341, 197, 6725, 26133, 3964, 340, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestAccShakenFistNetwork is a Terraform acceptance test: step one
// creates an external network and checks its attributes and metadata;
// step two applies a changed configuration (new netblock, NAT enabled)
// and re-checks the resource.
func TestAccShakenFistNetwork(t *testing.T) {
	var network client.Network

	randomName := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
	resType := "shakenfist_network."
	resName := "external"

	resource.Test(t, resource.TestCase{
		PreCheck:  func() { testAccPreCheck(t) },
		Providers: testAccProviders,
		Steps: []resource.TestStep{
			{
				Config: testAccResourceNetwork1(randomName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckNetworkExists(resType+resName, &network),
					testAccNetworkValues(&network, resName, randomName),
					resource.TestCheckResourceAttr(
						resType+resName, "name", "testacc-"+randomName+"-external"),
					resource.TestCheckResourceAttrSet(resType+resName, "uuid"),
					resource.TestCheckResourceAttr(
						resType+resName, "netblock", "10.0.1.0/24"),
					resource.TestCheckResourceAttr(
						resType+resName, "provide_dhcp", "true"),
					resource.TestCheckResourceAttr(
						resType+resName, "provide_nat", "false"),
					testAccNetworkMetadata(resType+resName, map[string]string{
						"purpose": "external",
					}),
				),
			},
			{
				// Change the network configuration
				Config: testAccResourceNetwork2(randomName),
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(
						resType+resName, "name", "testacc-"+randomName+"-external"),
					resource.TestCheckResourceAttrSet(resType+resName, "uuid"),
					resource.TestCheckResourceAttr(
						resType+resName, "netblock", "10.0.99.0/24"),
					resource.TestCheckResourceAttr(
						resType+resName, "provide_dhcp", "true"),
					resource.TestCheckResourceAttr(
						resType+resName, "provide_nat", "true"),
				),
			},
		},
	})
}
explode_data.jsonl/18035
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 687 }
[ 2830, 3393, 14603, 2016, 3366, 37, 380, 12320, 1155, 353, 8840, 836, 8, 341, 2405, 3922, 2943, 30149, 271, 83628, 675, 1669, 1613, 67880, 2013, 437, 703, 3830, 4768, 1649, 7, 16, 15, 11, 1613, 67880, 10447, 1649, 19384, 4651, 692, 102...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMetablockLoadDumpLoad builds a Metablock in memory, dumps it to a
// temporary JSON file, and checks that both the freshly dumped file and a
// pre-existing equivalent fixture load back to an equal Metablock.
func TestMetablockLoadDumpLoad(t *testing.T) {
	// Dump, load and compare metablock, also compare with metablock loaded
	// from existing equivalent JSON file, assert that they are equal.
	mbMemory := Metablock{
		Signed: Link{
			Type: "link",
			Name: "package",
			Command: []string{
				"tar", "zcvf", "foo.tar.gz", "foo.py",
			},
			Materials: map[string]interface{}{
				"foo.py": map[string]interface{}{
					"sha256": "74dc3727c6e89308b39e4dfedf787e37841198b1fa165a27c013544a60502549",
				},
			},
			Products: map[string]interface{}{
				"foo.tar.gz": map[string]interface{}{
					"sha256": "52947cb78b91ad01fe81cd6aef42d1f6817e92b9e6936c1e5aabb7c98514f355",
				},
			},
			ByProducts: map[string]interface{}{
				"return-value": float64(0),
				"stderr":       "a foo.py\n",
				"stdout":       "",
			},
			Environment: map[string]interface{}{},
		},
		Signatures: []Signature{
			{
				KeyID: "2f89b9272acfc8f4a0a0f094d789fdb0ba798b0fe41f2f5f417c12f0085ff498",
				Sig: "66365d379d66a2e76d39a1f048847826393127572ba43bead96419499b0256" +
					"1a08e1cb06cf91f2addd87c30a01f776a8ccc599574bc9a2bd519558351f56cff" +
					"a61ac4f994d0d491204ff54707937e15f9abfa97c5bda1ec1ae2a2afea63f8086" +
					"13f4fb343b85a5a455b668b95fa3a11cb9b34219d4d6af2dd4e80a9af01023954" +
					"a8813b510a6ff6041c3af52056d021fabbc975211b0d8ee7a429a6c22efde583d" +
					"8ac0719fd657b398a3e02cc711897acbe8cadf32d54f47012aa44621728ede42c" +
					"3bc95c662f9c1211df4e18da8e0f6b2de358700cea5db1e76fc61ef5a90bcebcc" +
					"883eed2272e5ca1c8cbb09b868613b839266cd3ae346ce88439bdb5bb4c69dcb7" +
					"398f4373f2b051adb3d44d11ef1b70c7189aa5c0e6906bf7be1228dc553390024" +
					"c9c796316067fda7d63cf60bfac86ef2e13bbd8e4c3575683673f7cdf4639c3a5" +
					"dc225fc0c040dbd9962a6ff51913b240544939ce2d32a5e84792c0acfa94ee07e" +
					"88e474bf4937558d107c6ecdef5b5b3a7f3a44a657662bbc1046df3a",
			},
		},
	}

	fnExisting := "package.2f89b927.link"
	fnTmp := fnExisting + ".tmp"
	if err := mbMemory.Dump(fnTmp); err != nil {
		t.Errorf("JSON serialization failed: %s", err)
	}
	// Both the fixture file and the file dumped above must deserialize to
	// a Metablock equal to the in-memory one.
	for _, fn := range []string{fnExisting, fnTmp} {
		var mbFile Metablock
		if err := mbFile.Load(fn); err != nil {
			t.Errorf("Could not parse Metablock: %s", err)
		}
		if !reflect.DeepEqual(mbMemory, mbFile) {
			t.Errorf("Dumped and Loaded Metablocks are not equal: \n%s\n\n\n%s\n", mbMemory, mbFile)
		}
	}
	// Remove temporary metablock file (keep other for remaining tests)
	if err := os.Remove(fnTmp); err != nil {
		t.Errorf("Unable to remove directory %s: %s", fnTmp, err)
	}
}
explode_data.jsonl/51758
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1345 }
[ 2830, 3393, 34673, 370, 1023, 5879, 51056, 5879, 1155, 353, 8840, 836, 8, 341, 197, 322, 29693, 11, 2795, 323, 9429, 19635, 1023, 11, 1083, 9429, 448, 19635, 1023, 6661, 198, 197, 322, 504, 6350, 13578, 4718, 1034, 11, 2060, 429, 807,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_DeployHandler_Execution_Errors_Release(t *testing.T) { release := MockRelease() awsc := MockAwsClients(release) release.ReleaseID = nil state_machine := createTestStateMachine(t, awsc) exec, err := state_machine.Execute(release) assert.Error(t, err) assert.Regexp(t, "BadReleaseError", exec.LastOutputJSON) assert.Regexp(t, "ReleaseID must", exec.LastOutputJSON) assertNoRootLock(t, awsc, release) assert.Equal(t, []string{ "Validate", "FailureClean", }, exec.Path()) }
explode_data.jsonl/62292
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 90680, 1989, 3050, 62, 20294, 93623, 1087, 85573, 1155, 353, 8840, 836, 8, 341, 17200, 1623, 1669, 14563, 16077, 2822, 197, 672, 2388, 1669, 14563, 47359, 47174, 5801, 1623, 340, 17200, 1623, 58693, 915, 284, 2092, 271, 24291,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTagValueUnMarshaling(t *testing.T) { for i, tt := range stringtests { var tv TagValue err := json.Unmarshal(tt.json, &tv) if err != nil { t.Errorf("%d. Unmarshal(%q, &str) returned err: %s", i, tt.json, err) } else { if tv != tt.tv { t.Errorf( "%d. Unmarshal(%q, &str) => str==%q, want %q", i, tt.json, tv, tt.tv, ) } } } }
explode_data.jsonl/14048
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 5668, 1130, 1806, 79712, 6132, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17853, 1669, 2088, 914, 23841, 341, 197, 2405, 11086, 12353, 1130, 198, 197, 9859, 1669, 2951, 38097, 47152, 4323, 11, 609, 22209, 340, 197, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPanicThresholdPercentage(t *testing.T) { cases := []struct { name string pa *PodAutoscaler wantPercentage float64 wantOK bool }{{ name: "not present", pa: pa(map[string]string{}), wantPercentage: 0.0, wantOK: false, }, { name: "present", pa: pa(map[string]string{ autoscaling.PanicThresholdPercentageAnnotationKey: "300.0", }), wantPercentage: 300.0, wantOK: true, }} for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { gotPercentage, gotOK := tc.pa.PanicThresholdPercentage() if gotPercentage != tc.wantPercentage { t.Errorf("PanicThresholdPercentage = %v, want: %v", gotPercentage, tc.wantPercentage) } if gotOK != tc.wantOK { t.Errorf("OK = %v, want: %v", gotOK, tc.wantOK) } }) } }
explode_data.jsonl/27236
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 404 }
[ 2830, 3393, 47, 31270, 37841, 36167, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 3223, 64, 1797, 353, 23527, 19602, 436, 63084, 198, 197, 50780, 36167, 2224, 21, 19, 198, 197, 5078...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAudioAdaptorName(t *testing.T) { a := NewAdaptor() gobottest.Assert(t, strings.HasPrefix(a.Name(), "Audio"), true) a.SetName("NewName") gobottest.Assert(t, a.Name(), "NewName") }
explode_data.jsonl/66224
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 14755, 2589, 32657, 675, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 1532, 2589, 32657, 741, 3174, 674, 1716, 477, 11711, 1155, 11, 9069, 94357, 2877, 2967, 1507, 330, 14755, 3975, 830, 340, 11323, 4202, 675, 445, 3564, 675, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestDownloadOverrideObjectRemote downloads objects from a remote
// backend into a fresh AIS bucket, then re-puts the remote objects and
// downloads again so the second download lands on top of existing copies.
func TestDownloadOverrideObjectRemote(t *testing.T) {
	var (
		proxyURL   = tutils.RandomProxyURL(t)
		baseParams = tutils.BaseAPIParams(proxyURL)
		// Fresh AIS bucket with a random name as the download target.
		bck = cmn.Bck{
			Name:     cos.RandString(10),
			Provider: cmn.ProviderAIS,
		}
		dlBody = downloader.DlBackendBody{
			DlBase: downloader.DlBase{Bck: bck},
		}
		m = &ioContext{
			t:   t,
			num: 10,
			bck: cliBck,
		}
	)
	// Requires a cloud-backed bucket; the test is skipped otherwise.
	tutils.CheckSkip(t, tutils.SkipTestArgs{CloudBck: true, Bck: m.bck})

	m.initWithCleanup()
	m.remotePuts(false /*evict*/)

	tutils.CreateBucketWithCleanup(t, proxyURL, bck, nil)
	// Point the new bucket's backend at the remote bucket.
	tutils.SetBackendBck(t, baseParams, bck, m.bck)

	downloadObjectRemote(t, dlBody, m.num, 0)
	// Re-put the remote objects and download again over the first copies.
	m.remotePuts(false /*evict*/)
	downloadObjectRemote(t, dlBody, m.num, 0)
}
explode_data.jsonl/70392
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 11377, 2177, 1190, 24703, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 197, 22803, 3144, 256, 284, 259, 6031, 26709, 16219, 3144, 1155, 340, 197, 24195, 4870, 284, 259, 6031, 13018, 7082, 4870, 65787, 3144, 340, 197, 2233,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStdTx(t *testing.T) { priv := crypto.GenPrivKeyEd25519() addr := priv.PubKey().Address() msg := NewTestMsg(addr) fee := newStdFee() sigs := []StdSignature{} tx := NewStdTx(msg, fee, sigs) assert.Equal(t, msg, tx.GetMsg()) assert.Equal(t, sigs, tx.GetSignatures()) feePayer := FeePayer(tx) assert.Equal(t, addr, feePayer) }
explode_data.jsonl/6530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 22748, 31584, 1155, 353, 8840, 836, 8, 341, 71170, 1669, 19028, 65384, 32124, 1592, 2715, 17, 20, 20, 16, 24, 741, 53183, 1669, 6095, 1069, 392, 1592, 1005, 4286, 741, 21169, 1669, 1532, 2271, 6611, 24497, 340, 1166, 2127, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWriteReadCurrentPidFile(t *testing.T) { ctx := newTestContext("TestWriteReadPidFile", t) var err error var pidRead int pid := os.Getpid() if err = util.WritePidFile(ctx, -1); err != nil { t.Fatalf("error writing pidfile=%s", util.PidFilePath(ctx)) } if pidRead, err = util.ReadPidFile(ctx); err != nil { t.Fatalf("error reading pidfile=%s", util.PidFilePath(ctx)) } if pidRead != pid { t.Fatalf("pidRead=%d != pid=%d", pidRead, pid) } }
explode_data.jsonl/15038
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 7985, 4418, 5405, 32339, 1703, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 501, 2271, 1972, 445, 2271, 7985, 4418, 32339, 1703, 497, 259, 692, 2405, 1848, 1465, 198, 2405, 14814, 4418, 526, 271, 78799, 1669, 2643, 2234, 10412...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestApplicationContext_LoadProperties(t *testing.T) { c, ch := container() err := c.Load("testdata/config/application.yaml") assert.Nil(t, err) err = c.Load("testdata/config/application.properties") assert.Nil(t, err) err = c.Refresh() assert.Nil(t, err) p := <-ch assert.Equal(t, p.Properties().Get("yaml.list[0]"), "1") assert.Equal(t, p.Properties().Get("yaml.list[1]"), "2") assert.Equal(t, p.Properties().Get("spring.application.name"), "test") }
explode_data.jsonl/17403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 19736, 19553, 7903, 1155, 353, 8840, 836, 8, 1476, 1444, 11, 521, 1669, 5476, 2822, 9859, 1669, 272, 13969, 445, 92425, 14730, 33032, 33406, 1138, 6948, 59678, 1155, 11, 1848, 692, 9859, 284, 272, 13969, 445, 92425, 14730, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEKS(t *testing.T) { detectorUtils := new(MockDetectorUtils) ctx := context.Background() require.NoError(t, os.Setenv("KUBERNETES_SERVICE_HOST", "localhost")) detectorUtils.On("getConfigMap", authConfigmapNS, authConfigmapName).Return(map[string]string{"cluster.name": "my-cluster"}, nil) // Call EKS Resource detector to detect resources eksResourceDetector := &detector{utils: detectorUtils, err: nil} res, _, err := eksResourceDetector.Detect(ctx) require.NoError(t, err) assert.Equal(t, map[string]interface{}{ "cloud.provider": "aws", "cloud.platform": "aws_eks", }, internal.AttributesToMap(res.Attributes()), "Resource object returned is incorrect") }
explode_data.jsonl/54900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 36, 40449, 1155, 353, 8840, 836, 8, 341, 2698, 295, 1256, 4209, 1669, 501, 66436, 31606, 4209, 340, 20985, 1669, 2266, 19047, 2822, 17957, 35699, 1155, 11, 2643, 4202, 3160, 445, 42, 4493, 13660, 1348, 1570, 21590, 17213, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetBucketInfo(t *testing.T) { ak := os.Getenv("AccessKey") sk := os.Getenv("SecretKey") bucket := os.Getenv("Bucket") mac := digest.Mac{ak, []byte(sk)} bucketInfo, gErr := GetBucketInfo(&mac, bucket) if gErr != nil { t.Fatal(gErr) } t.Log(bucketInfo.Region) }
explode_data.jsonl/56492
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 1949, 36018, 1731, 1155, 353, 8840, 836, 8, 341, 197, 585, 1669, 2643, 64883, 445, 6054, 1592, 1138, 1903, 74, 1669, 2643, 64883, 445, 19773, 1592, 1138, 2233, 11152, 1669, 2643, 64883, 445, 36018, 1138, 2109, 580, 1669, 208...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestStickyLoadBalanceWorksWithServiceRemoval verifies that a round-robin
// load balancer with ClientIP affinity keeps serving a remaining service —
// and preserves its rotation position — after a different service is
// removed via OnEndpointsUpdate.
func TestStickyLoadBalanceWorksWithServiceRemoval(t *testing.T) {
	// Three distinct client addresses so each one gets its own affinity slot.
	client1 := &net.TCPAddr{IP: net.IPv4(127, 0, 0, 1), Port: 0}
	client2 := &net.TCPAddr{IP: net.IPv4(127, 0, 0, 2), Port: 0}
	client3 := &net.TCPAddr{IP: net.IPv4(127, 0, 0, 3), Port: 0}
	loadBalancer := NewLoadBalancerRR()
	fooService := proxy.ServicePortName{NamespacedName: types.NamespacedName{Namespace: "testnamespace", Name: "foo"}, Port: ""}
	// Lookups before any service is registered must fail.
	endpoint, err := loadBalancer.NextEndpoint(fooService, nil, false)
	if err == nil || len(endpoint) != 0 {
		t.Errorf("Didn't fail with non-existent service")
	}
	loadBalancer.NewService(fooService, api.ServiceAffinityClientIP, 0)
	// foo gets three endpoint ports, bar gets two.
	endpoints := make([]api.Endpoints, 2)
	endpoints[0] = api.Endpoints{
		ObjectMeta: api.ObjectMeta{Name: fooService.Name, Namespace: fooService.Namespace},
		Subsets: []api.EndpointSubset{
			{
				Addresses: []api.EndpointAddress{{IP: "endpoint"}},
				Ports:     []api.EndpointPort{{Port: 1}, {Port: 2}, {Port: 3}},
			},
		},
	}
	barService := proxy.ServicePortName{NamespacedName: types.NamespacedName{Namespace: "testnamespace", Name: "bar"}, Port: ""}
	loadBalancer.NewService(barService, api.ServiceAffinityClientIP, 0)
	endpoints[1] = api.Endpoints{
		ObjectMeta: api.ObjectMeta{Name: barService.Name, Namespace: barService.Namespace},
		Subsets: []api.EndpointSubset{
			{
				Addresses: []api.EndpointAddress{{IP: "endpoint"}},
				Ports:     []api.EndpointPort{{Port: 4}, {Port: 5}},
			},
		},
	}
	loadBalancer.OnEndpointsUpdate(endpoints)
	// Each client must stick to the endpoint it was first handed for foo,
	// and repeated requests from the same client return the same endpoint.
	shuffledFooEndpoints := loadBalancer.services[fooService].endpoints
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[1], client2)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[2], client3)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[1], client2)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[1], client2)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[2], client3)
	expectEndpoint(t, loadBalancer, fooService, shuffledFooEndpoints[2], client3)
	// Same affinity behavior for bar (independent rotation).
	shuffledBarEndpoints := loadBalancer.services[barService].endpoints
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[1], client2)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[1], client2)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[1], client2)
	// Then update the configuration by removing foo
	loadBalancer.OnEndpointsUpdate(endpoints[1:])
	endpoint, err = loadBalancer.NextEndpoint(fooService, nil, false)
	if err == nil || len(endpoint) != 0 {
		t.Errorf("Didn't fail with non-existent service")
	}
	// but bar is still there, and we continue RR from where we left off.
	shuffledBarEndpoints = loadBalancer.services[barService].endpoints
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[1], client2)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[1], client2)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[0], client1)
	expectEndpoint(t, loadBalancer, barService, shuffledBarEndpoints[0], client1)
}
explode_data.jsonl/66183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1329 }
[ 2830, 3393, 623, 18964, 5879, 21190, 6776, 16056, 1860, 6590, 13516, 1155, 353, 8840, 836, 8, 341, 25291, 16, 1669, 609, 4711, 836, 7123, 13986, 90, 3298, 25, 4179, 46917, 85, 19, 7, 16, 17, 22, 11, 220, 15, 11, 220, 15, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSaveTokenFailsNoPermission(t *testing.T) { pathWhereWeShouldntHavePermission := "/usr/thiswontwork/atall" if runtime.GOOS == "windows" { pathWhereWeShouldntHavePermission = path.Join(os.Getenv("windir"), "system32\\mytokendir\\mytoken") } err := SaveToken(pathWhereWeShouldntHavePermission, 0644, *token()) expectedSubstring := "failed to create directory" if err == nil || !strings.Contains(err.Error(), expectedSubstring) { t.Fatalf("azure: failed to get correct error expected(%s) actual(%v)", expectedSubstring, err) } }
explode_data.jsonl/14995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 8784, 3323, 37, 6209, 2753, 14966, 1155, 353, 8840, 836, 8, 972, 26781, 9064, 1654, 14996, 406, 12116, 14966, 1669, 3521, 7063, 14, 574, 86, 544, 1778, 80730, 541, 5031, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 972, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDateLogicalTypeEncode(t *testing.T) { schema := `{"type": "int", "logicalType": "date"}` testBinaryDecodeFail(t, schema, []byte(""), "short buffer") testBinaryEncodeFail(t, schema, "test", "cannot transform to binary date, expected time.Time, received string") testBinaryCodecPass(t, schema, time.Date(2006, 1, 2, 0, 0, 0, 0, time.UTC), []byte("\xbc\xcd\x01")) }
explode_data.jsonl/12011
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 1916, 64312, 929, 32535, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 1565, 4913, 1313, 788, 330, 396, 497, 330, 30256, 929, 788, 330, 1028, 9207, 3989, 18185, 21338, 32564, 19524, 1155, 11, 10802, 11, 3056, 3782, 86076, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLinkCompletionHandler(t *testing.T) { tests := []struct { name string component string dcList appsv1.DeploymentConfigList serviceList scv1beta1.ServiceInstanceList output []string }{ { name: "Case 1: both components and services are present", component: "frontend", serviceList: scv1beta1.ServiceInstanceList{ Items: []scv1beta1.ServiceInstance{ { ObjectMeta: metav1.ObjectMeta{ Name: "mysql-persistent", Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "mysql-persistent", componentlabels.ComponentTypeLabel: "mysql-persistent", }, Annotations: map[string]string{ component.ComponentSourceTypeAnnotation: "local", }, }, Spec: scv1beta1.ServiceInstanceSpec{ PlanReference: scv1beta1.PlanReference{ ClusterServiceClassExternalName: "mysql-persistent", ClusterServicePlanExternalName: "default", }, }, Status: scv1beta1.ServiceInstanceStatus{ Conditions: []scv1beta1.ServiceInstanceCondition{ { Reason: "ProvisionedSuccessfully", }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "postgresql-ephemeral", Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "postgresql-ephemeral", componentlabels.ComponentTypeLabel: "postgresql-ephemeral", }, Annotations: map[string]string{ component.ComponentSourceTypeAnnotation: "local", }, }, Spec: scv1beta1.ServiceInstanceSpec{ PlanReference: scv1beta1.PlanReference{ ClusterServiceClassExternalName: "postgresql-ephemeral", ClusterServicePlanExternalName: "default", }, }, Status: scv1beta1.ServiceInstanceStatus{ Conditions: []scv1beta1.ServiceInstanceCondition{ { Reason: "Provisioning", }, }, }, }, }, }, dcList: appsv1.DeploymentConfigList{ Items: []appsv1.DeploymentConfig{ { ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "backend", componentlabels.ComponentTypeLabel: "java", }, Annotations: map[string]string{ component.ComponentSourceTypeAnnotation: "local", }, }, Spec: 
appsv1.DeploymentConfigSpec{ Template: &corev1.PodTemplateSpec{ Spec: corev1.PodSpec{ Containers: []corev1.Container{ { Name: "dummyContainer", }, }, }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "frontend", componentlabels.ComponentTypeLabel: "nodejs", }, Annotations: map[string]string{ component.ComponentSourceTypeAnnotation: "local", }, }, Spec: appsv1.DeploymentConfigSpec{ Template: &corev1.PodTemplateSpec{ Spec: corev1.PodSpec{ Containers: []corev1.Container{ { Name: "dummyContainer", }, }, }, }, }, }, }, }, // make sure that the 'component' is not part of the suggestions output: []string{"backend", "mysql-persistent", "postgresql-ephemeral"}, }, } for _, tt := range tests { client, fakeClientSet := occlient.FakeNew() parsedArgs := parsedArgs{ commands: make(map[string]bool), } context := genericclioptions.NewFakeContext("project", "app", tt.component, client) //fake the services fakeClientSet.ServiceCatalogClientSet.PrependReactor("list", "serviceinstances", func(action ktesting.Action) (bool, runtime.Object, error) { return true, &tt.serviceList, nil }) //fake the dcs fakeClientSet.AppsClientset.PrependReactor("list", "deploymentconfigs", func(action ktesting.Action) (bool, runtime.Object, error) { return true, &tt.dcList, nil }) for i := range tt.dcList.Items { fakeClientSet.AppsClientset.PrependReactor("get", "deploymentconfigs", func(action ktesting.Action) (bool, runtime.Object, error) { return true, &tt.dcList.Items[i], nil }) } completions := LinkCompletionHandler(nil, parsedArgs, context) sort.Strings(completions) if !reflect.DeepEqual(tt.output, completions) { t.Errorf("expected output: %#v,got: %#v", tt.output, completions) } } }
explode_data.jsonl/3547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2146 }
[ 2830, 3393, 3939, 33190, 3050, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 52228, 256, 914, 198, 197, 87249, 852, 414, 906, 3492, 16, 34848, 39130, 2648, 852, 198, 197, 52934, 852, 1136...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddrSerialization(t *testing.T) { t.Parallel() var b bytes.Buffer for _, test := range addrTests { err := serializeAddr(&b, test.expAddr) if err != test.serErr { t.Fatalf("unexpected serialization err for addr %v, "+ "want: %v, got %v", test.expAddr, test.serErr, err) } else if test.serErr != nil { continue } addr, err := deserializeAddr(&b) if err != nil { t.Fatalf("unable to deserialize address: %v", err) } if addr.String() != test.expAddr.String() { t.Fatalf("expected address %v after serialization, "+ "got %v", addr, test.expAddr) } } }
explode_data.jsonl/46280
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 13986, 35865, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2405, 293, 5820, 22622, 198, 2023, 8358, 1273, 1669, 2088, 10789, 18200, 341, 197, 9859, 1669, 24235, 13986, 2099, 65, 11, 1273, 13754, 13986, 340, 197, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestChangefeedCursor checks that CREATE CHANGEFEED ... WITH cursor=$1
// accepts a cluster logical timestamp, a nanosecond-epoch string, and a
// formatted wall-clock time, and in every case emits only rows written
// after the cursor. It runs against sinkless, enterprise, and rangefeed
// feed implementations.
func TestChangefeedCursor(t *testing.T) {
	defer leaktest.AfterTest(t)()
	testFn := func(t *testing.T, db *gosql.DB, f testfeedFactory) {
		sqlDB := sqlutils.MakeSQLRunner(db)
		sqlDB.Exec(t, `CREATE TABLE foo (a INT PRIMARY KEY, b STRING)`)
		// To make sure that these timestamps are after 'before' and before
		// 'after', throw a couple sleeps around them. We round timestamps to
		// Microsecond granularity for Postgres compatibility, so make the
		// sleeps 10x that.
		sqlDB.Exec(t, `INSERT INTO foo VALUES (1, 'before')`)
		time.Sleep(10 * time.Microsecond)
		var tsLogical string
		sqlDB.QueryRow(t, `SELECT cluster_logical_timestamp()`).Scan(&tsLogical)
		var tsClock time.Time
		sqlDB.QueryRow(t, `SELECT clock_timestamp()`).Scan(&tsClock)
		time.Sleep(10 * time.Microsecond)
		sqlDB.Exec(t, `INSERT INTO foo VALUES (2, 'after')`)
		// Cursor form 1: cluster logical timestamp.
		fooLogical := f.Feed(t, `CREATE CHANGEFEED FOR foo WITH cursor=$1`, tsLogical)
		defer fooLogical.Close(t)
		assertPayloads(t, fooLogical, []string{
			`foo: [2]->{"a": 2, "b": "after"}`,
		})
		// Cursor form 2: nanoseconds-since-epoch string.
		nanosStr := strconv.FormatInt(tsClock.UnixNano(), 10)
		fooNanosStr := f.Feed(t, `CREATE CHANGEFEED FOR foo WITH cursor=$1`, nanosStr)
		defer fooNanosStr.Close(t)
		assertPayloads(t, fooNanosStr, []string{
			`foo: [2]->{"a": 2, "b": "after"}`,
		})
		// Cursor form 3: formatted wall-clock time.
		timeStr := tsClock.Format(`2006-01-02 15:04:05.999999`)
		fooString := f.Feed(t, `CREATE CHANGEFEED FOR foo WITH cursor=$1`, timeStr)
		defer fooString.Close(t)
		assertPayloads(t, fooString, []string{
			`foo: [2]->{"a": 2, "b": "after"}`,
		})
		// Check that the cursor is properly hooked up to the job statement
		// time. The sinkless tests currently don't have a way to get the
		// statement timestamp, so only verify this for enterprise.
		if e, ok := fooLogical.(*tableFeed); ok {
			var bytes []byte
			sqlDB.QueryRow(t, `SELECT payload FROM system.jobs WHERE id=$1`, e.jobID).Scan(&bytes)
			var payload jobspb.Payload
			require.NoError(t, protoutil.Unmarshal(bytes, &payload))
			require.Equal(t, parseTimeToHLC(t, tsLogical), payload.GetChangefeed().StatementTime)
		}
	}
	t.Run(`sinkless`, sinklessTest(testFn))
	t.Run(`enterprise`, enterpriseTest(testFn))
	t.Run(`rangefeed`, rangefeedTest(sinklessTest, testFn))
}
explode_data.jsonl/21275
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 867 }
[ 2830, 3393, 1143, 524, 823, 12051, 14543, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 18185, 24911, 1669, 2915, 1155, 353, 8840, 836, 11, 2927, 353, 34073, 1470, 22537, 11, 282, 1273, 11184, 4153, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetPostsubmitsReturnsStaticAndInrepoconfigPostsubmits(t *testing.T) { t.Parallel() org, repo := "org", "repo" c := &Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{Enabled: map[string]*bool{"*": utilpointer.BoolPtr(true)}}, }, JobConfig: JobConfig{ PostsubmitsStatic: map[string][]Postsubmit{ org + "/" + repo: {{ JobBase: JobBase{Name: "my-static-postsubmits"}, Reporter: Reporter{Context: "my-static-postsubmits"}, }}, }, ProwYAMLGetter: fakeProwYAMLGetterFactory( nil, []Postsubmit{ { JobBase: JobBase{Name: "hans"}, }, }, ), }, } postsubmits, err := c.GetPostsubmits(nil, org+"/"+repo, func() (string, error) { return "", nil }) if err != nil { t.Fatalf("Error calling GetPostsubmits: %v", err) } if n := len(postsubmits); n != 2 || postsubmits[0].Name != "my-static-postsubmits" || postsubmits[1].Name != "hans" { t.Errorf(`expected exactly two postsubmits named "my-static-postsubmits" and "hans", got %d (%v)`, n, postsubmits) } }
explode_data.jsonl/8098
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 1949, 4133, 1966, 44703, 16446, 11690, 3036, 641, 23476, 1676, 4133, 1966, 44703, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 87625, 11, 15867, 1669, 330, 1775, 497, 330, 23476, 698, 1444, 1669, 609, 2648, 515, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCache_IsCacheable_negative checks isCacheable against the negative
// (NXDOMAIN / NODATA) response taxonomy of RFC 2308: only response types
// that carry an SOA record in the authority section and no NS delegation
// (types 1 without NS would differ — see each case) are cacheable.
func TestCache_IsCacheable_negative(t *testing.T) {
	// msgHdr builds a message header with a fresh ID and the given rcode.
	msgHdr := func(rcode int) (hdr dns.MsgHdr) {
		return dns.MsgHdr{Id: dns.Id(), Rcode: rcode}
	}
	// aQuestions builds a single A-type, IN-class question for name.
	aQuestions := func(name string) []dns.Question {
		return []dns.Question{{
			Name:   name,
			Qtype:  dns.TypeA,
			Qclass: dns.ClassINET,
		}}
	}
	// cnameAns builds a CNAME answer record name -> cname.
	cnameAns := func(name, cname string) (rr dns.RR) {
		return &dns.CNAME{
			Hdr: dns.RR_Header{
				Name:   name,
				Rrtype: dns.TypeCNAME,
				Class:  dns.ClassINET,
				Ttl:    3600,
			},
			Target: cname,
		}
	}
	// soaAns builds an SOA authority record for name.
	soaAns := func(name, ns, mbox string) (rr dns.RR) {
		return &dns.SOA{
			Hdr: dns.RR_Header{
				Name:   name,
				Rrtype: dns.TypeSOA,
				Class:  dns.ClassINET,
				Ttl:    3600,
			},
			Ns:   ns,
			Mbox: mbox,
		}
	}
	// nsAns builds an NS authority record for name.
	nsAns := func(name, ns string) (rr dns.RR) {
		return &dns.NS{
			Hdr: dns.RR_Header{
				Name:   name,
				Rrtype: dns.TypeNS,
				Class:  dns.ClassINET,
				Ttl:    3600,
			},
			Ns: ns,
		}
	}
	// aAns builds an A record mapping name to address a.
	aAns := func(name string, a net.IP) (rr dns.RR) {
		return &dns.A{
			Hdr: dns.RR_Header{
				Name:   name,
				Rrtype: dns.TypeA,
				Class:  dns.ClassINET,
				Ttl:    3600,
			},
			A: a,
		}
	}
	const (
		hostname        = "AN.EXAMPLE."
		anotherHostname = "ANOTHER.EXAMPLE."
		cname           = "TRIPPLE.XX."
		mbox            = "HOSTMASTER.NS1.XX."
		ns1, ns2        = "NS1.XX.", "NS2.XX."
		xx              = "XX."
	)
	// See https://datatracker.ietf.org/doc/html/rfc2308.
	testCases := []struct {
		req  *dns.Msg
		want assert.BoolAssertionFunc
		name string
	}{{
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeNameError),
			Question: aQuestions(hostname),
			Answer:   []dns.RR{cnameAns(hostname, cname)},
			Ns: []dns.RR{
				soaAns(xx, ns1, mbox),
				nsAns(xx, ns1),
				nsAns(xx, ns2),
			},
			Extra: []dns.RR{
				aAns(ns1, net.IP{127, 0, 0, 2}),
				aAns(ns2, net.IP{127, 0, 0, 3}),
			},
		},
		want: assert.False,
		name: "rfc2308_nxdomain_response_type_1",
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeNameError),
			Question: aQuestions(hostname),
			Answer:   []dns.RR{cnameAns(hostname, cname)},
			Ns:       []dns.RR{soaAns("XX.", ns1, mbox)},
		},
		want: assert.True,
		name: "rfc2308_nxdomain_response_type_2",
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeNameError),
			Question: aQuestions(hostname),
			Answer:   []dns.RR{cnameAns(hostname, cname)},
		},
		want: assert.False,
		name: "rfc2308_nxdomain_response_type_3",
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeNameError),
			Question: aQuestions(hostname),
			Answer:   []dns.RR{cnameAns(hostname, cname)},
			Ns: []dns.RR{
				nsAns(xx, ns1),
				nsAns(xx, ns2),
			},
			Extra: []dns.RR{
				aAns(ns1, net.IP{127, 0, 0, 2}),
				aAns(ns2, net.IP{127, 0, 0, 3}),
			},
		},
		want: assert.False,
		name: "rfc2308_nxdomain_response_type_4",
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeSuccess),
			Question: aQuestions(hostname),
			Answer:   []dns.RR{cnameAns(hostname, cname)},
			Ns: []dns.RR{
				nsAns(xx, ns1),
				nsAns(xx, ns2),
			},
			Extra: []dns.RR{
				aAns(ns1, net.IP{127, 0, 0, 2}),
				aAns(ns2, net.IP{127, 0, 0, 3}),
			},
		},
		want: assert.False,
		name: "rfc2308_nxdomain_referral_response",
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeSuccess),
			Question: aQuestions(anotherHostname),
			Ns: []dns.RR{
				soaAns(xx, ns1, mbox),
				nsAns(xx, ns1),
				nsAns(xx, ns2),
			},
			Extra: []dns.RR{
				aAns(ns1, net.IP{127, 0, 0, 2}),
				aAns(ns2, net.IP{127, 0, 0, 3}),
			},
		},
		name: "rfc2308_nodata_response_type_1",
		want: assert.False,
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeSuccess),
			Question: aQuestions(anotherHostname),
			Ns:       []dns.RR{soaAns(xx, ns1, mbox)},
		},
		name: "rfc2308_nodata_response_type_2",
		want: assert.True,
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeSuccess),
			Question: aQuestions(anotherHostname),
		},
		name: "rfc2308_nodata_response_type_3",
		want: assert.False,
	}, {
		req: &dns.Msg{
			MsgHdr:   msgHdr(dns.RcodeSuccess),
			Question: aQuestions(anotherHostname),
			Ns: []dns.RR{
				nsAns(xx, ns1),
				nsAns(xx, ns2),
			},
			Extra: []dns.RR{
				aAns(ns1, net.IP{127, 0, 0, 2}),
				aAns(ns2, net.IP{127, 0, 0, 3}),
			},
		},
		name: "rfc2308_nodata_referral_response",
		want: assert.False,
	}}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			tc.want(t, isCacheable(tc.req))
		})
	}
}
explode_data.jsonl/18913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2441 }
[ 2830, 3393, 8233, 31879, 8233, 480, 53865, 1155, 353, 8840, 836, 8, 341, 21169, 88367, 1669, 2915, 35429, 534, 526, 8, 320, 28785, 44077, 30365, 88367, 8, 314, 470, 44077, 30365, 88367, 90, 764, 25, 44077, 6444, 1507, 431, 1851, 25, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestResources builds a kustomize target from an in-memory filesystem
// and asserts that MakeCustomizedResMap produces exactly the expected
// resources: a Deployment, a generated ConfigMap and Secret (with hash
// suffixes), and a Namespace — all with the configured name prefix/suffix,
// namespace, common labels, and common annotations applied.
func TestResources(t *testing.T) {
	th := kusttest_test.NewKustTestHarness(t, "/whatever")
	// Lay out the kustomization and its referenced files in the fake fs.
	th.WriteK("/whatever/", kustomizationContent)
	th.WriteF("/whatever/deployment.yaml", deploymentContent)
	th.WriteF("/whatever/namespace.yaml", namespaceContent)
	th.WriteF("/whatever/jsonpatch.json", jsonpatchContent)
	expected := resmap.ResMap{
		resid.NewResIdWithPrefixSuffixNamespace(
			gvk.Gvk{Group: "apps", Version: "v1", Kind: "Deployment"},
			"dply1", "foo-", "-bar", "ns1"): th.FromMap(
			map[string]interface{}{
				"apiVersion": "apps/v1",
				"kind":       "Deployment",
				"metadata": map[string]interface{}{
					"name":      "foo-dply1-bar",
					"namespace": "ns1",
					"labels": map[string]interface{}{
						"app": "nginx",
					},
					"annotations": map[string]interface{}{
						"note": "This is a test annotation",
					},
				},
				"spec": map[string]interface{}{
					"replica": "3",
					"selector": map[string]interface{}{
						"matchLabels": map[string]interface{}{
							"app": "nginx",
						},
					},
					"template": map[string]interface{}{
						"metadata": map[string]interface{}{
							"annotations": map[string]interface{}{
								"note": "This is a test annotation",
							},
							"labels": map[string]interface{}{
								"app": "nginx",
							},
						},
					},
				},
			}),
		resid.NewResIdWithPrefixSuffixNamespace(
			gvk.Gvk{Version: "v1", Kind: "ConfigMap"},
			"literalConfigMap", "foo-", "-bar", "ns1"): th.FromMapAndOption(
			map[string]interface{}{
				"apiVersion": "v1",
				"kind":       "ConfigMap",
				"metadata": map[string]interface{}{
					// Generated name carries the content-hash suffix.
					"name":      "foo-literalConfigMap-bar-8d2dkb8k24",
					"namespace": "ns1",
					"labels": map[string]interface{}{
						"app": "nginx",
					},
					"annotations": map[string]interface{}{
						"note": "This is a test annotation",
					},
				},
				"data": map[string]interface{}{
					"DB_USERNAME": "admin",
					"DB_PASSWORD": "somepw",
				},
			},
			&types.GeneratorArgs{},
			&types.GeneratorOptions{}),
		resid.NewResIdWithPrefixSuffixNamespace(
			gvk.Gvk{Version: "v1", Kind: "Secret"},
			"secret", "foo-", "-bar", "ns1"): th.FromMapAndOption(
			map[string]interface{}{
				"apiVersion": "v1",
				"kind":       "Secret",
				"metadata": map[string]interface{}{
					// Generated name carries the content-hash suffix.
					"name":      "foo-secret-bar-9btc7bt4kb",
					"namespace": "ns1",
					"labels": map[string]interface{}{
						"app": "nginx",
					},
					"annotations": map[string]interface{}{
						"note": "This is a test annotation",
					},
				},
				"type": ifc.SecretTypeOpaque,
				"data": map[string]interface{}{
					// Secret data is base64-encoded.
					"DB_USERNAME": base64.StdEncoding.EncodeToString([]byte("admin")),
					"DB_PASSWORD": base64.StdEncoding.EncodeToString([]byte("somepw")),
				},
			},
			&types.GeneratorArgs{},
			&types.GeneratorOptions{}),
		resid.NewResIdWithPrefixSuffixNamespace(
			gvk.Gvk{Version: "v1", Kind: "Namespace"},
			"ns1", "foo-", "-bar", ""): th.FromMap(
			map[string]interface{}{
				"apiVersion": "v1",
				"kind":       "Namespace",
				"metadata": map[string]interface{}{
					"name": "foo-ns1-bar",
					"labels": map[string]interface{}{
						"app": "nginx",
					},
					"annotations": map[string]interface{}{
						"note": "This is a test annotation",
					},
				},
			}),
	}
	actual, err := th.MakeKustTarget().MakeCustomizedResMap()
	if err != nil {
		t.Fatalf("unexpected Resources error %v", err)
	}
	if !reflect.DeepEqual(actual, expected) {
		// ErrorIfNotEqual yields a more descriptive diff than DeepEqual.
		err = expected.ErrorIfNotEqual(actual)
		t.Fatalf("unexpected inequality: %v", err)
	}
}
explode_data.jsonl/78140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1674 }
[ 2830, 3393, 11277, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 595, 590, 1944, 4452, 7121, 42, 590, 2271, 74248, 1155, 11, 3521, 68286, 1138, 70479, 4073, 42, 4283, 68286, 28105, 595, 1450, 2022, 2762, 340, 70479, 4073, 37, 4283, 68286, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestResolveWithMaxAge exercises the caching resolver: a cold cache hits
// the backend, a warm cache within maxAge serves without a backend call,
// and an expired entry (maxAge=0) triggers an asynchronous refresh whose
// completion is signaled on the `refreshed` channel.
func TestResolveWithMaxAge(t *testing.T) {
	b := &resolveBackendWithTracking{
		nameToIP: make(map[string][]net.IP),
	}
	r := &Resolver{
		cache:   make(map[string]*cacheRecord),
		resolve: b.resolve,
	}
	testHost := "hostA"
	expectedIP := net.ParseIP("1.2.3.4")
	b.nameToIP[testHost] = []net.IP{expectedIP}
	// Resolve a host, there is no cache, a backend call should be made
	expectedBackendCalls := 1
	refreshed := make(chan bool, 2)
	ip, err := r.resolveWithMaxAge(testHost, 4, 60*time.Second, refreshed)
	verify("first-run-no-cache", t, ip, expectedIP, b.calls(), expectedBackendCalls, err)
	// First Resolve calls refresh twice. Once for init (which succeeds), and
	// then again for refreshing, which is not needed. Hence the results are true
	// and then false.
	if !waitForChannelOrFail(t, refreshed, time.Second) {
		t.Errorf("refreshed returned false, want true")
	}
	if waitForChannelOrFail(t, refreshed, time.Second) {
		t.Errorf("refreshed returned true, want false")
	}
	// Resolve same host again, it should come from cache, no backend call.
	// The backend mapping is changed here so a later refresh is observable.
	newExpectedIP := net.ParseIP("1.2.3.6")
	b.nameToIP[testHost] = []net.IP{newExpectedIP}
	ip, err = r.resolveWithMaxAge(testHost, 4, 60*time.Second, refreshed)
	verify("second-run-from-cache", t, ip, expectedIP, b.calls(), expectedBackendCalls, err)
	if waitForChannelOrFail(t, refreshed, time.Second) {
		t.Errorf("refreshed returned true, want false")
	}
	// Resolve same host again with maxAge=0, it will issue an asynchronous (hence no increment
	// in expectedBackendCalls yet) backend call
	ip, err = r.resolveWithMaxAge(testHost, 4, 0*time.Second, refreshed)
	verify("third-run-expire-cache", t, ip, expectedIP, b.calls(), expectedBackendCalls, err)
	if !waitForChannelOrFail(t, refreshed, time.Second) {
		t.Errorf("refreshed returned false, want true")
	}
	// Now that refresh has happened, we should see a new IP.
	expectedIP = newExpectedIP
	expectedBackendCalls++
	ip, err = r.resolveWithMaxAge(testHost, 4, 60*time.Second, refreshed)
	verify("fourth-run-new-result", t, ip, expectedIP, b.calls(), expectedBackendCalls, err)
	if waitForChannelOrFail(t, refreshed, time.Second) {
		t.Errorf("refreshed returned true, want false")
	}
}
explode_data.jsonl/27868
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 786 }
[ 2830, 3393, 56808, 2354, 5974, 16749, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 609, 17325, 29699, 2354, 37119, 515, 197, 11609, 1249, 3298, 25, 1281, 9147, 14032, 45725, 4711, 46917, 1326, 197, 532, 7000, 1669, 609, 18190, 515, 197, 52...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestEngineBuildBlock(t *testing.T) { vdr, _, sender, vm, te, gBlk := setup(t) sender.Default(true) blk := &Blk{ parent: gBlk, id: GenerateID(), status: choices.Processing, bytes: []byte{1}, } queried := new(bool) sender.PushQueryF = func(inVdrs ids.ShortSet, _ uint32, blkID ids.ID, blkBytes []byte) { if *queried { t.Fatalf("Asked multiple times") } *queried = true vdrSet := ids.ShortSet{} vdrSet.Add(vdr.ID()) if !inVdrs.Equals(vdrSet) { t.Fatalf("Asking wrong validator for preference") } } vm.BuildBlockF = func() (snowman.Block, error) { return blk, nil } te.Notify(common.PendingTxs) if !*queried { t.Fatalf("Should have sent a query to the peer") } }
explode_data.jsonl/3563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 4571, 11066, 4713, 1155, 353, 8840, 836, 8, 341, 5195, 3612, 11, 8358, 4646, 11, 10995, 11, 1013, 11, 342, 4923, 74, 1669, 6505, 1155, 692, 1903, 1659, 13275, 3715, 692, 197, 34989, 1669, 609, 4923, 74, 515, 197, 24804, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestVarName(t *testing.T) { cases := map[string][]string{ "B_C_A": []string{"a", "b", "c"}, "B_C_AA": []string{"aa", "b", "c"}, "AA": []string{"aa"}, } e := &env{} for exp, c := range cases { require.Equal(t, exp, e.varName(c[0], c[1:]...)) } }
explode_data.jsonl/78215
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 3962, 675, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 2415, 14032, 45725, 917, 515, 197, 197, 63590, 920, 1566, 788, 220, 3056, 917, 4913, 64, 497, 330, 65, 497, 330, 66, 7115, 197, 197, 63590, 920, 81941, 788, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSubCommandCall(t *testing.T) { var result string f1 := func(c *Command, args *Args) { result = "noop" } f2 := func(c *Command, args *Args) { result = args.LastParam() } c := &Command{Usage: "foo", Run: f1} s := &Command{Key: "bar", Usage: "foo bar", Run: f2} c.Use(s) args := NewArgs([]string{"foo", "bar", "baz"}) c.Call(args) assert.Equal(t, "baz", result) }
explode_data.jsonl/60589
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 3136, 4062, 7220, 1155, 353, 8840, 836, 8, 341, 2405, 1102, 914, 198, 1166, 16, 1669, 2915, 1337, 353, 4062, 11, 2827, 353, 4117, 8, 314, 1102, 284, 330, 40162, 1, 456, 1166, 17, 1669, 2915, 1337, 353, 4062, 11, 2827, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDo(t *testing.T) { t.Run("success", func(t *testing.T) { cb := circuitbreaker.New(&circuitbreaker.Options{}) got, err := cb.Do(context.Background(), func() (interface{}, error) { return "data", nil }) assert.NoError(t, err) assert.Equal(t, "data", got.(string)) assert.Equal(t, int64(0), cb.Counters().Failures) }) t.Run("error", func(t *testing.T) { cb := circuitbreaker.New(&circuitbreaker.Options{}) wantErr := errors.New("something happens") got, err := cb.Do(context.Background(), func() (interface{}, error) { return "data", wantErr }) assert.Equal(t, err, wantErr) assert.Equal(t, "data", got.(string)) assert.Equal(t, int64(1), cb.Counters().Failures) }) t.Run("ignore", func(t *testing.T) { cb := circuitbreaker.New(&circuitbreaker.Options{}) wantErr := errors.New("something happens") got, err := cb.Do(context.Background(), func() (interface{}, error) { return "data", circuitbreaker.Ignore(wantErr) }) assert.Equal(t, err, wantErr) assert.Equal(t, "data", got.(string)) assert.Equal(t, int64(0), cb.Counters().Failures) }) t.Run("markassuccess", func(t *testing.T) { cb := circuitbreaker.New(&circuitbreaker.Options{}) wantErr := errors.New("something happens") got, err := cb.Do(context.Background(), func() (interface{}, error) { return "data", circuitbreaker.MarkAsSuccess(wantErr) }) assert.Equal(t, err, wantErr) assert.Equal(t, "data", got.(string)) assert.Equal(t, int64(0), cb.Counters().Failures) }) t.Run("context-canceled", func(t *testing.T) { tests := []struct { FailOnContextCancel bool ExpectedFailures int64 }{ {FailOnContextCancel: true, ExpectedFailures: 1}, {FailOnContextCancel: false, ExpectedFailures: 0}, } for _, test := range tests { cancelErr := errors.New("context's Done channel closed.") t.Run(fmt.Sprintf("FailOnContextCanceled=%t", test.FailOnContextCancel), func(t *testing.T) { cb := circuitbreaker.New(&circuitbreaker.Options{ FailOnContextCancel: test.FailOnContextCancel, }) ctx, cancel := context.WithCancel(context.Background()) cancel() 
got, err := cb.Do(ctx, func() (interface{}, error) { <-ctx.Done() return "", cancelErr }) assert.Equal(t, err, cancelErr) assert.Equal(t, "", got.(string)) assert.Equal(t, test.ExpectedFailures, cb.Counters().Failures) }) } }) t.Run("context-timeout", func(t *testing.T) { tests := []struct { FailOnContextDeadline bool ExpectedFailures int64 }{ {FailOnContextDeadline: true, ExpectedFailures: 1}, {FailOnContextDeadline: false, ExpectedFailures: 0}, } for _, test := range tests { timeoutErr := errors.New("context's Done channel closed.") t.Run(fmt.Sprintf("FailOnContextDeadline=%t", test.FailOnContextDeadline), func(t *testing.T) { cb := circuitbreaker.New(&circuitbreaker.Options{ FailOnContextDeadline: test.FailOnContextDeadline, }) ctx, cancel := context.WithTimeout(context.Background(), 1*time.Millisecond) defer cancel() got, err := cb.Do(ctx, func() (interface{}, error) { <-ctx.Done() return "", timeoutErr }) assert.Equal(t, err, timeoutErr) assert.Equal(t, "", got.(string)) assert.Equal(t, test.ExpectedFailures, cb.Counters().Failures) }) } }) t.Run("cyclic-state-transition", func(t *testing.T) { clock := clock.NewMock() cb := circuitbreaker.New(&circuitbreaker.Options{ ShouldTrip: circuitbreaker.NewTripFuncThreshold(3), Clock: clock, OpenTimeout: 1000 * time.Millisecond, HalfOpenMaxSuccesses: 4, }) wantErr := errors.New("something happens") // ( Closed => Open => HalfOpen => Open => HalfOpen => Closed ) x 10 iterations. for i := 0; i < 10; i++ { // State: Closed. for i := 0; i < 3; i++ { assert.Equal(t, circuitbreaker.StateClosed, cb.State()) got, err := cb.Do(context.Background(), func() (interface{}, error) { return "data", wantErr }) assert.Equal(t, err, wantErr) assert.Equal(t, "data", got.(string)) } // State: Closed => Open. Should return nil and ErrOpen error. 
assert.Equal(t, circuitbreaker.StateOpen, cb.State()) got, err := cb.Do(context.Background(), func() (interface{}, error) { return "data", wantErr }) assert.Equal(t, err, circuitbreaker.ErrOpen) assert.Nil(t, got) // State: Open => HalfOpen. clock.Add(1000 * time.Millisecond) assert.Equal(t, circuitbreaker.StateHalfOpen, cb.State()) // State: HalfOpen => Open. got, err = cb.Do(context.Background(), func() (interface{}, error) { return "data", wantErr }) assert.Equal(t, err, wantErr) assert.Equal(t, "data", got.(string)) assert.Equal(t, circuitbreaker.StateOpen, cb.State()) // State: Open => HalfOpen. clock.Add(1000 * time.Millisecond) // State: HalfOpen => Close. for i := 0; i < 4; i++ { assert.Equal(t, circuitbreaker.StateHalfOpen, cb.State()) got, err = cb.Do(context.Background(), func() (interface{}, error) { return "data", nil }) assert.Equal(t, err, nil) assert.Equal(t, "data", got.(string)) } assert.Equal(t, circuitbreaker.StateClosed, cb.State()) } }) }
explode_data.jsonl/8216
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2108 }
[ 2830, 3393, 5404, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5630, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 63810, 1669, 16224, 64121, 7121, 2099, 66, 37268, 64121, 22179, 37790, 197, 3174, 354, 11, 1848, 1669, 9858, 33596, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnumSpecifications(t *testing.T) { cache := newTestCache(t) input := []TestData{{S: "Foo", I: 1}, {S: "Bar", I: 2}} cases := []struct { name string spec map[string][]string orderBy string expected []map[string]interface{} skipJson bool }{ { name: "No spec", spec: map[string][]string{}, orderBy: "S", expected: []map[string]interface{}{{"S": "Bar", "I": 2.0}, {"S": "Foo", "I": 1.0}}}, { name: "Spec string enum with non natural key order", spec: map[string][]string{"S": {"Foo", "Bar"}}, orderBy: "S", expected: []map[string]interface{}{{"S": "Foo", "I": 1.0}, {"S": "Bar", "I": 2.0}}}, { name: "Spec int enum with non natural key order", spec: map[string][]string{"I": {"2", "1"}}, orderBy: "I", expected: []map[string]interface{}{{"S": "Bar", "I": "2"}, {"S": "Foo", "I": "1"}}, // Making enums of integers does not (currently) work and since you cannot type // spec JSON input the same way that you can with CSV there is no turn integers // into a string/enum. skipJson: true}, } for _, inputFormat := range []string{"csv", "json"} { for _, tc := range cases { t.Run(fmt.Sprintf("Format: %s: %s", inputFormat, tc.name), func(t *testing.T) { jsonSpec, err := json.Marshal(tc.spec) assertEqual(t, nil, err) // For CSV the enum columns must be part of type specification // in addition to the enum spec. jsonTyp, err := json.Marshal(enumTypes(tc.spec)) assertEqual(t, nil, err) headers := map[string]string{ "X-QCache-enum-specs": string(jsonSpec), "X-QCache-types": string(jsonTyp)} key := "FOO" if inputFormat == "json" { if tc.skipJson { return } cache.insertJson(key, headers, input) } else { cache.insertCsv(key, headers, input) } output := make([]map[string]interface{}, 0) cache.queryJson(key, nil, fmt.Sprintf(`{"select": ["S", "I"], "order_by": ["%s"]}`, tc.orderBy), "GET", &output) assertEqual(t, tc.expected, output) }) } } }
explode_data.jsonl/8985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 903 }
[ 2830, 3393, 10766, 85021, 1155, 353, 8840, 836, 8, 341, 52680, 1669, 501, 2271, 8233, 1155, 340, 22427, 1669, 3056, 83920, 2979, 50, 25, 330, 40923, 497, 358, 25, 220, 16, 2137, 314, 50, 25, 330, 3428, 497, 358, 25, 220, 17, 47449, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuildUpdateError(t *testing.T) { testServer(t, func(c *stdsdk.Client, p *structs.MockProvider) { var b1 *structs.Build p.On("BuildUpdate", "app1", "build1", structs.BuildUpdateOptions{}).Return(b1, fmt.Errorf("err1")) err := c.Put("/apps/app1/builds/build1", stdsdk.RequestOptions{}, nil) require.EqualError(t, err, "err1") }) }
explode_data.jsonl/71428
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 11066, 4289, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 1155, 11, 2915, 1337, 353, 1834, 51295, 11716, 11, 281, 353, 1235, 82, 24664, 5179, 8, 341, 197, 2405, 293, 16, 353, 1235, 82, 25212, 198, 197, 3223, 8071, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoad(t *testing.T) { gou.Flows = make(map[string]*gou.Flow) Load(config.Conf) LoadFrom("not a path", "404.") check(t) }
explode_data.jsonl/70069
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 59 }
[ 2830, 3393, 5879, 1155, 353, 8840, 836, 8, 341, 3174, 283, 73459, 4241, 284, 1281, 9147, 14032, 8465, 70, 283, 60732, 340, 197, 5879, 8754, 4801, 69, 340, 197, 5879, 3830, 445, 1921, 264, 1815, 497, 330, 19, 15, 19, 13053, 25157, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestWhereUint(t *testing.T) { v := &Value{data: []uint{uint(1), uint(1), uint(1), uint(1), uint(1), uint(1)}} selected := v.WhereUint(func(i int, val uint) bool { return i%2 == 0 }).MustUintSlice() assert.Equal(t, 3, len(selected)) }
explode_data.jsonl/23460
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 9064, 21570, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 3056, 2496, 90, 2496, 7, 16, 701, 2622, 7, 16, 701, 2622, 7, 16, 701, 2622, 7, 16, 701, 2622, 7, 16, 701, 2622, 7, 16, 9139, 630, 7063...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrefixMetadataToACLs(t *testing.T) { t.Parallel() epoch := time.Date(2018, time.February, 1, 2, 3, 0, 0, time.UTC) Convey("Works", t, func() { out := prefixMetadataToACLs(&api.InheritedPrefixMetadata{ PerPrefixMetadata: []*api.PrefixMetadata{ { Prefix: "a", Acls: []*api.PrefixMetadata_ACL{ {Role: api.Role_READER, Principals: []string{"group:a"}}, {Role: api.Role_READER, Principals: []string{"group:b"}}, {Role: api.Role_WRITER, Principals: []string{"group:b"}}, {Role: api.Role_OWNER, Principals: []string{"group:c"}}, }, UpdateUser: "user:a-updater@example.com", UpdateTime: google.NewTimestamp(epoch), }, { Prefix: "a/b/c", Acls: []*api.PrefixMetadata_ACL{ {Role: api.Role_OWNER, Principals: []string{"group:c"}}, }, UpdateUser: "user:c-updater@example.com", UpdateTime: google.NewTimestamp(epoch), }, }, }) So(out, ShouldResemble, []PackageACL{ { PackagePath: "a", Role: "READER", Principals: []string{"group:a", "group:b"}, // merged into one PackageACL ModifiedBy: "user:a-updater@example.com", ModifiedTs: UnixTime(epoch), }, { PackagePath: "a", Role: "WRITER", Principals: []string{"group:b"}, ModifiedBy: "user:a-updater@example.com", ModifiedTs: UnixTime(epoch), }, { PackagePath: "a", Role: "OWNER", Principals: []string{"group:c"}, ModifiedBy: "user:a-updater@example.com", ModifiedTs: UnixTime(epoch), }, { PackagePath: "a/b/c", Role: "OWNER", Principals: []string{"group:c"}, ModifiedBy: "user:c-updater@example.com", ModifiedTs: UnixTime(epoch), }, }) }) }
explode_data.jsonl/4606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 876 }
[ 2830, 3393, 14335, 14610, 1249, 55393, 82, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 20197, 1669, 882, 8518, 7, 17, 15, 16, 23, 11, 882, 991, 68, 6812, 11, 220, 16, 11, 220, 17, 11, 220, 18, 11, 220, 15, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetLogoutRequest(t *testing.T) { for k, tc := range []struct { exists bool used bool status int }{ {false, false, http.StatusNotFound}, {true, false, http.StatusOK}, {true, true, http.StatusConflict}, } { t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { key := fmt.Sprint(k) challenge := "challenge" + key conf := internal.NewConfigurationWithDefaults() reg := internal.NewRegistryMemory(conf) if tc.exists { require.NoError(t, reg.ConsentManager().CreateLogoutRequest(context.TODO(), &LogoutRequest{ Client: &client.Client{ID: "client" + key}, Challenge: challenge, WasUsed: tc.used, })) } h := NewHandler(reg, conf) r := x.NewRouterAdmin() h.SetRoutes(r) ts := httptest.NewServer(r) defer ts.Close() c := &http.Client{} resp, err := c.Get(ts.URL + LogoutPath + "?challenge=" + challenge) require.NoError(t, err) require.EqualValues(t, tc.status, resp.StatusCode) }) } }
explode_data.jsonl/47936
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 428 }
[ 2830, 3393, 1949, 27958, 1900, 1155, 353, 8840, 836, 8, 341, 2023, 595, 11, 17130, 1669, 2088, 3056, 1235, 341, 197, 8122, 1671, 1807, 198, 197, 197, 2591, 256, 1807, 198, 197, 23847, 526, 198, 197, 59403, 197, 197, 90, 3849, 11, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInt64sToMap(t *testing.T) { assert.Equal(t, map[int64]bool{}, Int64sToMap([]int64{})) assert.Equal(t, map[int64]bool{1: true, 4: true, 16: true}, Int64sToMap([]int64{1, 4, 16}), ) }
explode_data.jsonl/14321
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 1072, 21, 19, 82, 1249, 2227, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 1155, 11, 2415, 18640, 21, 19, 96436, 22655, 1333, 21, 19, 82, 1249, 2227, 10556, 396, 21, 19, 6257, 1171, 6948, 12808, 1155, 345, 197, 19567, 1864...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMkdir(t *testing.T) { ctx := context.Background() r := fstest.NewRun(t) defer r.Finalise() err := operations.Mkdir(ctx, r.Fremote, "") require.NoError(t, err) fstest.CheckListing(t, r.Fremote, []fstest.Item{}) err = operations.Mkdir(ctx, r.Fremote, "") require.NoError(t, err) }
explode_data.jsonl/51923
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 44, 12438, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 7000, 1669, 48434, 477, 7121, 6727, 1155, 340, 16867, 435, 991, 977, 1064, 2822, 9859, 1669, 7525, 1321, 12438, 7502, 11, 435, 991, 18147, 11, 14676, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRetryBasic(t *testing.T) { conf := NewConfig() childConf := NewConfig() conf.Retry.Output = &childConf output, err := NewRetry(conf, nil, log.Noop(), metrics.Noop()) if err != nil { t.Fatal(err) } ret, ok := output.(*Retry) if !ok { t.Fatal("Failed to cast") } mOut := &mockOutput{ ts: make(chan types.Transaction), } ret.wrapped = mOut tChan := make(chan types.Transaction) resChan := make(chan types.Response) if err = ret.Consume(tChan); err != nil { t.Fatal(err) } testMsg := message.New(nil) go func() { select { case tChan <- types.NewTransaction(testMsg, resChan): case <-time.After(time.Second): t.Fatal("timed out") } }() var tran types.Transaction select { case tran = <-mOut.ts: case <-time.After(time.Second): t.Fatal("timed out") } if tran.Payload != testMsg { t.Error("Wrong payload returned") } select { case tran.ResponseChan <- response.NewAck(): case <-time.After(time.Second): t.Fatal("timed out") } select { case res := <-resChan: if err = res.Error(); err != nil { t.Error(err) } case <-time.After(time.Second): t.Fatal("timed out") } output.CloseAsync() if err = output.WaitForClose(time.Second); err != nil { t.Error(err) } }
explode_data.jsonl/3320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 512 }
[ 2830, 3393, 51560, 15944, 1155, 353, 8840, 836, 8, 341, 67850, 1669, 1532, 2648, 2822, 58391, 15578, 1669, 1532, 2648, 741, 67850, 2013, 15149, 34246, 284, 609, 3048, 15578, 271, 21170, 11, 1848, 1669, 1532, 51560, 29879, 11, 2092, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3