text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestNewResources(t *testing.T) { tests := []struct { name string session session.ServiceFormatter want *Resources wantErr bool }{ { name: "passing", session: &session.Mock{ URL: "https://test.salesforce.com", }, want: &Resources{ metadata: &metadata{ session: &session.Mock{ URL: "https://test.salesforce.com", }, }, describe: &describe{ session: &session.Mock{ URL: "https://test.salesforce.com", }, }, list: &list{ session: &session.Mock{ URL: "https://test.salesforce.com", }, }, dml: &dml{ session: &session.Mock{ URL: "https://test.salesforce.com", }, }, query: &query{ session: &session.Mock{ URL: "https://test.salesforce.com", }, }, }, wantErr: false, }, { name: "error_nil_session", want: nil, wantErr: true, }, { name: "error_refresh", session: &session.Mock{ URL: "https://test.salesforce.com", RefreshErr: errors.New("failed to refresh session"), }, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := NewResources(tt.session) t.Log("got error:", err) if (err != nil) != tt.wantErr { t.Errorf("NewResources() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("NewResources() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/45149
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 744 }
[ 2830, 3393, 3564, 11277, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 25054, 3797, 13860, 14183, 198, 197, 50780, 262, 353, 11277, 198, 197, 50780, 7747, 1807, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEcdsaSignerSign(t *testing.T) { t.Parallel() signer := &ecdsaSigner{} verifierPrivateKey := &ecdsaPrivateKeyVerifier{} verifierPublicKey := &ecdsaPublicKeyKeyVerifier{} // Generate a key lowLevelKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) assert.NoError(t, err) k := &ecdsaPrivateKey{lowLevelKey} pk, err := k.PublicKey() assert.NoError(t, err) // Sign msg := []byte("Hello World") sigma, err := signer.Sign(k, msg, nil) assert.NoError(t, err) assert.NotNil(t, sigma) // Verify valid, err := verifyECDSA(&lowLevelKey.PublicKey, sigma, msg, nil) assert.NoError(t, err) assert.True(t, valid) valid, err = verifierPrivateKey.Verify(k, sigma, msg, nil) assert.NoError(t, err) assert.True(t, valid) valid, err = verifierPublicKey.Verify(pk, sigma, msg, nil) assert.NoError(t, err) assert.True(t, valid) }
explode_data.jsonl/29813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 36, 4385, 9081, 7264, 261, 7264, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 69054, 261, 1669, 609, 757, 96780, 7264, 261, 16094, 197, 423, 3049, 75981, 1669, 609, 757, 96780, 75981, 82394, 16094, 197, 423, 3049, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadManifest_Validate_BundleOutput(t *testing.T) { cxt := context.NewTestContext(t) cxt.AddTestFile("testdata/outputs/bundle-outputs.yaml", config.Name) wantOutputs := []OutputDefinition{ { Name: "mysql-root-password", Schema: definition.Schema{ Description: "The root MySQL password", Type: "string", }, }, { Name: "mysql-password", Schema: definition.Schema{ Type: "string", }, ApplyTo: []string{ "install", "upgrade", }, }, } m, err := LoadManifestFrom(cxt.Context, config.Name) require.NoError(t, err, "could not load manifest") require.Equal(t, wantOutputs, m.Outputs) }
explode_data.jsonl/37723
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 4418, 38495, 62, 17926, 1668, 4206, 5097, 1155, 353, 8840, 836, 8, 341, 1444, 2252, 1669, 2266, 7121, 2271, 1972, 1155, 692, 1444, 2252, 1904, 2271, 1703, 445, 92425, 14, 41006, 3470, 4206, 12, 41006, 33406, 497, 2193, 2967,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidatorsSortTendermint(t *testing.T) { vals := make([]types.Validator, 100) for i := range vals { pk := ed25519.GenPrivKey().PubKey() pk2 := ed25519.GenPrivKey().PubKey() vals[i] = newValidator(t, sdk.ValAddress(pk2.Address()), pk) vals[i].Status = types.Bonded vals[i].Tokens = sdk.NewInt(rand.Int63()) } // create some validators with the same power for i := 0; i < 10; i++ { vals[i].Tokens = sdk.NewInt(1000000) } valz := types.Validators(vals) // create expected tendermint validators by converting to tendermint then sorting expectedVals, err := teststaking.ToTmValidators(valz) require.NoError(t, err) sort.Sort(tmtypes.ValidatorsByVotingPower(expectedVals)) // sort in SDK and then convert to tendermint sort.Sort(types.ValidatorsByVotingPower(valz)) actualVals, err := teststaking.ToTmValidators(valz) require.NoError(t, err) require.Equal(t, expectedVals, actualVals, "sorting in SDK is not the same as sorting in Tendermint") }
explode_data.jsonl/48980
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 362 }
[ 2830, 3393, 31748, 10231, 51, 1659, 67791, 1155, 353, 8840, 836, 8, 341, 19302, 82, 1669, 1281, 10556, 9242, 13, 14256, 11, 220, 16, 15, 15, 692, 2023, 600, 1669, 2088, 28356, 341, 197, 3223, 74, 1669, 1578, 17, 20, 20, 16, 24, 65...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReplaceOrder(t *testing.T) { t.Parallel() _, err := b.ReplaceOrder(1337, "BTCUSD", 1, 1, true, "market", false) if err == nil { t.Error("Test Failed - ReplaceOrder() error") } }
explode_data.jsonl/79950
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 23107, 4431, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 6878, 1848, 1669, 293, 20858, 4431, 7, 16, 18, 18, 22, 11, 330, 59118, 26749, 497, 220, 16, 11, 220, 16, 11, 830, 11, 330, 19301, 497, 895, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_paddingAddress(t *testing.T) { type args struct { address string } tests := []struct { name string args args want string }{ {name: "0", args: args{"0x0"}, want: strings.Repeat("0", 25)}, {name: "1", args: args{"0"}, want: strings.Repeat("0", 25)}, {name: "2", args: args{"1"}, want: strings.Repeat("0", 24) + "1"}, {name: "3", args: args{"0x3f5ce5fbfe3e9af3971dd833d26ba9b5c936f0be"}, want: "0000000000000000000000003f5ce5fbfe3e9af3971dd833d26ba9b5c936f0be"}, {name: "4", args: args{"3f5ce5fbfe3e9af3971dd833d26ba9b5c936f0be"}, want: "0000000000000000000000003f5ce5fbfe3e9af3971dd833d26ba9b5c936f0be"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := paddingAddress(tt.args.address); got != tt.want { t.Errorf("paddingAddress() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/67824
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 384 }
[ 2830, 3393, 40726, 4286, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 63202, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 4700...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAccAWSS3BucketObject_sse(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" rInt := acctest.RandInt() source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") defer os.Remove(source) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { PreConfig: func() {}, Config: testAccAWSS3BucketObjectConfig_withSSE(rInt, source), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectSSE(resourceName, "AES256"), testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), ), }, }, }) }
explode_data.jsonl/64965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 14603, 14419, 1220, 18, 36018, 1190, 643, 325, 1155, 353, 8840, 836, 8, 341, 2405, 2839, 274, 18, 25618, 5097, 198, 50346, 675, 1669, 330, 8635, 643, 18, 38749, 5314, 6035, 698, 7000, 1072, 1669, 1613, 67880, 2013, 437, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_postWorkflowHandlerWithRootShouldSuccess(t *testing.T) { api, db, router := newTestAPI(t) // Init user u, pass := assets.InsertAdminUser(t, api.mustDB()) // Init project key := sdk.RandomString(10) proj := assets.InsertTestProject(t, db, api.Cache, key, key) // Init pipeline pip := sdk.Pipeline{ Name: "pipeline1", ProjectID: proj.ID, } test.NoError(t, pipeline.InsertPipeline(api.mustDB(), &pip)) //Prepare request vars := map[string]string{ "permProjectKey": proj.Key, } uri := router.GetRoute("POST", api.postWorkflowHandler, vars) test.NotEmpty(t, uri) // Insert application app := sdk.Application{ Name: "app1", RepositoryFullname: "test/app1", VCSServer: "github", } test.NoError(t, application.Insert(api.mustDB(), *proj, &app)) var workflow = &sdk.Workflow{ Name: "Name", Description: "Description", WorkflowData: sdk.WorkflowData{ Node: sdk.Node{ Type: sdk.NodeTypePipeline, Context: &sdk.NodeContext{ ApplicationID: app.ID, PipelineID: pip.ID, }, }, }, } req := assets.NewAuthentifiedRequest(t, u, pass, "POST", uri, &workflow) //Do the request w := httptest.NewRecorder() router.Mux.ServeHTTP(w, req) assert.Equal(t, 201, w.Code) test.NoError(t, json.Unmarshal(w.Body.Bytes(), &workflow)) assert.NotEqual(t, 0, workflow.ID) assert.NotEqual(t, 0, workflow.WorkflowData.Node.Context.ApplicationID) assert.NotNil(t, workflow.WorkflowData.Node.Context.DefaultPayload) payload, err := workflow.WorkflowData.Node.Context.DefaultPayloadToMap() test.NoError(t, err) assert.NotEmpty(t, payload["git.branch"], "git.branch should not be empty") }
explode_data.jsonl/31073
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 701 }
[ 2830, 3393, 6333, 62768, 3050, 2354, 8439, 14996, 7188, 1155, 353, 8840, 836, 8, 1476, 54299, 11, 2927, 11, 9273, 1669, 501, 2271, 7082, 1155, 692, 197, 322, 15690, 1196, 198, 10676, 11, 1494, 1669, 11770, 23142, 7210, 1474, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidArgsFuncSingleCmdInvalidArg(t *testing.T) { rootCmd := &Command{ Use: "root", // If we don't specify a value for Args, this test fails. // This is only true for a root command without any subcommands, and is caused // by the fact that the __complete command becomes a subcommand when there should not be one. // The problem is in the implementation of legacyArgs(). Args: MinimumNArgs(1), ValidArgsFunction: validArgsFunc, Run: emptyRun, } // Check completing with wrong number of args output, err := executeCommand(rootCmd, ShellCompNoDescRequestCmd, "unexpectedArg", "t") if err != nil { t.Errorf("Unexpected error: %v", err) } expected := strings.Join([]string{ ":4", "Completion ended with directive: ShellCompDirectiveNoFileComp", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } }
explode_data.jsonl/43748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 310 }
[ 2830, 3393, 4088, 4117, 9626, 10888, 15613, 7928, 2735, 1155, 353, 8840, 836, 8, 341, 33698, 15613, 1669, 609, 4062, 515, 197, 95023, 25, 330, 2888, 756, 197, 197, 322, 1416, 582, 1513, 944, 13837, 264, 897, 369, 17693, 11, 419, 1273,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetPipelineConditionStatus_PipelineTimeouts(t *testing.T) { d, err := dagFromState(oneFinishedState) if err != nil { t.Fatalf("Unexpected error while buildig DAG for state %v: %v", oneFinishedState, err) } pr := &v1beta1.PipelineRun{ ObjectMeta: metav1.ObjectMeta{Name: "pipelinerun-no-tasks-started"}, Spec: v1beta1.PipelineRunSpec{ Timeout: &metav1.Duration{Duration: 1 * time.Minute}, }, Status: v1beta1.PipelineRunStatus{ PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ StartTime: &metav1.Time{Time: now.Add(-2 * time.Minute)}, }, }, } facts := PipelineRunFacts{ State: oneFinishedState, TasksGraph: d, FinalTasksGraph: &dag.Graph{}, } c := facts.GetPipelineConditionStatus(context.Background(), pr, zap.NewNop().Sugar(), testClock) if c.Status != corev1.ConditionFalse && c.Reason != v1beta1.PipelineRunReasonTimedOut.String() { t.Fatalf("Expected to get status %s but got %s for state %v", corev1.ConditionFalse, c.Status, oneFinishedState) } }
explode_data.jsonl/18200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 1949, 34656, 10547, 2522, 1088, 8790, 7636, 82, 1155, 353, 8840, 836, 8, 341, 2698, 11, 1848, 1669, 28069, 3830, 1397, 51067, 24890, 1397, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 29430, 1465, 1393, 1936, 343, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestListPullRequestComments(t *testing.T) { ts := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet { t.Errorf("Bad method: %s", r.Method) } if r.URL.Path == "/repos/k8s/kuber/pulls/15/comments" { prcs := []ReviewComment{{ID: 1}} b, err := json.Marshal(prcs) if err != nil { t.Fatalf("Didn't expect error: %v", err) } w.Header().Set("Link", fmt.Sprintf(`<blorp>; rel="first", <https://%s/someotherpath>; rel="next"`, r.Host)) fmt.Fprint(w, string(b)) } else if r.URL.Path == "/someotherpath" { prcs := []ReviewComment{{ID: 2}} b, err := json.Marshal(prcs) if err != nil { t.Fatalf("Didn't expect error: %v", err) } fmt.Fprint(w, string(b)) } else { t.Errorf("Bad request path: %s", r.URL.Path) } })) defer ts.Close() c := getClient(ts.URL) prcs, err := c.ListPullRequestComments("k8s", "kuber", 15) if err != nil { t.Errorf("Didn't expect error: %v", err) } else if len(prcs) != 2 { t.Errorf("Expected two comments, found %d: %v", len(prcs), prcs) } else if prcs[0].ID != 1 || prcs[1].ID != 2 { t.Errorf("Wrong issue IDs: %v", prcs) } }
explode_data.jsonl/6271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 535 }
[ 2830, 3393, 852, 36068, 1900, 17373, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 13470, 1220, 2836, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 435, 20798, 961, 1758, 20798, 194...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestClient_TLSActivation(t *testing.T) { t.Parallel() fixtureBase := "custom_tls_activation/" // Create var err error var ta *TLSActivation record(t, fixtureBase+"create", func(c *Client) { ta, err = c.CreateTLSActivation(&CreateTLSActivationInput{ Certificate: &CustomTLSCertificate{ID: "CERTIFICATE_ID"}, Configuration: &TLSConfiguration{ID: "CONFIGURATION_ID"}, Domain: &TLSDomain{ID: "DOMAIN_NAME"}, }) }) if err != nil { t.Fatal(err) } // Ensure deleted defer func() { record(t, fixtureBase+"cleanup", func(c *Client) { c.DeleteTLSActivation(&DeleteTLSActivationInput{ ID: ta.ID, }) }) }() // List var lta []*TLSActivation record(t, fixtureBase+"list", func(c *Client) { lta, err = c.ListTLSActivations(&ListTLSActivationsInput{}) }) if err != nil { t.Fatal(err) } if len(lta) < 1 { t.Errorf("bad TLS activations: %v", lta) } if lta[0].Certificate == nil { t.Errorf("TLS certificate relation should not be nil: %v", lta) } if lta[0].Certificate.ID != ta.Certificate.ID { t.Errorf("bad Certificate ID: %q (%q)", lta[0].Certificate.ID, ta.Certificate.ID) } if lta[0].Configuration == nil { t.Errorf("TLS Configuration relation should not be nil: %v", lta) } if lta[0].Configuration.ID != ta.Configuration.ID { t.Errorf("bad Configuration ID: %q (%q)", lta[0].Configuration.ID, ta.Configuration.ID) } if lta[0].Domain == nil { t.Errorf("TLS domain relation should not be nil: %v", lta) } if lta[0].Domain.ID != ta.Domain.ID { t.Errorf("bad Domain ID: %q (%q)", lta[0].Domain.ID, ta.Domain.ID) } // Get var gta *TLSActivation record(t, fixtureBase+"get", func(c *Client) { gta, err = c.GetTLSActivation(&GetTLSActivationInput{ ID: ta.ID, }) }) if err != nil { t.Fatal(err) } if ta.ID != gta.ID { t.Errorf("bad ID: %q (%q)", ta.ID, gta.ID) } // Update var uta *TLSActivation record(t, fixtureBase+"update", func(c *Client) { uta, err = c.UpdateTLSActivation(&UpdateTLSActivationInput{ ID: "ACTIVATION_ID", Certificate: &CustomTLSCertificate{}, }) }) if err != nil { t.Fatal(err) } if ta.ID != 
uta.ID { t.Errorf("bad ID: %q (%q)", ta.ID, uta.ID) } // Delete record(t, fixtureBase+"delete", func(c *Client) { err = c.DeleteTLSActivation(&DeleteTLSActivationInput{ ID: ta.ID, }) }) if err != nil { t.Fatal(err) } }
explode_data.jsonl/2979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1040 }
[ 2830, 3393, 2959, 69067, 61460, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1166, 12735, 3978, 1669, 330, 9163, 71262, 52404, 85312, 197, 322, 4230, 198, 2405, 1848, 1465, 198, 2405, 9450, 353, 45439, 61460, 198, 71952, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckConversionM2(t *testing.T) { t.Run("converts temperature", func(t *testing.T) { dir, err := os.Getwd() if err != nil { t.Fatal(err) } cmd := exec.Command(path.Join(dir, binaryName), []string{"F"}...) stdin, e := cmd.StdinPipe() if e != nil { panic(e) } outPipe, e := cmd.StdoutPipe() if e != nil { panic(e) } if e := cmd.Start(); e != nil { panic(e) } _, e = stdin.Write([]byte("32\n")) if e != nil { panic(e) } _, e = stdin.Write([]byte("n\n")) if e != nil { panic(e) } stdin.Close() outPrint, _ := ioutil.ReadAll(outPipe) if !strings.Contains(string(outPrint), "32 F = 0 C") { t.Fatal("Did not properly convert temperature") } }) }
explode_data.jsonl/81218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 3973, 48237, 44, 17, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 14166, 82, 9315, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 48532, 11, 1848, 1669, 2643, 2234, 6377, 741, 197, 743, 1848, 961, 2092, 341, 298, 3244,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestOutboundListenerAccessLogs(t *testing.T) { t.Helper() p := &fakePlugin{} env := buildListenerEnv(nil) listeners := buildAllListeners(p, nil, env) found := false for _, l := range listeners { if l.Name == VirtualOutboundListenerName { fc := &tcp_proxy.TcpProxy{} if err := getFilterConfig(l.FilterChains[0].Filters[0], fc); err != nil { t.Fatalf("failed to get TCP Proxy config: %s", err) } if fc.AccessLog == nil { t.Fatal("expected access log configuration") } found = true break } } if !found { t.Fatal("expected virtual outbound listener, but not found") } // Update MeshConfig env.Mesh().AccessLogFormat = "format modified" // Trigger MeshConfig change and validate that access log is recomputed. resetCachedListenerConfig(nil) // Validate that access log filter users the new format. listeners = buildAllListeners(p, nil, env) for _, l := range listeners { if l.Name == VirtualOutboundListenerName { validateAccessLog(t, l, "format modified") } } }
explode_data.jsonl/61659
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 368 }
[ 2830, 3393, 2662, 10891, 2743, 6054, 51053, 1155, 353, 8840, 836, 8, 341, 3244, 69282, 741, 3223, 1669, 609, 30570, 11546, 16094, 57538, 1669, 1936, 2743, 14359, 27907, 692, 14440, 18223, 1669, 1936, 2403, 31570, 1295, 11, 2092, 11, 6105,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRequiredCreateOpts(t *testing.T) { res := listeners.Create(fake.ServiceClient(), listeners.CreateOpts{}) if res.Err == nil { t.Fatalf("Expected error, got none") } res = listeners.Create(fake.ServiceClient(), listeners.CreateOpts{Name: "foo"}) if res.Err == nil { t.Fatalf("Expected error, got none") } res = listeners.Create(fake.ServiceClient(), listeners.CreateOpts{Name: "foo", ProjectID: "bar"}) if res.Err == nil { t.Fatalf("Expected error, got none") } res = listeners.Create(fake.ServiceClient(), listeners.CreateOpts{Name: "foo", ProjectID: "bar", Protocol: "bar"}) if res.Err == nil { t.Fatalf("Expected error, got none") } res = listeners.Create(fake.ServiceClient(), listeners.CreateOpts{Name: "foo", ProjectID: "bar", Protocol: "bar", ProtocolPort: 80}) if res.Err == nil { t.Fatalf("Expected error, got none") } }
explode_data.jsonl/27551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 308 }
[ 2830, 3393, 8164, 4021, 43451, 1155, 353, 8840, 836, 8, 341, 10202, 1669, 23562, 7251, 74138, 13860, 2959, 1507, 23562, 7251, 43451, 37790, 743, 592, 27862, 621, 2092, 341, 197, 3244, 30762, 445, 18896, 1465, 11, 2684, 6857, 1138, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestAccAWSDBInstance_generatedName(t *testing.T) { var v rds.DBInstance resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSDBInstanceDestroy, Steps: []resource.TestStep{ { Config: testAccAWSDBInstanceConfig_generatedName, Check: resource.ComposeTestCheckFunc( testAccCheckAWSDBInstanceExists("aws_db_instance.test", &v), testAccCheckAWSDBInstanceAttributes(&v), ), }, }, }) }
explode_data.jsonl/33917
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 14603, 36136, 3506, 2523, 67313, 675, 1155, 353, 8840, 836, 8, 341, 2405, 348, 435, 5356, 22537, 2523, 271, 50346, 8787, 1155, 11, 5101, 31363, 515, 197, 197, 4703, 3973, 25, 257, 2915, 368, 314, 1273, 14603, 4703, 3973, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdate(t *testing.T) { qs := dORM.QueryTable("user") num, err := qs.Filter("user_name", "slene").Filter("is_staff", false).Update(Params{ "is_staff": true, "is_active": true, }) throwFail(t, err) throwFail(t, AssertIs(num, 1)) // with join num, err = qs.Filter("user_name", "slene").Filter("profile__age", 28).Filter("is_staff", true).Update(Params{ "is_staff": false, }) throwFail(t, err) throwFail(t, AssertIs(num, 1)) num, err = qs.Filter("user_name", "slene").Update(Params{ "Nums": ColValue(ColAdd, 100), }) throwFail(t, err) throwFail(t, AssertIs(num, 1)) num, err = qs.Filter("user_name", "slene").Update(Params{ "Nums": ColValue(ColMinus, 50), }) throwFail(t, err) throwFail(t, AssertIs(num, 1)) num, err = qs.Filter("user_name", "slene").Update(Params{ "Nums": ColValue(ColMultiply, 3), }) throwFail(t, err) throwFail(t, AssertIs(num, 1)) num, err = qs.Filter("user_name", "slene").Update(Params{ "Nums": ColValue(ColExcept, 5), }) throwFail(t, err) throwFail(t, AssertIs(num, 1)) user := User{UserName: "slene"} err = dORM.Read(&user, "UserName") throwFail(t, err) throwFail(t, AssertIs(user.Nums, 30)) }
explode_data.jsonl/18152
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 511 }
[ 2830, 3393, 4289, 1155, 353, 8840, 836, 8, 341, 18534, 82, 1669, 294, 4365, 15685, 2556, 445, 872, 1138, 22431, 11, 1848, 1669, 32421, 31696, 445, 872, 1269, 497, 330, 3226, 1952, 1827, 5632, 445, 285, 47060, 497, 895, 568, 4289, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiChmod(t *testing.T) { files := []string{ dirRoot + "/chmod1.txt", dirRoot + "/chmod2.txt", dirRoot + "/chmod3.txt", } err := Touch(files) if err != nil { panic(err) } t.Cleanup(func() { _ = RemoveWithRecur(dirRoot) }) if !Exists(files) { t.Error("Multi Chmod test failed!") } err = Chmod(files, 755) if err != nil { panic(err) } }
explode_data.jsonl/34175
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 20358, 1143, 2593, 1155, 353, 8840, 836, 8, 341, 74075, 1669, 3056, 917, 515, 197, 48532, 8439, 488, 3521, 56274, 16, 3909, 756, 197, 48532, 8439, 488, 3521, 56274, 17, 3909, 756, 197, 48532, 8439, 488, 3521, 56274, 18, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIterString(t *testing.T) { s := []string{"abc", "bbc", "abccd", "abcdd"} it := New(FromStrings(s)) newit := it. Filter(func(item interface{}) bool { elm := item.(string) return strings.HasPrefix(elm, "ab") }). Or(func(item interface{}) bool { return item.(string) != "abcdd" }, "abcde"). Map(func(item interface{}) interface{} { return fmt.Sprintf("%s starts from 'ab'", item.(string)) }). Every(func(i int, v interface{}) interface{} { return fmt.Sprintf("%d: %s", i, v.(string)) }) newit.Each(func(it interface{}) { fmt.Printf("%s\n", it) }) // layer is now: // zero := "0: abc starts from 'ab'" // one := "1: abccd starts from 'ab'" two := "2: abcde starts from 'ab'" if newit.Nth(2) != two { t.Errorf("Nth element is wrong, got: %s, want:%s", newit.Nth(2), two) } }
explode_data.jsonl/56143
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 359 }
[ 2830, 3393, 8537, 703, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 3056, 917, 4913, 13683, 497, 330, 84185, 497, 330, 370, 95840, 497, 330, 13683, 631, 16707, 23374, 1669, 1532, 7, 3830, 20859, 1141, 4390, 8638, 275, 1669, 432, 624, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStorage_FormatVariables(t *testing.T) { storage := &Storage{} t.Log(storage.FormatVariables("/var/log/teaweb-year${year}-month${month}-week${week}-day${day}-hour${hour}-minute${minute}-second${second}-date${date}")) }
explode_data.jsonl/28491
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 5793, 72999, 22678, 1155, 353, 8840, 836, 8, 341, 197, 16172, 1669, 609, 5793, 16094, 3244, 5247, 52463, 9978, 22678, 4283, 947, 19413, 14, 665, 672, 3065, 4666, 2365, 3157, 19732, 10249, 2365, 10249, 19732, 10264, 2365, 10264...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestExclusiveMaximum(t *testing.T) { GenerateValuesAsYaml(t, "exclusiveMaximum.test.schema.json", func(console *tests.ConsoleWrapper, donec chan struct{}) { defer close(donec) // Test boolean type console.ExpectString("Enter a value for numberValue") console.SendLine("10.1") console.ExpectString("Sorry, your reply was invalid: 10.1 is not less than 10.1") console.ExpectString("Enter a value for numberValue") console.SendLine("1") console.ExpectString("Enter a value for integerValue") console.SendLine("20") console.ExpectString("Sorry, your reply was invalid: 20 is not less than 20") console.ExpectString("Enter a value for integerValue") console.SendLine("2") console.ExpectEOF() }) }
explode_data.jsonl/61754
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 70405, 27309, 1155, 353, 8840, 836, 8, 341, 197, 31115, 6227, 2121, 56, 9467, 1155, 11, 330, 89122, 27309, 5958, 30892, 4323, 756, 197, 29244, 52818, 353, 23841, 46298, 11542, 11, 2814, 66, 26023, 2036, 28875, 341, 298, 1686...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogDist(t *testing.T) { a := common.NodeID{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1} b := common.NodeID{0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1} l := LogDist(a, b) fmt.Printf("l:%d bucketMinDistance:%d a:%s b:%s\n", l, bucketMinDistance, a.String(), b.String()) }
explode_data.jsonl/79616
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 2201, 23356, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 4185, 21714, 915, 90, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcileOnCancelledTaskRun(t *testing.T) { taskRun := tb.TaskRun("test-taskrun-run-cancelled", tb.TaskRunNamespace("foo"), tb.TaskRunSpec( tb.TaskRunTaskRef(simpleTask.Name), tb.TaskRunCancelled, ), tb.TaskRunStatus(tb.StatusCondition(apis.Condition{ Type: apis.ConditionSucceeded, Status: corev1.ConditionUnknown, }))) d := test.Data{ TaskRuns: []*v1alpha1.TaskRun{taskRun}, Tasks: []*v1alpha1.Task{simpleTask}, } testAssets, cancel := getTaskRunController(t, d) defer cancel() c := testAssets.Controller clients := testAssets.Clients reconciler := c.Reconciler.(*Reconciler) fr := reconciler.Recorder.(*record.FakeRecorder) if err := reconciler.Reconcile(context.Background(), getRunName(taskRun)); err != nil { t.Fatalf("Unexpected error when reconciling completed TaskRun : %v", err) } newTr, err := clients.Pipeline.TektonV1alpha1().TaskRuns(taskRun.Namespace).Get(taskRun.Name, metav1.GetOptions{}) if err != nil { t.Fatalf("Expected completed TaskRun %s to exist but instead got error when getting it: %v", taskRun.Name, err) } expectedStatus := &apis.Condition{ Type: apis.ConditionSucceeded, Status: corev1.ConditionFalse, Reason: "TaskRunCancelled", Message: `TaskRun "test-taskrun-run-cancelled" was cancelled`, } if d := cmp.Diff(expectedStatus, newTr.Status.GetCondition(apis.ConditionSucceeded), ignoreLastTransitionTime); d != "" { t.Fatalf("Did not get expected condition (-want, +got): %v", d) } wantEvents := []string{ "Normal Started", "Warning Failed TaskRun \"test-taskrun-run-cancelled\" was cancelled", } err = checkEvents(fr, "test-reconcile-on-cancelled-taskrun", wantEvents) if !(err == nil) { t.Errorf(err.Error()) } }
explode_data.jsonl/883
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 673 }
[ 2830, 3393, 693, 40446, 457, 1925, 39473, 6262, 6727, 1155, 353, 8840, 836, 8, 341, 49115, 6727, 1669, 16363, 28258, 6727, 445, 1944, 52579, 6108, 22973, 85003, 832, 756, 197, 62842, 28258, 6727, 22699, 445, 7975, 4461, 197, 62842, 28258,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestDomains_EditRecordForDomainName verifies that Domains.EditRecord issues
// a PUT to /v2/domains/{domain}/records/{id} whose JSON body matches the edit
// request, and that the "domain_record" object in the response is decoded
// into the returned DomainRecord.
func TestDomains_EditRecordForDomainName(t *testing.T) {
	setup()
	defer teardown()

	// Payload the client is expected to serialize into the request body.
	editRequest := &DomainRecordEditRequest{
		Type:     "CNAME",
		Name:     "example",
		Data:     "@",
		Priority: 10,
		Port:     10,
		TTL:      1800,
		Weight:   10,
		Flags:    1,
		Tag:      "test",
	}

	mux.HandleFunc("/v2/domains/example.com/records/1", func(w http.ResponseWriter, r *http.Request) {
		// Decode and compare the body the client actually sent.
		v := new(DomainRecordEditRequest)
		err := json.NewDecoder(r.Body).Decode(v)
		if err != nil {
			t.Fatalf("decode json: %v", err)
		}
		testMethod(t, r, http.MethodPut)
		if !reflect.DeepEqual(v, editRequest) {
			t.Errorf("Request body = %+v, expected %+v", v, editRequest)
		}
		fmt.Fprintf(w, `{"domain_record": {"id":1, "type": "CNAME", "name": "example"}}`)
	})

	record, _, err := client.Domains.EditRecord(ctx, "example.com", 1, editRequest)
	if err != nil {
		t.Errorf("Domains.EditRecord returned error: %v", err)
	}

	expected := &DomainRecord{ID: 1, Type: "CNAME", Name: "example"}
	if !reflect.DeepEqual(record, expected) {
		t.Errorf("Domains.EditRecord returned %+v, expected %+v", record, expected)
	}
}
explode_data.jsonl/22679
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 74713, 66158, 6471, 2461, 13636, 675, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 86626, 1900, 1669, 609, 13636, 6471, 4036, 1900, 515, 197, 27725, 25, 257, 330, 34, 7535, 756, 197, 21297, 25, 257, 330, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAppNoHelpFlag(t *testing.T) { oldFlag := HelpFlag defer func() { HelpFlag = oldFlag }() HelpFlag = nil app := &App{Writer: ioutil.Discard} err := app.Run([]string{"test", "-h"}) if err != flag.ErrHelp { t.Errorf("expected error about missing help flag, but got: %s (%T)", err, err) } }
explode_data.jsonl/52578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 2164, 2753, 12689, 12135, 1155, 353, 8840, 836, 8, 341, 61828, 12135, 1669, 11479, 12135, 198, 16867, 2915, 368, 341, 197, 197, 12689, 12135, 284, 2310, 12135, 198, 197, 66816, 197, 12689, 12135, 284, 2092, 271, 28236, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSelectLastInsertIdInUnion executes a UNION query whose left arm selects
// last_insert_id() and expects the executor to reject it: the plan requires
// hashcode support for the VARCHAR result column, which is not implemented.
func TestSelectLastInsertIdInUnion(t *testing.T) {
	executor, _, _, _ := createLegacyExecutorEnv()
	// Normalization must be on so last_insert_id() is rewritten.
	executor.normalize = true
	sql := "select last_insert_id() as id union select id from user"
	_, err := executorExec(executor, sql, map[string]*querypb.BindVariable{})
	require.Error(t, err)
	assert.Contains(t, err.Error(), "types does not support hashcode yet: VARCHAR")
}
explode_data.jsonl/67393
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 3379, 5842, 13780, 764, 641, 32658, 1155, 353, 8840, 836, 8, 341, 67328, 4831, 11, 8358, 8358, 716, 1669, 1855, 77415, 25255, 14359, 741, 67328, 4831, 44657, 284, 830, 198, 30633, 1669, 330, 1742, 1537, 17678, 842, 368, 438,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInfo exercises the klogr Info path: it configures klog to write plain
// (header-less) output to a capture buffer, then checks the rendered
// key/value formatting for duplicate keys, odd-length key/value lists,
// plain error values, and error types implementing MarshalJSON.
//
// NOTE(review): the expectedOutput literals below appear to have had their
// internal newlines collapsed to spaces (extraction artifact) — confirm
// against the upstream klogr test before relying on exact spacing.
func TestInfo(t *testing.T) {
	// Route klog output through flags: verbose, no headers, no stderr.
	klog.InitFlags(nil)
	flag.CommandLine.Set("v", "10")
	flag.CommandLine.Set("skip_headers", "true")
	flag.CommandLine.Set("logtostderr", "false")
	flag.CommandLine.Set("alsologtostderr", "false")
	flag.Parse()

	tests := map[string]struct {
		klogr          logr.InfoLogger
		text           string
		keysAndValues  []interface{}
		expectedOutput string
	}{
		"should log with values passed to keysAndValues": {
			klogr:          New().V(0),
			text:           "test",
			keysAndValues:  []interface{}{"akey", "avalue"},
			expectedOutput: ` "msg"="test" "akey"="avalue" `,
		},
		"should not print duplicate keys with the same value": {
			klogr:          New().V(0),
			text:           "test",
			keysAndValues:  []interface{}{"akey", "avalue", "akey", "avalue"},
			expectedOutput: ` "msg"="test" "akey"="avalue" `,
		},
		"should only print the last duplicate key when the values are passed to Info": {
			klogr:          New().V(0),
			text:           "test",
			keysAndValues:  []interface{}{"akey", "avalue", "akey", "avalue2"},
			expectedOutput: ` "msg"="test" "akey"="avalue2" `,
		},
		"should only print the duplicate key that is passed to Info if one was passed to the logger": {
			klogr:          New().WithValues("akey", "avalue"),
			text:           "test",
			keysAndValues:  []interface{}{"akey", "avalue"},
			expectedOutput: ` "msg"="test" "akey"="avalue" `,
		},
		"should only print the key passed to Info when one is already set on the logger": {
			klogr:          New().WithValues("akey", "avalue"),
			text:           "test",
			keysAndValues:  []interface{}{"akey", "avalue2"},
			expectedOutput: ` "msg"="test" "akey"="avalue2" `,
		},
		"should correctly handle odd-numbers of KVs": {
			text:           "test",
			keysAndValues:  []interface{}{"akey", "avalue", "akey2"},
			expectedOutput: ` "msg"="test" "akey"="avalue" "akey2"=null `,
		},
		"should correctly handle odd-numbers of KVs in both log values and Info args": {
			klogr:          New().WithValues("basekey1", "basevar1", "basekey2"),
			text:           "test",
			keysAndValues:  []interface{}{"akey", "avalue", "akey2"},
			expectedOutput: ` "msg"="test" "basekey1"="basevar1" "basekey2"=null "akey"="avalue" "akey2"=null `,
		},
		"should correctly print regular error types": {
			klogr:          New().V(0),
			text:           "test",
			keysAndValues:  []interface{}{"err", errors.New("whoops")},
			expectedOutput: ` "msg"="test" "err"="whoops" `,
		},
		"should use MarshalJSON if an error type implements it": {
			klogr:          New().V(0),
			text:           "test",
			keysAndValues:  []interface{}{"err", &customErrorJSON{"whoops"}},
			expectedOutput: ` "msg"="test" "err"="WHOOPS" `,
		},
	}
	for n, test := range tests {
		t.Run(n, func(t *testing.T) {
			klogr := test.klogr
			if klogr == nil {
				klogr = New()
			}

			// hijack the klog output
			tmpWriteBuffer := bytes.NewBuffer(nil)
			klog.SetOutput(tmpWriteBuffer)

			klogr.Info(test.text, test.keysAndValues...)

			// call Flush to ensure the text isn't still buffered
			klog.Flush()

			actual := tmpWriteBuffer.String()
			if actual != test.expectedOutput {
				t.Errorf("expected %q did not match actual %q", test.expectedOutput, actual)
			}
		})
	}
}
explode_data.jsonl/50109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1416 }
[ 2830, 3393, 1731, 1155, 353, 8840, 836, 8, 341, 16463, 839, 26849, 9195, 27907, 340, 30589, 12714, 2460, 4202, 445, 85, 497, 330, 16, 15, 1138, 30589, 12714, 2460, 4202, 445, 20599, 26719, 497, 330, 1866, 1138, 30589, 12714, 2460, 4202,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestGetTemplatesRequest_From checks that the From() builder value is sent
// as the "date_from" form field, formatted with layout "2006-01-02 15:04:05",
// by capturing the form value inside a stubbed HTTP transport.
//
// NOTE(review): time.Time values are compared with == here; time.Equal is
// the usual way to compare instants — this works only if RandomTime yields a
// wall-clock value that round-trips exactly through the layout. Confirm.
func TestGetTemplatesRequest_From(t *testing.T) {
	expectedFrom := test.RandomTime(12, 365)
	var givenFrom time.Time
	expectedResult := randomGetTemplatesResult()
	req := test.NewRequest(func(req *http.Request) (res *http.Response, err error) {
		// Capture the date_from value the client actually sent.
		givenFrom, _ = time.Parse("2006-01-02 15:04:05", req.FormValue("date_from"))
		result := api.Response{
			Result: expectedResult,
		}
		response, _ := json.Marshal(&result)
		return &http.Response{
			StatusCode: http.StatusOK,
			Body:       ioutil.NopCloser(bytes.NewBuffer(response)),
		}, nil
	})
	_, err := messages.GetTemplates(req).
		From(expectedFrom).
		Execute()
	if err != nil {
		t.Fatalf(`Error should be nil, "%s" given`, err.Error())
	}
	if expectedFrom != givenFrom {
		t.Fatalf(`From should be "%s", "%s" given`, expectedFrom, givenFrom)
	}
}
explode_data.jsonl/54165
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 1949, 51195, 1900, 53157, 1155, 353, 8840, 836, 8, 341, 42400, 3830, 1669, 1273, 26709, 1462, 7, 16, 17, 11, 220, 18, 21, 20, 340, 2405, 2661, 3830, 882, 16299, 271, 42400, 2077, 1669, 4194, 1949, 51195, 2077, 2822, 24395,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWriteCSRFilesIfNotExist covers the three behaviors of
// writeCSRFilesIfNotExist: it writes a fresh CSR/key pair when none exist,
// keeps an already-present pair untouched, and fails when the existing
// "dummy.csr" file is unparsable. Fingerprints (SHA-256 of the raw CSR) are
// compared to decide which CSR ended up on disk.
func TestWriteCSRFilesIfNotExist(t *testing.T) {
	// Two distinct CSR/key pairs: csr is the candidate to write, csr2 plays
	// the role of a pre-existing pair that must be preserved.
	csr, key := createTestCSR(t)
	csr2, key2 := createTestCSR(t)

	var tests = []struct {
		name          string
		setupFunc     func(csrPath string) error // prepares the temp dir before the call under test
		expectedError bool
		expectedCSR   *x509.CertificateRequest // which CSR should be on disk afterwards
	}{
		{
			name:        "no files exist",
			expectedCSR: csr,
		},
		{
			name: "other key exists",
			setupFunc: func(csrPath string) error {
				if err := pkiutil.WriteCSR(csrPath, "dummy", csr2); err != nil {
					return err
				}
				return pkiutil.WriteKey(csrPath, "dummy", key2)
			},
			expectedCSR: csr2,
		},
		{
			name: "existing CSR is garbage",
			setupFunc: func(csrPath string) error {
				return ioutil.WriteFile(path.Join(csrPath, "dummy.csr"), []byte("a--bunch--of-garbage"), os.ModePerm)
			},
			expectedError: true,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			tmpdir := testutil.SetupTempDir(t)
			defer os.RemoveAll(tmpdir)

			if test.setupFunc != nil {
				if err := test.setupFunc(tmpdir); err != nil {
					t.Fatalf("couldn't set up test: %v", err)
				}
			}

			if err := writeCSRFilesIfNotExist(tmpdir, "dummy", csr, key); err != nil {
				if test.expectedError {
					return
				}
				t.Fatalf("unexpected error %v: ", err)
			}

			if test.expectedError {
				t.Fatal("Expected error, but got none")
			}

			parsedCSR, _, err := pkiutil.TryLoadCSRAndKeyFromDisk(tmpdir, "dummy")
			if err != nil {
				t.Fatalf("couldn't load csr and key: %v", err)
			}

			// Compare fingerprints rather than struct equality.
			if sha256.Sum256(test.expectedCSR.Raw) != sha256.Sum256(parsedCSR.Raw) {
				t.Error("expected csr's fingerprint does not match ")
			}
		})
	}
}
explode_data.jsonl/60263
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 758 }
[ 2830, 3393, 7985, 70022, 10809, 2679, 45535, 1155, 353, 8840, 836, 8, 341, 1444, 15094, 11, 1376, 1669, 1855, 2271, 70022, 1155, 340, 1444, 15094, 17, 11, 1376, 17, 1669, 1855, 2271, 70022, 1155, 692, 2405, 7032, 284, 3056, 1235, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestSelectLastInsertId verifies that "select last_insert_id()" is answered
// from the session (masterSession.LastInsertId) as a single-row uint64
// result, without hitting a backend.
func TestSelectLastInsertId(t *testing.T) {
	// Seed the session value the query should echo back.
	masterSession.LastInsertId = 52
	executor, _, _, _ := createLegacyExecutorEnv()
	executor.normalize = true
	logChan := QueryLogger.Subscribe("Test")
	defer QueryLogger.Unsubscribe(logChan)

	sql := "select last_insert_id()"
	result, err := executorExec(executor, sql, map[string]*querypb.BindVariable{})
	wantResult := &sqltypes.Result{
		RowsAffected: 1,
		Fields: []*querypb.Field{
			{Name: "last_insert_id()", Type: sqltypes.Uint64},
		},
		Rows: [][]sqltypes.Value{{
			sqltypes.NewUint64(52),
		}},
	}
	require.NoError(t, err)
	utils.MustMatch(t, wantResult, result, "Mismatch")
}
explode_data.jsonl/67387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 3379, 5842, 13780, 764, 1155, 353, 8840, 836, 8, 341, 2109, 2300, 5283, 24682, 13780, 764, 284, 220, 20, 17, 198, 67328, 4831, 11, 8358, 8358, 716, 1669, 1855, 77415, 25255, 14359, 741, 67328, 4831, 44657, 284, 830, 198, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBatchConfirms builds a series of outgoing transaction batches, persists
// a MsgConfirmBatch for every (orchestrator, batch) pair, checks that
// SetBatchConfirm panics on an invalid orchestrator address or token
// contract, and finally reads every confirmation back via GetBatchConfirm.
func TestBatchConfirms(t *testing.T) {
	input := CreateTestEnv(t)
	ctx := input.Context
	var (
		now         = time.Now().UTC()
		mySender, _ = sdk.AccAddressFromBech32("gravity1ahx7f8wyertuus9r20284ej0asrs085ceqtfnm")
		myReceiver, _ = types.NewEthAddress("0xd041c41EA1bf0F006ADBb6d2c9ef9D425dE5eaD7")
		myTokenContractAddr, _ = types.NewEthAddress("0x429881672B9AE42b8EbA0E26cD9C73711b891Ca5") // Pickle
		token, err = types.NewInternalERC20Token(sdk.NewInt(1000000), myTokenContractAddr.GetAddress())
		allVouchers = sdk.NewCoins(token.GravityCoin())
	)
	require.NoError(t, err)

	// mint some voucher first
	require.NoError(t, input.BankKeeper.MintCoins(ctx, types.ModuleName, allVouchers))
	// set senders balance
	input.AccountKeeper.NewAccountWithAddress(ctx, mySender)
	require.NoError(t, input.BankKeeper.SendCoinsFromModuleToAccount(ctx, types.ModuleName, mySender, allVouchers))

	// when
	ctx = ctx.WithBlockTime(now)

	// add batches with 1 tx to the pool; amounts/fees vary with i so each
	// batch is distinct
	for i := 1; i < 200; i++ {
		amountToken, err := types.NewInternalERC20Token(sdk.NewInt(int64(i+100)), myTokenContractAddr.GetAddress())
		require.NoError(t, err)
		amount := amountToken.GravityCoin()
		feeToken, err := types.NewInternalERC20Token(sdk.NewIntFromUint64(uint64(i+10)), myTokenContractAddr.GetAddress())
		require.NoError(t, err)
		fee := feeToken.GravityCoin()
		// add tx to the pool
		_, err = input.GravityKeeper.AddToOutgoingPool(ctx, mySender, *myReceiver, amount, fee)
		require.NoError(t, err)
		ctx.Logger().Info(fmt.Sprintf("Created transaction %v with amount %v and fee %v", i, amount, fee))
		// create batch
		_, err = input.GravityKeeper.BuildOutgoingTXBatch(ctx, *myTokenContractAddr, 1)
		require.NoError(t, err)
	}
	outogoingBatches := input.GravityKeeper.GetOutgoingTxBatches(ctx)

	// persist confirmations for every (orchestrator, batch) pair
	for i, orch := range OrchAddrs {
		for _, batch := range outogoingBatches {
			ethAddr, err := types.NewEthAddress(EthAddrs[i].String())
			require.NoError(t, err)
			conf := &types.MsgConfirmBatch{
				Nonce:         batch.BatchNonce,
				TokenContract: batch.TokenContract.GetAddress(),
				EthSigner:     ethAddr.GetAddress(),
				Orchestrator:  orch.String(),
				Signature:     "dummysig",
			}
			input.GravityKeeper.SetBatchConfirm(ctx, conf)
		}
	}
	// try to set confirm with an invalid orchestrator address — must panic
	conf := &types.MsgConfirmBatch{
		Nonce:         outogoingBatches[0].BatchNonce,
		TokenContract: outogoingBatches[0].TokenContract.GetAddress(),
		EthSigner:     EthAddrs[0].String(),
		Orchestrator:  "invalid address",
		Signature:     "dummysig",
	}
	assert.Panics(t, func() { input.GravityKeeper.SetBatchConfirm(ctx, conf) })
	// try to set confirm with an invalid token contract — must panic
	conf = &types.MsgConfirmBatch{
		Nonce:         outogoingBatches[0].BatchNonce,
		TokenContract: "invalid token",
		EthSigner:     EthAddrs[0].String(),
		Orchestrator:  OrchAddrs[0].String(),
		Signature:     "dummysig",
	}
	assert.Panics(t, func() { input.GravityKeeper.SetBatchConfirm(ctx, conf) })

	// verify that confirms are persisted for each orchestrator address
	var batchConfirm *types.MsgConfirmBatch
	for _, batch := range outogoingBatches {
		for i, addr := range OrchAddrs {
			batchConfirm = input.GravityKeeper.GetBatchConfirm(ctx, batch.BatchNonce, batch.TokenContract, addr)
			require.Equal(t, batch.BatchNonce, batchConfirm.Nonce)
			require.Equal(t, batch.TokenContract.GetAddress(), batchConfirm.TokenContract)
			require.Equal(t, EthAddrs[i].String(), batchConfirm.EthSigner)
			require.Equal(t, addr.String(), batchConfirm.Orchestrator)
			require.Equal(t, "dummysig", batchConfirm.Signature)
		}
	}
}
explode_data.jsonl/21417
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1505 }
[ 2830, 3393, 21074, 15578, 14404, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 4230, 2271, 14359, 1155, 340, 20985, 1669, 1946, 9328, 198, 2405, 2399, 197, 80922, 503, 284, 882, 13244, 1005, 21183, 741, 197, 13624, 20381, 11, 716, 310, 284...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFind checks that the user service's Find exchanges the given access and
// refresh tokens: it stores them in the request context as an scm.Token
// (asserted by checkToken inside the mocked SCM call) and converts the
// returned scm.User into a core.User with Unix-second timestamps.
func TestFind(t *testing.T) {
	controller := gomock.NewController(t)
	defer controller.Finish()

	// Asserts that the token passed to Find was placed into the context.
	checkToken := func(ctx context.Context) {
		got, ok := ctx.Value(scm.TokenKey{}).(*scm.Token)
		if !ok {
			t.Errorf("Expect token stored in context")
			return
		}
		want := &scm.Token{
			Token:   "755bb80e5b",
			Refresh: "e08f3fa43e",
		}
		if diff := cmp.Diff(got, want); diff != "" {
			t.Errorf(diff)
		}
	}

	now := time.Now()
	mockUser := &scm.User{
		Login:   "octocat",
		Email:   "octocat@github.com",
		Avatar:  "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87",
		Created: now,
		Updated: now,
	}

	mockUsers := mockscm.NewMockUserService(controller)
	mockUsers.EXPECT().Find(gomock.Any()).Do(checkToken).Return(mockUser, nil, nil)

	client := new(scm.Client)
	client.Users = mockUsers

	// time.Time fields are expected to come back as Unix seconds.
	want := &core.User{
		Login:   "octocat",
		Email:   "octocat@github.com",
		Avatar:  "https://secure.gravatar.com/avatar/8c58a0be77ee441bb8f8595b7f1b4e87",
		Created: now.Unix(),
		Updated: now.Unix(),
	}
	got, err := New(client, nil).Find(noContext, "755bb80e5b", "e08f3fa43e")
	if err != nil {
		t.Error(err)
	}
	if diff := cmp.Diff(got, want); diff != "" {
		t.Errorf(diff)
	}
}
explode_data.jsonl/69029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 553 }
[ 2830, 3393, 9885, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 6461, 991, 18176, 2822, 25157, 3323, 1669, 2915, 7502, 2266, 9328, 8, 341, 197, 3174, 354, 11, 5394, 1669, 5635, 6167, 1141, 62...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_GenerateMarkdown_Returns_Err_From_Handler(t *testing.T) { bh := &mockMarkdownBlockHandler{} mockErr := errors.New("Mock Error") bh.On("GenerateMarkdown", mock.Anything).Return("", mockErr) editorJSData := `{"time": 1607709186831,"blocks": [{"type": "header","data": {"text": "Heading 1","level": 1}}],"version": "2.19.1"}` eng := &goeditorjs.MarkdownEngine{BlockHandlers: make(map[string]goeditorjs.MarkdownBlockHandler)} eng.BlockHandlers["header"] = bh _, err := eng.GenerateMarkdown(editorJSData) require.Error(t, err) require.Equal(t, mockErr, err) bh.AssertCalled(t, "GenerateMarkdown", mock.Anything) }
explode_data.jsonl/21336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 2646, 13220, 68005, 53316, 82, 93623, 53157, 41879, 1155, 353, 8840, 836, 8, 341, 2233, 71, 1669, 609, 16712, 68005, 4713, 3050, 16094, 77333, 7747, 1669, 5975, 7121, 445, 11571, 4600, 1138, 2233, 71, 8071, 445, 31115, 68005, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFxVerifyTransferWrongAmounts checks that Fx.VerifyTransfer rejects a
// transfer whose input amount (2) does not equal the output amount (1), even
// when the signature/ownership data is otherwise well-formed.
func TestFxVerifyTransferWrongAmounts(t *testing.T) {
	vm := testVM{}
	// Fixed clock so any time-dependent checks are deterministic.
	date := time.Date(2019, time.January, 19, 16, 25, 17, 3, time.UTC)
	vm.clock.Set(date)
	fx := Fx{}
	if err := fx.Initialize(&vm); err != nil {
		t.Fatal(err)
	}
	tx := &testTx{
		bytes: txBytes,
	}
	out := &TransferOutput{
		Amt:      1,
		Locktime: 0,
		OutputOwners: OutputOwners{
			Threshold: 1,
			Addrs: []ids.ShortID{
				ids.NewShortID(addrBytes),
			},
		},
	}
	// Input amount deliberately differs from the output amount above.
	in := &TransferInput{
		Amt: 2,
		Input: Input{
			SigIndices: []uint32{0},
		},
	}
	cred := &Credential{
		Sigs: [][crypto.SECP256K1RSigLen]byte{
			sigBytes,
		},
	}
	if err := fx.VerifyTransfer(tx, in, cred, out); err == nil {
		t.Fatalf("Should have errored due to different amounts")
	}
}
explode_data.jsonl/68845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 348 }
[ 2830, 3393, 81856, 32627, 21970, 29185, 10093, 82, 1155, 353, 8840, 836, 8, 341, 54879, 1669, 1273, 11187, 16094, 44086, 1669, 882, 8518, 7, 17, 15, 16, 24, 11, 882, 3503, 276, 3536, 11, 220, 16, 24, 11, 220, 16, 21, 11, 220, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPEXReactorReceive(t *testing.T) { r, book := createReactor(&PEXReactorConfig{}) defer teardownReactor(book) peer := p2p.CreateRandomPeer(false) // we have to send a request to receive responses r.RequestAddrs(peer) size := book.Size() addrs := []*p2p.NetAddress{peer.NodeInfo().NetAddress()} msg := cdc.MustMarshalBinaryBare(&pexAddrsMessage{Addrs: addrs}) r.Receive(PexChannel, peer, msg) assert.Equal(t, size+1, book.Size()) msg = cdc.MustMarshalBinaryBare(&pexRequestMessage{}) r.Receive(PexChannel, peer, msg) // should not panic. }
explode_data.jsonl/6778
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 1740, 55, 693, 5621, 14742, 1155, 353, 8840, 836, 8, 341, 7000, 11, 2311, 1669, 1855, 693, 5621, 2099, 1740, 55, 693, 5621, 2648, 37790, 16867, 49304, 693, 5621, 33130, 692, 197, 16537, 1669, 281, 17, 79, 7251, 13999, 3088...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_getSignature(t *testing.T) { type args struct { body []byte m []byte pk []byte } args1 := args{ []byte(`{"name":"case"`), []byte(`merchant`), []byte(`secret`), } tests := []struct { name string args args want string }{ { "working case", args1, "NTM5NmYzMjBjMTU5NDE3MjE1NTY1ZmIxODUwMjVhZjFhZjI3ZDBmNjgyY2NhZjY5ZTBjZGFmYzBmNGQ3NzEwOWM1ZmUxNGQ3ODU0ODRlNDI0Y2ZiNzkzY2I2ZjAyZmFiZTNhMzk2NjNlODVkYzllZDA4ZTVjZTFmN2ZmMWY2ZGQ=", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := getSignature(tt.args.body, tt.args.m, tt.args.pk); got != tt.want { t.Errorf("getSignature() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/59835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 410 }
[ 2830, 3393, 3062, 25088, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 35402, 3056, 3782, 198, 197, 2109, 262, 3056, 3782, 198, 197, 3223, 74, 256, 3056, 3782, 198, 197, 630, 31215, 16, 1669, 2827, 515, 197, 197, 1294, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_AddCycle creates a cycle ending seven days from now through the shared
// connection and checks that a positive id is returned.
//
// NOTE(review): this deliberately assigns the package-level cycleId and err
// (plain `=`, not `:=`) so later tests in the file can reuse the created
// cycle — it depends on test execution order.
func Test_AddCycle(t *testing.T) {
	// Cycle end date: one week from now, in local time.
	future := time.Now().Local().AddDate(0, 0, 7)
	cycleId, err = conn.AddCycle(&future)
	if err != nil {
		t.Fatal(err)
	}
	if cycleId < 1 {
		t.Fatal("Invalid cycle Id returned")
	}
	t.Logf("created cycle Id: %d", cycleId)
}
explode_data.jsonl/22001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 21346, 44820, 1155, 353, 8840, 836, 8, 341, 1166, 2976, 1669, 882, 13244, 1005, 7319, 1005, 2212, 1916, 7, 15, 11, 220, 15, 11, 220, 22, 340, 1444, 5449, 764, 11, 1848, 284, 4534, 1904, 44820, 2099, 21055, 340, 743, 1848...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEmailFail(t *testing.T) { config := email.Info{ Username: "", Password: "", Hostname: "127.0.0.1", Port: 25, From: "from@example.com", } err := config.Send("to@example.com", "Subject", "Body") if err == nil { t.Errorf("Expected an error: %v", err) } }
explode_data.jsonl/50511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 4781, 19524, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 2551, 20132, 515, 197, 197, 11115, 25, 8324, 197, 197, 4876, 25, 8324, 197, 197, 88839, 25, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 756, 197, 98459, 25, 257, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeliver1(t *testing.T) { tests := []fixture{ {"", 1}, {">", 2}, {">v<^", 4}, {"^v^v^v^v^v", 2}, {">>>>", 5}, {"<<<<", 5}, {"vvvv", 5}, {"^^^^", 5}, } driver(t, tests, 1) }
explode_data.jsonl/35932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 16532, 1524, 16, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 59612, 515, 197, 197, 4913, 497, 220, 16, 1583, 197, 197, 90, 755, 497, 220, 17, 1583, 197, 197, 90, 755, 85, 27, 61, 497, 220, 19, 1583, 197, 197, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProxyRequestContentLengthAndTransferEncoding(t *testing.T) { chunk := func(data []byte) []byte { out := &bytes.Buffer{} chunker := httputil.NewChunkedWriter(out) for _, b := range data { if _, err := chunker.Write([]byte{b}); err != nil { panic(err) } } chunker.Close() out.Write([]byte("\r\n")) return out.Bytes() } zip := func(data []byte) []byte { out := &bytes.Buffer{} zipper := gzip.NewWriter(out) if _, err := zipper.Write(data); err != nil { panic(err) } zipper.Close() return out.Bytes() } sampleData := []byte("abcde") table := map[string]struct { reqHeaders http.Header reqBody []byte expectedHeaders http.Header expectedBody []byte }{ "content-length": { reqHeaders: http.Header{ "Content-Length": []string{"5"}, }, reqBody: sampleData, expectedHeaders: http.Header{ "Content-Length": []string{"5"}, "Content-Encoding": nil, // none set "Transfer-Encoding": nil, // none set }, expectedBody: sampleData, }, "content-length + identity transfer-encoding": { reqHeaders: http.Header{ "Content-Length": []string{"5"}, "Transfer-Encoding": []string{"identity"}, }, reqBody: sampleData, expectedHeaders: http.Header{ "Content-Length": []string{"5"}, "Content-Encoding": nil, // none set "Transfer-Encoding": nil, // gets removed }, expectedBody: sampleData, }, "content-length + gzip content-encoding": { reqHeaders: http.Header{ "Content-Length": []string{strconv.Itoa(len(zip(sampleData)))}, "Content-Encoding": []string{"gzip"}, }, reqBody: zip(sampleData), expectedHeaders: http.Header{ "Content-Length": []string{strconv.Itoa(len(zip(sampleData)))}, "Content-Encoding": []string{"gzip"}, "Transfer-Encoding": nil, // none set }, expectedBody: zip(sampleData), }, "chunked transfer-encoding": { reqHeaders: http.Header{ "Transfer-Encoding": []string{"chunked"}, }, reqBody: chunk(sampleData), expectedHeaders: http.Header{ "Content-Length": nil, // none set "Content-Encoding": nil, // none set "Transfer-Encoding": nil, // Transfer-Encoding gets removed }, expectedBody: 
sampleData, // sample data is unchunked }, "chunked transfer-encoding + gzip content-encoding": { reqHeaders: http.Header{ "Content-Encoding": []string{"gzip"}, "Transfer-Encoding": []string{"chunked"}, }, reqBody: chunk(zip(sampleData)), expectedHeaders: http.Header{ "Content-Length": nil, // none set "Content-Encoding": []string{"gzip"}, "Transfer-Encoding": nil, // gets removed }, expectedBody: zip(sampleData), // sample data is unchunked, but content-encoding is preserved }, // "Transfer-Encoding: gzip" is not supported by go // See http/transfer.go#fixTransferEncoding (https://golang.org/src/net/http/transfer.go#L427) // Once it is supported, this test case should succeed // // "gzip+chunked transfer-encoding": { // reqHeaders: http.Header{ // "Transfer-Encoding": []string{"chunked,gzip"}, // }, // reqBody: chunk(zip(sampleData)), // // expectedHeaders: http.Header{ // "Content-Length": nil, // no content-length headers // "Transfer-Encoding": nil, // Transfer-Encoding gets removed // }, // expectedBody: sampleData, // }, } successfulResponse := "backend passed tests" for k, item := range table { // Start the downstream server downstreamServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { // Verify headers for header, v := range item.expectedHeaders { if !reflect.DeepEqual(v, req.Header[header]) { t.Errorf("%s: Expected headers for %s to be %v, got %v", k, header, v, req.Header[header]) } } // Read body body, err := ioutil.ReadAll(req.Body) if err != nil { t.Errorf("%s: unexpected error %v", k, err) } req.Body.Close() // Verify length if req.ContentLength > 0 && req.ContentLength != int64(len(body)) { t.Errorf("%s: ContentLength was %d, len(data) was %d", k, req.ContentLength, len(body)) } // Verify content if !bytes.Equal(item.expectedBody, body) { t.Errorf("%s: Expected %q, got %q", k, string(item.expectedBody), string(body)) } // Write successful response w.Write([]byte(successfulResponse)) })) defer 
downstreamServer.Close() responder := &fakeResponder{t: t} backendURL, _ := url.Parse(downstreamServer.URL) proxyHandler := &UpgradeAwareProxyHandler{ Location: backendURL, Responder: responder, UpgradeRequired: false, } proxyServer := httptest.NewServer(proxyHandler) defer proxyServer.Close() // Dial the proxy server conn, err := net.Dial(proxyServer.Listener.Addr().Network(), proxyServer.Listener.Addr().String()) if err != nil { t.Errorf("unexpected error %v", err) continue } defer conn.Close() // Add standard http 1.1 headers if item.reqHeaders == nil { item.reqHeaders = http.Header{} } item.reqHeaders.Add("Connection", "close") item.reqHeaders.Add("Host", proxyServer.Listener.Addr().String()) // Write the request headers if _, err := fmt.Fprint(conn, "POST / HTTP/1.1\r\n"); err != nil { t.Fatalf("%s unexpected error %v", k, err) } for header, values := range item.reqHeaders { for _, value := range values { if _, err := fmt.Fprintf(conn, "%s: %s\r\n", header, value); err != nil { t.Fatalf("%s: unexpected error %v", k, err) } } } // Header separator if _, err := fmt.Fprint(conn, "\r\n"); err != nil { t.Fatalf("%s: unexpected error %v", k, err) } // Body if _, err := conn.Write(item.reqBody); err != nil { t.Fatalf("%s: unexpected error %v", k, err) } // Read response response, err := ioutil.ReadAll(conn) if err != nil { t.Errorf("%s: unexpected error %v", k, err) continue } if !strings.HasSuffix(string(response), successfulResponse) { t.Errorf("%s: Did not get successful response: %s", k, string(response)) continue } } }
explode_data.jsonl/13576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2441 }
[ 2830, 3393, 16219, 1900, 2762, 4373, 3036, 21970, 14690, 1155, 353, 8840, 836, 8, 341, 23049, 3122, 1669, 2915, 2592, 3056, 3782, 8, 3056, 3782, 341, 197, 13967, 1669, 609, 9651, 22622, 16094, 197, 23049, 79929, 1669, 54320, 628, 321, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTRaft_Propose(t *testing.T) { lid := NewLeaderId bm := NewTailBitmap sendPropose := func(addr string, xcmd interface{}) *ProposeReply { cmd := toCmd(xcmd) var reply *ProposeReply rpcTo(addr, func(cli TRaftClient, ctx context.Context) { var err error reply, err = cli.Propose(ctx, cmd) if err != nil { lg.Infow("err:", "err", err) } }) return reply } withCluster(t, "invalidLeader", []int64{0, 1, 2}, func(t *testing.T, ts []*TRaft) { ta := require.New(t) ts[0].initTraft(lid(2, 0), lid(1, 1), []int64{}, nil, nil, lid(2, 0)) ts[1].initTraft(lid(3, 1), lid(1, 1), []int64{}, nil, nil, lid(3, 1)) ts[2].initTraft(lid(1, 2), lid(2, 1), []int64{}, nil, []int64{0}, lid(1, 2)) mems := ts[1].Config.Members // no leader elected, not allow to propose reply := sendPropose(mems[1].Addr, NewCmdI64("foo", "x", 1)) ta.Equal(&ProposeReply{ OK: false, Err: "vote expired", OtherLeader: nil, }, reply) // elect ts[1] go ts[1].ElectLoop() waitForMsg(ts, map[string]int{ "vote-win 004#001": 1, }) // send to non-leader replica: reply = sendPropose(mems[0].Addr, NewCmdI64("foo", "x", 1)) ta.Equal(&ProposeReply{ OK: false, Err: "I am not leader", OtherLeader: lid(4, 1)}, reply) }) withCluster(t, "succ", []int64{0, 1, 2}, func(t *testing.T, ts []*TRaft) { ta := require.New(t) ts[0].initTraft(lid(2, 0), lid(1, 1), []int64{}, nil, nil, lid(3, 0)) ts[1].initTraft(lid(3, 1), lid(1, 1), []int64{}, nil, nil, lid(3, 1)) ts[2].initTraft(lid(1, 2), lid(2, 1), []int64{}, nil, []int64{0}, lid(3, 2)) mems := ts[1].Config.Members // elect ts[1] go ts[1].ElectLoop() waitForMsg(ts, map[string]int{ "vote-win 004#001": 1, }) // TODO check state of other replicas // succ to propsoe reply := sendPropose(mems[1].Addr, "y=1") ta.Equal(&ProposeReply{OK: true}, reply) ta.Equal(bm(1), ts[1].Status[1].Accepted) ta.Equal(bm(1), ts[1].Status[1].Committed) ta.Equal( join("[<004#001:000{set(y, 1)}-0:1→0>", "]"), RecordsShortStr(ts[1].Logs, ""), ) reply = sendPropose(mems[1].Addr, "y=2") ta.Equal(&ProposeReply{OK: 
true, OtherLeader: nil}, reply) ta.Equal(bm(2), ts[1].Status[1].Accepted) ta.Equal(bm(2), ts[1].Status[1].Committed) ta.Equal( join("[<004#001:000{set(y, 1)}-0:1→0>", "<004#001:001{set(y, 2)}-0:3→0>", "]"), RecordsShortStr(ts[1].Logs, ""), ) reply = sendPropose(mems[1].Addr, "x=3") ta.Equal(&ProposeReply{OK: true, OtherLeader: nil}, reply) ta.Equal(bm(3), ts[1].Status[1].Accepted) ta.Equal( join("[<004#001:000{set(y, 1)}-0:1→0>", "<004#001:001{set(y, 2)}-0:3→0>", "<004#001:002{set(x, 3)}-0:4→0>", "]"), RecordsShortStr(ts[1].Logs, ""), ) }) }
explode_data.jsonl/17381
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1472 }
[ 2830, 3393, 2378, 64, 723, 1088, 887, 960, 1155, 353, 8840, 836, 8, 1476, 8810, 307, 1669, 1532, 52621, 764, 198, 2233, 76, 1669, 1532, 44795, 16773, 271, 32817, 2008, 960, 1669, 2915, 24497, 914, 11, 856, 8710, 3749, 28875, 353, 2008...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTypeOverflow(t *testing.T) { // Verifies data that doesn't fit into it: //{ // "test": 65536 <-- test defined as uint8 //} var overflowTypedData core.TypedData err := json.Unmarshal([]byte(jsonTypedData), &overflowTypedData) if err != nil { t.Fatalf("unmarshalling failed '%v'", err) } // Set test to something outside uint8 (overflowTypedData.Message["from"]).(map[string]interface{})["test"] = big.NewInt(65536) _, err = overflowTypedData.HashStruct(overflowTypedData.PrimaryType, overflowTypedData.Message) if err == nil || err.Error() != "integer larger than 'uint8'" { t.Fatalf("Expected `integer larger than 'uint8'`, got '%v'", err) } (overflowTypedData.Message["from"]).(map[string]interface{})["test"] = big.NewInt(3) (overflowTypedData.Message["to"]).(map[string]interface{})["test"] = big.NewInt(4) _, err = overflowTypedData.HashStruct(overflowTypedData.PrimaryType, overflowTypedData.Message) if err != nil { t.Fatalf("Expected no err, got '%v'", err) } }
explode_data.jsonl/30004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 372 }
[ 2830, 3393, 929, 42124, 1155, 353, 8840, 836, 8, 341, 197, 322, 6250, 9606, 821, 429, 3171, 944, 4946, 1119, 432, 510, 197, 47603, 197, 322, 197, 1, 1944, 788, 220, 21, 20, 20, 18, 21, 70386, 1273, 4512, 438, 2622, 23, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestImageProxySettingsSetDefaults(t *testing.T) { ss := ServiceSettings{ DEPRECATED_DO_NOT_USE_ImageProxyType: NewString(IMAGE_PROXY_TYPE_ATMOS_CAMO), DEPRECATED_DO_NOT_USE_ImageProxyURL: NewString("http://images.example.com"), DEPRECATED_DO_NOT_USE_ImageProxyOptions: NewString("1234abcd"), } t.Run("default, no old settings", func(t *testing.T) { ips := ImageProxySettings{} ips.SetDefaults(ServiceSettings{}) assert.Equal(t, false, *ips.Enable) assert.Equal(t, IMAGE_PROXY_TYPE_LOCAL, *ips.ImageProxyType) assert.Equal(t, "", *ips.RemoteImageProxyURL) assert.Equal(t, "", *ips.RemoteImageProxyOptions) }) t.Run("default, old settings", func(t *testing.T) { ips := ImageProxySettings{} ips.SetDefaults(ss) assert.Equal(t, true, *ips.Enable) assert.Equal(t, *ss.DEPRECATED_DO_NOT_USE_ImageProxyType, *ips.ImageProxyType) assert.Equal(t, *ss.DEPRECATED_DO_NOT_USE_ImageProxyURL, *ips.RemoteImageProxyURL) assert.Equal(t, *ss.DEPRECATED_DO_NOT_USE_ImageProxyOptions, *ips.RemoteImageProxyOptions) }) t.Run("not default, old settings", func(t *testing.T) { url := "http://images.mattermost.com" options := "aaaaaaaa" ips := ImageProxySettings{ Enable: NewBool(false), ImageProxyType: NewString(IMAGE_PROXY_TYPE_LOCAL), RemoteImageProxyURL: &url, RemoteImageProxyOptions: &options, } ips.SetDefaults(ss) assert.Equal(t, false, *ips.Enable) assert.Equal(t, IMAGE_PROXY_TYPE_LOCAL, *ips.ImageProxyType) assert.Equal(t, url, *ips.RemoteImageProxyURL) assert.Equal(t, options, *ips.RemoteImageProxyOptions) }) }
explode_data.jsonl/50699
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 666 }
[ 2830, 3393, 1906, 16219, 6086, 1649, 16273, 1155, 353, 8840, 836, 8, 341, 34472, 1669, 5362, 6086, 515, 197, 197, 1150, 57713, 21820, 9169, 22295, 45949, 16219, 929, 25, 262, 1532, 703, 7, 29926, 59065, 4189, 8667, 44, 3126, 39446, 46, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeactivate(t *testing.T) { user := &model.User{Id: model.NewId(), Roles: "system_user"} config := model.Config{} config.SetDefaults() config.ServiceSettings.EnableMultifactorAuthentication = model.NewBool(true) configService := testutils.StaticConfigService{Cfg: &config} t.Run("fail on disabled mfa", func(t *testing.T) { wrongConfig := model.Config{} wrongConfig.SetDefaults() wrongConfig.ServiceSettings.EnableMultifactorAuthentication = model.NewBool(false) wrongConfigService := testutils.StaticConfigService{Cfg: &wrongConfig} mfa := New(wrongConfigService, nil) err := mfa.Deactivate(user.Id) require.NotNil(t, err) require.Equal(t, "mfa.mfa_disabled.app_error", err.Id) }) t.Run("fail on store UpdateMfaActive action fail", func(t *testing.T) { storeMock := mocks.Store{} userStoreMock := mocks.UserStore{} userStoreMock.On("UpdateMfaActive", user.Id, false).Return(func(userId string, active bool) *model.AppError { return model.NewAppError("Deactivate", "mfa.deactivate.save_active.app_error", nil, "", http.StatusInternalServerError) }) userStoreMock.On("UpdateMfaSecret", user.Id, "").Return(func(userId string, secret string) *model.AppError { return model.NewAppError("Deactivate", "mfa.deactivate.save_secret.app_error", nil, "", http.StatusInternalServerError) }) storeMock.On("User").Return(&userStoreMock) mfa := New(configService, &storeMock) err := mfa.Deactivate(user.Id) require.NotNil(t, err) require.Equal(t, "mfa.deactivate.save_active.app_error", err.Id) }) t.Run("fail on store UpdateMfaSecret action fail", func(t *testing.T) { storeMock := mocks.Store{} userStoreMock := mocks.UserStore{} userStoreMock.On("UpdateMfaActive", user.Id, false).Return(func(userId string, active bool) *model.AppError { return nil }) userStoreMock.On("UpdateMfaSecret", user.Id, "").Return(func(userId string, secret string) *model.AppError { return model.NewAppError("Deactivate", "mfa.deactivate.save_secret.app_error", nil, "", http.StatusInternalServerError) }) 
storeMock.On("User").Return(&userStoreMock) mfa := New(configService, &storeMock) err := mfa.Deactivate(user.Id) require.NotNil(t, err) require.Equal(t, "mfa.deactivate.save_secret.app_error", err.Id) }) t.Run("Successful deactivate", func(t *testing.T) { storeMock := mocks.Store{} userStoreMock := mocks.UserStore{} userStoreMock.On("UpdateMfaActive", user.Id, false).Return(func(userId string, active bool) *model.AppError { return nil }) userStoreMock.On("UpdateMfaSecret", user.Id, "").Return(func(userId string, secret string) *model.AppError { return nil }) storeMock.On("User").Return(&userStoreMock) mfa := New(configService, &storeMock) err := mfa.Deactivate(user.Id) require.Nil(t, err) }) }
explode_data.jsonl/64311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1050 }
[ 2830, 3393, 1912, 16856, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 609, 2528, 7344, 90, 764, 25, 1614, 7121, 764, 1507, 50907, 25, 330, 8948, 3317, 63159, 25873, 1669, 1614, 10753, 16094, 25873, 4202, 16273, 741, 25873, 13860, 6086, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetLeasedJobIds(t *testing.T) { withRepository(func(r *RedisJobRepository) { addTestJob(t, r, "queue1") leasedJob1 := addLeasedJob(t, r, "queue1", "cluster1") leasedJob2 := addLeasedJob(t, r, "queue1", "cluster2") addTestJob(t, r, "queue2") addLeasedJob(t, r, "queue2", "cluster1") ids, e := r.GetLeasedJobIds("queue1") assert.Nil(t, e) assert.Equal(t, 2, len(ids)) idsSet := util.StringListToSet(ids) assert.True(t, idsSet[leasedJob1.Id]) assert.True(t, idsSet[leasedJob2.Id]) }) }
explode_data.jsonl/32053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 1949, 2304, 1475, 12245, 12701, 1155, 353, 8840, 836, 8, 341, 46948, 4624, 18552, 2601, 353, 48137, 12245, 4624, 8, 341, 197, 12718, 2271, 12245, 1155, 11, 435, 11, 330, 4584, 16, 1138, 197, 197, 4673, 12245, 16, 1669, 912...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApp(t *testing.T) { appConfigOption := entity.AppConfigOption{ Plugins: []string{"oauth"}, Port: 1304, ProxyHost: "www.local.host", } appConfigOption.Authorization.Username = "altair" appConfigOption.Authorization.Password = "secret" appConfigOption.Metric.Interface = "prometheus" t.Run("Compile", func(t *testing.T) { t.Run("Given config path", func(t *testing.T) { t.Run("Normal scenario", func(t *testing.T) { t.Run("Return app config", func(t *testing.T) { configPath := "./app_normal/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigNormal, fileName, 0666) expectedAppConfig := entity.NewAppConfig(appConfigOption) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.Nil(t, err) assert.Equal(t, expectedAppConfig.Plugins(), appConfig.Plugins()) assert.Equal(t, expectedAppConfig.Port(), appConfig.Port()) assert.Equal(t, expectedAppConfig.ProxyHost(), appConfig.ProxyHost()) assert.Equal(t, expectedAppConfig.BasicAuthPassword(), appConfig.BasicAuthPassword()) assert.Equal(t, expectedAppConfig.BasicAuthUsername(), appConfig.BasicAuthUsername()) assert.Equal(t, expectedAppConfig.Metric().Interface(), appConfig.Metric().Interface()) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("With custom port", func(t *testing.T) { appConfigOption := entity.AppConfigOption{ Plugins: []string{"oauth"}, Port: 7001, ProxyHost: "www.local.host", } appConfigOption.Authorization.Username = "altair" appConfigOption.Authorization.Password = "secret" t.Run("Return app config", func(t *testing.T) { configPath := "./app_with_custom_port/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigWithCustomPort, fileName, 0666) expectedAppConfig := entity.NewAppConfig(appConfigOption) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.Nil(t, err) assert.Equal(t, expectedAppConfig.Plugins(), appConfig.Plugins()) assert.Equal(t, expectedAppConfig.Port(), 
appConfig.Port()) assert.Equal(t, expectedAppConfig.ProxyHost(), appConfig.ProxyHost()) assert.Equal(t, expectedAppConfig.BasicAuthPassword(), appConfig.BasicAuthPassword()) assert.Equal(t, expectedAppConfig.BasicAuthUsername(), appConfig.BasicAuthUsername()) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("With custom proxy host", func(t *testing.T) { appConfigOption := entity.AppConfigOption{ Plugins: []string{"oauth"}, Port: 1304, ProxyHost: "www.altair.id", } appConfigOption.Authorization.Username = "altair" appConfigOption.Authorization.Password = "secret" t.Run("Return app config", func(t *testing.T) { configPath := "./app_with_custom_proxy_host/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigWithCustomProxyHost, fileName, 0666) expectedAppConfig := entity.NewAppConfig(appConfigOption) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.Nil(t, err) assert.Equal(t, expectedAppConfig.Plugins(), appConfig.Plugins()) assert.Equal(t, expectedAppConfig.Port(), appConfig.Port()) assert.Equal(t, expectedAppConfig.ProxyHost(), appConfig.ProxyHost()) assert.Equal(t, expectedAppConfig.BasicAuthPassword(), appConfig.BasicAuthPassword()) assert.Equal(t, expectedAppConfig.BasicAuthUsername(), appConfig.BasicAuthUsername()) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("Empty authorization username", func(t *testing.T) { t.Run("Return error", func(t *testing.T) { configPath := "./app_empty_username/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigAuthUsernameEmpty, fileName, 0666) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.NotNil(t, err) assert.Nil(t, appConfig) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("Empty authorization password", func(t *testing.T) { t.Run("Return error", func(t *testing.T) { configPath := "./app_empty_password/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, 
AppConfigAuthPasswordEmpty, fileName, 0666) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.NotNil(t, err) assert.Nil(t, appConfig) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("Invalid custom port", func(t *testing.T) { t.Run("Return error", func(t *testing.T) { configPath := "./app_invalid_custom_port/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigWithInvalidCustomPort, fileName, 0666) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.NotNil(t, err) assert.Nil(t, appConfig) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("File not found", func(t *testing.T) { t.Run("Return error", func(t *testing.T) { configPath := "./app_not_found/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigNormal, fileName, 0666) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, "should_be_not_found_yml")) assert.NotNil(t, err) assert.Nil(t, appConfig) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("Template error", func(t *testing.T) { t.Run("Return error", func(t *testing.T) { configPath := "./app_template_error/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigTemplateError, fileName, 0666) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.NotNil(t, err) assert.Nil(t, appConfig) testhelper.RemoveTempTestFiles(configPath) }) }) t.Run("Unmarshal failed", func(t *testing.T) { t.Run("Return error", func(t *testing.T) { configPath := "./app_unmarshal_failed/" fileName := "app.yml" testhelper.GenerateTempTestFiles(configPath, AppConfigUnmarshalError, fileName, 0666) appConfig, err := loader.App().Compile(fmt.Sprintf("%s%s", configPath, fileName)) assert.NotNil(t, err) assert.Nil(t, appConfig) testhelper.RemoveTempTestFiles(configPath) }) }) }) }) }
explode_data.jsonl/55804
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2644 }
[ 2830, 3393, 2164, 1155, 353, 8840, 836, 8, 341, 28236, 2648, 5341, 1669, 5387, 5105, 2648, 5341, 515, 197, 197, 45378, 25, 256, 3056, 917, 4913, 34363, 7115, 197, 98459, 25, 414, 220, 16, 18, 15, 19, 345, 197, 197, 16219, 9296, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScoreDealerHoleCard(t *testing.T) { type testCase struct { card cards.Card score int description string } tcs := []testCase{ {card: cards.Card{Rank: cards.Ace, Suit: cards.Club}, score: 11, description: "Ace"}, {card: cards.Card{Rank: cards.King, Suit: cards.Club}, score: 10, description: "King"}, {card: cards.Card{Rank: cards.Three, Suit: cards.Club}, score: 3, description: "Three"}, } for _, tc := range tcs { want := tc.score got := blackjack.ScoreDealerHoleCard(tc.card) if want != got { t.Fatalf("wanted: %d, got: %d", want, got) } } }
explode_data.jsonl/5930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 10570, 93909, 39, 1263, 5770, 1155, 353, 8840, 836, 8, 1476, 13158, 54452, 2036, 341, 197, 80084, 286, 7411, 48613, 198, 197, 60425, 981, 526, 198, 197, 42407, 914, 198, 197, 532, 3244, 4837, 1669, 3056, 66194, 515, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNullValuesWithFirstOrCreate(t *testing.T) { var nv1 = NullValue{ Name: sql.NullString{String: "first_or_create", Valid: true}, Gender: &sql.NullString{String: "M", Valid: true}, } var nv2 NullValue result := DB.Where(nv1).FirstOrCreate(&nv2) if result.RowsAffected != 1 { t.Errorf("RowsAffected should be 1 after create some record") } if result.Error != nil { t.Errorf("Should not raise any error, but got %v", result.Error) } if nv2.Name.String != "first_or_create" || nv2.Gender.String != "M" { t.Errorf("first or create with nullvalues") } if err := DB.Where(nv1).Assign(NullValue{Age: sql.NullInt64{Int64: 18, Valid: true}}).FirstOrCreate(&nv2).Error; err != nil { t.Errorf("Should not raise any error, but got %v", err) } if nv2.Age.Int64 != 18 { t.Errorf("should update age to 18") } }
explode_data.jsonl/28041
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 3280, 6227, 2354, 5338, 57111, 1155, 353, 8840, 836, 8, 341, 2405, 31440, 16, 284, 18084, 1130, 515, 197, 21297, 25, 256, 5704, 23979, 703, 90, 703, 25, 330, 3896, 8734, 8657, 497, 7818, 25, 830, 1583, 197, 9600, 1659, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestNewGossipStateProvider_SendingManyMessages(t *testing.T) { t.Parallel() bootstrapSetSize := 5 bootstrapSet := make([]*peerNode, 0) var bootPorts []int for i := 0; i < bootstrapSetSize; i++ { commit := newCommitter() bootPeer, bootPort := newBootNode(i, commit, noopPeerIdentityAcceptor) bootstrapSet = append(bootstrapSet, bootPeer) bootPorts = append(bootPorts, bootPort) } defer func() { for _, p := range bootstrapSet { p.shutdown() } }() msgCount := 10 for i := 1; i <= msgCount; i++ { rawblock := pcomm.NewBlock(uint64(i), []byte{}) if b, err := pb.Marshal(rawblock); err == nil { payload := &proto.Payload{ SeqNum: uint64(i), Data: b, } bootstrapSet[0].s.AddPayload(payload) } else { t.Fail() } } standartPeersSize := 10 peersSet := make([]*peerNode, 0) for i := 0; i < standartPeersSize; i++ { commit := newCommitter() peersSet = append(peersSet, newPeerNode(bootstrapSetSize+i, commit, noopPeerIdentityAcceptor, bootPorts...)) } defer func() { for _, p := range peersSet { p.shutdown() } }() waitUntilTrueOrTimeout(t, func() bool { for _, p := range peersSet { if len(p.g.PeersOfChannel(common.ChainID(util.GetTestChainID()))) != bootstrapSetSize+standartPeersSize-1 { t.Log("Peer discovery has not finished yet") return false } } t.Log("All peer discovered each other!!!") return true }, 30*time.Second) t.Log("Waiting for all blocks to arrive.") waitUntilTrueOrTimeout(t, func() bool { t.Log("Trying to see all peers get all blocks") for _, p := range peersSet { height, err := p.commit.LedgerHeight() if height != uint64(msgCount+1) || err != nil { return false } } t.Log("All peers have same ledger height!!!") return true }, 60*time.Second) }
explode_data.jsonl/5644
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 735 }
[ 2830, 3393, 3564, 38, 41473, 1397, 5179, 1098, 2459, 8441, 15820, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6281, 1649, 1695, 1669, 220, 20, 198, 197, 6281, 1649, 1669, 1281, 85288, 16537, 1955, 11, 220, 15, 692, 2405...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStateBadProposal(t *testing.T) { config := configSetup(t) ctx, cancel := context.WithCancel(context.Background()) defer cancel() cs1, vss := makeState(ctx, t, makeStateArgs{config: config, validators: 2}) height, round := cs1.Height, cs1.Round vs2 := vss[1] partSize := types.BlockPartSizeBytes proposalCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryCompleteProposal) voteCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryVote) propBlock, err := cs1.createProposalBlock(ctx) // changeProposer(t, cs1, vs2) require.NoError(t, err) // make the second validator the proposer by incrementing round round++ incrementRound(vss[1:]...) // make the block bad by tampering with statehash stateHash := propBlock.AppHash if len(stateHash) == 0 { stateHash = make([]byte, 32) } stateHash[0] = (stateHash[0] + 1) % 255 propBlock.AppHash = stateHash propBlockParts, err := propBlock.MakePartSet(partSize) require.NoError(t, err) blockID := types.BlockID{Hash: propBlock.Hash(), PartSetHeader: propBlockParts.Header()} proposal := types.NewProposal(vs2.Height, round, -1, blockID, propBlock.Header.Time) p := proposal.ToProto() err = vs2.SignProposal(ctx, config.ChainID(), p) require.NoError(t, err) proposal.Signature = p.Signature // set the proposal block err = cs1.SetProposalAndBlock(ctx, proposal, propBlock, propBlockParts, "some peer") require.NoError(t, err) // start the machine startTestRound(ctx, cs1, height, round) // wait for proposal ensureProposal(t, proposalCh, height, round, blockID) // wait for prevote ensurePrevoteMatch(t, voteCh, height, round, nil) // add bad prevote from vs2 and wait for it signAddVotes(ctx, t, cs1, tmproto.PrevoteType, config.ChainID(), blockID, vs2) ensurePrevote(t, voteCh, height, round) // wait for precommit ensurePrecommit(t, voteCh, height, round) validatePrecommit(ctx, t, cs1, round, -1, vss[0], nil, nil) signAddVotes(ctx, t, cs1, tmproto.PrecommitType, config.ChainID(), blockID, vs2) }
explode_data.jsonl/54259
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 718 }
[ 2830, 3393, 1397, 17082, 98637, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 2193, 21821, 1155, 340, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 71899, 16, 11, 348, 778, 1669, 1281, 1397, 7502, 11, 259,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestImageRef_OverSizedMetadata(t *testing.T) { Startup(nil) srcBytes, err := ioutil.ReadFile(resources + "png-bad-metadata.png") require.NoError(t, err) src := bytes.NewReader(srcBytes) img, err := NewImageFromReader(src) assert.NoError(t, err) assert.NotNil(t, img) }
explode_data.jsonl/38816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 1906, 3945, 62, 1918, 50, 1506, 14610, 1155, 353, 8840, 836, 8, 341, 197, 39076, 27907, 692, 41144, 7078, 11, 1848, 1669, 43144, 78976, 52607, 488, 330, 14066, 1455, 329, 96431, 3508, 1138, 17957, 35699, 1155, 11, 1848, 692,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecodeAssuredCallStatus(t *testing.T) { decoded := false expected := &Call{ Path: "test/assured", StatusCode: http.StatusForbidden, Method: http.MethodGet, Headers: map[string]string{"Assured-Status": "403"}, Query: map[string]string{}, } testDecode := func(resp http.ResponseWriter, req *http.Request) { c, err := decodeAssuredCall(ctx, req) require.NoError(t, err) require.Equal(t, expected, c) decoded = true } req, err := http.NewRequest(http.MethodGet, "/given/test/assured", nil) require.NoError(t, err) req.Header.Set("Assured-Status", "403") router := mux.NewRouter() router.HandleFunc("/given/{path:.*}", testDecode).Methods(http.MethodGet) resp := httptest.NewRecorder() router.ServeHTTP(resp, req) require.True(t, decoded, "decode method was not hit") }
explode_data.jsonl/20255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 328 }
[ 2830, 3393, 32564, 5615, 3073, 7220, 2522, 1155, 353, 8840, 836, 8, 341, 197, 62913, 1669, 895, 198, 42400, 1669, 609, 7220, 515, 197, 69640, 25, 981, 330, 1944, 14, 395, 3073, 756, 197, 197, 15872, 25, 1758, 10538, 69115, 345, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGitignore_String(t *testing.T) { v := Gitignore{ Name: String(""), Source: String(""), } want := `github.Gitignore{Name:"", Source:""}` if got := v.String(); got != want { t.Errorf("Gitignore.String = %v, want %v", got, want) } }
explode_data.jsonl/33243
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 46562, 13130, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 21120, 13130, 515, 197, 21297, 25, 256, 923, 445, 4461, 197, 197, 3608, 25, 923, 445, 4461, 197, 532, 50780, 1669, 1565, 5204, 1224, 275, 13130, 63121, 83131, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIterator(t *testing.T) { env := NewTestVDBEnv(t) env.Cleanup("testiterator_") env.Cleanup("testiterator_ns1") env.Cleanup("testiterator_ns2") env.Cleanup("testiterator_ns3") defer env.Cleanup("testiterator_") defer env.Cleanup("testiterator_ns1") defer env.Cleanup("testiterator_ns2") defer env.Cleanup("testiterator_ns3") commontests.TestIterator(t, env.DBProvider) }
explode_data.jsonl/593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 11951, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 1532, 2271, 53, 3506, 14359, 1155, 340, 57538, 727, 60639, 445, 1944, 6854, 62, 1138, 57538, 727, 60639, 445, 1944, 6854, 34728, 16, 1138, 57538, 727, 60639, 445, 1944, 6854,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAccSecretAclResource is an acceptance test for the secret ACL
// resource: step 1 creates the ACL and verifies remote + local state;
// step 2 deletes the ACL out-of-band and re-applies the same config to
// check that it is recreated. Skipped unless CLOUD_ENV is set.
func TestAccSecretAclResource(t *testing.T) {
	// TODO: refactor for common instance pool & AZ CLI
	if _, ok := os.LookupEnv("CLOUD_ENV"); !ok {
		t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set")
	}
	//var secretScope Secre
	var secretACL ACLItem
	// Random scope name so parallel runs don't collide.
	scope := fmt.Sprintf("tf-scope-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
	principal := "users"
	permission := "READ"
	acceptance.AccTest(t, resource.TestCase{
		CheckDestroy: testSecretACLResourceDestroy,
		Steps: []resource.TestStep{
			{
				// use a dynamic configuration with the random name from above
				Config: testSecretACLResource(scope, principal, permission),
				// compose a basic test, checking both remote and local values
				Check: resource.ComposeTestCheckFunc(
					// query the API to retrieve the tokenInfo object
					testSecretACLResourceExists("databricks_secret_acl.my_secret_acl", &secretACL, t),
					// verify remote values
					testSecretACLValues(t, &secretACL, permission, principal),
					// verify local values
					resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "scope", scope),
					resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "principal", principal),
					resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "permission", permission),
				),
			},
			{
				// Delete the ACL behind Terraform's back, then re-apply the
				// same config: the provider must detect and recreate it.
				PreConfig: func() {
					client := common.CommonEnvironmentClient()
					err := NewSecretAclsAPI(client).Delete(scope, principal)
					assert.NoError(t, err, err)
				},
				// use a dynamic configuration with the random name from above
				Config: testSecretACLResource(scope, principal, permission),
				// compose a basic test, checking both remote and local values
				Check: resource.ComposeTestCheckFunc(
					// query the API to retrieve the tokenInfo object
					testSecretACLResourceExists("databricks_secret_acl.my_secret_acl", &secretACL, t),
					// verify remote values
					testSecretACLValues(t, &secretACL, permission, principal),
					// verify local values
					resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "scope", scope),
					resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "principal", principal),
					resource.TestCheckResourceAttr("databricks_secret_acl.my_secret_acl", "permission", permission),
				),
			},
		},
	})
}
explode_data.jsonl/35234
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 831 }
[ 2830, 3393, 14603, 19773, 32, 564, 4783, 1155, 353, 8840, 836, 8, 341, 197, 322, 5343, 25, 92295, 369, 4185, 2867, 7314, 609, 30876, 39277, 198, 743, 8358, 5394, 1669, 2643, 79261, 14359, 445, 34, 47645, 22027, 5038, 753, 562, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDSNParser checks DSN parsing for three cases: a malformed DSN, a
// URL-style TCP address, and a key=value-style unix-socket address.
func TestDSNParser(t *testing.T) {
	testcases := []struct {
		name    string
		dsn     string
		connCfg splunksql.ConnectionConfig // expected parse result
		errStr  string                     // expected error message, "" for success
	}{
		{
			name:   "invalid dsn",
			dsn:    "invalid dsn",
			errStr: "missing \"=\" after \"invalid\" in connection info string\"",
		},
		{
			name: "url: tcp address",
			dsn:  "postgres://user:password@localhost:8080/testdb",
			connCfg: splunksql.ConnectionConfig{
				Name:             "testdb",
				ConnectionString: "dbname=testdb host=localhost port=8080 user=user",
				User:             "user",
				Host:             "localhost",
				Port:             8080,
				NetTransport:     splunksql.NetTransportTCP,
			},
		},
		{
			// Host starting with "/" selects the unix transport and the
			// default port 5432 is filled in.
			name: "params: unix socket",
			dsn:  "user=user password=password host=/tmp/pgdb dbname=testdb",
			connCfg: splunksql.ConnectionConfig{
				Name:             "testdb",
				ConnectionString: "dbname=testdb host=/tmp/pgdb port=5432 user=user",
				User:             "user",
				Host:             "/tmp/pgdb",
				Port:             5432,
				NetTransport:     splunksql.NetTransportUnix,
			},
		},
	}

	for _, tc := range testcases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := splunkpq.DSNParser(tc.dsn)
			if tc.errStr != "" {
				assert.EqualError(t, err, tc.errStr)
			} else {
				assert.NoError(t, err)
			}
			assert.Equal(t, tc.connCfg, got)
		})
	}
}
explode_data.jsonl/59976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 674 }
[ 2830, 3393, 5936, 45, 6570, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 2698, 9613, 257, 914, 198, 197, 32917, 42467, 12503, 15296, 1470, 17463, 2648, 198, 197, 9859, 2580, 220, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPostUnmarshallable(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandlePost)) defer srv.Close() type ft func() var f ft url := "http://" + srv.Listener.Addr().String() res := structType{} payload := f _, err := Post(url, &payload, &res, nil) assert.NotEqual(t, nil, err) _, ok := err.(*json.UnsupportedTypeError) if !ok { t.Log(err) t.Error("Expected json.UnsupportedTypeError") } }
explode_data.jsonl/14813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 4133, 1806, 84161, 480, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 90832, 4133, 1171, 16867, 43578, 10421, 741, 13158, 10482, 2915, 741, 2405, 282, 10482, 198, 19320, 1669, 330, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_ErrConnection(t *testing.T) { testdata := map[string]net.Addr{ "local": &testaddr{ tcp: true, addr: "127.0.0.1:8080", }, "nil": nil, } for name, test := range testdata { t.Run(name, func(t *testing.T) { err := disconnected(test) if err == nil { t.Error("expected error") } if err.Error() != "disconnected" { t.Error("expected error message to be 'disconnected'") } e, ok := err.(*ErrConnection) if !ok { t.Error("expected error to be of type ErrConnection") } if e.Addr != test { t.Error("expected error to have Addr set to test") } }) } }
explode_data.jsonl/51607
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 93623, 4526, 1155, 353, 8840, 836, 8, 341, 18185, 691, 1669, 2415, 14032, 60, 4711, 93626, 515, 197, 197, 1, 2438, 788, 609, 1944, 6214, 515, 298, 3244, 4672, 25, 220, 830, 345, 298, 53183, 25, 330, 16, 17, 22, 13, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestClient_SecureConfig checks plugin-binary checksum verification:
// a client configured with a bogus checksum must fail with
// ErrChecksumsDoNotMatch, while the real SHA-256 of the test binary
// must be accepted.
func TestClient_SecureConfig(t *testing.T) {
	// Test failure case
	secureConfig := &SecureConfig{
		Checksum: []byte{'1'}, // deliberately wrong checksum
		Hash:     sha256.New(),
	}
	process := helperProcess("test-interface")
	c := NewClient(&ClientConfig{
		Cmd:             process,
		HandshakeConfig: testHandshake,
		Plugins:         testPluginMap,
		SecureConfig:    secureConfig,
	})

	// Grab the RPC client, should error
	_, err := c.Client()
	c.Kill()
	if err != ErrChecksumsDoNotMatch {
		t.Fatalf("err should be %s, got %s", ErrChecksumsDoNotMatch, err)
	}

	// Get the checksum of the executable (the test binary itself is
	// re-invoked as the plugin process by helperProcess).
	file, err := os.Open(os.Args[0])
	if err != nil {
		t.Fatal(err)
	}
	defer file.Close()

	hash := sha256.New()
	_, err = io.Copy(hash, file)
	if err != nil {
		t.Fatal(err)
	}

	sum := hash.Sum(nil)
	secureConfig = &SecureConfig{
		Checksum: sum,
		Hash:     sha256.New(),
	}

	process = helperProcess("test-interface")
	c = NewClient(&ClientConfig{
		Cmd:             process,
		HandshakeConfig: testHandshake,
		Plugins:         testPluginMap,
		SecureConfig:    secureConfig,
	})
	defer c.Kill()

	// Grab the RPC client
	_, err = c.Client()
	if err != nil {
		t.Fatalf("err should be nil, got %s", err)
	}
}
explode_data.jsonl/57851
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 499 }
[ 2830, 3393, 2959, 1098, 76108, 2648, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 7901, 1142, 198, 197, 25132, 2648, 1669, 609, 49813, 2648, 515, 197, 69472, 1242, 25, 3056, 3782, 13608, 16, 11688, 197, 197, 6370, 25, 257, 15870, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCreateMissingContext(t *testing.T) { const expectedErrorContains = "context was not found for specified context: not-present" config := createValidTestConfig() clientBuilder := NewNonInteractiveClientConfig(*config, "not-present", &ConfigOverrides{ ClusterDefaults: clientcmdapi.Cluster{Server: "http://localhost:8080"}, }, nil) _, err := clientBuilder.ClientConfig() if err == nil { t.Fatalf("Expected error: %v", expectedErrorContains) } if !strings.Contains(err.Error(), expectedErrorContains) { t.Fatalf("Expected error: %v, but got %v", expectedErrorContains, err) } }
explode_data.jsonl/56169
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 4021, 25080, 1972, 1155, 353, 8840, 836, 8, 341, 4777, 3601, 1454, 23805, 284, 330, 2147, 572, 537, 1730, 369, 5189, 2266, 25, 537, 48024, 698, 25873, 1669, 1855, 4088, 2271, 2648, 741, 25291, 3297, 1669, 1532, 8121, 71686, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_Pagination_PrevNext_FirstPageLinkAsFolderURL(t *testing.T) { doc := testutil.CreateHTML() body := dom.QuerySelector(doc, "body") root := testutil.CreateDiv(0) dom.AppendChild(body, root) // Some sites' first page links are the same as the folder URL, // previous page link needs to recognize this. href := ExampleURL[:strings.LastIndex(ExampleURL, "/")] anchor := testutil.CreateAnchor(href, "PREV") dom.AppendChild(root, anchor) assertDefaultDocumenOutlink(t, doc, anchor, nil) }
explode_data.jsonl/10832
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 1088, 10353, 1088, 7282, 5847, 79790, 2665, 3939, 2121, 13682, 3144, 1155, 353, 8840, 836, 8, 341, 59536, 1669, 1273, 1314, 7251, 5835, 741, 35402, 1669, 4719, 15685, 5877, 19153, 11, 330, 2599, 5130, 33698, 1669, 1273, 1314, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAssertableAny_HasTypeOf(t *testing.T) { tests := []struct { name string actual interface{} shouldFail bool }{ { name: "should assert the same types", actual: "123", shouldFail: false, }, { name: "should assert different types", actual: true, shouldFail: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { test := &testing.T{} That(test, tt.actual).HasTypeOf(reflect.TypeOf("")) ThatBool(t, test.Failed()).IsEqualTo(tt.shouldFail) }) } }
explode_data.jsonl/53650
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 8534, 480, 8610, 2039, 300, 929, 2124, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 88814, 257, 3749, 16094, 197, 197, 5445, 19524, 1807, 198, 197, 59403, 197, 197, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFetchChain joins a mock channel and then fetches its oldest
// configuration block into a local file, asserting the file was written.
func TestFetchChain(t *testing.T) {
	InitMSP()
	resetFlags()
	mockchain := "mockchain"
	signer, err := common.GetDefaultSigner()
	if err != nil {
		t.Fatalf("Get default signer error: %v", err)
	}
	mockCF := &ChannelCmdFactory{
		BroadcastFactory: mockBroadcastClientFactory,
		Signer:           signer,
		DeliverClient:    &mockDeliverClient{},
	}
	// First, join the mock channel.
	cmd := createCmd(mockCF)
	AddFlags(cmd)
	args := []string{"-c", mockchain}
	cmd.SetArgs(args)
	assert.NoError(t, cmd.Execute(), "Join command expected to succeed")
	// Make sure no stale block file survives from a previous run.
	os.Remove(mockchain + ".block")
	cmd = fetchCmd(mockCF)
	defer os.Remove(mockchain + ".block")
	AddFlags(cmd)
	// Fetch the oldest block of the channel into <chain>.block.
	args = []string{"-c", mockchain, "oldest", mockchain + ".block"}
	cmd.SetArgs(args)
	assert.NoError(t, cmd.Execute(), "Join command expected to succeed")
	if _, err := os.Stat(mockchain + ".block"); os.IsNotExist(err) {
		// path/to/whatever does not exist
		t.Error("expected configuration block to be fetched")
		t.Fail()
	}
}
explode_data.jsonl/13504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 369 }
[ 2830, 3393, 20714, 18837, 1155, 353, 8840, 836, 8, 341, 98762, 44, 4592, 741, 70343, 9195, 2822, 77333, 8819, 1669, 330, 16712, 8819, 1837, 69054, 261, 11, 1848, 1669, 4185, 2234, 3675, 7264, 261, 741, 743, 1848, 961, 2092, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOpenConnector(t *testing.T) { Register("testctx", &fakeDriverCtx{}) db, err := Open("testctx", "people") if err != nil { t.Fatal(err) } defer db.Close() c, ok := db.connector.(*fakeConnector) if !ok { t.Fatal("not using *fakeConnector") } if err := db.Close(); err != nil { t.Fatal(err) } if !c.closed { t.Fatal("connector is not closed") } }
explode_data.jsonl/16034
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 5002, 35954, 1155, 353, 8840, 836, 8, 341, 79096, 445, 1944, 3773, 497, 609, 30570, 11349, 23684, 37790, 20939, 11, 1848, 1669, 5264, 445, 1944, 3773, 497, 330, 16069, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestAuthcryptPackerUsingKeysWithDifferentCurvesSuccess packs and
// unpacks an authcrypt envelope for three recipients whose keys were all
// created with the same key type, then repeats the round-trip with a
// single recipient, and finally checks the JWE's protected headers.
func TestAuthcryptPackerUsingKeysWithDifferentCurvesSuccess(t *testing.T) {
	k := createKMS(t)
	_, recipientsKey1, keyHandles1 := createRecipients(t, k, 1)
	// since authcrypt does ECDH kw using the sender key, the recipient keys must be on the same curve (for NIST P keys)
	// and the same key type (for NIST P / X25519 keys) as the sender's.
	// this is why recipient keys with different curves/type are not supported for authcrypt.
	_, recipientsKey2, _ := createRecipients(t, k, 1) // can't create key with kms.NISTP384ECDHKW
	_, recipientsKey3, _ := createRecipients(t, k, 1) // can't create key with kms.NISTP521ECDHKW

	// Collect copies of the three recipient keys into one slice.
	recipientsKeys := make([][]byte, 3)
	recipientsKeys[0] = make([]byte, len(recipientsKey1[0]))
	recipientsKeys[1] = make([]byte, len(recipientsKey2[0]))
	recipientsKeys[2] = make([]byte, len(recipientsKey3[0]))

	copy(recipientsKeys[0], recipientsKey1[0])
	copy(recipientsKeys[1], recipientsKey2[0])
	copy(recipientsKeys[2], recipientsKey3[0])

	cty := transport.MediaTypeV1PlaintextPayload

	skid, senderKey, _ := createAndMarshalKey(t, k)

	thirdPartyKeyStore := make(map[string]mockstorage.DBEntry)
	mockStoreProvider := &mockstorage.MockStoreProvider{Store: &mockstorage.MockStore{
		Store: thirdPartyKeyStore,
	}}

	cryptoSvc, err := tinkcrypto.New()
	require.NoError(t, err)

	authPacker, err := New(newMockProvider(mockStoreProvider, k, cryptoSvc), afgjose.A256GCM)
	require.NoError(t, err)

	// add sender key in thirdPartyKS (prep step before Authcrypt.Pack()/Unpack())
	fromWrappedKID := prefix.StorageKIDPrefix + skid
	thirdPartyKeyStore[fromWrappedKID] = mockstorage.DBEntry{Value: senderKey}

	origMsg := []byte("secret message")
	ct, err := authPacker.Pack(cty, origMsg, []byte(skid), recipientsKeys)
	require.NoError(t, err)

	t.Logf("authcrypt JWE: %s", ct)

	msg, err := authPacker.Unpack(ct)
	require.NoError(t, err)

	recKey, err := exportPubKeyBytes(keyHandles1[0])
	require.NoError(t, err)

	// Unpack must recover the original message addressed to recipient 1.
	require.EqualValues(t, &transport.Envelope{
		MediaType: transport.MediaTypeV2EncryptedEnvelopeV1PlaintextPayload,
		Message:   origMsg,
		ToKey:     recKey,
	}, msg)

	// try with only 1 recipient
	ct, err = authPacker.Pack(cty, origMsg, []byte(skid), [][]byte{recipientsKeys[0]})
	require.NoError(t, err)

	msg, err = authPacker.Unpack(ct)
	require.NoError(t, err)

	require.EqualValues(t, &transport.Envelope{
		MediaType: transport.MediaTypeV2EncryptedEnvelopeV1PlaintextPayload,
		Message:   origMsg,
		ToKey:     recKey,
	}, msg)

	jweJSON, err := afgjose.Deserialize(string(ct))
	require.NoError(t, err)

	verifyJWETypes(t, cty, jweJSON.ProtectedHeaders)
}
explode_data.jsonl/19840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 990 }
[ 2830, 3393, 5087, 48125, 47, 9683, 16429, 8850, 2354, 69123, 16704, 2342, 7188, 1155, 353, 8840, 836, 8, 341, 16463, 1669, 1855, 42, 4826, 1155, 340, 197, 6878, 33776, 1592, 16, 11, 1376, 65928, 16, 1669, 1855, 3820, 47647, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_getOrSetCorrelationID(t *testing.T) { t.Parallel() withID := http.Header{correlation.HeaderID: []string{"123"}} withoutID := http.Header{correlation.HeaderID: []string{}} withEmptyID := http.Header{correlation.HeaderID: []string{""}} missingHeader := http.Header{} type args struct { hdr http.Header } tests := map[string]struct { args args }{ "with id": {args: args{hdr: withID}}, "without id": {args: args{hdr: withoutID}}, "with empty id": {args: args{hdr: withEmptyID}}, "missing Header": {args: args{hdr: missingHeader}}, } for name, tt := range tests { tt := tt t.Run(name, func(t *testing.T) { t.Parallel() assert.NotEmpty(t, getOrSetCorrelationID(tt.args.hdr)) assert.NotEmpty(t, tt.args.hdr[correlation.HeaderID][0]) }) } }
explode_data.jsonl/54955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 329 }
[ 2830, 3393, 3062, 2195, 1649, 10580, 22221, 915, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 46948, 915, 1669, 1758, 15753, 90, 6005, 22221, 15753, 915, 25, 3056, 917, 4913, 16, 17, 18, 95642, 197, 28996, 915, 1669, 1758, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDetectFolderBlendMode(t *testing.T) { for _, filename := range []string{ "mod.psd", "mod2.psd", } { file, err := os.Open("testdata/" + filename) if err != nil { t.Errorf("cannot open %s: %v", filename, err) return } defer file.Close() psd, _, err := Decode(file, nil) if err != nil { t.Errorf("cannot open %s as psd: %v", filename, err) return } { want := "フォルダー3" got := psd.Layer[2].Layer[0].UnicodeName if want != got { t.Error(filename, "want", want, "but got", got) return } } { want := BlendModeNormal got := psd.Layer[2].Layer[0].SectionDividerSetting.BlendMode if want != got { t.Error(filename, "want", want, "but got", got) return } } } }
explode_data.jsonl/50814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 57193, 13682, 45000, 3636, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 3899, 1669, 2088, 3056, 917, 515, 197, 197, 1, 2593, 556, 13446, 756, 197, 197, 1, 2593, 17, 556, 13446, 756, 197, 92, 341, 197, 17661, 11, 1848, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCidForTestGetter(t *testing.T) { tf.UnitTest(t) newCid := NewCidForTestGetter() c1 := newCid() c2 := newCid() assert.False(t, c1.Equals(c2)) assert.False(t, c1.Equals(SomeCid())) // Just in case. }
explode_data.jsonl/45601
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 34, 307, 2461, 2271, 31485, 1155, 353, 8840, 836, 8, 341, 3244, 69, 25159, 2271, 1155, 692, 8638, 34, 307, 1669, 1532, 34, 307, 2461, 2271, 31485, 741, 1444, 16, 1669, 501, 34, 307, 741, 1444, 17, 1669, 501, 34, 307, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSolution(t *testing.T) { for _, tc := range testCases { answer := countPrimes(tc.tcase) if answer != tc.answer { t.Errorf("expected: %v, got: %v", tc.answer, answer) } } }
explode_data.jsonl/30360
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 36842, 1155, 353, 8840, 836, 8, 341, 262, 369, 8358, 17130, 1669, 2088, 1273, 37302, 341, 286, 4226, 1669, 1760, 3533, 1733, 44415, 734, 5638, 340, 286, 421, 4226, 961, 17130, 44527, 341, 310, 259, 13080, 445, 7325, 25, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
// TestEndWriteRestartReadOnlyTransaction installs a mock sender that
// always rejects the first Put/Scan with a serializable-retry error and
// commits on EndTxn, then checks — for every write/read × success/abort
// combination — which RPC methods the txn coordinator ends up issuing.
func TestEndWriteRestartReadOnlyTransaction(t *testing.T) {
	defer leaktest.AfterTest(t)()
	defer log.Scope(t).Close(t)
	ctx := context.Background()
	clock := hlc.NewClock(hlc.UnixNano, time.Nanosecond)
	ambient := log.AmbientContext{Tracer: tracing.NewTracer()}
	sender := &mockSender{}
	stopper := stop.NewStopper()
	defer stopper.Stop(ctx)

	// calls records every RPC method the coordinator sends.
	var calls []roachpb.Method
	sender.match(func(ba roachpb.BatchRequest) (*roachpb.BatchResponse, *roachpb.Error) {
		br := ba.CreateReply()
		br.Txn = ba.Txn.Clone()

		calls = append(calls, ba.Methods()...)
		switch ba.Requests[0].GetInner().Method() {
		case roachpb.Put, roachpb.Scan:
			// Inject a retriable error on the first write/locking read.
			return nil, roachpb.NewErrorWithTxn(
				roachpb.NewTransactionRetryError(roachpb.RETRY_SERIALIZABLE, "test err"),
				ba.Txn)
		case roachpb.EndTxn:
			br.Txn.Status = roachpb.COMMITTED
		}
		return br, nil
	})

	factory := NewTxnCoordSenderFactory(
		TxnCoordSenderFactoryConfig{
			AmbientCtx: ambient,
			Clock:      clock,
			Stopper:    stopper,
			Settings:   cluster.MakeTestingClusterSettings(),
			TestingKnobs: ClientTestingKnobs{
				// Disable span refresh, otherwise it kicks and retries batches by
				// itself.
				MaxTxnRefreshAttempts: -1,
			},
		},
		sender,
	)
	db := kv.NewDB(testutils.MakeAmbientCtx(), factory, clock, stopper)

	testutils.RunTrueAndFalse(t, "write", func(t *testing.T, write bool) {
		testutils.RunTrueAndFalse(t, "success", func(t *testing.T, success bool) {
			calls = nil
			firstIter := true
			if err := db.Txn(ctx, func(ctx context.Context, txn *kv.Txn) error {
				if firstIter {
					firstIter = false
					var err error
					if write {
						err = txn.Put(ctx, "consider", "phlebas")
					} else /* locking read */ {
						_, err = txn.ScanForUpdate(ctx, "a", "b", 0)
					}
					if err == nil {
						t.Fatal("missing injected retriable error")
					}
				}
				if !success {
					return errors.New("aborting on purpose")
				}
				return nil
			}); err == nil != success {
				t.Fatalf("expected error: %t, got error: %v", !success, err)
			}

			// Regardless of success/abort, exactly one Put-or-Scan plus
			// one EndTxn must have been issued.
			var expCalls []roachpb.Method
			if write {
				expCalls = []roachpb.Method{roachpb.Put, roachpb.EndTxn}
			} else {
				expCalls = []roachpb.Method{roachpb.Scan, roachpb.EndTxn}
			}
			if !reflect.DeepEqual(expCalls, calls) {
				t.Fatalf("expected %v, got %v", expCalls, calls)
			}
		})
	})
}
explode_data.jsonl/76904
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1010 }
[ 2830, 3393, 3727, 7985, 59354, 20914, 8070, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 20985, 1669, 2266, 19047, 741, 84165, 1669, 305, 17257, 7121, 26104, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTypeForKey(t *testing.T) { tests := []struct{ key, in, out string }{ {"co", "en", ""}, {"co", "en-u-abc", ""}, {"co", "en-u-co-phonebk", "phonebk"}, {"co", "en-u-co-phonebk-cu-aud", "phonebk"}, {"co", "x-foo-u-co-phonebk", ""}, {"va", "en-US-u-va-posix", "posix"}, {"rg", "en-u-rg-gbzzzz", "gbzzzz"}, {"nu", "en-u-co-phonebk-nu-arabic", "arabic"}, {"kc", "cmn-u-co-stroke", ""}, } for _, tt := range tests { if v := Make(tt.in).TypeForKey(tt.key); v != tt.out { t.Errorf("%q[%q]: was %q; want %q", tt.in, tt.key, v, tt.out) } } }
explode_data.jsonl/15845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 929, 14954, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 90, 1376, 11, 304, 11, 700, 914, 335, 515, 197, 197, 4913, 1015, 497, 330, 268, 497, 77496, 197, 197, 4913, 1015, 497, 330, 268, 45381, 12, 13683, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestGenerate covers token generation: the happy path, plus rejection
// of empty keys, reserved "_"-prefixed keys, negative expirations and
// unknown signing algorithms.
func TestGenerate(t *testing.T) {
	kind := TokenKind{
		Algo:       TokenAlgoHmacSHA256,
		Expiration: 30 * time.Minute,
		SecretKey:  "secret_key_name",
		Version:    1,
	}

	Convey("Works", t, func() {
		ctx := testContext()
		token, err := kind.Generate(ctx, nil, nil, 0)
		So(token, ShouldNotEqual, "")
		So(err, ShouldBeNil)
	})

	Convey("Empty key", t, func() {
		ctx := testContext()
		token, err := kind.Generate(ctx, nil, map[string]string{"": "v"}, 0)
		So(token, ShouldEqual, "")
		So(err, ShouldErrLike, "empty key")
	})

	Convey("Forbidden key", t, func() {
		// Keys starting with "_" are reserved and must be rejected.
		ctx := testContext()
		token, err := kind.Generate(ctx, nil, map[string]string{"_x": "v"}, 0)
		So(token, ShouldEqual, "")
		So(err, ShouldErrLike, "bad key")
	})

	Convey("Negative exp", t, func() {
		ctx := testContext()
		token, err := kind.Generate(ctx, nil, nil, -time.Minute)
		So(token, ShouldEqual, "")
		So(err, ShouldErrLike, "expiration can't be negative")
	})

	Convey("Unknown algo", t, func() {
		ctx := testContext()
		k2 := kind
		k2.Algo = "unknown"
		token, err := k2.Generate(ctx, nil, nil, 0)
		So(token, ShouldEqual, "")
		So(err, ShouldErrLike, "unknown algo")
	})
}
explode_data.jsonl/36817
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 492 }
[ 2830, 3393, 31115, 1155, 353, 8840, 836, 8, 341, 197, 15314, 1669, 9660, 10629, 515, 197, 197, 2101, 3346, 25, 981, 9660, 2101, 3346, 39, 11948, 33145, 17, 20, 21, 345, 197, 197, 66301, 25, 220, 18, 15, 353, 882, 75770, 345, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_SQLite_003(t *testing.T) { db, err := sqlite3.OpenPathEx(":memory:", sqlite3.SQLITE_OPEN_CREATE, "") if err != nil { t.Error(err) } else if err := db.SetBusyTimeout(5 * time.Second); err != nil { t.Error(err) } else if err := db.Close(); err != nil { t.Error(err) } }
explode_data.jsonl/48727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 46625, 632, 62, 15, 15, 18, 1155, 353, 8840, 836, 8, 341, 20939, 11, 1848, 1669, 22003, 18, 12953, 1820, 840, 18893, 17269, 12147, 22003, 18, 25095, 5991, 24070, 25823, 11, 14676, 743, 1848, 961, 2092, 341, 197, 3244, 6141...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTemplateAttribute_marshal(t *testing.T) { tests := []struct { name string in TemplateAttribute inF func() TemplateAttribute expected ttlv.Value }{ { name: "basic", in: TemplateAttribute{ Name: []Name{ { NameValue: "first", NameType: kmip14.NameTypeUninterpretedTextString, }, { NameValue: "this is a uri", NameType: kmip14.NameTypeURI, }, }, Attribute: []Attribute{ { AttributeName: kmip14.TagAlwaysSensitive.CanonicalName(), AttributeIndex: 5, AttributeValue: true, }, }, }, expected: s(kmip14.TagTemplateAttribute, s(kmip14.TagName, v(kmip14.TagNameValue, "first"), v(kmip14.TagNameType, kmip14.NameTypeUninterpretedTextString), ), s(kmip14.TagName, v(kmip14.TagNameValue, "this is a uri"), v(kmip14.TagNameType, kmip14.NameTypeURI), ), s(kmip14.TagAttribute, v(kmip14.TagAttributeName, kmip14.TagAlwaysSensitive.CanonicalName()), v(kmip14.TagAttributeIndex, 5), v(kmip14.TagAttributeValue, true), ), ), }, { name: "noname", in: TemplateAttribute{Attribute: []Attribute{ { AttributeName: kmip14.TagAlwaysSensitive.CanonicalName(), AttributeIndex: 5, AttributeValue: true, }, }}, expected: s(kmip14.TagTemplateAttribute, s(kmip14.TagAttribute, v(kmip14.TagAttributeName, kmip14.TagAlwaysSensitive.CanonicalName()), v(kmip14.TagAttributeIndex, 5), v(kmip14.TagAttributeValue, true), ), ), }, { name: "noattribute", in: TemplateAttribute{ Name: []Name{ { NameValue: "first", NameType: kmip14.NameTypeUninterpretedTextString, }, }, }, expected: s(kmip14.TagTemplateAttribute, s(kmip14.TagName, v(kmip14.TagNameValue, "first"), v(kmip14.TagNameType, kmip14.NameTypeUninterpretedTextString), ), ), }, { name: "omitzeroindex", in: TemplateAttribute{ Attribute: []Attribute{ { AttributeName: kmip14.TagAlwaysSensitive.CanonicalName(), AttributeValue: true, }, }, }, expected: s(kmip14.TagTemplateAttribute, s(kmip14.TagAttribute, v(kmip14.TagAttributeName, kmip14.TagAlwaysSensitive.CanonicalName()), v(kmip14.TagAttributeValue, true), ), ), }, { name: "use canonical names", inF: func() 
TemplateAttribute { var ta TemplateAttribute ta.Append(kmip14.TagCryptographicAlgorithm, ttlv.EnumValue(kmip14.CryptographicAlgorithmBlowfish)) return ta }, expected: s(kmip14.TagTemplateAttribute, s(kmip14.TagAttribute, v(kmip14.TagAttributeName, "Cryptographic Algorithm"), v(kmip14.TagAttributeValue, ttlv.EnumValue(kmip14.CryptographicAlgorithmBlowfish)), ), ), }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { in := test.in if test.inF != nil { in = test.inF() } out, err := ttlv.Marshal(&in) require.NoError(t, err) expected, err := ttlv.Marshal(test.expected) require.NoError(t, err) require.Equal(t, out, expected) var ta TemplateAttribute err = ttlv.Unmarshal(expected, &ta) require.NoError(t, err) require.Equal(t, in, ta) }) } }
explode_data.jsonl/27936
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1523 }
[ 2830, 3393, 7275, 3907, 717, 28423, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 17430, 981, 14355, 3907, 198, 197, 17430, 37, 414, 2915, 368, 14355, 3907, 198, 197, 42400, 53932, 85, 616...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDotReplace(t *testing.T) { records := make([]*kinesis.PutRecordsRequestEntry, 0, 500) record := map[interface{}]interface{}{ "message.key": map[interface{}]interface{}{ "messagevalue": []byte("some.message"), "message.value/one": []byte("some message"), "message.value/two": []byte("some message"), }, "kubernetes": map[interface{}]interface{}{ "app": []byte("test app label"), "app.kubernetes.io/name": []byte("test key with dots"), }, } outputPlugin, _ := newMockOutputPlugin(nil, false) timeStamp := time.Now() retCode := outputPlugin.AddRecord(&records, record, &timeStamp) assert.Equal(t, retCode, fluentbit.FLB_OK, "Expected return code to be FLB_OK") assert.Len(t, records, 1, "Expected output to contain 1 record") data := records[0].Data var log map[string]map[string]interface{} json.Unmarshal(data, &log) assert.Equal(t, "test app label", log["kubernetes"]["app"]) assert.Equal(t, "test key with dots", log["kubernetes"]["app-kubernetes-io/name"]) assert.Equal(t, "some.message", log["message-key"]["messagevalue"]) assert.Equal(t, "some message", log["message-key"]["message-value/one"]) assert.Equal(t, "some message", log["message-key"]["message-value/two"]) }
explode_data.jsonl/74108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 492 }
[ 2830, 3393, 34207, 23107, 1155, 353, 8840, 836, 8, 341, 197, 26203, 1669, 1281, 85288, 74, 82789, 39825, 25876, 1900, 5874, 11, 220, 15, 11, 220, 20, 15, 15, 340, 71952, 1669, 2415, 58, 4970, 78134, 4970, 67066, 197, 197, 1, 1994, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBenchmarkResultDecoder_decodeok(t *testing.T) { verifyParses := func(line string, expected string) func(t *testing.T) { return func(t *testing.T) { d := benchmarkResultDecoder{} res, err := d.decode(line) require.NoError(t, err) require.Equal(t, expected, res.String()) } } t.Run("case=simple", verifyParses("Benchmark 1 10 ns/op", "Benchmark 1 10 ns/op")) t.Run("case=named", verifyParses("BenchmarkBob 1 10 ns/op", "BenchmarkBob 1 10 ns/op")) t.Run("case=manyspaces", verifyParses("BenchmarkBob 1\t10\t \t ns/op", "BenchmarkBob 1 10 ns/op")) t.Run("case=tworesults", verifyParses("Benchmark 1 10 ns/op 5 MB/s", "Benchmark 1 10 ns/op 5 MB/s")) }
explode_data.jsonl/39866
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 84971, 2077, 20732, 15227, 562, 1155, 353, 8840, 836, 8, 341, 93587, 47, 1561, 288, 1669, 2915, 8797, 914, 11, 3601, 914, 8, 2915, 1155, 353, 8840, 836, 8, 341, 197, 853, 2915, 1155, 353, 8840, 836, 8, 341, 298, 2698, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEmptyUI64S(t *testing.T) { var is []uint64 f := setUpUI64SFlagSet(&is) err := f.Parse([]string{}) if err != nil { t.Fatal("expected no error; got", err) } getI64S, err := f.GetUint64Slice("is") if err != nil { t.Fatal("got an error from GetUint64Slice():", err) } if len(getI64S) != 0 { t.Fatalf("got is %v with len=%d but expected length=0", getI64S, len(getI64S)) } }
explode_data.jsonl/16218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 3522, 2275, 21, 19, 50, 1155, 353, 8840, 836, 8, 341, 2405, 374, 3056, 2496, 21, 19, 198, 1166, 1669, 18620, 2275, 21, 19, 50, 12135, 1649, 2099, 285, 340, 9859, 1669, 282, 8937, 10556, 917, 37790, 743, 1848, 961, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestClient_CreateExternalInitiator_Errors(t *testing.T) { t.Parallel() tests := []struct { name string args []string }{ {"no arguments", []string{}}, {"too many arguments", []string{"bitcoin", "https://valid.url", "extra arg"}}, {"invalid url", []string{"bitcoin", "not a url"}}, } for _, tt := range tests { test := tt t.Run(test.name, func(t *testing.T) { app, cleanup := cltest.NewApplicationWithKey(t, cltest.LenientEthMock) defer cleanup() require.NoError(t, app.Start()) client, _ := app.NewClientAndRenderer() set := flag.NewFlagSet("create", 0) assert.NoError(t, set.Parse(test.args)) c := cli.NewContext(nil, set, nil) err := client.CreateExternalInitiator(c) assert.Error(t, err) exis := cltest.AllExternalInitiators(t, app.Store) assert.Len(t, exis, 0) }) } }
explode_data.jsonl/78840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 2959, 34325, 25913, 3803, 36122, 93623, 1087, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 3056, 917, 198, 197, 59403, 197, 197, 4913, 2152, 5977, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHandlerServeHTTP(t *testing.T) { fm := &fakeMaintner{ resp: &apipb.DashboardResponse{Commits: []*apipb.DashCommit{ { Title: "x/build/cmd/coordinator: implement dashboard", Commit: "752029e171d535b0dd4ff7bbad5ad0275a3969a8", CommitTimeSec: 1257894000, AuthorName: "Gopherbot", AuthorEmail: "gopherbot@example.com", }, }}, } dh := &Handler{ Maintner: fm, memoryResults: map[string][]string{ "752029e171d535b0dd4ff7bbad5ad0275a3969a8": {"linux-amd64-longtest|true|SomeLog|752029e171d535b0dd4ff7bbad5ad0275a3969a8"}, }, } req := httptest.NewRequest("GET", "/dashboard", nil) w := httptest.NewRecorder() dh.ServeHTTP(w, req) resp := w.Result() defer resp.Body.Close() ioutil.ReadAll(resp.Body) if resp.StatusCode != http.StatusOK { t.Errorf("resp.StatusCode = %d, wanted %d", resp.StatusCode, http.StatusOK) } }
explode_data.jsonl/45359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 3050, 60421, 9230, 1155, 353, 8840, 836, 8, 341, 1166, 76, 1669, 609, 30570, 66734, 1194, 515, 197, 34653, 25, 609, 391, 573, 65, 909, 7349, 2582, 90, 17977, 1199, 25, 29838, 391, 573, 65, 909, 988, 33441, 515, 298, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIsBinary(t *testing.T) { tests := []struct { name string data []byte expected bool }{ {name: "TestIsBinary_1", data: []byte("foo"), expected: false}, {name: "TestIsBinary_2", data: []byte{0}, expected: true}, {name: "TestIsBinary_3", data: bytes.Repeat([]byte{'o'}, 8000), expected: false}, } for _, test := range tests { is := IsBinary(test.data) assert.Equal(t, is, test.expected, fmt.Sprintf("%v: is = %v, expected: %v", test.name, is, test.expected)) } }
explode_data.jsonl/20384
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 3872, 21338, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 8924, 257, 3056, 3782, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 47006, 25, 330, 2271, 3872, 21338, 62, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCollection_Upsert(t *testing.T) { ast := require.New(t) cli := initClient("test") defer cli.Close(context.Background()) defer cli.DropCollection(context.Background()) cli.EnsureIndexes(context.Background(), []string{"name"}, nil) id1 := primitive.NewObjectID() id2 := primitive.NewObjectID() docs := []interface{}{ bson.D{{Key: "_id", Value: id1}, {Key: "name", Value: "Alice"}}, bson.D{{Key: "_id", Value: id2}, {Key: "name", Value: "Lucas"}}, } _, err := cli.InsertMany(context.Background(), docs) ast.NoError(err) // replace already exist filter1 := bson.M{ "name": "Alice", } replacement1 := bson.M{ "name": "Alice1", "age": 18, } opts := options.UpsertOptions{} opts.ReplaceOptions = officialOpts.Replace() res, err := cli.Upsert(context.Background(), filter1, replacement1, opts) ast.NoError(err) ast.NotEmpty(res) ast.Equal(int64(1), res.MatchedCount) ast.Equal(int64(1), res.ModifiedCount) ast.Equal(int64(0), res.UpsertedCount) ast.Equal(nil, res.UpsertedID) // not exist filter2 := bson.M{ "name": "Lily", } replacement2 := bson.M{ "name": "Lily", "age": 20, } res, err = cli.Upsert(context.Background(), filter2, replacement2) ast.NoError(err) ast.NotEmpty(res) ast.Equal(int64(0), res.MatchedCount) ast.Equal(int64(0), res.ModifiedCount) ast.Equal(int64(1), res.UpsertedCount) ast.NotNil(res.UpsertedID) // filter is nil or wrong BSON Document format replacement3 := bson.M{ "name": "Geek", "age": 21, } res, err = cli.Upsert(context.Background(), nil, replacement3) ast.Error(err) ast.Empty(res) res, err = cli.Upsert(context.Background(), 1, replacement3) ast.Error(err) ast.Empty(res) // replacement is nil or wrong BSON Document format filter4 := bson.M{ "name": "Geek", } res, err = cli.Upsert(context.Background(), filter4, nil) ast.Error(err) ast.Empty(res) res, err = cli.Upsert(context.Background(), filter4, 1) ast.Error(err) ast.Empty(res) }
explode_data.jsonl/18374
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 817 }
[ 2830, 3393, 6482, 6665, 1690, 529, 1155, 353, 8840, 836, 8, 341, 88836, 1669, 1373, 7121, 1155, 340, 86448, 1669, 2930, 2959, 445, 1944, 1138, 16867, 21348, 10421, 5378, 19047, 2398, 16867, 21348, 21688, 6482, 5378, 19047, 2398, 86448, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetAutocommitON(t *testing.T) { stmt, err := Parse("SET autocommit=ON") if err != nil { t.Error(err) } s, ok := stmt.(*Set) if !ok { t.Errorf("SET statement is not Set: %T", s) } if len(s.Exprs) < 1 { t.Errorf("SET statement has no expressions") } e := s.Exprs[0] switch v := e.Expr.(type) { case *SQLVal: if v.Type != StrVal { t.Errorf("SET statement value is not StrVal: %T", v) } if !bytes.Equal([]byte("on"), v.Val) { t.Errorf("SET statement value want: on, got: %s", v.Val) } default: t.Errorf("SET statement expression is not SQLVal: %T", e.Expr) } stmt, err = Parse("SET @@session.autocommit=ON") if err != nil { t.Error(err) } s, ok = stmt.(*Set) if !ok { t.Errorf("SET statement is not Set: %T", s) } if len(s.Exprs) < 1 { t.Errorf("SET statement has no expressions") } e = s.Exprs[0] switch v := e.Expr.(type) { case *SQLVal: if v.Type != StrVal { t.Errorf("SET statement value is not StrVal: %T", v) } if !bytes.Equal([]byte("on"), v.Val) { t.Errorf("SET statement value want: on, got: %s", v.Val) } default: t.Errorf("SET statement expression is not SQLVal: %T", e.Expr) } }
explode_data.jsonl/3374
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 535 }
[ 2830, 3393, 1649, 19602, 11986, 1763, 711, 1155, 353, 8840, 836, 8, 341, 55822, 11, 1848, 1669, 14775, 445, 5884, 3078, 11986, 1763, 28, 711, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 532, 1903, 11, 5394, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestSprintStatusMap(t *testing.T) { assert := assert.New(t) assert.Equal(sdk.AgileSprintStatusFuture, sprintStateMap["future"]) assert.Equal(sdk.AgileSprintStatusActive, sprintStateMap["active"]) assert.Equal(sdk.AgileSprintStatusClosed, sprintStateMap["closed"]) }
explode_data.jsonl/3682
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 50, 1350, 2522, 2227, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 6948, 12808, 1141, 7584, 49850, 457, 50, 1350, 2522, 24206, 11, 37849, 1397, 2227, 1183, 21055, 14108, 6948, 12808, 1141, 7584, 49850, 45...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSlotMakeCallable(t *testing.T) { fooType := newTestClass("Foo", []*Type{ObjectType}, NewDict()) foo := newObject(fooType) // fun returns a tuple: (ret, args) where ret is the return value of // the callable produced by makeCallable and args are the arguments // that were passed into the callable. fun := wrapFuncForTest(func(f *Frame, s slot, ret *Object, args ...*Object) (*Object, *BaseException) { gotArgs := None prepareTestSlot(s, &gotArgs, ret) callable := s.makeCallable(fooType, "__slot__") if callable == nil { // This slot does not produce a callable, so just // return None. return None, nil } result, raised := callable.Call(f, args, nil) if raised != nil { return nil, raised } return NewTuple(result, gotArgs).ToObject(), nil }) cases := []invokeTestCase{ {args: wrapArgs(&basisSlot{}, None), want: None}, {args: wrapArgs(&binaryOpSlot{}, "foo", foo, 123), want: newTestTuple("foo", newTestTuple(foo, 123)).ToObject()}, {args: wrapArgs(&binaryOpSlot{}, None, "abc", 123), wantExc: mustCreateException(TypeErrorType, "'__slot__' requires a 'Foo' object but received a 'str'")}, {args: wrapArgs(&delAttrSlot{}, None, foo, "bar"), want: newTestTuple(None, newTestTuple(foo, "bar")).ToObject()}, {args: wrapArgs(&delAttrSlot{}, None, foo, 3.14), wantExc: mustCreateException(TypeErrorType, "'__slot__' requires a 'str' object but received a 'float'")}, {args: wrapArgs(&delAttrSlot{}, RuntimeErrorType, foo, "bar"), wantExc: mustCreateException(RuntimeErrorType, "")}, {args: wrapArgs(&deleteSlot{}, None, foo, "bar"), want: newTestTuple(None, newTestTuple(foo, "bar")).ToObject()}, {args: wrapArgs(&deleteSlot{}, None, foo, 1, 2, 3), wantExc: mustCreateException(TypeErrorType, "'__slot__' of 'Foo' requires 2 arguments")}, {args: wrapArgs(&deleteSlot{}, RuntimeErrorType, foo, "bar"), wantExc: mustCreateException(RuntimeErrorType, "")}, {args: wrapArgs(&delItemSlot{}, None, foo, "bar"), want: newTestTuple(None, newTestTuple(foo, "bar")).ToObject()}, {args: 
wrapArgs(&delItemSlot{}, None, foo, 1, 2, 3), wantExc: mustCreateException(TypeErrorType, "'__slot__' of 'Foo' requires 2 arguments")}, {args: wrapArgs(&delItemSlot{}, RuntimeErrorType, foo, "bar"), wantExc: mustCreateException(RuntimeErrorType, "")}, {args: wrapArgs(&getAttributeSlot{}, None, foo, "bar"), want: newTestTuple(None, newTestTuple(foo, "bar")).ToObject()}, {args: wrapArgs(&getAttributeSlot{}, None, foo, 3.14), wantExc: mustCreateException(TypeErrorType, "'__slot__' requires a 'str' object but received a 'float'")}, {args: wrapArgs(&getAttributeSlot{}, RuntimeErrorType, foo, "bar"), wantExc: mustCreateException(RuntimeErrorType, "")}, {args: wrapArgs(&getSlot{}, 3.14, foo, 123, IntType), want: newTestTuple(3.14, newTestTuple(foo, 123, IntType)).ToObject()}, {args: wrapArgs(&getSlot{}, None, foo, "bar", "baz"), wantExc: mustCreateException(TypeErrorType, "'__slot__' requires a 'type' object but received a 'str'")}, {args: wrapArgs(&nativeSlot{}, None), want: None}, {args: wrapArgs(&setAttrSlot{}, None, foo, "bar", 123), want: newTestTuple(None, newTestTuple(foo, "bar", 123)).ToObject()}, {args: wrapArgs(&setAttrSlot{}, None, foo, true, None), wantExc: mustCreateException(TypeErrorType, "'__slot__' requires a 'str' object but received a 'bool'")}, {args: wrapArgs(&setAttrSlot{}, RuntimeErrorType, foo, "bar", "baz"), wantExc: mustCreateException(RuntimeErrorType, "")}, {args: wrapArgs(&setItemSlot{}, None, foo, "bar", true), want: newTestTuple(None, newTestTuple(foo, "bar", true)).ToObject()}, {args: wrapArgs(&setItemSlot{}, None, foo, 1, 2, 3), wantExc: mustCreateException(TypeErrorType, "'__slot__' of 'Foo' requires 3 arguments")}, {args: wrapArgs(&setItemSlot{}, RuntimeErrorType, foo, "bar", "baz"), wantExc: mustCreateException(RuntimeErrorType, "")}, {args: wrapArgs(&setSlot{}, None, foo, 3.14, false), want: newTestTuple(None, newTestTuple(foo, 3.14, false)).ToObject()}, {args: wrapArgs(&setSlot{}, RuntimeErrorType, foo, "bar", "baz"), wantExc: 
mustCreateException(RuntimeErrorType, "")}, {args: wrapArgs(&setSlot{}, None, foo, 1, 2, 3), wantExc: mustCreateException(TypeErrorType, "'__slot__' of 'Foo' requires 3 arguments")}, {args: wrapArgs(&unaryOpSlot{}, 42, foo), want: newTestTuple(42, NewTuple(foo)).ToObject()}, {args: wrapArgs(&unaryOpSlot{}, None, foo, "bar"), wantExc: mustCreateException(TypeErrorType, "'__slot__' of 'Foo' requires 1 arguments")}, } for _, cas := range cases { if err := runInvokeTestCase(fun, &cas); err != "" { t.Error(err) } } }
explode_data.jsonl/3222
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1616 }
[ 2830, 3393, 19877, 8078, 40410, 1155, 353, 8840, 836, 8, 341, 197, 7975, 929, 1669, 501, 60100, 445, 40923, 497, 29838, 929, 90, 49530, 2137, 1532, 13448, 2398, 197, 7975, 1669, 501, 1190, 71880, 929, 340, 197, 322, 2464, 4675, 264, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEndpointFlipFlops(t *testing.T) { s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{}) addEdsCluster(s, "flipflop.com", "http", "10.0.0.53", 8080) adscConn := s.Connect(nil, nil, watchAll) // Validate that endpoints are pushed correctly. testEndpoints("10.0.0.53", "outbound|8080||flipflop.com", adscConn, t) // Clear the endpoint and validate it does not trigger a full push. s.Discovery.MemRegistry.SetEndpoints("flipflop.com", "", []*model.IstioEndpoint{}) upd, _ := adscConn.Wait(5*time.Second, v3.EndpointType) if contains(upd, "cds") { t.Fatalf("Expecting only EDS update as part of a partial push. But received CDS also %v", upd) } if len(upd) > 0 && !contains(upd, v3.EndpointType) { t.Fatalf("Expecting EDS push as part of a partial push. But received %v", upd) } lbe := adscConn.GetEndpoints()["outbound|8080||flipflop.com"] if len(lbe.Endpoints) != 0 { t.Fatalf("There should be no endpoints for outbound|8080||flipflop.com. Endpoints:\n%v", adscConn.EndpointsJSON()) } // Validate that keys in service still exist in EndpointShardsByService - this prevents full push. if len(s.Discovery.EndpointShardsByService["flipflop.com"]) == 0 { t.Fatalf("Expected service key %s to be present in EndpointShardsByService. But missing %v", "flipflop.com", s.Discovery.EndpointShardsByService) } // Set the endpoints again and validate it does not trigger full push. s.Discovery.MemRegistry.SetEndpoints("flipflop.com", "", []*model.IstioEndpoint{ { Address: "10.10.1.1", ServicePortName: "http", EndpointPort: 8080, }, }) upd, _ = adscConn.Wait(5*time.Second, v3.EndpointType) if contains(upd, v3.ClusterType) { t.Fatalf("expecting only EDS update as part of a partial push. But received CDS also %+v", upd) } if len(upd) > 0 && !contains(upd, v3.EndpointType) { t.Fatalf("expecting EDS push as part of a partial push. But did not receive %+v", upd) } testEndpoints("10.10.1.1", "outbound|8080||flipflop.com", adscConn, t) }
explode_data.jsonl/7952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 797 }
[ 2830, 3393, 27380, 46808, 37, 53689, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 856, 5356, 7121, 52317, 67400, 5475, 1155, 11, 856, 5356, 991, 726, 3798, 37790, 12718, 2715, 82, 28678, 1141, 11, 330, 39017, 69, 22288, 905, 497, 330, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestSetPrimaryDNSserver(t *testing.T) { d := setup() dn := DNS{} dn.PriIPv4 = "44.147.45.53" dn, err := d.SetPrimaryIPv4DNSserver(dn) notErr(t, err) assert(t, dn.PriIPv4, "44.147.45.53") }
explode_data.jsonl/21121
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 1649, 15972, 61088, 4030, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 6505, 741, 2698, 77, 1669, 27598, 16094, 2698, 77, 1069, 461, 58056, 19, 284, 330, 19, 19, 13, 16, 19, 22, 13, 19, 20, 13, 20, 18, 698, 2698, 77, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGithub_BranchReplaceSlash(t *testing.T) { ci := &Github{CIBranchName: "refs/heads/feature/xyz"} assert.Equal(t, "feature_xyz", ci.BranchReplaceSlash()) }
explode_data.jsonl/53501
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 78717, 1668, 81, 3497, 23107, 88004, 1155, 353, 8840, 836, 8, 341, 1444, 72, 1669, 609, 78717, 90, 34, 3256, 81, 3497, 675, 25, 330, 16149, 14, 35810, 14, 12753, 14, 28854, 63159, 6948, 12808, 1155, 11, 330, 12753, 64974, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestStripFlags(t *testing.T) { tests := []struct { input []string output []string }{ { []string{"foo", "bar"}, []string{"foo", "bar"}, }, { []string{"foo", "--bar", "-b"}, []string{"foo"}, }, { []string{"-b", "foo", "--bar", "bar"}, []string{}, }, { []string{"-i10", "echo"}, []string{"echo"}, }, { []string{"-i=10", "echo"}, []string{"echo"}, }, { []string{"--int=100", "echo"}, []string{"echo"}, }, { []string{"-ib", "echo", "-bfoo", "baz"}, []string{"echo", "baz"}, }, { []string{"-i=baz", "bar", "-i", "foo", "blah"}, []string{"bar", "blah"}, }, { []string{"--int=baz", "-bbar", "-i", "foo", "blah"}, []string{"blah"}, }, { []string{"--cat", "bar", "-i", "foo", "blah"}, []string{"bar", "blah"}, }, { []string{"-c", "bar", "-i", "foo", "blah"}, []string{"bar", "blah"}, }, { []string{"--persist", "bar"}, []string{"bar"}, }, { []string{"-p", "bar"}, []string{"bar"}, }, } cmdPrint := &Command{ Use: "print [string to print]", Short: "Print anything to the screen", Long: `an utterly useless command for testing.`, Run: func(cmd *Command, args []string) { tp = args }, } var flagi int var flagstr string var flagbool bool cmdPrint.PersistentFlags().BoolVarP(&flagbool, "persist", "p", false, "help for persistent one") cmdPrint.Flags().IntVarP(&flagi, "int", "i", 345, "help message for flag int") cmdPrint.Flags().StringVarP(&flagstr, "bar", "b", "bar", "help message for flag string") cmdPrint.Flags().BoolVarP(&flagbool, "cat", "c", false, "help message for flag bool") for _, test := range tests { output := stripFlags(test.input, cmdPrint) if !reflect.DeepEqual(test.output, output) { t.Errorf("expected: %v, got: %v", test.output, output) } } }
explode_data.jsonl/47428
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 814 }
[ 2830, 3393, 5901, 9195, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 22427, 220, 3056, 917, 198, 197, 21170, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 197, 1294, 917, 4913, 7975, 497, 330, 2257, 7115, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOnbuildIllegalTriggers(t *testing.T) { triggers := []struct{ command, expectedError string }{ {"ONBUILD", "Chaining ONBUILD via `ONBUILD ONBUILD` isn't allowed"}, {"MAINTAINER", "MAINTAINER isn't allowed as an ONBUILD trigger"}, {"FROM", "FROM isn't allowed as an ONBUILD trigger"}} for _, trigger := range triggers { b := &Builder{flags: &BFlags{}, runConfig: &container.Config{}, disableCommit: true} err := onbuild(b, []string{trigger.command}, nil, "") if err == nil { t.Fatalf("Error should not be nil") } if !strings.Contains(err.Error(), trigger.expectedError) { t.Fatalf("Error message not correct. Should be: %s, got: %s", trigger.expectedError, err.Error()) } } }
explode_data.jsonl/28277
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 1925, 5834, 33713, 1282, 21385, 1155, 353, 8840, 836, 8, 341, 25583, 21385, 1669, 3056, 1235, 90, 3210, 11, 3601, 1454, 914, 335, 515, 197, 197, 4913, 711, 47180, 497, 330, 1143, 2056, 6197, 47180, 4566, 1565, 711, 47180, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPostMedia_String(t *testing.T) { pm := types.PostMedia{ URI: "http://example.com", MimeType: "text/plain", } actual := pm.String() expected := "Media - URI - [http://example.com] ; Mime-Type - [text/plain] \n" require.Equal(t, expected, actual) }
explode_data.jsonl/47671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 4133, 12661, 31777, 1155, 353, 8840, 836, 8, 341, 86511, 1669, 4494, 23442, 12661, 515, 197, 197, 10301, 25, 414, 330, 1254, 1110, 8687, 905, 756, 197, 9209, 34872, 25, 330, 1318, 36971, 756, 197, 630, 88814, 1669, 8836, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_chan(t *testing.T) { t.Skip("do not support chan") type TestObject struct { MyChan chan bool MyField int } should := require.New(t) obj := TestObject{} str, err := json.Marshal(obj) should.Nil(err) should.Equal(``, str) }
explode_data.jsonl/73516
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 45552, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 2982, 537, 1824, 26023, 5130, 13158, 3393, 1190, 2036, 341, 197, 78469, 46019, 220, 26023, 1807, 198, 197, 78469, 1877, 526, 198, 197, 630, 197, 5445, 1669, 1373, 7121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGbLocationStringBuilder(t *testing.T) { tmpDataDir, err := ioutil.TempDir("", "data-*") if err != nil { t.Error(err) } defer os.RemoveAll(tmpDataDir) scrubbedGb := Read("../../data/t4_intron.gb") // removing gbkLocationString from features to allow testing for gbkLocationBuilder for featureIndex := range scrubbedGb.Features { scrubbedGb.Features[featureIndex].GbkLocationString = "" } tmpGbFilePath := filepath.Join(tmpDataDir, "t4_intron_test.gb") Write(scrubbedGb, tmpGbFilePath) testInputGb := Read("../../data/t4_intron.gb") testOutputGb := Read(tmpGbFilePath) if diff := cmp.Diff(testInputGb, testOutputGb, cmpopts.IgnoreFields(poly.Feature{}, "ParentSequence")); diff != "" { t.Errorf("Issue with either Join or complement location building. Parsing the output of Build() does not produce the same output as parsing the original file read with Read(). Got this diff:\n%s", diff) } }
explode_data.jsonl/74810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 84097, 4707, 69412, 1155, 353, 8840, 836, 8, 341, 20082, 1043, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 691, 44903, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 532, 16867, 2643, 84427, 1036...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestOCMProvider_CheckClusterStatus(t *testing.T) { type fields struct { ocmClient ocm.Client } type args struct { clusterSpec *types.ClusterSpec } internalId := "test-internal-id" externalId := "test-external-id" clusterFailedProvisioningErrorText := "cluster provisioning failed test message" spec := &types.ClusterSpec{ InternalID: internalId, ExternalID: "", Status: "", AdditionalInfo: nil, } tests := []struct { name string fields fields args args want *types.ClusterSpec wantErr bool }{ { name: "should return cluster status ready", fields: fields{ ocmClient: &ocm.ClientMock{ GetClusterFunc: func(clusterID string) (*clustersmgmtv1.Cluster, error) { sb := clustersmgmtv1.NewClusterStatus().State(clustersmgmtv1.ClusterStateReady) return clustersmgmtv1.NewCluster().Status(sb).ExternalID(externalId).Build() }, }, }, args: args{ clusterSpec: spec, }, want: &types.ClusterSpec{ InternalID: internalId, ExternalID: externalId, Status: api.ClusterProvisioned, AdditionalInfo: nil, }, wantErr: false, }, { name: "should return cluster status failed", fields: fields{ ocmClient: &ocm.ClientMock{ GetClusterFunc: func(clusterID string) (*clustersmgmtv1.Cluster, error) { sb := clustersmgmtv1.NewClusterStatus().State(clustersmgmtv1.ClusterStateError).ProvisionErrorMessage(clusterFailedProvisioningErrorText) return clustersmgmtv1.NewCluster().Status(sb).ExternalID(externalId).Build() }, }, }, args: args{ clusterSpec: spec, }, want: &types.ClusterSpec{ InternalID: internalId, ExternalID: externalId, Status: api.ClusterFailed, StatusDetails: clusterFailedProvisioningErrorText, AdditionalInfo: nil, }, wantErr: false, }, { name: "should return error when failed to get cluster from OCM", fields: fields{ ocmClient: &ocm.ClientMock{ GetClusterFunc: func(clusterID string) (*clustersmgmtv1.Cluster, error) { return nil, errors.Errorf("failed to get cluster") }, }, }, args: args{ clusterSpec: spec, }, wantErr: true, want: nil, }, } for _, test := range tests { t.Run(test.name, func(t 
*testing.T) { RegisterTestingT(t) p := newOCMProvider(test.fields.ocmClient, nil, &ocm.OCMConfig{}) resp, err := p.CheckClusterStatus(test.args.clusterSpec) Expect(resp).To(Equal(test.want)) if test.wantErr { Expect(err).NotTo(BeNil()) } }) } }
explode_data.jsonl/4831
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1125 }
[ 2830, 3393, 7612, 44, 5179, 28188, 28678, 2522, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 197, 509, 76, 2959, 297, 6226, 11716, 198, 197, 532, 13158, 2827, 2036, 341, 197, 197, 18855, 8327, 353, 9242, 72883, 8327, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReaderHasNextAgainstEmptyTopic(t *testing.T) { client, err := NewClient(ClientOptions{ URL: lookupURL, }) assert.Nil(t, err) defer client.Close() // create reader on 5th message (not included) reader, err := client.CreateReader(ReaderOptions{ Topic: "an-empty-topic", StartMessageID: EarliestMessageID(), }) assert.Nil(t, err) defer reader.Close() assert.Equal(t, reader.HasNext(), false) }
explode_data.jsonl/6387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 5062, 10281, 5847, 84317, 3522, 26406, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 1532, 2959, 46851, 3798, 515, 197, 79055, 25, 18615, 3144, 345, 197, 8824, 6948, 59678, 1155, 11, 1848, 340, 16867, 2943, 10421, 282...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestErrUnique(t *testing.T) { zdb.RunTest(t, func(t *testing.T, ctx context.Context) { err := zdb.Exec(ctx, `create table t (c varchar); create unique index test on t(c)`) if err != nil { t.Fatal(err) } err = zdb.Exec(ctx, `insert into t values ('a')`) if err != nil { t.Fatal(err) } err = zdb.Exec(ctx, `insert into t values ('a')`) if err == nil { t.Fatal("error is nil") } if !zdb.ErrUnique(err) { t.Fatalf("wrong error: %#v", err) } }) }
explode_data.jsonl/57560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 7747, 22811, 1155, 353, 8840, 836, 8, 341, 20832, 1999, 16708, 2271, 1155, 11, 2915, 1155, 353, 8840, 836, 11, 5635, 2266, 9328, 8, 341, 197, 9859, 1669, 1147, 1999, 30798, 7502, 11, 1565, 3182, 1965, 259, 320, 66, 32958, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDPAReconciler_populateAzureRegistrySecret(t *testing.T) { tests := []struct { name string bsl *velerov1.BackupStorageLocation registrySecret *corev1.Secret azureSecret *corev1.Secret dpa *oadpv1alpha1.DataProtectionApplication wantErr bool }{ { name: "Given Velero CR and bsl instance, appropriate registry secret is updated for azure case", wantErr: false, bsl: &velerov1.BackupStorageLocation{ ObjectMeta: metav1.ObjectMeta{ Name: "test-bsl", Namespace: "test-ns", }, Spec: velerov1.BackupStorageLocationSpec{ Provider: AzureProvider, StorageType: velerov1.StorageType{ ObjectStorage: &velerov1.ObjectStorageLocation{ Bucket: "azure-bucket", }, }, Config: map[string]string{ StorageAccount: "velero-azure-account", ResourceGroup: testResourceGroup, RegistryStorageAzureAccountnameEnvVarKey: "velero-azure-account", "storageAccountKeyEnvVar": "AZURE_STORAGE_ACCOUNT_ACCESS_KEY", }, }, }, dpa: &oadpv1alpha1.DataProtectionApplication{ ObjectMeta: metav1.ObjectMeta{ Name: "Velero-test-CR", Namespace: "test-ns", }, }, azureSecret: &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "cloud-credentials-azure", Namespace: "test-ns", }, Data: secretAzureData, }, registrySecret: &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "oadp-test-bsl-azure-registry-secret", Namespace: "test-ns", }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { fakeClient, err := getFakeClientFromObjects(tt.azureSecret, tt.dpa) if err != nil { t.Errorf("error in creating fake client, likely programmer error") } r := &DPAReconciler{ Client: fakeClient, Scheme: fakeClient.Scheme(), Log: logr.Discard(), Context: newContextForTest(tt.name), NamespacedName: types.NamespacedName{ Namespace: tt.bsl.Namespace, Name: tt.bsl.Name, }, EventRecorder: record.NewFakeRecorder(10), } wantRegistrySecret := &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "oadp-" + tt.bsl.Name + "-" + tt.bsl.Spec.Provider + "-registry-secret", Namespace: r.NamespacedName.Namespace, Labels: 
map[string]string{ oadpv1alpha1.OadpOperatorLabel: "True", }, }, Data: azureRegistrySecretData, } if err := r.populateAzureRegistrySecret(tt.bsl, tt.registrySecret); (err != nil) != tt.wantErr { t.Errorf("populateAWSRegistrySecret() error = %v, wantErr %v", err, tt.wantErr) } if !reflect.DeepEqual(tt.registrySecret.Data, wantRegistrySecret.Data) { t.Errorf("expected bsl labels to be %#v, got %#v", tt.registrySecret, wantRegistrySecret.Data) } }) } }
explode_data.jsonl/45550
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1393 }
[ 2830, 3393, 35, 8041, 693, 40446, 5769, 17061, 6334, 78107, 15603, 19773, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 93801, 75, 310, 353, 889, 261, 859, 16, 8864, 454, 5793, 4707, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEngineAbandonResponse(t *testing.T) { vdr, _, sender, _, te, gBlk := setup(t) sender.Default(false) blk := &Blk{ parent: gBlk, id: GenerateID(), status: choices.Unknown, bytes: []byte{1}, } te.insert(blk) te.QueryFailed(vdr.ID(), 1) if len(te.blocked) != 0 { t.Fatalf("Should have removed blocking event") } }
explode_data.jsonl/3560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 4571, 5830, 11037, 2582, 1155, 353, 8840, 836, 8, 341, 5195, 3612, 11, 8358, 4646, 11, 8358, 1013, 11, 342, 4923, 74, 1669, 6505, 1155, 692, 1903, 1659, 13275, 3576, 692, 197, 34989, 1669, 609, 4923, 74, 515, 197, 24804, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTerragruntInitHookNoSourceNoBackend(t *testing.T) { t.Parallel() cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_NO_BACKEND) tmpEnvPath := copyEnvironment(t, "fixture-hooks/init-once") rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_INIT_ONCE_NO_SOURCE_NO_BACKEND) var ( stdout bytes.Buffer stderr bytes.Buffer ) err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr) output := stderr.String() if err != nil { t.Errorf("Did not expect to get error: %s", err.Error()) } assert.Equal(t, 1, strings.Count(output, "AFTER_INIT_ONLY_ONCE"), "Hooks on init command executed more than once") // With no source, `init-from-module` should not execute assert.NotContains(t, output, "AFTER_INIT_FROM_MODULE_ONLY_ONCE", "Hooks on init-from-module command executed when no source was specified") }
explode_data.jsonl/10058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 51402, 68305, 3850, 3803, 31679, 2753, 3608, 2753, 29699, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1444, 60639, 51, 13886, 627, 13682, 1155, 11, 13602, 42635, 41486, 82251, 50, 14446, 98814, 9100, 25430, 9100, 77...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestYearlyWithDifferentConfig(t *testing.T) { defer ResetConfig() ParseConfig.SetYearlyPattern(`jeden (\d{1,2})\.(\d{1,2})\.?$`) re := parseRe("[] bla (jeden 2.5.)") assert.NotNil(t, re) assert.Equal(t, moment.RecurYearly, re.Recurrence) assert.Equal(t, 2, re.RefDate.Time.Day()) assert.Equal(t, time.May, re.RefDate.Time.Month()) }
explode_data.jsonl/67518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 9490, 398, 2354, 69123, 2648, 1155, 353, 8840, 836, 8, 341, 16867, 16932, 2648, 741, 10025, 2583, 2648, 4202, 9490, 398, 15760, 5809, 73, 14134, 19788, 67, 90, 16, 11, 17, 5410, 18831, 11520, 67, 90, 16, 11, 17, 5410, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetSimulationLog(t *testing.T) { cdc := makeTestCodec() tests := []struct { store string kvPair cmn.KVPair }{ {auth.StoreKey, cmn.KVPair{Key: auth.AddressStoreKey(delAddr1), Value: cdc.MustMarshalBinaryBare(auth.BaseAccount{})}}, {mint.StoreKey, cmn.KVPair{Key: mint.MinterKey, Value: cdc.MustMarshalBinaryLengthPrefixed(mint.Minter{})}}, {staking.StoreKey, cmn.KVPair{Key: staking.LastValidatorPowerKey, Value: valAddr1.Bytes()}}, {gov.StoreKey, cmn.KVPair{Key: gov.VoteKey(1, delAddr1), Value: cdc.MustMarshalBinaryLengthPrefixed(gov.Vote{})}}, {distribution.StoreKey, cmn.KVPair{Key: distr.ProposerKey, Value: consAddr1.Bytes()}}, {slashing.StoreKey, cmn.KVPair{Key: slashing.GetValidatorMissedBlockBitArrayKey(consAddr1, 6), Value: cdc.MustMarshalBinaryLengthPrefixed(true)}}, {supply.StoreKey, cmn.KVPair{Key: supply.SupplyKey, Value: cdc.MustMarshalBinaryLengthPrefixed(supply.NewSupply(sdk.Coins{}))}}, {"Empty", cmn.KVPair{}}, {"OtherStore", cmn.KVPair{Key: []byte("key"), Value: []byte("value")}}, } for _, tt := range tests { t.Run(tt.store, func(t *testing.T) { require.NotPanics(t, func() { GetSimulationLog(tt.store, cdc, cdc, tt.kvPair, tt.kvPair) }, tt.store) }) } }
explode_data.jsonl/13737
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 524 }
[ 2830, 3393, 1949, 64554, 2201, 1155, 353, 8840, 836, 8, 341, 1444, 7628, 1669, 1281, 2271, 36913, 2822, 78216, 1669, 3056, 1235, 341, 197, 57279, 220, 914, 198, 197, 16463, 85, 12443, 9961, 77, 11352, 53, 12443, 198, 197, 59403, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1