text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestCreateRun_ThroughPipelineID(t *testing.T) { store, manager, p := initWithPipeline(t) defer store.Close() experiment := &model.Experiment{Name: "e1"} experiment, err := manager.CreateExperiment(experiment) assert.Nil(t, err) apiRun := &api.Run{ Name: "run1", PipelineSpec: &api.PipelineSpec{ PipelineId: p.UUID, Parameters: []*api.Parameter{ {Name: "param1", Value: "world"}, }, }, ResourceReferences: []*api.ResourceReference{ { Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, Relationship: api.Relationship_OWNER, }, }, } runDetail, err := manager.CreateRun(apiRun) assert.Nil(t, err) expectedRuntimeWorkflow := testWorkflow.DeepCopy() expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ {Name: "param1", Value: util.StringPointer("world")}} expectedRunDetail := &model.RunDetail{ Run: model.Run{ UUID: "workflow1", DisplayName: "run1", Name: "workflow-name", StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), CreatedAtInSec: 3, Conditions: "Running", PipelineSpec: model.PipelineSpec{ PipelineId: p.UUID, WorkflowSpecManifest: testWorkflow.ToStringForStore(), Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", }, ResourceReferences: []*model.ResourceReference{ { ResourceUUID: "workflow1", ResourceType: common.Run, ReferenceUUID: experiment.UUID, ReferenceType: common.Experiment, Relationship: common.Owner, }, }, }, PipelineRuntime: model.PipelineRuntime{ WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), }, } assert.Equal(t, expectedRunDetail, runDetail, "The CreateRun return has unexpected value.") assert.Equal(t, 1, store.workflowClientFake.GetWorkflowCount(), "Workflow CRD is not created.") runDetail, err = manager.GetRun(runDetail.UUID) assert.Nil(t, err) assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database") }
explode_data.jsonl/28355
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 852 }
[ 2830, 3393, 4021, 6727, 62, 23857, 34656, 915, 1155, 353, 8840, 836, 8, 341, 57279, 11, 6645, 11, 281, 1669, 13864, 34656, 1155, 340, 16867, 3553, 10421, 741, 8122, 14329, 1669, 609, 2528, 5121, 14329, 63121, 25, 330, 68, 16, 16707, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteAddrPubKeyRelation(t *testing.T) { stakedValidator := getStakedValidator() type args struct { validator types.Validator } type expected struct { validator types.Validator set bool message string } tests := []struct { name string args expected }{ { name: "delete a PubKeyRelation", args: args{validator: stakedValidator}, expected: expected{ validator: stakedValidator, set: true, message: fmt.Sprintf("address %s not found", sdk.Address(stakedValidator.GetPublicKey().Address())), }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { context, _, keeper := createTestInput(t, true) keeper.setAddrPubkeyRelation(context, test.args.validator.GetPublicKey().Address(), test.args.validator.GetPublicKey()) keeper.deleteAddrPubkeyRelation(context, test.args.validator.GetPublicKey().Address()) _, err := keeper.getPubKeyRelation(context, test.args.validator.GetPublicKey().Address()) if err != nil { assert.Equal(t, test.expected.message, fmt.Sprintf("%s", err), "error message doe sno tmatch expected$") } }) } }
explode_data.jsonl/9978
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 437 }
[ 2830, 3393, 6435, 13986, 29162, 1592, 33790, 1155, 353, 8840, 836, 8, 341, 18388, 7741, 14256, 1669, 633, 623, 7741, 14256, 2822, 13158, 2827, 2036, 341, 197, 197, 16112, 4494, 13, 14256, 198, 197, 532, 13158, 3601, 2036, 341, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_goTag_Handle(t *testing.T) { f := &FieldDescriptor{} NewGoTag(`db:"b" json:"a"`).Handle(f) test.Assert(t, f.Alias == "a") }
explode_data.jsonl/32999
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 65 }
[ 2830, 3393, 25515, 5668, 42714, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 609, 1877, 11709, 16094, 197, 3564, 10850, 5668, 5809, 1999, 2974, 65, 1, 2951, 2974, 64, 39917, 568, 6999, 955, 340, 18185, 11711, 1155, 11, 282, 875, 71829, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDeepCopyStructs(t *testing.T) { structs := []interface{}{ &Empty{}, &BuiltInTypes{}, &PtrToBuiltInTypes{}, &SliceOfBuiltInTypes{}, &SliceOfPtrToBuiltInTypes{}, &ArrayOfBuiltInTypes{}, &ArrayOfPtrToBuiltInTypes{}, &MapsOfBuiltInTypes{}, &MapsOfSimplerBuiltInTypes{}, &SliceToSlice{}, &PtrTo{}, &Structs{}, &MapWithStructs{}, &RecursiveType{}, &EmbeddedStruct1{}, &EmbeddedStruct2{}, &UnnamedStruct{}, &StructWithStructFieldWithoutEqualMethod{}, &StructWithStructWithFromAnotherPackage{}, &FieldWithStructWithPrivateFields{}, &Enums{}, &NamedTypes{}, &Duration{}, &Nickname{}, &PrivateEmbedded{}, &StructOfStructs{}, } for _, this := range structs { desc := reflect.TypeOf(this).Elem().Name() t.Run(desc, func(t *testing.T) { for i := 0; i < 100; i++ { this = random(this) for reflect.ValueOf(this).IsNil() { this = random(this) } that := random(this) for reflect.ValueOf(that).IsNil() { that = random(that) } deepcopy(this, that) if want, got := true, reflect.DeepEqual(this, that); want != got { t.Fatalf("want %v got %v\n this = %#v, that = %#v\n", want, got, this, that) } } }) } }
explode_data.jsonl/70019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 548 }
[ 2830, 3393, 33464, 12106, 9422, 82, 1155, 353, 8840, 836, 8, 341, 6472, 82, 1669, 3056, 4970, 67066, 197, 197, 5, 3522, 38837, 197, 197, 48239, 11227, 641, 4173, 38837, 197, 197, 5, 5348, 1249, 54300, 641, 4173, 38837, 197, 197, 5, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIsPurgeable(t *testing.T) { type args struct { candidate time.Time before time.Duration since time.Duration } tests := []struct { name string args args want bool }{ { "Purgeable if both before and since are not set", args{time.Now(), 0, 0}, true, }, { "Both before and since aren't supported at a time", args{time.Now(), 1 * time.Minute, 1 * time.Minute}, false, }, { "Purgeable candidate before", args{time.Now().Add(-10 * time.Hour), 9 * time.Hour, 0}, true, }, { "non-Purgeable candidate before", args{time.Now().Add(-10 * time.Hour), 11 * time.Hour, 0}, false, }, { "Purgeable candidate since", args{time.Now().Add(-10 * time.Hour), 0, 11 * time.Hour}, true, }, { "non-Purgeable candidate since", args{time.Now().Add(-10 * time.Hour), 0, 9 * time.Hour}, false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := IsPurgeable(tt.args.candidate, tt.args.before, tt.args.since) if got != tt.want { t.Errorf("IsPurgeable() got = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/76249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 507 }
[ 2830, 3393, 3872, 47, 39823, 480, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1444, 17050, 882, 16299, 198, 197, 63234, 262, 882, 33795, 198, 197, 1903, 1701, 257, 882, 33795, 198, 197, 532, 78216, 1669, 3056, 1235, 341...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAgent_RegisterCheck_BadStatus(t *testing.T) { t.Parallel() a := NewTestAgent(t.Name(), "") defer a.Shutdown() args := &structs.CheckDefinition{ Name: "test", TTL: 15 * time.Second, Status: "fluffy", } req, _ := http.NewRequest("PUT", "/v1/agent/check/register", jsonReader(args)) resp := httptest.NewRecorder() if _, err := a.srv.AgentRegisterCheck(resp, req); err != nil { t.Fatalf("err: %v", err) } if resp.Code != 400 { t.Fatalf("accepted bad status") } }
explode_data.jsonl/33613
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 16810, 73124, 3973, 1668, 329, 2522, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 11323, 1669, 1532, 2271, 16810, 1155, 2967, 1507, 14676, 16867, 264, 10849, 18452, 2822, 31215, 1669, 609, 1235, 82, 10600, 10398, 515,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSetImageMetadata(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() MockSetImageMetadataResponse(t) options := &volumeactions.ImageMetadataOpts{ Metadata: map[string]string{ "label": "test", }, } err := volumeactions.SetImageMetadata(client.ServiceClient(), "cd281d77-8217-4830-be95-9528227c105c", options).ExtractErr() th.AssertNoErr(t, err) }
explode_data.jsonl/20634
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 1649, 1906, 14610, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 2822, 9209, 1176, 1649, 1906, 14610, 2582, 1155, 692, 35500, 1669, 609, 25060, 4020, 7528, 14610, 43451, 515, 197, 9209,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAPINewMultipartHandler(t *testing.T) { defer DetectTestLeak(t)() ExecObjectLayerAPITest(t, testAPINewMultipartHandler, []string{"NewMultipart"}) }
explode_data.jsonl/10703
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 60 }
[ 2830, 3393, 2537, 687, 365, 44, 18204, 3050, 1155, 353, 8840, 836, 8, 341, 16867, 33287, 2271, 2304, 585, 1155, 8, 741, 197, 10216, 1190, 9188, 2537, 952, 477, 1155, 11, 1273, 2537, 687, 365, 44, 18204, 3050, 11, 3056, 917, 4913, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestStart_ExecuteInbound(t *testing.T) { followup, _, err := (&start{}).ExecuteInbound(nil, &metaData{}) require.EqualError(t, err, "start: ExecuteInbound function is not supposed to be used") require.Nil(t, followup) }
explode_data.jsonl/66238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 3479, 83453, 641, 10891, 1155, 353, 8840, 836, 8, 341, 1166, 1544, 454, 11, 8358, 1848, 1669, 15899, 2468, 6257, 568, 17174, 641, 10891, 27907, 11, 609, 5490, 1043, 37790, 17957, 12808, 1454, 1155, 11, 1848, 11, 330, 2468, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestExportStructValue(t *testing.T) { t.Parallel() script := ` access(all) struct Foo { access(all) let bar: Int init(bar: Int) { self.bar = bar } } access(all) fun main(): Foo { return Foo(bar: 42) } ` actual := exportValueFromScript(t, script) expected := cadence.NewStruct([]cadence.Value{cadence.NewInt(42)}).WithType(fooStructType) assert.Equal(t, expected, actual) }
explode_data.jsonl/4625
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 16894, 9422, 1130, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 86956, 1669, 22074, 286, 2615, 20388, 8, 2036, 33428, 341, 310, 2615, 20388, 8, 1077, 3619, 25, 1333, 271, 310, 2930, 54630, 25, 1333, 8, 341, 394,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSumPerKeyWithPartitionsReturnsNonNegativeFloat64(t *testing.T) { // We have two test cases, one for public partitions as a PCollection and one for public partitions as a slice (i.e., in-memory). for _, tc := range []struct { inMemory bool }{ {true}, {false}, } { var triples []testutils.TripleWithFloatValue for key := 0; key < 100; key++ { triples = append(triples, testutils.TripleWithFloatValue{key, key, 0.01}) } var publicPartitionsSlice []int for p := 0; p < 200; p++ { publicPartitionsSlice = append(publicPartitionsSlice, p) } p, s, col := ptest.CreateList(triples) col = beam.ParDo(s, testutils.ExtractIDFromTripleWithFloatValue, col) var publicPartitions interface{} if tc.inMemory { publicPartitions = publicPartitionsSlice } else { publicPartitions = beam.CreateList(s, publicPartitionsSlice) } // Using a low epsilon, a high delta, and a high maxValue. epsilon, delta, maxValue := 0.001, 0.999, 1e8 pcol := MakePrivate(s, col, NewPrivacySpec(epsilon, delta)) pcol = ParDo(s, testutils.TripleWithFloatValueToKV, pcol) sumParams := SumParams{MinValue: 0, MaxValue: maxValue, MaxPartitionsContributed: 1, NoiseKind: GaussianNoise{}, PublicPartitions: publicPartitions} sums := SumPerKey(s, pcol, sumParams) values := beam.DropKey(s, sums) beam.ParDo0(s, testutils.CheckNoNegativeValuesFloat64Fn, values) if err := ptest.Run(p); err != nil { t.Errorf("TestSumPerKeyWithPartitionsReturnsNonNegativeFloat64 in-memory=%t returned errors: %v", tc.inMemory, err) } } }
explode_data.jsonl/42976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 580 }
[ 2830, 3393, 9190, 3889, 1592, 2354, 5800, 5930, 16446, 8121, 38489, 5442, 21, 19, 1155, 353, 8840, 836, 8, 341, 197, 322, 1205, 614, 1378, 1273, 5048, 11, 825, 369, 584, 46688, 438, 264, 393, 6482, 323, 825, 369, 584, 46688, 438, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestExtendErrorLogger(t *testing.T) { DefaultCreateErrorLoggerFunc = NewMockLogger defer func() { DefaultCreateErrorLoggerFunc = CreateDefaultErrorLogger }() logName := "/tmp/mosn/test_mock_log.log" os.Remove(logName) // reset for test errorLoggerManagerInstance.managers = make(map[string]log.ErrorLogger) log.ClearAll() if err := InitDefaultLogger(logName, INFO); err != nil { t.Fatal(err) } DefaultLogger.Infof("test_%d", 123) // [mocked] [INFO] [] test_123 Proxy.Infof(context.Background(), "test_%d", 123) // [mocked] [INFO] [] [connId,traceId] test_123 time.Sleep(time.Second) lines, err := readLines(logName) if err != nil { t.Fatal(err) } if len(lines) != 2 { t.Fatalf("logger write lines not expected, writes: %d, expected: %d", len(lines), 2) } for _, l := range lines { qs := strings.SplitN(l, " ", 4) if !(len(qs) == 4 && qs[0] == "[mocked]" && qs[1] == "[INFO]" && qs[2] == "[]" && strings.Contains(qs[3], "test_123")) { t.Fatalf("log output is unexpected: %s", l) } } ToggleLogger(logName, true) DefaultLogger.Infof("test_%d", 123) Proxy.Infof(context.Background(), "test_%d", 123) if lines, err := readLines(logName); err != nil || len(lines) != 2 { t.Fatal("disable proxy logger failed") } }
explode_data.jsonl/61741
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 534 }
[ 2830, 3393, 72136, 1454, 7395, 1155, 353, 8840, 836, 8, 341, 91084, 4021, 1454, 7395, 9626, 284, 1532, 11571, 7395, 198, 16867, 2915, 368, 341, 197, 91084, 4021, 1454, 7395, 9626, 284, 4230, 3675, 1454, 7395, 198, 197, 69826, 6725, 675,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPersistentVolumeDescriber(t *testing.T) { block := api.PersistentVolumeBlock file := api.PersistentVolumeFilesystem testCases := []struct { plugin string pv *api.PersistentVolume expectedElements []string unexpectedElements []string }{ { plugin: "hostpath", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ HostPath: &api.HostPathVolumeSource{Type: new(api.HostPathType)}, }, }, }, unexpectedElements: []string{"VolumeMode", "Filesystem"}, }, { plugin: "gce", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ GCEPersistentDisk: &api.GCEPersistentDiskVolumeSource{}, }, VolumeMode: &file, }, }, expectedElements: []string{"VolumeMode", "Filesystem"}, }, { plugin: "ebs", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ AWSElasticBlockStore: &api.AWSElasticBlockStoreVolumeSource{}, }, }, }, unexpectedElements: []string{"VolumeMode", "Filesystem"}, }, { plugin: "nfs", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ NFS: &api.NFSVolumeSource{}, }, }, }, unexpectedElements: []string{"VolumeMode", "Filesystem"}, }, { plugin: "iscsi", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ ISCSI: &api.ISCSIPersistentVolumeSource{}, }, VolumeMode: &block, }, }, expectedElements: []string{"VolumeMode", "Block"}, }, { plugin: "gluster", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ Glusterfs: &api.GlusterfsVolumeSource{}, }, }, }, unexpectedElements: 
[]string{"VolumeMode", "Filesystem"}, }, { plugin: "rbd", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ RBD: &api.RBDPersistentVolumeSource{}, }, }, }, unexpectedElements: []string{"VolumeMode", "Filesystem"}, }, { plugin: "quobyte", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ Quobyte: &api.QuobyteVolumeSource{}, }, }, }, unexpectedElements: []string{"VolumeMode", "Filesystem"}, }, { plugin: "cinder", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ Cinder: &api.CinderVolumeSource{}, }, }, }, unexpectedElements: []string{"VolumeMode", "Filesystem"}, }, { plugin: "fc", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ FC: &api.FCVolumeSource{}, }, VolumeMode: &block, }, }, expectedElements: []string{"VolumeMode", "Block"}, }, { plugin: "local", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ Local: &api.LocalVolumeSource{}, }, }, }, expectedElements: []string{"Node Affinity: <none>"}, unexpectedElements: []string{"Required Terms", "Term "}, }, { plugin: "local", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ Local: &api.LocalVolumeSource{}, }, NodeAffinity: &api.VolumeNodeAffinity{}, }, }, expectedElements: []string{"Node Affinity: <none>"}, unexpectedElements: []string{"Required Terms", "Term "}, }, { plugin: "local", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ 
PersistentVolumeSource: api.PersistentVolumeSource{ Local: &api.LocalVolumeSource{}, }, NodeAffinity: &api.VolumeNodeAffinity{ Required: &api.NodeSelector{}, }, }, }, expectedElements: []string{"Node Affinity", "Required Terms: <none>"}, unexpectedElements: []string{"Term "}, }, { plugin: "local", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ Local: &api.LocalVolumeSource{}, }, NodeAffinity: &api.VolumeNodeAffinity{ Required: &api.NodeSelector{ NodeSelectorTerms: []api.NodeSelectorTerm{ { MatchExpressions: []api.NodeSelectorRequirement{}, }, { MatchExpressions: []api.NodeSelectorRequirement{}, }, }, }, }, }, }, expectedElements: []string{"Node Affinity", "Required Terms", "Term 0", "Term 1"}, }, { plugin: "local", pv: &api.PersistentVolume{ ObjectMeta: metav1.ObjectMeta{Name: "bar"}, Spec: api.PersistentVolumeSpec{ PersistentVolumeSource: api.PersistentVolumeSource{ Local: &api.LocalVolumeSource{}, }, NodeAffinity: &api.VolumeNodeAffinity{ Required: &api.NodeSelector{ NodeSelectorTerms: []api.NodeSelectorTerm{ { MatchExpressions: []api.NodeSelectorRequirement{ { Key: "foo", Operator: "In", Values: []string{"val1", "val2"}, }, { Key: "foo", Operator: "Exists", }, }, }, }, }, }, }, }, expectedElements: []string{"Node Affinity", "Required Terms", "Term 0", "foo in [val1, val2]", "foo exists"}, }, } for _, test := range testCases { fake := fake.NewSimpleClientset(test.pv) c := PersistentVolumeDescriber{fake} str, err := c.Describe("foo", "bar", printers.DescriberSettings{ShowEvents: true}) if err != nil { t.Errorf("Unexpected error for test %s: %v", test.plugin, err) } if str == "" { t.Errorf("Unexpected empty string for test %s. 
Expected PV Describer output", test.plugin) } for _, expected := range test.expectedElements { if !strings.Contains(str, expected) { t.Errorf("expected to find %q in output: %q", expected, str) } } for _, unexpected := range test.unexpectedElements { if strings.Contains(str, unexpected) { t.Errorf("unexpected to find %q in output: %q", unexpected, str) } } } }
explode_data.jsonl/34938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3172 }
[ 2830, 3393, 53194, 18902, 62664, 652, 1155, 353, 8840, 836, 8, 341, 47996, 1669, 6330, 61655, 18902, 4713, 198, 17661, 1669, 6330, 61655, 18902, 1703, 8948, 198, 18185, 37302, 1669, 3056, 1235, 341, 197, 197, 9138, 1797, 914, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestFormatStore_Query(t *testing.T) { t.Run("Fail to format tags", func(t *testing.T) { provider := formattedstore.NewProvider(mem.NewProvider(), &mockFormatter{errFormat: errors.New("tags formatting failure"), useDeterministicKeyFormatting: true}) require.NotNil(t, provider) store, err := provider.OpenStore("StoreName") require.NoError(t, err) require.NotNil(t, store) t.Run("Tag name only query", func(t *testing.T) { iterator, err := store.Query("TagName1") require.EqualError(t, err, `failed to format tag name "TagName1": tags formatting failure`) require.Empty(t, iterator) }) t.Run("Tag name and value query", func(t *testing.T) { iterator, err := store.Query("TagName1:TagValue1") require.EqualError(t, err, `failed to format tag: tags formatting failure`) require.Empty(t, iterator) }) }) t.Run("Fail to query underlying store", func(t *testing.T) { provider := formattedstore.NewProvider(&mock.Provider{ OpenStoreReturn: &mock.Store{ ErrQuery: errors.New("query failure"), }, }, &exampleformatters.NoOpFormatter{}) require.NotNil(t, provider) store, err := provider.OpenStore("StoreName") require.NoError(t, err) require.NotNil(t, store) t.Run("Tag name only query", func(t *testing.T) { iterator, err := store.Query("TagName1") require.EqualError(t, err, `failed to query underlying store: query failure`) require.Empty(t, iterator) }) t.Run("Tag name and value query", func(t *testing.T) { iterator, err := store.Query("TagName1:TagValue1") require.EqualError(t, err, `failed to query underlying store: query failure`) require.Empty(t, iterator) }) }) }
explode_data.jsonl/28243
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 615 }
[ 2830, 3393, 4061, 6093, 48042, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 19524, 311, 3561, 9492, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 19979, 1669, 23126, 4314, 7121, 5179, 39908, 7121, 5179, 3148, 298, 197, 5, 1671...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_login(t *testing.T) { // given handler := http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { assert.Equal(t, TokenPath, req.URL.Path) assert.Equal(t, req.Method, http.MethodPost) response := v1alpha1.TokenResponse{ AccessToken: "dummy", } json, err := jsoniter.Marshal(response) assert.NoError(t, err) size, err := w.Write(json) assert.NoError(t, err) assert.Equal(t, size, len(json)) w.WriteHeader(204) }) server := httptest.NewServer(handler) defer server.Close() client := Client{ requester: server.Client(), URL: server.URL, token: "not set", } // when err := client.login("dummy", "dummy") // then // token must be set on the client now assert.NoError(t, err) assert.Equal(t, client.token, "dummy") }
explode_data.jsonl/49878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 321 }
[ 2830, 3393, 2959, 13681, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 53326, 1669, 1758, 89164, 18552, 3622, 1758, 37508, 11, 4232, 353, 1254, 9659, 8, 341, 197, 6948, 12808, 1155, 11, 9660, 1820, 11, 4232, 20893, 17474, 340, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestErrorContextWithTreeCopy(t *testing.T) { tree, err := New("root").Parse("{{if true}}{{end}}", "", "", make(map[string]*Tree), nil) if err != nil { t.Fatalf("unexpected tree parse failure: %v", err) } treeCopy := tree.Copy() wantLocation, wantContext := tree.ErrorContext(tree.Root.Nodes[0]) gotLocation, gotContext := treeCopy.ErrorContext(treeCopy.Root.Nodes[0]) if wantLocation != gotLocation { t.Errorf("wrong error location want %q got %q", wantLocation, gotLocation) } if wantContext != gotContext { t.Errorf("wrong error location want %q got %q", wantContext, gotContext) } }
explode_data.jsonl/68973
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 1454, 1972, 2354, 6533, 12106, 1155, 353, 8840, 836, 8, 341, 51968, 11, 1848, 1669, 1532, 445, 2888, 1827, 14463, 445, 2979, 333, 830, 77286, 408, 3417, 497, 7342, 7342, 1281, 9147, 14032, 8465, 6533, 701, 2092, 340, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetExchangeLowestPriceByCurrencyPair(t *testing.T) { CreateTestBot(t) p, err := currency.NewPairFromStrings("BTC", "USD") if err != nil { t.Fatal(err) } err = stats.Add("Bitfinex", p, asset.Spot, 1000, 10000) if err != nil { t.Error(err) } err = stats.Add(testExchange, p, asset.Spot, 1337, 10000) if err != nil { t.Error(err) } exchangeName, err := GetExchangeLowestPriceByCurrencyPair(p, asset.Spot) if err != nil { t.Error(err) } if exchangeName != "Bitfinex" { t.Error("Unexpected result") } btcaud, err := currency.NewPairFromStrings("BTC", "AUD") if err != nil { t.Fatal(err) } _, err = GetExchangeLowestPriceByCurrencyPair(btcaud, asset.Spot) if err == nil { t.Error("Unexpected reuslt") } }
explode_data.jsonl/59239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 316 }
[ 2830, 3393, 1949, 31564, 24187, 477, 6972, 1359, 26321, 12443, 1155, 353, 8840, 836, 8, 341, 75569, 2271, 23502, 1155, 692, 3223, 11, 1848, 1669, 11413, 7121, 12443, 3830, 20859, 445, 59118, 497, 330, 26749, 1138, 743, 1848, 961, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestCompiler_compilePick exercises the compiler's Pick operation (duplicate
// the value at the given depth onto the top of the stack) for four pick-target
// placements: float/int value held on a register vs. spilled on the stack.
func TestCompiler_compilePick(t *testing.T) {
	const pickTargetValue uint64 = 12345
	// Depth 1: pick the value one slot below the top.
	op := &wazeroir.OperationPick{Depth: 1}
	tests := []struct {
		name                string
		pickTargetSetupFunc func(compiler compilerImpl, ce *callEngine) error
		isPickTargetFloat, isPickTargetOnRegister bool
	}{
		{
			name: "float on register",
			pickTargetSetupFunc: func(compiler compilerImpl, _ *callEngine) error {
				return compiler.compileConstF64(&wazeroir.OperationConstF64{Value: math.Float64frombits(pickTargetValue)})
			},
			isPickTargetFloat:      true,
			isPickTargetOnRegister: true,
		},
		{
			name: "int on register",
			pickTargetSetupFunc: func(compiler compilerImpl, _ *callEngine) error {
				return compiler.compileConstI64(&wazeroir.OperationConstI64{Value: pickTargetValue})
			},
			isPickTargetFloat:      false,
			isPickTargetOnRegister: true,
		},
		{
			name: "float on stack",
			// Place the raw value directly into the value stack rather than a register.
			pickTargetSetupFunc: func(compiler compilerImpl, ce *callEngine) error {
				pickTargetLocation := compiler.runtimeValueLocationStack().pushRuntimeValueLocationOnStack()
				pickTargetLocation.valueType = runtimeValueTypeF64
				ce.valueStack[pickTargetLocation.stackPointer] = pickTargetValue
				return nil
			},
			isPickTargetFloat:      true,
			isPickTargetOnRegister: false,
		},
		{
			name: "int on stack",
			pickTargetSetupFunc: func(compiler compilerImpl, ce *callEngine) error {
				pickTargetLocation := compiler.runtimeValueLocationStack().pushRuntimeValueLocationOnStack()
				pickTargetLocation.valueType = runtimeValueTypeI64
				ce.valueStack[pickTargetLocation.stackPointer] = pickTargetValue
				return nil
			},
			isPickTargetFloat:      false,
			isPickTargetOnRegister: false,
		},
	}
	for _, tt := range tests {
		tc := tt
		t.Run(tc.name, func(t *testing.T) {
			env := newCompilerEnvironment()
			compiler := env.requireNewCompiler(t, newCompiler, nil)
			err := compiler.compilePreamble()
			require.NoError(t, err)
			// Set up the stack before picking.
			err = tc.pickTargetSetupFunc(compiler, env.callEngine())
			require.NoError(t, err)
			pickTargetLocation := compiler.runtimeValueLocationStack().peek()
			// Push the unused median value.
			_ = compiler.runtimeValueLocationStack().pushRuntimeValueLocationOnStack()
			require.Equal(t, uint64(2), compiler.runtimeValueLocationStack().sp)
			// Now ready to compile Pick operation.
			err = compiler.compilePick(op)
			require.NoError(t, err)
			// Pick pushes one new slot: sp goes 2 -> 3.
			require.Equal(t, uint64(3), compiler.runtimeValueLocationStack().sp)
			pickedLocation := compiler.runtimeValueLocationStack().peek()
			require.True(t, pickedLocation.onRegister())
			require.Equal(t, pickTargetLocation.getRegisterType(), pickedLocation.getRegisterType())
			err = compiler.compileReturnFunction()
			require.NoError(t, err)
			// Compile and execute the code under test.
			code, _, _, err := compiler.compile()
			require.NoError(t, err)
			env.exec(code)
			// Check the returned status and stack pointer.
			require.Equal(t, nativeCallStatusCodeReturned, env.compilerStatus())
			require.Equal(t, uint64(3), env.stackPointer())
			// Verify the top value is the picked one and the pick target's value stays the same.
			if tc.isPickTargetFloat {
				require.Equal(t, math.Float64frombits(pickTargetValue), env.stackTopAsFloat64())
				require.Equal(t, math.Float64frombits(pickTargetValue), math.Float64frombits(env.stack()[pickTargetLocation.stackPointer]))
			} else {
				require.Equal(t, pickTargetValue, env.stackTopAsUint64())
				require.Equal(t, pickTargetValue, env.stack()[pickTargetLocation.stackPointer])
			}
		})
	}
}
explode_data.jsonl/13225
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1332 }
[ 2830, 3393, 38406, 74170, 36953, 1155, 353, 8840, 836, 8, 341, 4777, 3735, 6397, 1130, 2622, 21, 19, 284, 220, 16, 17, 18, 19, 20, 198, 39703, 1669, 609, 86, 1370, 2328, 404, 56297, 36953, 90, 19776, 25, 220, 16, 532, 78216, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEvictionInUnexpectedOrder drives the coherent cache through an
// out-of-order mix of view selections (selectOrCreateRoot) and new-block
// notifications (advanceRoot), asserting after each step the root count, the
// latest canonical view ID, and which entries land on the eviction list.
// Only additions made against the latest canonical view should be tracked
// for eviction.
func TestEvictionInUnexpectedOrder(t *testing.T) {
	// Order: View - 2, OnNewBlock - 2, View - 5, View - 6, OnNewBlock - 3, OnNewBlock - 4, View - 5, OnNewBlock - 5, OnNewBlock - 100
	require := require.New(t)
	cfg := DefaultCoherentConfig
	cfg.KeysLimit = 3
	cfg.NewBlockWait = 0
	c := New(cfg)
	// A merely selected (not advanced) root is non-canonical.
	c.selectOrCreateRoot(2)
	require.Equal(1, len(c.roots))
	require.Equal(0, int(c.latestViewID))
	require.False(c.roots[2].isCanonical)
	c.add([]byte{1}, nil, c.roots[2], 2)
	require.Equal(0, c.evictList.Len())
	// advanceRoot makes view 2 canonical; now additions to it are evictable.
	c.advanceRoot(2)
	require.Equal(1, len(c.roots))
	require.Equal(2, int(c.latestViewID))
	require.True(c.roots[2].isCanonical)
	c.add([]byte{1}, nil, c.roots[2], 2)
	require.Equal(1, c.evictList.Len())
	c.selectOrCreateRoot(5)
	require.Equal(2, len(c.roots))
	require.Equal(2, int(c.latestViewID))
	require.False(c.roots[5].isCanonical)
	c.add([]byte{2}, nil, c.roots[5], 5) // not added to evict list
	require.Equal(1, c.evictList.Len())
	c.add([]byte{2}, nil, c.roots[2], 2) // added to evict list, because it's latest view
	require.Equal(2, c.evictList.Len())
	c.selectOrCreateRoot(6)
	require.Equal(3, len(c.roots))
	require.Equal(2, int(c.latestViewID))
	require.False(c.roots[6].isCanonical) // parent exists, but parent has isCanonical=false
	c.advanceRoot(3)
	require.Equal(4, len(c.roots))
	require.Equal(3, int(c.latestViewID))
	require.True(c.roots[3].isCanonical)
	c.advanceRoot(4)
	require.Equal(5, len(c.roots))
	require.Equal(4, int(c.latestViewID))
	require.True(c.roots[4].isCanonical)
	// Re-selecting 5 after later canonical blocks leaves it non-canonical...
	c.selectOrCreateRoot(5)
	require.Equal(5, len(c.roots))
	require.Equal(4, int(c.latestViewID))
	require.False(c.roots[5].isCanonical)
	// ...until it is explicitly advanced.
	c.advanceRoot(5)
	require.Equal(5, len(c.roots))
	require.Equal(5, int(c.latestViewID))
	require.True(c.roots[5].isCanonical)
	// A large jump in block number still becomes the canonical latest view.
	c.advanceRoot(100)
	require.Equal(6, len(c.roots))
	require.Equal(100, int(c.latestViewID))
	require.True(c.roots[100].isCanonical)
	//c.add([]byte{1}, nil, c.roots[2], 2)
	require.Equal(0, c.latestView.cache.Len())
	require.Equal(0, c.evictList.Len())
}
explode_data.jsonl/11838
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 886 }
[ 2830, 3393, 34112, 2479, 641, 29430, 4431, 1155, 353, 8840, 836, 8, 341, 197, 322, 7217, 25, 2738, 481, 220, 17, 11, 1913, 3564, 4713, 481, 220, 17, 11, 2738, 481, 220, 20, 11, 2738, 481, 220, 21, 11, 1913, 3564, 4713, 481, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetEvent deploys the "eventsender" example chaincode against a local
// test peer, invokes it, and verifies the invoke produces a chaincode event
// carrying the expected ID and payload ("i,am,satoshi").
func TestGetEvent(t *testing.T) {
	chainID := util.GetTestChainID()
	lis, err := initPeer(chainID)
	if err != nil {
		t.Fail()
		t.Logf("Error creating peer: %s", err)
	}
	defer finitPeer(lis, chainID)
	var ctxt = context.Background()
	url := "github.com/hyperledger/fabric/examples/chaincode/go/eventsender"
	cID := &pb.ChaincodeID{Name: "esender", Path: url, Version: "0"}
	f := "init"
	spec := &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: util.ToChaincodeArgs(f)}}
	cccid := ccprovider.NewCCContext(chainID, "esender", "0", "", false, nil, nil)
	var nextBlockNumber uint64
	_, err = deploy(ctxt, cccid, spec, nextBlockNumber)
	nextBlockNumber++
	chaincodeID := spec.ChaincodeId.Name
	if err != nil {
		t.Fail()
		t.Logf("Error initializing chaincode %s(%s)", chaincodeID, err)
		// Stop the chaincode before bailing out so the peer isn't left running it.
		theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
		return
	}
	// NOTE(review): fixed sleep presumably lets deployment settle — a flakiness
	// risk; confirm whether a readiness signal exists instead.
	time.Sleep(time.Second)
	args := util.ToChaincodeArgs("invoke", "i", "am", "satoshi")
	spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: args}}
	var ccevt *pb.ChaincodeEvent
	ccevt, _, _, err = invoke(ctxt, chainID, spec, nextBlockNumber)
	if err != nil {
		t.Logf("Error invoking chaincode %s(%s)", chaincodeID, err)
		t.Fail()
	}
	if ccevt == nil {
		t.Logf("Error ccevt is nil %s(%s)", chaincodeID, err)
		t.Fail()
	}
	if ccevt.ChaincodeId != chaincodeID {
		t.Logf("Error ccevt id(%s) != cid(%s)", ccevt.ChaincodeId, chaincodeID)
		t.Fail()
	}
	// The event payload should contain the comma-joined invoke arguments.
	if strings.Index(string(ccevt.Payload), "i,am,satoshi") < 0 {
		t.Logf("Error expected event not found (%s)", string(ccevt.Payload))
		t.Fail()
	}
	theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
}
explode_data.jsonl/52839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 705 }
[ 2830, 3393, 1949, 1556, 1155, 353, 8840, 836, 8, 341, 197, 8819, 915, 1669, 4094, 2234, 2271, 18837, 915, 2822, 8810, 285, 11, 1848, 1669, 2930, 30888, 62591, 915, 340, 743, 1848, 961, 2092, 341, 197, 3244, 57243, 741, 197, 3244, 9895...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestValuer(t *testing.T) { name := randName() origUser := User{Name: name, Age: 1, Password: EncryptedData("pass1"), PasswordHash: []byte("abc")} if err := DB.Save(&origUser).Error; err != nil { t.Errorf("No error should happen when saving user, but got %v", err) } var user2 User if err := DB.Where("name = ? AND password = ? AND password_hash = ?", name, EncryptedData("pass1"), []byte("abc")).First(&user2).Error; err != nil { t.Errorf("No error should happen when querying user with valuer, but got %v", err) } }
explode_data.jsonl/34901
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 2208, 8801, 1155, 353, 8840, 836, 8, 341, 11609, 1669, 10382, 675, 2822, 197, 4670, 1474, 1669, 2657, 63121, 25, 829, 11, 13081, 25, 220, 16, 11, 12362, 25, 10751, 14026, 1043, 445, 6385, 16, 3975, 12362, 6370, 25, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNegativeCount(t *testing.T) { err := Connect("./db/mstat.db") if err != nil { t.Fatal(err) } Count("stat2", "1PTM", -1) m, err := GetCounter("stat2") if err != nil { t.Fatal(err) } else { if m["1PTM"] != 0 { t.Fatal("Count should be 0, but received: ", m["1PTM"]) } } Disconnect() }
explode_data.jsonl/1821
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 38489, 2507, 1155, 353, 8840, 836, 8, 1476, 9859, 1669, 13015, 13988, 1999, 3183, 9878, 7076, 5130, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 197, 2507, 445, 9878, 17, 497, 330, 16, 2828, 44, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestABIChecking(t *testing.T) { goCmd(t, "install", "-buildmode=shared", "-linkshared", "dep") goCmd(t, "install", "-linkshared", "exe") // If we make an ABI-breaking change to dep and rebuild libp.so but not exe, // exe will abort with a complaint on startup. // This assumes adding an exported function breaks ABI, which is not true in // some senses but suffices for the narrow definition of ABI compatiblity the // toolchain uses today. appendFile("src/dep/dep.go", "func ABIBreak() {}\n") goCmd(t, "install", "-buildmode=shared", "-linkshared", "dep") c := exec.Command("./bin/exe") output, err := c.CombinedOutput() if err == nil { t.Fatal("executing exe did not fail after ABI break") } scanner := bufio.NewScanner(bytes.NewReader(output)) foundMsg := false const wantLine = "abi mismatch detected between the executable and libdep.so" for scanner.Scan() { if scanner.Text() == wantLine { foundMsg = true break } } if err = scanner.Err(); err != nil { t.Errorf("scanner encountered error: %v", err) } if !foundMsg { t.Fatalf("exe failed, but without line %q; got output:\n%s", wantLine, output) } // Rebuilding exe makes it work again. goCmd(t, "install", "-linkshared", "exe") run(t, "rebuilt exe", "./bin/exe") // If we make a change which does not break ABI (such as adding an unexported // function) and rebuild libdep.so, exe still works. appendFile("src/dep/dep.go", "func noABIBreak() {}\n") goCmd(t, "install", "-buildmode=shared", "-linkshared", "dep") run(t, "after non-ABI breaking change", "./bin/exe") }
explode_data.jsonl/24195
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 1867, 1317, 1227, 287, 1155, 353, 8840, 836, 8, 341, 30680, 15613, 1155, 11, 330, 12248, 497, 6523, 5834, 8516, 28, 6100, 497, 6523, 2080, 6100, 497, 330, 14891, 1138, 30680, 15613, 1155, 11, 330, 12248, 497, 6523, 2080, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestIsStr(t *testing.T) { var v *Value v = &Value{data: string("hello")} assert.True(t, v.IsStr()) v = &Value{data: []string{string("hello")}} assert.True(t, v.IsStrSlice()) }
explode_data.jsonl/23410
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 3872, 2580, 1155, 353, 8840, 836, 8, 1476, 2405, 348, 353, 1130, 271, 5195, 284, 609, 1130, 90, 691, 25, 914, 445, 14990, 42132, 6948, 32443, 1155, 11, 348, 4506, 2580, 12367, 5195, 284, 609, 1130, 90, 691, 25, 3056, 917...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestIf feeds a Go+ if / else-if / else chain through the compile-test
// helper and asserts the exact Go source it should generate (println is
// lowered to fmt.Println).
func TestIf(t *testing.T) {
	gopClTest(t, `x := 0
if t := false; t {
	x = 3
} else if !t {
	x = 5
} else {
	x = 7
}
println("x:", x)
`, `package main

import fmt "fmt"

func main() {
	x := 0
	if t := false; t {
		x = 3
	} else if !t {
		x = 5
	} else {
		x = 7
	}
	fmt.Println("x:", x)
}
`)
}
explode_data.jsonl/73675
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 2679, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 1565, 87, 1669, 220, 15, 198, 333, 259, 1669, 895, 26, 259, 341, 10225, 284, 220, 18, 198, 92, 770, 421, 753, 83, 341, 10225, 284, 220, 20, 198, 92...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetClientset(t *testing.T) { testConfigTokenFile := "test-config-file" tmpDir, err := ioutil.TempDir("", "kubeadm-token-test") if err != nil { t.Errorf("Unable to create temporary directory: %v", err) } defer os.RemoveAll(tmpDir) fullPath := filepath.Join(tmpDir, testConfigTokenFile) // test dryRun = false on a non-exisiting file if _, err = getClientset(fullPath, false); err == nil { t.Errorf("getClientset(); dry-run: false; did no fail for test file %q: %v", fullPath, err) } // test dryRun = true on a non-exisiting file if _, err = getClientset(fullPath, true); err == nil { t.Errorf("getClientset(); dry-run: true; did no fail for test file %q: %v", fullPath, err) } f, err := os.Create(fullPath) if err != nil { t.Errorf("Unable to create test file %q: %v", fullPath, err) } defer f.Close() if _, err = f.WriteString(testConfigToken); err != nil { t.Errorf("Unable to write test file %q: %v", fullPath, err) } // test dryRun = true on an exisiting file if _, err = getClientset(fullPath, true); err != nil { t.Errorf("getClientset(); dry-run: true; failed for test file %q: %v", fullPath, err) } }
explode_data.jsonl/11814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 443 }
[ 2830, 3393, 1949, 2959, 746, 1155, 353, 8840, 836, 8, 341, 18185, 2648, 3323, 1703, 1669, 330, 1944, 25130, 14203, 1837, 20082, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 74, 392, 3149, 76, 34841, 16839, 1138, 743, 1848, 961,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestSearchPostsInChannel seeds four posts across th.BasicChannel,
// th.BasicChannel2 and a freshly created public channel, then exercises the
// "channel:" / "in:" search modifiers — bare, attached to a term,
// case-insensitive, with a trailing space, and repeated — asserting the exact
// number of hits each query must return.
func TestSearchPostsInChannel(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	th.LoginBasic()
	Client := th.Client
	channel := th.CreatePublicChannel()
	// Seed: one "sgtitlereview" post in BasicChannel, one in BasicChannel2,
	// and one unrelated post in each of BasicChannel2 and the new channel.
	message := "sgtitlereview with space"
	_ = th.CreateMessagePost(message)
	message = "sgtitlereview\n with return"
	_ = th.CreateMessagePostWithClient(Client, th.BasicChannel2, message)
	message = "other message with no return"
	_ = th.CreateMessagePostWithClient(Client, th.BasicChannel2, message)
	message = "other message with no return"
	_ = th.CreateMessagePostWithClient(Client, channel, message)
	// A bare modifier with no channel name matches nothing.
	posts, _ := Client.SearchPosts(th.BasicTeam.Id, "channel:", false)
	require.Empty(t, posts.Order, "wrong number of posts for search 'channel:'")
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "in:", false)
	require.Empty(t, posts.Order, "wrong number of posts for search 'in:'")
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "channel:"+th.BasicChannel.Name, false)
	require.Lenf(t, posts.Order, 2, "wrong number of posts returned for search 'channel:%v'", th.BasicChannel.Name)
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "in:"+th.BasicChannel2.Name, false)
	require.Lenf(t, posts.Order, 2, "wrong number of posts returned for search 'in:%v'", th.BasicChannel2.Name)
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "channel:"+th.BasicChannel2.Name, false)
	require.Lenf(t, posts.Order, 2, "wrong number of posts for search 'channel:%v'", th.BasicChannel2.Name)
	// The modifier keyword is case-insensitive.
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "ChAnNeL:"+th.BasicChannel2.Name, false)
	require.Lenf(t, posts.Order, 2, "wrong number of posts for search 'ChAnNeL:%v'", th.BasicChannel2.Name)
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "sgtitlereview", false)
	require.Lenf(t, posts.Order, 2, "wrong number of posts for search 'sgtitlereview'")
	// Term plus channel filter narrows to the single matching post.
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "sgtitlereview channel:"+th.BasicChannel.Name, false)
	require.Lenf(t, posts.Order, 1, "wrong number of posts for search 'sgtitlereview channel:%v'", th.BasicChannel.Name)
	// A space after the modifier colon is tolerated.
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "sgtitlereview in: "+th.BasicChannel2.Name, false)
	require.Lenf(t, posts.Order, 1, "wrong number of posts for search 'sgtitlereview in: %v'", th.BasicChannel2.Name)
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "sgtitlereview channel: "+th.BasicChannel2.Name, false)
	require.Lenf(t, posts.Order, 1, "wrong number of posts for search 'sgtitlereview channel: %v'", th.BasicChannel2.Name)
	// Two channel filters union their results (2 + 1 posts).
	posts, _ = Client.SearchPosts(th.BasicTeam.Id, "channel: "+th.BasicChannel2.Name+" channel: "+channel.Name, false)
	require.Lenf(t, posts.Order, 3, "wrong number of posts for 'channel: %v channel: %v'", th.BasicChannel2.Name, channel.Name)
}
explode_data.jsonl/5259
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 917 }
[ 2830, 3393, 5890, 19631, 641, 9629, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 70479, 32499, 15944, 741, 71724, 1669, 270, 11716, 271, 71550, 1669, 270, 7251, 12676, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_varListErr(t *testing.T) { m := testModule(t, "plan-var-list-err") p := testProvider("aws") ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), }) _, err := ctx.Plan() if err == nil { t.Fatal("should error") } }
explode_data.jsonl/28712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 1972, 17, 20485, 4612, 852, 7747, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 85415, 9029, 12, 615, 1138, 3223, 1669, 1273, 5179, 445, 8635, 1138, 20985, 1669, 1273, 1972, 17, 1155, 11, 609,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewFunctionWithArgs(t *testing.T) { t.Parallel() ctx := NewIsolate().NewContext() cons, _ := ctx.Eval(`(function(x, y){ this.x = x + y; })`, "") one, _ := ctx.Eval(`1`, "") two, _ := ctx.Eval(`2`, "") obj, err := cons.New(one, two) if err != nil { t.Fatal(err) } res, err := obj.Get("x") if err != nil { t.Fatal(err) } else if num := res.Int64(); num != 3 { t.Errorf("Expected 3, got %v (%v)", num, res) } }
explode_data.jsonl/81563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 3564, 5152, 2354, 4117, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 1532, 3872, 33066, 1005, 3564, 1972, 741, 197, 6254, 11, 716, 1669, 5635, 5142, 831, 5809, 7, 1688, 2075, 11, 379, 6098, 419, 1993,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIssueUnlock(t *testing.T) { client, _ := New("https://try.gogs.io") _, err := client.Issues.Unlock(context.Background(), "gogits/go-gogs-client", 1) if err != scm.ErrNotSupported { t.Errorf("Expect Not Supported error") } }
explode_data.jsonl/77752
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 42006, 49679, 1155, 353, 8840, 836, 8, 341, 25291, 11, 716, 1669, 1532, 445, 2428, 1110, 1539, 1302, 26307, 4245, 1138, 197, 6878, 1848, 1669, 2943, 2447, 778, 1137, 39188, 5378, 19047, 1507, 330, 70, 538, 1199, 25525, 2371,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestRolling_deployRollingHooks runs the rolling deployment strategy against
// a fake client with a stubbed rolling-update and hook executor, covering the
// four combinations of pre/post Abort-policy hook placement and hook
// success/failure: a failing Abort hook must fail the deployment, a passing
// one must not.
func TestRolling_deployRollingHooks(t *testing.T) {
	config := appstest.OkDeploymentConfig(1)
	config.Spec.Strategy = appstest.OkRollingStrategy()
	latest, _ := appsinternalutil.MakeDeploymentV1(config)
	// hookError is shared with the stubbed executor below and reset per case.
	var hookError error
	deployments := map[string]*corev1.ReplicationController{latest.Name: latest}
	client := &fake.Clientset{}
	// Serve RC "get" from the local deployments map.
	client.AddReactor("get", "replicationcontrollers", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
		name := action.(clientgotesting.GetAction).GetName()
		return true, deployments[name], nil
	})
	// Echo RC "update" back unchanged.
	client.AddReactor("update", "replicationcontrollers", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) {
		updated := action.(clientgotesting.UpdateAction).GetObject().(*corev1.ReplicationController)
		return true, updated, nil
	})
	strategy := &RollingDeploymentStrategy{
		rcClient:    client.Core(),
		eventClient: fake.NewSimpleClientset().Core(),
		// The initial (non-rolling) strategy must never be reached in this test.
		initialStrategy: &testStrategy{
			deployFn: func(from *corev1.ReplicationController, to *corev1.ReplicationController, desiredReplicas int, updateAcceptor strat.UpdateAcceptor) error {
				t.Fatalf("unexpected call to initial strategy")
				return nil
			},
		},
		// Rolling update itself always succeeds; only hooks can fail.
		rollingUpdate: func(config *RollingUpdaterConfig) error {
			return nil
		},
		hookExecutor: &hookExecutorImpl{
			executeFunc: func(hook *appsv1.LifecycleHook, deployment *corev1.ReplicationController, suffix, label string) error {
				return hookError
			},
		},
		getUpdateAcceptor: getUpdateAcceptor,
		apiRetryPeriod:    1 * time.Millisecond,
		apiRetryTimeout:   10 * time.Millisecond,
	}
	// Cases: (pre-hook Abort, post-hook Abort) x (hook fails, hook succeeds).
	cases := []struct {
		params               *appsapi.RollingDeploymentStrategyParams
		hookShouldFail       bool
		deploymentShouldFail bool
	}{
		{rollingParams(appsapi.LifecycleHookFailurePolicyAbort, ""), true, true},
		{rollingParams(appsapi.LifecycleHookFailurePolicyAbort, ""), false, false},
		{rollingParams("", appsapi.LifecycleHookFailurePolicyAbort), true, true},
		{rollingParams("", appsapi.LifecycleHookFailurePolicyAbort), false, false},
	}
	for _, tc := range cases {
		config := appstest.OkDeploymentConfig(2)
		config.Spec.Strategy.RollingParams = tc.params
		deployment, _ := appsinternalutil.MakeDeploymentV1(config)
		deployments[deployment.Name] = deployment
		hookError = nil
		if tc.hookShouldFail {
			hookError = fmt.Errorf("hook failure")
		}
		strategy.out, strategy.errOut = &bytes.Buffer{}, &bytes.Buffer{}
		err := strategy.Deploy(latest, deployment, 2)
		if err != nil && tc.deploymentShouldFail {
			t.Logf("got expected error: %v", err)
		}
		if err == nil && tc.deploymentShouldFail {
			t.Errorf("expected an error for case: %#v", tc)
		}
		if err != nil && !tc.deploymentShouldFail {
			t.Errorf("unexpected error for case: %#v: %v", tc, err)
		}
	}
}
explode_data.jsonl/64616
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1003 }
[ 2830, 3393, 32355, 287, 91890, 32355, 287, 67769, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 906, 267, 477, 54282, 75286, 2648, 7, 16, 340, 25873, 36473, 27318, 10228, 284, 906, 267, 477, 54282, 32355, 287, 19816, 741, 197, 19350, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_signature_Marshal(t *testing.T) { t.Parallel() type ( testCase struct { name string blob []byte want []byte wantErr bool } testList []testCase ) algos := GetAlgos() tests := make(testList, 0, algos.Len()) for name, algo := range algos { sign, _ := mockSignature(algo) blob, _ := sign.Raw() want, _ := proto.Marshal(&pb.Signature{Blob: blob}) tests = append(tests, testCase{ name: name + "_OK", blob: blob, want: want, }) } for idx := range tests { test := tests[idx] t.Run(test.name, func(t *testing.T) { t.Parallel() got, err := NewSignature(test.blob).Marshal() if (err != nil) != test.wantErr { t.Errorf("Marshal() error: %v | want: %v", err, test.wantErr) return } if !reflect.DeepEqual(got, test.want) { t.Errorf("Marshal() got: %#v | want: %#v", got, test.want) } }) } }
explode_data.jsonl/21342
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 413 }
[ 2830, 3393, 39859, 1245, 28423, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 13158, 2399, 197, 18185, 4207, 2036, 341, 298, 11609, 262, 914, 198, 298, 2233, 1684, 262, 3056, 3782, 198, 298, 50780, 262, 3056, 3782, 198, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetDefaultConfigFilePaths(t *testing.T) { assert := assert.New(t) results := getDefaultConfigFilePaths() // There should be atleast two config file locations assert.True(len(results) >= 2) for _, f := range results { // Paths cannot be empty assert.NotNil(f) } }
explode_data.jsonl/5132
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 1949, 3675, 2648, 19090, 82, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 55497, 1669, 69106, 2648, 19090, 82, 741, 197, 322, 2619, 1265, 387, 89928, 1378, 2193, 1034, 10468, 198, 6948, 32443, 6901, 20484...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestInputUsage pins the exact doc-comment text getInputUsage generates for
// the four input flavors of a schema type "Foo": array, map, pointer, and
// plain. The expected strings are byte-exact, including the \n\t\t layout.
func TestInputUsage(t *testing.T) {
	pkg := &pkgContext{}
	arrayUsage := pkg.getInputUsage("FooArray")
	assert.Equal(
		t,
		"FooArrayInput is an input type that accepts FooArray and FooArrayOutput values.\nYou can construct a "+
			"concrete instance of `FooArrayInput` via:\n\n\t\t FooArray{ FooArgs{...} }\n ",
		arrayUsage)
	mapUsage := pkg.getInputUsage("FooMap")
	assert.Equal(
		t,
		"FooMapInput is an input type that accepts FooMap and FooMapOutput values.\nYou can construct a concrete"+
			" instance of `FooMapInput` via:\n\n\t\t FooMap{ \"key\": FooArgs{...} }\n ",
		mapUsage)
	// The pointer flavor additionally documents the nil construction path.
	ptrUsage := pkg.getInputUsage("FooPtr")
	assert.Equal(
		t,
		"FooPtrInput is an input type that accepts FooArgs, FooPtr and FooPtrOutput values.\nYou can construct a "+
			"concrete instance of `FooPtrInput` via:\n\n\t\t FooArgs{...}\n\n or:\n\n\t\t nil\n ",
		ptrUsage)
	usage := pkg.getInputUsage("Foo")
	assert.Equal(
		t,
		"FooInput is an input type that accepts FooArgs and FooOutput values.\nYou can construct a concrete instance"+
			" of `FooInput` via:\n\n\t\t FooArgs{...}\n ",
		usage)
}
explode_data.jsonl/22932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 423 }
[ 2830, 3393, 2505, 14783, 1155, 353, 8840, 836, 8, 341, 3223, 7351, 1669, 609, 30069, 1972, 16094, 11923, 14783, 1669, 24793, 87784, 14783, 445, 40923, 1857, 1138, 6948, 12808, 1006, 197, 3244, 345, 197, 197, 1, 40923, 1857, 2505, 374, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBasicModelWithRootNoPolicy(t *testing.T) { e, _ := NewEnforcer("examples/basic_with_root_model.conf") testEnforce(t, e, "alice", "data1", "read", false) testEnforce(t, e, "alice", "data1", "write", false) testEnforce(t, e, "alice", "data2", "read", false) testEnforce(t, e, "alice", "data2", "write", false) testEnforce(t, e, "bob", "data1", "read", false) testEnforce(t, e, "bob", "data1", "write", false) testEnforce(t, e, "bob", "data2", "read", false) testEnforce(t, e, "bob", "data2", "write", false) testEnforce(t, e, "root", "data1", "read", true) testEnforce(t, e, "root", "data1", "write", true) testEnforce(t, e, "root", "data2", "read", true) testEnforce(t, e, "root", "data2", "write", true) }
explode_data.jsonl/57118
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 15944, 1712, 2354, 8439, 2753, 13825, 1155, 353, 8840, 836, 8, 341, 7727, 11, 716, 1669, 1532, 1702, 82010, 445, 51668, 77909, 6615, 12993, 5047, 13937, 5130, 18185, 1702, 8833, 1155, 11, 384, 11, 330, 63195, 497, 330, 691, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNodeResource(t *testing.T) { nodeCapacity := make(map[v1.ResourceName]resource.Quantity) nodeCapacity[v1.ResourceCPU] = resource.MustParse("14500m") result := GetNodeResource(&v1.NodeStatus{ Allocatable: nodeCapacity, }) assert.Equal(t, result.Resources[constants.CPU].GetValue(), int64(14500)) }
explode_data.jsonl/63796
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 1955, 4783, 1155, 353, 8840, 836, 8, 341, 20831, 29392, 1669, 1281, 9147, 16529, 16, 20766, 675, 60, 9233, 66267, 340, 20831, 29392, 16529, 16, 20766, 31615, 60, 284, 5101, 50463, 14463, 445, 16, 19, 20, 15, 15, 76, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDeleteSecurityGroup exercises ContrailTypeLogicService.DeleteSecurityGroup:
// the read service is mocked to return an existing security group, the int-pool
// allocator is expected to deallocate exactly once with the case's value, the
// downstream service echoes the request ID, and the test asserts the response
// ID matches the existing group's UUID.
func TestDeleteSecurityGroup(t *testing.T) { tests := []struct { name string existingSG *models.SecurityGroup deallocatedInt int64 }{ { name: "delete security group", existingSG: &models.SecurityGroup{ UUID: "sg_uuid", SecurityGroupID: 8000001, }, deallocatedInt: 1, }, } for _, tt := range tests { runTest(t, tt.name, func(t *testing.T, sv *ContrailTypeLogicService) { sv.ReadService.(*servicesmock.MockReadService).EXPECT().GetSecurityGroup( // nolint: errcheck gomock.Not(gomock.Nil()), &services.GetSecurityGroupRequest{ID: tt.existingSG.UUID}, ).Return(&services.GetSecurityGroupResponse{SecurityGroup: tt.existingSG}, nil).Times(1) sv.IntPoolAllocator.(*typesmock.MockIntPoolAllocator).EXPECT().DeallocateInt( gomock.Not(gomock.Nil()), gomock.Not(gomock.Nil()), tt.deallocatedInt, ).Return(nil).Times(1) sv.Next().(*servicesmock.MockService). EXPECT().DeleteSecurityGroup(gomock.Not(gomock.Nil()), gomock.Not(gomock.Nil())). DoAndReturn(func(_ context.Context, request *services.DeleteSecurityGroupRequest) ( *services.DeleteSecurityGroupResponse, error) { return &services.DeleteSecurityGroupResponse{ ID: request.ID, }, nil }).Times(1) ctx := context.Background() res, err := sv.DeleteSecurityGroup(ctx, &services.DeleteSecurityGroupRequest{ ID: tt.existingSG.UUID, }) if assert.NoError(t, err) { assert.Equal(t, tt.existingSG.UUID, res.ID) } }) } }
explode_data.jsonl/47318
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 631 }
[ 2830, 3393, 6435, 15352, 2808, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 8122, 11083, 7783, 257, 353, 6507, 21567, 2808, 198, 197, 58351, 57372, 1072, 526, 21, 19, 198, 197, 59403, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCommitStats_String(t *testing.T) { v := CommitStats{ Additions: Int(0), Deletions: Int(0), Total: Int(0), } want := `github.CommitStats{Additions:0, Deletions:0, Total:0}` if got := v.String(); got != want { t.Errorf("CommitStats.String = %v, want %v", got, want) } }
explode_data.jsonl/33229
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 33441, 16635, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 9205, 16635, 515, 197, 37972, 5930, 25, 1333, 7, 15, 1326, 197, 197, 1912, 1149, 908, 25, 1333, 7, 15, 1326, 197, 197, 7595, 25, 257, 1333, 7, 15, 1326, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestScan covers the SCAN command against the in-memory server. Note the
// fake intentionally ignores cursors and COUNT and returns everything in one
// pass (cursor "0"); subtests cover the happy path, an out-of-range cursor,
// COUNT being accepted but ignored, MATCH filtering, and the error cases for
// wrong arity, non-integer cursors/counts, and dangling MATCH/COUNT.
func TestScan(t *testing.T) { s, err := Run() ok(t, err) defer s.Close() c, err := proto.Dial(s.Addr()) ok(t, err) defer c.Close() // We cheat with scan. It always returns everything. s.Set("key", "value") t.Run("no problem", func(t *testing.T) { mustDo(t, c, "SCAN", "0", proto.Array( proto.String("0"), proto.Array( proto.String("key"), ), ), ) }) t.Run("invalid cursor", func(t *testing.T) { mustDo(t, c, "SCAN", "42", proto.Array( proto.String("0"), proto.Array(), ), ) }) t.Run("count (ignored)", func(t *testing.T) { mustDo(t, c, "SCAN", "0", "COUNT", "200", proto.Array( proto.String("0"), proto.Array( proto.String("key"), ), ), ) }) t.Run("match", func(t *testing.T) { s.Set("aap", "noot") s.Set("mies", "wim") mustDo(t, c, "SCAN", "0", "MATCH", "mi*", proto.Array( proto.String("0"), proto.Array( proto.String("mies"), ), ), ) }) t.Run("errors", func(t *testing.T) { mustDo(t, c, "SCAN", proto.Error(errWrongNumber("scan")), ) mustDo(t, c, "SCAN", "noint", proto.Error("ERR invalid cursor"), ) mustDo(t, c, "SCAN", "1", "MATCH", proto.Error("ERR syntax error"), ) mustDo(t, c, "SCAN", "1", "COUNT", proto.Error("ERR syntax error"), ) mustDo(t, c, "SCAN", "1", "COUNT", "noint", proto.Error("ERR value is not an integer or out of range"), ) }) }
explode_data.jsonl/44822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 762 }
[ 2830, 3393, 26570, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 6452, 741, 59268, 1155, 11, 1848, 340, 16867, 274, 10421, 741, 1444, 11, 1848, 1669, 18433, 98462, 1141, 93626, 2398, 59268, 1155, 11, 1848, 340, 16867, 272, 10421, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLimitListenerClose verifies LimitListener(ln, 1) interacting with Close:
// a first dial is accepted (consuming the single slot), then the listener is
// closed from a timer while a second Accept blocks; that Accept must unblock
// and return an error rather than a connection. If timer.Stop() reports the
// timer had not yet fired, Accept returned before the close — a failure.
func TestLimitListenerClose(t *testing.T) { ln, err := net.Listen("tcp", "127.0.0.1:0") if err != nil { t.Fatal(err) } defer ln.Close() ln = LimitListener(ln, 1) errCh := make(chan error) go func() { defer close(errCh) c, err := net.Dial(ln.Addr().Network(), ln.Addr().String()) if err != nil { errCh <- err return } c.Close() }() c, err := ln.Accept() if err != nil { t.Fatal(err) } defer c.Close() err = <-errCh if err != nil { t.Fatalf("Dial: %v", err) } // Allow the subsequent Accept to block before closing the listener. // (Accept should unblock and return.) timer := time.AfterFunc(10*time.Millisecond, func() { ln.Close() }) c, err = ln.Accept() if err == nil { c.Close() t.Errorf("Unexpected successful Accept()") } if timer.Stop() { t.Errorf("Accept returned before listener closed: %v", err) } }
explode_data.jsonl/45649
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 362 }
[ 2830, 3393, 16527, 2743, 7925, 1155, 353, 8840, 836, 8, 341, 197, 2261, 11, 1848, 1669, 4179, 68334, 445, 27161, 497, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 15, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFlagSet_Register_Default(t *testing.T) { resetFlagSet() Register(&StringFlag{ Name: "config", Usage: "--config", EnvVar: constant.EgoConfigPath, Default: ConfigDefaultToml, Action: func(name string, fs *FlagSet) {}, }) err := Parse() assert.NoError(t, err) configStr, err := StringE("config") assert.NoError(t, err) assert.Equal(t, ConfigDefaultToml, configStr) }
explode_data.jsonl/50977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 12135, 1649, 73124, 60336, 1155, 353, 8840, 836, 8, 341, 70343, 12135, 1649, 741, 79096, 2099, 703, 12135, 515, 197, 21297, 25, 262, 330, 1676, 756, 197, 197, 14783, 25, 256, 14482, 1676, 756, 197, 197, 14359, 3962, 25, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_myAtoi(t *testing.T) { tests := []struct { name string args string want int }{ {"symbol", "+-1", 0}, {"symbol", "-1", -1}, {"with words", "4193 the words", 4193}, {"upper flow", "2873498734182581725", 1<<31 - 1}, {"lower flow", "-238482375873285712857", -1 << 31}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := myAtoi(tt.args); got != tt.want { t.Errorf("myAtoi() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/63408
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 35686, 32, 52609, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 914, 198, 197, 50780, 526, 198, 197, 59403, 197, 197, 4913, 18785, 497, 6630, 12, 16, 497, 220, 15, 1583, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCheckDiskName(t *testing.T) { tests := []struct { diskName string expected bool }{ { diskName: "a", expected: true, }, { diskName: ".", expected: false, }, { diskName: "_", expected: false, }, { diskName: "_", expected: false, }, { diskName: "09", expected: true, }, { diskName: "az", expected: true, }, { diskName: "1_", expected: true, }, { diskName: "_1", expected: false, }, { diskName: "1.", expected: false, }, { diskName: "1-", expected: false, }, { diskName: "0.z", expected: true, }, { diskName: "1.2", expected: true, }, { diskName: "a-9", expected: true, }, { diskName: "a_c", expected: true, }, { diskName: "1__", expected: true, }, { diskName: "a---9", expected: true, }, { diskName: "1#2", expected: false, }, } for _, test := range tests { result := checkDiskName(test.diskName) if !reflect.DeepEqual(result, test.expected) { t.Errorf("input: %q, checkShareNameBeginAndEnd result: %v, expected: %v", test.diskName, result, test.expected) } } }
explode_data.jsonl/62108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 583 }
[ 2830, 3393, 3973, 47583, 675, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 2698, 3187, 675, 914, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 515, 298, 2698, 3187, 675, 25, 330, 64, 756, 298, 42400, 25, 830,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLock(t *testing.T) { var ( key string ok bool err error ) Convey("TEST Lock", t, func() { ok, err = d.TryLock(ctx, key, "test", 1) So(err, ShouldBeNil) So(ok, ShouldBeTrue) ok, err = d.TryLock(ctx, key, "test", 1) So(err, ShouldBeNil) So(ok, ShouldBeFalse) err = d.UnLock(ctx, key) So(err, ShouldBeNil) ok, err = d.TryLock(ctx, key, "test", 1) So(err, ShouldBeNil) So(ok, ShouldBeTrue) ok, err = d.TryLock(ctx, key, "test", 1) So(err, ShouldBeNil) So(ok, ShouldBeFalse) }) }
explode_data.jsonl/80569
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 11989, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 23634, 914, 198, 197, 59268, 220, 1807, 198, 197, 9859, 1465, 198, 197, 340, 93070, 5617, 445, 10033, 15701, 497, 259, 11, 2915, 368, 341, 197, 59268, 11, 1848, 284, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServerCreation(t *testing.T) { p2pInstance.Run() storage.Run() defer storage.Close() defer p2pInstance.Close() server := NewServer(nil, storage, p2pInstance, nil) assert.Equal(t, server.Logger, new(util.PlaceholderLogger)) assert.NotNil(t, server) assert.Equal(t, server.Orders.Storage, storage) assert.Equal(t, server.Channels.Storage, storage) assert.Equal(t, server.Orders.P2p, p2pInstance) assert.Equal(t, server.Channels.P2p, p2pInstance) var err error err = server.Orders.Storage.Put([]byte(serverTestKey), []byte(serverTestEntry)) assert.NoError(t, err) server.Orders.Storage.DeleteAll() err = server.Channels.Storage.Put([]byte(serverTestKey), []byte(serverTestEntry)) assert.NoError(t, err) server.Channels.Storage.DeleteAll() }
explode_data.jsonl/33454
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 5475, 32701, 1155, 353, 8840, 836, 8, 341, 3223, 17, 79, 2523, 16708, 741, 197, 16172, 16708, 741, 16867, 5819, 10421, 741, 16867, 281, 17, 79, 2523, 10421, 2822, 41057, 1669, 1532, 5475, 27907, 11, 5819, 11, 281, 17, 79, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReceive(t *testing.T) { is := is.New(t) transport := newTestTransport() defer func() { transport.Stop() <-transport.Done() }() testProducer := NewNSQTestProducer(t, "test_receive") testProducer.Send([]byte("hello vice")) ch := transport.Receive("test_receive") select { case msg := <-ch: is.Equal(msg, []byte("hello vice")) case <-time.After(2 * time.Second): is.Fail() // timeout: transport.Receive } }
explode_data.jsonl/55188
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 172 }
[ 2830, 3393, 14742, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 374, 7121, 1155, 692, 197, 26445, 1669, 501, 2271, 27560, 741, 16867, 2915, 368, 341, 197, 197, 26445, 30213, 741, 197, 197, 45342, 26445, 60422, 741, 197, 66816, 18185, 4500...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStacktrace2(t *testing.T) { withTestProcess("retstack", t, func(p *proc.Target, fixture protest.Fixture) { assertNoError(p.Continue(), t, "Continue()") locations, err := proc.ThreadStacktrace(p.CurrentThread(), 40) assertNoError(err, t, "Stacktrace()") if !stackMatch([]loc{{-1, "main.f"}, {16, "main.main"}}, locations, false) { for i := range locations { t.Logf("\t%s:%d [%s]\n", locations[i].Call.File, locations[i].Call.Line, locations[i].Call.Fn.Name) } t.Fatalf("Stack error at main.f()\n%v\n", locations) } assertNoError(p.Continue(), t, "Continue()") locations, err = proc.ThreadStacktrace(p.CurrentThread(), 40) assertNoError(err, t, "Stacktrace()") if !stackMatch([]loc{{-1, "main.g"}, {17, "main.main"}}, locations, false) { for i := range locations { t.Logf("\t%s:%d [%s]\n", locations[i].Call.File, locations[i].Call.Line, locations[i].Call.Fn.Name) } t.Fatalf("Stack error at main.g()\n%v\n", locations) } }) }
explode_data.jsonl/56217
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 4336, 15067, 17, 1155, 353, 8840, 836, 8, 341, 46948, 2271, 7423, 445, 2122, 7693, 497, 259, 11, 2915, 1295, 353, 15782, 35016, 11, 12507, 8665, 991, 12735, 8, 341, 197, 6948, 2753, 1454, 1295, 2451, 6232, 1507, 259, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestAllocENI drives the happy path of EC2InstanceMetadataCache.AllocENI
// against a mocked EC2 client: the ENI is created and tagged, the instance
// is described as already having ENIs at device indexes 0 and 3 (so the free
// slot search should land on 1), and the new ENI is attached and its
// delete-on-termination attribute modified. Each EXPECT is Times(1), so the
// test also pins the exact sequence of EC2 calls AllocENI makes.
func TestAllocENI(t *testing.T) { ctrl, _, mockEC2 := setup(t) defer ctrl.Finish() cureniID := eniID eni := ec2.CreateNetworkInterfaceOutput{NetworkInterface: &ec2.NetworkInterface{NetworkInterfaceId: &cureniID}} mockEC2.EXPECT().CreateNetworkInterface(gomock.Any()).Return(&eni, nil) mockEC2.EXPECT().CreateTags(gomock.Any()).Return(nil, nil) // 2 ENIs, uses device number 0 3, expect to find free at 1 ec2ENIs := make([]*ec2.InstanceNetworkInterface, 0) deviceNum1 := int64(0) ownerID := accountID ec2ENI := &ec2.InstanceNetworkInterface{Attachment: &ec2.InstanceNetworkInterfaceAttachment{DeviceIndex: &deviceNum1}, OwnerId: &ownerID} ec2ENIs = append(ec2ENIs, ec2ENI) deviceNum2 := int64(3) ownerID = accountID ec2ENI = &ec2.InstanceNetworkInterface{Attachment: &ec2.InstanceNetworkInterfaceAttachment{DeviceIndex: &deviceNum2}, OwnerId: &ownerID} ec2ENIs = append(ec2ENIs, ec2ENI) result := &ec2.DescribeInstancesOutput{ Reservations: []*ec2.Reservation{{Instances: []*ec2.Instance{{NetworkInterfaces: ec2ENIs}}}}} mockEC2.EXPECT().DescribeInstances(gomock.Any()).Return(result, nil) attachmentID := "eni-attach-58ddda9d" attachResult := &ec2.AttachNetworkInterfaceOutput{ AttachmentId: &attachmentID} mockEC2.EXPECT().AttachNetworkInterface(gomock.Any()).Return(attachResult, nil) mockEC2.EXPECT().ModifyNetworkInterfaceAttribute(gomock.Any()).Return(nil, nil) ins := &EC2InstanceMetadataCache{ec2SVC: mockEC2} _, err := ins.AllocENI(false, nil, "") assert.NoError(t, err) }
explode_data.jsonl/19291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 562 }
[ 2830, 3393, 25154, 953, 40, 1155, 353, 8840, 836, 8, 341, 84381, 11, 8358, 7860, 7498, 17, 1669, 6505, 1155, 340, 16867, 23743, 991, 18176, 2822, 1444, 552, 7751, 915, 1669, 662, 72, 915, 198, 197, 33129, 1669, 11942, 17, 7251, 12320,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransportRequestWriteRoundTrip checks when the HTTP transport uses the
// ReadFrom (sendfile-style) fast path for a PUT body. Bodies backed by a real
// file with a known ContentLength should trigger *net.TCPConn.ReadFrom; file
// bodies with unknown/negative length and all in-memory buffer bodies should
// not. The transport's DialContext is wrapped so the underlying TCPConn is a
// testMockTCPConn that records whether ReadFrom was called.
func TestTransportRequestWriteRoundTrip(t *testing.T) { nBytes := int64(1 << 10) newFileFunc := func() (r io.Reader, done func(), err error) { f, err := ioutil.TempFile("", "net-http-newfilefunc") if err != nil { return nil, nil, err } // Write some bytes to the file to enable reading. if _, err := io.CopyN(f, rand.Reader, nBytes); err != nil { return nil, nil, fmt.Errorf("failed to write data to file: %v", err) } if _, err := f.Seek(0, 0); err != nil { return nil, nil, fmt.Errorf("failed to seek to front: %v", err) } done = func() { f.Close() os.Remove(f.Name()) } return f, done, nil } newBufferFunc := func() (io.Reader, func(), error) { return bytes.NewBuffer(make([]byte, nBytes)), func() {}, nil } cases := []struct { name string readerFunc func() (io.Reader, func(), error) contentLength int64 expectedReadFrom bool }{ { name: "file, length", readerFunc: newFileFunc, contentLength: nBytes, expectedReadFrom: true, }, { name: "file, no length", readerFunc: newFileFunc, }, { name: "file, negative length", readerFunc: newFileFunc, contentLength: -1, }, { name: "buffer", contentLength: nBytes, readerFunc: newBufferFunc, }, { name: "buffer, no length", readerFunc: newBufferFunc, }, { name: "buffer, length -1", contentLength: -1, readerFunc: newBufferFunc, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { r, cleanup, err := tc.readerFunc() if err != nil { t.Fatal(err) } defer cleanup() tConn := &testMockTCPConn{} trFunc := func(tr *Transport) { tr.DialContext = func(ctx context.Context, network, addr string) (net.Conn, error) { var d net.Dialer conn, err := d.DialContext(ctx, network, addr) if err != nil { return nil, err } tcpConn, ok := conn.(*net.TCPConn) if !ok { return nil, fmt.Errorf("%s/%s does not provide a *net.TCPConn", network, addr) } tConn.TCPConn = tcpConn return tConn, nil } } cst := newClientServerTest( t, h1Mode, HandlerFunc(func(w ResponseWriter, r *Request) { io.Copy(ioutil.Discard, r.Body) r.Body.Close() w.WriteHeader(200) }), 
trFunc, ) defer cst.close() req, err := NewRequest("PUT", cst.ts.URL, r) if err != nil { t.Fatal(err) } req.ContentLength = tc.contentLength req.Header.Set("Content-Type", "application/octet-stream") resp, err := cst.c.Do(req) if err != nil { t.Fatal(err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Fatalf("status code = %d; want 200", resp.StatusCode) } if !tConn.ReadFromCalled && tc.expectedReadFrom { t.Fatalf("did not call ReadFrom") } if tConn.ReadFromCalled && !tc.expectedReadFrom { t.Fatalf("ReadFrom was unexpectedly invoked") } }) } }
explode_data.jsonl/14181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1399 }
[ 2830, 3393, 27560, 1900, 7985, 27497, 56352, 1155, 353, 8840, 836, 8, 341, 9038, 7078, 1669, 526, 21, 19, 7, 16, 1115, 220, 16, 15, 340, 8638, 1703, 9626, 1669, 2915, 368, 320, 81, 6399, 47431, 11, 2814, 2915, 1507, 1848, 1465, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestShouldSendAlertNotification table-tests NotifierBase.ShouldNotify over
// alert state transitions. Covered rules: pending never notifies; ok->alerting
// and alerting->ok always notify; ok->ok never notifies (even with reminders);
// reminders re-notify only once the configured frequency has elapsed since the
// last notification; a notification stuck in the pending state is retried only
// after it has been pending for more than a minute; unknown->alerting notifies
// while unknown->ok/pending and no_data->pending do not.
func TestShouldSendAlertNotification(t *testing.T) { tnow := time.Now() tcs := []struct { name string prevState models.AlertStateType newState models.AlertStateType sendReminder bool frequency time.Duration state *models.AlertNotificationState expect bool }{ { name: "pending -> ok should not trigger an notification", newState: models.AlertStateOK, prevState: models.AlertStatePending, sendReminder: false, expect: false, }, { name: "ok -> alerting should trigger an notification", newState: models.AlertStateAlerting, prevState: models.AlertStateOK, sendReminder: false, expect: true, }, { name: "ok -> pending should not trigger an notification", newState: models.AlertStatePending, prevState: models.AlertStateOK, sendReminder: false, expect: false, }, { name: "ok -> ok should not trigger an notification", newState: models.AlertStateOK, prevState: models.AlertStateOK, sendReminder: false, expect: false, }, { name: "ok -> ok with reminder should not trigger an notification", newState: models.AlertStateOK, prevState: models.AlertStateOK, sendReminder: true, expect: false, }, { name: "alerting -> ok should trigger an notification", newState: models.AlertStateOK, prevState: models.AlertStateAlerting, sendReminder: false, expect: true, }, { name: "alerting -> ok should trigger an notification when reminders enabled", newState: models.AlertStateOK, prevState: models.AlertStateAlerting, frequency: time.Minute * 10, sendReminder: true, state: &models.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, expect: true, }, { name: "alerting -> alerting with reminder and no state should trigger", newState: models.AlertStateAlerting, prevState: models.AlertStateAlerting, frequency: time.Minute * 10, sendReminder: true, expect: true, }, { name: "alerting -> alerting with reminder and last notification sent 1 minute ago should not trigger", newState: models.AlertStateAlerting, prevState: models.AlertStateAlerting, frequency: time.Minute * 10, sendReminder: true, state: 
&models.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, expect: false, }, { name: "alerting -> alerting with reminder and last notifciation sent 11 minutes ago should trigger", newState: models.AlertStateAlerting, prevState: models.AlertStateAlerting, frequency: time.Minute * 10, sendReminder: true, state: &models.AlertNotificationState{UpdatedAt: tnow.Add(-11 * time.Minute).Unix()}, expect: true, }, { name: "OK -> alerting with notifciation state pending and updated 30 seconds ago should not trigger", newState: models.AlertStateAlerting, prevState: models.AlertStateOK, state: &models.AlertNotificationState{State: models.AlertNotificationStatePending, UpdatedAt: tnow.Add(-30 * time.Second).Unix()}, expect: false, }, { name: "OK -> alerting with notifciation state pending and updated 2 minutes ago should trigger", newState: models.AlertStateAlerting, prevState: models.AlertStateOK, state: &models.AlertNotificationState{State: models.AlertNotificationStatePending, UpdatedAt: tnow.Add(-2 * time.Minute).Unix()}, expect: true, }, { name: "unknown -> ok", prevState: models.AlertStateUnknown, newState: models.AlertStateOK, expect: false, }, { name: "unknown -> pending", prevState: models.AlertStateUnknown, newState: models.AlertStatePending, expect: false, }, { name: "unknown -> alerting", prevState: models.AlertStateUnknown, newState: models.AlertStateAlerting, expect: true, }, { name: "no_data -> pending", prevState: models.AlertStateNoData, newState: models.AlertStatePending, expect: false, }, } for _, tc := range tcs { evalContext := alerting.NewEvalContext(context.Background(), &alerting.Rule{ State: tc.prevState, }) if tc.state == nil { tc.state = &models.AlertNotificationState{} } evalContext.Rule.State = tc.newState nb := &NotifierBase{SendReminder: tc.sendReminder, Frequency: tc.frequency} r := nb.ShouldNotify(evalContext.Ctx, evalContext, tc.state) assert.Equal(t, r, tc.expect, "failed test %s. 
expected %+v to return: %v", tc.name, tc, tc.expect) } }
explode_data.jsonl/58803
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1862 }
[ 2830, 3393, 14996, 11505, 9676, 11196, 1155, 353, 8840, 836, 8, 341, 197, 1517, 363, 1669, 882, 13244, 2822, 3244, 4837, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 50728, 1397, 262, 4119, 40143, 1397, 929, 198, 197, 8638, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHandlePrometheus(t *testing.T) { for _, tc := range handlePrometheusTests { ms := metrics.NewStore() for _, metric := range tc.metrics { ms.Add(metric) } o := Options{ms, "gunstar"} e, err := New(o) if err != nil { t.Fatalf("couldn't make exporter: %s", err) } response := httptest.NewRecorder() e.HandlePrometheusMetrics(response, &http.Request{}) if response.Code != 200 { t.Errorf("test case %s: response code not 200: %d", tc.name, response.Code) } b, err := ioutil.ReadAll(response.Body) if err != nil { t.Errorf("test case %s: failed to read response: %s", tc.name, err) } diff := deep.Equal(tc.expected, string(b)) if diff != nil { t.Errorf("test case %s: response not expected:\n%s", tc.name, diff) } } }
explode_data.jsonl/10753
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 6999, 35186, 39705, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3705, 35186, 39705, 18200, 341, 197, 47691, 1669, 16734, 7121, 6093, 741, 197, 2023, 8358, 18266, 1669, 2088, 17130, 35359, 341, 298, 47691, 1904...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestKubernetesResources_AreCreated(t *testing.T) { // TODO: Create builder/yaml fixture of some type to construct MDB objects for unit tests mdb := newTestReplicaSet() mgr := client.NewManager(&mdb) r := NewReconciler(mgr) res, err := r.Reconcile(context.TODO(), reconcile.Request{NamespacedName: types.NamespacedName{Namespace: mdb.Namespace, Name: mdb.Name}}) assertReconciliationSuccessful(t, res, err) s := corev1.Secret{} err = mgr.GetClient().Get(context.TODO(), types.NamespacedName{Name: mdb.AutomationConfigSecretName(), Namespace: mdb.Namespace}, &s) assert.NoError(t, err) assert.Equal(t, mdb.Namespace, s.Namespace) assert.Equal(t, mdb.AutomationConfigSecretName(), s.Name) assert.Contains(t, s.Data, automationconfig.ConfigKey) assert.NotEmpty(t, s.Data[automationconfig.ConfigKey]) }
explode_data.jsonl/80673
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 42, 29827, 11277, 1566, 265, 11694, 1155, 353, 8840, 836, 8, 341, 197, 322, 5343, 25, 4230, 7363, 26491, 9467, 12507, 315, 1045, 943, 311, 9245, 86352, 6171, 369, 4982, 7032, 198, 2109, 1999, 1669, 501, 2271, 18327, 15317, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_stripDockerMetaFromCommand(t *testing.T) { type args struct { command string } tests := []struct { name string args args want string }{ { name: "empty", args: args{ command: "", }, want: "", }, { name: "space strip", args: args{ command: " space strip ", }, want: "space strip", }, { name: "no strip", args: args{ command: "bin/sh #(nop) CMD [/bin/bash]", }, want: "bin/sh #(nop) CMD [/bin/bash]", }, { name: "strip with #(nop)", args: args{ command: "/bin/sh -c #(nop) CMD [/bin/bash] ", }, want: "CMD [/bin/bash]", }, { name: "strip without #(nop)", args: args{ command: "/bin/sh -c CMD [/bin/bash] ", }, want: "CMD [/bin/bash]", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := stripDockerMetaFromCommand(tt.args.command); got != tt.want { t.Errorf("stripDockerMetaFromCommand() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/28534
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 520 }
[ 2830, 3393, 66130, 35, 13659, 12175, 3830, 4062, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 45566, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 914, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIncludedIn(t *testing.T) { assert.False(t, IncludedIn([]string{}, "")) assert.True(t, IncludedIn([]string{""}, "")) assert.False(t, IncludedIn([]string{"a"}, "")) assert.True(t, IncludedIn([]string{"a"}, "a")) assert.False(t, IncludedIn([]string{""}, "a")) assert.True(t, IncludedIn([]string{"a", "b"}, "a")) assert.True(t, IncludedIn([]string{"a", "b"}, "b")) assert.False(t, IncludedIn([]string{"a", "b"}, "c")) }
explode_data.jsonl/26280
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 84610, 641, 1155, 353, 8840, 836, 8, 341, 6948, 50757, 1155, 11, 45964, 641, 10556, 917, 22655, 77561, 6948, 32443, 1155, 11, 45964, 641, 10556, 917, 90, 3014, 2137, 77561, 6948, 50757, 1155, 11, 45964, 641, 10556, 917, 4913...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestQuery runs the module-query table against the live network (skipped
// when external network or git is unavailable). For each case it resolves
// tt.query from tt.current under an "allow" glob filter on versions, then
// checks either the expected error string or the expected resolved version.
// Subtests run in parallel; tt is re-bound per iteration for the closure.
func TestQuery(t *testing.T) { testenv.MustHaveExternalNetwork(t) testenv.MustHaveExecPath(t, "git") for _, tt := range queryTests { allow := tt.allow if allow == "" { allow = "*" } allowed := func(m module.Version) bool { ok, _ := path.Match(allow, m.Version) return ok } tt := tt t.Run(strings.ReplaceAll(tt.path, "/", "_")+"/"+tt.query+"/"+tt.current+"/"+allow, func(t *testing.T) { t.Parallel() info, err := Query(tt.path, tt.query, tt.current, allowed) if tt.err != "" { if err == nil { t.Errorf("Query(%q, %q, %v) = %v, want error %q", tt.path, tt.query, allow, info.Version, tt.err) } else if err.Error() != tt.err { t.Errorf("Query(%q, %q, %v): %v, want error %q", tt.path, tt.query, allow, err, tt.err) } return } if err != nil { t.Fatalf("Query(%q, %q, %v): %v", tt.path, tt.query, allow, err) } if info.Version != tt.vers { t.Errorf("Query(%q, %q, %v) = %v, want %v", tt.path, tt.query, allow, info.Version, tt.vers) } }) } }
explode_data.jsonl/46510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 503 }
[ 2830, 3393, 2859, 1155, 353, 8840, 836, 8, 341, 18185, 3160, 50463, 12116, 25913, 12320, 1155, 340, 18185, 3160, 50463, 12116, 10216, 1820, 1155, 11, 330, 12882, 5130, 2023, 8358, 17853, 1669, 2088, 3239, 18200, 341, 197, 197, 7183, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMountOptions(t *testing.T) { tmpDir, plug := getPlugin(t) defer os.RemoveAll(tmpDir) pod := &v1.Pod{ObjectMeta: metav1.ObjectMeta{UID: types.UID("poduid")}} mounter, err := plug.NewMounter(getTestVolume(false, tmpDir, false, []string{"test-option"}), pod, volume.VolumeOptions{}) if err != nil { t.Errorf("Failed to make a new Mounter: %v", err) } if mounter == nil { t.Fatalf("Got a nil Mounter") } // Wrap with FakeMounter. fakeMounter := mount.NewFakeMounter(nil) mounter.(*localVolumeMounter).mounter = fakeMounter if err := mounter.SetUp(volume.MounterArgs{}); err != nil { t.Errorf("Expected success, got: %v", err) } mountOptions := fakeMounter.MountPoints[0].Opts expectedMountOptions := []string{"bind", "test-option"} if !reflect.DeepEqual(mountOptions, expectedMountOptions) { t.Errorf("Expected mount options to be %v got %v", expectedMountOptions, mountOptions) } }
explode_data.jsonl/14261
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 16284, 3798, 1155, 353, 8840, 836, 8, 341, 20082, 6184, 11, 19633, 1669, 633, 11546, 1155, 340, 16867, 2643, 84427, 10368, 6184, 692, 3223, 347, 1669, 609, 85, 16, 88823, 90, 1190, 12175, 25, 77520, 16, 80222, 90, 6463, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMD5(t *testing.T) { str := "The fog is getting thicker!" result := MD5(str) if result != "bd009e4d93affc7c69101d2e0ec4bfde" { t.Errorf("md5: expect bd009e4d93affc7c69101d2e0ec4bfde, got %v", result) } }
explode_data.jsonl/15691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 6076, 20, 1155, 353, 8840, 836, 8, 341, 11355, 1669, 330, 785, 30249, 374, 3709, 58784, 24734, 9559, 1669, 13979, 20, 4199, 340, 743, 1102, 961, 330, 8940, 15, 15, 24, 68, 19, 67, 24, 18, 2649, 66, 22, 66, 21, 24, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGenerateDockerfile(t *testing.T) { str, _ := platform.GenerateDockerfile() if !strings.Contains(str, "/fabric-baseimage:") { t.Fatalf("should have generated a docker file using the fabric-baseimage, but got %s", str) } if !strings.Contains(str, "ADD binpackage.tar /usr/local/src") { t.Fatalf("should have generated a docker file that adds code package content to /usr/local/src, but got %s", str) } }
explode_data.jsonl/58618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 31115, 35, 13659, 1192, 1155, 353, 8840, 836, 8, 341, 11355, 11, 716, 1669, 5339, 57582, 35, 13659, 1192, 741, 743, 753, 18594, 11545, 4199, 11, 3521, 85154, 30013, 1805, 90220, 341, 197, 3244, 30762, 445, 5445, 614, 7907, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDecompressPubkey(t *testing.T) { key, err := DecompressPubkey(testpubkeyc) if err != nil { t.Fatal(err) } if uncompressed := FromECDSAPub(key); !bytes.Equal(uncompressed, testpubkey) { t.Errorf("wrong public key result: got %x, want %x", uncompressed, testpubkey) } if _, err := DecompressPubkey(nil); err == nil { t.Errorf("no error for nil pubkey") } if _, err := DecompressPubkey(testpubkeyc[:5]); err == nil { t.Errorf("no error for incomplete pubkey") } if _, err := DecompressPubkey(append(common.CopyBytes(testpubkeyc), 1, 2, 3)); err == nil { t.Errorf("no error for pubkey with extra bytes at the end") } }
explode_data.jsonl/17532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 4900, 316, 1873, 29162, 792, 1155, 353, 8840, 836, 8, 341, 23634, 11, 1848, 1669, 96378, 1873, 29162, 792, 8623, 9585, 792, 66, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 743, 92382, 1669, 5542,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestNewClient_SuccessfullyConnected(t *testing.T) { l, err := net.Listen("tcp", "127.0.0.1:0") assert.NoError(t, err) defer func() { assert.NoError(t, l.Close()) }() cfgPlugin := &config.Viper{Type: "yaml", ReadInCfg: []byte("rpc:\n listen: tcp://" + l.Addr().String())} assert.NoError(t, cfgPlugin.Init()) c, err := rpc.NewClient(cfgPlugin) assert.NotNil(t, c) assert.NoError(t, err) defer func() { assert.NoError(t, c.Close()) }() }
explode_data.jsonl/62147
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 3564, 2959, 87161, 3641, 21146, 1155, 353, 8840, 836, 8, 341, 8810, 11, 1848, 1669, 4179, 68334, 445, 27161, 497, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 15, 1138, 6948, 35699, 1155, 11, 1848, 692, 16867, 2915, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetCertificate(t *testing.T) { man := &Manager{Prompt: AcceptTOS} defer man.stopRenew() hello := &tls.ClientHelloInfo{ServerName: "example.org"} testGetCertificate(t, man, "example.org", hello) }
explode_data.jsonl/51424
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 1949, 33202, 1155, 353, 8840, 836, 8, 341, 197, 1515, 1669, 609, 2043, 90, 54615, 25, 20829, 51, 3126, 532, 16867, 883, 13227, 34625, 365, 741, 9598, 4791, 1669, 609, 34488, 11716, 9707, 1731, 90, 5475, 675, 25, 330, 8687,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestClustersShutdown(t *testing.T) { ctx := context.Background() clusters, mock := createClusters(t) defer shutdownClusters(t, clusters, mock) f := func(t *testing.T, c *Cluster) { err := c.Shutdown(ctx) if err != nil { t.Error("should be able to shutdown cleanly") } } // Shutdown 3 times runF(t, clusters, f) runF(t, clusters, f) runF(t, clusters, f) }
explode_data.jsonl/66610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 94992, 62004, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 39407, 14605, 11, 7860, 1669, 1855, 94992, 1155, 340, 16867, 23766, 94992, 1155, 11, 26968, 11, 7860, 692, 1166, 1669, 2915, 1155, 353, 8840, 836, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPointInPolygonWithHole(t *testing.T) { for i, polygon := range polygonsWithHoles { result := expected2[i] t.Logf("%s:", result.name) for j, xy := range pointsToTest { isInside := inside(shp.Point{xy.x, xy.y}, polygon) t.Log(xy, isInside) if result.inside[j] != isInside { t.Fail() } } } }
explode_data.jsonl/76734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 2609, 641, 37619, 2354, 39, 1263, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 29372, 1669, 2088, 68159, 2354, 39, 7151, 341, 197, 9559, 1669, 3601, 17, 989, 921, 197, 3244, 98954, 4430, 82, 12147, 1102, 2644, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestOpenWithPasswordAndMSI(t *testing.T) { dktesting.ParallelTest(t, specs, func(t *testing.T, c dktest.ContainerInfo) { SkipIfUnsupportedArch(t, c) ip, port, err := c.Port(defaultPort) if err != nil { t.Fatal(err) } addr := msConnectionStringMsiWithPassword(ip, port, true) p := &SQLServer{} _, err = p.Open(addr) if err == nil { t.Fatal("Open should fail when both password and useMsi=true are passed.") } addr = msConnectionStringMsiWithPassword(ip, port, false) p = &SQLServer{} d, err := p.Open(addr) if err != nil { t.Fatal(err) } defer func() { if err := d.Close(); err != nil { t.Error(err) } }() dt.Test(t, d, []byte("SELECT 1")) }) }
explode_data.jsonl/74118
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 5002, 2354, 4876, 3036, 4826, 40, 1155, 353, 8840, 836, 8, 341, 2698, 74, 8840, 41288, 7957, 2271, 1155, 11, 32247, 11, 2915, 1155, 353, 8840, 836, 11, 272, 40204, 1944, 33672, 1731, 8, 341, 197, 7568, 13389, 2679, 41884, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStringPointerDeref(t *testing.T) { value := "test" testCases := []struct { stringPointer *string expected string }{ { stringPointer: nil, expected: "", }, { stringPointer: &value, expected: value, }, } for _, tc := range testCases { if got := derefStringPointer(tc.stringPointer); got != tc.expected { t.Errorf("Got: %v, expected: %v", got, tc.expected) } } }
explode_data.jsonl/31006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 703, 9084, 35, 43970, 1155, 353, 8840, 836, 8, 341, 16309, 1669, 330, 1944, 698, 18185, 37302, 1669, 3056, 1235, 341, 197, 11357, 9084, 353, 917, 198, 197, 42400, 414, 914, 198, 197, 59403, 197, 197, 515, 298, 11357, 9084,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTomlHeaderFromFile(t *testing.T) { assert := assert.New(t) settings := testutil.Settings().WithSections().Build() expected, err := testutil.GetExpected("toml", "toml-HeaderFromFile") assert.Nil(err) options, err := module.NewOptions().WithOverwrite(&module.Options{ HeaderFromFile: "doc.tf", }) assert.Nil(err) module, err := testutil.GetModule(options) assert.Nil(err) printer := NewTOML(settings) actual, err := printer.Print(module, settings) assert.Nil(err) assert.Equal(expected, actual) }
explode_data.jsonl/36772
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 24732, 75, 4047, 43633, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 62930, 1669, 1273, 1314, 27000, 1005, 2354, 38122, 1005, 11066, 2822, 42400, 11, 1848, 1669, 1273, 1314, 2234, 18896, 445, 37401, 75, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDrawImage_Ratio(t *testing.T) { // d := NewGLDriver() // win := d.CreateWindow("Test") // c := win.Canvas().(*glCanvas) img := canvas.NewImageFromResource(theme.FyneLogo()) img.Resize(fyne.NewSize(10, 10)) // c.newGlImageTexture(img) // assert.Equal(t, float32(1.0), c.aspects[img]) }
explode_data.jsonl/59302
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 8137, 1906, 2568, 6266, 1155, 353, 8840, 836, 8, 341, 197, 322, 2698, 1669, 1532, 3825, 11349, 741, 197, 322, 68452, 1669, 294, 7251, 4267, 445, 2271, 1138, 197, 322, 1444, 1669, 3164, 54121, 1005, 4071, 6072, 18226, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStream(t *testing.T) { recvc := make(chan raftpb.Message, streamBufSize) propc := make(chan raftpb.Message, streamBufSize) msgapp := raftpb.Message{ Type: raftpb.MsgApp, From: 2, To: 1, Term: 1, LogTerm: 1, Index: 3, Entries: []raftpb.Entry{{Term: 1, Index: 4}}, } tests := []struct { t streamType m raftpb.Message wc chan raftpb.Message }{ { streamTypeMessage, raftpb.Message{Type: raftpb.MsgProp, To: 2}, propc, }, { streamTypeMessage, msgapp, recvc, }, { streamTypeMsgAppV2, msgapp, recvc, }, } for i, tt := range tests { h := &fakeStreamHandler{t: tt.t} srv := httptest.NewServer(h) defer srv.Close() sw := startStreamWriter(types.ID(1), newPeerStatus(types.ID(1)), &stats.FollowerStats{}, &fakeRaft{}) defer sw.stop() h.sw = sw picker := mustNewURLPicker(t, []string{srv.URL}) tr := &Transport{streamRt: &http.Transport{}} sr := startStreamReader(tr, picker, tt.t, types.ID(1), types.ID(2), types.ID(1), newPeerStatus(types.ID(1)), recvc, propc, nil) defer sr.stop() // wait for stream to work var writec chan<- raftpb.Message for { var ok bool if writec, ok = sw.writec(); ok { break } time.Sleep(time.Millisecond) } writec <- tt.m var m raftpb.Message select { case m = <-tt.wc: case <-time.After(time.Second): t.Fatalf("#%d: failed to receive message from the channel", i) } if !reflect.DeepEqual(m, tt.m) { t.Fatalf("#%d: message = %+v, want %+v", i, m, tt.m) } } }
explode_data.jsonl/68642
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 726 }
[ 2830, 3393, 3027, 1155, 353, 8840, 836, 8, 341, 67904, 7362, 1669, 1281, 35190, 52455, 16650, 8472, 11, 4269, 15064, 1695, 340, 79244, 66, 1669, 1281, 35190, 52455, 16650, 8472, 11, 4269, 15064, 1695, 340, 21169, 676, 1669, 52455, 16650, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestBinanceProvider_GetTickerPrices(t *testing.T) { p, err := NewBinanceProvider(context.TODO(), zerolog.Nop(), types.CurrencyPair{Base: "ATOM", Quote: "USDT"}) require.NoError(t, err) t.Run("valid_request_single_ticker", func(t *testing.T) { lastPrice := "34.69000000" volume := "2396974.02000000" tickerMap := map[string]BinanceTicker{} tickerMap["ATOMUSDT"] = BinanceTicker{ Symbol: "ATOMUSDT", LastPrice: lastPrice, Volume: volume, } p.tickers = tickerMap prices, err := p.GetTickerPrices(types.CurrencyPair{Base: "ATOM", Quote: "USDT"}) require.NoError(t, err) require.Len(t, prices, 1) require.Equal(t, sdk.MustNewDecFromStr(lastPrice), prices["ATOMUSDT"].Price) require.Equal(t, sdk.MustNewDecFromStr(volume), prices["ATOMUSDT"].Volume) }) t.Run("valid_request_multi_ticker", func(t *testing.T) { lastPriceAtom := "34.69000000" lastPriceLuna := "41.35000000" volume := "2396974.02000000" tickerMap := map[string]BinanceTicker{} tickerMap["ATOMUSDT"] = BinanceTicker{ Symbol: "ATOMUSDT", LastPrice: lastPriceAtom, Volume: volume, } tickerMap["LUNAUSDT"] = BinanceTicker{ Symbol: "LUNAUSDT", LastPrice: lastPriceLuna, Volume: volume, } p.tickers = tickerMap prices, err := p.GetTickerPrices( types.CurrencyPair{Base: "ATOM", Quote: "USDT"}, types.CurrencyPair{Base: "LUNA", Quote: "USDT"}, ) require.NoError(t, err) require.Len(t, prices, 2) require.Equal(t, sdk.MustNewDecFromStr(lastPriceAtom), prices["ATOMUSDT"].Price) require.Equal(t, sdk.MustNewDecFromStr(volume), prices["ATOMUSDT"].Volume) require.Equal(t, sdk.MustNewDecFromStr(lastPriceLuna), prices["LUNAUSDT"].Price) require.Equal(t, sdk.MustNewDecFromStr(volume), prices["LUNAUSDT"].Volume) }) t.Run("invalid_request_invalid_ticker", func(t *testing.T) { prices, err := p.GetTickerPrices(types.CurrencyPair{Base: "FOO", Quote: "BAR"}) require.Error(t, err) require.Equal(t, "binance provider failed to get ticker price for FOOBAR", err.Error()) require.Nil(t, prices) }) }
explode_data.jsonl/24564
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 896 }
[ 2830, 3393, 33, 24387, 5179, 13614, 87278, 62718, 1155, 353, 8840, 836, 8, 341, 3223, 11, 1848, 1669, 1532, 33, 24387, 5179, 5378, 90988, 1507, 76178, 1609, 2067, 453, 1507, 4494, 77186, 12443, 90, 3978, 25, 330, 77932, 497, 24535, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigsFetch(t *testing.T) { t.Run("should return values fetched from store", func(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() var pluginAConfigs []map[string]interface{} var pluginBConfigs []map[string]interface{} pluginAResponse := map[string]interface{}{ "name": "x-service", "type": "server", } pluginBResponse := map[string]interface{}{ "app-name": "y-service", "app-env": "staging", } stevedoreContext := stevedore.Context{Environment: "staging"} pluginAConfigProvider := mockPlugin.NewMockConfigInterface(ctrl) pluginAConfigProvider.EXPECT().Type().Return(pkgPlugin.TypeConfig, nil) pluginAConfigProvider.EXPECT().Fetch(gomock.Any(), gomock.Any()).Return(pluginAResponse, nil) pluginBConfigProvider := mockPlugin.NewMockConfigInterface(ctrl) pluginBConfigProvider.EXPECT().Type().Return(pkgPlugin.TypeConfig, nil) pluginBConfigProvider.EXPECT().Fetch(gomock.Any(), gomock.Any()).Return(pluginBResponse, nil) plugins := provider.Plugins{"pluginAStore": provider.ClientPlugin{PluginImpl: pluginAConfigProvider}, "pluginBStore": provider.ClientPlugin{PluginImpl: pluginBConfigProvider}} configProviders, _ := plugins.ConfigProviders() expected := stevedore.Substitute{ "app-env": "staging", "app-name": "y-service", "name": "x-service", "type": "server", } configs := stevedore.Configs{ "pluginAStore": pluginAConfigs, "pluginBStore": pluginBConfigs, } substitutes, err := configs.Fetch(configProviders, stevedoreContext) assert.NoError(t, err) if !cmp.Equal(expected, substitutes) { assert.Fail(t, cmp.Diff(expected, substitutes)) } }) t.Run("should fail to return values from store when any plugin call fails", func(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() err := fmt.Errorf("some error") pluginConfigs := []map[string]interface{}{{"name": "plugin"}} stevedoreContext := stevedore.Context{Environment: "staging"} contextAsMap, _ := stevedoreContext.Map() pluginConfigProvider := mockPlugin.NewMockConfigInterface(ctrl) 
pluginConfigProvider.EXPECT().Type().Return(pkgPlugin.TypeConfig, nil) pluginConfigProvider.EXPECT().Fetch(contextAsMap, gomock.Any()).Return(nil, err) plugins := provider.Plugins{"pluginStore": provider.ClientPlugin{PluginImpl: pluginConfigProvider}} configProviders, _ := plugins.ConfigProviders() configs := stevedore.Configs{ "pluginStore": pluginConfigs, } substitutes, err := configs.Fetch(configProviders, stevedoreContext) assert.Error(t, err) assert.Equal(t, "error in fetching from provider: some error", err.Error()) assert.Nil(t, substitutes) }) }
explode_data.jsonl/48494
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 961 }
[ 2830, 3393, 84905, 20714, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5445, 470, 2750, 41442, 504, 3553, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 197, 16867, 23743, 991, 181...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFindChartInRepoIndex(t *testing.T) { name := "foo" version := "v1.0.0" chartURL := "wordpress-0.1.0.tgz" repoURL := "http://charts.example.com/repo/" expectedURL := fmt.Sprintf("%s%s", repoURL, chartURL) chartMeta := chartv2.Metadata{Name: name, Version: version} chartVersion := repo.ChartVersion{URLs: []string{chartURL}} chartVersion.Metadata = &chartMeta chartVersions := []*repo.ChartVersion{&chartVersion} entries := map[string]repo.ChartVersions{} entries[name] = chartVersions index := &repo.IndexFile{APIVersion: "v1", Generated: time.Now(), Entries: entries} res, err := findChartInRepoIndex(index, repoURL, name, version) if err != nil { t.Errorf("Unexpected error %v", err) } if res != expectedURL { t.Errorf("Expecting %s to be resolved as %s", res, expectedURL) } }
explode_data.jsonl/12747
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 9885, 14488, 641, 25243, 1552, 1155, 353, 8840, 836, 8, 341, 11609, 1669, 330, 7975, 698, 74954, 1669, 330, 85, 16, 13, 15, 13, 15, 698, 197, 15941, 3144, 1669, 330, 58215, 12, 15, 13, 16, 13, 15, 734, 46589, 698, 1720...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddItemNoEnclosureGUIDValid(t *testing.T) { t.Parallel() // arrange theLink := "http://someotherurl.com/story.html" p := podcast.New("title", "link", "description", nil, nil) i := podcast.Item{Title: "title", Description: "desc"} i.Link = theLink // act added, err := p.AddItem(i) // assert assert.EqualValues(t, 1, added) assert.NoError(t, err) assert.Len(t, p.Items, 1) assert.EqualValues(t, theLink, p.Items[0].GUID) }
explode_data.jsonl/73080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 2212, 1234, 2753, 7408, 11653, 41778, 4088, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 30893, 198, 32088, 3939, 1669, 330, 1254, 1110, 14689, 1575, 1085, 905, 78389, 2564, 698, 3223, 1669, 17711, 7121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRecognizeIdentifierRecognizeErr(t *testing.T) { a := assert.New(t) opts := makeOptions(strings.NewReader("")) s, _ := scanner.Scan(opts) l := &lexer{ s: s, opts: opts, } l.indent.PushBack(1) r := &recognizeIdentifier{ l: l, s: recogString(l).(*recognizeString), } s.Push(common.AugChar{ C: common.Err, Loc: common.Location{ File: "file", B: common.FilePos{L: 3, C: 2}, E: common.FilePos{L: 3, C: 3}, }, Val: assert.AnError, }) ch := common.AugChar{ C: 's', Loc: common.Location{ File: "file", B: common.FilePos{L: 3, C: 1}, E: common.FilePos{L: 3, C: 2}, }, } r.Recognize(ch) a.Nil(l.s) a.Equal(1, l.tokens.Len()) a.Equal(&common.Token{ Sym: common.TokError, Loc: common.Location{ File: "file", B: common.FilePos{L: 3, C: 2}, E: common.FilePos{L: 3, C: 3}, }, Val: assert.AnError, }, l.tokens.Front().Value.(*common.Token)) }
explode_data.jsonl/7683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 17915, 551, 8714, 17915, 551, 7747, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 340, 64734, 1669, 1281, 3798, 51442, 68587, 73303, 1903, 11, 716, 1669, 20775, 54874, 30885, 340, 8810, 1669, 609, 38815, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRemovePrivateAS(t *testing.T) { aspathParam := []bgp.AsPathParamInterface{bgp.NewAs4PathParam(2, []uint32{64512, 64513, 1, 2})} aspath := bgp.NewPathAttributeAsPath(aspathParam) nlri := bgp.NewIPAddrPrefix(24, "30.30.30.0") path := NewPath(nil, nlri, false, []bgp.PathAttributeInterface{aspath}, time.Now(), false) path.RemovePrivateAS(10, config.REMOVE_PRIVATE_AS_OPTION_ALL) list := path.GetAsList() assert.Equal(t, len(list), 2) assert.Equal(t, list[0], uint32(1)) assert.Equal(t, list[1], uint32(2)) path = NewPath(nil, nlri, false, []bgp.PathAttributeInterface{aspath}, time.Now(), false) path.RemovePrivateAS(10, config.REMOVE_PRIVATE_AS_OPTION_REPLACE) list = path.GetAsList() assert.Equal(t, len(list), 4) assert.Equal(t, list[0], uint32(10)) assert.Equal(t, list[1], uint32(10)) assert.Equal(t, list[2], uint32(1)) assert.Equal(t, list[3], uint32(2)) }
explode_data.jsonl/57493
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 13021, 16787, 1911, 1155, 353, 8840, 836, 8, 341, 60451, 2343, 2001, 1669, 3056, 12220, 79, 20242, 93492, 5051, 90, 12220, 79, 7121, 2121, 19, 93492, 7, 17, 11, 3056, 2496, 18, 17, 90, 21, 19, 20, 16, 17, 11, 220, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHostKeyCheck(t *testing.T) { for _, tt := range []struct { name string wantError string key PublicKey }{ {"no callback", "must specify HostKeyCallback", nil}, {"correct key", "", testSigners["rsa"].PublicKey()}, {"mismatch", "mismatch", testSigners["ecdsa"].PublicKey()}, } { c1, c2, err := netPipe() if err != nil { t.Fatalf("netPipe: %v", err) } defer c1.Close() defer c2.Close() serverConf := &ServerConfig{ NoClientAuth: true, } serverConf.AddHostKey(testSigners["rsa"]) go NewServerConn(c1, serverConf) clientConf := ClientConfig{ User: "user", } if tt.key != nil { clientConf.HostKeyCallback = FixedHostKey(tt.key) } _, _, _, err = NewClientConn(c2, "", &clientConf) if err != nil { if tt.wantError == "" || !strings.Contains(err.Error(), tt.wantError) { t.Errorf("%s: got error %q, missing %q", tt.name, err.Error(), tt.wantError) } } else if tt.wantError != "" { t.Errorf("%s: succeeded, but want error string %q", tt.name, tt.wantError) } } }
explode_data.jsonl/11090
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 459 }
[ 2830, 3393, 9296, 1592, 3973, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 50780, 1454, 914, 198, 197, 23634, 981, 70280, 198, 197, 59403, 197, 197, 4913, 2152, 4822, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestWithInt32_SetsTheBody(t *testing.T) { r, err := Prepare(&http.Request{}, WithInt32(42)) if err != nil { t.Fatalf("autorest: WithInt32 failed with error (%v)", err) } s, err := ioutil.ReadAll(r.Body) if err != nil { t.Fatalf("autorest: WithInt32 failed with error (%v)", err) } if r.ContentLength != int64(len(fmt.Sprintf("%v", 42))) { t.Fatalf("autorest: WithInt32 set Content-Length to %v, expected %v", r.ContentLength, int64(len(fmt.Sprintf("%v", 42)))) } v, err := strconv.ParseInt(string(s), 10, 32) if err != nil || int32(v) != int32(42) { t.Fatalf("autorest: WithInt32 incorrectly encoded the boolean as %v", s) } }
explode_data.jsonl/20968
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 2354, 1072, 18, 17, 1098, 1415, 785, 5444, 1155, 353, 8840, 836, 8, 972, 7000, 11, 1848, 1669, 31166, 2099, 1254, 9659, 6257, 1871, 197, 197, 2354, 1072, 18, 17, 7, 19, 17, 5784, 743, 1848, 961, 2092, 972, 197, 3244, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestReflectionOnJsField(t *testing.T) { a := StructWithJsField1{Object: js.Global.Get("Array").New(42)} wa := Wrapper1{StructWithJsField1: a} if reflect.ValueOf(a).FieldByName("Length").Int() != 42 || reflect.ValueOf(&wa).Elem().FieldByName("WrapperLength").Int() != 42 { t.Fail() } reflect.ValueOf(&wa).Elem().FieldByName("WrapperLength").Set(reflect.ValueOf(10)) if a.Length != 10 { t.Fail() } }
explode_data.jsonl/56784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 44238, 1925, 30480, 1877, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 16139, 2354, 30480, 1877, 16, 90, 1190, 25, 6994, 27381, 2234, 445, 1857, 1827, 3564, 7, 19, 17, 10569, 6692, 64, 1669, 35488, 16, 90, 9422, 2354, 30480,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSetRequest_NilMsgPayload(t *testing.T) { t.Parallel() ctx := inslogger.TestContext(t) msg := payload.Meta{ Polymorph: uint32(payload.TypeMeta), Payload: nil, } handler := handle.NewSetIncomingRequest(nil, msg, false) err := handler.Present(ctx, flow.NewFlowMock(t)) require.Error(t, err) }
explode_data.jsonl/49667
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 1649, 1900, 1604, 321, 6611, 29683, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 1669, 1640, 9786, 8787, 1972, 1155, 340, 21169, 1669, 7729, 58806, 515, 197, 10025, 337, 1600, 16347, 25, 2622, 18, 17, 26772,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSigning(t *testing.T) { context := NewSecp256k1Context() priv_1 := context.NewRandomPrivateKey() pub_1 := context.GetPublicKey(priv_1) sig_1 := context.Sign(data, priv_1) if !context.Verify(sig_1, data, pub_1) { t.Error( "Context fails t to verify signature", priv_1, pub_1, sig_1, ) } priv_2 := context.NewRandomPrivateKey() sig_2 := context.Sign(data, priv_2) if context.Verify(sig_2, data, pub_1) { t.Error( "Context verifies wrong signature", priv_2, pub_1, sig_2, ) } // Verify that everything returns the right algorithm name assertSecp256k1(context.GetAlgorithmName(), t) assertSecp256k1(priv_1.GetAlgorithmName(), t) assertSecp256k1(pub_1.GetAlgorithmName(), t) assertSecp256k1(priv_2.GetAlgorithmName(), t) }
explode_data.jsonl/4929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 315 }
[ 2830, 3393, 93358, 1155, 353, 8840, 836, 8, 341, 28413, 1669, 1532, 8430, 79, 17, 20, 21, 74, 16, 1972, 741, 71170, 62, 16, 1669, 2266, 7121, 13999, 75981, 741, 62529, 62, 16, 1669, 2266, 2234, 61822, 51750, 62, 16, 692, 84841, 62, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCompareAndSwapHashedKey(t *testing.T) { m := &Map{} elephant := &Animal{"elephant"} monkey := &Animal{"monkey"} m.Set(1<<(strconv.IntSize-2), elephant) if m.Len() != 1 { t.Error("map should contain exactly one element.") } if !m.CAS(1<<(strconv.IntSize-2), elephant, monkey) { t.Error("Cas should success if expectation met") } if m.Len() != 1 { t.Error("map should contain exactly one element.") } if m.CAS(1<<(strconv.IntSize-2), elephant, monkey) { t.Error("Cas should fail if expectation didn't meet") } if m.Len() != 1 { t.Error("map should contain exactly one element.") } item, ok := m.Get(1 << (strconv.IntSize - 2)) if !ok { t.Error("ok should be true for item stored within the map.") } if item != monkey { t.Error("wrong item returned.") } }
explode_data.jsonl/30800
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 27374, 3036, 46179, 6370, 291, 1592, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 609, 2227, 16094, 7727, 273, 26924, 1669, 609, 41415, 4913, 10068, 26924, 16707, 197, 96016, 1669, 609, 41415, 4913, 96016, 63159, 2109, 4202, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestWebSocketReverseProxyServeHTTPHandler drives a full websocket upgrade
// handshake through the proxy against a no-op websocket backend and checks
// the raw 101 handshake bytes captured from the hijacked connection.
func TestWebSocketReverseProxyServeHTTPHandler(t *testing.T) {
	// No-op websocket backend simply allows the WS connection to be
	// accepted then it will be immediately closed. Perfect for testing.
	accepted := make(chan struct{})
	wsNop := httptest.NewServer(websocket.Handler(func(ws *websocket.Conn) { close(accepted) }))
	defer wsNop.Close()
	// Get proxy to use for the test
	p := newWebSocketTestProxy(wsNop.URL, false, 30*time.Second)
	// Create client request
	r := httptest.NewRequest("GET", "/", nil)
	// Headers required by the websocket handshake; the Sec-WebSocket-Key is a
	// fixed value so the expected Sec-WebSocket-Accept below is deterministic.
	r.Header = http.Header{
		"Connection":            {"Upgrade"},
		"Upgrade":               {"websocket"},
		"Origin":                {wsNop.URL},
		"Sec-WebSocket-Key":     {"x3JJHMbDL1EzLkh9GBhXDw=="},
		"Sec-WebSocket-Version": {"13"},
	}
	// Capture the request
	w := &recorderHijacker{httptest.NewRecorder(), new(fakeConn)}
	// Booya! Do the test.
	p.ServeHTTP(w, r)
	// Make sure the backend accepted the WS connection.
	// Mostly interested in the Upgrade and Connection response headers
	// and the 101 status code.
	expected := []byte("HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: HSmrc0sMlYUkAGmm5OPpG2HaGWk=\r\n\r\n")
	actual := w.fakeConn.writeBuf.Bytes()
	if !bytes.Equal(actual, expected) {
		t.Errorf("Expected backend to accept response:\n'%s'\nActually got:\n'%s'", expected, actual)
	}
	// wait a minute for backend handling, see issue 1654.
	// NOTE(review): fixed sleeps are inherently racy; the select below only
	// best-effort checks that the backend handler actually ran.
	time.Sleep(10 * time.Millisecond)
	select {
	case <-accepted:
	default:
		t.Error("Expect a accepted websocket connection, but not")
	}
}
explode_data.jsonl/64231
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 593 }
[ 2830, 3393, 61238, 45695, 16219, 60421, 9230, 3050, 1155, 353, 8840, 836, 8, 341, 197, 322, 2308, 29492, 58943, 19163, 4936, 6147, 279, 24906, 3633, 311, 387, 198, 197, 322, 11666, 1221, 432, 686, 387, 7069, 7877, 13, 23239, 369, 7497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRHostSplit(t *testing.T) { s, err := rHostSplit("[::]:6379:1:abc") assert.NoError(t, err) assert.Equal(t, "::", s.host) assert.Equal(t, 6379, s.port) assert.Equal(t, 1, s.db) assert.Equal(t, "abc", s.pass) s, err = rHostSplit("127.0.0.1:6379:1:abc") assert.NoError(t, err) assert.Equal(t, "127.0.0.1", s.host) assert.Equal(t, 6379, s.port) assert.Equal(t, 1, s.db) assert.Equal(t, "abc", s.pass) }
explode_data.jsonl/45206
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 49, 9296, 20193, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 435, 9296, 20193, 10937, 486, 5669, 21, 18, 22, 24, 25, 16, 25, 13683, 1138, 6948, 35699, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, 70154, 497, 274, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSubnetID(t *testing.T) { cases := []struct { Input string Valid bool }{ { // empty Input: "", Valid: false, }, { // missing SubscriptionId Input: "/", Valid: false, }, { // missing value for SubscriptionId Input: "/subscriptions/", Valid: false, }, { // missing ResourceGroup Input: "/subscriptions/12345678-1234-9876-4563-123456789012/", Valid: false, }, { // missing value for ResourceGroup Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/", Valid: false, }, { // missing VirtualNetworkName Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/", Valid: false, }, { // missing value for VirtualNetworkName Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/", Valid: false, }, { // missing Name Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/", Valid: false, }, { // missing value for Name Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/", Valid: false, }, { // valid Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.Network/virtualNetworks/network1/subnets/subnet1", Valid: true, }, { // upper-cased Input: "/SUBSCRIPTIONS/12345678-1234-9876-4563-123456789012/RESOURCEGROUPS/RESGROUP1/PROVIDERS/MICROSOFT.NETWORK/VIRTUALNETWORKS/NETWORK1/SUBNETS/SUBNET1", Valid: false, }, } for _, tc := range cases { t.Logf("[DEBUG] Testing Value %s", tc.Input) _, errors := SubnetID(tc.Input, "test") valid := len(errors) == 0 if tc.Valid != valid { t.Fatalf("Expected %t but got %t", tc.Valid, valid) } } }
explode_data.jsonl/35117
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 843 }
[ 2830, 3393, 3136, 4711, 915, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 66588, 914, 198, 197, 197, 4088, 1807, 198, 197, 92, 4257, 197, 197, 515, 298, 197, 322, 4287, 198, 298, 66588, 25, 8324, 298, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestNew is a table-driven test for the metrics constructor New. It checks
// that an invalid struct tag (an over-long key name) is rejected with the
// expected error, and that a struct with valid `info` tags yields a non-nil
// metric.
func TestNew(t *testing.T) {
	t.Parallel()
	// args bundles the parameters passed to New for one case.
	type args struct {
		name        string
		fullname    string
		description string
		i           interface{}
	}
	// want holds the expected metric and error for one case.
	type want struct {
		want metrics.Metric
		err  error
	}
	// test is a single named case, with optional custom check/setup/teardown.
	type test struct {
		name       string
		args       args
		want       want
		checkFunc  func(want, metrics.Metric, error) error
		beforeFunc func(args)
		afterFunc  func(args)
	}
	// defaultCheckFunc compares both the error (via errors.Is) and the
	// returned metric (via reflect.DeepEqual) against the expectation.
	defaultCheckFunc := func(w want, got metrics.Metric, err error) error {
		if !errors.Is(err, w.err) {
			return errors.Errorf("got_error: \"%#v\",\n\t\t\t\twant: \"%#v\"", err, w.err)
		}
		if !reflect.DeepEqual(got, w.want) {
			return errors.Errorf("got: \"%#v\",\n\t\t\t\twant: \"%#v\"", got, w.want)
		}
		return nil
	}
	tests := []test{
		func() test {
			// The `info` tag below exceeds the 255-character key-name limit,
			// so New must reject it.
			type x struct {
				A string `info:"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"`
			}
			return test{
				name: "returns error when the passed struct's field is invalid",
				args: args{
					name:        "x/info",
					fullname:    "x info",
					description: "description",
					i: x{
						A: "a",
					},
				},
				want: want{
					want: nil,
					err:  errors.New("invalid key name: only ASCII characters accepted; max length must be 255 characters"),
				},
				checkFunc: defaultCheckFunc,
			}
		}(),
		func() test {
			// Two well-formed `info` tags: New should succeed.
			type x struct {
				A string `info:"data1"`
				B string `info:"data2"`
			}
			return test{
				name: "returns new metric when the passed struct is valid",
				args: args{
					name:        "x/info",
					fullname:    "x info",
					description: "description",
					i: x{
						A: "a",
						B: "b",
					},
				},
				want: want{},
				// Custom check: only require a non-nil metric, since the
				// concrete metric value is not predictable here.
				checkFunc: func(w want, got metrics.Metric, err error) error {
					if !errors.Is(err, w.err) {
						return errors.Errorf("got_error: \"%#v\",\n\t\t\t\twant: \"%#v\"", err, w.err)
					}
					if got == nil {
						return errors.New("got is nil")
					}
					return nil
				},
			}
		}(),
	}
	for _, tc := range tests {
		test := tc
		t.Run(test.name, func(tt *testing.T) {
			tt.Parallel()
			// Fail the subtest if it leaks goroutines.
			defer goleak.VerifyNone(tt, goleak.IgnoreCurrent())
			if test.beforeFunc != nil {
				test.beforeFunc(test.args)
			}
			if test.afterFunc != nil {
				defer test.afterFunc(test.args)
			}
			if test.checkFunc == nil {
				test.checkFunc = defaultCheckFunc
			}
			got, err := New(test.args.name, test.args.fullname, test.args.description, test.args.i)
			if err := test.checkFunc(test.want, got, err); err != nil {
				tt.Errorf("error = %v", err)
			}
		})
	}
}
explode_data.jsonl/70576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1206 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 13158, 2827, 2036, 341, 197, 11609, 286, 914, 198, 197, 94042, 606, 262, 914, 198, 197, 42407, 914, 198, 197, 8230, 1843, 3749, 16094, 197, 532, 13158, 1366, 2036...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestApiTest_MatchesJSONResponseBodyWithWhitespace(t *testing.T) { handler := http.NewServeMux() handler.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusCreated) w.Header().Set("Content-Type", "application/json") _, err := w.Write([]byte(`{"a": 12345, "b": "hi"}`)) if err != nil { panic(err) } }) apitest.New(). Handler(handler). Get("/hello"). Expect(t). Body(`{ "a": 12345, "b": "hi" }`). Status(http.StatusCreated). End() }
explode_data.jsonl/54801
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 6563, 2271, 1245, 9118, 5370, 29637, 2354, 73804, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 1758, 7121, 60421, 44, 2200, 741, 53326, 63623, 4283, 14990, 497, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_ChunkReader(t *testing.T) { assertReadAll(t, newChunkReader(strings.NewReader(influxText))) assertReadAll(t, newChunkReaderWithSize(strings.NewReader(influxText), 64)) assertReadAll(t, newChunkReaderWithSize(strings.NewReader(influxText), 128)) assertReadAll(t, newChunkReaderWithSize(strings.NewReader(influxText), 256)) }
explode_data.jsonl/74833
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 27588, 3122, 5062, 1155, 353, 8840, 836, 8, 341, 6948, 4418, 2403, 1155, 11, 501, 28304, 5062, 51442, 68587, 5900, 36706, 1178, 19235, 6948, 4418, 2403, 1155, 11, 501, 28304, 5062, 2354, 1695, 51442, 68587, 5900, 36706, 1178, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadAuthors(t *testing.T) { authors := readAuthors("resources/authors_test.csv") var author = authors[0] if len(authors) != 1 { t.Errorf("readAuthors is not removing the first line of the file (header)") } if author.firstName != "Paul" { t.Errorf("Read firstName incorretly") } if author.lastName != "Walter" { t.Errorf("Read lastName incorretly") } if author.email != "null-walter@echocat.org" { t.Errorf("Read email incorretly") } }
explode_data.jsonl/9725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 4418, 57890, 1155, 353, 8840, 836, 8, 341, 197, 47005, 1669, 1349, 57890, 445, 12745, 17369, 1087, 4452, 11219, 1138, 2405, 3150, 284, 12014, 58, 15, 2533, 743, 2422, 27435, 1087, 8, 961, 220, 16, 341, 197, 3244, 13080, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSearchSearchSug(t *testing.T) { var ( ctx = context.Background() req = &searchMdl.ReqSug{ MobiApp: "android_tv_yst", Build: "1011", Platform: "android", Term: "test", } ) convey.Convey("SearchSug", t, func(c convey.C) { result, err := d.SearchSug(ctx, req) c.Convey("Then err should be nil.result should not be nil.", func(c convey.C) { c.So(err, convey.ShouldBeNil) c.So(result, convey.ShouldNotBeNil) }) }) }
explode_data.jsonl/10801
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 5890, 5890, 50, 768, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 20985, 284, 2266, 19047, 741, 197, 24395, 284, 609, 1836, 44, 8736, 2817, 80, 50, 768, 515, 298, 9209, 18337, 2164, 25, 220, 330, 5954, 46132, 62, 597, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCheckAuthorization_Whitelist verifies that a request rejected by the
// normal policy is admitted once the caller is placed on a whitelist, both
// by e-mail address and by group membership.
func TestCheckAuthorization_Whitelist(t *testing.T) {
	auth := setupAuthorizationTest(t)
	auth.resource = "dataset_example"
	id, err := auth.dam.populateIdentityVisas(auth.ctx, auth.id, auth.cfg)
	if err != nil {
		t.Fatalf("unable to obtain passport identity: %v", err)
	}
	// Establish rejection due to not meeting policy.
	err = checkAuthorization(auth.ctx, id, auth.ttl, auth.resource, auth.view, auth.role, auth.cfg, test.TestClientID, auth.dam.ValidateCfgOpts(storage.DefaultRealm, nil))
	if status.Code(err) != codes.PermissionDenied {
		t.Errorf("setup checkAuthorization(ctx, id, %v, %q, %q, %q, cfg, %q) failed, expected %d, got: %v", auth.ttl, auth.resource, auth.view, auth.role, test.TestClientID, codes.PermissionDenied, err)
	}
	if errutil.ErrorReason(err) != errRejectedPolicy {
		t.Errorf("setup errutil.ErrorReason() = %s want %s", errutil.ErrorReason(err), errRejectedPolicy)
	}
	// Now try again with being on the whitelist.
	// Replace the role's policies with a whitelist keyed by user e-mail;
	// one of the addresses matches the test identity.
	auth.cfg.Resources[auth.resource].Views[auth.view].Roles[auth.role].Policies = []*pb.ViewRole_ViewPolicy{{
		Name: whitelistPolicyName,
		Args: map[string]string{
			"users": "abc@example.org;dr_joe@faculty.example.edu;foo@bar.org",
		},
	}}
	err = checkAuthorization(auth.ctx, id, auth.ttl, auth.resource, auth.view, auth.role, auth.cfg, test.TestClientID, auth.dam.ValidateCfgOpts(storage.DefaultRealm, nil))
	if err != nil {
		t.Errorf("whitelist by email: checkAuthorization(ctx, id, %v, %q, %q, %q, cfg, %q) failed: %v", auth.ttl, auth.resource, auth.view, auth.role, test.TestClientID, err)
	}
	// Use group membership whitelist
	auth.cfg.Resources[auth.resource].Views[auth.view].Roles[auth.role].Policies = []*pb.ViewRole_ViewPolicy{{
		Name: whitelistPolicyName,
		Args: map[string]string{
			"groups": "whitelisted",
		},
	}}
	err = checkAuthorization(auth.ctx, id, auth.ttl, auth.resource, auth.view, auth.role, auth.cfg, test.TestClientID, auth.dam.ValidateCfgOpts(storage.DefaultRealm, nil))
	if err != nil {
		t.Errorf("whitelist by group membership: checkAuthorization(ctx, id, %v, %q, %q, %q, cfg, %q) failed: %v", auth.ttl, auth.resource, auth.view, auth.role, test.TestClientID, err)
	}
}
explode_data.jsonl/18491
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 809 }
[ 2830, 3393, 3973, 18124, 62, 1639, 57645, 1155, 353, 8840, 836, 8, 341, 78011, 1669, 6505, 18124, 2271, 1155, 340, 78011, 24013, 284, 330, 21378, 39304, 1837, 15710, 11, 1848, 1669, 4166, 950, 309, 91243, 18558, 3120, 300, 27435, 30608, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestConcurrentCount(t *testing.T) { err := Connect("./db/mstat.db") if err != nil { t.Fatal(err) } done := make(chan bool) for i := 0; i < 10; i++ { go func() { r, err := Count("stat3", "1PTM", 1) if err != nil { t.Log(err) } else { t.Log(r) } done <- true }() } for j := 0; j < 10; j++ { <-done } m, err := GetCounter("stat3") if err != nil { t.Fatal(err) } else { t.Log(m["1PTM"]) } Disconnect() }
explode_data.jsonl/1822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 1109, 3231, 2507, 1155, 353, 8840, 836, 8, 1476, 9859, 1669, 13015, 13988, 1999, 3183, 9878, 7076, 5130, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 40495, 1669, 1281, 35190, 1807, 692, 2023, 600, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSession1(t *testing.T) { sm := NewSessionManager(&memorySessionStore{}) r, err := http.NewRequest("GET", "http://localhost/", nil) if err != nil { t.Fatal(err) } if _, err := sm.Get(r, false); !errors.Is(err, errNoSession) { t.Fatal(err) } s, err := sm.Get(r, true) if err != nil { t.Fatal(err) } if s == nil { t.Fatal("wtf") } w := &DummyWriter{} s.Data["a"] = 1 s.Save(context.TODO(), r, w) }
explode_data.jsonl/77721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 5283, 16, 1155, 353, 8840, 836, 8, 341, 72023, 1669, 1532, 5283, 2043, 2099, 17269, 5283, 6093, 6257, 692, 7000, 11, 1848, 1669, 1758, 75274, 445, 3806, 497, 330, 1254, 1110, 8301, 28105, 2092, 340, 743, 1848, 961, 2092, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestALS_MovieLens trains ALS on the built-in MovieLens-1M dataset and
// checks ranking quality (NDCG), prediction consistency, incremental fit
// behavior, and the effect of clearing the model.
func TestALS_MovieLens(t *testing.T) {
	trainSet, testSet, err := LoadDataFromBuiltIn("ml-1m")
	assert.Nil(t, err)
	m := NewALS(model.Params{
		model.NFactors: 8,
		model.Reg:      0.015,
		model.NEpochs:  10,
		model.Alpha:    0.05,
	})
	score := m.Fit(trainSet, testSet, fitConfig)
	// Expect NDCG near the 0.36 benchmark within benchEpsilon.
	assertEpsilon(t, 0.36, score.NDCG, benchEpsilon)
	// test predict
	// The string-keyed public Predict must agree with the index-based
	// internal prediction for the same user/item.
	assert.Equal(t, m.Predict("1", "1"), m.InternalPredict(1, 1))
	// test increment test
	// With zero epochs, refitting should leave quality essentially unchanged.
	m.nEpochs = 0
	scoreInc := m.Fit(trainSet, testSet, fitConfig)
	assertEpsilon(t, score.NDCG, scoreInc.NDCG, incrEpsilon)
	// test clear
	// After Clear, a zero-epoch fit starts from scratch and quality drops.
	m.Clear()
	score = m.Fit(trainSet, testSet, fitConfig)
	assert.Less(t, score.NDCG, float32(0.2))
}
explode_data.jsonl/67651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 299 }
[ 2830, 3393, 46737, 1245, 6327, 98105, 1155, 353, 8840, 836, 8, 341, 197, 10397, 1649, 11, 1273, 1649, 11, 1848, 1669, 8893, 1043, 3830, 54300, 641, 445, 1014, 12, 16, 76, 1138, 6948, 59678, 1155, 11, 1848, 340, 2109, 1669, 1532, 46737...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTemplateLookUp(t *testing.T) { t1 := New("foo") if t1.Lookup("foo") != nil { t.Error("Lookup returned non-nil value for undefined template foo") } t1.New("bar") if t1.Lookup("bar") != nil { t.Error("Lookup returned non-nil value for undefined template bar") } t1.Parse(`{{define "foo"}}test{{end}}`) if t1.Lookup("foo") == nil { t.Error("Lookup returned nil value for defined template") } }
explode_data.jsonl/71990
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 7275, 10380, 2324, 1155, 353, 8840, 836, 8, 341, 3244, 16, 1669, 1532, 445, 7975, 1138, 743, 259, 16, 79261, 445, 7975, 899, 961, 2092, 341, 197, 3244, 6141, 445, 34247, 5927, 2477, 83248, 897, 369, 5614, 3811, 15229, 1138...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRuleCountConst(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := [][]string{ { `select count(1) from tbl;`, `select count(col) from tbl;`, }, { `select count(*) from tbl`, `select count(DISTINCT col) from tbl`, }, } for _, sql := range sqls[0] { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleCountConst() if rule.Item != "FUN.005" { t.Error("Rule not match:", rule.Item, "Expect : FUN.005") } } else { t.Error("sqlparser.Parse Error:", err) } } for _, sql := range sqls[1] { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleCountConst() if rule.Item != "OK" { t.Error("Rule not match:", rule.Item, "Expect : OK") } } else { t.Error("sqlparser.Parse Error:", err) } } common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 11337, 2507, 19167, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 52931, 917, 515, 197, 197, 515, 298, 197, 63, 1742, 1760, 7, 16, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestInstanceTypeCache(t *testing.T) { c := newAsgInstanceTypeCache(nil) err := c.Add(instanceTypeCachedObject{ name: "123", instanceType: "t2.medium", }) require.NoError(t, err) obj, ok, err := c.GetByKey("123") require.NoError(t, err) require.True(t, ok) require.Equal(t, "t2.medium", obj.(instanceTypeCachedObject).instanceType) }
explode_data.jsonl/68668
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 2523, 929, 8233, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 501, 32, 1991, 2523, 929, 8233, 27907, 340, 9859, 1669, 272, 1904, 21650, 929, 70293, 1190, 515, 197, 11609, 25, 260, 330, 16, 17, 18, 756, 197, 56256, 929, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIntegration_CreateTaskAndList spins up the scheduler behind an HTTP
// test server, submits a task through the client, and verifies the task
// appears both in the full listing and via direct lookup by UUID.
func TestIntegration_CreateTaskAndList(t *testing.T) {
	scheduler := server.NewScheduler()
	// NOTE(review): this local "server" shadows the imported server package
	// for the rest of the function — confusing but harmless here.
	server := httptest.NewServer(http.HandlerFunc(scheduler.TaskHandler))
	defer server.Close()
	client := client.Client{Url: server.URL}
	createdTask, err := client.Execute("true")
	if err != nil {
		t.Error(err)
	}
	// list all tasks
	tasks, err := client.GetTasks()
	if err != nil {
		t.Error(err)
	}
	if tasks[0].Executable != "true" {
		t.Error("Expected a task that runs true")
	}
	if tasks[0].Status != shared.Pending {
		t.Error("Expected a pending task")
	}
	// list created task
	task, err := client.GetTask(createdTask.Uuid)
	if err != nil {
		t.Error(err)
	}
	if task.Executable != "true" {
		t.Error("Expected a task that runs true")
	}
	if task.Status != shared.Pending {
		t.Error("Expected a pending task")
	}
	// NOTE(review): this fails when SubmittedTime is in the future, i.e. it
	// checks the timestamp is plausible; the message says "should be set" —
	// confirm the intended semantics with the author.
	if task.SubmittedTime.After(time.Now()) {
		t.Error("A submitted time should be set")
	}
}
explode_data.jsonl/62110
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 52464, 34325, 6262, 3036, 852, 1155, 353, 8840, 836, 8, 1476, 1903, 15222, 1669, 3538, 7121, 38878, 741, 41057, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 1141, 15222, 28258, 3050, 1171, 16867, 3538, 10421, 2822, 25291, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestFindObjectFailsOnFailedInit(t *testing.T) { ctx := MockCtx{} ctx.FindObjectsFinalFunc = findObjectsFinalOK ctx.FindObjectsFunc = func(pkcs11.SessionHandle, int) ([]pkcs11.ObjectHandle, bool, error) { return []pkcs11.ObjectHandle{1}, false, nil } // test FindObject fails when FindObjectsInit fails ctx.FindObjectsInitFunc = func(pkcs11.SessionHandle, []*pkcs11.Attribute) error { return errors.New("broken") } s := &Session{ctx, 0} _, err := s.FindObject(nil) test.AssertError(t, err, "FindObject didn't fail when FindObjectsInit failed") }
explode_data.jsonl/1154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 80835, 37, 6209, 1925, 9408, 3803, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 14563, 23684, 16094, 20985, 9998, 11543, 19357, 9626, 284, 1477, 11543, 19357, 3925, 198, 20985, 9998, 11543, 9626, 284, 2915, 39928, 4837, 16, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestHandleRealmsIndex exercises the admin realms-index handler: common
// middleware behavior, a 500 when the database is broken, and a 200 listing
// for a valid authenticated request.
func TestHandleRealmsIndex(t *testing.T) {
	t.Parallel()

	ctx := project.TestContext(t)

	harness := envstest.NewServerConfig(t, testDatabaseInstance)

	c := admin.New(harness.Config, harness.Cacher, harness.Database, harness.AuthProvider, harness.RateLimiter, harness.Renderer)
	handler := harness.WithCommonMiddlewares(c.HandleRealmsIndex())

	// Shared middleware contract: missing user and bad pagination handling.
	t.Run("middleware", func(t *testing.T) {
		t.Parallel()

		envstest.ExerciseUserMissing(t, handler)
		envstest.ExerciseBadPagination(t, &database.Membership{
			User: &database.User{},
		}, handler)
	})

	// A broken database must surface as a 500 from the handler.
	t.Run("failure", func(t *testing.T) {
		t.Parallel()

		c := admin.New(harness.Config, harness.Cacher, harness.BadDatabase, harness.AuthProvider, harness.RateLimiter, harness.Renderer)
		handler := harness.WithCommonMiddlewares(c.HandleRealmsIndex())

		ctx := ctx
		ctx = controller.WithSession(ctx, &sessions.Session{})
		ctx = controller.WithUser(ctx, &database.User{})

		w, r := envstest.BuildFormRequest(ctx, t, http.MethodGet, "/", nil)
		handler.ServeHTTP(w, r)

		if got, want := w.Code, http.StatusInternalServerError; got != want {
			t.Errorf("Expected %d to be %d", got, want)
		}
	})

	// Happy path: an authenticated request gets a 200 listing.
	t.Run("lists_all", func(t *testing.T) {
		t.Parallel()

		ctx := ctx
		ctx = controller.WithSession(ctx, &sessions.Session{})
		ctx = controller.WithUser(ctx, &database.User{})

		w, r := envstest.BuildFormRequest(ctx, t, http.MethodGet, "/", nil)
		handler.ServeHTTP(w, r)

		if got, want := w.Code, http.StatusOK; got != want {
			t.Errorf("Expected %d to be %d", got, want)
		}
	})
}
explode_data.jsonl/53829
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 582 }
[ 2830, 3393, 6999, 12768, 1011, 1552, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 1669, 2390, 8787, 1972, 1155, 340, 9598, 23518, 1669, 6105, 267, 477, 7121, 5475, 2648, 1155, 11, 1273, 5988, 2523, 692, 1444, 1669, 3986...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestHandleAbsentValidator walks a single validator through the downtime
// slashing state machine: bond, sign a full window of blocks, miss enough
// blocks to be jailed and slashed, then unjail after the downtime period and
// verify the validator is jailed again after another run of missed blocks.
func TestHandleAbsentValidator(t *testing.T) {
	// initial setup: test keepers, a funded address, and staking/slashing handlers
	ctx, ck, sk, _, keeper := createTestInput(t, keeperTestParams())
	power := int64(100)
	amt := sdk.TokensFromTendermintPower(power)
	addr, val := addrs[0], pks[0]
	sh := staking.NewHandler(sk)
	slh := NewHandler(keeper)

	// create and bond the validator with `amt` tokens
	got := sh(ctx, NewTestMsgCreateValidator(addr, val, amt))
	require.True(t, got.IsOK())
	staking.EndBlocker(ctx, sk)

	// bonded tokens left the account balance and are tracked by the validator
	require.Equal(
		t, ck.GetCoins(ctx, sdk.AccAddress(addr)),
		sdk.Coins{sdk.NewCoin(sk.GetParams(ctx).BondDenom, initCoins.Sub(amt))},
	)
	require.Equal(t, amt, sk.Validator(ctx, addr).GetBondedTokens())

	// will exist since the validator has been bonded; everything starts zeroed
	info, found := keeper.getValidatorSigningInfo(ctx, sdk.ConsAddress(val.Address()))
	require.True(t, found)
	require.Equal(t, int64(0), info.StartHeight)
	require.Equal(t, int64(0), info.IndexOffset)
	require.Equal(t, int64(0), info.MissedBlocksCounter)
	require.Equal(t, time.Unix(0, 0).UTC(), info.JailedUntil)

	height := int64(0)

	// 1000 first blocks OK (one full signed-blocks window, all signed)
	for ; height < keeper.SignedBlocksWindow(ctx); height++ {
		ctx = ctx.WithBlockHeight(height)
		keeper.handleValidatorSignature(ctx, val.Address(), power, true)
	}
	info, found = keeper.getValidatorSigningInfo(ctx, sdk.ConsAddress(val.Address()))
	require.True(t, found)
	require.Equal(t, int64(0), info.StartHeight)
	require.Equal(t, int64(0), info.MissedBlocksCounter)

	// 500 blocks missed: exactly (window - minSigned) misses, the maximum
	// allowed before jailing kicks in
	for ; height < keeper.SignedBlocksWindow(ctx)+(keeper.SignedBlocksWindow(ctx)-keeper.MinSignedPerWindow(ctx)); height++ {
		ctx = ctx.WithBlockHeight(height)
		keeper.handleValidatorSignature(ctx, val.Address(), power, false)
	}
	info, found = keeper.getValidatorSigningInfo(ctx, sdk.ConsAddress(val.Address()))
	require.True(t, found)
	require.Equal(t, int64(0), info.StartHeight)
	require.Equal(t, keeper.SignedBlocksWindow(ctx)-keeper.MinSignedPerWindow(ctx), info.MissedBlocksCounter)

	// validator should be bonded still; no tokens slashed yet
	validator, _ := sk.GetValidatorByConsAddr(ctx, sdk.GetConsAddress(val))
	require.Equal(t, sdk.Bonded, validator.GetStatus())
	pool := sk.GetPool(ctx)
	require.True(sdk.IntEq(t, amt, pool.BondedTokens))

	// 501st block missed: crosses the liveness threshold
	ctx = ctx.WithBlockHeight(height)
	keeper.handleValidatorSignature(ctx, val.Address(), power, false)
	info, found = keeper.getValidatorSigningInfo(ctx, sdk.ConsAddress(val.Address()))
	require.True(t, found)
	require.Equal(t, int64(0), info.StartHeight)
	// counter now reset to zero after the jailing event
	require.Equal(t, int64(0), info.MissedBlocksCounter)

	// end block
	staking.EndBlocker(ctx, sk)

	// validator should have been jailed (moved to unbonding)
	validator, _ = sk.GetValidatorByConsAddr(ctx, sdk.GetConsAddress(val))
	require.Equal(t, sdk.Unbonding, validator.GetStatus())

	slashAmt := amt.ToDec().Mul(keeper.SlashFractionDowntime(ctx)).RoundInt64()

	// validator should have been slashed by the downtime slash fraction
	require.Equal(t, amt.Int64()-slashAmt, validator.GetTokens().Int64())

	// 502nd block *also* missed (since the LastCommit would have still
	// included the just-unbonded validator)
	height++
	ctx = ctx.WithBlockHeight(height)
	keeper.handleValidatorSignature(ctx, val.Address(), power, false)
	info, found = keeper.getValidatorSigningInfo(ctx, sdk.ConsAddress(val.Address()))
	require.True(t, found)
	require.Equal(t, int64(0), info.StartHeight)
	require.Equal(t, int64(1), info.MissedBlocksCounter)

	// end block
	staking.EndBlocker(ctx, sk)

	// validator should not have been slashed any more, since it was already jailed
	validator, _ = sk.GetValidatorByConsAddr(ctx, sdk.GetConsAddress(val))
	require.Equal(t, amt.Int64()-slashAmt, validator.GetTokens().Int64())

	// unrevocation should fail prior to jail expiration
	got = slh(ctx, NewMsgUnjail(addr))
	require.False(t, got.IsOK())

	// unrevocation should succeed after jail expiration (advance block time
	// past the downtime jail duration)
	ctx = ctx.WithBlockHeader(abci.Header{Time: time.Unix(1, 0).Add(keeper.DowntimeJailDuration(ctx))})
	got = slh(ctx, NewMsgUnjail(addr))
	require.True(t, got.IsOK())

	// end block
	staking.EndBlocker(ctx, sk)

	// validator should be rebonded now
	validator, _ = sk.GetValidatorByConsAddr(ctx, sdk.GetConsAddress(val))
	require.Equal(t, sdk.Bonded, validator.GetStatus())

	// validator should have been slashed: the pool's bonded tokens reflect it
	pool = sk.GetPool(ctx)
	require.Equal(t, amt.Int64()-slashAmt, pool.BondedTokens.Int64())

	// Validator start height should not have been changed
	info, found = keeper.getValidatorSigningInfo(ctx, sdk.ConsAddress(val.Address()))
	require.True(t, found)
	require.Equal(t, int64(0), info.StartHeight)
	// we've missed 2 blocks more than the maximum, so the counter was reset
	// to 0 at 1 block more and is now 1
	require.Equal(t, int64(1), info.MissedBlocksCounter)

	// validator should not be immediately jailed again after one more miss
	height++
	ctx = ctx.WithBlockHeight(height)
	keeper.handleValidatorSignature(ctx, val.Address(), power, false)
	validator, _ = sk.GetValidatorByConsAddr(ctx, sdk.GetConsAddress(val))
	require.Equal(t, sdk.Bonded, validator.GetStatus())

	// 500 signed blocks
	// NOTE(review): the comment above says "signed" but the loop passes
	// signed=false (missed) — confirm whether this is intentional.
	nextHeight := height + keeper.MinSignedPerWindow(ctx) + 1
	for ; height < nextHeight; height++ {
		ctx = ctx.WithBlockHeight(height)
		keeper.handleValidatorSignature(ctx, val.Address(), power, false)
	}

	// end block
	staking.EndBlocker(ctx, sk)

	// validator should be jailed again after 500 unsigned blocks
	nextHeight = height + keeper.MinSignedPerWindow(ctx) + 1
	for ; height <= nextHeight; height++ {
		ctx = ctx.WithBlockHeight(height)
		keeper.handleValidatorSignature(ctx, val.Address(), power, false)
	}

	// end block
	staking.EndBlocker(ctx, sk)
	validator, _ = sk.GetValidatorByConsAddr(ctx, sdk.GetConsAddress(val))
	require.Equal(t, sdk.Unbonding, validator.GetStatus())
}
explode_data.jsonl/44697
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2027 }
[ 2830, 3393, 6999, 80251, 14256, 1155, 353, 8840, 836, 8, 1476, 197, 322, 2856, 6505, 198, 20985, 11, 38613, 11, 1901, 11, 8358, 53416, 1669, 1855, 2271, 2505, 1155, 11, 53416, 2271, 4870, 2398, 3223, 1202, 1669, 526, 21, 19, 7, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIssue21677(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t;") tk.MustExec("create table t(1e int);") tk.MustExec("insert into t values (1);") tk.MustQuery("select t.1e from test.t;").Check(testkit.Rows("1")) tk.MustExec("drop table if exists t;") tk.MustExec("create table t(99e int, r10 int);") tk.MustExec("insert into t values (1, 10), (2, 2);") tk.MustQuery("select 99e+r10 from t;").Check(testkit.Rows("11", "4")) tk.MustQuery("select .78$123;").Check(testkit.Rows("0.78")) tk.MustGetErrCode("select .78$421+1;", mysql.ErrParse) tk.MustQuery("select t. `r10` > 3 from t;").Check(testkit.Rows("1", "0")) tk.MustQuery("select * from t where t. `r10` > 3;").Check(testkit.Rows("1 10")) }
explode_data.jsonl/65566
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 42006, 17, 16, 21, 22, 22, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 692, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1