text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestProcess(t *testing.T) { startTime := time.Now() datasetID := "werqae-asdqwrwf-erwe" reader := strings.NewReader(exampleHeaderLine + exampleCsvLine + "\n" + exampleCsvLine) url, _ := url.Parse("s3://bucket/dir/test.csv") s3URL := event.NewS3URL(url) uploadEvent := &event.FileUploaded{S3URL: s3URL, Time: time.Now().UTC().Unix()} mockProducer := &MockProducer{} Convey("Given a mock mockProducer with two rows that succeeds", t, func() { splitter.Producer = mockProducer var processor = splitter.NewCSVProcessor() Convey("When the processor is called", func() { processor.Process(reader, uploadEvent, startTime, datasetID) So(len(mockProducer.multipleMessagesInvocations), ShouldEqual, 1) So(len(mockProducer.multipleMessagesInvocations[0]), ShouldEqual, 2) for i := 0; i < 2; i++ { producerMessage := mockProducer.multipleMessagesInvocations[0][i] So(producerMessage.Topic, ShouldEqual, config.RowTopicName) rowMessage := extractRowMessage(producerMessage) So(rowMessage.DatasetID, ShouldEqual, datasetID) So(rowMessage.S3URL, ShouldEqual, url.String()) So(rowMessage.StartTime, ShouldEqual, startTime.UTC().Unix()) So(rowMessage.Index, ShouldEqual, i) So(rowMessage.Row, ShouldEqual, exampleCsvLine) uuid, err := uuid.FromString(rowMessage.RowID) So(uuid, ShouldNotBeNil) So(err, ShouldBeNil) } So(len(mockProducer.singleMessageInvocations), ShouldEqual, 1) producerMessage := mockProducer.singleMessageInvocations[0] So(producerMessage.Topic, ShouldEqual, config.DatasetTopicName) datasetMessage := extractDatasetMessage(mockProducer.singleMessageInvocations[0]) So(datasetMessage.DatasetID, ShouldEqual, datasetID) So(datasetMessage.TotalRows, ShouldEqual, 2) }) }) }
explode_data.jsonl/68893
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 682 }
[ 2830, 3393, 7423, 1155, 353, 8840, 836, 8, 1476, 21375, 1462, 1669, 882, 13244, 741, 2698, 8369, 915, 1669, 330, 6566, 80, 5918, 32434, 85646, 18718, 43083, 12, 261, 896, 698, 61477, 1669, 9069, 68587, 66203, 4047, 2460, 488, 3110, 9482...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRMIDComparisonDifferentGroups(t *testing.T) { a := "rmid1:0d77c-b0b2e66aece-00000000-00000001" b := "rmid1:0d77c-b0b2e66aecd-00000000-00000002" rmidA, err := ReplicationGroupMessageIDFromString(a) if err != nil { t.Error(err) } if rmidA == nil { t.Error("Expected non nil RMID, got nil") } rmidB, err := ReplicationGroupMessageIDFromString(b) if err != nil { t.Error(err) } if rmidB == nil { t.Error("Expected non nil RMID, got nil") } _, err = rmidB.Compare(rmidA) if err == nil { t.Error("Expected non nil error") } if _, ok := err.(*solace.IllegalArgumentError); !ok { t.Errorf("Expected error to be of type IllegalArgumentError, got %T", err) } _, err = rmidA.Compare(rmidB) if err == nil { t.Error("Expected non nil error") } if _, ok := err.(*solace.IllegalArgumentError); !ok { t.Errorf("Expected error to be of type IllegalArgumentError, got %T", err) } }
explode_data.jsonl/72227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 387 }
[ 2830, 3393, 23652, 915, 33487, 69123, 22173, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 330, 8719, 307, 16, 25, 15, 67, 22, 22, 66, 1455, 15, 65, 17, 68, 21, 21, 5918, 346, 12, 15, 15, 15, 15, 15, 15, 15, 15, 12, 15, 15, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestValidateNamespaceFinalizeUpdate(t *testing.T) { tests := []struct { oldNamespace core.Namespace namespace core.Namespace valid bool }{ {core.Namespace{}, core.Namespace{}, true}, {core.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "foo"}}, core.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "foo"}, Spec: core.NamespaceSpec{ Finalizers: []core.FinalizerName{"Foo"}, }, }, false}, {core.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "foo"}, Spec: core.NamespaceSpec{ Finalizers: []core.FinalizerName{"foo.com/bar"}, }, }, core.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "foo"}, Spec: core.NamespaceSpec{ Finalizers: []core.FinalizerName{"foo.com/bar", "what.com/bar"}, }, }, true}, {core.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "fooemptyfinalizer"}, Spec: core.NamespaceSpec{ Finalizers: []core.FinalizerName{"foo.com/bar"}, }, }, core.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "fooemptyfinalizer"}, Spec: core.NamespaceSpec{ Finalizers: []core.FinalizerName{"", "foo.com/bar", "what.com/bar"}, }, }, false}, } for i, test := range tests { test.namespace.ObjectMeta.ResourceVersion = "1" test.oldNamespace.ObjectMeta.ResourceVersion = "1" errs := ValidateNamespaceFinalizeUpdate(&test.namespace, &test.oldNamespace) if test.valid && len(errs) > 0 { t.Errorf("%d: Unexpected error: %v", i, errs) t.Logf("%#v vs %#v", test.oldNamespace, test.namespace) } if !test.valid && len(errs) == 0 { t.Errorf("%d: Unexpected non-error", i) } } }
explode_data.jsonl/1048
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 723 }
[ 2830, 3393, 17926, 22699, 19357, 551, 4289, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 61828, 22699, 6200, 46011, 198, 197, 56623, 262, 6200, 46011, 198, 197, 56322, 286, 1807, 198, 197, 59403, 197, 197, 90, 2153, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestBuild(t *testing.T) { kp0 := newKeypair0() sourceAccount := NewSimpleAccount(kp0.Address(), int64(9605939170639897)) createAccount := CreateAccount{ Destination: "GCCOBXW2XQNUSL467IEILE6MMCNRR66SSVL4YQADUNYYNUVREF3FIV2Z", Amount: "10", } tx, err := NewTransaction( TransactionParams{ SourceAccount: &sourceAccount, IncrementSequenceNum: true, Operations: []Operation{&createAccount}, BaseFee: MinBaseFee, Timebounds: NewInfiniteTimeout(), }, ) assert.NoError(t, err) expectedUnsigned := "AAAAAgAAAADg3G3hclysZlFitS+s5zWyiiJD5B0STWy5LXCj6i5yxQAAAGQAIiCNAAAAGgAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAACE4N7avBtJL576CIWTzGCbGPvSlVfMQAOjcYbSsSF2VAAAAAAF9eEAAAAAAAAAAAA=" expectedSigned := "AAAAAgAAAADg3G3hclysZlFitS+s5zWyiiJD5B0STWy5LXCj6i5yxQAAAGQAIiCNAAAAGgAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAACE4N7avBtJL576CIWTzGCbGPvSlVfMQAOjcYbSsSF2VAAAAAAF9eEAAAAAAAAAAAHqLnLFAAAAQB7MjKIwNEOTIjbEeV+QIjaQp/ZpV5qpbkbDaU54gkfdTOFOUxZq66lTS5FOfP5fmPIVD8InQ00Usy2SmzFC/wc=" txeB64, err := tx.Base64() assert.NoError(t, err) assert.Equal(t, expectedUnsigned, txeB64, "tx envelope should match") tx, err = tx.Sign(network.TestNetworkPassphrase, kp0) assert.NoError(t, err) txeB64, err = tx.Base64() assert.NoError(t, err) assert.Equal(t, expectedSigned, txeB64, "tx envelope should match") }
explode_data.jsonl/20696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 668 }
[ 2830, 3393, 11066, 1155, 353, 8840, 836, 8, 341, 16463, 79, 15, 1669, 501, 6608, 1082, 1310, 15, 741, 47418, 7365, 1669, 1532, 16374, 7365, 5969, 79, 15, 26979, 1507, 526, 21, 19, 7, 24, 21, 15, 20, 24, 18, 24, 16, 22, 15, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLedDriverToggle(t *testing.T) { d := initTestLedDriver(newGpioTestAdaptor()) d.Off() d.Toggle() gobottest.Assert(t, d.State(), true) d.Toggle() gobottest.Assert(t, d.State(), false) }
explode_data.jsonl/55080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 60850, 11349, 18897, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 2930, 2271, 60850, 11349, 1755, 38, 11917, 2271, 2589, 32657, 2398, 2698, 13, 4596, 741, 2698, 80700, 741, 3174, 674, 1716, 477, 11711, 1155, 11, 294, 18942, 150...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIsLeapYear(t *testing.T) { for _, i := range []uint{1600, 2000, 2012, 2016, 2020} { if !isLeapYear(i) { t.Errorf("isLeapYear(%d) got false, want true", i) } } for _, i := range []uint{1601, 2002, 2015, 2017, 2018} { if isLeapYear(i) { t.Errorf("isLeapYear(%d) got true, want false", i) } } }
explode_data.jsonl/65319
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 3872, 96657, 9490, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 600, 1669, 2088, 3056, 2496, 90, 16, 21, 15, 15, 11, 220, 17, 15, 15, 15, 11, 220, 17, 15, 16, 17, 11, 220, 17, 15, 16, 21, 11, 220, 17, 15, 17, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestInvalidRanges(t *testing.T) { var invalidRangeTests = []struct { floor int ceil int }{ {10, 10}, {100, 100}, {990, 998}, } for _, tt := range invalidRangeTests { _, err := LargestPalindromeFromProductOfNumbers(tt.floor, tt.ceil) assert.Equal(t, err, ErrNoPalindromesInRange, "the range %d to %d should have no palindromes", tt.floor, tt.ceil) } }
explode_data.jsonl/10875
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 7928, 74902, 1155, 353, 8840, 836, 8, 341, 2405, 8318, 6046, 18200, 284, 3056, 1235, 341, 197, 1166, 3993, 526, 198, 197, 197, 22058, 220, 526, 198, 197, 59403, 197, 197, 90, 16, 15, 11, 220, 16, 15, 1583, 197, 197, 90...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNodeAddresses(t *testing.T) { // Note these instances have the same name // (we test that this produces an error) var instance0 ec2.Instance var instance1 ec2.Instance var instance2 ec2.Instance //0 instance0.InstanceId = aws.String("i-0") instance0.PrivateDnsName = aws.String("instance-same.ec2.internal") instance0.PrivateIpAddress = aws.String("192.168.0.1") instance0.PublicDnsName = aws.String("instance-same.ec2.external") instance0.PublicIpAddress = aws.String("1.2.3.4") instance0.InstanceType = aws.String("c3.large") instance0.Placement = &ec2.Placement{AvailabilityZone: aws.String("us-east-1a")} state0 := ec2.InstanceState{ Name: aws.String("running"), } instance0.State = &state0 //1 instance1.InstanceId = aws.String("i-1") instance1.PrivateDnsName = aws.String("instance-same.ec2.internal") instance1.PrivateIpAddress = aws.String("192.168.0.2") instance1.InstanceType = aws.String("c3.large") instance1.Placement = &ec2.Placement{AvailabilityZone: aws.String("us-east-1a")} state1 := ec2.InstanceState{ Name: aws.String("running"), } instance1.State = &state1 //2 instance2.InstanceId = aws.String("i-2") instance2.PrivateDnsName = aws.String("instance-other.ec2.internal") instance2.PrivateIpAddress = aws.String("192.168.0.1") instance2.PublicIpAddress = aws.String("1.2.3.4") instance2.InstanceType = aws.String("c3.large") instance2.Placement = &ec2.Placement{AvailabilityZone: aws.String("us-east-1a")} state2 := ec2.InstanceState{ Name: aws.String("running"), } instance2.State = &state2 instances := []*ec2.Instance{&instance0, &instance1, &instance2} aws1, _ := mockInstancesResp(&instance0, []*ec2.Instance{&instance0}) _, err1 := aws1.NodeAddresses("instance-mismatch.ec2.internal") if err1 == nil { t.Errorf("Should error when no instance found") } aws2, _ := mockInstancesResp(&instance2, instances) _, err2 := aws2.NodeAddresses("instance-same.ec2.internal") if err2 == nil { t.Errorf("Should error when multiple instances found") } aws3, _ := 
mockInstancesResp(&instance0, instances[0:1]) addrs3, err3 := aws3.NodeAddresses("instance-same.ec2.internal") if err3 != nil { t.Errorf("Should not error when instance found") } if len(addrs3) != 4 { t.Errorf("Should return exactly 4 NodeAddresses") } testHasNodeAddress(t, addrs3, v1.NodeInternalIP, "192.168.0.1") testHasNodeAddress(t, addrs3, v1.NodeExternalIP, "1.2.3.4") testHasNodeAddress(t, addrs3, v1.NodeExternalDNS, "instance-same.ec2.external") testHasNodeAddress(t, addrs3, v1.NodeInternalDNS, "instance-same.ec2.internal") // Fetch from metadata aws4, fakeServices := mockInstancesResp(&instance0, []*ec2.Instance{&instance0}) fakeServices.selfInstance.PublicIpAddress = aws.String("2.3.4.5") fakeServices.selfInstance.PrivateIpAddress = aws.String("192.168.0.2") addrs4, err4 := aws4.NodeAddresses(mapInstanceToNodeName(&instance0)) if err4 != nil { t.Errorf("unexpected error: %v", err4) } testHasNodeAddress(t, addrs4, v1.NodeInternalIP, "192.168.0.2") testHasNodeAddress(t, addrs4, v1.NodeExternalIP, "2.3.4.5") }
explode_data.jsonl/12847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1185 }
[ 2830, 3393, 1955, 52290, 1155, 353, 8840, 836, 8, 341, 197, 322, 7036, 1493, 13121, 614, 279, 1852, 829, 198, 197, 322, 320, 896, 1273, 429, 419, 18644, 458, 1465, 340, 2405, 2867, 15, 11942, 17, 12688, 198, 2405, 2867, 16, 11942, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestUploads(t *testing.T) { app := createTestApp(t) defer app.Close() // Write 9 uploads to the database. These uploads have 1-9 // results each, a common label "i" set to the upload number, // and a label "j" set to the record number within the upload. var uploadIDs []string for i := 0; i < 9; i++ { status := app.uploadFiles(t, func(mpw *multipart.Writer) { w, err := mpw.CreateFormFile("file", "path/1.txt") if err != nil { t.Errorf("CreateFormFile: %v", err) } bp := benchfmt.NewPrinter(w) for j := 0; j <= i; j++ { r := &benchfmt.Result{Labels: map[string]string{"i": fmt.Sprintf("%d", i)}, NameLabels: make(map[string]string), Content: "BenchmarkName 1 ns/op"} r.Labels["j"] = fmt.Sprintf("%d", j) if err := bp.Print(r); err != nil { t.Fatalf("Print: %v", err) } } }) uploadIDs = append(uploadIDs, status.UploadID) } tests := []struct { q string extraLabels []string want []storage.UploadInfo }{ {"", nil, []storage.UploadInfo{ {9, uploadIDs[8], nil}, {8, uploadIDs[7], nil}, {7, uploadIDs[6], nil}, {6, uploadIDs[5], nil}, {5, uploadIDs[4], nil}, {4, uploadIDs[3], nil}, {3, uploadIDs[2], nil}, {2, uploadIDs[1], nil}, {1, uploadIDs[0], nil}, }}, {"j:5", nil, []storage.UploadInfo{{1, uploadIDs[8], nil}, {1, uploadIDs[7], nil}, {1, uploadIDs[6], nil}, {1, uploadIDs[5], nil}}}, {"i:5", []string{"i"}, []storage.UploadInfo{{6, uploadIDs[5], benchfmt.Labels{"i": "5"}}}}, {"not:found", nil, nil}, } for _, test := range tests { t.Run("query="+test.q, func(t *testing.T) { u := app.srv.URL + "/uploads" uv := url.Values{} if test.q != "" { uv["q"] = []string{test.q} } if test.extraLabels != nil { uv["extra_label"] = test.extraLabels } if len(uv) > 0 { u += "?" 
+ uv.Encode() } resp, err := http.Get(u) if err != nil { t.Fatal(err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Fatalf("get /uploads: %v", resp.Status) } dec := json.NewDecoder(resp.Body) i := 0 for { var ui storage.UploadInfo if err := dec.Decode(&ui); err == io.EOF { break } else if err != nil { t.Fatalf("failed to parse UploadInfo: %v", err) } if i > len(test.want) { t.Fatalf("too many responses: have %d+ want %d", i, len(test.want)) } if !reflect.DeepEqual(ui, test.want[i]) { t.Errorf("uploadinfo = %#v, want %#v", ui, test.want[i]) } i++ } if i < len(test.want) { t.Fatalf("missing responses: have %d want %d", i, len(test.want)) } }) } }
explode_data.jsonl/4951
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1184 }
[ 2830, 3393, 13844, 82, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1855, 2271, 2164, 1155, 340, 16867, 906, 10421, 2822, 197, 322, 9645, 220, 24, 66563, 311, 279, 4625, 13, 4220, 66563, 614, 220, 16, 12, 24, 198, 197, 322, 3059, 1817...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReceive_1(t *testing.T) { _, teardown := setupIpTest(t) defer teardown() dev := createTapDevice() iface := createIface() _ = repo.IfaceRepo.Register(iface, dev) // ICMP packet := createIpPacket() want := psErr.OK got := Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // TCP packet[9] = uint8(mw.PnTCP) packet[10] = 0x00 packet[11] = 0x00 csum := mw.Checksum(packet[:HdrLenMin], 0) packet[10] = uint8((csum & 0xff00) >> 8) packet[11] = uint8(csum & 0x00ff) want = psErr.OK got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } // UDP packet[9] = uint8(mw.PnUDP) packet[10] = 0x00 packet[11] = 0x00 csum = mw.Checksum(packet[:HdrLenMin], 0) packet[10] = uint8((csum & 0xff00) >> 8) packet[11] = uint8(csum & 0x00ff) want = psErr.Error got = Receive(packet, dev) if got != want { t.Errorf("Receive() = %s; want %s", got, want) } }
explode_data.jsonl/8506
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 468 }
[ 2830, 3393, 14742, 62, 16, 1155, 353, 8840, 836, 8, 341, 197, 6878, 49304, 1669, 6505, 23378, 2271, 1155, 340, 16867, 49304, 2822, 27302, 1669, 1855, 23879, 6985, 741, 743, 578, 1669, 1855, 40, 1564, 741, 197, 62, 284, 15867, 2447, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestServerTimeoutErrorWithCode(t *testing.T) { s := &Server{ Handler: func(ctx *RequestCtx) { go func() { ctx.Success("aaa/bbb", []byte("xxxyyy")) }() ctx.TimeoutErrorWithCode("should be ignored", 234) ctx.TimeoutErrorWithCode("stolen ctx", StatusBadRequest) }, } rw := &readWriter{} rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n") rw.r.WriteString("GET /foo HTTP/1.1\r\nHost: google.com\r\n\r\n") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err != nil { t.Fatalf("Unexpected error from serveConn: %s", err) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) verifyResponse(t, br, StatusBadRequest, string(defaultContentType), "stolen ctx") data, err := ioutil.ReadAll(br) if err != nil { t.Fatalf("Unexpected error when reading remaining data: %s", err) } if len(data) != 0 { t.Fatalf("Unexpected data read after the first response %q. Expecting %q", data, "") } }
explode_data.jsonl/73302
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 429 }
[ 2830, 3393, 5475, 7636, 1454, 2354, 2078, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 5475, 515, 197, 197, 3050, 25, 2915, 7502, 353, 1900, 23684, 8, 341, 298, 30680, 2915, 368, 341, 571, 20985, 33320, 445, 32646, 3470, 6066, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouterStaticDynamicConflict(t *testing.T) { e := New() r := e.router r.Add(http.MethodGet, "/dictionary/skills", handlerHelper("a", 1)) r.Add(http.MethodGet, "/dictionary/:name", handlerHelper("b", 2)) r.Add(http.MethodGet, "/users/new", handlerHelper("d", 4)) r.Add(http.MethodGet, "/users/:name", handlerHelper("e", 5)) r.Add(http.MethodGet, "/server", handlerHelper("c", 3)) r.Add(http.MethodGet, "/", handlerHelper("f", 6)) c := e.NewContext(nil, nil) r.Find(http.MethodGet, "/dictionary/skills", c) c.Handler()(c) assert.Equal(t, 1, c.Get("a")) assert.Equal(t, "/dictionary/skills", c.Get("path")) c = e.NewContext(nil, nil) r.Find(http.MethodGet, "/dictionary/skillsnot", c) c.Handler()(c) assert.Equal(t, 2, c.Get("b")) assert.Equal(t, "/dictionary/:name", c.Get("path")) c = e.NewContext(nil, nil) r.Find(http.MethodGet, "/dictionary/type", c) c.Handler()(c) assert.Equal(t, 2, c.Get("b")) assert.Equal(t, "/dictionary/:name", c.Get("path")) c = e.NewContext(nil, nil) r.Find(http.MethodGet, "/server", c) c.Handler()(c) assert.Equal(t, 3, c.Get("c")) assert.Equal(t, "/server", c.Get("path")) c = e.NewContext(nil, nil) r.Find(http.MethodGet, "/users/new", c) c.Handler()(c) assert.Equal(t, 4, c.Get("d")) assert.Equal(t, "/users/new", c.Get("path")) c = e.NewContext(nil, nil) r.Find(http.MethodGet, "/users/new2", c) c.Handler()(c) assert.Equal(t, 5, c.Get("e")) assert.Equal(t, "/users/:name", c.Get("path")) c = e.NewContext(nil, nil) r.Find(http.MethodGet, "/", c) c.Handler()(c) assert.Equal(t, 6, c.Get("f")) assert.Equal(t, "/", c.Get("path")) }
explode_data.jsonl/47129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 713 }
[ 2830, 3393, 9523, 11690, 21752, 57974, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1532, 741, 7000, 1669, 384, 22125, 271, 7000, 1904, 19886, 20798, 1949, 11, 3521, 35671, 2687, 60925, 497, 7013, 5511, 445, 64, 497, 220, 16, 1171, 7000, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPushWithHostsDir(t *testing.T) { // Skip docker, because Docker doesn't have `--hosts-dir` option, and we don't want to contaminate the global /etc/docker/certs.d during this test testutil.DockerIncompatible(t) base := testutil.NewBase(t) reg := testregistry.NewHTTPS(base, "admin", "badmin") defer reg.Cleanup() base.Cmd("--hosts-dir", reg.HostsDir, "login", "-u", "admin", "-p", "badmin", fmt.Sprintf("%s:%d", reg.IP.String(), reg.ListenPort)).AssertOK() base.Cmd("pull", testutil.CommonImage).AssertOK() testImageRef := fmt.Sprintf("%s:%d/%s:%s", reg.IP.String(), reg.ListenPort, testutil.Identifier(t), strings.Split(testutil.CommonImage, ":")[1]) t.Logf("testImageRef=%q", testImageRef) base.Cmd("tag", testutil.CommonImage, testImageRef).AssertOK() base.Cmd("--debug", "--hosts-dir", reg.HostsDir, "push", testImageRef).AssertOK() }
explode_data.jsonl/51087
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 325 }
[ 2830, 3393, 16644, 2354, 9296, 82, 6184, 1155, 353, 8840, 836, 8, 341, 197, 322, 25784, 26588, 11, 1576, 40549, 3171, 944, 614, 1565, 313, 44692, 45283, 63, 2999, 11, 323, 582, 1513, 944, 1366, 311, 683, 309, 3277, 279, 3644, 608, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMixedOSBuildKojiJob(t *testing.T) { require := require.New(t) emptyManifestV2 := distro.Manifest(`{"version":"2","pipelines":{}}`) server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1") fbPipelines := &worker.PipelineNames{Build: distro.BuildPipelinesFallback(), Payload: distro.PayloadPipelinesFallback()} enqueueKojiJob := func(job *worker.OSBuildKojiJob) uuid.UUID { initJob := new(worker.KojiInitJob) initJobID, err := server.EnqueueKojiInit(initJob, "") require.NoError(err) jobID, err := server.EnqueueOSBuildKoji("k", job, initJobID, "") require.NoError(err) return jobID } oldJob := worker.OSBuildKojiJob{ Manifest: emptyManifestV2, ImageName: "no-pipeline-names", } oldJobID := enqueueKojiJob(&oldJob) newJob := worker.OSBuildKojiJob{ Manifest: emptyManifestV2, ImageName: "with-pipeline-names", PipelineNames: &worker.PipelineNames{ Build: []string{"build"}, Payload: []string{"other", "pipelines"}, }, } newJobID := enqueueKojiJob(&newJob) var oldJobRead worker.OSBuildKojiJob err := server.OSBuildKojiJob(oldJobID, &oldJobRead) require.NoError(err) require.NotNil(oldJobRead.PipelineNames) // OldJob gets default pipeline names when read require.Equal(fbPipelines, oldJobRead.PipelineNames) require.Equal(oldJob.Manifest, oldJobRead.Manifest) require.Equal(oldJob.ImageName, oldJobRead.ImageName) // Not entirely equal require.NotEqual(oldJob, oldJobRead) // NewJob the same when read back var newJobRead worker.OSBuildKojiJob err = server.OSBuildKojiJob(newJobID, &newJobRead) require.NoError(err) require.NotNil(newJobRead.PipelineNames) require.Equal(newJob.PipelineNames, newJobRead.PipelineNames) // Dequeue the jobs (via RequestJob) to get their tokens and update them to // test the result retrieval // Finish init jobs for idx := uint(0); idx < 2; idx++ { ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) defer cancel() _, token, _, _, _, err := server.RequestJob(ctx, "k", []string{"koji-init"}, []string{""}) 
require.NoError(err) require.NoError(server.FinishJob(token, nil)) } getJob := func() (uuid.UUID, uuid.UUID) { // don't block forever if the jobs weren't added or can't be retrieved ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) defer cancel() id, token, _, _, _, err := server.RequestJob(ctx, "k", []string{"osbuild-koji"}, []string{""}) require.NoError(err) return id, token } getJobTokens := func(n uint) map[uuid.UUID]uuid.UUID { tokens := make(map[uuid.UUID]uuid.UUID, n) for idx := uint(0); idx < n; idx++ { id, token := getJob() tokens[id] = token } return tokens } jobTokens := getJobTokens(2) // make sure we got them both as expected require.Contains(jobTokens, oldJobID) require.Contains(jobTokens, newJobID) oldJobResult := &worker.OSBuildKojiJobResult{ HostOS: "rhel-10", Arch: "k", OSBuildOutput: &osbuild2.Result{ Type: "result", Success: true, Log: map[string]osbuild2.PipelineResult{ "build-old": { osbuild2.StageResult{ ID: "---", Type: "org.osbuild.test", Output: "<test output>", Success: true, }, }, }, }, } oldJobResultRaw, err := json.Marshal(oldJobResult) require.NoError(err) oldJobToken := jobTokens[oldJobID] err = server.FinishJob(oldJobToken, oldJobResultRaw) require.NoError(err) oldJobResultRead := new(worker.OSBuildKojiJobResult) _, _, err = server.OSBuildKojiJobStatus(oldJobID, oldJobResultRead) require.NoError(err) // oldJobResultRead should have PipelineNames now require.NotEqual(oldJobResult, oldJobResultRead) require.Equal(fbPipelines, oldJobResultRead.PipelineNames) require.NotNil(oldJobResultRead.PipelineNames) require.Equal(oldJobResult.OSBuildOutput, oldJobResultRead.OSBuildOutput) require.Equal(oldJobResult.HostOS, oldJobResultRead.HostOS) require.Equal(oldJobResult.Arch, oldJobResultRead.Arch) newJobResult := &worker.OSBuildKojiJobResult{ HostOS: "rhel-10", Arch: "k", PipelineNames: &worker.PipelineNames{ Build: []string{"build-result"}, Payload: []string{"result-test-payload", "result-test-assembler"}, }, 
OSBuildOutput: &osbuild2.Result{ Type: "result", Success: true, Log: map[string]osbuild2.PipelineResult{ "build-new": { osbuild2.StageResult{ ID: "---", Type: "org.osbuild.test", Output: "<test output new>", Success: true, }, }, }, }, } newJobResultRaw, err := json.Marshal(newJobResult) require.NoError(err) newJobToken := jobTokens[newJobID] err = server.FinishJob(newJobToken, newJobResultRaw) require.NoError(err) newJobResultRead := new(worker.OSBuildKojiJobResult) _, _, err = server.OSBuildKojiJobStatus(newJobID, newJobResultRead) require.NoError(err) require.Equal(newJobResult, newJobResultRead) }
explode_data.jsonl/1110
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1956 }
[ 2830, 3393, 86433, 3126, 11066, 42, 26902, 12245, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 197, 3194, 38495, 53, 17, 1669, 1582, 299, 72272, 5809, 4913, 4366, 3252, 17, 2198, 51501, 10999, 788, 90, 3417, 24183, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddItemAuthor(t *testing.T) { t.Parallel() // arrange theAuthor := podcast.Author{Name: "Jane Doe", Email: "me@janedoe.com"} p := podcast.New("title", "link", "description", nil, nil) i := podcast.Item{Title: "title", Description: "desc", Link: "http://a.co/"} i.Author = &theAuthor // act added, err := p.AddItem(i) // assert assert.EqualValues(t, 1, added) assert.NoError(t, err) assert.Len(t, p.Items, 1) assert.EqualValues(t, &theAuthor, p.Items[0].Author) assert.EqualValues(t, theAuthor.Email, p.Items[0].IAuthor) }
explode_data.jsonl/73082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 2212, 1234, 7133, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 30893, 198, 32088, 7133, 1669, 17711, 33858, 63121, 25, 330, 62502, 49628, 497, 8299, 25, 330, 2660, 31, 22838, 291, 4644, 905, 16707, 3223, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_preventDestroy_bad(t *testing.T) { m := testModule(t, "plan-prevent-destroy-bad") p := testProvider("aws") p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), State: MustShimLegacyState(&State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "aws_instance.foo": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "i-abc123", }, }, }, }, }, }), }) plan, err := ctx.Plan() expectedErr := "aws_instance.foo has lifecycle.prevent_destroy" if !strings.Contains(fmt.Sprintf("%s", err), expectedErr) { if plan != nil { t.Logf(legacyDiffComparisonString(plan.Changes)) } t.Fatalf("expected err would contain %q\nerr: %s", expectedErr, err) } }
explode_data.jsonl/28660
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 423 }
[ 2830, 3393, 1972, 17, 20485, 10442, 684, 14245, 34199, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 21334, 684, 12, 15518, 1455, 329, 1138, 3223, 1669, 1273, 5179, 445, 8635, 1138, 3223, 98063, 24911, 284,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMultiError_Append(t *testing.T) { multiErrors := MultiError{} multiErrors.Errors = append(multiErrors.Errors, errors.New("invalid")) multiErrors.Errors = append(multiErrors.Errors, errors.New("fatal")) if len(multiErrors.Errors) != 2 { t.Fatal("Test Append()") } }
explode_data.jsonl/57818
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 20358, 1454, 36117, 408, 1155, 353, 8840, 836, 8, 341, 2109, 7068, 13877, 1669, 17439, 1454, 16094, 2109, 7068, 13877, 61745, 284, 8737, 1255, 7068, 13877, 61745, 11, 5975, 7121, 445, 11808, 5455, 2109, 7068, 13877, 61745, 284...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCollectorAutoscalersDisabledByExplicitOption(t *testing.T) { // prepare disabled := false jaeger := v1.NewJaeger(types.NamespacedName{Name: "my-instance"}) jaeger.Spec.Collector.Autoscale = &disabled c := NewCollector(jaeger) // test a := c.Autoscalers() // verify assert.Len(t, a, 0) }
explode_data.jsonl/59536
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 53694, 19602, 436, 5416, 388, 25907, 1359, 98923, 5341, 1155, 353, 8840, 836, 8, 341, 197, 322, 10549, 198, 34597, 2312, 1669, 895, 198, 197, 5580, 1878, 1669, 348, 16, 7121, 52445, 1878, 52613, 98932, 68552, 675, 63121, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransactionalUpsert verifies that a transactional Upsert through the
// Redis state store persists both the value and its version.
func TestTransactionalUpsert(t *testing.T) {
	// Back the store with an in-process miniredis instance.
	s, c := setupMiniredis()
	defer s.Close()
	ss := &StateStore{
		client: c,
		json:   jsoniter.ConfigFastest,
		logger: logger.NewLogger("test"),
	}
	// Execute a single Upsert operation inside a transaction.
	err := ss.Multi(&state.TransactionalStateRequest{
		Operations: []state.TransactionalStateOperation{{
			Operation: state.Upsert,
			Request: state.SetRequest{
				Key:   "weapon",
				Value: "deathstar",
			},
		}},
	})
	assert.Equal(t, nil, err)
	// Read the raw hash back to confirm data and version were written.
	res, err := c.DoContext(context.Background(), "HGETALL", "weapon").Result()
	assert.Equal(t, nil, err)
	vals := res.([]interface{})
	data, version, err := ss.getKeyVersion(vals)
	assert.Equal(t, nil, err)
	// The first write produces version "1".
	assert.Equal(t, ptr.String("1"), version)
	// The stored value is JSON-encoded, hence the surrounding quotes.
	assert.Equal(t, `"deathstar"`, data)
}
explode_data.jsonl/78640
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 31375, 98778, 529, 1155, 353, 8840, 836, 8, 341, 1903, 11, 272, 1669, 6505, 6217, 2690, 285, 741, 16867, 274, 10421, 2822, 34472, 1669, 609, 1397, 6093, 515, 197, 25291, 25, 272, 345, 197, 30847, 25, 256, 2951, 2015, 10753...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEditIngress exercises updating an existing Ingress: the translated
// route configuration must be replaced and the version/nonce bumped.
func TestEditIngress(t *testing.T) {
	rh, cc, done := setup(t)
	defer done()

	meta := metav1.ObjectMeta{Name: "kuard", Namespace: "default"}

	// Backend service that the ingress routes to.
	s1 := &v1.Service{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "kuard",
			Namespace: "default",
		},
		Spec: v1.ServiceSpec{
			Ports: []v1.ServicePort{{
				Name:       "http",
				Protocol:   "TCP",
				Port:       80,
				TargetPort: intstr.FromInt(8080),
			}},
		},
	}
	rh.OnAdd(s1) // add default/kuard to translator.

	// Initial ingress: a bare default backend.
	old := &v1beta1.Ingress{
		ObjectMeta: meta,
		Spec: v1beta1.IngressSpec{
			Backend: &v1beta1.IngressBackend{
				ServiceName: "kuard",
				ServicePort: intstr.FromInt(80),
			},
		},
	}
	rh.OnAdd(old)

	// check that it's been translated correctly.
	assert.Equal(t, &v2.DiscoveryResponse{
		VersionInfo: "1",
		Resources: routeResources(t,
			envoy.RouteConfiguration("ingress_http",
				envoy.VirtualHost("*",
					&envoy_api_v2_route.Route{
						Match:  routePrefix("/"),
						Action: routecluster("default/kuard/80/da39a3ee5e"),
					}),
			),
		),
		TypeUrl: routeType,
		Nonce:   "1",
	}, streamRDS(t, cc))

	// update old to new: replace the default backend with a /testing path rule.
	rh.OnUpdate(old, &v1beta1.Ingress{
		ObjectMeta: meta,
		Spec: v1beta1.IngressSpec{
			Rules: []v1beta1.IngressRule{{
				IngressRuleValue: v1beta1.IngressRuleValue{
					HTTP: &v1beta1.HTTPIngressRuleValue{
						Paths: []v1beta1.HTTPIngressPath{{
							Path: "/testing",
							Backend: v1beta1.IngressBackend{
								ServiceName: "kuard",
								ServicePort: intstr.FromInt(80),
							},
						}},
					},
				},
			}},
		},
	})

	// check that ingress_http has been updated.
	assert.Equal(t, &v2.DiscoveryResponse{
		VersionInfo: "2",
		Resources: routeResources(t,
			envoy.RouteConfiguration("ingress_http",
				envoy.VirtualHost("*",
					&envoy_api_v2_route.Route{
						Match:  routePrefix("/testing"),
						Action: routecluster("default/kuard/80/da39a3ee5e"),
					}),
			),
		),
		TypeUrl: routeType,
		Nonce:   "2",
	}, streamRDS(t, cc))
}
explode_data.jsonl/70750
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 927 }
[ 2830, 3393, 4036, 641, 2483, 1155, 353, 8840, 836, 8, 341, 7000, 71, 11, 12527, 11, 2814, 1669, 6505, 1155, 340, 16867, 2814, 2822, 84004, 1669, 77520, 16, 80222, 63121, 25, 330, 74, 11034, 497, 41962, 25, 330, 2258, 63159, 1903, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResponse_GetRolesVerboseEqual(t *testing.T) { popr := math_rand.New(math_rand.NewSource(time.Now().UnixNano())) p := NewPopulatedResponse_GetRoles(popr, false) data, err := github_com_gogo_protobuf_proto.Marshal(p) if err != nil { panic(err) } msg := &Response_GetRoles{} if err := github_com_gogo_protobuf_proto.Unmarshal(data, msg); err != nil { panic(err) } if err := p.VerboseEqual(msg); err != nil { t.Fatalf("%#v !VerboseEqual %#v, since %v", msg, p, err) } }
explode_data.jsonl/42029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 2582, 13614, 25116, 63404, 2993, 1155, 353, 8840, 836, 8, 341, 3223, 46288, 1669, 6888, 33864, 7121, 37270, 33864, 7121, 3608, 9730, 13244, 1005, 55832, 83819, 12145, 3223, 1669, 1532, 11598, 7757, 2582, 13614, 25116, 40148, 81,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestImplementsProvisioner(t *testing.T) { var raw interface{} raw = &Provisioner{} if _, ok := raw.(packer.Provisioner); !ok { t.Fatalf("Interface packer.Provisioner is not implemented") } }
explode_data.jsonl/1404
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 1427, 4674, 1336, 13013, 261, 1155, 353, 8840, 836, 8, 341, 220, 762, 7112, 3749, 31483, 220, 7112, 284, 609, 1336, 13013, 261, 31483, 220, 421, 8358, 5394, 1669, 7112, 12832, 4748, 261, 7763, 13013, 261, 1215, 753, 562, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func Test_Pack(t *testing.T) { gtest.C(t, func(t *gtest.T) { srcPath := gdebug.TestDataPath("files") data, err := gres.Pack(srcPath) t.Assert(err, nil) r := gres.New() err = r.Add(string(data)) t.Assert(err, nil) t.Assert(r.Contains("files"), true) }) }
explode_data.jsonl/26297
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 1088, 473, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 41144, 1820, 1669, 342, 8349, 8787, 1043, 1820, 445, 7198, 1138, 197, 8924, 11, 1848, 1669, 342, 416, 1069, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGeoCounterLabel(t *testing.T) { collector := metrics.New(&geoIpLookupProvider{}) counter, err := collector.CreateCounterGeo("test", "seconds", "Hello world!") assert.Nil(t, err, "creating counter returned an error") counter.Increment(net.ParseIP("127.0.0.2")) newCounter := counter.WithLabels(metrics.Label("foo", "bar")) newCounter.Increment(net.ParseIP("127.0.0.2"), metrics.Label("baz", "bar")) metric := collector.GetMetric("test") assert.Equal(t, 2, len(metric)) assert.Equal(t, "test", metric[0].Name) assert.Equal(t, float64(1), metric[0].Value) assert.Equal(t, 1, len(metric[0].Labels)) assert.Equal(t, "test", metric[1].Name) assert.Equal(t, float64(1), metric[1].Value) assert.Equal(t, 3, len(metric[1].Labels)) }
explode_data.jsonl/71468
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 37344, 14099, 2476, 1155, 353, 8840, 836, 8, 341, 1444, 24605, 269, 1669, 16734, 7121, 2099, 13052, 23378, 34247, 5179, 37790, 58261, 11, 1848, 1669, 31953, 7251, 14099, 37344, 445, 1944, 497, 330, 17403, 497, 330, 9707, 1879,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRandomStream verifies the random stream reader: sequential reads drain
// exactly 1024 bytes, reading past the end yields io.EOF, and seeking back
// to the start replays the identical byte sequence.
func TestRandomStream(t *testing.T) {
	r, err := NewRandomStream(1024)
	require.NoError(t, err)
	// First 500 bytes: the buffer must be filled with non-zero data.
	a := make([]byte, 500)
	n, err := r.Read(a)
	require.NoError(t, err)
	require.Equal(t, 500, n)
	require.NotEqual(t, a, make([]byte, 500))
	b := make([]byte, 500)
	n, err = r.Read(b)
	require.NoError(t, err)
	require.Equal(t, 500, n)
	// Only 24 bytes remain of the original 1024.
	c := make([]byte, 500)
	n, err = r.Read(c)
	require.NoError(t, err)
	require.Equal(t, 24, n)
	require.NoError(t, r.Close())
	// Read after finishing should return io.EOF
	n, err = r.Read(c)
	require.Equal(t, io.EOF, err)
	require.Equal(t, 0, n)
	// SeekStart should set the position to 0
	pos, err := r.Seek(0, io.SeekStart)
	require.NoError(t, err)
	require.Equal(t, int64(0), pos)
	require.Equal(t, r.(*randomStream).position, int64(0))
	require.Equal(t, r.(*randomStream).remaining, 1024)
	// The stream is deterministic: the replayed prefix equals the first read.
	a1 := make([]byte, 500)
	n, err = r.Read(a1)
	require.NoError(t, err)
	require.Equal(t, 500, n)
	require.Equal(t, a, a1)
}
explode_data.jsonl/44846
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 425 }
[ 2830, 3393, 13999, 3027, 1155, 353, 8840, 836, 8, 341, 7000, 11, 1848, 1669, 1532, 13999, 3027, 7, 16, 15, 17, 19, 340, 17957, 35699, 1155, 11, 1848, 692, 11323, 1669, 1281, 10556, 3782, 11, 220, 20, 15, 15, 340, 9038, 11, 1848, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPongWire tests MsgPong wire encode and decode against byte-exact
// expected encodings for multiple protocol versions.
func TestPongWire(t *testing.T) {
	tests := []struct {
		in   wire.MsgPong // Message to encode
		out  wire.MsgPong // Expected decoded message
		buf  []byte       // Wire encoding
		pver uint32       // Protocol version for wire encoding
	}{
		// Latest protocol version.
		{
			wire.MsgPong{Nonce: 123123}, // 0x1e0f3
			wire.MsgPong{Nonce: 123123}, // 0x1e0f3
			[]byte{0xf3, 0xe0, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00},
			wire.ProtocolVersion,
		},
		// Protocol version BIP0031Version+1
		{
			wire.MsgPong{Nonce: 456456}, // 0x6f708
			wire.MsgPong{Nonce: 456456}, // 0x6f708
			[]byte{0x08, 0xf7, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00},
			wire.BIP0031Version + 1,
		},
	}
	t.Logf("Running %d tests", len(tests))
	for i, test := range tests {
		// Encode the message to wire format.
		var buf bytes.Buffer
		err := test.in.BtcEncode(&buf, test.pver)
		if err != nil {
			t.Errorf("BtcEncode #%d error %v", i, err)
			continue
		}
		if !bytes.Equal(buf.Bytes(), test.buf) {
			t.Errorf("BtcEncode #%d\n got: %s want: %s", i, spew.Sdump(buf.Bytes()), spew.Sdump(test.buf))
			continue
		}
		// Decode the message from wire format.
		var msg wire.MsgPong
		rbuf := bytes.NewReader(test.buf)
		err = msg.BtcDecode(rbuf, test.pver)
		if err != nil {
			t.Errorf("BtcDecode #%d error %v", i, err)
			continue
		}
		if !reflect.DeepEqual(msg, test.out) {
			t.Errorf("BtcDecode #%d\n got: %s want: %s", i, spew.Sdump(msg), spew.Sdump(test.out))
			continue
		}
	}
}
explode_data.jsonl/6622
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 695 }
[ 2830, 3393, 47, 644, 37845, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 256, 9067, 30365, 47, 644, 442, 4856, 311, 16164, 198, 197, 13967, 220, 9067, 30365, 47, 644, 442, 31021, 29213, 1943, 198, 197, 26398...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestInvokeBinding checks the gRPC InvokeBinding API: a successful
// invocation returns no error, while a failing output binding is surfaced
// to the caller as codes.Internal.
func TestInvokeBinding(t *testing.T) {
	port, _ := freeport.GetFreePort()
	srv := &api{
		// Fake output-binding dispatcher: fails only for "error-binding".
		sendToOutputBindingFn: func(name string, req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) {
			if name == "error-binding" {
				return nil, errors.New("error when invoke binding")
			}
			return &bindings.InvokeResponse{Data: []byte("ok")}, nil
		},
	}
	server := startTestServerAPI(port, srv)
	defer server.Stop()
	clientConn := createTestClient(port)
	defer clientConn.Close()
	client := runtimev1pb.NewDaprClient(clientConn)
	// Happy path: the default binding name succeeds.
	_, err := client.InvokeBinding(context.Background(), &runtimev1pb.InvokeBindingRequest{})
	assert.Nil(t, err)
	// Failure path: the binding error maps to a gRPC Internal status.
	_, err = client.InvokeBinding(context.Background(), &runtimev1pb.InvokeBindingRequest{Name: "error-binding"})
	assert.Equal(t, codes.Internal, status.Code(err))
}
explode_data.jsonl/21738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 17604, 15059, 1155, 353, 8840, 836, 8, 341, 52257, 11, 716, 1669, 1910, 403, 2234, 10940, 7084, 741, 1903, 10553, 1669, 609, 2068, 515, 197, 32817, 1249, 5097, 15059, 24911, 25, 2915, 3153, 914, 11, 4232, 353, 65495, 32784, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIssue15315(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustQuery("select '0-3261554956'+0.0").Check(testkit.Rows("0")) tk.MustQuery("select cast('0-1234' as real)").Check(testkit.Rows("0")) }
explode_data.jsonl/65502
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 42006, 16, 20, 18, 16, 20, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreatesCorrectBoxFromSimpleBox(t *testing.T) { assert := assert.New(t) bh := BoxHandler{} boxes := []SimpleBox{SimpleBox{Boxname: "dev", Username: "benphegan", Provider: "virtualbox", Version: "2.0"}} host := "localhost" bh.createBoxes(boxes, 80, &host) assert.Equal(1, len(bh.Boxes["benphegan"])) assert.Equal("2.0", bh.GetBox("benphegan", "dev").CurrentVersion.Version) assert.Equal(1, len(bh.GetBox("benphegan", "dev").CurrentVersion.Providers)) }
explode_data.jsonl/51584
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 54868, 33092, 1611, 3830, 16374, 1611, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 2233, 71, 1669, 8261, 3050, 16094, 197, 22204, 1669, 3056, 16374, 1611, 90, 16374, 1611, 90, 1611, 606, 25, 330, 3583, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSQLite_ColumnInt covers diffing and migrating integer column changes
// on SQLite: toggling nullability together with a type change, and a
// sequence of default-value changes.
func TestSQLite_ColumnInt(t *testing.T) {
	t.Run("ChangeTypeNull", func(t *testing.T) {
		liteRun(t, func(t *liteTest) {
			usersT := &schema.Table{
				Name:    "users",
				Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "integer"}}}},
			}
			t.migrate(&schema.AddTable{T: usersT})
			t.dropTables(usersT.Name)
			// Flip the column to a nullable REAL and diff against the database.
			usersT.Columns[0].Type.Null = true
			usersT.Columns[0].Type.Type = &schema.FloatType{T: "real"}
			changes := t.diff(t.loadUsers(), usersT)
			require.Len(t, changes, 1)
			// Both the null flag and the type change must be detected together.
			require.Equal(t, schema.ChangeNull|schema.ChangeType, changes[0].(*schema.ModifyColumn).Change)
			t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
			ensureNoChange(t, usersT)
		})
	})
	t.Run("ChangeDefault", func(t *testing.T) {
		liteRun(t, func(t *liteTest) {
			usersT := &schema.Table{
				Name:    "users",
				Columns: []*schema.Column{{Name: "a", Type: &schema.ColumnType{Type: &schema.IntegerType{T: "int"}}, Default: &schema.Literal{V: "1"}}},
			}
			t.migrate(&schema.AddTable{T: usersT})
			t.dropTables(usersT.Name)
			ensureNoChange(t, usersT)
			// Apply a sequence of default-value changes, inserting a row after each.
			for _, x := range []string{"2", "'3'", "10.1"} {
				usersT.Columns[0].Default.(*schema.Literal).V = x
				changes := t.diff(t.loadUsers(), usersT)
				require.Len(t, changes, 1)
				t.migrate(&schema.ModifyTable{T: usersT, Changes: changes})
				ensureNoChange(t, usersT)
				_, err := t.db.Exec("INSERT INTO users DEFAULT VALUES")
				require.NoError(t, err)
			}
			// Each inserted row carries the default active at its insert time.
			rows, err := t.db.Query("SELECT a FROM users")
			require.NoError(t, err)
			for _, e := range []driver.Value{2, 3, 10.1} {
				var v driver.Value
				require.True(t, rows.Next())
				require.NoError(t, rows.Scan(&v))
				require.EqualValues(t, e, v)
			}
			require.False(t, rows.Next())
			require.NoError(t, rows.Close())
		})
	})
}
explode_data.jsonl/20088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 823 }
[ 2830, 3393, 81772, 96622, 1072, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 4072, 929, 3280, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 8810, 632, 6727, 1155, 11, 2915, 1155, 353, 68078, 2271, 8, 341, 298, 90896, 51, 1669, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateLogsReceiver_error(t *testing.T) { cfg := createDefaultConfig().(*Config) cfg.ProtocolVersion = "2.0.0" // disable contacting broker at startup cfg.Metadata.Full = false f := kafkaReceiverFactory{logsUnmarshalers: defaultLogsUnmarshalers()} r, err := f.createLogsReceiver(context.Background(), componenttest.NewNopReceiverCreateSettings(), cfg, nil) require.NoError(t, err) assert.NotNil(t, r) }
explode_data.jsonl/70860
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 4021, 51053, 25436, 4096, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 1855, 3675, 2648, 1005, 4071, 2648, 340, 50286, 54096, 5637, 284, 330, 17, 13, 15, 13, 15, 698, 197, 322, 11156, 46108, 22316, 518, 20567, 198, 50286, 46...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSwitchExists(t *testing.T) { s1, err := NewSwitch() if err != nil { t.Fatal(err) } defer s1.Release() if !s1.Exists() { t.Fatal("Expecting switch", s1.NodeName(), "exists") } s2 := Switch{Name: "xcvxcvcxv"} if s2.Exists() { t.Fatal("Expecting switch", s2.NodeName(), "does not exist") } }
explode_data.jsonl/34909
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 16837, 15575, 1155, 353, 8840, 836, 8, 341, 1903, 16, 11, 1848, 1669, 1532, 16837, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 16867, 274, 16, 58693, 2822, 743, 753, 82, 16, 26996, 368, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCreatePool(t *testing.T) { if _, err := fc.CreatePool(c.NewAdminContext(), &model.StoragePoolSpec{BaseModel: &model.BaseModel{}}); err != nil { t.Error("Create pool failed:", err) } }
explode_data.jsonl/50705
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 4021, 10551, 1155, 353, 8840, 836, 8, 341, 743, 8358, 1848, 1669, 25563, 7251, 10551, 1337, 7121, 7210, 1972, 1507, 609, 2528, 43771, 10551, 8327, 90, 3978, 1712, 25, 609, 2528, 13018, 1712, 90, 3417, 1215, 1848, 961, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestEntry_Disable_KeyDown(t *testing.T) { entry := widget.NewEntry() test.Type(entry, "H") entry.Disable() test.Type(entry, "i") assert.Equal(t, "H", entry.Text) entry.Enable() test.Type(entry, "i") assert.Equal(t, "Hi", entry.Text) }
explode_data.jsonl/12366
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 5874, 45525, 480, 81459, 1155, 353, 8840, 836, 8, 341, 48344, 1669, 9086, 7121, 5874, 2822, 18185, 10184, 18238, 11, 330, 39, 1138, 48344, 10166, 480, 741, 18185, 10184, 18238, 11, 330, 72, 1138, 6948, 12808, 1155, 11, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew(t *testing.T) { newFn := func() (toil.Toiler, error) { return nil, nil } newer := newFuncNewer(newFn) toilForker := New(newer) if nil == toilForker { t.Errorf("After calling New(), expected returned value not to be nil, but instead was: %v", toilForker) } }
explode_data.jsonl/15804
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 1476, 8638, 24911, 1669, 2915, 368, 320, 983, 321, 3274, 5769, 11, 1465, 8, 341, 197, 853, 2092, 11, 2092, 198, 197, 630, 8638, 261, 1669, 501, 9626, 3564, 261, 1755, 24911, 692, 31709, 321...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApplicationResolver_DeleteApplication(t *testing.T) { // GIVEN fixName := "fix" appSvc := automock.NewApplicationSvc() defer appSvc.AssertExpectations(t) appSvc.On("Delete", fixName).Return(nil) resolver := application.NewApplicationResolver(appSvc, nil) // WHEN out, err := resolver.DeleteApplication(context.Background(), fixName) // THEN require.NoError(t, err) assert.Equal(t, fixName, out.Name) }
explode_data.jsonl/28761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 4988, 18190, 57418, 4988, 1155, 353, 8840, 836, 8, 341, 197, 322, 89836, 198, 1166, 941, 675, 1669, 330, 5743, 698, 28236, 92766, 1669, 5006, 1176, 7121, 4988, 92766, 741, 16867, 906, 92766, 11711, 17536, 804, 1155, 340, 282...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAccOrganisation_basic is an acceptance test for the form3_organisation
// resource: it creates an organisation, verifies its attributes, then applies
// an updated configuration and re-verifies.
func TestAccOrganisation_basic(t *testing.T) {
	var organisationResponse organisations.GetUnitsIDOK
	// The parent organisation comes from the environment; the new id is random.
	parentOrganisationId := os.Getenv("FORM3_ORGANISATION_ID")
	organisationId := uuid.NewV4().String()
	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckOrganisationDestroy,
		Steps: []resource.TestStep{
			{
				// Step 1: create the organisation and check its attributes.
				Config: fmt.Sprintf(testForm3OrganisationConfigA, organisationId, parentOrganisationId),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckOrganisationExists("form3_organisation.organisation", &organisationResponse),
					resource.TestCheckResourceAttr(
						"form3_organisation.organisation", "name", "terraform-organisation"),
					resource.TestCheckResourceAttr(
						"form3_organisation.organisation", "parent_organisation_id", parentOrganisationId),
					resource.TestCheckResourceAttr(
						"form3_organisation.organisation", "organisation_id", organisationId),
				),
			},
			{
				// Step 2: update the organisation name in place and re-check.
				Config: fmt.Sprintf(testForm3OrganisationConfigAUpdate, organisationId, parentOrganisationId),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckOrganisationExists("form3_organisation.organisation", &organisationResponse),
					resource.TestCheckResourceAttr(
						"form3_organisation.organisation", "name", "terraform-organisation-updated"),
					resource.TestCheckResourceAttr(
						"form3_organisation.organisation", "parent_organisation_id", parentOrganisationId),
					resource.TestCheckResourceAttr(
						"form3_organisation.organisation", "organisation_id", organisationId),
				),
			},
		},
	})
}
explode_data.jsonl/58662
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 577 }
[ 2830, 3393, 14603, 23227, 7923, 34729, 1155, 353, 8840, 836, 8, 341, 2405, 21424, 2582, 28433, 2234, 26314, 915, 3925, 198, 24804, 23227, 7923, 764, 1669, 2643, 64883, 445, 10818, 18, 19834, 58487, 1637, 3495, 3450, 1138, 197, 57804, 764,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test(t *testing.T) { s := new(S) s.browser = rod.New().Client(nil).Connect() defer s.browser.Close() s.page = s.browser.Page("") s.page.Viewport(800, 600, 1, false) suite.Run(t, s) }
explode_data.jsonl/15071
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 3759, 340, 1903, 38611, 284, 20589, 7121, 1005, 2959, 27907, 568, 14611, 2822, 16867, 274, 38611, 10421, 2822, 1903, 10460, 284, 274, 38611, 17558, 31764, 1903, 10460, 6382, 403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestController_GetOperations(t *testing.T) { controller, err := New(&operation.Config{ VDRI: &vdrmock.MockVDRegistry{}, StoreProvider: ariesmemstorage.NewProvider(), }) require.NoError(t, err) require.NotNil(t, controller) ops := controller.GetOperations() require.Equal(t, 6, len(ops)) }
explode_data.jsonl/26410
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 2051, 13614, 35120, 1155, 353, 8840, 836, 8, 341, 61615, 11, 1848, 1669, 1532, 2099, 9262, 10753, 515, 197, 197, 12474, 4305, 25, 688, 609, 85, 3612, 16712, 24664, 12474, 15603, 38837, 197, 197, 6093, 5179, 25, 264, 4019, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIsBasePoint verifies EcPoint.IsBasePoint against the canonical
// generators of several curves plus a set of deliberately wrong points.
func TestIsBasePoint(t *testing.T) {
	k256 := btcec.S256()
	p224 := elliptic.P224()
	p256 := elliptic.P256()
	// A scalar multiple of the p224 generator that is NOT the generator itself.
	notG_p224, err := NewScalarBaseMult(p224, tt.B10("9876453120"))
	require.NoError(t, err)
	tests := []struct {
		name     string
		curve    elliptic.Curve
		x, y     *big.Int
		expected bool
	}{
		{"k256-positive", k256, k256.Gx, k256.Gy, true},
		{"p224-positive", p224, p224.Params().Gx, p224.Params().Gy, true},
		{"p256-positive", p256, p256.Params().Gx, p256.Params().Gy, true},
		{"p224-negative", p224, notG_p224.X, notG_p224.Y, false},
		{"p256-negative-wrong-curve", p256, notG_p224.X, notG_p224.Y, false},
		{"k256-negative-doubleGx", k256, k256.Gx, k256.Gx, false},
		{"k256-negative-doubleGy", k256, k256.Gy, k256.Gy, false},
		{"k256-negative-xy-swap", k256, k256.Gy, k256.Gx, false},
		{"k256-negative-oh-oh", k256, core.Zero, core.Zero, false},
	}
	// Run all the tests!
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			actual := EcPoint{test.curve, test.x, test.y}.IsBasePoint()
			require.Equal(t, test.expected, actual)
		})
	}
}
explode_data.jsonl/75671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 499 }
[ 2830, 3393, 3872, 3978, 2609, 1155, 353, 8840, 836, 8, 341, 16463, 17, 20, 21, 1669, 19592, 68955, 808, 17, 20, 21, 741, 3223, 17, 17, 19, 1669, 77783, 292, 1069, 17, 17, 19, 741, 3223, 17, 20, 21, 1669, 77783, 292, 1069, 17, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormatBytes(t *testing.T) { tests := []struct { val uint64 want string }{ {1023, "1023 bytes"}, {1024, "1.00KB (1024 bytes)"}, {1024*1024 - 100, "1023.90KB (1048476 bytes)"}, {1024 * 1024, "1.00MB (1048576 bytes)"}, {1024 * 1025, "1.00MB (1049600 bytes)"}, {1024 * 1024 * 1024, "1.00GB (1073741824 bytes)"}, {1024*1024*1024 + 430*1024*1024, "1.42GB (1524629504 bytes)"}, {1024 * 1024 * 1024 * 1024 * 1024, "1.00PB (1125899906842624 bytes)"}, {1024 * 1024 * 1024 * 1024 * 1024 * 1024, "1024.00PB (1152921504606846976 bytes)"}, } for _, tt := range tests { result := formatBytes(tt.val) if result != tt.want { t.Errorf("formatBytes(%v) = %q; want %q", tt.val, result, tt.want) } } }
explode_data.jsonl/16615
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 322 }
[ 2830, 3393, 4061, 7078, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 19302, 220, 2622, 21, 19, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 90, 16, 15, 17, 18, 11, 330, 16, 15, 17, 18, 5820, 7115, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestExtractTrigrams verifies trigram extraction for single words, multiple
// words, single characters, and empty or whitespace-only input.
func TestExtractTrigrams(t *testing.T) {
	var tests = []struct {
		input    string
		trigrams []string
	}{
		{
			input:    "word",
			trigrams: []string{" w", " wo", "wor", "ord", "rd "},
		},
		{
			input:    "two words",
			trigrams: []string{" t", " tw", "two", "wo ", " w", " wo", "wor", "ord", "rds", "ds "},
		},
		{
			input:    "a",
			trigrams: []string{" a", " a "},
		},
		{
			// Surrounding whitespace is trimmed; same trigrams as bare "a".
			input:    " a ",
			trigrams: []string{" a", " a "},
		},
		{
			// No trigrams expected for empty input.
			input: "",
		},
		{
			input: " ",
		},
	}
	for _, tt := range tests {
		t.Run(tt.input, func(t *testing.T) {
			trigrams := ExtractTrigrams(tt.input)
			require.Equal(t, len(tt.trigrams), len(trigrams))
			// Compare each extracted trigram by its string form.
			for i, tg := range trigrams {
				assert.Equal(t, tt.trigrams[i], fmt.Sprint(tg))
			}
		})
	}
}
explode_data.jsonl/30742
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 28959, 1282, 83109, 1155, 353, 8840, 836, 8, 1476, 2405, 7032, 284, 3056, 1235, 341, 197, 22427, 262, 914, 198, 197, 25583, 83109, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 22427, 25, 262, 330, 1158, 756, 298, 25583,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPushImageJSONIndex(t *testing.T) { r := spawnTestRegistrySession(t) imgData := []*ImgData{ { ID: "77dbf71da1d00e3fbddc480176eac8994025630c6590d11cfc8fe1209c2a1d20", Checksum: "sha256:1ac330d56e05eef6d438586545ceff7550d3bdcb6b19961f12c5ba714ee1bb37", }, { ID: "42d718c941f5c532ac049bf0b0ab53f0062f09a03afd4aa4a02c098e46032b9d", Checksum: "sha256:bea7bf2e4bacd479344b737328db47b18880d09096e6674165533aa994f5e9f2", }, } repoData, err := r.PushImageJSONIndex("foo42/bar", imgData, false, nil) if err != nil { t.Fatal(err) } if repoData == nil { t.Fatal("Expected RepositoryData object") } repoData, err = r.PushImageJSONIndex("foo42/bar", imgData, true, []string{r.indexEndpoint.String()}) if err != nil { t.Fatal(err) } if repoData == nil { t.Fatal("Expected RepositoryData object") } }
explode_data.jsonl/13111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 427 }
[ 2830, 3393, 16644, 1906, 5370, 1552, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 18042, 2271, 15603, 5283, 1155, 340, 39162, 1043, 1669, 29838, 13033, 1043, 515, 197, 197, 515, 298, 29580, 25, 981, 330, 22, 22, 1999, 69, 22, 16, 3235, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDgraphUniverseRepositoryFind(t *testing.T) { wantUniverse := &Universe{ Name: "TestDgraphUniverseRepositoryFind_name", User: "TestDgraphUniverseRepositoryFind_user", Description: "TestDgraphUniverseRepositoryFind_description", } graphql := graphql.NewClient(graphqlUri, nil) repo := &GraphqlUniverseRepository{graphql} ctx := context.Background() if err := repo.Insert(ctx, wantUniverse); err != nil { t.Fatal(err) } defer func(u *Universe) { if err := repo.Delete(ctx, u); err != nil { t.Fatal(err) } }(wantUniverse) if len(wantUniverse.Id) == 0 { t.Fatalf("Got empty universe Id") } if gotUniverse, err := repo.Find(ctx, wantUniverse.Id); err != nil { t.Fatal(err) } else if gotUniverse.Id != wantUniverse.Id { t.Errorf("Got id = %v, want %v", gotUniverse.Id, wantUniverse.Id) } else if gotUniverse.Name != wantUniverse.Name { t.Errorf("Got name = %s, want %s", gotUniverse.Name, wantUniverse.Name) } else if gotUniverse.User != wantUniverse.User { t.Errorf("Got user = %s, want %s", gotUniverse.User, wantUniverse.User) } else if gotUniverse.Description != wantUniverse.Description { t.Errorf("Got description = %s, want %s", gotUniverse.Description, wantUniverse.Description) } }
explode_data.jsonl/12304
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 480 }
[ 2830, 3393, 35, 4439, 1806, 8034, 4624, 9885, 1155, 353, 8840, 836, 8, 341, 50780, 1806, 8034, 1669, 609, 1806, 8034, 515, 197, 21297, 25, 286, 330, 2271, 35, 4439, 1806, 8034, 4624, 9885, 1269, 756, 197, 31672, 25, 286, 330, 2271, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVariousDeadlines1Proc(t *testing.T) { // Cannot use t.Parallel - modifies global GOMAXPROCS. if testing.Short() { t.Skip("skipping in short mode") } defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(1)) testVariousDeadlines(t) }
explode_data.jsonl/57687
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 72641, 28320, 7969, 16, 24508, 1155, 353, 8840, 836, 8, 341, 197, 322, 34655, 990, 259, 41288, 7957, 481, 82949, 3644, 479, 1898, 2954, 9117, 6412, 624, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 304, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestContextAbortWithError(t *testing.T) { w := httptest.NewRecorder() c, _ := CreateTestContext(w) c.AbortWithError(http.StatusUnauthorized, errors.New("bad input")).SetMeta("some input") // nolint: errcheck assert.Equal(t, http.StatusUnauthorized, w.Code) assert.Equal(t, abortIndex, c.index) assert.True(t, c.IsAborted()) }
explode_data.jsonl/26815
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 1972, 85891, 66102, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 54320, 70334, 7121, 47023, 741, 1444, 11, 716, 1669, 4230, 2271, 1972, 3622, 692, 1444, 25206, 371, 66102, 19886, 10538, 51181, 11, 5975, 7121, 445, 13855, 1946, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSharedBufferSingleton(t *testing.T) { buf := NewSharedBuffer(3) ch := buf.NewChannel() for i := 0; i < 5; i++ { ch.In() <- (*int)(nil) ch.In() <- (*int)(nil) ch.In() <- (*int)(nil) select { case ch.In() <- (*int)(nil): t.Error("Wrote to full shared-buffer") default: } <-ch.Out() <-ch.Out() <-ch.Out() select { case <-ch.Out(): t.Error("Read from empty shared-buffer") default: } } ch.Close() buf.Close() }
explode_data.jsonl/59663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 16997, 4095, 25915, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 1532, 16997, 4095, 7, 18, 692, 23049, 1669, 6607, 7121, 9629, 741, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 20, 26, 600, 1027, 341, 197, 23049, 5337, 368...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCanSupport(t *testing.T) { tmpDir, err := utiltesting.MkTmpdir("gcepdTest") if err != nil { t.Fatalf("can't make a temp dir: %v", err) } defer os.RemoveAll(tmpDir) plugMgr := volume.VolumePluginMgr{} plugMgr.InitPlugins(ProbeVolumePlugins(), nil /* prober */, volumetest.NewFakeVolumeHost(tmpDir, nil, nil)) plug, err := plugMgr.FindPluginByName("kubernetes.io/gce-pd") if err != nil { t.Errorf("Can't find the plugin by name") } if plug.GetPluginName() != "kubernetes.io/gce-pd" { t.Errorf("Wrong name: %s", plug.GetPluginName()) } if !plug.CanSupport(&volume.Spec{Volume: &v1.Volume{VolumeSource: v1.VolumeSource{GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{}}}}) { t.Errorf("Expected true") } if !plug.CanSupport(&volume.Spec{PersistentVolume: &v1.PersistentVolume{Spec: v1.PersistentVolumeSpec{PersistentVolumeSource: v1.PersistentVolumeSource{GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{}}}}}) { t.Errorf("Expected true") } }
explode_data.jsonl/65181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 384 }
[ 2830, 3393, 6713, 7916, 1155, 353, 8840, 836, 8, 341, 20082, 6184, 11, 1848, 1669, 4094, 8840, 1321, 74, 35986, 3741, 445, 70, 346, 15360, 2271, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 4814, 944, 1281, 264, 2730, 5419,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestParseErrors(t *testing.T) { t.Parallel() cases := []struct { name string input string expect string }{ {"disallow-before", "Disallow: /\nUser-agent: bot", "Disallow before User-agent"}, {"crawl-delay-syntax", "User-agent: bot\nCrawl-delay: bad-time-value", "invalid syntax"}, {"crawl-delay-inf", "User-agent: bot\nCrawl-delay: -inf", "invalid value"}, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { t.Log("input:", c.input) _, err := FromString(c.input) require.Error(t, err) _, ok := err.(*ParseError) assert.True(t, ok, "Expected ParseError") require.Contains(t, err.Error(), c.expect) }) } }
explode_data.jsonl/51681
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 14463, 13877, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 22427, 220, 914, 198, 197, 24952, 914, 198, 197, 59403, 197, 197, 4913, 4243, 7183, 694...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFuncMap(t *testing.T) { fns := funcMap() forbidden := []string{"env", "expandenv"} for _, f := range forbidden { if _, ok := fns[f]; ok { t.Errorf("Forbidden function %s exists in FuncMap.", f) } } // Test for Engine-specific template functions. expect := []string{"include", "required", "tpl", "toYaml", "fromYaml", "toToml", "toJson", "fromJson"} for _, f := range expect { if _, ok := fns[f]; !ok { t.Errorf("Expected add-on function %q", f) } } }
explode_data.jsonl/74698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 9626, 2227, 1155, 353, 8840, 836, 8, 341, 1166, 4412, 1669, 2915, 2227, 741, 2023, 22108, 1669, 3056, 917, 4913, 3160, 497, 330, 32317, 3160, 16707, 2023, 8358, 282, 1669, 2088, 36813, 341, 197, 743, 8358, 5394, 1669, 282, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCheckMergeBoundedQuantilesStateChecks(t *testing.T) { for _, tc := range []struct { state1 aggregationState state2 aggregationState wantErr bool }{ {defaultState, defaultState, false}, {resultReturned, defaultState, true}, {defaultState, resultReturned, true}, {serialized, defaultState, true}, {defaultState, serialized, true}, {defaultState, merged, true}, {merged, defaultState, true}, } { lower, upper := -5.0, 5.0 bq1 := getNoiselessBQ(t, lower, upper) bq2 := getNoiselessBQ(t, lower, upper) bq1.state = tc.state1 bq2.state = tc.state2 if err := checkMergeBoundedQuantiles(bq1, bq2); (err != nil) != tc.wantErr { t.Errorf("CheckMerge: when states [%v, %v] for err got %v, wantErr %t", tc.state1, tc.state2, err, tc.wantErr) } } }
explode_data.jsonl/80643
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 322 }
[ 2830, 3393, 3973, 52096, 33, 13082, 44220, 3658, 1397, 49820, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 24291, 16, 220, 51629, 1397, 198, 197, 24291, 17, 220, 51629, 1397, 198, 197, 50780, 7747, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOFctrlFlow(t *testing.T) { br := "br03" err := PrepareOVSBridge(br) if err != nil { t.Fatalf("Failed to prepare OVS bridge: %v", err) } defer func() { err = DeleteOVSBridge(br) if err != nil { t.Errorf("error while deleting OVS bridge: %v", err) } }() bridge := binding.NewOFBridge(br) table1 := bridge.CreateTable(1, 2, binding.TableMissActionNext) table2 := bridge.CreateTable(2, 3, binding.TableMissActionNext) err = bridge.Connect(maxRetry, make(chan struct{})) if err != nil { t.Fatal("Failed to start OFService") } defer bridge.Disconnect() for _, test := range []tableFlows{ {table: table1, flowGenerator: prepareFlows}, {table: table2, flowGenerator: prepareNATflows}, } { myTable := test.table myFunc := test.flowGenerator flows, expectflows := myFunc(myTable) for id, flow := range flows { if err := flow.Add(); err != nil { t.Errorf("Failed to install flow%d: %v", id, err) } } dumpTable := uint8(myTable.GetID()) flowList := CheckFlowExists(t, br, dumpTable, true, expectflows) // Test: DumpTableStatus for _, tableStates := range bridge.DumpTableStatus() { if tableStates.ID == uint(dumpTable) { if int(tableStates.FlowCount) != len(flowList) { t.Errorf("Flow count of table %d in the cache is incorrect, expect: %d, actual %d", dumpTable, len(flowList), tableStates.FlowCount) } } } // Test: DumpFlows dumpCookieID, dumpCookieMask := getCookieIDMask() flowStates := bridge.DumpFlows(dumpCookieID, dumpCookieMask) if len(flowStates) != len(flowList) { t.Errorf("Flow count in dump result is incorrect") } // Test: Flow.Delete for _, f := range flows[0:2] { if err := f.Delete(); err != nil { t.Errorf("Failed to uninstall flow1 %v", err) } } CheckFlowExists(t, br, dumpTable, false, expectflows[0:2]) // Test: DeleteFlowsByCookie err = bridge.DeleteFlowsByCookie(dumpCookieID, dumpCookieMask) if err != nil { t.Errorf("Failed to DeleteFlowsByCookie: %v", err) } flowList, _ = OfctlDumpTableFlows(br, uint8(myTable.GetID())) if len(flowList) > 0 { t.Errorf("Failed to delete 
flows by CookieID") } } }
explode_data.jsonl/23785
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 859 }
[ 2830, 3393, 12483, 11933, 18878, 1155, 353, 8840, 836, 8, 341, 80255, 1669, 330, 1323, 15, 18, 698, 9859, 1669, 31166, 38957, 16680, 11183, 41237, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 311, 10549, 506, 26050, 1416...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestExportContainerAfterDaemonRestart(t *testing.T) { skip.If(t, testEnv.DaemonInfo.OSType != "linux") skip.If(t, testEnv.IsRemoteDaemon()) d := daemon.New(t, "", "dockerd", daemon.Config{}) client, err := d.NewClient() assert.NilError(t, err) d.StartWithBusybox(t) defer d.Stop(t) ctx := context.Background() cfg := containerTypes.Config{ Image: "busybox", Cmd: []string{"top"}, } ctr, err := client.ContainerCreate(ctx, &cfg, nil, nil, "") assert.NilError(t, err) d.Restart(t) _, err = client.ContainerExport(ctx, ctr.ID) assert.NilError(t, err) }
explode_data.jsonl/34688
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 16894, 4502, 6025, 89177, 59354, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 32901, 1155, 11, 1273, 14359, 909, 64, 7291, 1731, 13, 4233, 499, 961, 330, 14210, 1138, 1903, 13389, 32901, 1155, 11, 1273, 14359, 4506, 24703, 891...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAdapterCookieSyncMetric(t *testing.T) { m := createMetricsForTesting() adapterName := "anyName" privacyBlocked := true m.RecordAdapterCookieSync(openrtb_ext.BidderName(adapterName), privacyBlocked) expectedCount := float64(1) assertCounterVecValue(t, "", "adapterCookieSync", m.adapterCookieSync, expectedCount, prometheus.Labels{ adapterLabel: adapterName, privacyBlockedLabel: "true", }) }
explode_data.jsonl/13953
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 5940, 20616, 12154, 54310, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1855, 27328, 2461, 16451, 741, 197, 19731, 675, 1669, 330, 3767, 675, 698, 197, 46821, 95847, 1669, 830, 271, 2109, 49959, 5940, 20616, 12154, 30981, 3342, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetchIntoObjectPool_Failure(t *testing.T) { cfgBuilder := testcfg.NewGitalyCfgBuilder() cfg, repos := cfgBuilder.BuildWithRepoAt(t, t.Name()) locator := config.NewLocator(cfg) gitCmdFactory := git.NewExecCommandFactory(cfg) catfileCache := catfile.NewCache(cfg) t.Cleanup(catfileCache.Stop) server := NewServer( cfg, locator, gitCmdFactory, catfileCache, transaction.NewManager(cfg, backchannel.NewRegistry()), ) ctx, cancel := testhelper.Context() defer cancel() pool := initObjectPool(t, cfg, cfg.Storages[0]) poolWithDifferentStorage := pool.ToProto() poolWithDifferentStorage.Repository.StorageName = "some other storage" testCases := []struct { description string request *gitalypb.FetchIntoObjectPoolRequest code codes.Code errMsg string }{ { description: "empty origin", request: &gitalypb.FetchIntoObjectPoolRequest{ ObjectPool: pool.ToProto(), }, code: codes.InvalidArgument, errMsg: "origin is empty", }, { description: "empty pool", request: &gitalypb.FetchIntoObjectPoolRequest{ Origin: repos[0], }, code: codes.InvalidArgument, errMsg: "object pool is empty", }, { description: "origin and pool do not share the same storage", request: &gitalypb.FetchIntoObjectPoolRequest{ Origin: repos[0], ObjectPool: poolWithDifferentStorage, }, code: codes.InvalidArgument, errMsg: "origin has different storage than object pool", }, } for _, tc := range testCases { t.Run(tc.description, func(t *testing.T) { _, err := server.FetchIntoObjectPool(ctx, tc.request) require.Error(t, err) testhelper.RequireGrpcCode(t, err, tc.code) assert.Contains(t, err.Error(), tc.errMsg) }) } }
explode_data.jsonl/62527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 695 }
[ 2830, 3393, 20714, 26591, 1190, 10551, 1400, 9373, 1155, 353, 8840, 836, 8, 341, 50286, 3297, 1669, 1273, 14072, 7121, 38, 2174, 88, 42467, 3297, 741, 50286, 11, 45774, 1669, 13286, 3297, 25212, 2354, 25243, 1655, 1155, 11, 259, 2967, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInt64ArrayScanBytes(t *testing.T) { for _, tt := range Int64ArrayStringTests { bytes := []byte(tt.str) arr := Int64Array{5, 5, 5} err := arr.Scan(bytes) if err != nil { t.Fatalf("Expected no error for %q, got %v", bytes, err) } if !reflect.DeepEqual(arr, tt.arr) { t.Errorf("Expected %+v for %q, got %+v", tt.arr, bytes, arr) } } }
explode_data.jsonl/5328
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 1072, 21, 19, 1857, 26570, 7078, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 1333, 21, 19, 1857, 703, 18200, 341, 197, 70326, 1669, 3056, 3782, 47152, 9528, 340, 197, 36511, 1669, 1333, 21, 19, 1857, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIsCanonicalHeader(t *testing.T) { expected := true result := isCanonicalHeader("content-type") if result != expected { t.Error(util.FormatTest("isCanonicalHeader", strconv.FormatBool(result), strconv.FormatBool(expected))) } expected = false result = isCanonicalHeader("Accept-Encoding") if result != expected { t.Error(util.FormatTest("isCanonicalHeader", strconv.FormatBool(result), strconv.FormatBool(expected))) } }
explode_data.jsonl/27170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 3872, 70914, 4047, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 830, 198, 9559, 1669, 374, 70914, 4047, 445, 1796, 10604, 5130, 743, 1102, 961, 3601, 341, 197, 3244, 6141, 67811, 9978, 2271, 445, 285, 70914, 4047, 497, 33317, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGlobalToGlobal(t *testing.T) { if globalV4 == nil { t.Skip("no globalV4 addresses configured") return } testUseFirst(t, globalV4, globalV4, loopbackV4) testUseFirst(t, globalV6, globalV6, loopbackV6) }
explode_data.jsonl/3802
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 11646, 1249, 11646, 1155, 353, 8840, 836, 8, 341, 743, 3644, 53, 19, 621, 2092, 341, 197, 3244, 57776, 445, 2152, 3644, 53, 19, 14230, 19755, 1138, 197, 853, 198, 197, 532, 18185, 10253, 5338, 1155, 11, 3644, 53, 19, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTaskConversion(t *testing.T) { versions := []apis.Convertible{&v1beta1.Task{}} tests := []struct { name string in *Task wantErr bool }{{ name: "simple conversion", in: &Task{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", Generation: 1, }, Spec: TaskSpec{ TaskSpec: v1beta1.TaskSpec{ Description: "test", Steps: []v1beta1.Step{{Container: corev1.Container{ Image: "foo", }}}, Volumes: []corev1.Volume{{}}, Params: []v1beta1.ParamSpec{{ Name: "param-1", Type: v1beta1.ParamTypeString, Description: "My first param", }}, Resources: &v1beta1.TaskResources{ Inputs: []v1beta1.TaskResource{{ResourceDeclaration: v1beta1.ResourceDeclaration{ Name: "input-1", Type: resource.PipelineResourceTypeGit, }}}, Outputs: []v1beta1.TaskResource{{ResourceDeclaration: v1beta1.ResourceDeclaration{ Name: "output-1", Type: resource.PipelineResourceTypeGit, }}}, }, }, }, }, }, { name: "deprecated and non deprecated inputs", in: &Task{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", Generation: 1, }, Spec: TaskSpec{ TaskSpec: v1beta1.TaskSpec{ Resources: &v1beta1.TaskResources{ Inputs: []v1beta1.TaskResource{{ResourceDeclaration: v1beta1.ResourceDeclaration{ Name: "input-1", Type: resource.PipelineResourceTypeGit, }}}, }, }, Inputs: &Inputs{ Resources: []TaskResource{{ResourceDeclaration: ResourceDeclaration{ Name: "input-1", Type: resource.PipelineResourceTypeGit, }}}, }, }, }, wantErr: true, }, { name: "deprecated and non deprecated params", in: &Task{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", Generation: 1, }, Spec: TaskSpec{ TaskSpec: v1beta1.TaskSpec{ Params: []v1beta1.ParamSpec{{ Name: "param-1", Type: v1beta1.ParamTypeString, Description: "My first param", }}, }, Inputs: &Inputs{ Params: []ParamSpec{{ Name: "param-1", Type: v1beta1.ParamTypeString, Description: "My first param", }}, }, }, }, wantErr: true, }, { name: "deprecated and non deprecated outputs", in: &Task{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", Namespace: "bar", 
Generation: 1, }, Spec: TaskSpec{ TaskSpec: v1beta1.TaskSpec{ Resources: &v1beta1.TaskResources{ Outputs: []v1beta1.TaskResource{{ResourceDeclaration: v1beta1.ResourceDeclaration{ Name: "output-1", Type: resource.PipelineResourceTypeGit, }}}, }, }, Outputs: &Outputs{ Resources: []TaskResource{{ResourceDeclaration: ResourceDeclaration{ Name: "output-1", Type: resource.PipelineResourceTypeGit, }}}, }, }, }, wantErr: true, }} for _, test := range tests { for _, version := range versions { t.Run(test.name, func(t *testing.T) { ver := version if err := test.in.ConvertTo(context.Background(), ver); err != nil { if !test.wantErr { t.Errorf("ConvertTo() = %v", err) } return } t.Logf("ConvertTo() = %#v", ver) got := &Task{} if err := got.ConvertFrom(context.Background(), ver); err != nil { t.Errorf("ConvertFrom() = %v", err) } t.Logf("ConvertFrom() = %#v", got) if d := cmp.Diff(test.in, got); d != "" { t.Errorf("roundtrip %s", diff.PrintWantGot(d)) } }) } } }
explode_data.jsonl/15946
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1763 }
[ 2830, 3393, 6262, 48237, 1155, 353, 8840, 836, 8, 341, 197, 28290, 1669, 3056, 13725, 36179, 1238, 90, 5, 85, 16, 19127, 16, 28258, 6257, 630, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 17430, 414, 353, 6262, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestNullAt(t *testing.T) { for i := uint16(0); i < BatchSize; i++ { if i%3 == 0 { require.True(t, nulls3.NullAt(i)) } else { require.False(t, nulls3.NullAt(i)) } } }
explode_data.jsonl/37154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 3280, 1655, 1155, 353, 8840, 836, 8, 341, 2023, 600, 1669, 2622, 16, 21, 7, 15, 1215, 600, 366, 33904, 1695, 26, 600, 1027, 341, 197, 743, 600, 4, 18, 621, 220, 15, 341, 298, 17957, 32443, 1155, 11, 845, 82, 18, 2397...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHandleSyntaxErr(t *testing.T) { var testData = []string{ "asdasdasd", "((a=1", "((a='foo'", "((a='foo')", "a=foo", "a in (foo,bar)", "a in (foo)", "a ∩ (foo)", "a != foo", } data := map[string]interface{}{ "a": "foo", } ass := assert.New(t) for _, tc := range testData { ok, err := Match(tc, data) ass.False(ok) ass.Error(err) } }
explode_data.jsonl/65934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 6999, 33890, 7747, 1155, 353, 8840, 836, 8, 341, 2405, 67348, 284, 3056, 917, 515, 197, 197, 1, 300, 34889, 34889, 67, 756, 197, 197, 1, 1188, 64, 28, 16, 756, 197, 197, 1, 1188, 64, 1131, 7975, 38330, 197, 197, 1, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTuple(t *testing.T) { require := require.New(t) tup := NewTuple( NewLiteral(int64(1), sql.Int64), NewLiteral(float64(3.14), sql.Float64), NewLiteral("foo", sql.LongText), ) ctx := sql.NewEmptyContext() require.False(tup.IsNullable()) require.True(tup.Resolved()) require.Equal(sql.CreateTuple(sql.Int64, sql.Float64, sql.LongText), tup.Type()) result, err := tup.Eval(ctx, nil) require.NoError(err) require.Equal([]interface{}{int64(1), float64(3.14), "foo"}, result) tup = NewTuple( NewGetField(0, sql.LongText, "text", true), ) require.True(tup.IsNullable()) require.True(tup.Resolved()) require.Equal(sql.LongText, tup.Type()) result, err = tup.Eval(ctx, sql.NewRow("foo")) require.NoError(err) require.Equal("foo", result) tup = NewTuple( NewGetField(0, sql.LongText, "text", true), NewLiteral("bar", sql.LongText), ) require.False(tup.IsNullable()) require.True(tup.Resolved()) require.Equal(sql.CreateTuple(sql.LongText, sql.LongText), tup.Type()) result, err = tup.Eval(ctx, sql.NewRow("foo")) require.NoError(err) require.Equal([]interface{}{"foo", "bar"}, result) tup = NewTuple( NewUnresolvedColumn("bar"), NewLiteral("bar", sql.LongText), ) require.False(tup.Resolved()) require.False(tup.IsNullable()) }
explode_data.jsonl/58254
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 535 }
[ 2830, 3393, 28681, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 3244, 454, 1669, 1532, 28681, 1006, 197, 197, 3564, 17350, 1548, 21, 19, 7, 16, 701, 5704, 7371, 21, 19, 1326, 197, 197, 3564, 17350, 8268, 21, 19,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBasic(t *testing.T) { tc := setup(t, "basic", false) defer tc.cleanup() fmt.Printf("Test: Basic Join/Leave ...\n") tc.join(0) ck := tc.clerk() ck.Put("a", "x") ck.Append("a", "b") if ck.Get("a") != "xb" { t.Fatalf("Get got wrong value") } keys := make([]string, 10) vals := make([]string, len(keys)) for i := 0; i < len(keys); i++ { keys[i] = strconv.Itoa(rand.Int()) vals[i] = strconv.Itoa(rand.Int()) ck.Put(keys[i], vals[i]) } // are keys still there after joins? for g := 1; g < len(tc.groups); g++ { tc.join(g) time.Sleep(1 * time.Second) for i := 0; i < len(keys); i++ { v := ck.Get(keys[i]) if v != vals[i] { t.Fatalf("joining; wrong value; g=%v k=%v wanted=%v got=%v", g, keys[i], vals[i], v) } vals[i] = strconv.Itoa(rand.Int()) ck.Put(keys[i], vals[i]) } } // are keys still there after leaves? for g := 0; g < len(tc.groups)-1; g++ { tc.leave(g) time.Sleep(1 * time.Second) for i := 0; i < len(keys); i++ { v := ck.Get(keys[i]) if v != vals[i] { t.Fatalf("leaving; wrong value; g=%v k=%v wanted=%v got=%v", g, keys[i], vals[i], v) } vals[i] = strconv.Itoa(rand.Int()) ck.Put(keys[i], vals[i]) } } fmt.Printf(" ... Passed\n") }
explode_data.jsonl/18805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 624 }
[ 2830, 3393, 15944, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 6505, 1155, 11, 330, 22342, 497, 895, 340, 16867, 17130, 87689, 2822, 11009, 19367, 445, 2271, 25, 14625, 16471, 14, 21833, 98760, 77, 5130, 78255, 5446, 7, 15, 692, 197, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestWebSocketEmptyTLS(t *testing.T) { testSrv := getTestServer(t, "127.0.0.1:8000") defer testSrv.Close() wsYaml := ` kind: WebSocketServer name: websocket-demo port: 10081 https: true backend: wss://127.0.0.1:8000 certBase64: 1234 keyBase64: 2234 wssCertBase64: 3234 wssKeyBase64: 4234 ` super := supervisor.NewDefaultMock() superSpec, err := super.NewSpec(wsYaml) require.Nil(t, err) _, err = superSpec.ObjectSpec().(*Spec).tlsConfig() assert.NotNil(t, err) _, err = superSpec.ObjectSpec().(*Spec).wssTLSConfig() assert.NotNil(t, err) ws := &WebSocketServer{} ws.Init(superSpec) assert.Nil(t, ws.Validate()) time.Sleep(50 * time.Millisecond) ws.Close() }
explode_data.jsonl/65817
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 61238, 3522, 45439, 1155, 353, 8840, 836, 8, 341, 18185, 50, 10553, 1669, 633, 2271, 5475, 1155, 11, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 23, 15, 15, 15, 1138, 16867, 1273, 50, 10553, 10421, 2822, 6692, 82, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigRefine(t *testing.T) { cfgFile, ctx, cluster, ns := "testdata/kubeconfig-test.yml", "test2", "cluster2", "ns2" uu := map[string]struct { flags *genericclioptions.ConfigFlags issue bool context, cluster, namespace string }{ "plain": { flags: &genericclioptions.ConfigFlags{KubeConfig: &cfgFile}, issue: false, context: "test1", cluster: "cluster1", namespace: "ns1", }, "overrideNS": { flags: &genericclioptions.ConfigFlags{ KubeConfig: &cfgFile, Context: &ctx, ClusterName: &cluster, Namespace: &ns, }, issue: false, context: ctx, cluster: cluster, namespace: ns, }, "badContext": { flags: &genericclioptions.ConfigFlags{ KubeConfig: &cfgFile, Context: &ns, ClusterName: &cluster, Namespace: &ns, }, issue: true, }, } for k := range uu { u := uu[k] t.Run(k, func(t *testing.T) { mc := NewMockConnection() m.When(mc.ValidNamespaces()).ThenReturn(namespaces(), nil) mk := newMockSettings(u.flags) cfg := config.NewConfig(mk) err := cfg.Refine(u.flags, nil, client.NewConfig(u.flags)) if u.issue { assert.NotNil(t, err) } else { assert.Nil(t, err) assert.Equal(t, u.context, cfg.K9s.CurrentContext) assert.Equal(t, u.cluster, cfg.K9s.CurrentCluster) assert.Equal(t, u.namespace, cfg.ActiveNamespace()) } }) } }
explode_data.jsonl/19249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 709 }
[ 2830, 3393, 2648, 3945, 482, 1155, 353, 8840, 836, 8, 341, 50286, 1703, 11, 5635, 11, 10652, 11, 12268, 1669, 330, 92425, 14109, 3760, 1676, 16839, 33936, 497, 330, 1944, 17, 497, 330, 18855, 17, 497, 330, 4412, 17, 698, 10676, 84, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTransactionFetcherDrop(t *testing.T) { testTransactionFetcherParallel(t, txFetcherTest{ init: func() *TxFetcher { return NewTxFetcher( func(common.Hash) bool { return false }, func(txs []*types.Transaction) []error { return make([]error, len(txs)) }, func(string, []common.Hash) error { return nil }, ) }, steps: []interface{}{ // Set up a few hashes into various stages doTxNotify{peer: "A", hashes: []common.Hash{{0x01}}}, doWait{time: txArriveTimeout, step: true}, doTxNotify{peer: "A", hashes: []common.Hash{{0x02}}}, doWait{time: txArriveTimeout, step: true}, doTxNotify{peer: "A", hashes: []common.Hash{{0x03}}}, isWaiting(map[string][]common.Hash{ "A": {{0x03}}, }), isScheduled{ tracking: map[string][]common.Hash{ "A": {{0x01}, {0x02}}, }, fetching: map[string][]common.Hash{ "A": {{0x01}}, }, }, // Drop the peer and ensure everything's cleaned out doDrop("A"), isWaiting(nil), isScheduled{nil, nil, nil}, // Push the node into a dangling (timeout) state doTxNotify{peer: "A", hashes: []common.Hash{testTxsHashes[0]}}, doWait{time: txArriveTimeout, step: true}, isWaiting(nil), isScheduled{ tracking: map[string][]common.Hash{ "A": {testTxsHashes[0]}, }, fetching: map[string][]common.Hash{ "A": {testTxsHashes[0]}, }, }, doWait{time: txFetchTimeout, step: true}, isWaiting(nil), isScheduled{ tracking: nil, fetching: nil, dangling: map[string][]common.Hash{ "A": {}, }, }, // Drop the peer and ensure everything's cleaned out doDrop("A"), isWaiting(nil), isScheduled{nil, nil, nil}, }, }) }
explode_data.jsonl/52226
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 782 }
[ 2830, 3393, 8070, 97492, 19871, 1155, 353, 8840, 836, 8, 341, 18185, 8070, 97492, 16547, 1155, 11, 259, 9770, 2995, 261, 2271, 515, 197, 28248, 25, 2915, 368, 353, 51, 9770, 2995, 261, 341, 298, 853, 1532, 51, 9770, 2995, 261, 1006, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewMarkdownMessage(t *testing.T) { tests := []struct { name string want *MarkdownMessage }{ { name: "Should return a MarkdownMessage instance", want: &MarkdownMessage{}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := NewMarkdownMessage(); !reflect.DeepEqual(got, tt.want) { t.Errorf("NewMarkdownMessage() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/74059
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 3564, 68005, 2052, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 50780, 353, 68005, 2052, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 14996, 470, 264, 73192, 2052, 2867, 75...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBytes_Type(t *testing.T) { tests := []struct { name string e Bytes want Type }{ {name: "", e: Bytes{[]byte("")}, want: ValTypes.Bytes}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := tt.e.Type(); !got.Equal(tt.want) { t.Errorf("Bytes.Type() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/34754
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 7078, 13729, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 7727, 262, 30024, 198, 197, 50780, 3990, 198, 197, 59403, 197, 197, 47006, 25, 7342, 384, 25, 30024, 90, 1294, 3782, 39047...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDaoUV(t *testing.T) { convey.Convey("addUV+countUV", t, func(ctx convey.C) { var ( action = "test" hour = 0 slot = 0 kind = "test" mid = int64(0) err error counts []int64 ) ctx.Convey("When everything goes positive", func(ctx convey.C) { err = d.AddUV(context.Background(), action, hour, slot, mid, kind) ctx.So(err, convey.ShouldBeNil) keys := []string{keyUV(action, hour, slot, kind)} counts, err = d.CountUV(context.Background(), keys) ctx.So(err, convey.ShouldBeNil) ctx.So(len(counts), convey.ShouldEqual, 1) }) }) }
explode_data.jsonl/21784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 12197, 22246, 1155, 353, 8840, 836, 8, 341, 37203, 5617, 4801, 5617, 445, 718, 22246, 10, 1830, 22246, 497, 259, 11, 2915, 7502, 20001, 727, 8, 341, 197, 2405, 2399, 298, 38933, 284, 330, 1944, 698, 298, 9598, 413, 256, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDownload(t *testing.T) { co := newCapturedOutput() co.override() defer co.reset() testCases := []struct { requester bool expectedDir string flags map[string]string }{ { requester: true, expectedDir: "", flags: map[string]string{"exercise": "bogus-exercise"}, }, { requester: true, expectedDir: "", flags: map[string]string{"uuid": "bogus-id"}, }, { requester: false, expectedDir: filepath.Join("users", "alice"), flags: map[string]string{"uuid": "bogus-id"}, }, { requester: true, expectedDir: filepath.Join("teams", "bogus-team"), flags: map[string]string{"exercise": "bogus-exercise", "track": "bogus-track", "team": "bogus-team"}, }, } for _, tc := range testCases { tmpDir, err := ioutil.TempDir("", "download-cmd") defer os.RemoveAll(tmpDir) assert.NoError(t, err) ts := fakeDownloadServer(strconv.FormatBool(tc.requester), tc.flags["team"]) defer ts.Close() v := viper.New() v.Set("workspace", tmpDir) v.Set("apibaseurl", ts.URL) v.Set("token", "abc123") cfg := config.Config{ UserViperConfig: v, } flags := pflag.NewFlagSet("fake", pflag.PanicOnError) setupDownloadFlags(flags) for name, value := range tc.flags { flags.Set(name, value) } err = runDownload(cfg, flags, []string{}) assert.NoError(t, err) targetDir := filepath.Join(tmpDir, tc.expectedDir) assertDownloadedCorrectFiles(t, targetDir) dir := filepath.Join(targetDir, "bogus-track", "bogus-exercise") b, err := ioutil.ReadFile(workspace.NewExerciseFromDir(dir).MetadataFilepath()) var metadata workspace.ExerciseMetadata err = json.Unmarshal(b, &metadata) assert.NoError(t, err) assert.Equal(t, "bogus-track", metadata.Track) assert.Equal(t, "bogus-exercise", metadata.ExerciseSlug) assert.Equal(t, tc.requester, metadata.IsRequester) } }
explode_data.jsonl/15069
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 809 }
[ 2830, 3393, 11377, 1155, 353, 8840, 836, 8, 341, 197, 1015, 1669, 501, 40536, 3073, 5097, 741, 197, 1015, 96802, 741, 16867, 1062, 13857, 2822, 18185, 37302, 1669, 3056, 1235, 341, 197, 23555, 261, 256, 1807, 198, 197, 42400, 6184, 914,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUpdateUserPassword(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() password := "newpassword1" _, err := th.Client.UpdateUserPassword(th.BasicUser.Id, th.BasicUser.Password, password) require.NoError(t, err) resp, err := th.Client.UpdateUserPassword(th.BasicUser.Id, password, "") require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, password, "junk") require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.UpdateUserPassword("junk", password, password) require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, "", password) require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, "junk", password) require.Error(t, err) CheckBadRequestStatus(t, resp) _, err = th.Client.UpdateUserPassword(th.BasicUser.Id, password, th.BasicUser.Password) require.NoError(t, err) th.Client.Logout() resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, password, password) require.Error(t, err) CheckUnauthorizedStatus(t, resp) th.LoginBasic2() resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, password, password) require.Error(t, err) CheckForbiddenStatus(t, resp) th.LoginBasic() // Test lockout th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.MaximumLoginAttempts = 2 }) // Fail twice resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, "badpwd", "newpwd") require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, "badpwd", "newpwd") require.Error(t, err) CheckBadRequestStatus(t, resp) // Should fail because account is locked out resp, err = th.Client.UpdateUserPassword(th.BasicUser.Id, th.BasicUser.Password, "newpwd") CheckErrorID(t, err, "api.user.check_user_login_attempts.too_many.app_error") CheckUnauthorizedStatus(t, resp) // System admin can update another user's password adminSetPassword := 
"pwdsetbyadmin" _, err = th.SystemAdminClient.UpdateUserPassword(th.BasicUser.Id, "", adminSetPassword) require.NoError(t, err) _, _, err = th.Client.Login(th.BasicUser.Email, adminSetPassword) require.NoError(t, err) }
explode_data.jsonl/47527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 808 }
[ 2830, 3393, 4289, 1474, 4876, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 58199, 1669, 330, 931, 3833, 16, 698, 197, 6878, 1848, 1669, 270, 11716, 16689, 1474, 4876, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListBuckets(t *testing.T) { ts := newTestServer(t, withoutInitialBuckets()) defer ts.Close() svc := ts.s3Client() assertBuckets := func(expected ...string) { t.Helper() rs, err := svc.ListBuckets(&s3.ListBucketsInput{}) ts.OK(err) var found []string for _, bucket := range rs.Buckets { found = append(found, *bucket.Name) } sort.Strings(expected) sort.Strings(found) if !reflect.DeepEqual(found, expected) { t.Fatalf("buckets:\nexp: %v\ngot: %v", expected, found) } } assertBucketTime := func(bucket string, created time.Time) { t.Helper() rs, err := svc.ListBuckets(&s3.ListBucketsInput{}) ts.OK(err) for _, v := range rs.Buckets { if *v.Name == bucket { if *v.CreationDate != created { t.Fatal("time mismatch for bucket", bucket, "expected:", created, "found:", *v.CreationDate) } return } } t.Fatal("bucket", bucket, "not found") } assertBuckets() ts.backendCreateBucket("test") assertBuckets("test") assertBucketTime("test", defaultDate) ts.backendCreateBucket("test2") assertBuckets("test", "test2") assertBucketTime("test2", defaultDate) ts.Advance(1 * time.Minute) ts.backendCreateBucket("test3") assertBuckets("test", "test2", "test3") assertBucketTime("test", defaultDate) assertBucketTime("test2", defaultDate) assertBucketTime("test3", defaultDate.Add(1*time.Minute)) }
explode_data.jsonl/22249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 567 }
[ 2830, 3393, 852, 33, 38551, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 501, 2271, 5475, 1155, 11, 2041, 6341, 33, 38551, 2398, 16867, 10591, 10421, 741, 1903, 7362, 1669, 10591, 514, 18, 2959, 2822, 6948, 33, 38551, 1669, 2915, 15253, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReconciler_PromoteCanaries_Unblock(t *testing.T) { job := mock.Job() job.TaskGroups[0].Update = canaryUpdate // Create an existing deployment that has placed some canaries and mark them // promoted d := structs.NewDeployment(job) s := &structs.DeploymentState{ Promoted: true, DesiredTotal: 10, DesiredCanaries: 2, PlacedAllocs: 2, } d.TaskGroups[job.TaskGroups[0].Name] = s // Create 10 allocations from the old job var allocs []*structs.Allocation for i := 0; i < 10; i++ { alloc := mock.Alloc() alloc.Job = job alloc.JobID = job.ID alloc.NodeID = uuid.Generate() alloc.Name = structs.AllocName(job.ID, job.TaskGroups[0].Name, uint(i)) alloc.TaskGroup = job.TaskGroups[0].Name allocs = append(allocs, alloc) } // Create the canaries handled := make(map[string]allocUpdateType) for i := 0; i < 2; i++ { // Create one canary canary := mock.Alloc() canary.Job = job canary.JobID = job.ID canary.NodeID = uuid.Generate() canary.Name = structs.AllocName(job.ID, job.TaskGroups[0].Name, uint(i)) canary.TaskGroup = job.TaskGroups[0].Name s.PlacedCanaries = append(s.PlacedCanaries, canary.ID) canary.DeploymentID = d.ID canary.DeploymentStatus = &structs.AllocDeploymentStatus{ Healthy: helper.BoolToPtr(true), } allocs = append(allocs, canary) handled[canary.ID] = allocUpdateFnIgnore } mockUpdateFn := allocUpdateFnMock(handled, allocUpdateFnDestructive) reconciler := NewAllocReconciler(testLogger(), mockUpdateFn, false, job.ID, job, d, allocs, nil) r := reconciler.Compute() // Assert the correct results assertResults(t, r, &resultExpectation{ createDeployment: nil, deploymentUpdates: nil, destructive: 2, stop: 2, desiredTGUpdates: map[string]*structs.DesiredUpdates{ job.TaskGroups[0].Name: { Stop: 2, DestructiveUpdate: 2, Ignore: 8, }, }, }) assertNoCanariesStopped(t, d, r.stop) assertNamesHaveIndexes(t, intRange(2, 3), destructiveResultsToNames(r.destructiveUpdate)) assertNamesHaveIndexes(t, intRange(0, 1), stopResultsToNames(r.stop)) }
explode_data.jsonl/67266
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 889 }
[ 2830, 3393, 693, 40446, 5769, 1088, 441, 1272, 6713, 5431, 40687, 4574, 1155, 353, 8840, 836, 8, 341, 68577, 1669, 7860, 45293, 741, 68577, 28258, 22173, 58, 15, 936, 4289, 284, 646, 658, 4289, 271, 197, 322, 4230, 458, 6350, 23172, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCopyWriterWrite(t *testing.T) { resp := httptest.NewRecorder() cr := util.NewCopyWriter(resp) b := []byte("data") _, err := cr.Write(b) require.NoError(t, err) cb, err := cr.Body() require.NoError(t, err) assert.Equal(t, b, cb) }
explode_data.jsonl/63015
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 12106, 6492, 7985, 1155, 353, 8840, 836, 8, 341, 34653, 1669, 54320, 70334, 7121, 47023, 2822, 91492, 1669, 4094, 7121, 12106, 6492, 20267, 692, 2233, 1669, 3056, 3782, 445, 691, 5130, 197, 6878, 1848, 1669, 1560, 4073, 1883, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxnCoordSenderCondenseLockSpans(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) a := roachpb.Span{Key: roachpb.Key("a"), EndKey: roachpb.Key(nil)} b := roachpb.Span{Key: roachpb.Key("b"), EndKey: roachpb.Key(nil)} c := roachpb.Span{Key: roachpb.Key("c"), EndKey: roachpb.Key(nil)} d := roachpb.Span{Key: roachpb.Key("dddddd"), EndKey: roachpb.Key(nil)} e := roachpb.Span{Key: roachpb.Key("e"), EndKey: roachpb.Key(nil)} aToBClosed := roachpb.Span{Key: roachpb.Key("a"), EndKey: roachpb.Key("b").Next()} cToEClosed := roachpb.Span{Key: roachpb.Key("c"), EndKey: roachpb.Key("e").Next()} fTof0 := roachpb.Span{Key: roachpb.Key("f"), EndKey: roachpb.Key("f0")} g := roachpb.Span{Key: roachpb.Key("g"), EndKey: roachpb.Key(nil)} g0Tog1 := roachpb.Span{Key: roachpb.Key("g0"), EndKey: roachpb.Key("g1")} fTog1Closed := roachpb.Span{Key: roachpb.Key("f"), EndKey: roachpb.Key("g1")} testCases := []struct { span roachpb.Span expLocks []roachpb.Span expLocksSize int64 }{ {span: a, expLocks: []roachpb.Span{a}, expLocksSize: 1}, {span: b, expLocks: []roachpb.Span{a, b}, expLocksSize: 2}, {span: c, expLocks: []roachpb.Span{a, b, c}, expLocksSize: 3}, {span: d, expLocks: []roachpb.Span{a, b, c, d}, expLocksSize: 9}, // Note that c-e condenses and then lists first. {span: e, expLocks: []roachpb.Span{cToEClosed, a, b}, expLocksSize: 5}, {span: fTof0, expLocks: []roachpb.Span{cToEClosed, a, b, fTof0}, expLocksSize: 8}, {span: g, expLocks: []roachpb.Span{cToEClosed, a, b, fTof0, g}, expLocksSize: 9}, {span: g0Tog1, expLocks: []roachpb.Span{fTog1Closed, cToEClosed, aToBClosed}, expLocksSize: 9}, // Add a key in the middle of a span, which will get merged on commit. 
{span: c, expLocks: []roachpb.Span{aToBClosed, cToEClosed, fTog1Closed}, expLocksSize: 9}, } splits := []roachpb.Span{ {Key: roachpb.Key("a"), EndKey: roachpb.Key("c")}, {Key: roachpb.Key("c"), EndKey: roachpb.Key("f")}, {Key: roachpb.Key("f"), EndKey: roachpb.Key("j")}, } descs := []roachpb.RangeDescriptor{testMetaRangeDescriptor} for i, s := range splits { descs = append(descs, roachpb.RangeDescriptor{ RangeID: roachpb.RangeID(2 + i), StartKey: roachpb.RKey(s.Key), EndKey: roachpb.RKey(s.EndKey), InternalReplicas: []roachpb.ReplicaDescriptor{{NodeID: 1, StoreID: 1}}, }) } descDB := mockRangeDescriptorDBForDescs(descs...) s := createTestDB(t) st := s.Store.ClusterSettings() trackedWritesMaxSize.Override(&st.SV, 10) /* 10 bytes and it will condense */ defer s.Stop() // Check end transaction locks, which should be condensed and split // at range boundaries. expLocks := []roachpb.Span{aToBClosed, cToEClosed, fTog1Closed} sendFn := func(_ context.Context, ba roachpb.BatchRequest) (*roachpb.BatchResponse, error) { resp := ba.CreateReply() resp.Txn = ba.Txn if req, ok := ba.GetArg(roachpb.EndTxn); ok { if !req.(*roachpb.EndTxnRequest).Commit { t.Errorf("expected commit to be true") } et := req.(*roachpb.EndTxnRequest) if a, e := et.LockSpans, expLocks; !reflect.DeepEqual(a, e) { t.Errorf("expected end transaction to have locks %+v; got %+v", e, a) } resp.Txn.Status = roachpb.COMMITTED } return resp, nil } ambient := log.AmbientContext{Tracer: tracing.NewTracer()} ds := NewDistSender(DistSenderConfig{ AmbientCtx: ambient, Clock: s.Clock, NodeDescs: s.Gossip, RPCContext: s.Cfg.RPCContext, TestingKnobs: ClientTestingKnobs{ TransportFactory: adaptSimpleTransport(sendFn), }, RangeDescriptorDB: descDB, Settings: cluster.MakeTestingClusterSettings(), }) tsf := NewTxnCoordSenderFactory( TxnCoordSenderFactoryConfig{ AmbientCtx: ambient, Settings: st, Clock: s.Clock, Stopper: s.Stopper(), }, ds, ) db := kv.NewDB(ambient, tsf, s.Clock, s.Stopper()) ctx := context.Background() txn 
:= kv.NewTxn(ctx, db, 0 /* gatewayNodeID */) // Disable txn pipelining so that all write spans are immediately // added to the transaction's lock footprint. if err := txn.DisablePipelining(); err != nil { t.Fatal(err) } for i, tc := range testCases { if tc.span.EndKey != nil { if err := txn.DelRange(ctx, tc.span.Key, tc.span.EndKey); err != nil { t.Fatal(err) } } else { if err := txn.Put(ctx, tc.span.Key, []byte("value")); err != nil { t.Fatal(err) } } tcs := txn.Sender().(*TxnCoordSender) locks := tcs.interceptorAlloc.txnPipeliner.lockFootprint.asSlice() if a, e := locks, tc.expLocks; !reflect.DeepEqual(a, e) { t.Errorf("%d: expected keys %+v; got %+v", i, e, a) } locksSize := int64(0) for _, i := range locks { locksSize += int64(len(i.Key) + len(i.EndKey)) } if a, e := locksSize, tc.expLocksSize; a != e { t.Errorf("%d: keys size expected %d; got %d", i, e, a) } } if err := txn.Commit(ctx); err != nil { t.Fatal(err) } }
explode_data.jsonl/76883
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2234 }
[ 2830, 3393, 31584, 77, 19437, 20381, 49696, 1117, 11989, 6406, 596, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 11323, 1669, 926, 610, 16650, 85309, 90, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeployInjectImageEnvVar(t *testing.T) { f := newIBDFixture(t, k8s.EnvGKE) defer f.TearDown() manifest := NewSanchoManifestWithImageInEnvVar(f) _, err := f.ibd.BuildAndDeploy(f.ctx, f.st, buildTargets(manifest), store.BuildStateSet{}) if err != nil { t.Fatal(err) } entities, err := k8s.ParseYAMLFromString(f.k8s.Yaml) if err != nil { t.Fatal(err) } if !assert.Equal(t, 1, len(entities)) { return } d := entities[0].Obj.(*v1.Deployment) if !assert.Equal(t, 1, len(d.Spec.Template.Spec.Containers)) { return } c := d.Spec.Template.Spec.Containers[0] // container image always gets injected assert.Equal(t, "gcr.io/some-project-162817/sancho:tilt-11cd0b38bc3ceb95", c.Image) expectedEnv := []corev1.EnvVar{ // sancho2 gets injected here because it sets match_in_env_vars in docker_build {Name: "foo", Value: "gcr.io/some-project-162817/sancho2:tilt-11cd0b38bc3ceb95"}, // sancho does not because it doesn't {Name: "bar", Value: "gcr.io/some-project-162817/sancho"}, } assert.Equal(t, expectedEnv, c.Env) }
explode_data.jsonl/38269
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 470 }
[ 2830, 3393, 69464, 13738, 1906, 14359, 3962, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 3256, 5262, 12735, 1155, 11, 595, 23, 82, 81214, 38, 3390, 340, 16867, 282, 836, 682, 4454, 2822, 197, 42315, 1669, 1532, 23729, 958, 38495, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestUnmarshalNestedAnonymousStructs_Controversial(t *testing.T) { type Nested struct { Value string `toml:"nested"` } type Deep struct { Nested } type Document struct { Deep Value string `toml:"own"` } var doc Document err := Unmarshal([]byte(`nested = "nested value"`+"\n"+`own = "own value"`), &doc) if err == nil { t.Fatal("should error") } }
explode_data.jsonl/46355
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 1806, 27121, 71986, 32684, 9422, 82, 62, 818, 12563, 530, 1155, 353, 8840, 836, 8, 341, 13158, 71742, 2036, 341, 197, 47399, 914, 1565, 37401, 75, 2974, 59271, 8805, 197, 532, 13158, 18183, 2036, 341, 197, 18317, 9980, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPlayer_IsSpottedBy_HasSpotted_False(t *testing.T) { pl := playerWithProperty("m_bSpottedByMask.000", st.PropertyValue{IntVal: 0}) pl.EntityID = 1 other := newPlayer(0) other.EntityID = 2 assert.False(t, pl.IsSpottedBy(other)) assert.False(t, other.HasSpotted(pl)) }
explode_data.jsonl/12174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 4476, 31879, 6406, 15521, 1359, 2039, 300, 6406, 15521, 1400, 710, 1155, 353, 8840, 836, 8, 341, 72213, 1669, 2781, 2354, 3052, 445, 76, 880, 6406, 15521, 1359, 12686, 13, 15, 15, 15, 497, 357, 15727, 1130, 90, 1072, 2208,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestService_Handle_Inviter(t *testing.T) { mockStore := &mockstorage.MockStore{Store: make(map[string]mockstorage.DBEntry)} storeProv := mockstorage.NewCustomMockStoreProvider(mockStore) k := newKMS(t, storeProv) prov := &protocol.MockProvider{ StoreProvider: storeProv, ServiceMap: map[string]interface{}{ mediator.Coordination: &mockroute.MockMediatorSvc{}, }, CustomKMS: k, KeyTypeValue: kms.ED25519Type, KeyAgreementTypeValue: kms.X25519ECDHKWType, } ctx := &context{ outboundDispatcher: prov.OutboundDispatcher(), crypto: &tinkcrypto.Crypto{}, kms: k, keyType: kms.ED25519Type, keyAgreementType: kms.X25519ECDHKWType, } verPubKey, encPubKey := newSigningAndEncryptionDIDKeys(t, ctx) mtp := transport.MediaTypeRFC0019EncryptedEnvelope ctx.vdRegistry = &mockvdr.MockVDRegistry{CreateValue: createDIDDocWithKey(verPubKey, encPubKey, mtp)} connRec, err := connection.NewRecorder(prov) require.NoError(t, err) require.NotNil(t, connRec) ctx.connectionRecorder = connRec doc, err := ctx.vdRegistry.Create(testMethod, nil) require.NoError(t, err) s, err := New(prov) require.NoError(t, err) actionCh := make(chan service.DIDCommAction, 10) err = s.RegisterActionEvent(actionCh) require.NoError(t, err) statusCh := make(chan service.StateMsg, 10) err = s.RegisterMsgEvent(statusCh) require.NoError(t, err) completedFlag := make(chan struct{}) respondedFlag := make(chan struct{}) go msgEventListener(t, statusCh, respondedFlag, completedFlag) go func() { service.AutoExecuteActionEvent(actionCh) }() invitation := &Invitation{ Type: InvitationMsgType, ID: randomString(), Label: "Bob", RecipientKeys: []string{verPubKey}, ServiceEndpoint: "http://alice.agent.example.com:8081", } err = ctx.connectionRecorder.SaveInvitation(invitation.ID, invitation) require.NoError(t, err) thid := randomString() // Invitation was previously sent by Alice to Bob. 
// Bob now sends a did-exchange Invitation payloadBytes, err := json.Marshal( &Request{ Type: RequestMsgType, ID: thid, Label: "Bob", Thread: &decorator.Thread{ PID: invitation.ID, }, DID: doc.DIDDocument.ID, DocAttach: unsignedDocAttach(t, doc.DIDDocument), }) require.NoError(t, err) msg, err := service.ParseDIDCommMsgMap(payloadBytes) require.NoError(t, err) _, err = s.HandleInbound(msg, service.NewDIDCommContext(doc.DIDDocument.ID, "", nil)) require.NoError(t, err) select { case <-respondedFlag: case <-time.After(2 * time.Second): require.Fail(t, "didn't receive post event responded") } // Alice automatically sends exchange Response to Bob // Bob replies with an ACK payloadBytes, err = json.Marshal( &model.Ack{ Type: AckMsgType, ID: randomString(), Status: "OK", Thread: &decorator.Thread{ID: thid}, }) require.NoError(t, err) didMsg, err := service.ParseDIDCommMsgMap(payloadBytes) require.NoError(t, err) _, err = s.HandleInbound(didMsg, service.NewDIDCommContext(doc.DIDDocument.ID, "", nil)) require.NoError(t, err) select { case <-completedFlag: case <-time.After(2 * time.Second): require.Fail(t, "didn't receive post event complete") } validateState(t, s, thid, findNamespace(AckMsgType), (&completed{}).Name()) }
explode_data.jsonl/30516
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1357 }
[ 2830, 3393, 1860, 42714, 62, 15174, 2015, 1155, 353, 8840, 836, 8, 341, 77333, 6093, 1669, 609, 16712, 16172, 24664, 6093, 90, 6093, 25, 1281, 9147, 14032, 60, 16712, 16172, 22537, 5874, 10569, 57279, 80261, 1669, 7860, 16172, 7121, 10268...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDiagnosticsCountMigrator inserts 500 lsif_data_documents rows (split
// across two dump IDs) with num_diagnostics=0, then verifies that two Up
// passes of the migrator (batch size 250) populate the counts, and that two
// Down passes report progress back to zero. Progress is checked at 0, 0.5,
// and 1 around each pass.
func TestDiagnosticsCountMigrator(t *testing.T) {
	db := dbtest.NewDB(t)
	store := lsifstore.NewStore(db, conf.DefaultClient(), &observation.TestContext)
	migrator := NewDiagnosticsCountMigrator(store, 250)
	serializer := lsifstore.NewSerializer()

	// assertProgress fails the test unless the migrator reports exactly the
	// expected progress fraction.
	assertProgress := func(expectedProgress float64) {
		if progress, err := migrator.Progress(context.Background()); err != nil {
			t.Fatalf("unexpected error querying progress: %s", err)
		} else if progress != expectedProgress {
			t.Errorf("unexpected progress. want=%.2f have=%.2f", expectedProgress, progress)
		}
	}

	// assertCounts compares the num_diagnostics column (ordered by path, which
	// matches insertion order thanks to the zero-padded path names) against
	// the expected per-row counts.
	assertCounts := func(expectedCounts []int) {
		query := sqlf.Sprintf(`SELECT num_diagnostics FROM lsif_data_documents ORDER BY path`)
		if counts, err := basestore.ScanInts(store.Query(context.Background(), query)); err != nil {
			t.Fatalf("unexpected error querying num diagnostics: %s", err)
		} else if diff := cmp.Diff(expectedCounts, counts); diff != "" {
			t.Errorf("unexpected counts (-want +got):\n%s", diff)
		}
	}

	n := 500
	expectedCounts := make([]int, 0, n)
	diagnostics := make([]precise.DiagnosticData, 0, n)

	// Row i carries i+1 diagnostics (the diagnostics slice grows by one each
	// iteration), so the expected count for row i is i+1.
	for i := 0; i < n; i++ {
		expectedCounts = append(expectedCounts, i+1)
		diagnostics = append(diagnostics, precise.DiagnosticData{Code: fmt.Sprintf("c%d", i)})

		data, err := serializer.MarshalLegacyDocumentData(precise.DocumentData{
			Diagnostics: diagnostics,
		})
		if err != nil {
			t.Fatalf("unexpected error serializing document data: %s", err)
		}

		if err := store.Exec(context.Background(), sqlf.Sprintf(
			"INSERT INTO lsif_data_documents (dump_id, path, data, schema_version, num_diagnostics) VALUES (%s, %s, %s, 1, 0)",
			42+i/(n/2), // 50% id=42, 50% id=43
			fmt.Sprintf("p%04d", i),
			data,
		)); err != nil {
			t.Fatalf("unexpected error inserting row: %s", err)
		}
	}

	assertProgress(0)

	// Each Up pass appears to process one dump's worth of rows: half done
	// after the first pass, all done after the second.
	if err := migrator.Up(context.Background()); err != nil {
		t.Fatalf("unexpected error performing up migration: %s", err)
	}
	assertProgress(0.5)

	if err := migrator.Up(context.Background()); err != nil {
		t.Fatalf("unexpected error performing up migration: %s", err)
	}
	assertProgress(1)

	assertCounts(expectedCounts)

	// Down migrates in the same two steps, returning progress to zero.
	if err := migrator.Down(context.Background()); err != nil {
		t.Fatalf("unexpected error performing down migration: %s", err)
	}
	assertProgress(0.5)

	if err := migrator.Down(context.Background()); err != nil {
		t.Fatalf("unexpected error performing down migration: %s", err)
	}
	assertProgress(0)
}
explode_data.jsonl/1836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 881 }
[ 2830, 3393, 35, 18938, 2507, 44, 5233, 850, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 2927, 1944, 7121, 3506, 1155, 340, 57279, 1669, 19597, 333, 4314, 7121, 6093, 9791, 11, 2335, 13275, 2959, 1507, 609, 77960, 8787, 1972, 340, 2109, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRocksPebbleCheck(t *testing.T) { defer leaktest.AfterTest(t) ctx := context.Background() if *check != "" { if _, err := os.Stat(*check); os.IsNotExist(err) { t.Fatal(err) } run := testRun{ ctx: ctx, t: t, checkFile: *check, restarts: true, engineSequences: [][]engineImpl{ {engineImplRocksDB}, {engineImplPebble}, {engineImplRocksDB, engineImplPebble}, }, } runMetaTest(run) } }
explode_data.jsonl/82664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 49, 25183, 47, 3065, 891, 3973, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 340, 20985, 1669, 2266, 19047, 2822, 743, 353, 2028, 961, 1591, 341, 197, 743, 8358, 1848, 1669, 2643, 53419, 4071, 2028, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEntry_SelectHomeEnd(t *testing.T) { e, window := setupSelection(t, false) defer teardownImageTest(window) c := window.Canvas() // Hold shift to continue selection typeKeys(e, keyShiftLeftDown) // T e[s t i]n g -> end -> // T e[s t i n g] typeKeys(e, fyne.KeyEnd) test.AssertRendersToMarkup(t, "entry/selection_add_to_end.xml", c) assert.Equal(t, "sting", e.SelectedText()) // T e[s t i n g] -> home -> [T e]s t i n g typeKeys(e, fyne.KeyHome) test.AssertRendersToMarkup(t, "entry/selection_add_to_home.xml", c) assert.Equal(t, "Te", e.SelectedText()) }
explode_data.jsonl/12356
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 5874, 58073, 7623, 3727, 1155, 353, 8840, 836, 8, 341, 7727, 11, 3241, 1669, 6505, 11177, 1155, 11, 895, 340, 16867, 49304, 1906, 2271, 15906, 340, 1444, 1669, 3241, 54121, 2822, 197, 322, 22749, 6407, 311, 3060, 6589, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEscapeStringField checks models.EscapeStringField against inputs
// containing double quotes and backslashes, then round-trips each escaped
// value through the line-protocol parser (via test) to confirm the original
// field value is recovered.
func TestEscapeStringField(t *testing.T) {
	cases := []struct {
		in     string
		expOut string
	}{
		{in: "abcdefg", expOut: "abcdefg"},
		{in: `one double quote " .`, expOut: `one double quote \" .`},
		{in: `quote " then backslash \ .`, expOut: `quote \" then backslash \\ .`},
		{in: `backslash \ then quote " .`, expOut: `backslash \\ then quote \" .`},
	}

	for _, c := range cases {
		// Escapes as expected. (The original comment said "Unescapes", but
		// EscapeStringField escapes its input.)
		got := models.EscapeStringField(c.in)
		if got != c.expOut {
			t.Errorf("unexpected result from EscapeStringField(%s)\ngot [%s]\nexp [%s]\n", c.in, got, c.expOut)
			continue
		}

		// Parsing the escaped value embedded in a point line should yield the
		// original (unescaped) field value.
		pointLine := fmt.Sprintf(`t s="%s"`, got)
		test(t, pointLine, NewTestPoint(
			"t",
			models.NewTags(nil),
			models.Fields{"s": c.in},
			time.Unix(0, 0),
		))
	}
}
explode_data.jsonl/16977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 48124, 703, 1877, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 17430, 257, 914, 198, 197, 48558, 2662, 914, 198, 197, 59403, 197, 197, 90, 258, 25, 330, 41202, 70, 497, 1343, 2662, 25, 330, 41202...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestMenusPageMultipleOutputFormats builds a site whose pages publish to
// different output-format combinations (html, amp, and a custom "damp") and
// asserts that the main menu rendered for each format links to the correct
// per-format URLs.
// NOTE(review): the exact line breaks inside the raw-string fixtures were
// reconstructed from a whitespace-collapsed source — confirm against the
// upstream file before relying on byte-level content.
func TestMenusPageMultipleOutputFormats(t *testing.T) {
	config := `
baseURL = "https://example.com"

# DAMP is similar to AMP, but not permalinkable.
[outputFormats]
[outputFormats.damp]
mediaType = "text/html"
path = "damp"
`

	b := newTestSitesBuilder(t).WithConfigFile("toml", config)

	b.WithContent("_index.md", `
---
Title: Home Sweet Home
outputs: [ "html", "amp" ]
menu: "main"
---
`)

	b.WithContent("blog/html-amp.md", `
---
Title: AMP and HTML
outputs: [ "html", "amp" ]
menu: "main"
---
`)

	b.WithContent("blog/html.md", `
---
Title: HTML only
outputs: [ "html" ]
menu: "main"
---
`)

	b.WithContent("blog/amp.md", `
---
Title: AMP only
outputs: [ "amp" ]
menu: "main"
---
`)

	// The template prints Title|URL| for every main-menu entry.
	b.WithTemplatesAdded("index.html", `{{ range .Site.Menus.main }}{{ .Title }}|{{ .URL }}|{{ end }}`)

	b.Build(BuildCfg{})

	// html output: amp-only pages keep their /amp/ prefix; html pages do not.
	b.AssertFileContent("public/index.html", "AMP and HTML|/blog/html-amp/|AMP only|/amp/blog/amp/|Home Sweet Home|/|HTML only|/blog/html/|")
	// amp output: pages that publish amp get /amp/ URLs; html-only keeps its html URL.
	b.AssertFileContent("public/amp/index.html", "AMP and HTML|/amp/blog/html-amp/|AMP only|/amp/blog/amp/|Home Sweet Home|/amp/|HTML only|/blog/html/|")
}
explode_data.jsonl/51843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 453 }
[ 2830, 3393, 77944, 2665, 32089, 5097, 44599, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 22074, 3152, 3144, 284, 330, 2428, 1110, 8687, 905, 1837, 2, 12360, 47, 374, 4428, 311, 62908, 11, 714, 537, 79228, 480, 624, 58, 3006, 44599, 921...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCantMergeConflict creates two conflicting branches via the web/API
// surface, then drops to direct model/git calls to verify that both merge
// styles (merge and rebase) fail with their specific conflict error types.
func TestCantMergeConflict(t *testing.T) {
	onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
		session := loginUser(t, "user1")

		// Fork user2/repo1 as user1/repo1 and edit README.md on two new
		// branches so they conflict with each other.
		testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
		testEditFileToNewBranch(t, session, "user1", "repo1", "master", "conflict", "README.md", "Hello, World (Edited Once)\n")
		testEditFileToNewBranch(t, session, "user1", "repo1", "master", "base", "README.md", "Hello, World (Edited Twice)\n")

		// Use API to create a conflicting pr
		token := getTokenForLoggedInUser(t, session)
		req := NewRequestWithJSON(t, http.MethodPost, fmt.Sprintf("/api/v1/repos/%s/%s/pulls?token=%s", "user1", "repo1", token), &api.CreatePullRequestOption{
			Head:  "conflict",
			Base:  "base",
			Title: "create a conflicting pr",
		})
		session.MakeRequest(t, req, 201)

		// Now this PR will be marked conflict - or at least a race will do - so drop down to pure code at this point...
		user1 := models.AssertExistsAndLoadBean(t, &models.User{
			Name: "user1",
		}).(*models.User)
		repo1 := models.AssertExistsAndLoadBean(t, &models.Repository{
			OwnerID: user1.ID,
			Name:    "repo1",
		}).(*models.Repository)

		pr := models.AssertExistsAndLoadBean(t, &models.PullRequest{
			HeadRepoID: repo1.ID,
			BaseRepoID: repo1.ID,
			HeadBranch: "conflict",
			BaseBranch: "base",
		}).(*models.PullRequest)

		gitRepo, err := git.OpenRepository(models.RepoPath(user1.Name, repo1.Name))
		assert.NoError(t, err)

		// A merge-style merge must fail with a merge-conflict error.
		err = pull.Merge(pr, user1, gitRepo, models.MergeStyleMerge, "CONFLICT")
		assert.Error(t, err, "Merge should return an error due to conflict")
		assert.True(t, models.IsErrMergeConflicts(err), "Merge error is not a conflict error")

		// A rebase-style merge must fail with a rebase-conflict error.
		err = pull.Merge(pr, user1, gitRepo, models.MergeStyleRebase, "CONFLICT")
		assert.Error(t, err, "Merge should return an error due to conflict")
		assert.True(t, models.IsErrRebaseConflicts(err), "Merge error is not a conflict error")

		gitRepo.Close()
	})
}
explode_data.jsonl/70639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 775 }
[ 2830, 3393, 34, 517, 52096, 57974, 1155, 353, 8840, 836, 8, 341, 24630, 38, 632, 64, 6727, 1155, 11, 2915, 1155, 353, 8840, 836, 11, 342, 632, 64, 3144, 353, 1085, 20893, 8, 341, 197, 25054, 1669, 87169, 1155, 11, 330, 872, 16, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoClassDefaultDependency(t *testing.T) { moduleSystem := NewModuleSystem( map[string][]string{ "client": {"clients/*"}, "endpoint": {"endpoints/*"}, }, map[string][]string{ "endpoint": { "clients/example", }, }, false, ) var err error err = moduleSystem.RegisterClass(ModuleClass{ Name: "client", NamePlural: "clients", ClassType: MultiModule, }) if err != nil { t.Errorf("Unexpected error registering client class: %s", err) } err = moduleSystem.RegisterClassType( "client", "http", &TestHTTPClientGenerator{}, ) if err != nil { t.Errorf("Unexpected error registering http client class type: %s", err) } err = moduleSystem.RegisterClass(ModuleClass{ Name: "endpoint", NamePlural: "endpoints", ClassType: MultiModule, }) if err != nil { t.Errorf("Unexpected error registering endpoint class: %s", err) } err = moduleSystem.RegisterClassType( "endpoint", "http", &TestHTTPEndpointGenerator{}, ) if err != nil { t.Errorf("Unexpected error registering http client class type: %s", err) } currentDir := getTestDirName() testServiceDir := path.Join(currentDir, "test-service") _, err = moduleSystem.GenerateBuild( "github.com/uber/zanzibar/codegen/test-service", testServiceDir, path.Join(testServiceDir, "build"), true, ) if err == nil { t.Errorf("Expected failure due to default dependency directory which is not a dependency") } }
explode_data.jsonl/34588
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 547 }
[ 2830, 3393, 2753, 1957, 3675, 36387, 1155, 353, 8840, 836, 8, 341, 54020, 2320, 1669, 1532, 3332, 2320, 1006, 197, 19567, 14032, 45725, 917, 515, 298, 197, 1, 2972, 788, 256, 5212, 31869, 1057, 7115, 298, 197, 1, 32540, 788, 5212, 408...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestKnativeServiceWithCustomContainerName applies the trait catalog to an
// integration that enables the knative-service trait and sets a custom
// container name via the container trait, then asserts the generated Knative
// Service uses that container name.
func TestKnativeServiceWithCustomContainerName(t *testing.T) {
	catalog, err := camel.DefaultCatalog()
	assert.Nil(t, err)

	traitCatalog := NewCatalog(context.TODO(), nil)

	// Build a deploying integration on the Knative profile, forcing the
	// knative-service deployer and a custom container name.
	environment := Environment{
		CamelCatalog: catalog,
		Catalog:      traitCatalog,
		Integration: &v1.Integration{
			ObjectMeta: metav1.ObjectMeta{
				Name:      KnativeServiceTestName,
				Namespace: KnativeServiceTestNamespace,
			},
			Status: v1.IntegrationStatus{
				Phase: v1.IntegrationPhaseDeploying,
			},
			Spec: v1.IntegrationSpec{
				Profile: v1.TraitProfileKnative,
				Traits: map[string]v1.TraitSpec{
					"deployer": {
						Configuration: map[string]string{
							"kind": "knative-service",
						},
					},
					"knative-service": {
						Configuration: map[string]string{
							"enabled": "true",
							"auto":    "false",
						},
					},
					"container": {
						Configuration: map[string]string{
							"name": "my-container-name",
						},
					},
				},
			},
		},
		IntegrationKit: &v1.IntegrationKit{
			Status: v1.IntegrationKitStatus{
				Phase: v1.IntegrationKitPhaseReady,
			},
		},
		Platform: &v1.IntegrationPlatform{
			Spec: v1.IntegrationPlatformSpec{
				Cluster: v1.IntegrationPlatformClusterOpenShift,
				Build: v1.IntegrationPlatformBuildSpec{
					PublishStrategy: v1.IntegrationPlatformBuildPublishStrategyS2I,
					Registry:        v1.IntegrationPlatformRegistrySpec{Address: "registry"},
				},
			},
		},
		EnvVars:        make([]corev1.EnvVar, 0),
		ExecutedTraits: make([]Trait, 0),
		Resources:      kubernetes.NewCollection(),
	}
	environment.Platform.ResyncStatusFullConfig()

	err = traitCatalog.apply(&environment)

	assert.Nil(t, err)
	assert.NotEmpty(t, environment.ExecutedTraits)
	// Both the knative-service and container traits must have run.
	assert.NotNil(t, environment.GetTrait("knative-service"))
	assert.NotNil(t, environment.GetTrait("container"))

	// Find the generated Knative Service for this integration.
	s := environment.Resources.GetKnativeService(func(service *serving.Service) bool {
		return service.Name == KnativeServiceTestName
	})
	assert.NotNil(t, s)

	// The custom container name from the trait config must propagate into the
	// service's pod template.
	assert.Equal(
		t,
		environment.Integration.Spec.Traits["container"].Configuration["name"],
		s.Spec.ConfigurationSpec.Template.Spec.Containers[0].Name,
	)
}
explode_data.jsonl/55846
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 890 }
[ 2830, 3393, 42, 29738, 1860, 2354, 10268, 4502, 675, 1155, 353, 8840, 836, 8, 341, 1444, 7750, 11, 1848, 1669, 49152, 13275, 41606, 741, 6948, 59678, 1155, 11, 1848, 692, 25583, 1315, 41606, 1669, 1532, 41606, 5378, 90988, 1507, 2092, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCollection_BulkWrite(t *testing.T) { mgoClient := Ins() result, err := mgoClient.C("test").BulkWrite([]mongo.WriteModel{ &mongo.InsertOneModel{Document: bson.M{"name": "x"}}, &mongo.DeleteOneModel{Filter: bson.M{"name": "x"}}, }) if err != nil { t.Errorf("BulkWrite error: %s", err) t.FailNow() } t.Log("BulkWrite ok", bson.M{ "inserted": result.InsertedCount, "deleted": result.DeletedCount, "matched": result.MatchedCount, "upserted": result.UpsertedCount, "modified": result.ModifiedCount, }) }
explode_data.jsonl/30098
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 6482, 1668, 21785, 7985, 1155, 353, 8840, 836, 8, 341, 2109, 3346, 2959, 1669, 9726, 2822, 9559, 11, 1848, 1669, 296, 3346, 2959, 727, 445, 1944, 1827, 88194, 7985, 10556, 53070, 4073, 1712, 515, 197, 197, 5, 53070, 23142, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBasicGetInvalid(t *testing.T) { client := newBasicClient() result, err := client.GetInvalid(context.Background(), nil) if err == nil { t.Fatal("GetInvalid expected an error") } if r := cmp.Diff(result, BasicGetInvalidResponse{}); r != "" { t.Fatal(r) } }
explode_data.jsonl/15339
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 15944, 1949, 7928, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 501, 15944, 2959, 741, 9559, 11, 1848, 1669, 2943, 2234, 7928, 5378, 19047, 1507, 2092, 340, 743, 1848, 621, 2092, 341, 197, 3244, 26133, 445, 1949, 7928, 3601, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestBlobberGRPCService_GetReferencePath_Success stubs allocation
// verification and reference-path lookup, then checks that GetReferencePath
// succeeds and surfaces the stubbed root directory name in the response.
func TestBlobberGRPCService_GetReferencePath_Success(t *testing.T) {
	req := &blobbergrpc.GetReferencePathRequest{
		Context: &blobbergrpc.RequestContext{
			Client:     "client",
			ClientKey:  "",
			Allocation: "",
		},
		Paths:      `["something"]`,
		Path:       "",
		Allocation: "",
	}

	mockStorageHandler := &storageHandlerI{}
	mockReferencePackage := &mocks.PackageHandler{}

	// Any allocation verification resolves to a fixed allocation record.
	mockStorageHandler.On("verifyAllocation", mock.Anything, req.Allocation, false).Return(&allocation.Allocation{
		ID:      "allocationId",
		Tx:      req.Allocation,
		OwnerID: "owner",
	}, nil)

	// The reference package returns a directory "test" with one child file.
	mockReferencePackage.On("GetReferencePathFromPaths", mock.Anything, mock.Anything, mock.Anything).Return(&reference.Ref{
		Name:     "test",
		Type:     reference.DIRECTORY,
		Children: []*reference.Ref{{Name: "test1", Type: reference.FILE}},
	}, nil)

	svc := newGRPCBlobberService(mockStorageHandler, mockReferencePackage)
	resp, err := svc.GetReferencePath(context.Background(), req)
	if err != nil {
		t.Fatal("unexpected error")
	}

	assert.Equal(t, resp.ReferencePath.MetaData.DirMetaData.Name, "test")
}
explode_data.jsonl/66834
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 37985, 652, 8626, 4872, 1860, 13614, 8856, 1820, 87161, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 609, 35112, 652, 56585, 2234, 8856, 1820, 1900, 515, 197, 70871, 25, 609, 35112, 652, 56585, 9659, 1972, 515, 298, 71724, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestLogRestore writes values 0-9 directly to a datastore, wraps it with a
// backup log, writes 10-19 through the wrapper, then restores the single
// produced log file into a fresh datastore and verifies all 20 values are
// present.
func TestLogRestore(t *testing.T) {
	logdir, err := ioutil.TempDir("", "backupds-test-")
	require.NoError(t, err)

	defer os.RemoveAll(logdir) // nolint

	ds1 := datastore.NewMapDatastore()
	// Values 0-9 exist before the log wrapper is attached.
	putVals(t, ds1, 0, 10)

	bds, err := Wrap(ds1, logdir)
	require.NoError(t, err)

	// Values 10-19 go through the wrapper and are recorded in the log.
	putVals(t, bds, 10, 20)

	require.NoError(t, bds.Close())

	// Exactly one log file must have been produced.
	fls, err := ioutil.ReadDir(logdir)
	require.NoError(t, err)
	require.Equal(t, 1, len(fls))

	bf, err := ioutil.ReadFile(filepath.Join(logdir, fls[0].Name()))
	require.NoError(t, err)

	// Restoring the log into an empty datastore recovers the full 0-19 range,
	// so the log captured pre-existing entries as well as writes.
	ds2 := datastore.NewMapDatastore()
	require.NoError(t, RestoreInto(bytes.NewReader(bf), ds2))

	checkVals(t, ds2, 0, 20, true)
}
explode_data.jsonl/26962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 2201, 56284, 1155, 353, 8840, 836, 8, 341, 6725, 3741, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 31371, 5356, 16839, 12, 1138, 17957, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 12531, 3741, 8, 442, 308, 337, 396, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCluserStatus calls ClusterStatus on the shared test client c and fails
// only if the call itself errors; the returned status is not inspected.
// NOTE(review): the test name is missing a "t" (Cluser -> Cluster); left
// unchanged so existing `go test -run` filters keep matching.
func TestCluserStatus(t *testing.T) {
	_, err := c.ClusterStatus()
	if err != nil {
		t.Fatal(err)
	}
	// TODO Here we could do a little more testing
	// with an active cluster
}
explode_data.jsonl/77820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 5066, 872, 2522, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 272, 72883, 2522, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 197, 322, 5343, 5692, 582, 1410, 653, 264, 2632, 803, 7497, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestConverter_ToEntity(t *testing.T) { t.Run("success all nullable properties filled", func(t *testing.T) { versionModel := *fixModelVersion("v1.2", true, "v1.1", false) versionConv := version.NewConverter() //WHEN versionEntity, err := versionConv.ToEntity(versionModel) //THEN require.NoError(t, err) assertVersion(t, versionEntity, versionModel) }) t.Run("success all nullable properties empty", func(t *testing.T) { versionModel := model.Version{} versionConv := version.NewConverter() //WHEN versionEntity, err := versionConv.ToEntity(versionModel) //THEN require.NoError(t, err) assertVersion(t, versionEntity, versionModel) }) }
explode_data.jsonl/37072
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 14920, 38346, 3030, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5630, 678, 13258, 5888, 10199, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 74954, 1712, 1669, 353, 5743, 1712, 5637, 445, 85, 16, 13, 17, 497, 830, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMD5(t *testing.T) { for i, this := range []struct { input string expectedHash string }{ {"Hello world, gophers!", "b3029f756f98f79e7f1b7f1d1f0dd53b"}, {"Lorem ipsum dolor", "06ce65ac476fc656bea3fca5d02cfd81"}, } { result, err := md5(this.input) if err != nil { t.Errorf("md5 returned error: %s", err) } if result != this.expectedHash { t.Errorf("[%d] md5: expected '%s', got '%s'", i, this.expectedHash, result) } } _, err := md5(t) if err == nil { t.Error("Expected error from md5") } }
explode_data.jsonl/9255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 6076, 20, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 419, 1669, 2088, 3056, 1235, 341, 197, 22427, 286, 914, 198, 197, 42400, 6370, 914, 198, 197, 59403, 197, 197, 4913, 9707, 1879, 11, 342, 5127, 388, 18789, 330, 65, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIntegrationDNSLookupUnknownDNS(t *testing.T) { ctx := context.Background() results := DNSLookup(ctx, DNSLookupConfig{ Hostname: "ooni.io", ServerNetwork: "antani", }) if !strings.HasSuffix(results.Error.Error(), "unsupported network value") { t.Fatal("expected a different error here") } }
explode_data.jsonl/53532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 52464, 61088, 34247, 13790, 61088, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 55497, 1669, 27598, 34247, 7502, 11, 27598, 34247, 2648, 515, 197, 197, 88839, 25, 414, 330, 9009, 72, 4245, 756, 197, 92075, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCommentsWhitespace checks how the parser normalizes trailing whitespace
// in comment text: trailing spaces/tabs are kept, the trailing \r\n or \n is
// stripped, but interior carriage returns are preserved.
func TestCommentsWhitespace(t *testing.T) {
	cases := []struct {
		note     string
		module   string
		expected []string
	}{
		{
			note:     "trailing spaces",
			module:   "# a comment \t \n",
			expected: []string{" a comment \t "},
		},
		{
			note:     "trailing carriage return",
			module:   "# a comment\r\n",
			expected: []string{" a comment"},
		},
		{
			note:     "trailing carriage return double newline",
			module:   "# a comment\r\n\n",
			expected: []string{" a comment"},
		},
		{
			note:     "double trailing carriage return newline",
			module:   "#\r\r\n",
			expected: []string{"\r"},
		},
		{
			note:     "double trailing carriage return",
			module:   "#\r\r",
			expected: []string{"\r"},
		},
		{
			note:     "carriage return",
			module:   "#\r",
			expected: []string{""},
		},
		{
			note:     "carriage return in comment",
			module:   "# abc\rdef\r\n",
			expected: []string{" abc\rdef"},
		},
	}

	for _, tc := range cases {
		t.Run(tc.note, func(t *testing.T) {
			_, comments, err := ParseStatements("", tc.module)
			if err != nil {
				t.Fatalf("Unexpected parse error: %s", err)
			}
			// Compare each expected comment against the parsed comment text;
			// %q in the failure message makes invisible whitespace visible.
			for i, exp := range tc.expected {
				actual := string(comments[i].Text)
				if exp != actual {
					t.Errorf("Expected comment text (len %d):\n\n\t%q\n\nbut got (len %d):\n\n\t%q\n\n", len(exp), exp, len(actual), actual)
				}
			}
		})
	}
}
explode_data.jsonl/50490
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 637 }
[ 2830, 3393, 17373, 73804, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 9038, 1272, 257, 914, 198, 197, 54020, 256, 914, 198, 197, 42400, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 9038, 1272, 25, 257, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSeries_StartsWith(t *testing.T) { // assert that function returns a correct bool Series s := NewSeries("test", nil, "foo", "bar is", "food now", "now now!!") assert.Equal(t, NewSeries("HasPrefix(test)", false, true, false, true, false), s.StartsWith("foo")) }
explode_data.jsonl/54087
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 25544, 38056, 16056, 1155, 353, 8840, 836, 8, 341, 197, 322, 2060, 429, 729, 4675, 264, 4396, 1807, 11131, 198, 1903, 1669, 1532, 25544, 445, 1944, 497, 2092, 11, 330, 7975, 497, 330, 2257, 374, 497, 330, 13915, 1431, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEscapingBlockResource loads a config fixture whose resource (and
// provisioner) use an escaping "_" block, and verifies that meta-arguments
// outside the escaping block (count) are interpreted while arguments inside
// it (for_each, normal, nested blocks) pass through as plain body content.
func TestEscapingBlockResource(t *testing.T) {
	// (this also tests escaping blocks in provisioner blocks, because
	// they only appear nested inside resource blocks.)
	parser := NewParser(nil)
	mod, diags := parser.LoadConfigDir("testdata/escaping-blocks/resource")
	assertNoDiagnostics(t, diags)
	if mod == nil {
		t.Fatal("got nil root module; want non-nil")
	}

	rc := mod.ManagedResources["foo.bar"]
	if rc == nil {
		t.Fatal("no managed resource named foo.bar")
	}

	t.Run("resource body", func(t *testing.T) {
		// count sits outside the escaping block, so it must be decoded as a
		// real meta-argument with value 2.
		if got := rc.Count; got == nil {
			t.Errorf("count not set; want count = 2")
		} else {
			got, diags := got.Value(nil)
			assertNoDiagnostics(t, diags)
			if want := cty.NumberIntVal(2); !want.RawEquals(got) {
				t.Errorf("wrong count\ngot:  %#v\nwant: %#v", got, want)
			}
		}
		if got, want := rc.ForEach, hcl.Expression(nil); got != want {
			// Shouldn't have any count because our test fixture only has
			// for_each in the escaping block.
			t.Errorf("wrong for_each\ngot:  %#v\nwant: %#v", got, want)
		}

		// Decode the remaining body: the escaped attributes and blocks should
		// appear as ordinary content, including "count" and "for_each".
		schema := &hcl.BodySchema{
			Attributes: []hcl.AttributeSchema{
				{Name: "normal", Required: true},
				{Name: "count", Required: true},
				{Name: "for_each", Required: true},
			},
			Blocks: []hcl.BlockHeaderSchema{
				{Type: "normal_block"},
				{Type: "lifecycle"},
				{Type: "_"},
			},
		}
		content, diags := rc.Config.Content(schema)
		assertNoDiagnostics(t, diags)

		normalVal, diags := content.Attributes["normal"].Expr.Value(nil)
		assertNoDiagnostics(t, diags)
		if got, want := normalVal, cty.StringVal("yes"); !want.RawEquals(got) {
			t.Errorf("wrong value for 'normal'\ngot:  %#v\nwant: %#v", got, want)
		}
		countVal, diags := content.Attributes["count"].Expr.Value(nil)
		assertNoDiagnostics(t, diags)
		if got, want := countVal, cty.StringVal("not actually count"); !want.RawEquals(got) {
			t.Errorf("wrong value for 'count'\ngot:  %#v\nwant: %#v", got, want)
		}

		var gotBlockTypes []string
		for _, block := range content.Blocks {
			gotBlockTypes = append(gotBlockTypes, block.Type)
		}
		wantBlockTypes := []string{"normal_block", "lifecycle", "_"}
		if diff := cmp.Diff(gotBlockTypes, wantBlockTypes); diff != "" {
			t.Errorf("wrong block types\n%s", diff)
		}
	})

	t.Run("provisioner body", func(t *testing.T) {
		if got, want := len(rc.Managed.Provisioners), 1; got != want {
			t.Fatalf("wrong number of provisioners %d; want %d", got, want)
		}
		pc := rc.Managed.Provisioners[0]

		// As above: "when" is escaped here, so it should surface as a plain
		// attribute rather than being interpreted.
		schema := &hcl.BodySchema{
			Attributes: []hcl.AttributeSchema{
				{Name: "when", Required: true},
				{Name: "normal", Required: true},
			},
			Blocks: []hcl.BlockHeaderSchema{
				{Type: "normal_block"},
				{Type: "lifecycle"},
				{Type: "_"},
			},
		}
		content, diags := pc.Config.Content(schema)
		assertNoDiagnostics(t, diags)

		normalVal, diags := content.Attributes["normal"].Expr.Value(nil)
		assertNoDiagnostics(t, diags)
		if got, want := normalVal, cty.StringVal("yep"); !want.RawEquals(got) {
			t.Errorf("wrong value for 'normal'\ngot:  %#v\nwant: %#v", got, want)
		}
		whenVal, diags := content.Attributes["when"].Expr.Value(nil)
		assertNoDiagnostics(t, diags)
		if got, want := whenVal, cty.StringVal("hell freezes over"); !want.RawEquals(got) {
			t.Errorf("wrong value for 'normal'\ngot:  %#v\nwant: %#v", got, want)
		}
	})
}
explode_data.jsonl/58362
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1354 }
[ 2830, 3393, 36121, 14216, 4713, 4783, 1155, 353, 8840, 836, 8, 341, 197, 322, 320, 574, 1083, 7032, 52654, 10010, 304, 17136, 261, 10010, 11, 1576, 198, 197, 322, 807, 1172, 4994, 24034, 4766, 5101, 10010, 9272, 55804, 1669, 1532, 6570,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestHealthCheckStreamError drives a fake tablet connection through a
// healthy StreamHealthResponse followed by a stream error, and asserts the
// subscriber sees: (1) the initial not-serving notification from AddTablet,
// (2) a serving update with stats, and (3) a not-serving update that retains
// the last known target/stats and carries the stream error.
func TestHealthCheckStreamError(t *testing.T) {
	ts := memorytopo.NewServer("cell")
	hc := createTestHc(ts)
	defer hc.Close()
	tablet := topo.NewTablet(0, "cell", "a")
	tablet.PortMap["vt"] = 1
	// input feeds fake health responses; errCh injects stream failures.
	input := make(chan *querypb.StreamHealthResponse)
	resultChan := hc.Subscribe()
	fc := createFakeConn(tablet, input)
	fc.errCh = make(chan error)
	hc.AddTablet(tablet)
	// Immediately after AddTablet() there will be the first notification.
	want := &TabletHealth{
		Tablet:              tablet,
		Target:              &querypb.Target{},
		Serving:             false,
		MasterTermStartTime: 0,
	}
	result := <-resultChan
	mustMatch(t, want, result, "Wrong TabletHealth data")
	// one tablet after receiving a StreamHealthResponse
	shr := &querypb.StreamHealthResponse{
		TabletAlias:                         tablet.Alias,
		Target:                              &querypb.Target{Keyspace: "k", Shard: "s", TabletType: topodatapb.TabletType_REPLICA},
		Serving:                             true,
		TabletExternallyReparentedTimestamp: 0,
		RealtimeStats:                       &querypb.RealtimeStats{SecondsBehindMaster: 1, CpuUsage: 0.2},
	}
	want = &TabletHealth{
		Tablet:              tablet,
		Target:              &querypb.Target{Keyspace: "k", Shard: "s", TabletType: topodatapb.TabletType_REPLICA},
		Serving:             true,
		Stats:               &querypb.RealtimeStats{SecondsBehindMaster: 1, CpuUsage: 0.2},
		MasterTermStartTime: 0,
	}
	input <- shr
	result = <-resultChan
	mustMatch(t, want, result, "Wrong TabletHealth data")
	// Stream error
	fc.errCh <- fmt.Errorf("some stream error")
	// After the error the tablet is marked not-serving; target and stats
	// from the last good response are expected to be preserved.
	want = &TabletHealth{
		Tablet:              tablet,
		Target:              &querypb.Target{Keyspace: "k", Shard: "s", TabletType: topodatapb.TabletType_REPLICA},
		Serving:             false,
		Stats:               &querypb.RealtimeStats{SecondsBehindMaster: 1, CpuUsage: 0.2},
		MasterTermStartTime: 0,
		LastError:           fmt.Errorf("some stream error"),
	}
	result = <-resultChan
	//TODO: figure out how to compare objects that contain errors using utils.MustMatch
	assert.True(t, want.DeepEqual(result), "Wrong TabletHealth data\n Expected: %v\n Actual: %v", want, result)
}
explode_data.jsonl/47685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 966 }
[ 2830, 3393, 14542, 3973, 3027, 1454, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 4938, 3481, 78, 7121, 5475, 445, 5873, 1138, 9598, 66, 1669, 1855, 2271, 39, 66, 35864, 340, 16867, 50394, 10421, 2822, 26481, 83, 1669, 72519, 7121, 2556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCAinitEnrollmentSigner(t *testing.T) { testDirClean(t) cfg = CAConfig{} ca, err := newCA(configFile, &cfg, &srv, true) if err != nil { t.Fatal("newCA FAILED", err) } cfg.Intermediate.ParentServer.URL = "1" ca, err = newCA(configFile, &cfg, &srv, false) if err != nil { t.Fatal("newCA FAILED", err) } //Rely on default policy cfg.Signing = nil ca.csp = nil err = ca.initEnrollmentSigner() t.Log("ca.initEnrollmentSigner error: ", err) if err == nil { t.Error("initEnrollmentSigner should have failed") } CAclean(ca, t) }
explode_data.jsonl/82704
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 5049, 2327, 1702, 25864, 7264, 261, 1155, 353, 8840, 836, 8, 341, 18185, 6184, 27529, 1155, 340, 50286, 284, 9183, 2648, 16094, 197, 924, 11, 1848, 1669, 501, 5049, 8754, 1703, 11, 609, 14072, 11, 609, 40020, 11, 830, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4