text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestHeadersWireErrors(t *testing.T) { pver := wire.ProtocolVersion wireErr := &wire.MessageError{} hash := mainNetGenesisHash merkleHash := blockOne.Header.MerkleRoot bits := uint32(0x1d00ffff) nonce := uint32(0x9962e301) bh := wire.NewBlockHeader(&hash, &merkleHash, bits, nonce) bh.Version = blockOne.Header.Version bh.Timestamp = blockOne.Header.Timestamp // Headers message with one header. oneHeader := wire.NewMsgHeaders() oneHeader.AddBlockHeader(bh) oneHeaderEncoded := []byte{ 0x01, // VarInt for number of headers. 0x01, 0x00, 0x00, 0x00, // Version 1 0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72, 0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f, 0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c, 0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, // PrevBlock 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // MerkleRoot 0x61, 0xbc, 0x66, 0x49, // Timestamp 0xff, 0xff, 0x00, 0x1d, // Bits 0x01, 0xe3, 0x62, 0x99, // Nonce 0x00, // TxnCount (0 for headers message) } // Message that forces an error by having more than the max allowed // headers. maxHeaders := wire.NewMsgHeaders() for i := 0; i < wire.MaxBlockHeadersPerMsg; i++ { maxHeaders.AddBlockHeader(bh) } maxHeaders.Headers = append(maxHeaders.Headers, bh) maxHeadersEncoded := []byte{ 0xfd, 0xd1, 0x07, // Varint for number of addresses (2001)7D1 } // Intentionally invalid block header that has a transaction count used // to force errors. bhTrans := wire.NewBlockHeader(&hash, &merkleHash, bits, nonce) bhTrans.Version = blockOne.Header.Version bhTrans.Timestamp = blockOne.Header.Timestamp transHeader := wire.NewMsgHeaders() transHeader.AddBlockHeader(bhTrans) transHeaderEncoded := []byte{ 0x01, // VarInt for number of headers. 
0x01, 0x00, 0x00, 0x00, // Version 1 0x6f, 0xe2, 0x8c, 0x0a, 0xb6, 0xf1, 0xb3, 0x72, 0xc1, 0xa6, 0xa2, 0x46, 0xae, 0x63, 0xf7, 0x4f, 0x93, 0x1e, 0x83, 0x65, 0xe1, 0x5a, 0x08, 0x9c, 0x68, 0xd6, 0x19, 0x00, 0x00, 0x00, 0x00, 0x00, // PrevBlock 0x98, 0x20, 0x51, 0xfd, 0x1e, 0x4b, 0xa7, 0x44, 0xbb, 0xbe, 0x68, 0x0e, 0x1f, 0xee, 0x14, 0x67, 0x7b, 0xa1, 0xa3, 0xc3, 0x54, 0x0b, 0xf7, 0xb1, 0xcd, 0xb6, 0x06, 0xe8, 0x57, 0x23, 0x3e, 0x0e, // MerkleRoot 0x61, 0xbc, 0x66, 0x49, // Timestamp 0xff, 0xff, 0x00, 0x1d, // Bits 0x01, 0xe3, 0x62, 0x99, // Nonce 0x01, // TxnCount (should be 0 for headers message, but 1 to force error) } tests := []struct { in *wire.MsgHeaders // Value to encode buf []byte // Wire encoding pver uint32 // Protocol version for wire encoding max int // Max size of fixed buffer to induce errors writeErr error // Expected write error readErr error // Expected read error }{ // Latest protocol version with intentional read/write errors. // Force error in header count. {oneHeader, oneHeaderEncoded, pver, 0, io.ErrShortWrite, io.EOF}, // Force error in block header. {oneHeader, oneHeaderEncoded, pver, 5, io.ErrShortWrite, io.EOF}, // Force error with greater than max headers. {maxHeaders, maxHeadersEncoded, pver, 3, wireErr, wireErr}, // Force error with number of transactions. {transHeader, transHeaderEncoded, pver, 81, io.ErrShortWrite, io.EOF}, // Force error with included transactions. {transHeader, transHeaderEncoded, pver, len(transHeaderEncoded), nil, wireErr}, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Encode to wire format. w := newFixedWriter(test.max) err := test.in.BtcEncode(w, test.pver) if reflect.TypeOf(err) != reflect.TypeOf(test.writeErr) { t.Errorf("BtcEncode #%d wrong error got: %v, want: %v", i, err, test.writeErr) continue } // For errors which are not of type wire.MessageError, check // them for equality. 
if _, ok := err.(*wire.MessageError); !ok { if err != test.writeErr { t.Errorf("BtcEncode #%d wrong error got: %v, "+ "want: %v", i, err, test.writeErr) continue } } // Decode from wire format. var msg wire.MsgHeaders r := newFixedReader(test.max, test.buf) err = msg.BtcDecode(r, test.pver) if reflect.TypeOf(err) != reflect.TypeOf(test.readErr) { t.Errorf("BtcDecode #%d wrong error got: %v, want: %v", i, err, test.readErr) continue } // For errors which are not of type wire.MessageError, check // them for equality. if _, ok := err.(*wire.MessageError); !ok { if err != test.readErr { t.Errorf("BtcDecode #%d wrong error got: %v, "+ "want: %v", i, err, test.readErr) continue } } } }
explode_data.jsonl/52394
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2212 }
[ 2830, 3393, 10574, 37845, 13877, 1155, 353, 8840, 836, 8, 341, 3223, 423, 1669, 9067, 54096, 5637, 198, 6692, 554, 7747, 1669, 609, 35531, 8472, 1454, 31483, 50333, 1669, 1887, 6954, 84652, 6370, 198, 197, 1174, 23089, 6370, 1669, 2504, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestScanDefinitionFile(t *testing.T) { tests := []struct { name string defPath string sections string }{ {"Arch", "../testdata_good/arch/arch", "../testdata_good/arch/arch_sections.json"}, {"BusyBox", "../testdata_good/busybox/busybox", "../testdata_good/busybox/busybox_sections.json"}, {"Debootstrap", "../testdata_good/debootstrap/debootstrap", "../testdata_good/debootstrap/debootstrap_sections.json"}, {"Docker", "../testdata_good/docker/docker", "../testdata_good/docker/docker_sections.json"}, {"LocalImage", "../testdata_good/localimage/localimage", "../testdata_good/localimage/localimage_sections.json"}, {"Shub", "../testdata_good/shub/shub", "../testdata_good/shub/shub_sections.json"}, {"Yum", "../testdata_good/yum/yum", "../testdata_good/yum/yum_sections.json"}, {"Zypper", "../testdata_good/zypper/zypper", "../testdata_good/zypper/zypper_sections.json"}, } for _, tt := range tests { t.Run(tt.name, test.WithoutPrivilege(func(t *testing.T) { deffile := tt.defPath r, err := os.Open(deffile) if err != nil { t.Fatal("failed to read deffile:", err) } defer r.Close() s := bufio.NewScanner(r) s.Split(scanDefinitionFile) for s.Scan() && s.Text() == "" && s.Err() == nil { } b, err := ioutil.ReadFile(tt.sections) if err != nil { t.Fatal("failed to read JSON:", err) } type DefFileSections struct { Header string } var d []DefFileSections if err := json.Unmarshal(b, &d); err != nil { t.Fatal("failed to unmarshal JSON:", err) } // Right now this only does the header, but the json files are // written with all of the sections in mind so that could be added. if s.Text() != d[0].Header { t.Fatal("scanDefinitionFile does not produce same header as reference") } })) } }
explode_data.jsonl/30714
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 736 }
[ 2830, 3393, 26570, 10398, 1703, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 7452, 1820, 220, 914, 198, 197, 197, 24661, 914, 198, 197, 59403, 197, 197, 4913, 18727, 497, 7005, 92425, 447...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestValidateClusterServiceClass(t *testing.T) { cases := []struct { name string serviceClass *servicecatalog.ClusterServiceClass valid bool }{ { name: "valid serviceClass", serviceClass: validClusterServiceClass(), valid: true, }, { name: "valid serviceClass - uppercase in GUID", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalID = "40D-0983-1b89" return s }(), valid: true, }, { name: "valid serviceClass - period in GUID", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalID = "4315f5e1-0139-4ecf-9706-9df0aff33e5a.plan-name" return s }(), valid: true, }, { name: "valid serviceClass - period in externalName", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalName = "abc.com" return s }(), valid: true, }, { name: "invalid serviceClass - has namespace", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Namespace = "test-ns" return s }(), valid: false, }, { name: "invalid serviceClass - missing guid", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalID = "" return s }(), valid: false, }, { name: "invalid serviceClass - invalid guid", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalID = "1234-4354a\\%-49b" return s }(), valid: false, }, { name: "invalid serviceClass - missing description", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.Description = "" return s }(), valid: false, }, { name: "invalid serviceClass - invalid externalName", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalName = "****" return s }(), valid: false, }, { name: "invalid serviceClass - underscore in externalName", serviceClass: func() 
*servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalName = "test_serviceclass" return s }(), valid: false, }, { name: "invalid serviceClass - missing externalName", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalName = "" return s }(), valid: false, }, { name: "invalid serviceClass - valid but weird externalName1", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalName = "-" return s }(), valid: true, }, { name: "invalid serviceClass - valid but weird externalName2", serviceClass: func() *servicecatalog.ClusterServiceClass { s := validClusterServiceClass() s.Spec.ExternalName = "0" return s }(), valid: true, }, } for _, tc := range cases { errs := ValidateClusterServiceClass(tc.serviceClass) if len(errs) != 0 && tc.valid { t.Errorf("%v: unexpected error: %v", tc.name, errs) continue } else if len(errs) == 0 && !tc.valid { t.Errorf("%v: unexpected success", tc.name) } } }
explode_data.jsonl/48908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1395 }
[ 2830, 3393, 17926, 28678, 1860, 1957, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 52934, 1957, 353, 7936, 26539, 72883, 1860, 1957, 198, 197, 56322, 286, 1807, 198, 197, 59403, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnvListBasic(t *testing.T) { s := newScaffold(t) defer s.reset() err := s.executeCommand("env", "list") require.NoError(t, err) lines := strings.Split(strings.Trim(s.stdout(), "\n"), "\n") a := assert.New(t) require.Equal(t, 4, len(lines)) a.Equal("dev", lines[0]) a.Equal("local", lines[1]) a.Equal("prod", lines[2]) a.Equal("stage", lines[3]) }
explode_data.jsonl/241
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 14359, 852, 15944, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 50, 27864, 1155, 340, 16867, 274, 13857, 741, 9859, 1669, 274, 7769, 4062, 445, 3160, 497, 330, 1607, 1138, 17957, 35699, 1155, 11, 1848, 340, 78390, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDetails(t *testing.T) { err1 := errors.New("error 1") err2 := errors.WithDetails(err1, errors.Details{"data": 123}) err3 := errors.HandledWithMessage(err2, "error 2") err := errors.WithDetails(err3, errors.Details{"data": 456, "value": errors.SafeDetail.Value("test")}) Convey("WithDetails/CollectDetails", t, func() { So(err, ShouldBeError, "error 2") details := errors.CollectDetails(err, nil) So(details, ShouldResemble, errors.Details{ "data": 456, "value": errors.SafeDetail.Value("test"), }) }) Convey("FilterDetails/GetSafeDetails", t, func() { details := errors.GetSafeDetails(err) So(details, ShouldResemble, errors.Details{ "value": "test", }) }) Convey("DetailTaggedValue", t, func() { So(err, ShouldBeError, "error 2") details := errors.CollectDetails(err, nil) data, _ := json.Marshal(details) So(string(data), ShouldEqual, `{"data":456,"value":"[detail: safe]"}`) }) }
explode_data.jsonl/3551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 7799, 1155, 353, 8840, 836, 8, 341, 9859, 16, 1669, 5975, 7121, 445, 841, 220, 16, 1138, 9859, 17, 1669, 5975, 26124, 7799, 3964, 16, 11, 5975, 87719, 4913, 691, 788, 220, 16, 17, 18, 3518, 9859, 18, 1669, 5975, 77388, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAdapterUpdateIngressLoadBalancer(t *testing.T) { a, _ := NewAdapter(testConfig, IngressAPIVersionNetworking, testIngressFilter, testSecurityGroup, testSSLPolicy, aws.LoadBalancerTypeApplication, DefaultClusterLocalDomain, false) client := &mockClient{} a.kubeClient = client ing := &Ingress{ Namespace: "default", Name: "foo", Hostname: "bar", CertificateARN: "zbr", resourceType: ingressTypeIngress, } if err := a.UpdateIngressLoadBalancer(ing, "bar"); err != ErrUpdateNotNeeded { t.Error("expected ErrUpdateNotNeeded") } if err := a.UpdateIngressLoadBalancer(ing, "xpto"); err != nil { t.Error(err) } client.broken = true if err := a.UpdateIngressLoadBalancer(ing, "xpto"); err == nil { t.Error("expected an error") } if err := a.UpdateIngressLoadBalancer(ing, ""); err == nil { t.Error("expected an error") } if err := a.UpdateIngressLoadBalancer(nil, "xpto"); err == nil { t.Error("expected an error") } }
explode_data.jsonl/6732
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 5940, 4289, 641, 2483, 5879, 93825, 1155, 353, 8840, 836, 8, 341, 11323, 11, 716, 1669, 1532, 5940, 8623, 2648, 11, 758, 2483, 7082, 5637, 78007, 11, 1273, 641, 2483, 5632, 11, 1273, 15352, 2808, 11, 1273, 1220, 12567, 801...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestMsgDecode(t *testing.T) { registry := codectypes.NewInterfaceRegistry() cryptocodec.RegisterInterfaces(registry) RegisterInterfaces(registry) cdc := codec.NewProtoCodec(registry) // firstly we start testing the pubkey serialization pk1bz, err := cdc.MarshalInterface(pk1) require.NoError(t, err) var pkUnmarshaled cryptotypes.PubKey err = cdc.UnmarshalInterface(pk1bz, &pkUnmarshaled) require.NoError(t, err) require.True(t, pk1.Equals(pkUnmarshaled.(*ed25519.PubKey))) // now let's try to serialize the whole message commission1 := NewCommissionRates(sdk.ZeroDec(), sdk.ZeroDec(), sdk.ZeroDec()) msg, err := NewMsgCreateValidator(valAddr1, pk1, coinPos, Description{}, commission1, sdk.OneInt()) require.NoError(t, err) msgSerialized, err := cdc.MarshalInterface(msg) require.NoError(t, err) var msgUnmarshaled sdk.Msg err = cdc.UnmarshalInterface(msgSerialized, &msgUnmarshaled) require.NoError(t, err) msg2, ok := msgUnmarshaled.(*MsgCreateValidator) require.True(t, ok) require.True(t, msg.Value.IsEqual(msg2.Value)) require.True(t, msg.Pubkey.Equal(msg2.Pubkey)) }
explode_data.jsonl/22794
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 431 }
[ 2830, 3393, 6611, 32564, 1155, 353, 8840, 836, 8, 341, 197, 29172, 1669, 20329, 439, 1804, 7121, 5051, 15603, 741, 1444, 3571, 43688, 66, 19983, 41066, 18390, 4944, 340, 79096, 41066, 18390, 4944, 340, 1444, 7628, 1669, 34647, 7121, 31549...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_processFiles(t *testing.T) { fakeStdin := strings.NewReader("hey cool how is it going") files, err := processFiles(io.NopCloser(fakeStdin), "", []string{"-"}) if err != nil { t.Fatalf("unexpected error processing files: %s", err) } assert.Equal(t, 1, len(files)) assert.Equal(t, "hey cool how is it going", files["gistfile0.txt"].Content) }
explode_data.jsonl/27556
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 11305, 10809, 1155, 353, 8840, 836, 8, 341, 1166, 726, 22748, 258, 1669, 9069, 68587, 445, 35561, 7010, 1246, 374, 432, 2087, 1138, 74075, 11, 1848, 1669, 1882, 10809, 37258, 2067, 453, 51236, 799, 74138, 22748, 258, 701, 73...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_Pagination_PrevNext_1NextLink(t *testing.T) { doc := testutil.CreateHTML() body := dom.QuerySelector(doc, "body") root := testutil.CreateDiv(0) dom.AppendChild(body, root) anchor := testutil.CreateAnchor("next", "next page") dom.AppendChild(root, anchor) assertDefaultDocumenOutlink(t, doc, nil, nil) }
explode_data.jsonl/10820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 1088, 10353, 1088, 7282, 5847, 62, 16, 5847, 3939, 1155, 353, 8840, 836, 8, 341, 59536, 1669, 1273, 1314, 7251, 5835, 741, 35402, 1669, 4719, 15685, 5877, 19153, 11, 330, 2599, 5130, 33698, 1669, 1273, 1314, 7251, 12509, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintJobList(t *testing.T) { completions := int32(2) jobList := batch.JobList{ Items: []batch.Job{ { ObjectMeta: metav1.ObjectMeta{ Name: "job1", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Spec: batch.JobSpec{ Completions: &completions, Template: api.PodTemplateSpec{ Spec: api.PodSpec{ Containers: []api.Container{ { Name: "fake-job-container1", Image: "fake-job-image1", }, { Name: "fake-job-container2", Image: "fake-job-image2", }, }, }, }, Selector: &metav1.LabelSelector{MatchLabels: map[string]string{"job-label": "job-lable-value"}}, }, Status: batch.JobStatus{ Succeeded: 1, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "job2", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Spec: batch.JobSpec{ Completions: &completions, Template: api.PodTemplateSpec{ Spec: api.PodSpec{ Containers: []api.Container{ { Name: "fake-job-container1", Image: "fake-job-image1", }, { Name: "fake-job-container2", Image: "fake-job-image2", }, }, }, }, Selector: &metav1.LabelSelector{MatchLabels: map[string]string{"job-label": "job-lable-value"}}, }, Status: batch.JobStatus{ Succeeded: 2, StartTime: &metav1.Time{Time: time.Now().Add(-20 * time.Minute)}, }, }, }, } // Columns: Name, Completions, Duration, Age expectedRows := []metav1.TableRow{ {Cells: []interface{}{"job1", "1/2", "", "0s"}}, {Cells: []interface{}{"job2", "2/2", "20m", "0s"}}, } rows, err := printJobList(&jobList, printers.GenerateOptions{}) if err != nil { t.Fatalf("Error printing job list: %#v", err) } for i := range rows { rows[i].Object.Object = nil } if !reflect.DeepEqual(expectedRows, rows) { t.Errorf("mismatch: %s", diff.ObjectReflectDiff(expectedRows, rows)) } }
explode_data.jsonl/21610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1059 }
[ 2830, 3393, 8994, 12245, 852, 1155, 353, 8840, 836, 8, 341, 32810, 10819, 908, 1669, 526, 18, 17, 7, 17, 340, 68577, 852, 1669, 7162, 45293, 852, 515, 197, 197, 4353, 25, 3056, 14049, 45293, 515, 298, 197, 515, 571, 23816, 12175, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_CurrentVersion_Using_Flag(t *testing.T) { // // #TODO(kishorevaishnav): Don't know how to handle this // args := os.Args // defer func() { os.Args = args }() // os.Args = []string{"", "-version"} // initializeDefaults() }
explode_data.jsonl/22628
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 40735, 5637, 62, 16429, 98135, 1155, 353, 8840, 836, 8, 341, 197, 322, 442, 671, 14732, 5969, 812, 460, 6586, 812, 3722, 1648, 4320, 944, 1414, 1246, 311, 3705, 419, 198, 197, 322, 2827, 1669, 2643, 51015, 198, 197, 322, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCommits_SubRepoPerms(t *testing.T) { t.Parallel() ctx := actor.WithActor(context.Background(), &actor.Actor{ UID: 1, }) gitCommands := []string{ "touch file1", "git add file1", "GIT_COMMITTER_NAME=a GIT_COMMITTER_EMAIL=a@a.com GIT_COMMITTER_DATE=2006-01-02T15:04:05Z git commit -m commit1 --author='a <a@a.com>' --date 2006-01-02T15:04:05Z", "touch file2", "git add file2", "touch file2.2", "git add file2.2", "GIT_COMMITTER_NAME=c GIT_COMMITTER_EMAIL=c@c.com GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit -m commit2 --author='a <a@a.com>' --date 2006-01-02T15:04:06Z", "touch file3", "git add file3", "GIT_COMMITTER_NAME=c GIT_COMMITTER_EMAIL=c@c.com GIT_COMMITTER_DATE=2006-01-02T15:04:07Z git commit -m commit3 --author='a <a@a.com>' --date 2006-01-02T15:04:07Z", } tests := map[string]struct { repo api.RepoName wantCommits []*gitdomain.Commit opt CommitsOptions wantTotal uint noAccessPaths []string }{ "if no read perms on file should filter out commit": { repo: MakeGitRepository(t, gitCommands...), wantTotal: 1, wantCommits: []*gitdomain.Commit{ { ID: "d38233a79e037d2ab8170b0d0bc0aa438473e6da", Author: gitdomain.Signature{Name: "a", Email: "a@a.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: "a", Email: "a@a.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Message: "commit1", }, }, noAccessPaths: []string{"file2", "file3"}, }, "sub-repo perms with path (w/ no access) specified should return no commits": { repo: MakeGitRepository(t, gitCommands...), wantTotal: 1, opt: CommitsOptions{ Path: "file2", }, wantCommits: []*gitdomain.Commit{}, noAccessPaths: []string{"file2", "file3"}, }, "sub-repo perms with path (w/ access) specified should return that commit": { repo: MakeGitRepository(t, gitCommands...), wantTotal: 1, opt: CommitsOptions{ Path: "file1", }, wantCommits: []*gitdomain.Commit{ { ID: "d38233a79e037d2ab8170b0d0bc0aa438473e6da", Author: gitdomain.Signature{Name: "a", Email: 
"a@a.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Committer: &gitdomain.Signature{Name: "a", Email: "a@a.com", Date: MustParseTime(time.RFC3339, "2006-01-02T15:04:05Z")}, Message: "commit1", }, }, noAccessPaths: []string{"file2", "file3"}, }, } for label, test := range tests { t.Run(label, func(t *testing.T) { checker := getTestSubRepoPermsChecker(test.noAccessPaths...) commits, err := Commits(ctx, database.NewMockDB(), test.repo, test.opt, checker) if err != nil { t.Errorf("%s: Commits(): %s", label, err) return } if len(commits) != len(test.wantCommits) { t.Errorf("%s: got %d commits, want %d", label, len(commits), len(test.wantCommits)) } checkCommits(t, label, commits, test.wantCommits) }) } }
explode_data.jsonl/8518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1414 }
[ 2830, 3393, 17977, 1199, 36359, 25243, 3889, 1011, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 12089, 26124, 18870, 5378, 19047, 1507, 609, 5621, 76094, 515, 197, 197, 6463, 25, 220, 16, 345, 197, 3518, 90731, 304...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDaoSetLinkMailCount(t *testing.T) { var ( c = context.Background() linkMail = "2459593393@qq.com" ) convey.Convey("SetLinkMailCount", t, func(ctx convey.C) { state, err := d.SetLinkMailCount(c, linkMail) ctx.Convey("Then err should be nil.state should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(state, convey.ShouldNotBeNil) }) }) }
explode_data.jsonl/61952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 12197, 1649, 3939, 16702, 2507, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 286, 284, 2266, 19047, 741, 197, 54238, 16702, 284, 330, 17, 19, 20, 24, 20, 24, 18, 18, 24, 18, 61652, 905, 698, 197, 340, 37203, 56...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServerInactiveMonitor(t *testing.T) { inactivityDetected := false ctx, cancel := context.WithTimeout(context.Background(), time.Second*8) defer cancel() ld, err := coapNet.NewListenUDP("udp4", "") require.NoError(t, err) defer func() { err := ld.Close() require.NoError(t, err) }() checkClose := semaphore.NewWeighted(2) err = checkClose.Acquire(ctx, 2) require.NoError(t, err) sd := udp.NewServer( udp.WithOnNewClientConn(func(cc *client.ClientConn) { cc.AddOnClose(func() { checkClose.Release(1) }) }), udp.WithInactivityMonitor(100*time.Millisecond, func(cc inactivity.ClientConn) { require.False(t, inactivityDetected) inactivityDetected = true err := cc.Close() require.NoError(t, err) }), udp.WithPeriodicRunner(periodic.New(ctx.Done(), time.Millisecond*10)), ) var serverWg sync.WaitGroup defer func() { sd.Stop() serverWg.Wait() }() serverWg.Add(1) go func() { defer serverWg.Done() err := sd.Serve(ld) require.NoError(t, err) }() cc, err := udp.Dial( ld.LocalAddr().String(), ) require.NoError(t, err) cc.AddOnClose(func() { checkClose.Release(1) }) // send ping to create serverside connection ctxPing, cancel := context.WithTimeout(ctx, time.Second) defer cancel() err = cc.Ping(ctxPing) require.NoError(t, err) err = cc.Ping(ctxPing) require.NoError(t, err) // wait for fire inactivity time.Sleep(time.Second * 2) err = cc.Close() require.NoError(t, err) <-cc.Done() err = checkClose.Acquire(ctx, 2) require.NoError(t, err) require.True(t, inactivityDetected) }
explode_data.jsonl/34013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 652 }
[ 2830, 3393, 5475, 72214, 30098, 1155, 353, 8840, 836, 8, 341, 17430, 7175, 17076, 1569, 1669, 895, 271, 20985, 11, 9121, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 882, 32435, 9, 23, 340, 16867, 9121, 2822, 197, 507, 11, 1848, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBalancerUnderServerShutdownDelete(t *testing.T) { testBalancerUnderServerShutdownMutable(t, func(cli *clientv3.Client, ctx context.Context) error { _, err := cli.Delete(ctx, "foo") return err }) }
explode_data.jsonl/29448
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 93825, 16250, 5475, 62004, 6435, 1155, 353, 8840, 836, 8, 341, 18185, 93825, 16250, 5475, 62004, 11217, 1155, 11, 2915, 70249, 353, 2972, 85, 18, 11716, 11, 5635, 2266, 9328, 8, 1465, 341, 197, 197, 6878, 1848, 1669, 21348, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestListComprehensionMultiLevel(t *testing.T) { gopClTest(t, ` arr := [1, 2, 3, 4.1, 5, 6] x := [[a, b] for a <- arr, a < b for b <- arr, b > 2] println("x:", x) `, `package main import fmt "fmt" func main() { arr := []float64{1, 2, 3, 4.1, 5, 6} x := func() (_gop_ret [][]float64) { for _, b := range arr { if b > 2 { for _, a := range arr { if a < b { _gop_ret = append(_gop_ret, []float64{a, b}) } } } } return }() fmt.Println("x:", x) } `) }
explode_data.jsonl/73653
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 852, 1092, 30782, 2645, 20358, 4449, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 1118, 1669, 508, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 13, 16, 11, 220, 20, 11, 220, 21, 921, 87, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRollupBasic(t *testing.T) { defer leaktest.AfterTest(t)() tm := newTestModelRunner(t) tm.Start() defer tm.Stop() series1a := tsd("test.metric", "a") series1b := tsd("test.metric", "b") series2 := tsd("test.othermetric", "a") for i := 0; i < 500; i++ { series1a.Datapoints = append(series1a.Datapoints, tsdp(time.Duration(i), float64(i))) series1b.Datapoints = append(series1b.Datapoints, tsdp(time.Duration(i), float64(i))) series2.Datapoints = append(series2.Datapoints, tsdp(time.Duration(i), float64(i))) } tm.storeTimeSeriesData(resolution1ns, []tspb.TimeSeriesData{series1a, series1b, series2}) tm.assertKeyCount(150) tm.assertModelCorrect() now := 250 + resolution1nsDefaultRollupThreshold.Nanoseconds() tm.rollup(now, timeSeriesResolutionInfo{ Name: "test.metric", Resolution: resolution1ns, }) tm.assertKeyCount(152) tm.assertModelCorrect() tm.prune(now, timeSeriesResolutionInfo{ Name: "test.metric", Resolution: resolution1ns, }) tm.assertKeyCount(102) tm.assertModelCorrect() // Specialty test - rollup only the real series, not the model, and ensure // that the query remains the same. This ensures that the same result is // returned from rolled-up data as is returned from data downsampled during // a query. memOpts := QueryMemoryOptions{ // Large budget, but not maximum to avoid overflows. 
BudgetBytes: math.MaxInt64, EstimatedSources: 1, // Not needed for rollups InterpolationLimitNanos: 0, Columnar: tm.DB.WriteColumnar(), } if err := tm.DB.rollupTimeSeries( context.Background(), []timeSeriesResolutionInfo{ { Name: "test.othermetric", Resolution: resolution1ns, }, }, hlc.Timestamp{ WallTime: 500 + resolution1nsDefaultRollupThreshold.Nanoseconds(), Logical: 0, }, MakeQueryMemoryContext(tm.workerMemMonitor, tm.resultMemMonitor, memOpts), ); err != nil { t.Fatal(err) } if err := tm.DB.pruneTimeSeries( context.Background(), tm.DB.db, []timeSeriesResolutionInfo{ { Name: "test.othermetric", Resolution: resolution1ns, }, }, hlc.Timestamp{ WallTime: 500 + resolution1nsDefaultRollupThreshold.Nanoseconds(), Logical: 0, }, ); err != nil { t.Fatal(err) } { query := tm.makeQuery("test.othermetric", resolution1ns, 0, 500) query.SampleDurationNanos = 50 query.assertSuccess(10, 1) } }
explode_data.jsonl/71876
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 988 }
[ 2830, 3393, 32355, 454, 15944, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 3244, 76, 1669, 501, 2271, 1712, 19486, 1155, 340, 3244, 76, 12101, 741, 16867, 17333, 30213, 2822, 197, 19880, 16, 64, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPlaceAutocompleteMissingInput(t *testing.T) { c, _ := NewClient(WithAPIKey(apiKey)) r := &PlaceAutocompleteRequest{} _, err := c.PlaceAutocomplete(context.Background(), r) if err == nil { t.Errorf("Error expected: maps: Input missing") } if "maps: Input missing" != err.Error() { t.Errorf("Wrong error returned \"%v\"", err) } }
explode_data.jsonl/76301
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 17371, 19602, 20104, 25080, 2505, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 1532, 2959, 7, 2354, 7082, 1592, 24827, 1592, 1171, 7000, 1669, 609, 17371, 19602, 20104, 1900, 31483, 197, 6878, 1848, 1669, 272, 86675, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRedeemYobicode(t *testing.T) { t.Parallel() _, err := y.RedeemCoupon("bla2") if err == nil { t.Error("Test Failed - RedeemYobicode() error", err) } }
explode_data.jsonl/79873
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 6033, 68, 336, 56, 674, 13634, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 379, 2013, 15326, 336, 88052, 445, 64726, 17, 1138, 743, 1848, 621, 2092, 341, 197, 3244, 6141, 445, 2271, 21379, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestInvokeNOKInvokesEmptyCCName(t *testing.T) { t.Run("1.2Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV12Capabilities(t) defer cleanup() testInvokeNOKInvokesEmptyCCName(t, l, v) }) t.Run("1.3Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV13Capabilities(t) defer cleanup() testInvokeNOKInvokesEmptyCCName(t, l, v) }) }
explode_data.jsonl/47810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 17604, 45, 3925, 15174, 7940, 3522, 3706, 675, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16, 13, 17, 63746, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 8810, 11, 348, 11, 21290, 1669, 6505, 60850, 1389, 3036, 142...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClosing(t *testing.T) { ctx := context.Background() lastID.Set(0) count.Set(0) p := NewResourcePool(PoolFactory, 5, 5, time.Second) var resources [10]Resource for i := 0; i < 5; i++ { r, err := p.Get(ctx) if err != nil { t.Errorf("Get failed: %v", err) } resources[i] = r } ch := make(chan bool) go func() { p.Close() ch <- true }() // Wait for goroutine to call Close time.Sleep(10 * time.Millisecond) stats := p.StatsJSON() expected := `{"Capacity": 0, "Available": 0, "Active": 5, "InUse": 5, "MaxCapacity": 5, "WaitCount": 0, "WaitTime": 0, "IdleTimeout": 1000000000, "IdleClosed": 0}` if stats != expected { t.Errorf(`expecting '%s', received '%s'`, expected, stats) } // Put is allowed when closing for i := 0; i < 5; i++ { p.Put(resources[i]) } // Wait for Close to return <-ch // SetCapacity must be ignored after Close err := p.SetCapacity(1) if err == nil { t.Errorf("expecting error") } stats = p.StatsJSON() expected = `{"Capacity": 0, "Available": 0, "Active": 0, "InUse": 0, "MaxCapacity": 5, "WaitCount": 0, "WaitTime": 0, "IdleTimeout": 1000000000, "IdleClosed": 0}` if stats != expected { t.Errorf(`expecting '%s', received '%s'`, expected, stats) } if lastID.Get() != 5 { t.Errorf("Expecting 5, received %d", count.Get()) } if count.Get() != 0 { t.Errorf("Expecting 0, received %d", count.Get()) } }
explode_data.jsonl/660
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 565 }
[ 2830, 3393, 36294, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 33096, 915, 4202, 7, 15, 340, 18032, 4202, 7, 15, 340, 3223, 1669, 1532, 4783, 10551, 5304, 1749, 4153, 11, 220, 20, 11, 220, 20, 11, 882, 32435, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTwoDevicePing(t *testing.T) { goroutineLeakCheck(t) pair := genTestPair(t) t.Run("ping 1.0.0.1", func(t *testing.T) { pair.Send(t, Ping, nil) }) t.Run("ping 1.0.0.2", func(t *testing.T) { pair.Send(t, Pong, nil) }) }
explode_data.jsonl/53965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 11613, 6985, 69883, 1155, 353, 8840, 836, 8, 341, 3174, 269, 14159, 2304, 585, 3973, 1155, 340, 3223, 1310, 1669, 4081, 2271, 12443, 1155, 340, 3244, 16708, 445, 9989, 220, 16, 13, 15, 13, 15, 13, 16, 497, 2915, 1155, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseResourceID(t *testing.T) { testData := []struct { Environment azure.Environment Input string }{ { Environment: azure.ChinaCloud, Input: "https://account1.blob.core.chinacloudapi.cn/container1", }, { Environment: azure.GermanCloud, Input: "https://account1.blob.core.cloudapi.de/container1", }, { Environment: azure.PublicCloud, Input: "https://account1.blob.core.windows.net/container1", }, { Environment: azure.USGovernmentCloud, Input: "https://account1.blob.core.usgovcloudapi.net/container1", }, } for _, v := range testData { t.Logf("[DEBUG] Testing Environment %q", v.Environment.Name) actual, err := ParseResourceID(v.Input) if err != nil { t.Fatal(err) } if actual.AccountName != "account1" { t.Fatalf("Expected the account name to be `account1` but got %q", actual.AccountName) } if actual.ContainerName != "container1" { t.Fatalf("Expected the container name to be `container1` but got %q", actual.ContainerName) } } }
explode_data.jsonl/25312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 428 }
[ 2830, 3393, 14463, 4783, 915, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 3056, 1235, 341, 197, 197, 12723, 76530, 45651, 198, 197, 66588, 981, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 12723, 25, 76530, 6353, 2210, 16055, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCreateVmScaleSetsWithCustomTags(t *testing.T) { testTags := map[string]*string{ "orchestrator": to.StringPtr("k8s"), "aksEngineVersion": to.StringPtr("1.15"), "poolName": to.StringPtr("TestPool"), } testVirtualMachineScaleSet := compute.VirtualMachineScaleSet{ Tags: testTags, } testTagsToAdd := map[string]string{ "myTestKey1": "myTestValue1", "myTestKey2": "myTestValue2", "poolName": "myName", } addCustomTagsToVMScaleSets(testTagsToAdd, &testVirtualMachineScaleSet) expectedTags := map[string]*string{ "orchestrator": to.StringPtr("k8s"), "aksEngineVersion": to.StringPtr("1.15"), "poolName": to.StringPtr("TestPool"), "myTestKey1": to.StringPtr("myTestValue1"), "myTestKey2": to.StringPtr("myTestValue2"), } diff := cmp.Diff(testVirtualMachineScaleSet.Tags, expectedTags) if diff != "" { t.Errorf("unexpected diff while expecting equal structs: %s", diff) } }
explode_data.jsonl/49328
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 4021, 88124, 6947, 30175, 2354, 10268, 15930, 1155, 353, 8840, 836, 8, 341, 18185, 15930, 1669, 2415, 14032, 8465, 917, 515, 197, 197, 1, 21584, 15111, 850, 788, 257, 311, 6431, 5348, 445, 74, 23, 82, 4461, 197, 197, 1, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSimplifier(t *testing.T) { orig, simpl_5f, simpl_3t := GetTestData() simpl_5f_test := Simplify(orig, 5, false) simpl_3t_test := Simplify(orig, 3, true) if !CompareSlices(simpl_5f_test, simpl_5f) { t.Fatalf(" 5 false | Something went wrong") } if !CompareSlices(simpl_3t_test, simpl_3t) { t.Fatalf("3 true | Something went wrong") } }
explode_data.jsonl/71966
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 50, 6383, 3049, 1155, 353, 8840, 836, 8, 341, 197, 4670, 11, 15491, 62, 20, 69, 11, 15491, 62, 18, 83, 1669, 2126, 83920, 2822, 1903, 6383, 62, 20, 69, 4452, 1669, 61242, 1437, 54837, 11, 220, 20, 11, 895, 340, 1903, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHttpStatusCodeError(t *testing.T) { cases := []struct { name string err error expectedOutput string }{ { name: "async required error", err: HTTPStatusCodeError{ StatusCode: http.StatusUnprocessableEntity, ErrorMessage: strPtr(AsyncErrorMessage), Description: strPtr(AsyncErrorDescription), }, expectedOutput: "Status: 422; ErrorMessage: AsyncRequired; Description: This service plan requires client support for asynchronous service operations.; ResponseError: <nil>", }, { name: "app guid required error", err: HTTPStatusCodeError{ StatusCode: http.StatusUnprocessableEntity, ErrorMessage: strPtr(AppGUIDRequiredErrorMessage), Description: strPtr(AppGUIDRequiredErrorDescription), }, expectedOutput: "Status: 422; ErrorMessage: RequiresApp; Description: This service supports generation of credentials through binding an application only.; ResponseError: <nil>", }, { name: "blank error", err: HTTPStatusCodeError{}, expectedOutput: "Status: 0; ErrorMessage: <nil>; Description: <nil>; ResponseError: <nil>", }, } for _, tc := range cases { if e, a := tc.expectedOutput, tc.err.Error(); e != a { t.Errorf("%v: expected %v, got %v", tc.name, e, a) } } }
explode_data.jsonl/53562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 469 }
[ 2830, 3393, 2905, 15872, 1454, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 9859, 310, 1465, 198, 197, 42400, 5097, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 7692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSubset_NestedSlice(t *testing.T) { super := map[string]interface{}{ "a": "1", "b": "2", "c": "3", "d": []interface{}{ "11", "22", }, } sub := map[string]interface{}{ "c": "3", "d": []interface{}{ "11", }, } if !testutil.ContainSubset(super, sub) { t.Fatalf("expected map to be subset of super, got false") } }
explode_data.jsonl/79704
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 70584, 1604, 9980, 33236, 1155, 353, 8840, 836, 8, 1476, 12468, 1669, 2415, 14032, 31344, 67066, 197, 197, 56693, 788, 330, 16, 756, 197, 197, 1, 65, 788, 330, 17, 756, 197, 197, 96946, 788, 330, 18, 756, 197, 197, 44917...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetTime(t *testing.T) { // now time now := time.Now() fmt.Println(now) // unix timestamp fmt.Println(now.Unix()) // nano time fmt.Println(now.UnixNano()) // decimal part fmt.Println(now.Nanosecond()) }
explode_data.jsonl/4698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 1949, 1462, 1155, 353, 8840, 836, 8, 341, 197, 322, 1431, 882, 198, 80922, 1669, 882, 13244, 741, 11009, 12419, 32263, 340, 197, 322, 51866, 11441, 198, 11009, 12419, 32263, 10616, 941, 2398, 197, 322, 50493, 882, 198, 11009...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPathEnclosingInterval_Paths(t *testing.T) { // For these tests, we check only the path of the enclosing // node, but not its complete text because it's often quite // large when !exact. tests := []struct { substr string // first occurrence of this string indicates interval path string // the pathToString(),exact of the expected path }{ {"// add", "[BlockStmt FuncDecl File],false"}, {"(x + y", "[ParenExpr AssignStmt BlockStmt FuncDecl File],false"}, {"x +", "[BinaryExpr ParenExpr AssignStmt BlockStmt FuncDecl File],false"}, {"z := (x", "[AssignStmt BlockStmt FuncDecl File],false"}, {"func f", "[FuncDecl File],false"}, {"func f()", "[FuncDecl File],false"}, {" f()", "[FuncDecl File],false"}, {"() {}", "[FuncDecl File],false"}, {"// Hello", "[File],false"}, {" f", "[Ident FuncDecl File],true"}, {"func ", "[FuncDecl File],true"}, {"mai", "[Ident File],true"}, {"f() // NB", "[CallExpr ExprStmt BlockStmt FuncDecl File],true"}, } for _, test := range tests { f, start, end := findInterval(t, new(token.FileSet), input, test.substr) if f == nil { continue } path, exact := importer.PathEnclosingInterval(f, start, end) if got := fmt.Sprintf("%s,%v", pathToString(path), exact); got != test.path { t.Errorf("PathEnclosingInterval(%q): got %q, want %q", test.substr, got, test.path) continue } } }
explode_data.jsonl/40770
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 566 }
[ 2830, 3393, 1820, 7408, 17831, 10256, 1088, 26286, 1155, 353, 8840, 836, 8, 341, 197, 322, 1752, 1493, 7032, 11, 582, 1779, 1172, 279, 1815, 315, 279, 84928, 198, 197, 322, 2436, 11, 714, 537, 1181, 4583, 1467, 1576, 432, 594, 3545, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGlob(t *testing.T) { sc, c, cleanup := createContexts(t) defer cleanup() root, mts, _, stopper := createNamespace(t, sc) runNestedMountTables(t, sc, mts) defer stopper() ns := v23.GetNamespace(c) ns.SetRoots(root.name) tln := []string{"baz", "mt1", "mt2", "mt3", "mt4", "mt5", "joke1", "joke2", "joke3"} barbaz := []string{"mt4/foo/bar", "mt4/foo/baz"} level12 := []string{"joke1/level1", "joke1/level1/level2", "joke2/level1", "joke2/level1/level2", "joke3/level1", "joke3/level1/level2"} foo := append([]string{"mt4/foo"}, barbaz...) foo = append(foo, level12...) // Try various globs. globTests := []struct { pattern string expected []string }{ {"*", tln}, {"x", []string{}}, {"m*", []string{"mt1", "mt2", "mt3", "mt4", "mt5"}}, {"mt[2,3]", []string{"mt2", "mt3"}}, {"*z", []string{"baz"}}, {"joke1/*", []string{"joke1/level1"}}, {"j?ke1/level1/*", []string{"joke1/level1/level2"}}, {"joke1/level1/*", []string{"joke1/level1/level2"}}, {"joke1/level1/level2/...", []string{"joke1/level1/level2"}}, {"...", append(append(tln, foo...), "")}, {"*/...", append(tln, foo...)}, {"*/foo/*", barbaz}, {"*/*/*z", []string{"mt4/foo/baz"}}, {"*/f??/*z", []string{"mt4/foo/baz"}}, {"mt4/foo/baz", []string{"mt4/foo/baz"}}, } for _, test := range globTests { out := doGlob(t, c, ns, test.pattern, 0) compare(t, "Glob", test.pattern, out, test.expected) // Do the same with a full rooted name. out = doGlob(t, c, ns, naming.JoinAddressName(root.name, test.pattern), 0) var expectedWithRoot []string for _, s := range test.expected { expectedWithRoot = append(expectedWithRoot, naming.JoinAddressName(root.name, s)) } compare(t, "Glob", test.pattern, out, expectedWithRoot) } }
explode_data.jsonl/8228
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 778 }
[ 2830, 3393, 38, 1684, 1155, 353, 8840, 836, 8, 341, 29928, 11, 272, 11, 21290, 1669, 83674, 82, 1155, 340, 16867, 21290, 2822, 33698, 11, 296, 2576, 11, 8358, 2936, 712, 1669, 1855, 22699, 1155, 11, 1136, 340, 56742, 71986, 16284, 216...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUpdateProjectUnknownCommand(t *testing.T) { testInvalidInputHelper("update project sockshop someUnknownCommand --git-user=GIT_USER --git-token=GIT_TOKEN --git-remote-url=GIT_REMOTE_URL", "too many arguments set", t) }
explode_data.jsonl/53460
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 4289, 7849, 13790, 4062, 1155, 353, 8840, 836, 8, 341, 18185, 7928, 2505, 5511, 445, 2386, 2390, 11087, 8675, 1045, 13790, 4062, 1177, 12882, 8694, 28, 90559, 9107, 1177, 12882, 34841, 28, 90559, 18681, 1177, 12882, 12, 18147,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSerialize(t *testing.T) { file, err := os.OpenFile("tmp.txt", os.O_RDWR|os.O_CREATE, 0666) assert.Nil(t, err) err = testTxout.Serialize(file) assert.Nil(t, err) _, err = file.Seek(0, 0) assert.Nil(t, err) txOutRead := &TxOut{} txOutRead.value = 0 txOutRead.scriptPubKey = script.NewEmptyScript() err = txOutRead.Unserialize(file) assert.Nil(t, err) assert.Equal(t, testTxout.value, txOutRead.value) assert.Equal(t, testTxout.GetScriptPubKey().GetData(), txOutRead.GetScriptPubKey().GetData()) assert.Equal(t, uint32(30), testTxout.SerializeSize()) err = os.Remove("tmp.txt") assert.Nil(t, err) }
explode_data.jsonl/38864
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 15680, 1155, 353, 8840, 836, 8, 341, 17661, 11, 1848, 1669, 2643, 12953, 1703, 445, 5173, 3909, 497, 2643, 8382, 66266, 91, 436, 8382, 25823, 11, 220, 15, 21, 21, 21, 340, 6948, 59678, 1155, 11, 1848, 692, 9859, 284, 127...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInsertIntoPath(t *testing.T) { t.Parallel() tests := []struct { givePath string giveAfter string giveInsert string want string }{ { givePath: "", giveAfter: "", giveInsert: "", want: "", }, { givePath: "foo", giveAfter: "fo", giveInsert: "b", want: "fo/b/o", }, { givePath: "foo/bar", giveAfter: "fo", giveInsert: "b", want: "fo/b/o/bar", }, { givePath: "foo/bar", giveAfter: "foo", giveInsert: "baz", want: "foo/baz/bar", }, { givePath: "foo/bar/", giveAfter: "foo/", giveInsert: "baz/", want: "foo/baz/bar/", }, { givePath: "1/2/3/4/5/6", giveAfter: "1/2/3", giveInsert: "foo", want: "1/2/3/foo/4/5/6", }, } for _, tt := range tests { tt := tt t.Run(tt.want, func(t *testing.T) { t.Parallel() assert.Equal(t, tt.want, InsertIntoPath(tt.givePath, tt.giveAfter, tt.giveInsert)) }) } }
explode_data.jsonl/81335
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 550 }
[ 2830, 3393, 13780, 26591, 1820, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 3174, 533, 1820, 256, 914, 198, 197, 3174, 533, 6025, 220, 914, 198, 197, 3174, 533, 13780, 914, 198, 197, 50780...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunTask(t *testing.T) { SetupWithMock(t) // valid collect task... should succeed ct := &collectHTTPTask{ TaskMeta: TaskMeta{ Task: StringPointer(CollectHTTPTaskName), }, With: collectHTTPInputs{ Duration: StringPointer("1s"), Headers: map[string]string{}, URL: "https://httpbin.org/get", }, } // valid assess task... should succeed at := &assessTask{ TaskMeta: TaskMeta{ Task: StringPointer(AssessTaskName), }, With: assessInputs{ SLOs: []SLO{{ Metric: httpMetricPrefix + "/" + builtInHTTPErrorCountId, UpperLimit: float64Pointer(0), }}, }, } exp := &Experiment{ Tasks: []Task{ct, at}, Result: &ExperimentResult{}, } exp.initResults() err := ct.run(exp) assert.NoError(t, err) assert.Equal(t, exp.Result.Insights.NumVersions, 1) // SLOs should be satisfied by app for i := 0; i < len(exp.Result.Insights.SLOs); i++ { // i^th SLO assert.True(t, exp.Result.Insights.SLOsSatisfied[i][0]) // satisfied by only version } }
explode_data.jsonl/6855
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 419 }
[ 2830, 3393, 6727, 6262, 1155, 353, 8840, 836, 8, 341, 197, 21821, 2354, 11571, 1155, 692, 197, 322, 2697, 6530, 3383, 1112, 1265, 11996, 198, 89216, 1669, 609, 17384, 2545, 51, 2828, 1073, 515, 197, 81153, 12175, 25, 5430, 12175, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRateLimitRealImgur(t *testing.T) { key := os.Getenv("IMGURCLIENTID") if key == "" { t.Skip("IMGURCLIENTID environment variable not set.") } client := createClient(new(http.Client), key, "") rl, err := client.GetRateLimit() if err != nil { t.Errorf("GetRateLimit() failed with error: %v", err) t.FailNow() } if rl.ClientLimit != 12500 || rl.UserLimit != 500 { client.Log.Debugf("Found ClientLimit: %v and UserLimit: %v", rl.ClientLimit, rl.UserLimit) t.Error("Client/User limits are wrong. Probably something broken. Or IMGUR changed their limits. Or you are not using a free account for testing. Sorry. No real good way to test this.") } }
explode_data.jsonl/4809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 11564, 16527, 12768, 13033, 324, 1155, 353, 8840, 836, 8, 341, 23634, 1669, 2643, 64883, 445, 30346, 1511, 45271, 915, 1138, 743, 1376, 621, 1591, 341, 197, 3244, 57776, 445, 30346, 1511, 45271, 915, 4573, 3890, 537, 738, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDerivePathFromSeedAeAccount(t *testing.T) { type args struct { accountIndex uint32 addressIndex uint32 } tests := []struct { name string seed string args args wantAddress string wantErr bool }{ { name: "Standard Seed, m/44'/457'/0'/0'/0'", seed: "60812c7c93d6f9cb346bbcf799957b6ec776aea84b01bdd9f9b7916522cc52c6ea5d07960b68668cd37b0a77f6c4fe283f146bd916153c426df126a8b8707b39", args: args{ accountIndex: 0, addressIndex: 0, }, wantAddress: "ak_2Z74Jhbo3xqF47k2h6NoUpr5gTfc9EQFX7wPH2Vf7Q5PCVcZSW", wantErr: false, }, { name: "Standard Seed, m/44'/457'/0'/0'/3'", seed: "60812c7c93d6f9cb346bbcf799957b6ec776aea84b01bdd9f9b7916522cc52c6ea5d07960b68668cd37b0a77f6c4fe283f146bd916153c426df126a8b8707b39", args: args{ accountIndex: 0, addressIndex: 3, }, wantAddress: "ak_2wPpjbxDhdn8PURqLPsunqBTWYSbe9iac1gjfJcQVzY4aZYEzq", wantErr: false, }, } for _, tt := range tests { seedBytes, err := hex.DecodeString(tt.seed) if err != nil { t.Fatal(err) } key, err := DerivePathFromSeed(seedBytes, tt.args.accountIndex, tt.args.addressIndex) if err != nil { t.Fatal(err) } acc, err := BIP32KeyToAeKey(key) if err != nil { t.Fatal(err) } if acc.Address != tt.wantAddress { t.Errorf("%s should have been %s but got %s", tt.seed, tt.wantAddress, acc.Address) } } }
explode_data.jsonl/25832
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 738 }
[ 2830, 3393, 22171, 533, 1820, 3830, 41471, 32, 68, 7365, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 86866, 1552, 2622, 18, 17, 198, 197, 63202, 1552, 2622, 18, 17, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestProjectResourceHandler_UpdateProjectResource(t *testing.T) { type fields struct { ProjectResourceManager *handler_mock.IResourceManagerMock } tests := []struct { name string fields fields request *http.Request wantParams *models.UpdateResourceParams wantStatus int }{ { name: "update resource successful", fields: fields{ ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(params models.UpdateResourceParams) (*models.WriteResourceResponse, error) { return &models.WriteResourceResponse{CommitID: "my-commit-id"}, nil }}, }, request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte(updateResourceTestPayload))), wantParams: &models.UpdateResourceParams{ ResourceContext: models.ResourceContext{ Project: models.Project{ProjectName: "my-project"}, }, ResourceURI: "resource.yaml", UpdateResourcePayload: models.UpdateResourcePayload{ResourceContent: "c3RyaW5n"}, }, wantStatus: http.StatusOK, }, { name: "resource content not base64 encoded", fields: fields{ ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(params models.UpdateResourceParams) (*models.WriteResourceResponse, error) { return nil, errors.New("should not have been called") }}, }, request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte(updateResourceWithoutBase64EncodingTestPayload))), wantParams: nil, wantStatus: http.StatusBadRequest, }, { name: "resourceUri contains invalid string", fields: fields{ ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(params models.UpdateResourceParams) (*models.WriteResourceResponse, error) { return nil, errors.New("should not have been called") }}, }, request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/..resource.yaml", bytes.NewBuffer([]byte(updateResourceTestPayload))), wantParams: nil, wantStatus: http.StatusBadRequest, }, { name: 
"internal error", fields: fields{ ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(project models.UpdateResourceParams) (*models.WriteResourceResponse, error) { return nil, errors.New("oops") }}, }, request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte(updateResourceTestPayload))), wantParams: &models.UpdateResourceParams{ ResourceContext: models.ResourceContext{ Project: models.Project{ProjectName: "my-project"}, }, ResourceURI: "resource.yaml", UpdateResourcePayload: models.UpdateResourcePayload{ResourceContent: "c3RyaW5n"}, }, wantStatus: http.StatusInternalServerError, }, { name: "invalid payload", fields: fields{ ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(project models.UpdateResourceParams) (*models.WriteResourceResponse, error) { return nil, errors.New("should not have been called") }}, }, request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte("invalid"))), wantParams: nil, wantStatus: http.StatusBadRequest, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ph := NewProjectResourceHandler(tt.fields.ProjectResourceManager) router := gin.Default() router.PUT("/project/:projectName/resource/:resourceURI", ph.UpdateProjectResource) resp := performRequest(router, tt.request) if tt.wantParams != nil { require.Len(t, tt.fields.ProjectResourceManager.UpdateResourceCalls(), 1) require.Equal(t, *tt.wantParams, tt.fields.ProjectResourceManager.UpdateResourceCalls()[0].Params) } else { require.Empty(t, tt.fields.ProjectResourceManager.UpdateResourceCalls()) } require.Equal(t, tt.wantStatus, resp.Code) }) } }
explode_data.jsonl/7381
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1461 }
[ 2830, 3393, 7849, 4783, 3050, 47393, 7849, 4783, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 197, 7849, 32498, 353, 17905, 34134, 2447, 32498, 11571, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCloseLeavesGroup verifies that closing a Reader removes its member from
// the consumer group: after Close() the group itself still exists on the
// broker, but its membership count drops from one to zero.
func TestCloseLeavesGroup(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	topic := makeTopic()
	createTopic(t, topic, 1)
	groupID := makeGroupID()

	r := NewReader(ReaderConfig{
		Brokers:          []string{"localhost:9092"},
		Topic:            topic,
		GroupID:          groupID,
		MinBytes:         1,
		MaxBytes:         10e6,
		MaxWait:          100 * time.Millisecond,
		RebalanceTimeout: time.Second,
	})
	prepareReader(t, ctx, r, Message{Value: []byte("test")})

	conn, err := NewDialer().Dial("tcp", r.config.Brokers[0])
	if err != nil {
		t.Fatalf("error dialing: %v", err)
	}
	defer conn.Close()

	// Helper: fetch the broker's current description of the group under test.
	descGroups := func() describeGroupsResponseV0 {
		resp, err := conn.describeGroups(describeGroupsRequestV0{
			GroupIDs: []string{groupID},
		})
		if err != nil {
			t.Fatalf("error from describeGroups %v", err)
		}
		return resp
	}

	// Reading one message forces the reader to join the group first.
	_, err = r.ReadMessage(ctx)
	if err != nil {
		t.Fatalf("our reader never joind its group or couldn't read a message: %v", err)
	}
	resp := descGroups()
	if len(resp.Groups) != 1 {
		t.Fatalf("expected 1 group. got: %d", len(resp.Groups))
	}
	if len(resp.Groups[0].Members) != 1 {
		t.Fatalf("expected group membership size of %d, but got %d", 1, len(resp.Groups[0].Members))
	}

	err = r.Close()
	if err != nil {
		t.Fatalf("unexpected error closing reader: %s", err.Error())
	}
	// The group survives the close; only the member should be gone.
	resp = descGroups()
	if len(resp.Groups) != 1 {
		t.Fatalf("expected 1 group. got: %d", len(resp.Groups))
	}
	if len(resp.Groups[0].Members) != 0 {
		t.Fatalf("expected group membership size of %d, but got %d", 0, len(resp.Groups[0].Members))
	}
}
explode_data.jsonl/80368
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 679 }
[ 2830, 3393, 7925, 2304, 4693, 2808, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 220, 18, 15, 77053, 32435, 340, 16867, 9121, 741, 3244, 24810, 1669, 1281, 26406, 741, 39263, 26406, 1155, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAPIDeleteGitHook(t *testing.T) { defer prepareTestEnv(t)() repo := models.AssertExistsAndLoadBean(t, &models.Repository{ID: 37}).(*models.Repository) owner := models.AssertExistsAndLoadBean(t, &models.User{ID: repo.OwnerID}).(*models.User) // user1 is an admin user session := loginUser(t, "user1") token := getTokenForLoggedInUser(t, session) req := NewRequestf(t, "DELETE", "/api/v1/repos/%s/%s/hooks/git/pre-receive?token=%s", owner.Name, repo.Name, token) MakeRequest(t, req, http.StatusNoContent) req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/hooks/git/pre-receive?token=%s", owner.Name, repo.Name, token) resp := MakeRequest(t, req, http.StatusOK) var apiGitHook2 *api.GitHook DecodeJSON(t, resp, &apiGitHook2) assert.False(t, apiGitHook2.IsActive) assert.Empty(t, apiGitHook2.Content) }
explode_data.jsonl/32986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 2537, 915, 68, 1617, 46562, 31679, 1155, 353, 8840, 836, 8, 341, 16867, 10549, 2271, 14359, 1155, 8, 2822, 17200, 5368, 1669, 4119, 11711, 15575, 3036, 5879, 10437, 1155, 11, 609, 6507, 25170, 90, 915, 25, 220, 18, 22, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestColumnPageIndex property-tests column page index generation for both
// buffer-backed and file-backed columns, across ten value types. Each
// scenario builds a quick.Check property that round-trips generated rows
// through the shared config.test helper.
func TestColumnPageIndex(t *testing.T) {
	for _, config := range [...]struct {
		name string
		test func(*testing.T, rows) bool
	}{
		{
			name: "buffer",
			test: testColumnPageIndexWithBuffer,
		},
		{
			name: "file",
			test: testColumnPageIndexWithFile,
		},
	} {
		t.Run(config.name, func(t *testing.T) {
			for _, test := range [...]struct {
				scenario string
				function func(*testing.T) interface{}
			}{
				{
					scenario: "boolean",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value bool }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "int32",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value int32 }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "int64",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value int64 }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "int96",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value deprecated.Int96 }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "uint32",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value uint32 }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "uint64",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value uint64 }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "float32",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value float32 }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "float64",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value float64 }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "string",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value string }) bool { return config.test(t, makeRows(rows)) }
					},
				},
				{
					scenario: "uuid",
					function: func(t *testing.T) interface{} {
						return func(rows []struct{ Value uuid.UUID }) bool { return config.test(t, makeRows(rows)) }
					},
				},
			} {
				// quick.Check feeds randomly generated row slices into the
				// property built above; any false result or error fails.
				t.Run(test.scenario, func(t *testing.T) {
					if err := quick.Check(test.function(t), nil); err != nil {
						t.Error(err)
					}
				})
			}
		})
	}
}
explode_data.jsonl/5821
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1126 }
[ 2830, 3393, 2933, 92938, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2193, 1669, 2088, 48179, 1235, 341, 197, 11609, 914, 198, 197, 18185, 2915, 4071, 8840, 836, 11, 6978, 8, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransactionFetcherBroadcasts drives the TxFetcher through a scripted
// scenario: three announced transactions sit in different stages (waiting,
// queued, fetching); a broadcast delivery cleans everything except the
// dangling in-flight request, which is cleared only by a direct delivery.
func TestTransactionFetcherBroadcasts(t *testing.T) {
	testTransactionFetcherParallel(t, txFetcherTest{
		init: func() *TxFetcher {
			return NewTxFetcher(
				func(common.Hash) bool { return false },
				func(txs []*types.Transaction) []error { return make([]error, len(txs)) },
				func(string, []common.Hash) error { return nil },
			)
		},
		steps: []interface{}{
			// Set up three transactions to be in different stats, waiting, queued and fetching
			doTxNotify{peer: "A", hashes: []common.Hash{testTxsHashes[0]}},
			doWait{time: txArriveTimeout, step: true},
			doTxNotify{peer: "A", hashes: []common.Hash{testTxsHashes[1]}},
			doWait{time: txArriveTimeout, step: true},
			doTxNotify{peer: "A", hashes: []common.Hash{testTxsHashes[2]}},

			isWaiting(map[string][]common.Hash{
				"A": {testTxsHashes[2]},
			}),
			isScheduled{
				tracking: map[string][]common.Hash{
					"A": {testTxsHashes[0], testTxsHashes[1]},
				},
				fetching: map[string][]common.Hash{
					"A": {testTxsHashes[0]},
				},
			},
			// Broadcast all the transactions and ensure everything gets cleaned
			// up, but the dangling request is left alone to avoid doing multiple
			// concurrent requests.
			doTxEnqueue{peer: "A", txs: []*types.Transaction{testTxs[0], testTxs[1], testTxs[2]}, direct: false},
			isWaiting(nil),
			isScheduled{
				tracking: nil,
				fetching: nil,
				dangling: map[string][]common.Hash{
					"A": {testTxsHashes[0]},
				},
			},
			// Deliver the requested hashes
			doTxEnqueue{peer: "A", txs: []*types.Transaction{testTxs[0], testTxs[1], testTxs[2]}, direct: true},
			isScheduled{nil, nil, nil},
		},
	})
}
explode_data.jsonl/52217
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 698 }
[ 2830, 3393, 8070, 97492, 43362, 82, 1155, 353, 8840, 836, 8, 341, 18185, 8070, 97492, 16547, 1155, 11, 259, 9770, 2995, 261, 2271, 515, 197, 28248, 25, 2915, 368, 353, 51, 9770, 2995, 261, 341, 298, 853, 1532, 51, 9770, 2995, 261, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEmptyErrorListAsErrorIsNil(t *testing.T) { errs := errors.L() err := errs.AsError() if err != nil { t.Fatalf("got error %v but want nil", err) } }
explode_data.jsonl/82009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 3522, 1454, 852, 2121, 1454, 3872, 19064, 1155, 353, 8840, 836, 8, 341, 9859, 82, 1669, 5975, 1214, 741, 9859, 1669, 70817, 20242, 1454, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 22390, 1465, 1018, 85, 714, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestNofollowLink(t *testing.T) { var tests = []string{ "[foo](http://bar.com/foo/)\n", "<p><a href=\"http://bar.com/foo/\" rel=\"nofollow\">foo</a></p>\n", "[foo](/bar/)\n", "<p><a href=\"/bar/\">foo</a></p>\n", } doTestsInlineParam(t, tests, 0, HTML_SAFELINK|HTML_NOFOLLOW_LINKS, HtmlRendererParameters{}) }
explode_data.jsonl/57354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 45, 1055, 1544, 3939, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 917, 515, 197, 197, 36864, 7975, 9533, 1254, 1110, 2257, 905, 60555, 14, 10699, 77, 756, 197, 197, 22476, 79, 1784, 64, 1801, 4070, 1254, 1110, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMakeFileConfig(t *testing.T) { var testFileName = "test.json" type testingFileConfig struct { Locale string `json:"locale"` TranslationPath string `json:"translation_path"` DefaultContext string `json:"default_context"` } var newTestingFile = testingFileConfig{ Locale: "en", TranslationPath: "translations", DefaultContext: "messages", } jsonTestData, _ := json.MarshalIndent(newTestingFile, "", "") _ = ioutil.WriteFile(testFileName, jsonTestData, 0644) defer os.Remove(testFileName) translateConfig := NewTranslationConfig(testFileName) if translateConfig.Locale != newTestingFile.Locale { t.Errorf("Locale loaded from config file is not identical with struct config") } if translateConfig.TranslationPath != newTestingFile.TranslationPath { t.Errorf("TranslationPath loaded from config file is not identical with struct config") } if translateConfig.DefaultContext != newTestingFile.DefaultContext { t.Errorf("DefaultContext loaded from config file is not identical with struct config") } }
explode_data.jsonl/81779
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 8078, 1703, 2648, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 10903, 284, 330, 1944, 4323, 698, 13158, 7497, 1703, 2648, 2036, 341, 197, 197, 19231, 688, 914, 1565, 2236, 2974, 14484, 8805, 197, 197, 24412, 1820, 914, 1565, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestChangefeedColumnFamily verifies that changefeeds reject tables with
// more than one column family — both when the feed is created on such a
// table and when a second family is added after the feed has started.
func TestChangefeedColumnFamily(t *testing.T) {
	defer leaktest.AfterTest(t)()

	testFn := func(t *testing.T, db *gosql.DB, f cdctest.TestFeedFactory) {
		sqlDB := sqlutils.MakeSQLRunner(db)

		// Table with 2 column families.
		sqlDB.Exec(t, `CREATE TABLE foo (a INT PRIMARY KEY, b STRING, FAMILY (a), FAMILY (b))`)
		// The statement syntax differs by variant; the subtest name selects it.
		if strings.Contains(t.Name(), `enterprise`) {
			sqlDB.ExpectErr(t, `exactly 1 column family`, `CREATE CHANGEFEED FOR foo`)
		} else {
			sqlDB.ExpectErr(t, `exactly 1 column family`, `EXPERIMENTAL CHANGEFEED FOR foo`)
		}

		// Table with a second column family added after the changefeed starts.
		sqlDB.Exec(t, `CREATE TABLE bar (a INT PRIMARY KEY, FAMILY f_a (a))`)
		sqlDB.Exec(t, `INSERT INTO bar VALUES (0)`)
		bar := feed(t, f, `CREATE CHANGEFEED FOR bar`)
		defer closeFeed(t, bar)
		assertPayloads(t, bar, []string{
			`bar: [0]->{"after": {"a": 0}}`,
		})
		sqlDB.Exec(t, `ALTER TABLE bar ADD COLUMN b STRING CREATE FAMILY f_b`)
		sqlDB.Exec(t, `INSERT INTO bar VALUES (1)`)
		// The running feed should now fail with the same single-family error.
		if _, err := bar.Next(); !testutils.IsError(err, `exactly 1 column family`) {
			t.Errorf(`expected "exactly 1 column family" error got: %+v`, err)
		}
	}

	t.Run(`sinkless`, sinklessTest(testFn))
	t.Run(`enterprise`, enterpriseTest(testFn))
}
explode_data.jsonl/7044
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 518 }
[ 2830, 3393, 1143, 524, 823, 12051, 2933, 15192, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 18185, 24911, 1669, 2915, 1155, 353, 8840, 836, 11, 2927, 353, 34073, 1470, 22537, 11, 282, 15307, 67880, 8787...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestCreateTarStreamIncludeParentDir builds a fixture directory tree, tars
// it with includeParentDir=true, and verifies every archive entry is
// prefixed with the temp directory's base name.
func TestCreateTarStreamIncludeParentDir(t *testing.T) {
	tempDir, err := ioutil.TempDir("", "testtar")
	defer os.RemoveAll(tempDir)
	if err != nil {
		t.Fatalf("Cannot create temp directory for test: %v", err)
	}
	modificationDate := time.Date(2011, time.March, 5, 23, 30, 1, 0, time.UTC)
	testDirs := []dirDesc{
		{"dir01", modificationDate, 0700},
		{"dir01/.git", modificationDate, 0755},
		{"dir01/dir02", modificationDate, 0755},
		{"dir01/dir03", modificationDate, 0775},
	}
	testFiles := []fileDesc{
		{"dir01/dir02/test1.txt", modificationDate, 0700, "Test1 file content", false, ""},
		{"dir01/test2.git", modificationDate, 0660, "Test2 file content", false, ""},
		{"dir01/dir03/test3.txt", modificationDate, 0444, "Test3 file content", false, ""},
		// The last entry is flagged (true) — presumably "expected to be
		// ignored" given the name, but that is defined by fileDesc elsewhere;
		// TODO confirm against the fileDesc declaration.
		{"dir01/.git/hello.txt", modificationDate, 0600, "Ignore file content", true, ""},
	}
	if err = createTestFiles(tempDir, testDirs, testFiles, []linkDesc{}); err != nil {
		t.Fatalf("Cannot create test files: %v", err)
	}
	th := New(fs.NewFileSystem())
	tarFile, err := ioutil.TempFile("", "testtarout")
	if err != nil {
		t.Fatalf("Unable to create temporary file %v", err)
	}
	defer os.Remove(tarFile.Name())
	err = th.CreateTarStream(tempDir, true, tarFile)
	if err != nil {
		t.Fatalf("Unable to create tar file %v", err)
	}
	tarFile.Close()
	// includeParentDir was true, so expected names gain the parent prefix.
	for i := range testDirs {
		testDirs[i].name = filepath.ToSlash(filepath.Join(filepath.Base(tempDir), testDirs[i].name))
	}
	for i := range testFiles {
		testFiles[i].name = filepath.ToSlash(filepath.Join(filepath.Base(tempDir), testFiles[i].name))
	}
	verifyTarFile(t, tarFile.Name(), testDirs, testFiles, []linkDesc{})
}
explode_data.jsonl/40323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 620 }
[ 2830, 3393, 4021, 62733, 3027, 22283, 8387, 6184, 1155, 353, 8840, 836, 8, 341, 16280, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 1944, 26737, 1138, 16867, 2643, 84427, 9758, 6184, 340, 743, 1848, 961, 2092, 341, 197, 3244, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestSlaveStorage_getSlaveIds(t *testing.T) { assert := assert.New(t) slaveStorage := newSlaveStorage() assert.Equal(0, len(slaveStorage.slaves)) slaveId := "1" slaveHostname := "hn1" slaveStorage.checkAndAdd(slaveId, slaveHostname) assert.Equal(1, len(slaveStorage.getSlaveIds())) slaveId = "2" slaveHostname = "hn2" slaveStorage.checkAndAdd(slaveId, slaveHostname) assert.Equal(2, len(slaveStorage.getSlaveIds())) slaveIds := slaveStorage.getSlaveIds() slaveIdsMap := make(map[string]bool, len(slaveIds)) for _, s := range slaveIds { slaveIdsMap[s] = true } _, ok := slaveIdsMap["1"] assert.Equal(ok, true) _, ok = slaveIdsMap["2"] assert.Equal(ok, true) }
explode_data.jsonl/63771
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 280 }
[ 2830, 3393, 95960, 5793, 3062, 95960, 12701, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 78626, 523, 5793, 1669, 501, 95960, 5793, 741, 6948, 12808, 7, 15, 11, 2422, 55691, 523, 5793, 25101, 4693, 4390, 78626, 523, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestApplicationContext_AutoWireBeans covers container autowiring. The
// first subtest expects Refresh to fail when two *int beans match a single
// injection point; the remainder registers named beans and checks that a
// slice injection returns beans in selector order.
func TestApplicationContext_AutoWireBeans(t *testing.T) {
	t.Run("wired error", func(t *testing.T) {
		c := gs.New()
		obj := &TestObject{}
		c.Object(obj)
		// Two *int beans so by-type injection is ambiguous.
		i := int(3)
		c.Object(&i).Name("int_ptr")
		i2 := int(3)
		c.Object(&i2).Name("int_ptr_2")
		err := c.Refresh()
		assert.Error(t, err, "\"TestObject.IntPtrByType\" wired error: found 2 beans, bean:\"\\?\" type:\"\\*int\"")
	})

	c, ch := container()
	obj := &TestObject{}
	c.Object(obj)
	i := int(3)
	c.Object(&i).Name("int_ptr")
	b := TestBincoreng{1}
	c.Object(&b).Name("struct_ptr").Export((*fmt.Stringer)(nil))
	f1 := float32(11.0)
	c.Object(&f1).Name("float_ptr_1")
	f2 := float32(12.0)
	c.Object(&f2).Name("float_ptr_2")
	err := c.Refresh()
	assert.Nil(t, err)
	p := <-ch
	var ff []*float32
	err = p.BeanRegistry().Get(&ff, "float_ptr_2", "float_ptr_1")
	assert.Nil(t, err)
	// Result order follows the selector arguments: f2 first, then f1.
	assert.Equal(t, ff, []*float32{&f2, &f1})
	fmt.Printf("%+v\n", obj)
}
explode_data.jsonl/17396
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 429 }
[ 2830, 3393, 19736, 1566, 1535, 37845, 81558, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 86, 2690, 1465, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1444, 1669, 28081, 7121, 2822, 197, 22671, 1669, 609, 2271, 1190, 16094, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewPeerHandlerOnRaftPrefix(t *testing.T) { ph := newPeerHandler(zap.NewExample(), &fakeServer{cluster: &fakeCluster{}}, fakeRaftHandler, nil) srv := httptest.NewServer(ph) defer srv.Close() tests := []string{ rafthttp.RaftPrefix, rafthttp.RaftPrefix + "/hello", } for i, tt := range tests { resp, err := http.Get(srv.URL + tt) if err != nil { t.Fatalf("unexpected http.Get error: %v", err) } body, err := ioutil.ReadAll(resp.Body) if err != nil { t.Fatalf("unexpected ioutil.ReadAll error: %v", err) } if w := "test data"; string(body) != w { t.Errorf("#%d: body = %s, want %s", i, body, w) } } }
explode_data.jsonl/74234
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 3564, 30888, 3050, 1925, 55535, 723, 14335, 1155, 353, 8840, 836, 8, 341, 197, 759, 1669, 501, 30888, 3050, 13174, 391, 7121, 13314, 1507, 609, 30570, 5475, 90, 18855, 25, 609, 30570, 28678, 6257, 2137, 12418, 55535, 723, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStepIntoFunction(t *testing.T) { withTestProcess("teststep", t, func(p *proc.Target, fixture protest.Fixture) { // Continue until breakpoint assertNoError(p.Continue(), t, "Continue() returned an error") // Step into function assertNoError(p.Step(), t, "Step() returned an error") // We should now be inside the function. loc, err := p.CurrentThread().Location() if err != nil { t.Fatal(err) } if loc.Fn.Name != "main.callme" { t.Fatalf("expected to be within the 'callme' function, was in %s instead", loc.Fn.Name) } if !strings.Contains(loc.File, "teststep") { t.Fatalf("debugger stopped at incorrect location: %s:%d", loc.File, loc.Line) } if loc.Line != 8 { t.Fatalf("debugger stopped at incorrect line: %d", loc.Line) } }) }
explode_data.jsonl/56244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 8304, 26591, 5152, 1155, 353, 8840, 836, 8, 341, 46948, 2271, 7423, 445, 1944, 9520, 497, 259, 11, 2915, 1295, 353, 15782, 35016, 11, 12507, 8665, 991, 12735, 8, 341, 197, 197, 322, 15003, 3080, 52745, 198, 197, 6948, 2753...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAutoFilterError(t *testing.T) { outFile := filepath.Join("test", "TestAutoFilterError%d.xlsx") f, err := prepareTestBook1() if !assert.NoError(t, err) { t.FailNow() } formats := []string{ `{"column":"B","expression":"x <= 1 and x >= blanks"}`, `{"column":"B","expression":"x -- y or x == *2*"}`, `{"column":"B","expression":"x != y or x ? *2"}`, `{"column":"B","expression":"x -- y o r x == *2"}`, `{"column":"B","expression":"x -- y"}`, `{"column":"A","expression":"x -- y"}`, } for i, format := range formats { t.Run(fmt.Sprintf("Expression%d", i+1), func(t *testing.T) { err = f.AutoFilter("Sheet3", "D4", "B1", format) if assert.Error(t, err) { assert.NoError(t, f.SaveAs(fmt.Sprintf(outFile, i+1))) } }) } }
explode_data.jsonl/36986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 13253, 5632, 1454, 1155, 353, 8840, 836, 8, 341, 13967, 1703, 1669, 26054, 22363, 445, 1944, 497, 330, 2271, 13253, 5632, 1454, 14841, 46838, 5130, 1166, 11, 1848, 1669, 10549, 2271, 7134, 16, 741, 743, 753, 2207, 35699, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestScriptMatch(t *testing.T) { client := &livekit.ClientInfo{ Protocol: 6, Browser: "chrome", Sdk: 3, // android DeviceModel: "12345", } type testcase struct { name string expr string result bool err bool } cases := []testcase{ {name: "simple match", expr: `c.protocol > 5`, result: true}, {name: "invalid expr", expr: `cc.protocol > 5`, err: true}, {name: "unexist field", expr: `c.protocols > 5`, err: true}, {name: "combined condition", expr: `c.protocol > 5 && (c.sdk=="ANDROID" || c.sdk=="IOS")`, result: true}, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { match := &ScriptMatch{Expr: c.expr} m, err := match.Match(client) if c.err { require.Error(t, err) } else { require.Equal(t, c.result, m) } }) } }
explode_data.jsonl/35110
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 369 }
[ 2830, 3393, 5910, 8331, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 609, 24822, 8226, 11716, 1731, 515, 197, 197, 20689, 25, 262, 220, 21, 345, 197, 12791, 4012, 25, 257, 330, 31902, 756, 197, 7568, 7584, 25, 260, 220, 18, 11, 442, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_TransactionComplete(t *testing.T) { mct := newMemcacheTest(defaultConfig) trans := mct.genTransaction( textParseNoFail(t, "set k 0 0 5\r\nvalue\r\n"), textParseNoFail(t, "STORED\r\n"), ) assert.Equal(t, common.OK_STATUS, trans.Status) assert.Equal(t, uint64(20), trans.BytesOut) assert.Equal(t, uint64(8), trans.BytesIn) assert.Equal(t, trans, mct.transactions[0]) event := makeTransactionEvent(t, trans) assert.Equal(t, "memcache", event["type"]) assert.Equal(t, common.OK_STATUS, event["status"]) }
explode_data.jsonl/15059
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 34932, 1311, 12548, 1155, 353, 8840, 836, 8, 341, 2109, 302, 1669, 501, 18816, 9360, 2271, 18978, 2648, 340, 72453, 1669, 296, 302, 22822, 8070, 1006, 197, 15425, 14463, 2753, 19524, 1155, 11, 330, 746, 595, 220, 15, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLibrato_ClientError(t *testing.T) { handler := &GrumpyHandler{ResponseCodes: []int{401}} server := httptest.NewServer(handler) defer server.Close() config := GetConfig() config.LibratoUrl, _ = url.Parse(server.URL) config.LibratoUser = "user" config.LibratoToken = "token" measurements := make(chan Measurement, 10) librato := NewLibratoOutputter(measurements, config) if librato.sendWithBackoff([]byte(`{}`)) { t.Errorf("Retry should not have succeeded due to non-server error.") } }
explode_data.jsonl/51437
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 9194, 81, 4330, 46102, 1454, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 609, 6464, 6752, 3050, 90, 2582, 20871, 25, 3056, 396, 90, 19, 15, 16, 11248, 41057, 1669, 54320, 70334, 7121, 5475, 36514, 340, 16867, 3538, 10421, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidatorsQuery(t *testing.T) { cleanup, valPubKeys, operAddrs, port := InitializeTestLCD(t, 1, []sdk.AccAddress{}, true) defer cleanup() require.Equal(t, 1, len(valPubKeys)) require.Equal(t, 1, len(operAddrs)) validators := getValidators(t, port) require.Equal(t, 1, len(validators), fmt.Sprintf("%+v", validators)) // make sure all the validators were found (order unknown because sorted by operator addr) foundVal := false if validators[0].ConsPubKey == valPubKeys[0] { foundVal = true } require.True(t, foundVal, "pk %v, operator %v", operAddrs[0], validators[0].OperatorAddress) }
explode_data.jsonl/25408
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 31748, 2859, 1155, 353, 8840, 836, 8, 341, 1444, 60639, 11, 1044, 29162, 8850, 11, 1997, 2212, 5428, 11, 2635, 1669, 9008, 2271, 64003, 1155, 11, 220, 16, 11, 3056, 51295, 77538, 4286, 22655, 830, 340, 16867, 21290, 2822, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_PostConfig_Anonymous(t *testing.T) { setup(t) defer cleanup(t) for _, c := range allClients { w := request(t, "POST", c.Endpoint, nil) assert.Equal(t, http.StatusUnauthorized, w.Code) } }
explode_data.jsonl/37309
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 66726, 2648, 1566, 6280, 89392, 1155, 353, 8840, 836, 8, 341, 84571, 1155, 340, 16867, 21290, 1155, 692, 2023, 8358, 272, 1669, 2088, 678, 47174, 341, 197, 6692, 1669, 1681, 1155, 11, 330, 2946, 497, 272, 90409, 11, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestPipeline_Secret_ParseRepo_failure(t *testing.T) { // setup tests tests := []struct { secret *Secret org string repo string want error }{ { // failure with bad org secret: &Secret{ Name: "foo", Value: "bar", Key: "octocat/helloworld/foo", Engine: "native", Type: "repo", }, org: "wrongorg", repo: "helloworld", }, { // failure with bad repo secret: &Secret{ Name: "foo", Value: "bar", Key: "octocat/helloworld/foo", Engine: "native", Type: "repo", }, org: "octocat", repo: "badrepo", }, { // failure with bad key secret: &Secret{ Name: "foo", Value: "bar", Key: "octocat", Engine: "native", Type: "repo", }, org: "octocat", }, { // failure with bad engine secret: &Secret{ Name: "foo", Value: "bar", Key: "octocat", Engine: "invalid", Type: "org", }, org: "octocat", }, } // run tests for _, test := range tests { _, _, _, err := test.secret.ParseRepo(test.org, test.repo) if err == nil { t.Errorf("ParseRepo should have failed") } } }
explode_data.jsonl/51852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 581 }
[ 2830, 3393, 34656, 1098, 50856, 77337, 25243, 43618, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 7032, 198, 78216, 1669, 3056, 1235, 341, 197, 197, 20474, 353, 19773, 198, 197, 87625, 262, 914, 198, 197, 17200, 5368, 256, 914, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMsgAddLiquidity(t *testing.T) { tests := []struct { name string msg MsgAddLiquidity expectPass bool }{ {"no deposit coin", NewMsgAddLiquidity(sdk.Coin{}, amt, sdk.OneInt(), deadline, sender), false}, {"zero deposit coin", NewMsgAddLiquidity(sdk.NewCoin(denom1, sdk.ZeroInt()), amt, sdk.OneInt(), deadline, sender), false}, {"invalid withdraw amount", NewMsgAddLiquidity(input, sdk.ZeroInt(), sdk.OneInt(), deadline, sender), false}, {"deadline not initialized", NewMsgAddLiquidity(input, amt, sdk.OneInt(), emptyTime, sender), false}, {"empty sender", NewMsgAddLiquidity(input, amt, sdk.OneInt(), deadline, emptyAddr), false}, {"valid MsgAddLiquidity", NewMsgAddLiquidity(input, amt, sdk.OneInt(), deadline, sender), true}, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { err := tc.msg.ValidateBasic() if tc.expectPass { require.Nil(t, err) } else { require.NotNil(t, err) } }) } }
explode_data.jsonl/70504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 6611, 2212, 43, 5011, 18518, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 21169, 286, 24205, 2212, 43, 5011, 18518, 198, 197, 24952, 12187, 1807, 198, 197, 59403, 197, 197, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestValidation exercises the validator helpers: required values,
// inclusion in a string set, and Mongo id references against a live
// "bongotest" database (dropped again when the subtest finishes).
func TestValidation(t *testing.T) {
	Convey("Validation", t, func() {
		Convey("ValidateRequired()", func() {
			So(ValidateRequired("foo"), ShouldEqual, true)
			So(ValidateRequired(""), ShouldEqual, false)
			So(ValidateRequired(0), ShouldEqual, false)
			So(ValidateRequired(1), ShouldEqual, true)
		})
		Convey("ValidateInclusionIn()", func() {
			So(ValidateInclusionIn("foo", []string{"foo", "bar", "baz"}), ShouldEqual, true)
			So(ValidateInclusionIn("bing", []string{"foo", "bar", "baz"}), ShouldEqual, false)
		})
		Convey("ValidateMongoIdRef()", func() {
			connection := getConnection()
			defer func() {
				connection.Session.DB("bongotest").DropDatabase()
			}()
			// Make the doc
			doc := &noHookDocument{}
			err := connection.Collection("docs").Save(doc)
			So(err, ShouldEqual, nil)
			// A saved id validates against its own collection only.
			So(ValidateMongoIdRef(doc.Id, connection.Collection("docs")), ShouldEqual, true)
			So(ValidateMongoIdRef(bson.NewObjectId(), connection.Collection("docs")), ShouldEqual, false)
			So(ValidateMongoIdRef(bson.NewObjectId(), connection.Collection("other_collection")), ShouldEqual, false)
		})
	})
}
explode_data.jsonl/67576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 13799, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 13799, 497, 259, 11, 2915, 368, 341, 197, 93070, 5617, 445, 17926, 8164, 50514, 2915, 368, 341, 298, 76912, 7, 17926, 8164, 445, 7975, 3975, 12260, 2993, 11, 830, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBOM(t *testing.T) { // A byte order mark should not be parsed as an identifier kind, _ := lexToken("\uFEFF.") test.AssertEqual(t, kind, TDelimDot) }
explode_data.jsonl/47978
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 61 }
[ 2830, 3393, 33, 1898, 1155, 353, 8840, 836, 8, 341, 197, 322, 362, 4922, 1973, 1868, 1265, 537, 387, 15676, 438, 458, 12816, 198, 197, 15314, 11, 716, 1669, 22429, 3323, 4921, 84, 11419, 1748, 13053, 18185, 11711, 2993, 1155, 11, 3093...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestParseGrossAmountRemittanceReaderParseError(t *testing.T) { var line = "{8500}USD1234.56Z*" r := NewReader(strings.NewReader(line)) r.line = line err := r.parseGrossAmountRemittanceDocument() expected := r.parseError(fieldError("Amount", ErrNonAmount, "1234.56Z")).Error() require.EqualError(t, err, expected) _, err = r.Read() expected = r.parseError(fieldError("Amount", ErrNonAmount, "1234.56Z")).Error() require.EqualError(t, err, expected) }
explode_data.jsonl/41409
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 14463, 38, 2128, 10093, 6590, 87191, 5062, 14463, 1454, 1155, 353, 8840, 836, 8, 341, 2405, 1555, 284, 13868, 23, 20, 15, 15, 92, 26749, 16, 17, 18, 19, 13, 20, 21, 57, 9, 698, 7000, 1669, 1532, 5062, 51442, 68587, 879...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTomlNoProviders(t *testing.T) { assert := assert.New(t) settings := testutil.Settings().With(&print.Settings{ ShowHeader: true, ShowInputs: true, ShowOutputs: true, ShowProviders: false, ShowRequirements: true, }).Build() expected, err := testutil.GetExpected("toml", "toml-NoProviders") assert.Nil(err) options := module.NewOptions() module, err := testutil.GetModule(options) assert.Nil(err) printer := NewTOML(settings) actual, err := printer.Print(module, settings) assert.Nil(err) assert.Equal(expected, actual) }
explode_data.jsonl/36764
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 24732, 75, 2753, 37351, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 62930, 1669, 1273, 1314, 27000, 1005, 2354, 2099, 1350, 27000, 515, 197, 197, 7812, 4047, 25, 981, 830, 345, 197, 197, 7812, 31946, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewClass(t *testing.T) { got := NewClass(1010, "1010", "Name", "Class", "Teacher", "Time") if err := got.Valid(); err != nil { t.Error(err) } }
explode_data.jsonl/14390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 3564, 1957, 1155, 353, 8840, 836, 8, 341, 3174, 354, 1669, 1532, 1957, 7, 16, 15, 16, 15, 11, 330, 16, 15, 16, 15, 497, 330, 675, 497, 330, 1957, 497, 330, 45065, 497, 330, 1462, 1138, 743, 1848, 1669, 2684, 47156, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestRemittanceOriginatorIdentificationNumberInvalid(t *testing.T) { ro := mockRemittanceOriginator() ro.IdentificationCode = PICDateBirthPlace ro.IdentificationNumber = "zz" err := ro.Validate() require.EqualError(t, err, fieldError("IdentificationNumber", ErrInvalidProperty, ro.IdentificationNumber).Error()) }
explode_data.jsonl/32947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 6590, 87191, 13298, 850, 95212, 2833, 7928, 1155, 353, 8840, 836, 8, 341, 197, 299, 1669, 7860, 6590, 87191, 13298, 850, 741, 197, 299, 6444, 306, 2404, 2078, 284, 60606, 1916, 31478, 17371, 198, 197, 299, 6444, 306, 2404, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEvalWriteMapOutput(t *testing.T) { ctx := new(MockEvalContext) ctx.StateState = NewState() ctx.StateLock = new(sync.RWMutex) cases := []struct { name string cfg *ResourceConfig err bool }{ { // Eval should recognize a single map in a slice, and collapse it // into the map value "single-map", &ResourceConfig{ Config: map[string]interface{}{ "value": []map[string]interface{}{ map[string]interface{}{"a": "b"}, }, }, }, false, }, { // we can't apply a multi-valued map to a variable, so this should error "multi-map", &ResourceConfig{ Config: map[string]interface{}{ "value": []map[string]interface{}{ map[string]interface{}{"a": "b"}, map[string]interface{}{"c": "d"}, }, }, }, true, }, } for _, tc := range cases { evalNode := &EvalWriteOutput{Name: tc.name} ctx.InterpolateConfigResult = tc.cfg t.Run(tc.name, func(t *testing.T) { _, err := evalNode.Eval(ctx) if err != nil && !tc.err { t.Fatal(err) } }) } }
explode_data.jsonl/8390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 472 }
[ 2830, 3393, 54469, 7985, 2227, 5097, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 501, 66436, 54469, 1972, 340, 20985, 18942, 1397, 284, 1532, 1397, 741, 20985, 18942, 11989, 284, 501, 97233, 2013, 15210, 9371, 692, 1444, 2264, 1669, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCloudEventInit(t *testing.T) { s := &CloudEvent{} expectedError := fmt.Errorf(cloudEventErrMsg, "Missing cloudevent url") var Tests = []struct { cloudevent config.CloudEvent err error }{ {config.CloudEvent{Url: "foo"}, nil}, {config.CloudEvent{}, expectedError}, } for _, tt := range Tests { c := &config.Config{} c.Handler.CloudEvent = tt.cloudevent if err := s.Init(c); !reflect.DeepEqual(err, tt.err) { t.Fatalf("Init(): %v", err) } } }
explode_data.jsonl/39363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 16055, 1556, 3803, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 16055, 1556, 16094, 42400, 1454, 1669, 8879, 13080, 9849, 2950, 1556, 75449, 11, 330, 25080, 1185, 283, 450, 684, 2515, 5130, 2405, 20150, 284, 3056, 1235, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStatsLogsAverageAndTotal(t *testing.T) { state := makeState() state.logResponse("A", 5) state.logResponse("A", 11) aStats := state.getStats("A") assertStats(t, aStats, float64(8), 2) }
explode_data.jsonl/67661
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 16635, 51053, 26292, 3036, 7595, 1155, 353, 8840, 836, 8, 341, 24291, 1669, 1281, 1397, 741, 24291, 1665, 2582, 445, 32, 497, 220, 20, 340, 24291, 1665, 2582, 445, 32, 497, 220, 16, 16, 692, 11323, 16635, 1669, 1584, 670, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNewIDFromBase58(t *testing.T) { id := gen.ID() idStr := "1" + base58.Encode(id.Bytes()) id2, err := insolar.NewIDFromBase58(idStr) require.NoError(t, err) assert.Equal(t, id, *id2) }
explode_data.jsonl/42742
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 3564, 915, 3830, 3978, 20, 23, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 4081, 9910, 741, 15710, 2580, 1669, 330, 16, 1, 488, 2331, 20, 23, 50217, 3724, 36868, 2398, 15710, 17, 11, 1848, 1669, 1640, 7417, 7121, 915, 383...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTx_CreateBucket_ErrBucketExists(t *testing.T) { db := MustOpenDB() defer db.MustClose() // Create a bucket. if err := db.Update(func(tx *bolt.Tx) error { if _, err := tx.CreateBucket([]byte("widgets")); err != nil { t.Fatal(err) } return nil }); err != nil { t.Fatal(err) } // Create the same bucket again. if err := db.Update(func(tx *bolt.Tx) error { if _, err := tx.CreateBucket([]byte("widgets")); err != bolt.ErrBucketExists { t.Fatalf("unexpected error: %s", err) } return nil }); err != nil { t.Fatal(err) } }
explode_data.jsonl/1692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 31584, 34325, 36018, 93623, 36018, 15575, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 15465, 5002, 3506, 741, 16867, 2927, 50463, 7925, 2822, 197, 322, 4230, 264, 15621, 624, 743, 1848, 1669, 2927, 16689, 18552, 27301, 353, 524...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVMCBuildImage(t *testing.T) { testForSkip(t) var ctxt = context.Background() //get the tarball for codechain tarRdr, err := getCodeChainBytesInMem() if err != nil { t.Fail() t.Logf("Error reading tar file: %s", err) return } c := make(chan struct{}) //creat a CreateImageReq obj and send it to VMCProcess go func() { defer close(c) cir := CreateImageReq{CCID: ccintf.CCID{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: &pb.ChaincodeID{Name: "simple"}}}, Reader: tarRdr} _, err := VMCProcess(ctxt, "Docker", cir) if err != nil { t.Fail() t.Logf("Error creating image: %s", err) return } }() //wait for VMController to complete. fmt.Println("VMCBuildImage-waiting for response") <-c }
explode_data.jsonl/74639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 296 }
[ 2830, 3393, 11187, 34, 11066, 1906, 1155, 353, 8840, 836, 8, 341, 18185, 2461, 35134, 1155, 340, 2405, 59162, 284, 2266, 19047, 2822, 197, 322, 455, 279, 12183, 3959, 369, 2038, 8819, 198, 3244, 277, 49, 3612, 11, 1848, 1669, 82873, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestResponse_GetFrameworksVerboseEqual(t *testing.T) { popr := math_rand.New(math_rand.NewSource(time.Now().UnixNano())) p := NewPopulatedResponse_GetFrameworks(popr, false) data, err := github_com_gogo_protobuf_proto.Marshal(p) if err != nil { panic(err) } msg := &Response_GetFrameworks{} if err := github_com_gogo_protobuf_proto.Unmarshal(data, msg); err != nil { panic(err) } if err := p.VerboseEqual(msg); err != nil { t.Fatalf("%#v !VerboseEqual %#v, since %v", msg, p, err) } }
explode_data.jsonl/42024
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 2582, 13614, 89615, 63404, 2993, 1155, 353, 8840, 836, 8, 341, 3223, 46288, 1669, 6888, 33864, 7121, 37270, 33864, 7121, 3608, 9730, 13244, 1005, 55832, 83819, 12145, 3223, 1669, 1532, 11598, 7757, 2582, 13614, 89615, 40148, 81,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNewInt32(t *testing.T) { testCases := []struct { name string input []int32 expect []int32 }{ { name: "test Int32 New, s nothing", input: nil, expect: nil, }, { name: "test Int32 New, inputs multiple elements", input: []int32{1, 2, 3}, expect: []int32{1, 2, 3}, }, } for _, tc := range testCases { t.Logf("running scenario: %s", tc.name) actual := NewInt32(tc.input...) validateInt32(t, actual, tc.expect) } }
explode_data.jsonl/62317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 3564, 1072, 18, 17, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 22427, 220, 3056, 396, 18, 17, 198, 197, 24952, 3056, 396, 18, 17, 198, 197, 59403, 197, 197, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSimpleObjProvider(t *testing.T) { desc := "obj_desc" simple := Simple{Name: "simple", Desc: &desc, Age: 240, Flag: true, Add: []int{12, 34, 45}} simpleObj, simpleErr := GetObject(simple) if simpleErr != nil { t.Errorf("GetObject failed, err:%s", simpleErr.Error()) return } data, dataErr := EncodeObject(simpleObj) if dataErr != nil { t.Errorf("marshal failed, err:%s", dataErr.Error()) return } simpleInfo := &Object{} simpleInfo, simpleErr = DecodeObject(data) if simpleErr != nil { t.Errorf("unmarshal failed, err:%s", simpleErr.Error()) return } simpleVal, simpleErr := GetObjectValue(simple) if simpleErr != nil { t.Errorf("GetEntityModel failed, err:%s", simpleErr.Error()) return } data, dataErr = EncodeObjectValue(simpleVal) if dataErr != nil { t.Errorf("marshal failed, err:%s", dataErr.Error()) return } simpleVal, simpleErr = DecodeObjectValue(data) if simpleErr != nil { t.Errorf("unmarshal failed, err:%s", simpleErr.Error()) return } simpleModel, simpleErr := GetEntityModel(simpleInfo) if simpleErr != nil { t.Errorf("GetEntityModel failed, err:%s", simpleErr.Error()) return } sVal := reflect.ValueOf(simpleVal) simpleModel, simpleErr = SetModelValue(simpleModel, newValue(sVal)) if simpleErr != nil { t.Errorf("SetModelValue failed, err:%s", simpleErr.Error()) return } }
explode_data.jsonl/3459
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 531 }
[ 2830, 3393, 16374, 5261, 5179, 1155, 353, 8840, 836, 8, 341, 41653, 1669, 330, 2295, 10986, 698, 1903, 6456, 1669, 8993, 63121, 25, 330, 22944, 497, 31185, 25, 609, 8614, 11, 13081, 25, 220, 17, 19, 15, 11, 22666, 25, 830, 11, 2691,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestGlobs(t *testing.T) { dir := fs.NewDir(t, "fixtures", fs.WithFile("some.foo.js", "foo", fs.WithMode(0755)), fs.WithFile("some.other.bar.js", "bar", fs.WithMode(0755))) defer dir.Remove() type args struct { patterns []string } tests := []struct { name string args args want []string }{ { name: "find one", args: args{[]string{dir.Path() + "/some.foo.js"}}, want: []string{dir.Join("some.foo.js")}, }, { name: "find all", args: args{[]string{dir.Path() + "/*.js"}}, want: []string{dir.Join("some.foo.js"), dir.Join("some.other.bar.js")}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := Globs(tt.args.patterns); !reflect.DeepEqual(got, tt.want) { t.Errorf("Globs() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/72030
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 370 }
[ 2830, 3393, 38, 68164, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 8619, 7121, 6184, 1155, 11, 330, 45247, 756, 197, 53584, 26124, 1703, 445, 14689, 58432, 2857, 497, 330, 7975, 497, 8619, 26124, 3636, 7, 15, 22, 20, 20, 6965, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAnonymousSameStructDifferentTags(t *testing.T) { validate := New() validate.RegisterTagNameFunc(func(fld reflect.StructField) string { name := strings.SplitN(fld.Tag.Get("json"), ",", 2)[0] if name == "-" { return "" } return name }) type Test struct { A interface{} } tst := &Test{ A: struct { A string `validate:"required"` }{ A: "", }, } err := validate.Struct(tst) NotEqual(t, err, nil) errs := err.(ValidationErrors) Equal(t, len(errs), 1) AssertError(t, errs, "Test.A.A", "Test.A.A", "A", "A", "required") tst = &Test{ A: struct { A string `validate:"omitempty,required"` }{ A: "", }, } err = validate.Struct(tst) Equal(t, err, nil) }
explode_data.jsonl/77218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 318 }
[ 2830, 3393, 32684, 19198, 9422, 69123, 15930, 1155, 353, 8840, 836, 8, 1476, 197, 7067, 1669, 1532, 741, 197, 7067, 19983, 22616, 9626, 18552, 955, 507, 8708, 51445, 1877, 8, 914, 341, 197, 11609, 1669, 9069, 19823, 45, 955, 507, 23676,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateBootstrap(t *testing.T) { for _, tc := range []struct { burl string h header wantError bool }{ {"", nil, true}, {"619867110810.dkr.ecr.us-west-2.amazonaws.com/reflowbootstrap:reflowbootstrap", nil, true}, {"http://path_to_bootstrap", nil, true}, {"https://path_to_bootstrap", &mockHeader{err: fmt.Errorf("test error")}, true}, {"https://path_to_bootstrap", &mockHeader{resp: &http.Response{StatusCode: http.StatusForbidden}}, true}, {"https://path_to_bootstrap", &mockHeader{resp: &http.Response{StatusCode: http.StatusOK, Header: map[string][]string{"Content-Type": {"text/plain"}}}}, true}, {"https://path_to_bootstrap", &mockHeader{resp: &http.Response{StatusCode: http.StatusOK, Header: map[string][]string{"Content-Type": {"binary/octet-stream"}}}}, false}, } { got := defaultValidateBootstrap(tc.burl, tc.h) if tc.wantError != (got != nil) { t.Errorf("validateBootstrap(%s): got: %v want: %v", tc.burl, got, tc.wantError) } } }
explode_data.jsonl/42285
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 17926, 45511, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 2233, 1085, 414, 914, 198, 197, 9598, 260, 4247, 198, 197, 50780, 1454, 1807, 198, 197, 59403, 197, 197, 4913, 497, 2092, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSyncPodWithPullPolicy(t *testing.T) { dm, fakeDocker := newTestDockerManagerWithRealImageManager() puller := dm.dockerPuller.(*FakeDockerPuller) puller.HasImages = []string{"foo/existing_one:v1", "foo/want:latest"} dm.podInfraContainerImage = "foo/infra_image:v1" pod := makePod("foo", &api.PodSpec{ Containers: []api.Container{ {Name: "bar", Image: "foo/pull_always_image:v1", ImagePullPolicy: api.PullAlways}, {Name: "bar2", Image: "foo/pull_if_not_present_image:v1", ImagePullPolicy: api.PullIfNotPresent}, {Name: "bar3", Image: "foo/existing_one:v1", ImagePullPolicy: api.PullIfNotPresent}, {Name: "bar4", Image: "foo/want:latest", ImagePullPolicy: api.PullIfNotPresent}, {Name: "bar5", Image: "foo/pull_never_image:v1", ImagePullPolicy: api.PullNever}, }, }) expectedResults := []*kubecontainer.SyncResult{ //Sync result for infra container {kubecontainer.StartContainer, PodInfraContainerName, nil, ""}, {kubecontainer.SetupNetwork, kubecontainer.GetPodFullName(pod), nil, ""}, //Sync result for user containers {kubecontainer.StartContainer, "bar", nil, ""}, {kubecontainer.StartContainer, "bar2", nil, ""}, {kubecontainer.StartContainer, "bar3", nil, ""}, {kubecontainer.StartContainer, "bar4", nil, ""}, {kubecontainer.StartContainer, "bar5", images.ErrImageNeverPull, "Container image \"foo/pull_never_image:v1\" is not present with pull policy of Never"}, } result := runSyncPod(t, dm, fakeDocker, pod, nil, true) verifySyncResults(t, expectedResults, result) fakeDocker.Lock() defer fakeDocker.Unlock() pulledImageSorted := puller.ImagesPulled[:] sort.Strings(pulledImageSorted) assert.Equal(t, []string{"foo/infra_image:v1", "foo/pull_always_image:v1", "foo/pull_if_not_present_image:v1"}, pulledImageSorted) if len(fakeDocker.Created) != 5 { t.Errorf("unexpected containers created %v", fakeDocker.Created) } }
explode_data.jsonl/31182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 712 }
[ 2830, 3393, 12154, 23527, 2354, 36068, 13825, 1155, 353, 8840, 836, 8, 341, 2698, 76, 11, 12418, 35, 13659, 1669, 501, 2271, 35, 13659, 2043, 2354, 12768, 1906, 2043, 741, 3223, 617, 261, 1669, 28676, 91131, 36068, 261, 41399, 52317, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWalkNotFoundMaskError(t *testing.T) { // this doesn't work for WalkR newListDirs(t, nil, true, listResults{ "": {err: fs.ErrorDirNotFound}, }, errorMap{ "": nil, }, nil, ).Walk() }
explode_data.jsonl/72551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 48849, 10372, 12686, 1454, 1155, 353, 8840, 836, 8, 341, 197, 322, 419, 3171, 944, 975, 369, 12554, 49, 198, 8638, 852, 97384, 1155, 11, 2092, 11, 830, 345, 197, 14440, 9801, 515, 298, 197, 28796, 314, 615, 25, 8619, 614...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDownloadOverrideObjectWeb(t *testing.T) { var ( proxyURL = tutils.RandomProxyURL(t) baseParams = tutils.BaseAPIParams(proxyURL) bck = cmn.Bck{ Name: cos.RandString(10), Provider: cmn.ProviderAIS, } p = cmn.DefaultBckProps(bck) objName = cos.RandString(10) link = "https://raw.githubusercontent.com/NVIDIA/aistore/master/LICENSE" expectedSize int64 = 1075 newSize int64 = 10 ) clearDownloadList(t) downloadObject(t, bck, objName, link, false /*shouldBeSkipped*/) oldProps := verifyProps(t, bck, objName, expectedSize, "1") // Update the file r, _ := readers.NewRandReader(newSize, p.Cksum.Type) err := api.PutObject(api.PutObjectArgs{ BaseParams: baseParams, Bck: bck, Object: objName, Cksum: r.Cksum(), Reader: r, }) tassert.Fatalf(t, err == nil, "expected: err nil, got: %v", err) verifyProps(t, bck, objName, newSize, "2") downloadObject(t, bck, objName, link, false /*shouldBeSkipped*/) newProps := verifyProps(t, bck, objName, expectedSize, "3") tassert.Errorf( t, oldProps.Atime != newProps.Atime, "atime match (%v == %v)", oldProps.Atime, newProps.Atime, ) }
explode_data.jsonl/70391
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 518 }
[ 2830, 3393, 11377, 2177, 1190, 5981, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 197, 22803, 3144, 256, 284, 259, 6031, 26709, 16219, 3144, 1155, 340, 197, 24195, 4870, 284, 259, 6031, 13018, 7082, 4870, 65787, 3144, 340, 197, 2233, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshalEmptyStringSet(t *testing.T) { input := []byte(`{ "SS": [ ] }`) var av DynamoDBAttributeValue err := json.Unmarshal(input, &av) assert.Nil(t, err) assert.Equal(t, DataTypeStringSet, av.DataType()) assert.Equal(t, 0, len(av.StringSet())) }
explode_data.jsonl/61706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 1806, 27121, 3522, 703, 1649, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 3056, 3782, 5809, 90, 330, 1220, 788, 508, 2279, 335, 63, 692, 2405, 1822, 71813, 3506, 78554, 198, 9859, 1669, 2951, 38097, 5384, 11, 609, 402, 692,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetchesUncachedServiceAccount(t *testing.T) { ns := "myns" // Build a test client that the admission plugin can use to look up the service account missing from its cache client := fake.NewSimpleClientset(&corev1.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: DefaultServiceAccountName, Namespace: ns, }, }) admit := NewServiceAccount() informerFactory := informers.NewSharedInformerFactory(nil, controller.NoResyncPeriodFunc()) admit.SetExternalKubeInformerFactory(informerFactory) admit.client = client admit.RequireAPIToken = false pod := &api.Pod{} attrs := admission.NewAttributesRecord(pod, nil, api.Kind("Pod").WithVersion("version"), ns, "myname", api.Resource("pods").WithVersion("version"), "", admission.Create, false, nil) err := admit.Admit(attrs) if err != nil { t.Errorf("Unexpected error: %v", err) } if pod.Spec.ServiceAccountName != DefaultServiceAccountName { t.Errorf("Expected service account %s assigned, got %s", DefaultServiceAccountName, pod.Spec.ServiceAccountName) } }
explode_data.jsonl/61345
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 20714, 288, 63718, 3854, 1860, 7365, 1155, 353, 8840, 836, 8, 341, 84041, 1669, 330, 76, 1872, 82, 1837, 197, 322, 7854, 264, 1273, 2943, 429, 279, 25293, 9006, 646, 990, 311, 1401, 705, 279, 2473, 2692, 7402, 504, 1181, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_isTwoPair(t *testing.T) { type args struct { hand deck.Hand } tests := []struct { name string args args want string want1 bool }{ { name: "Should be a 2 pairs", args: args{deck.Hand{ deck.Card{Value: 8}, deck.Card{Value: 8}, deck.Card{Value: 7}, deck.Card{Value: 7}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("queen")}, }}, want: "8", want1: true, }, { name: "Should be a 2 pairs 2", args: args{deck.Hand{ deck.Card{IsRoyal: true, RoyalType: deck.Royal("king")}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("king")}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("queen")}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("queen")}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("queen")}, }}, want: "king", want1: true, }, { name: "Should not be a 2 pairs", args: args{deck.Hand{ deck.Card{Value: 3}, deck.Card{Value: 8}, deck.Card{Value: 9}, deck.Card{Value: 3}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("queen")}, }}, want: "3", want1: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, got1 := isTwoPair(tt.args.hand) if got != tt.want { t.Errorf("isTwoPair() got = %v, want %v", got, tt.want) } if got1 != tt.want1 { t.Errorf("isTwoPair() got1 = %v, want %v", got1, tt.want1) } }) } }
explode_data.jsonl/7191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 682 }
[ 2830, 3393, 6892, 11613, 12443, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 9598, 437, 9530, 35308, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 31215, 220, 2827, 198, 197, 50780, 220, 91...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRename(t *testing.T) { srcFileName, dstFileName := dirRoot+"/src-file.txt", dirRoot+"/dst-file.txt" err := AppendToFile(srcFileName, []byte("filesystem")) if err != nil { panic(err) } t.Cleanup(func() { _ = RemoveWithRecur(dirRoot) }) if !Exists(srcFileName) { t.Error("Rename test failed!") } err = Rename(srcFileName, dstFileName) if err != nil { panic(err) } if !Exists(dstFileName) { t.Error("Rename test failed!") } }
explode_data.jsonl/34182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 88757, 1155, 353, 8840, 836, 8, 341, 41144, 10903, 11, 10648, 10903, 1669, 5419, 8439, 27569, 3548, 14203, 3909, 497, 5419, 8439, 27569, 15658, 14203, 3909, 1837, 9859, 1669, 29807, 41550, 14705, 10903, 11, 3056, 3782, 445, 41...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatchRegexp_OutputsOnlyLinesMatchingRegexp(t *testing.T) { t.Parallel() input := "This is the first line in the file.\nHello, world.\nThis is another line in the file.\n" tcs := []struct { regex, want string }{ { regex: `Hello|file`, want: "This is the first line in the file.\nHello, world.\nThis is another line in the file.\n", }, { regex: `an.ther`, want: "This is another line in the file.\n", }, { regex: `r[a-z]*s`, want: "This is the first line in the file.\n", }, { regex: `r[a-z]+s`, want: "", }, { regex: `bogus$`, want: "", }, } for _, tc := range tcs { got, err := script.Echo(input).MatchRegexp(regexp.MustCompile(tc.regex)).String() if err != nil { t.Fatal(err) } if tc.want != got { t.Error(cmp.Diff(tc.want, got)) } } }
explode_data.jsonl/51495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 8331, 3477, 4580, 36675, 16920, 7308, 16794, 64430, 3477, 4580, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 22427, 1669, 330, 1986, 374, 279, 1156, 1555, 304, 279, 1034, 7110, 77, 9707, 11, 1879, 7110, 77, 1986, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBroadcastTxCommit(t *testing.T) { require := require.New(t) mempool := node.MempoolReactor().Mempool for i, c := range GetClients() { _, _, tx := MakeTxKV() bres, err := c.BroadcastTxCommit(tx) require.Nil(err, "%d: %+v", i, err) require.True(bres.CheckTx.IsOK()) require.True(bres.DeliverTx.IsOK()) require.Equal(0, mempool.Size()) } }
explode_data.jsonl/48949
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 43362, 31584, 33441, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 2109, 3262, 1749, 1669, 2436, 1321, 3262, 1749, 693, 5621, 1005, 44, 3262, 1749, 198, 2023, 600, 11, 272, 1669, 2088, 2126, 47174, 368, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTreeLookupSimple(t *testing.T) { router := New() for _, v := range testTable { v := v router.AddRoute("GET", v[0], func(w http.ResponseWriter, r *http.Request, vp Parameter) { for i := 1; i < len(v); i++ { if r.URL.Path == v[i] { return } } t.Errorf("GOT %s EXPECTED %s\n", r.URL.Path, v) }) } t.Log(router.String()) for _, v := range testTable { for i := 1; i < len(v); i++ { t.Log("GET " + v[i]) fn, variables := router.FindRoute("GET", v[i]) if fn == nil { t.Error("Not Found", v[i], variables) continue } req, _ := http.NewRequest("GET", v[i], nil) fn(nil, req, variables) } } }
explode_data.jsonl/45454
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 316 }
[ 2830, 3393, 6533, 34247, 16374, 1155, 353, 8840, 836, 8, 341, 67009, 1669, 1532, 2822, 2023, 8358, 348, 1669, 2088, 1273, 2556, 341, 197, 5195, 1669, 348, 198, 197, 67009, 1904, 4899, 445, 3806, 497, 348, 58, 15, 1125, 2915, 3622, 175...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCrossStructGteFieldValidation(t *testing.T) { type Inner struct { CreatedAt *time.Time String string Int int Uint uint Float float64 Array []string } type Test struct { Inner *Inner CreatedAt *time.Time `validate:"gtecsfield=Inner.CreatedAt"` String string `validate:"gtecsfield=Inner.String"` Int int `validate:"gtecsfield=Inner.Int"` Uint uint `validate:"gtecsfield=Inner.Uint"` Float float64 `validate:"gtecsfield=Inner.Float"` Array []string `validate:"gtecsfield=Inner.Array"` } now := time.Now().UTC() then := now.Add(time.Hour * -5) inner := &Inner{ CreatedAt: &then, String: "abcd", Int: 13, Uint: 13, Float: 1.13, Array: []string{"val1", "val2"}, } test := &Test{ Inner: inner, CreatedAt: &now, String: "abcde", Int: 14, Uint: 14, Float: 1.14, Array: []string{"val1", "val2", "val3"}, } validate := New() errs := validate.Struct(test) Equal(t, errs, nil) test.CreatedAt = &then test.String = "abcd" test.Int = 13 test.Uint = 13 test.Float = 1.13 test.Array = []string{"val1", "val2"} errs = validate.Struct(test) Equal(t, errs, nil) before := now.Add(time.Hour * -10) test.CreatedAt = &before test.String = "abc" test.Int = 12 test.Uint = 12 test.Float = 1.12 test.Array = []string{"val1"} errs = validate.Struct(test) NotEqual(t, errs, nil) AssertError(t, errs, "Test.CreatedAt", "Test.CreatedAt", "CreatedAt", "CreatedAt", "gtecsfield") AssertError(t, errs, "Test.String", "Test.String", "String", "String", "gtecsfield") AssertError(t, errs, "Test.Int", "Test.Int", "Int", "Int", "gtecsfield") AssertError(t, errs, "Test.Uint", "Test.Uint", "Uint", "Uint", "gtecsfield") AssertError(t, errs, "Test.Float", "Test.Float", "Float", "Float", "gtecsfield") AssertError(t, errs, "Test.Array", "Test.Array", "Array", "Array", "gtecsfield") errs = validate.VarWithValue(1, "", "gtecsfield") NotEqual(t, errs, nil) AssertError(t, errs, "", "", "", "", "gtecsfield") // this test is for the WARNING about unforeseen validation issues. 
errs = validate.VarWithValue(test, now, "gtecsfield") NotEqual(t, errs, nil) AssertError(t, errs, "Test.CreatedAt", "Test.CreatedAt", "CreatedAt", "CreatedAt", "gtecsfield") AssertError(t, errs, "Test.String", "Test.String", "String", "String", "gtecsfield") AssertError(t, errs, "Test.Int", "Test.Int", "Int", "Int", "gtecsfield") AssertError(t, errs, "Test.Uint", "Test.Uint", "Uint", "Uint", "gtecsfield") AssertError(t, errs, "Test.Float", "Test.Float", "Float", "Float", "gtecsfield") AssertError(t, errs, "Test.Array", "Test.Array", "Array", "Array", "gtecsfield") type Other struct { Value string } type Test2 struct { Value Other Time time.Time `validate:"gtecsfield=Value"` } tst := Test2{ Value: Other{Value: "StringVal"}, Time: then, } errs = validate.Struct(tst) NotEqual(t, errs, nil) AssertError(t, errs, "Test2.Time", "Test2.Time", "Time", "Time", "gtecsfield") }
explode_data.jsonl/77227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1366 }
[ 2830, 3393, 28501, 9422, 38, 665, 1877, 13799, 1155, 353, 8840, 836, 8, 1476, 13158, 36356, 2036, 341, 197, 84062, 1655, 353, 1678, 16299, 198, 197, 4980, 262, 914, 198, 197, 57152, 981, 526, 198, 197, 15980, 396, 414, 2622, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStacktraceLocationThrowFromGo(t *testing.T) { vm := New() f := func() { panic(vm.ToValue("Test")) } vm.Set("f", f) _, err := vm.RunString(` function main() { (function noop() {})(); return callee(); } function callee() { return f(); } main(); `) if err == nil { t.Fatal("Expected error") } stack := err.(*Exception).stack if len(stack) != 4 { t.Fatalf("Unexpected stack len: %v", stack) } if frame := stack[0]; !strings.HasSuffix(frame.funcName.String(), "TestStacktraceLocationThrowFromGo.func1") { t.Fatalf("Unexpected stack frame 0: %#v", frame) } if frame := stack[1]; frame.funcName != "callee" || frame.pc != 1 { t.Fatalf("Unexpected stack frame 1: %#v", frame) } if frame := stack[2]; frame.funcName != "main" || frame.pc != 6 { t.Fatalf("Unexpected stack frame 2: %#v", frame) } if frame := stack[3]; frame.funcName != "" || frame.pc != 4 { t.Fatalf("Unexpected stack frame 3: %#v", frame) } }
explode_data.jsonl/10529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 4336, 15067, 4707, 23079, 3830, 10850, 1155, 353, 8840, 836, 8, 341, 54879, 1669, 1532, 741, 1166, 1669, 2915, 368, 341, 197, 30764, 31723, 3274, 1130, 445, 2271, 5455, 197, 532, 54879, 4202, 445, 69, 497, 282, 340, 197, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProject_ListProjWbOverviewData(t *testing.T) { tests := []struct { name string desc string wantErr bool dopErr bool mspErr bool }{ { name: "dop_issue_query_error", wantErr: false, dopErr: true, mspErr: false, }, { name: "dop_msp_query_error", wantErr: false, dopErr: false, mspErr: true, }, { name: "query_success", wantErr: false, dopErr: false, mspErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { var projects []apistructs.ProjectDTO identity := apistructs.Identity{ UserID: "2", OrgID: "1", } bdl := &bundle.Bundle{} wb := New(WithBundle(bdl)) monkey.PatchInstanceMethod(reflect.TypeOf(bdl), "GetWorkbenchData", func(c *bundle.Bundle, userID string, req apistructs.WorkbenchRequest) (*apistructs.WorkbenchResponse, error) { if tt.dopErr { return nil, fmt.Errorf("error") } return &apistructs.WorkbenchResponse{}, nil }) defer monkey.UnpatchAll() monkey.PatchInstanceMethod(reflect.TypeOf(bdl), "GetMSPTenantProjects", func(c *bundle.Bundle, userID, orgID string, withStats bool, projectIds []uint64) ([]*projpb.Project, error) { if tt.mspErr { return nil, fmt.Errorf("error") } return nil, nil }) _, err := wb.ListProjWbOverviewData(identity, projects) if (err != nil) != tt.wantErr { t.Errorf("ListProjWbOverviewData() error = %v, wantErr %v", err, tt.wantErr) return } }) } }
explode_data.jsonl/54731
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 717 }
[ 2830, 3393, 7849, 27104, 61075, 54, 65, 41044, 1043, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 41653, 262, 914, 198, 197, 50780, 7747, 1807, 198, 197, 2698, 453, 7747, 220, 1807, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFormatterColorHexError(test *testing.T) { formatted, err := formatter.Format(`{color "0xFFF3AC67"}funky{normal}`) assert.Error(test, err) assert.Empty(test, formatted) }
explode_data.jsonl/39794
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 14183, 1636, 20335, 1454, 8623, 353, 8840, 836, 8, 341, 37410, 12127, 11, 1848, 1669, 24814, 9978, 5809, 90, 3423, 330, 15, 69420, 18, 1706, 21, 22, 9207, 69, 69205, 90, 8252, 5541, 692, 6948, 6141, 8623, 11, 1848, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestClusterListenAddressForMultiReplica(t *testing.T) { a := monitoringv1.Alertmanager{} replicas := int32(3) a.Spec.Version = operator.DefaultAlertmanagerVersion a.Spec.Replicas = &replicas statefulSet, err := makeStatefulSetSpec(&a, defaultTestConfig) if err != nil { t.Fatal(err) } amArgs := statefulSet.Template.Spec.Containers[0].Args containsClusterListenAddress := false for _, arg := range amArgs { if arg == "--cluster.listen-address=[$(POD_IP)]:9094" { containsClusterListenAddress = true } } if !containsClusterListenAddress { t.Fatal("expected stateful set to contain arg '--cluster.listen-address=[$(POD_IP)]:9094'") } }
explode_data.jsonl/25266
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 28678, 38714, 4286, 2461, 20358, 18327, 15317, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 16558, 85, 16, 40143, 13297, 16094, 73731, 52210, 1669, 526, 18, 17, 7, 18, 340, 11323, 36473, 35842, 284, 5675, 13275, 9676, 13297, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetFirstNumberThatIsNotSumOfPair(t *testing.T) { t.Run("example", func(t *testing.T) { got := GetFirstNumberThatIsNotSumOfPair("example.txt", 5) want := 127 if got != want { t.Errorf("GetFirstNumberThatIsNotSumOfPair got %d want %d", got, want) } }) t.Run("input", func(t *testing.T) { got := GetFirstNumberThatIsNotSumOfPair("input.txt", 25) want := 217430975 if got != want { t.Errorf("GetFirstNumberThatIsNotSumOfPair got %d want %d", got, want) } }) }
explode_data.jsonl/19324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 1949, 5338, 2833, 4792, 3872, 2623, 9190, 2124, 12443, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 8687, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3174, 354, 1669, 2126, 5338, 2833, 4792, 3872, 2623, 9190, 2124, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewResolverWithPrefilledReadonlyCache(t *testing.T) { r := httptransport.NewResolver(httptransport.Config{ DNSCache: map[string][]string{ "dns.google.com": {"8.8.8.8"}, }, }) ar, ok := r.(resolver.AddressResolver) if !ok { t.Fatal("not the resolver we expected") } ewr, ok := ar.Resolver.(resolver.ErrorWrapperResolver) if !ok { t.Fatal("not the resolver we expected") } cr, ok := ewr.Resolver.(*resolver.CacheResolver) if !ok { t.Fatal("not the resolver we expected") } if cr.ReadOnly != true { t.Fatal("expected readonly cache here") } if cr.Get("dns.google.com")[0] != "8.8.8.8" { t.Fatal("cache not correctly prefilled") } _, ok = cr.Resolver.(resolver.SystemResolver) if !ok { t.Fatal("not the resolver we expected") } }
explode_data.jsonl/78380
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 325 }
[ 2830, 3393, 3564, 18190, 2354, 29978, 4374, 4418, 3243, 8233, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1758, 26445, 7121, 18190, 19886, 26445, 10753, 515, 197, 10957, 2448, 8233, 25, 2415, 14032, 45725, 917, 515, 298, 197, 44917, 4412, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAllEnabled(t *testing.T) { t.Parallel() exampleFolder := test_structure.CopyTerraformFolderToTemp(t, "../", "examples/all-enabled") //awsRegion := aws.GetRandomStableRegion(t, nil, nil) terraformOptions := &terraform.Options{ TerraformDir: exampleFolder, Vars: map[string]interface{}{ "root_domain_name": "insight-infra.net", "aws_region": "us-east-1", }, } defer test_structure.RunTestStage(t, "teardown", func() { terraform.Destroy(t, terraformOptions) }) test_structure.RunTestStage(t, "setup", func() { terraform.InitAndApply(t, terraformOptions) }) }
explode_data.jsonl/40696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 2403, 5462, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 8122, 1516, 13682, 1669, 1273, 38283, 31770, 51, 13886, 627, 13682, 1249, 12151, 1155, 11, 7005, 497, 330, 51668, 31406, 54192, 1138, 197, 322, 8635, 14091, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStellarDecorate(t *testing.T) { cases := []decorateTest{ decorateTest{ body: "+1xlm other test", payments: []chat1.TextPayment{ chat1.TextPayment{ Username: "mikem", PaymentText: "+1XLM", Result: chat1.NewTextPaymentResultWithSent(stellar1.PaymentID("stellarid")), }, }, result: "$>kb${\"typ\":0,\"payment\":{\"username\":\"mikem\",\"paymentText\":\"+1XLM\",\"result\":{\"resultTyp\":0,\"sent\":\"stellarid\"}}}$<kb$ other test", }, decorateTest{ body: "`+1xlm` +1xlm other test", payments: []chat1.TextPayment{ chat1.TextPayment{ Username: "mikem", PaymentText: "+1XLM", Result: chat1.NewTextPaymentResultWithSent(stellar1.PaymentID("stellarid")), }, }, result: "`+1xlm` $>kb${\"typ\":0,\"payment\":{\"username\":\"mikem\",\"paymentText\":\"+1XLM\",\"result\":{\"resultTyp\":0,\"sent\":\"stellarid\"}}}$<kb$ other test", }, decorateTest{ body: "HIHIH ```+5xlm@patrick``` +5xlm@patrick `+1xlm` +1xlm other test", payments: []chat1.TextPayment{ chat1.TextPayment{ Username: "patrick", PaymentText: "+5XLM@patrick", Result: chat1.NewTextPaymentResultWithSent(stellar1.PaymentID("stellarid")), }, chat1.TextPayment{ Username: "mikem", PaymentText: "+1XLM", Result: chat1.NewTextPaymentResultWithSent(stellar1.PaymentID("stellarid")), }, }, result: "HIHIH ```+5xlm@patrick``` $>kb${\"typ\":0,\"payment\":{\"username\":\"patrick\",\"paymentText\":\"+5XLM@patrick\",\"result\":{\"resultTyp\":0,\"sent\":\"stellarid\"}}}$<kb$ `+1xlm` $>kb${\"typ\":0,\"payment\":{\"username\":\"mikem\",\"paymentText\":\"+1XLM\",\"result\":{\"resultTyp\":0,\"sent\":\"stellarid\"}}}$<kb$ other test", }, decorateTest{ body: " ``` `+124.005XLM@max``` my life to yours, my breath become yours ``` `+124.005XLM@mikem`` ", payments: []chat1.TextPayment{ chat1.TextPayment{ Username: "mikem", PaymentText: "+124.005XLM@mikem", Result: chat1.NewTextPaymentResultWithSent(stellar1.PaymentID("stellarid")), }, }, result: " ``` `+124.005XLM@max``` my life to yours, my breath become yours ``` 
`$>kb${\"typ\":0,\"payment\":{\"username\":\"mikem\",\"paymentText\":\"+124.005XLM@mikem\",\"result\":{\"resultTyp\":0,\"sent\":\"stellarid\"}}}$<kb$`` ", }, } for _, c := range cases { res := DecorateWithPayments(context.TODO(), c.body, c.payments) require.Equal(t, c.result, res) } }
explode_data.jsonl/74770
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1146 }
[ 2830, 3393, 623, 26880, 35227, 349, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 47976, 2271, 515, 197, 197, 47976, 2271, 515, 298, 35402, 25, 6630, 16, 87, 17771, 1008, 1273, 756, 298, 3223, 352, 1368, 25, 3056, 9686, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPostFilterPlugins(t *testing.T) { tests := []struct { name string plugins []*TestPlugin wantStatus *framework.Status }{ { name: "a single plugin makes a Pod schedulable", plugins: []*TestPlugin{ { name: "TestPlugin", inj: injectedResult{PostFilterStatus: int(framework.Success)}, }, }, wantStatus: framework.NewStatus(framework.Success, "injected status"), }, { name: "plugin1 failed to make a Pod schedulable, followed by plugin2 which makes the Pod schedulable", plugins: []*TestPlugin{ { name: "TestPlugin1", inj: injectedResult{PostFilterStatus: int(framework.Unschedulable)}, }, { name: "TestPlugin2", inj: injectedResult{PostFilterStatus: int(framework.Success)}, }, }, wantStatus: framework.NewStatus(framework.Success, "injected status"), }, { name: "plugin1 makes a Pod schedulable, followed by plugin2 which cannot make the Pod schedulable", plugins: []*TestPlugin{ { name: "TestPlugin1", inj: injectedResult{PostFilterStatus: int(framework.Success)}, }, { name: "TestPlugin2", inj: injectedResult{PostFilterStatus: int(framework.Unschedulable)}, }, }, wantStatus: framework.NewStatus(framework.Success, "injected status"), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { registry := Registry{} cfgPls := &config.Plugins{} for _, pl := range tt.plugins { // register all plugins tmpPl := pl if err := registry.Register(pl.name, func(_ runtime.Object, _ framework.Handle) (framework.Plugin, error) { return tmpPl, nil }); err != nil { t.Fatalf("fail to register postFilter plugin (%s)", pl.name) } // append plugins to filter pluginset cfgPls.PostFilter.Enabled = append( cfgPls.PostFilter.Enabled, config.Plugin{Name: pl.name}, ) } f, err := newFrameworkWithQueueSortAndBind(registry, cfgPls, emptyArgs) if err != nil { t.Fatalf("fail to create framework: %s", err) } _, gotStatus := f.RunPostFilterPlugins(context.TODO(), nil, pod, nil) if !reflect.DeepEqual(gotStatus, tt.wantStatus) { t.Errorf("Unexpected status. 
got: %v, want: %v", gotStatus, tt.wantStatus) } }) } }
explode_data.jsonl/35743
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 934 }
[ 2830, 3393, 4133, 5632, 45378, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 197, 18716, 262, 29838, 2271, 11546, 198, 197, 50780, 2522, 353, 3794, 10538, 198, 197, 59403, 197, 197, 515, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResetCornerCases(t *testing.T) { t.Run("should fail on cLog read error", func(t *testing.T) { injectedErr := errors.New("injected error") pLog := appendableFromBuffer(nil) dLog := appendableFromBuffer(make([]byte, 3*sha256.Size)) cLog := appendableFromBuffer(make([]byte, 12*2)) cLog.ReadAtFn = func(bs []byte, off int64) (int, error) { if off == 0 { return 0, injectedErr } return len(bs), nil } tree, err := OpenWith(pLog, dLog, cLog, DefaultOptions()) require.NoError(t, err) err = tree.ResetSize(1) require.ErrorIs(t, err, injectedErr) err = tree.Close() require.NoError(t, err) }) t.Run("should fail on getting pLogSize", func(t *testing.T) { injectedErr := errors.New("injected error") pLog := appendableFromBuffer(nil) dLog := appendableFromBuffer(make([]byte, 3*sha256.Size)) cLog := appendableFromBuffer(make([]byte, 12*2)) tree, err := OpenWith(pLog, dLog, cLog, DefaultOptions()) require.NoError(t, err) pLog.SizeFn = func() (int64, error) { return 0, injectedErr } err = tree.ResetSize(1) require.ErrorIs(t, err, injectedErr) err = tree.Close() require.NoError(t, err) }) t.Run("should fail on corrupted older cLog entries", func(t *testing.T) { pLog := appendableFromBuffer(nil) dLog := appendableFromBuffer(make([]byte, 3*sha256.Size)) cLog := appendableFromBuffer([]byte{ 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, // Corrupted entry, offset way outside pLog size 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // Correct entry to allow opening without an error }) tree, err := OpenWith(pLog, dLog, cLog, DefaultOptions()) require.NoError(t, err) err = tree.ResetSize(1) require.ErrorIs(t, err, ErrorCorruptedData) err = tree.Close() require.NoError(t, err) }) t.Run("should fail on dLog size error", func(t *testing.T) { injectedErr := errors.New("injected error") pLog := appendableFromBuffer(nil) dLog := appendableFromBuffer(make([]byte, 3*sha256.Size)) cLog := appendableFromBuffer(make([]byte, 2*12)) tree, err := OpenWith(pLog, dLog, cLog, DefaultOptions()) require.NoError(t, err) 
dLog.SizeFn = func() (int64, error) { return 0, injectedErr } err = tree.ResetSize(1) require.ErrorIs(t, err, injectedErr) err = tree.Close() require.NoError(t, err) }) t.Run("should fail on incorrect dlog size", func(t *testing.T) { pLog := appendableFromBuffer(nil) dLog := appendableFromBuffer(make([]byte, 3*sha256.Size)) cLog := appendableFromBuffer(make([]byte, 2*12)) tree, err := OpenWith(pLog, dLog, cLog, DefaultOptions()) require.NoError(t, err) dLog.SizeFn = func() (int64, error) { return 0, nil } err = tree.ResetSize(1) require.ErrorIs(t, err, ErrorCorruptedDigests) err = tree.Close() require.NoError(t, err) }) }
explode_data.jsonl/49666
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1116 }
[ 2830, 3393, 14828, 50352, 37302, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 5445, 3690, 389, 272, 2201, 1349, 1465, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 17430, 28303, 7747, 1669, 5975, 7121, 445, 258, 28303, 1465, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConsolidatedSeriesIterator(t *testing.T) { for _, tt := range consolidatedSeriesIteratorTests { blocks, bounds := generateBlocks(t, tt.stepSize) j := 0 for i, block := range blocks { iters, err := block.SeriesIter() require.NoError(t, err) verifyMetas(t, i, bounds, iters.Meta(), iters.SeriesMeta()) for iters.Next() { series, err := iters.Current() require.NoError(t, err) test.EqualsWithNans(t, tt.expected[j], series.Values()) j++ } } } }
explode_data.jsonl/19683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 15220, 5192, 657, 25544, 11951, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 59291, 25544, 11951, 18200, 341, 197, 2233, 34936, 11, 14262, 1669, 6923, 29804, 1155, 11, 17853, 21465, 1695, 340, 197, 12428, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestArchiveTeams(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() id := model.NewId() name := "name" + id displayName := "Name " + id th.CheckCommand(t, "team", "create", "--name", name, "--display_name", displayName) th.CheckCommand(t, "team", "archive", name) output := th.CheckCommand(t, "team", "list") assert.Contains(t, output, name+" (archived)", "should have archived team") }
explode_data.jsonl/59047
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 42502, 60669, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 15710, 1669, 1614, 7121, 764, 741, 11609, 1669, 330, 606, 1, 488, 877, 198, 31271, 675, 1669, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAPI_IsRetryableError(t *testing.T) { t.Parallel() if IsRetryableError(nil) { t.Fatal("should not be a retryable error") } if IsRetryableError(fmt.Errorf("not the error you are looking for")) { t.Fatal("should not be a retryable error") } if !IsRetryableError(fmt.Errorf(serverError)) { t.Fatal("should be a retryable error") } if !IsRetryableError(&net.OpError{Err: fmt.Errorf("network conn error")}) { t.Fatal("should be a retryable error") } }
explode_data.jsonl/44946
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 7082, 31879, 51560, 480, 1454, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 2160, 51560, 480, 1454, 27907, 8, 341, 197, 3244, 26133, 445, 5445, 537, 387, 264, 22683, 480, 1465, 1138, 197, 630, 743, 2160, 5156...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5