text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestAnchorKey(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) ctx := context.Background() manual := hlc.NewManualClock(123) clock := hlc.NewClock(manual.UnixNano, time.Nanosecond) ambient := log.AmbientContext{Tracer: tracing.NewTracer()} stopper := stop.NewStopper() defer stopper.Stop(ctx) key1 := roachpb.Key("a") key2 := roachpb.Key("b") var senderFn kv.SenderFunc = func( ctx context.Context, ba roachpb.BatchRequest, ) (*roachpb.BatchResponse, *roachpb.Error) { if !roachpb.Key(ba.Txn.Key).Equal(key2) { t.Fatalf("expected anchor %q, got %q", key2, ba.Txn.Key) } br := ba.CreateReply() br.Txn = ba.Txn.Clone() if _, ok := ba.GetArg(roachpb.EndTxn); ok { br.Txn.Status = roachpb.COMMITTED } return br, nil } factory := NewTxnCoordSenderFactory( TxnCoordSenderFactoryConfig{ AmbientCtx: ambient, Clock: clock, Stopper: stopper, Settings: cluster.MakeTestingClusterSettings(), }, senderFn, ) db := kv.NewDB(testutils.MakeAmbientCtx(), factory, clock, stopper) if err := db.Txn(ctx, func(ctx context.Context, txn *kv.Txn) error { ba := txn.NewBatch() ba.Get(key1) ba.Put(key2, "val") return txn.Run(ctx, ba) }); err != nil { t.Fatal(err) } }
explode_data.jsonl/76911
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 568 }
[ 2830, 3393, 14677, 1592, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 692, 20985, 1669, 2266, 19047, 741, 197, 19730, 1669, 305, 17257, 7121, 52092, 26104, 7, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidateTime(t *testing.T) { time := "1h 2s" allErrs := validateTime(time, field.NewPath("time-field")) if len(allErrs) != 0 { t.Errorf("validateTime returned errors %v valid input %v", allErrs, time) } }
explode_data.jsonl/65865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 17926, 1462, 1155, 353, 8840, 836, 8, 341, 21957, 1669, 330, 16, 71, 220, 17, 82, 698, 50960, 7747, 82, 1669, 9593, 1462, 9730, 11, 2070, 7121, 1820, 445, 1678, 19130, 28075, 743, 2422, 20388, 7747, 82, 8, 961, 220, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAddVoteMsg(t *testing.T) { smr, err := MakeSmr(t) if err != nil { t.Error("TestAddVoteMsg MakeSmr error", err) return } msg := &pb.ChainedBftVoteMessage{ ProposalId: []byte("test case1"), Signature: &pb.SignInfo{ Address: "testcase", PublicKey: `{"Curvname":"P-256","X":74695617477160058757747208220371236837474210247114418775262229497812962582435,"Y":51348715319124770392993866417088542497927816017012182211244120852620959209571}`, }, } privateKey := `{"Curvname":"P-256","X":74695617477160058757747208220371236837474210247114418775262229497812962582435,"Y":51348715319124770392993866417088542497927816017012182211244120852620959209571,"D":29079635126530934056640915735344231956621504557963207107451663058887647996601}` priKey, _ := smr.cryptoClient.GetEcdsaPrivateKeyFromJsonStr(privateKey) sig, err := utils.MakeVoteMsgSign(smr.cryptoClient, priKey, msg.GetSignature(), msg.GetProposalId()) msg.Signature = sig err = smr.addVoteMsg(msg) if err != ErrInValidateSets { t.Error("TestAddVoteMsg addVoteMsg error", "error", err) return } msg.Signature.Address = "dpzuVdosQrF2kmzumhVeFQZa1aYcdgFpN" err = smr.addVoteMsg(msg) if err != nil { t.Error("TestAddVoteMsg addVoteMsg error", "error", err) return } if _, ok := smr.qcVoteMsgs.Load(string(msg.GetProposalId())); !ok { t.Error("TestAddVoteMsg load qcVoteMsgs error") return } }
explode_data.jsonl/33029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 602 }
[ 2830, 3393, 2212, 41412, 6611, 1155, 353, 8840, 836, 8, 341, 1903, 20946, 11, 1848, 1669, 7405, 10673, 81, 1155, 340, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 445, 2271, 2212, 41412, 6611, 7405, 10673, 81, 1465, 497, 1848, 340, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAWSCluster_ValidateAllowedCIDRBlocks(t *testing.T) { tests := []struct { name string awsc *AWSCluster wantErr bool }{ { name: "allow valid CIDRs", awsc: &AWSCluster{ Spec: AWSClusterSpec{ Bastion: Bastion{ AllowedCIDRBlocks: []string{ "192.168.0.0/16", "192.168.0.1/32", }, }, }, }, wantErr: false, }, { name: "disableIngressRules allowed with empty CIDR block", awsc: &AWSCluster{ Spec: AWSClusterSpec{ Bastion: Bastion{ AllowedCIDRBlocks: []string{}, DisableIngressRules: true, }, }, }, wantErr: false, }, { name: "disableIngressRules not allowed with CIDR blocks", awsc: &AWSCluster{ Spec: AWSClusterSpec{ Bastion: Bastion{ AllowedCIDRBlocks: []string{ "192.168.0.0/16", "192.168.0.1/32", }, DisableIngressRules: true, }, }, }, wantErr: true, }, { name: "invalid CIDR block with invalid network", awsc: &AWSCluster{ Spec: AWSClusterSpec{ Bastion: Bastion{ AllowedCIDRBlocks: []string{ "100.200.300.400/99", }, }, }, }, wantErr: true, }, { name: "invalid CIDR block with garbage string", awsc: &AWSCluster{ Spec: AWSClusterSpec{ Bastion: Bastion{ AllowedCIDRBlocks: []string{ "abcdefg", }, }, }, }, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ctx := context.TODO() cluster := tt.awsc.DeepCopy() cluster.ObjectMeta = metav1.ObjectMeta{ GenerateName: "cluster-", Namespace: "default", } if err := testEnv.Create(ctx, cluster); (err != nil) != tt.wantErr { t.Errorf("ValidateAllowedCIDRBlocks() error = %v, wantErr %v", err, tt.wantErr) } }) } }
explode_data.jsonl/70926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 967 }
[ 2830, 3393, 14419, 3540, 75, 4993, 62, 17926, 35382, 54146, 49, 29804, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 197, 672, 2388, 262, 353, 14419, 3540, 75, 4993, 198, 197, 50780, 7747,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFunc(tt *testing.T) { // Single error return _ = a() // BLANK a() // UNCHECKED // Return another value and an error _, _ = b() // BLANK b() // UNCHECKED // Return a custom error type _ = customError() // BLANK customError() // UNCHECKED // Return a custom concrete error type _ = customConcreteError() // BLANK customConcreteError() // UNCHECKED _, _ = customConcreteErrorTuple() // BLANK customConcreteErrorTuple() // UNCHECKED // Return a custom pointer error type _ = customPointerError() // BLANK customPointerError() // UNCHECKED _, _ = customPointerErrorTuple() // BLANK customPointerErrorTuple() // UNCHECKED // Method with a single error return x := t{} _ = x.a() // BLANK x.a() // UNCHECKED // Method call on a struct member y := u{x} _ = y.t.a() // BLANK y.t.a() // UNCHECKED m1 := map[string]func() error{"a": a} _ = m1["a"]() // BLANK m1["a"]() // UNCHECKED // Additional cases for assigning errors to blank identifier z, _ := b() // BLANK _, w := a(), 5 // BLANK // Assign non error to blank identifier _ = c() _ = z + w // Avoid complaints about unused variables // Type assertions var i interface{} s1 := i.(string) // ASSERT s1 = i.(string) // ASSERT s2, _ := i.(string) // ASSERT s2, _ = i.(string) // ASSERT s3, ok := i.(string) s3, ok = i.(string) switch s4 := i.(type) { case string: _ = s4 } _, _, _, _ = s1, s2, s3, ok // Goroutine go a() // UNCHECKED defer a() // UNCHECKED b1 := bytes.Buffer{} b2 := &bytes.Buffer{} b1.Write(nil) b2.Write(nil) rand.Read(nil) mrand.Read(nil) sha256.New().Write([]byte{}) ioutil.ReadFile("main.go") // UNCHECKED var emiw ErrorMakerInterfaceWrapper emiw.MakeNilError() }
explode_data.jsonl/69167
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 755 }
[ 2830, 3393, 9626, 47152, 353, 8840, 836, 8, 341, 197, 322, 11327, 1465, 470, 198, 197, 62, 284, 264, 368, 442, 14850, 16012, 198, 11323, 368, 257, 442, 6643, 34516, 1479, 271, 197, 322, 3411, 2441, 897, 323, 458, 1465, 198, 197, 687...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServer_Query_DeleteSeries(t *testing.T) { t.Parallel() s := OpenServer(NewConfig()) defer s.Close() test := tests.load(t, "delete_series_time") if err := s.CreateDatabaseAndRetentionPolicy(test.database(), NewRetentionPolicySpec(test.retentionPolicy(), 1, 0), true); err != nil { t.Fatal(err) } for i, query := range test.queries { if i == 0 { if err := test.init(s); err != nil { t.Fatalf("test init failed: %s", err) } } if query.skip { t.Logf("SKIP:: %s", query.name) continue } if err := query.Execute(s); err != nil { t.Error(query.Error(err)) } else if !query.success() { t.Error(query.failureMessage()) } } }
explode_data.jsonl/61238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 5475, 48042, 57418, 25544, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1903, 1669, 5264, 5475, 35063, 2648, 2398, 16867, 274, 10421, 2822, 18185, 1669, 7032, 5104, 1155, 11, 330, 4542, 35015, 3009, 5130, 743, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestKVDelete(t *testing.T) { defer testutil.AfterTest(t) clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 3}) defer clus.Terminate(t) kv := clus.RandClient() ctx := context.TODO() presp, err := kv.Put(ctx, "foo", "") if err != nil { t.Fatalf("couldn't put 'foo' (%v)", err) } if presp.Header.Revision != 2 { t.Fatalf("presp.Header.Revision got %d, want %d", presp.Header.Revision, 2) } resp, err := kv.Delete(ctx, "foo") if err != nil { t.Fatalf("couldn't delete key (%v)", err) } if resp.Header.Revision != 3 { t.Fatalf("resp.Header.Revision got %d, want %d", resp.Header.Revision, 3) } gresp, err := kv.Get(ctx, "foo") if err != nil { t.Fatalf("couldn't get key (%v)", err) } if len(gresp.Kvs) > 0 { t.Fatalf("gresp.Kvs got %+v, want none", gresp.Kvs) } }
explode_data.jsonl/16406
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 82707, 6435, 1155, 353, 8840, 836, 8, 341, 16867, 1273, 1314, 36892, 2271, 1155, 692, 197, 4163, 1669, 17590, 7121, 28678, 53, 18, 1155, 11, 609, 60168, 72883, 2648, 90, 1695, 25, 220, 18, 3518, 16867, 1185, 355, 836, 261,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestHTTPContainerInfoGetterGetContainerInfoSuccessfully(t *testing.T) { req := &cadvisorapi.ContainerInfoRequest{ NumStats: 10, } cinfo := cadvisorapitest.GenerateRandomContainerInfo( "dockerIDWhichWillNotBeChecked", // docker ID 2, // Number of cores req, 1*time.Second, ) testHTTPContainerInfoGetter(req, cinfo, "somePodID", "containerNameInK8S", 0, t) }
explode_data.jsonl/53976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 9230, 4502, 1731, 31485, 1949, 4502, 1731, 35959, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 609, 34455, 38012, 2068, 33672, 1731, 1900, 515, 197, 197, 4651, 16635, 25, 220, 16, 15, 345, 197, 532, 1444, 2733, 1669, 19409, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTodo(t *testing.T) { c := client.New(LoaderMiddleware(handler.NewDefaultServer(NewExecutableSchema(Config{Resolvers: &Resolver{}})))) t.Run("create a new todo", func(t *testing.T) { var resp interface{} c.MustPost(`{ customers { name address { street } orders { id amount items { name } } } }`, &resp) }) t.Run("2d array marshaling", func(t *testing.T) { var resp struct { Torture2d [][]Customer } c.MustPost(`{ torture2d(customerIds:[[1,2],[3,4,5]]) { id name } }`, &resp) require.EqualValues(t, [][]Customer{ {{ID: 1, Name: "0 0"}, {ID: 2, Name: "0 1"}}, {{ID: 3, Name: "1 0"}, {ID: 4, Name: "1 1"}, {ID: 5, Name: "1 2"}}, }, resp.Torture2d) }) // Input coercion on arrays should convert non array values into an array of the appropriate depth // http://facebook.github.io/graphql/June2018/#sec-Type-System.List t.Run("array coercion", func(t *testing.T) { t.Run("1d", func(t *testing.T) { var resp struct { Torture1d []Customer } c.MustPost(`{ torture1d(customerIds: 1) { id name } }`, &resp) require.EqualValues(t, []Customer{ {ID: 1, Name: "0"}, }, resp.Torture1d) }) t.Run("2d", func(t *testing.T) { var resp struct { Torture2d [][]Customer } c.MustPost(`{ torture2d(customerIds: 1) { id name } }`, &resp) require.EqualValues(t, [][]Customer{ {{ID: 1, Name: "0 0"}}, }, resp.Torture2d) }) }) t.Run("introspection", func(t *testing.T) { // Make sure we can run the graphiql introspection query without errors var resp interface{} c.MustPost(introspection.Query, &resp) }) t.Run("customer array torture malformed array query", func(t *testing.T) { var resp struct { Torture [][]Customer } err := c.Post(`{ torture2d(customerIds:{}) { id name } }`, &resp) require.EqualError(t, err, "[{\"message\":\"map[string]interface {} is not an int\",\"path\":[\"torture2d\",\"customerIds\",0,0]}]") }) }
explode_data.jsonl/49313
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 877 }
[ 2830, 3393, 24176, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2943, 7121, 7, 9181, 24684, 36514, 7121, 3675, 5475, 35063, 94772, 8632, 33687, 90, 1061, 39435, 25, 609, 18190, 90, 3417, 57570, 3244, 16708, 445, 3182, 264, 501, 11804, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRedis_Hmget(t *testing.T) { runOnRedis(t, func(client *Redis) { assert.Nil(t, client.Hset("a", "aa", "aaa")) assert.Nil(t, client.Hset("a", "bb", "bbb")) _, err := NewRedis(client.Addr, "").Hmget("a", "aa", "bb") assert.NotNil(t, err) vals, err := client.Hmget("a", "aa", "bb") assert.Nil(t, err) assert.EqualValues(t, []string{"aaa", "bbb"}, vals) vals, err = client.Hmget("a", "aa", "no", "bb") assert.Nil(t, err) assert.EqualValues(t, []string{"aaa", "", "bbb"}, vals) }) }
explode_data.jsonl/39162
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 48137, 2039, 76, 455, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 6948, 59678, 1155, 11, 2943, 3839, 746, 445, 64, 497, 330, 5305, 497, 330, 32646, 5455, 197, 6948, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_ObjectTracker_Multiple_Expectations(t *testing.T) { g := gomega.NewWithT(t) ot := newObjTracker(schema.GroupVersionKind{}, nil) const count = 10 ct := makeCTSlice("ct-", count) for i := 0; i < len(ct); i++ { ot.Expect(ct[i]) } g.Expect(ot.Satisfied()).NotTo(gomega.BeTrue(), "should not be satisfied before ExpectationsDone") ot.ExpectationsDone() g.Expect(ot.Satisfied()).NotTo(gomega.BeTrue(), "should not be satisfied after ExpectationsDone") for i := 0; i < len(ct); i++ { g.Expect(ot.Satisfied()).NotTo(gomega.BeTrue(), "should not be satisfied before observations are done") ot.Observe(ct[i]) } g.Expect(ot.Satisfied()).To(gomega.BeTrue(), "should be satisfied") }
explode_data.jsonl/52315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 27839, 31133, 1245, 12229, 62, 17536, 804, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 2354, 51, 1155, 340, 197, 354, 1669, 74259, 31133, 42735, 5407, 5637, 10629, 22655, 2092, 692, 4777, 1760, 284, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAdd(t *testing.T) { bm := New(100) bm.Add(10) if !bm.Has(10) { t.Errorf("wanted %v but get nil", 10) } }
explode_data.jsonl/26717
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 61 }
[ 2830, 3393, 2212, 1155, 353, 8840, 836, 8, 341, 2233, 76, 1669, 1532, 7, 16, 15, 15, 340, 2233, 76, 1904, 7, 16, 15, 340, 743, 753, 29307, 16152, 7, 16, 15, 8, 341, 197, 3244, 13080, 445, 86592, 1018, 85, 714, 633, 2092, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSuccessfulCommand(t *testing.T) { f := newLTFixture(t) defer f.TearDown() targ := f.localTarget("echo hello world") res, err := f.ltbad.BuildAndDeploy(f.ctx, f.st, []model.TargetSpec{targ}, store.BuildStateSet{}) require.Nil(t, err) assert.Equal(t, targ.ID(), res[targ.ID()].TargetID()) assert.Contains(t, f.out.String(), "hello world", "expect cmd stdout in logs") }
explode_data.jsonl/17635
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 36374, 4062, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 43, 10808, 12735, 1155, 340, 16867, 282, 836, 682, 4454, 2822, 3244, 858, 1669, 282, 11033, 6397, 445, 3047, 23811, 1879, 5130, 10202, 11, 1848, 1669, 282, 82092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApp01maVendorHndlrDB(t *testing.T) { var err error var td *TestData_App01maVendor var rcd App01maVendor.App01maVendor var rcd2 App01maVendor.App01maVendor t.Logf("TestVendor.DB()...\n") td = &TestData_App01maVendor{} td.Setup(t) t.Logf("\tChecking First()...\n") if err = td.db.RowFirst(&rcd2); err != nil { t.Fatalf("Error - Read First failed: %s\n", err.Error()) } rcd.TestData(0) if 0 != rcd.CompareKeys(&rcd2) { t.Fatalf("Error - First did not work, need A, got %+v\n", rcd2) } t.Logf("TestVendor.DB() - End of Test\n\n\n") }
explode_data.jsonl/78270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 2164, 15, 16, 1728, 44691, 39, 303, 19018, 3506, 1155, 353, 8840, 836, 8, 341, 262, 762, 1848, 260, 1465, 198, 262, 762, 17941, 688, 353, 83920, 36117, 15, 16, 1728, 44691, 198, 262, 762, 435, 4385, 260, 1845, 15, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRepositories(t *testing.T) { require := require.New(t) tmpDir, err := CreateTempDir() require.NoError(err) defer os.RemoveAll(tmpDir) config := GitServerConfig{ RepositoriesCache: tmpDir, Complexity: 0, } repos, err := NewRepositories(config) require.NotNil(repos) require.NoError(err) require.Equal(config, repos.config) require.Equal(tmpDir, repos.Path()) err = repos.Download() require.NoError(err) r, err := ioutil.ReadDir(tmpDir) require.NoError(err) linkDir, err := repos.LinksDir() require.NoError(err) defer os.RemoveAll(linkDir) links, err := ioutil.ReadDir(linkDir) require.NoError(err) require.Len(links, len(r)) for i, link := range links { require.Equal(r[i].Name(), link.Name()) } }
explode_data.jsonl/76543
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 44814, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 20082, 6184, 11, 1848, 1669, 4230, 12151, 6184, 741, 17957, 35699, 3964, 340, 16867, 2643, 84427, 10368, 6184, 692, 25873, 1669, 21120, 5475, 2648, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRevokeAllSessions(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() user := th.BasicUser th.Client.Login(user.Email, user.Password) resp, err := th.Client.RevokeAllSessions(th.BasicUser2.Id) require.Error(t, err) CheckForbiddenStatus(t, resp) resp, err = th.Client.RevokeAllSessions("junk" + user.Id) require.Error(t, err) CheckBadRequestStatus(t, resp) _, err = th.Client.RevokeAllSessions(user.Id) require.NoError(t, err) th.Client.Logout() resp, err = th.Client.RevokeAllSessions(user.Id) require.Error(t, err) CheckUnauthorizedStatus(t, resp) th.Client.Login(user.Email, user.Password) sessions, _, _ := th.Client.GetSessions(user.Id, "") require.NotEmpty(t, sessions, "session should exist") _, err = th.Client.RevokeAllSessions(user.Id) require.NoError(t, err) sessions, _, _ = th.SystemAdminClient.GetSessions(user.Id, "") require.Empty(t, sessions, "no sessions should exist for user") resp, err = th.Client.RevokeAllSessions(user.Id) require.Error(t, err) CheckUnauthorizedStatus(t, resp) }
explode_data.jsonl/47532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 693, 7621, 2403, 59062, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 19060, 1669, 270, 48868, 1474, 198, 70479, 11716, 32499, 4277, 24066, 11, 1196, 25690, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVerifyExpiredTokenWindowAndAud(t *testing.T) { tokenIsValidWindow, errTokenPayload := VerifyToken(expiredToken, &testLocalSessions, nil) if tokenIsValidWindow { t.Fail() t.Logf("token window should be expired") } if errTokenPayload != nil { t.Fail() t.Logf(errTokenPayload.Error()) } }
explode_data.jsonl/11055
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 32627, 54349, 3323, 4267, 3036, 52949, 1155, 353, 8840, 836, 8, 341, 43947, 55470, 4267, 11, 1848, 3323, 29683, 1669, 25429, 3323, 25865, 2690, 3323, 11, 609, 1944, 7319, 59062, 11, 2092, 340, 743, 3950, 55470, 4267, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetFileContentTypeDOC(t *testing.T) { file := `../testdata/files/test5.doc` fileType, err := GetFileContentType(file) if err != nil { t.Log("Error -> ", err) t.Fail() } if fileType != "application/doc" { t.Log(fileType) t.Fail() } }
explode_data.jsonl/24010
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 1949, 1703, 29504, 31464, 1155, 353, 8840, 836, 8, 341, 17661, 1669, 1565, 1244, 92425, 33220, 12697, 20, 23671, 3989, 17661, 929, 11, 1848, 1669, 2126, 1703, 29504, 4866, 692, 743, 1848, 961, 2092, 341, 197, 3244, 5247, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestClientMethodWithQueryParams(t *testing.T) { Convey("TestClientMethodWithQueryParams Requests", t, func(c C) { targetDir, err := ioutil.TempDir("", "") c.So(err, ShouldBeNil) apiDef := new(raml.APIDefinition) err = raml.ParseFile("../fixtures/body_with_query_params.raml", apiDef) c.So(err, ShouldBeNil) client := NewClient(apiDef, clientNameRequests, true) err = client.Generate(targetDir) c.So(err, ShouldBeNil) rootFixture := "./fixtures/method/client/complex_body/query_params_requests/" files := []string{ "animals_service.py", } for _, f := range files { s, err := utils.TestLoadFile(filepath.Join(targetDir, f)) c.So(err, ShouldBeNil) tmpl, err := utils.TestLoadFile(filepath.Join(rootFixture, f)) c.So(err, ShouldBeNil) c.So(s, ShouldEqual, tmpl) } c.Reset(func() { os.RemoveAll(targetDir) }) }) Convey("TestClientMethodWithQueryParams Aiohttp", t, func(c C) { targetDir, err := ioutil.TempDir("", "") c.So(err, ShouldBeNil) apiDef := new(raml.APIDefinition) err = raml.ParseFile("../fixtures/body_with_query_params.raml", apiDef) c.So(err, ShouldBeNil) client := NewClient(apiDef, clientNameAiohttp, true) err = client.Generate(targetDir) c.So(err, ShouldBeNil) rootFixture := "./fixtures/method/client/complex_body/query_params_aiohttp/" files := []string{ "animals_service.py", } for _, f := range files { s, err := utils.TestLoadFile(filepath.Join(targetDir, f)) c.So(err, ShouldBeNil) tmpl, err := utils.TestLoadFile(filepath.Join(rootFixture, f)) c.So(err, ShouldBeNil) c.So(s, ShouldEqual, tmpl) } c.Reset(func() { os.RemoveAll(targetDir) }) }) }
explode_data.jsonl/19338
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 733 }
[ 2830, 3393, 2959, 3523, 2354, 2859, 4870, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 2271, 2959, 3523, 2354, 2859, 4870, 50882, 497, 259, 11, 2915, 1337, 356, 8, 341, 197, 28861, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPerformRequestWithCancel(t *testing.T) { tr := &sleepingTransport{timeout: 3 * time.Second} httpClient := &http.Client{Transport: tr} client, err := NewSimpleClient(SetHttpClient(httpClient), SetMaxRetries(0)) if err != nil { t.Fatal(err) } type result struct { res *Response err error } ctx, cancel := context.WithCancel(context.Background()) resc := make(chan result, 1) go func() { res, err := client.PerformRequest(ctx, "GET", "/", nil, nil) resc <- result{res: res, err: err} }() select { case <-time.After(1 * time.Second): cancel() case res := <-resc: t.Fatalf("expected response before cancel, got %v", res) case <-ctx.Done(): t.Fatalf("expected no early termination, got ctx.Done(): %v", ctx.Err()) } err = ctx.Err() if err != context.Canceled { t.Fatalf("expected error context.Canceled, got: %v", err) } }
explode_data.jsonl/38026
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 46951, 1900, 2354, 9269, 1155, 353, 8840, 836, 8, 341, 25583, 1669, 609, 25809, 287, 27560, 90, 14150, 25, 220, 18, 353, 882, 32435, 532, 28080, 2959, 1669, 609, 1254, 11716, 90, 27560, 25, 489, 630, 25291, 11, 1848, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGuild_AddChannel(t *testing.T) { snowflakes := []Snowflake{ NewSnowflake(6), NewSnowflake(65), NewSnowflake(324), NewSnowflake(5435), NewSnowflake(63453), NewSnowflake(111111111), } guild := NewGuild() for i := range snowflakes { channel := NewChannel() channel.ID = snowflakes[len(snowflakes)-1-i] // reverse guild.AddChannel(channel) } for i, c := range guild.Channels { if snowflakes[i] != c.ID { t.Error("channels in guild did not sort correctly") } } }
explode_data.jsonl/55146
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 206 }
[ 2830, 3393, 72574, 21346, 9629, 1155, 353, 8840, 836, 8, 341, 1903, 3328, 1489, 2050, 1669, 3056, 62285, 63456, 515, 197, 197, 3564, 62285, 63456, 7, 21, 1326, 197, 197, 3564, 62285, 63456, 7, 21, 20, 1326, 197, 197, 3564, 62285, 6345...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSpatialObjectToWKB(t *testing.T) { testCases := []struct { ewkt geopb.EWKT expected geopb.WKB }{ {"POINT(1.0 1.0)", []byte("\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x3f\x00\x00\x00\x00\x00\x00\xf0\x3f")}, {"SRID=4004;POINT(1.0 1.0)", []byte("\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x3f\x00\x00\x00\x00\x00\x00\xf0\x3f")}, } for _, tc := range testCases { t.Run(string(tc.ewkt), func(t *testing.T) { so, err := parseEWKT(geopb.SpatialObjectType_GeometryType, tc.ewkt, geopb.DefaultGeometrySRID, DefaultSRIDIsHint) require.NoError(t, err) encoded, err := SpatialObjectToWKB(so, DefaultEWKBEncodingFormat) require.NoError(t, err) require.Equal(t, tc.expected, encoded) }) } }
explode_data.jsonl/31775
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 386 }
[ 2830, 3393, 90618, 1190, 1249, 54, 29862, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 197, 365, 5840, 257, 3893, 453, 65, 5142, 54, 33539, 198, 197, 42400, 3893, 453, 65, 1175, 29862, 198, 197, 59403, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTriggerImmutableFields verifies Trigger.CheckImmutableFields: the
// broker reference is immutable once set, while a filter change and a
// nil original (freshly created object) are both accepted.
func TestTriggerImmutableFields(t *testing.T) {
	tests := []struct {
		name     string
		current  *Trigger
		original *Trigger
		want     *apis.FieldError
	}{{
		name: "good (no change)",
		current: &Trigger{
			Spec: TriggerSpec{
				Broker: "broker",
			},
		},
		original: &Trigger{
			Spec: TriggerSpec{
				Broker: "broker",
			},
		},
		want: nil,
	}, {
		// No original means nothing could have been mutated.
		name: "new nil is ok",
		current: &Trigger{
			Spec: TriggerSpec{
				Broker: "broker",
			},
		},
		original: nil,
		want:     nil,
	}, {
		// The filter is not part of the immutable field set.
		name: "good (filter change)",
		current: &Trigger{
			Spec: TriggerSpec{
				Broker: "broker",
			},
		},
		original: &Trigger{
			Spec: TriggerSpec{
				Broker: "broker",
				Filter: validAttributesFilter,
			},
		},
		want: nil,
	}, {
		// Changing the broker must be rejected with a diff-style error.
		name: "bad (broker change)",
		current: &Trigger{
			Spec: TriggerSpec{
				Broker: "broker",
			},
		},
		original: &Trigger{
			Spec: TriggerSpec{
				Broker: "original_broker",
			},
		},
		want: &apis.FieldError{
			Message: "Immutable fields changed (-old +new)",
			Paths:   []string{"spec", "broker"},
			Details: `{string}:
	-: "original_broker"
	+: "broker"
`,
		},
	}}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			got := test.current.CheckImmutableFields(context.TODO(), test.original)
			// Compare rendered error strings; both nil renders compare equal.
			if diff := cmp.Diff(test.want.Error(), got.Error()); diff != "" {
				t.Errorf("CheckImmutableFields (-want, +got) = %v", diff)
			}
		})
	}
}
explode_data.jsonl/23592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 636 }
[ 2830, 3393, 17939, 58890, 8941, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 20121, 220, 353, 17939, 198, 197, 197, 9889, 353, 17939, 198, 197, 50780, 257, 353, 13725, 17087, 1454, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestReadOnly checks that Get() hands out a copy of the loaded
// configuration: mutating the returned value must not change what a
// subsequent Get() returns.
func TestReadOnly(t *testing.T) {
	Reset()
	// Mock command line arguments
	os.Args = append(os.Args, defaultDuskConfig)
	// This relies on default.dusk.toml
	if err := Load("default.dusk", nil, nil); err != nil {
		t.Errorf("Failed parse: %v", err)
	}
	if Get().Logger.Level != "debug" {
		t.Error("Invalid logger level")
	}
	// Mutate the value Get() returned; the stored config must be untouched.
	r := Get()
	r.Logger.Level = "MODIFIED_level"
	if Get().Logger.Level != "debug" {
		t.Errorf("Invalid config %s", Get().Logger.Level)
	}
}
explode_data.jsonl/43171
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 20914, 1155, 353, 8840, 836, 8, 341, 197, 14828, 2822, 197, 322, 14563, 3210, 1555, 5977, 198, 25078, 51015, 284, 8737, 9638, 51015, 11, 1638, 35, 32779, 2648, 692, 197, 322, 1096, 33644, 389, 1638, 950, 32779, 73494, 75, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGroup_Pattern(t *testing.T) { type fields struct { str string name string } tests := []struct { name string fields fields want string }{ // TODO: Add test cases. } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { g := Group{ str: tt.fields.str, name: tt.fields.name, } if got := g.Pattern(); got != tt.want { t.Errorf("Group.Pattern() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/56174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 2808, 1088, 3227, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 11355, 220, 914, 198, 197, 11609, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 55276, 5043, 198, 197, 50780,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSync(t *testing.T) { //Test should create the file before deleting it src := "s3://bucket/path/to/copy/to" dst := "k8s://namespace/pod/container/path/to/copy/from" parallel := 1 // one file at a time bufferSize := 1.0 // 1GB of in memory buffer size if err := skbn.Sync(src, dst, parallel, bufferSize); err != nil { log.Fatal(err) } }
explode_data.jsonl/13931
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 12154, 1155, 353, 8840, 836, 8, 341, 197, 322, 2271, 1265, 1855, 279, 1034, 1573, 33011, 432, 198, 41144, 1669, 330, 82, 18, 1110, 30410, 50976, 32429, 2899, 1266, 32429, 698, 52051, 1669, 330, 74, 23, 82, 1110, 2231, 4322...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTimer_AddLeveledEntry1 schedules a job via DelayAdd(1000ms delay,
// 1001ms interval) and asserts on the number of firings at two points in
// time. Presumably the first firing happens at delay+interval (~2001ms)
// — the two sleeps below bracket that moment; confirm against DelayAdd's
// documented semantics.
func TestTimer_AddLeveledEntry1(t *testing.T) {
    gtest.Case(t, func() {
        timer := New()
        array := garray.New()
        //glog.Println("start")
        timer.DelayAdd(1000*time.Millisecond, 1001*time.Millisecond, func() {
            //glog.Println("add")
            array.Append(1)
        })
        // At ~1500ms: nothing should have fired yet.
        time.Sleep(1500*time.Millisecond)
        gtest.Assert(array.Len(), 0)
        // At ~2800ms: exactly one firing is expected.
        time.Sleep(1300*time.Millisecond)
        //glog.Println("check")
        gtest.Assert(array.Len(), 1)
    })
}
explode_data.jsonl/3726
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 10105, 21346, 2304, 93964, 5874, 16, 1155, 353, 8840, 836, 8, 341, 262, 342, 1944, 727, 519, 1155, 11, 2915, 368, 341, 286, 9021, 1669, 1532, 741, 286, 1334, 1669, 342, 1653, 7121, 741, 286, 442, 70, 839, 12419, 445, 246...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFeatureEnabled(t *testing.T) { t.Run("Test feature enabled is correct", func(t *testing.T) { features := []FeatureSpec{ { Name: "testEnabled", Enabled: true, }, { Name: "testDisabled", Enabled: false, }, } assert.True(t, IsFeatureEnabled(features, "testEnabled")) assert.False(t, IsFeatureEnabled(features, "testDisabled")) assert.False(t, IsFeatureEnabled(features, "testMissing")) }) }
explode_data.jsonl/9032
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 13859, 5462, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 2271, 4565, 8970, 374, 4396, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1166, 22462, 1669, 3056, 13859, 8327, 515, 298, 197, 515, 571, 21297, 25, 262, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSmiRouter_GetRoutes reconciles a canary fixture and reads back the
// initial routing state. The three results are presumably the primary
// weight (100), the canary weight (0), and a mirroring flag (false) —
// named from context; confirm against GetRoutes' declaration.
func TestSmiRouter_GetRoutes(t *testing.T) {
	mocks := newFixture(nil)
	router := &SmiRouter{
		logger:        mocks.logger,
		flaggerClient: mocks.flaggerClient,
		smiClient:     mocks.meshClient,
		kubeClient:    mocks.kubeClient,
	}
	// Create/refresh the routing objects for the canary fixture.
	err := router.Reconcile(mocks.canary)
	require.NoError(t, err)

	p, c, m, err := router.GetRoutes(mocks.canary)
	require.NoError(t, err)
	assert.Equal(t, 100, p)
	assert.Equal(t, 0, c)
	assert.False(t, m)
}
explode_data.jsonl/8374
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 50, 8155, 9523, 13614, 26653, 1155, 353, 8840, 836, 8, 341, 2109, 25183, 1669, 501, 18930, 27907, 340, 67009, 1669, 609, 50, 8155, 9523, 515, 197, 17060, 25, 286, 68909, 16078, 345, 197, 1166, 75, 10114, 2959, 25, 68909, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAppConfig_SwitchContextByName(t *testing.T) { config := getAppConfig() config.SwitchContextByName("second-context") if config.CurrentContext.Name != "second-context" { t.Error("could not switch context") } }
explode_data.jsonl/6632
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 2164, 2648, 1098, 5539, 1972, 16898, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 76382, 2648, 2822, 25873, 808, 5539, 1972, 16898, 445, 5569, 63633, 5130, 743, 2193, 11517, 1972, 2967, 961, 330, 5569, 63633, 1, 341, 197, 3244...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestLaunchIDFromVerifiedClientIDQuerySingle exercises the single-item
// LaunchIDFromVerifiedClientID query: seeded client IDs resolve to their
// stored records, an unknown ID errors with "not found", and a nil
// request is rejected as "invalid request".
func TestLaunchIDFromVerifiedClientIDQuerySingle(t *testing.T) {
	keeper, ctx := keepertest.Monitoringc(t)
	wctx := sdk.WrapSDKContext(ctx)
	// Seed two records so each can be queried back individually.
	msgs := createNLaunchIDFromVerifiedClientID(keeper, ctx, 2)
	for _, tc := range []struct {
		desc     string
		request  *types.QueryGetLaunchIDFromVerifiedClientIDRequest
		response *types.QueryGetLaunchIDFromVerifiedClientIDResponse
		err      error
	}{
		{
			desc: "First",
			request: &types.QueryGetLaunchIDFromVerifiedClientIDRequest{
				ClientID: msgs[0].ClientID,
			},
			response: &types.QueryGetLaunchIDFromVerifiedClientIDResponse{LaunchIDFromVerifiedClientID: msgs[0]},
		},
		{
			desc: "Second",
			request: &types.QueryGetLaunchIDFromVerifiedClientIDRequest{
				ClientID: msgs[1].ClientID,
			},
			response: &types.QueryGetLaunchIDFromVerifiedClientIDResponse{LaunchIDFromVerifiedClientID: msgs[1]},
		},
		{
			// A client ID that was never seeded.
			desc: "KeyNotFound",
			request: &types.QueryGetLaunchIDFromVerifiedClientIDRequest{
				ClientID: strconv.Itoa(100000),
			},
			err: status.Error(codes.InvalidArgument, "not found"),
		},
		{
			// No request at all (tc.request stays nil).
			desc: "InvalidRequest",
			err:  status.Error(codes.InvalidArgument, "invalid request"),
		},
	} {
		t.Run(tc.desc, func(t *testing.T) {
			response, err := keeper.LaunchIDFromVerifiedClientID(wctx, tc.request)
			if tc.err != nil {
				require.ErrorIs(t, err, tc.err)
			} else {
				require.NoError(t, err)
				// nullify.Fill presumably normalizes nil/empty fields before
				// comparison — named from context.
				require.Equal(t,
					nullify.Fill(tc.response),
					nullify.Fill(response),
				)
			}
		})
	}
}
explode_data.jsonl/22539
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 605 }
[ 2830, 3393, 32067, 915, 3830, 54558, 2959, 915, 2859, 10888, 1155, 353, 8840, 836, 8, 341, 197, 18861, 11, 5635, 1669, 2506, 83386, 1321, 30314, 287, 66, 1155, 340, 6692, 3773, 1669, 45402, 38968, 31534, 1972, 7502, 340, 21169, 82, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMapProxy_SinglePutGet(t *testing.T) { testKey := "testingKey" testValue := "testingValue" mp.Put(testKey, testValue) res, err := mp.Get(testKey) AssertEqualf(t, err, res, testValue, "get returned a wrong value") mp.Clear() }
explode_data.jsonl/56955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 2227, 16219, 1098, 2173, 19103, 1949, 1155, 353, 8840, 836, 8, 341, 18185, 1592, 1669, 330, 8840, 1592, 698, 18185, 1130, 1669, 330, 8840, 1130, 698, 53230, 39825, 8623, 1592, 11, 1273, 1130, 340, 10202, 11, 1848, 1669, 1049...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddCreateOrder(t *testing.T) { cs := fillChangeset("Create") if cs.Create[0].Kind != "ServiceAccount" { t.Errorf("SA needs to be created before PVC") } if cs.Create[1].Kind != "PersistentVolumeClaim" { t.Errorf("PVC needs to be created before DC") } }
explode_data.jsonl/33774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 2212, 4021, 4431, 1155, 353, 8840, 836, 8, 341, 71899, 1669, 5155, 11317, 295, 445, 4021, 1138, 743, 10532, 7251, 58, 15, 936, 10629, 961, 330, 1860, 7365, 1, 341, 197, 3244, 13080, 445, 7778, 3880, 311, 387, 3465, 1573, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_CreateScratch_DirDestpath_Failure(t *testing.T) { rhcs := runhcs.Runhcs{ Debug: true, } ctx := context.TODO() err := rhcs.CreateScratch(ctx, t.TempDir()) if err == nil { t.Fatal("Should have failed 'CreateScratch' command with dir destpath") } }
explode_data.jsonl/71751
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 34325, 65508, 754, 1557, 404, 34830, 2343, 1400, 9373, 1155, 353, 8840, 836, 8, 341, 7000, 71, 4837, 1669, 1598, 71, 4837, 16708, 71, 4837, 515, 197, 34424, 25, 830, 345, 197, 630, 20985, 1669, 2266, 90988, 741, 9859, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestNewCommand wraps a legacy (oldcmds) command with NewCommand and
// checks two things: invoking it through the new cmds machinery yields
// the legacy marshaler's output, and the legacy subcommand tree and its
// options survive the conversion.
func TestNewCommand(t *testing.T) {
	root := &cmds.Command{
		Subcommands: map[string]*cmds.Command{
			"test": NewCommand(&oldcmds.Command{
				// Legacy Run: emits a single string result.
				Run: func(req oldcmds.Request, res oldcmds.Response) {
					res.SetOutput("Test.")
				},
				Marshalers: map[oldcmds.EncodingType]oldcmds.Marshaler{
					// Legacy text marshaler: the output arrives as a channel of
					// values; render the first value, which must be a string.
					oldcmds.Text: func(res oldcmds.Response) (io.Reader, error) {
						ch, ok := res.Output().(<-chan interface{})
						if !ok {
							t.Fatalf("output is not <-chan interface{} but %T", ch)
						}
						v := <-ch
						str, ok := v.(string)
						if !ok {
							t.Fatalf("read value is not string but %T", v)
						}
						buf := bytes.NewBuffer(nil)
						_, err := io.WriteString(buf, str)
						if err != nil {
							t.Fatal(err)
						}
						return buf, nil
					},
				},
				Subcommands: map[string]*oldcmds.Command{
					"sub": &oldcmds.Command{
						Options: []cmdkit.Option{
							cmdkit.NewOption(cmdkit.String, "test", "t", "some random test flag"),
						},
					},
				},
			}),
		},
	}

	path := []string{"test"}
	req, err := cmds.NewRequest(context.TODO(), path, nil, nil, nil, root)
	if err != nil {
		t.Fatal(err)
	}

	buf := bytes.NewBuffer(nil)

	// test calling "test" command
	testCmd := root.Subcommands["test"]
	enc := testCmd.Encoders[oldcmds.Text]
	if enc == nil {
		t.Fatal("got nil encoder")
	}

	re := cmds.NewWriterResponseEmitter(WriteNopCloser{buf}, req, enc)

	var env oldcmds.Context
	root.Call(req, re, &env)

	// The emitter quotes the string and appends a newline.
	expected := `"Test."
`
	if buf.String() != expected {
		t.Fatalf("expected string %#v but got %#v", expected, buf.String())
	}

	// test getting subcommand
	subCmd := testCmd.Subcommands["sub"]
	if subCmd == nil {
		t.Fatal("got nil subcommand")
	}

	if nOpts := len(subCmd.Options); nOpts != 1 {
		t.Fatalf("subcommand has %v options, expected 1", nOpts)
	}

	// The single option must keep both its long ("test") and short ("t") names.
	opt := subCmd.Options[0]
	if nNames := len(opt.Names()); nNames != 2 {
		t.Fatalf("option has %v names, expected 2", nNames)
	}

	names := opt.Names()
	if names[0] != "test" {
		t.Fatalf("option has name %q, expected %q", names[0], "test")
	}
	if names[1] != "t" {
		t.Fatalf("option has name %q, expected %q", names[1], "t")
	}
}
explode_data.jsonl/71793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 957 }
[ 2830, 3393, 3564, 4062, 1155, 353, 8840, 836, 8, 341, 33698, 1669, 609, 92407, 12714, 515, 197, 197, 3136, 24270, 25, 2415, 14032, 8465, 92407, 12714, 515, 298, 197, 1, 1944, 788, 1532, 4062, 2099, 813, 92407, 12714, 515, 571, 85952, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Subscribe covers the AMQP subscriber: deliveries are converted to
// mq.Message values, consume errors surface as "error-rmq" messages, and
// a connection-level notify error is handled with a retry that still
// delivers the message.
func Test_Subscribe(t *testing.T) {
	var testCases = []struct {
		desc             string
		retryInterval    time.Duration
		amqpDeliveryList []amqp.Delivery
		expectedMessages []mq.Message
		channelErr       error
		notifyErr        *amqp.Error
	}{
		{
			desc:          "amqp type messages get converted to appropriate mq.Messages",
			retryInterval: 1 * time.Second,
			amqpDeliveryList: []amqp.Delivery{
				amqp.Delivery{
					RoutingKey: "test.event",
					Headers:    map[string]interface{}{"some_random_header": "random_header_value"},
					Body:       []byte(`{"body": "lots of content"}`),
				},
			},
			// RoutingKey becomes the message Type; headers and body carry over.
			expectedMessages: []mq.Message{
				mq.Message{
					Type:   "test.event",
					Header: map[string]interface{}{"some_random_header": "random_header_value"},
					Body:   []byte(`{"body": "lots of content"}`),
				},
			},
		},
		{
			desc:          "in case of an error from the consume, appropriate error messages are dispatched",
			retryInterval: 1 * time.Second,
			channelErr:    errors.New("consume error"),
			expectedMessages: []mq.Message{
				mq.Message{
					Type:  "error-rmq",
					Error: pkgerrors.Wrap(errors.New("consume error"), "error during consume"),
				},
			},
		},
		{
			desc:          "if notifyerror returns an error, its handled and retried",
			retryInterval: 1 * time.Second,
			amqpDeliveryList: []amqp.Delivery{
				amqp.Delivery{
					RoutingKey: "test.event",
					Headers:    map[string]interface{}{"some_random_header": "random_header_value"},
					Body:       []byte(`{"body": "lots of content"}`),
				},
			},
			expectedMessages: []mq.Message{
				mq.Message{
					Type:   "test.event",
					Header: map[string]interface{}{"some_random_header": "random_header_value"},
					Body:   []byte(`{"body": "lots of content"}`),
				},
			},
			notifyErr: &amqp.Error{
				Code:   123,
				Reason: "jk lol",
			},
		},
	}
	for _, testCase := range testCases {
		t.Run(testCase.desc, func(t *testing.T) {
			assert := assert.New(t)
			// Fake channel feeding the configured deliveries/errors.
			fakeCh := &fakeAmqpChannel{
				deliveryList: testCase.amqpDeliveryList,
				err:          testCase.channelErr,
				notifyErr:    testCase.notifyErr,
			}
			fakeConn := &fakeConnection{
				fakeAmqpChannel: fakeCh,
			}
			subscriber, err := NewSubscriber("", true, fakeConn, testCase.retryInterval)
			assert.Nil(err)
			messageCh, err := subscriber.Subscribe()
			assert.Nil(err)
			var i int
			for message := range messageCh {
				// Exclude the per-message callbacks from the equality check.
				message.Ack = nil
				message.Requeue = nil
				message.Deadletter = nil
				if testCase.expectedMessages[i].Error != nil {
					// Errors are compared by message text, not identity.
					assert.EqualError(message.Error, testCase.expectedMessages[i].Error.Error())
				} else {
					assert.Equal(testCase.expectedMessages[i], message)
					assert.Nil(message.Error)
				}
				i++
			}
			subscriber.Close()
		})
	}
}
explode_data.jsonl/47220
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1260 }
[ 2830, 3393, 36359, 6273, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 37302, 284, 3056, 1235, 341, 197, 41653, 1797, 914, 198, 197, 17200, 1539, 10256, 262, 882, 33795, 198, 197, 197, 309, 32763, 38121, 852, 3056, 309, 32763, 909, 72871, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMiddlewarePanicOnLateUse(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("hello\n")) } mw := func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { next.ServeHTTP(w, r) }) } defer func() { if recover() == nil { t.Error("expected panic()") } }() r := NewRouter() r.Get("/", handler) r.Use(mw) // Too late to apply middleware, we're expecting panic(). }
explode_data.jsonl/42881
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 24684, 47, 31270, 1925, 61457, 10253, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6692, 4073, 10556, 3782, 445, 14990, 1699, 5455, 197, 630, 2109, 86, 1669, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_findModifiedCharts_repo2 runs findModifiedCharts against a
// fixture git repo whose single chart lives in a nested "deploy"
// directory, across three commit ranges: initial commit (chart appears),
// a commit not touching the chart (empty result), and a chart version
// bump (chart appears again).
func Test_findModifiedCharts_repo2(t *testing.T) {
	// repo2 is a single-project repo with a single chart in a nested "deploy" directory
	clonedPath, teardown := setupGitRepo("repo2_bare", t)
	defer teardown()

	tests := []struct {
		p      *Plugin
		want   map[string]bool
		reason string
	}{
		{
			reason: "Initial Commit",
			p: &Plugin{
				Config: &Config{
					RepoURL:          "http://charts.mycompany.com/",
					ChartsDir:        clonedPath,
					CurrentCommitID:  "6ee3ced14a2388fa90f6dd27fcbb549442016866",
					PreviousCommitID: "4b825dc642cb6eb9a060e54bf8d69288fbee4904", //git's empty tree
				},
			},
			want: map[string]bool{
				filepath.Join(clonedPath, "deploy/chart"): true,
			},
		},
		{
			reason: "No change in Chart",
			p: &Plugin{
				Config: &Config{
					RepoURL:          "http://charts.mycompany.com/",
					ChartsDir:        clonedPath,
					CurrentCommitID:  "b1badf73d0eee1241a664cd2defc69c104f12b3b",
					PreviousCommitID: "6ee3ced14a2388fa90f6dd27fcbb549442016866",
				},
			},
			want: map[string]bool{},
		},
		{
			reason: "Bump Chart version",
			p: &Plugin{
				Config: &Config{
					RepoURL:          "http://charts.mycompany.com/",
					ChartsDir:        clonedPath,
					CurrentCommitID:  "7213fbe8dd8cb8b4bbd11fbdbc2a58cbb046a7b6",
					PreviousCommitID: "b1badf73d0eee1241a664cd2defc69c104f12b3b",
				},
			},
			want: map[string]bool{
				filepath.Join(clonedPath, "deploy/chart"): true,
			},
		},
	}
	for _, test := range tests {
		if err := test.p.ValidateConfig(); err != nil {
			t.Error(err)
			return
		}
		got, err := test.p.findModifiedCharts()
		if err != nil {
			t.Error(err)
			return
		}
		if !reflect.DeepEqual(got, test.want) {
			t.Errorf("Incorrect modified charts map got %#v want %#v - Reason: %s", got, test.want, test.reason)
		}
	}
}
explode_data.jsonl/58799
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 897 }
[ 2830, 3393, 21814, 19148, 64878, 37784, 17, 1155, 353, 8840, 836, 8, 341, 197, 322, 15867, 17, 374, 264, 3175, 33696, 15867, 448, 264, 3175, 9487, 304, 264, 24034, 330, 35794, 1, 6220, 198, 39407, 19684, 1820, 11, 49304, 1669, 6505, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestHscan exercises the HSCAN command against the in-process server:
// full scans, out-of-range cursors, COUNT (accepted but ignored), MATCH
// patterns, and the argument-error cases.
func TestHscan(t *testing.T) {
	s, err := Run()
	ok(t, err)
	defer s.Close()
	c, err := proto.Dial(s.Addr())
	ok(t, err)
	defer c.Close()

	// We cheat with hscan. It always returns everything.

	s.HSet("h", "field1", "value1")
	s.HSet("h", "field2", "value2")

	// No problem
	mustDo(t, c,
		"HSCAN", "h", "0",
		proto.Array(
			proto.String("0"),
			proto.Array(
				proto.String("field1"), proto.String("value1"),
				proto.String("field2"), proto.String("value2"),
			),
		),
	)

	// Invalid cursor
	mustDo(t, c,
		"HSCAN", "h", "42",
		proto.Array(
			proto.String("0"),
			proto.Array(),
		),
	)

	// COUNT (ignored)
	mustDo(t, c,
		"HSCAN", "h", "0", "COUNT", "200",
		proto.Array(
			proto.String("0"),
			proto.Array(
				proto.String("field1"), proto.String("value1"),
				proto.String("field2"), proto.String("value2"),
			),
		),
	)

	// MATCH
	s.HSet("h", "aap", "a")
	s.HSet("h", "noot", "b")
	s.HSet("h", "mies", "m")
	mustDo(t, c,
		"HSCAN", "h", "0", "MATCH", "mi*",
		proto.Array(
			proto.String("0"),
			proto.Array(
				proto.String("mies"), proto.String("m"),
			),
		),
	)

	t.Run("errors", func(t *testing.T) {
		// Wrong arity and malformed cursors/flags produce protocol errors.
		mustDo(t, c,
			"HSCAN",
			proto.Error(errWrongNumber("hscan")),
		)
		mustDo(t, c,
			"HSCAN", "set",
			proto.Error(errWrongNumber("hscan")),
		)
		mustDo(t, c,
			"HSCAN", "set", "noint",
			proto.Error("ERR invalid cursor"),
		)
		mustDo(t, c,
			"HSCAN", "set", "1", "MATCH",
			proto.Error("ERR syntax error"),
		)
		mustDo(t, c,
			"HSCAN", "set", "1", "COUNT",
			proto.Error("ERR syntax error"),
		)
		mustDo(t, c,
			"HSCAN", "set", "1", "COUNT", "noint",
			proto.Error("ERR value is not an integer or out of range"),
		)
	})
}
explode_data.jsonl/11379
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 881 }
[ 2830, 3393, 39, 16405, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 6452, 741, 59268, 1155, 11, 1848, 340, 16867, 274, 10421, 741, 1444, 11, 1848, 1669, 18433, 98462, 1141, 93626, 2398, 59268, 1155, 11, 1848, 340, 16867, 272, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHandlerGetErrorsPercentage(t *testing.T) { config := mockConfig{ doErrorsPercentage: func() int { return 12 }, } response := doGetErrorsPercentageRequest(handlerForConfig(config)) checkStatusCode(t, response, http.StatusOK) checkBody(t, response, "12\n") }
explode_data.jsonl/48339
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 3050, 1949, 13877, 36167, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 7860, 2648, 515, 197, 19935, 13877, 36167, 25, 2915, 368, 526, 341, 298, 853, 220, 16, 17, 198, 197, 197, 1583, 197, 630, 21735, 1669, 56704, 13877, 3616...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsMethodAllowedReturnsTrueWithOptions(t *testing.T) { s := New(Options{ // Intentionally left blank. }) if !s.isMethodAllowed("OPTIONS") { t.Error("IsMethodAllowed should return true when c.allowedMethods is nil.") } }
explode_data.jsonl/57642
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 3872, 3523, 35382, 16446, 2514, 74238, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 7, 3798, 515, 197, 322, 1333, 2939, 745, 2115, 10113, 624, 197, 3518, 743, 753, 82, 2079, 3523, 35382, 445, 56929, 899, 341, 197, 3244,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestFetchConnectionRecord covers svc.fetchConnectionRecord failure
// modes: a malformed ~thread decorator, a message without a thread ID,
// and a well-formed response whose connection record was never stored.
func TestFetchConnectionRecord(t *testing.T) {
	t.Run("fetch connection record - invalid payload", func(t *testing.T) {
		svc, err := New(&protocol.MockProvider{
			ServiceMap: map[string]interface{}{
				mediator.Coordination: &mockroute.MockMediatorSvc{},
			},
		})
		require.NoError(t, err)
		// "~thread" must be a map with string keys; int keys are rejected.
		_, err = svc.fetchConnectionRecord("", service.DIDCommMsgMap{"~thread": map[int]int{1: 1}})
		require.Contains(t, fmt.Sprintf("%v", err), `'~thread' needs a map with string keys`)
	})

	t.Run("fetch connection record - no thread id", func(t *testing.T) {
		svc, err := New(&protocol.MockProvider{
			ServiceMap: map[string]interface{}{
				mediator.Coordination: &mockroute.MockMediatorSvc{},
			},
		})
		require.NoError(t, err)
		// A Request carries no thread, so the thread-ID hash has no input.
		_, err = svc.fetchConnectionRecord(theirNSPrefix, toDIDCommMsg(t, &Request{
			Type: ResponseMsgType,
			ID:   generateRandomID(),
		}))
		require.Error(t, err)
		require.Contains(t, err.Error(), "unable to compute hash, empty bytes")
	})

	t.Run("fetch connection record - valid input", func(t *testing.T) {
		svc, err := New(&protocol.MockProvider{
			ServiceMap: map[string]interface{}{
				mediator.Coordination: &mockroute.MockMediatorSvc{},
			},
		})
		require.NoError(t, err)
		// Well-formed message, but nothing was stored under its thread ID.
		_, err = svc.fetchConnectionRecord(theirNSPrefix, toDIDCommMsg(t, &Response{
			Type:   ResponseMsgType,
			ID:     generateRandomID(),
			Thread: &decorator.Thread{ID: generateRandomID()},
		}))
		require.Error(t, err)
		require.Contains(t, err.Error(), "get connectionID by namespaced threadID: data not found")
	})
}
explode_data.jsonl/30540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 613 }
[ 2830, 3393, 20714, 4526, 6471, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 9641, 3633, 3255, 481, 8318, 7729, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1903, 7362, 11, 1848, 1669, 1532, 2099, 17014, 24664, 5179, 515, 298, 9161...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateSecurityRuleType(t *testing.T) { validTypes := []string{ "ingress", "egress", } for _, v := range validTypes { if _, errors := validateSecurityRuleType(v, "type"); len(errors) > 0 { t.Fatalf("%q should be a valid Security Group Rule type: %v", v, errors) } } invalidTypes := []string{ "foo", "ingresss", } for _, v := range invalidTypes { if _, errors := validateSecurityRuleType(v, "type"); len(errors) == 0 { t.Fatalf("%q should be an invalid Security Group Rule type: %v", v, errors) } } }
explode_data.jsonl/78591
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 17926, 15352, 11337, 929, 1155, 353, 8840, 836, 8, 341, 56322, 4173, 1669, 3056, 917, 515, 197, 197, 1, 287, 673, 756, 197, 197, 1, 791, 673, 756, 197, 532, 2023, 8358, 348, 1669, 2088, 2697, 4173, 341, 197, 743, 8358, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestUnitHandleGetPractitionerResources covers the GET practitioner
// resources handler: missing transaction ID in the URL, a dao error, a
// missing insolvency case, an empty practitioner list, and success.
func TestUnitHandleGetPractitionerResources(t *testing.T) {
	// NOTE(review): presumably the handler needs the repository root as
	// the working directory — confirm why the Chdir is required.
	err := os.Chdir("..")
	if err != nil {
		log.ErrorR(nil, fmt.Errorf("error accessing root directory"))
	}

	Convey("Must need a transactionID in the URL", t, func() {
		mockCtrl := gomock.NewController(t)
		defer mockCtrl.Finish()

		// false: serve the request without a transaction ID in the URL.
		res := serveGetPractitionerResourcesRequest(mock_dao.NewMockService(mockCtrl), false)

		So(res.Code, ShouldEqual, http.StatusBadRequest)
	})

	Convey("Generic error when retrieving practitioner resources from mongo", t, func() {
		mockCtrl := gomock.NewController(t)
		defer mockCtrl.Finish()

		mockService := mock_dao.NewMockService(mockCtrl)
		// Expect GetPractitionersResource to be called once and return an error
		mockService.EXPECT().GetPractitionerResources(transactionID).Return(nil, fmt.Errorf("there was a problem handling your request for transaction %s", transactionID)).Times(1)

		res := serveGetPractitionerResourcesRequest(mockService, true)

		So(res.Code, ShouldEqual, http.StatusInternalServerError)
	})

	Convey("Error when retrieving practitioner resources from mongo - insolvency case not found", t, func() {
		mockCtrl := gomock.NewController(t)
		defer mockCtrl.Finish()

		mockService := mock_dao.NewMockService(mockCtrl)
		// Expect GetPractitionersResource to be called once and return nil, nil
		mockService.EXPECT().GetPractitionerResources(transactionID).Return(nil, nil).Times(1)

		res := serveGetPractitionerResourcesRequest(mockService, true)

		So(res.Code, ShouldEqual, http.StatusNotFound)
	})

	Convey("Error when retrieving practitioner resources from mongo - no practitioners assigned to insolvency case", t, func() {
		mockCtrl := gomock.NewController(t)
		defer mockCtrl.Finish()

		mockService := mock_dao.NewMockService(mockCtrl)
		var practitionerResources []models.PractitionerResourceDao
		// Expect GetPractitionersResource to be called once and return empty list, nil
		mockService.EXPECT().GetPractitionerResources(transactionID).Return(practitionerResources, nil).Times(1)

		res := serveGetPractitionerResourcesRequest(mockService, true)

		So(res.Code, ShouldEqual, http.StatusNotFound)
	})

	Convey("Successfully retrieve practitioners for insolvency case", t, func() {
		mockCtrl := gomock.NewController(t)
		defer mockCtrl.Finish()

		mockService := mock_dao.NewMockService(mockCtrl)
		practitionerResources := []models.PractitionerResourceDao{
			{
				IPCode:    "IPCode",
				FirstName: "FirstName",
				LastName:  "LastName",
				Address: models.AddressResourceDao{
					AddressLine1: "AddressLine1",
					Locality:     "Locality",
				},
				Role: "Role",
			},
		}
		// Expect GetPractitionersResource to be called once and return list of practitioners, nil
		mockService.EXPECT().GetPractitionerResources(transactionID).Return(practitionerResources, nil).Times(1)

		res := serveGetPractitionerResourcesRequest(mockService, true)

		So(res.Code, ShouldEqual, http.StatusOK)
	})
}
explode_data.jsonl/25956
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 974 }
[ 2830, 3393, 4562, 6999, 1949, 3533, 531, 71246, 11277, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 2643, 6353, 3741, 95032, 1138, 743, 1848, 961, 2092, 341, 197, 6725, 6141, 49, 27907, 11, 8879, 13080, 445, 841, 31788, 3704, 6220, 5455, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInterpretError(t *testing.T) { tests := []struct { Test string Input string Output string }{ { Test: "other plugin not installed", Input: `{"error":{"root_cause":[{"type":"parse_exception","reason":"No processor type exists with name [hello_test]","header":{"processor_type":"hello_test"}}],"type":"parse_exception","reason":"No processor type exists with name [hello_test]","header":{"processor_type":"hello_test"}},"status":400}`, Output: "this module requires an Elasticsearch plugin that provides the hello_test processor. " + "Please visit the Elasticsearch documentation for instructions on how to install this plugin. " + "Response body: " + `{"error":{"root_cause":[{"type":"parse_exception","reason":"No processor type exists with name [hello_test]","header":{"processor_type":"hello_test"}}],"type":"parse_exception","reason":"No processor type exists with name [hello_test]","header":{"processor_type":"hello_test"}},"status":400}`, }, { Test: "Elasticsearch 2.4", Input: `{"error":{"root_cause":[{"type":"invalid_index_name_exception","reason":"Invalid index name [_ingest], must not start with '_'","index":"_ingest"}],"type":"invalid_index_name_exception","reason":"Invalid index name [_ingest], must not start with '_'","index":"_ingest"},"status":400}`, Output: `the Ingest Node functionality seems to be missing from Elasticsearch. The Filebeat modules require Elasticsearch >= 5.0. This is the response I got from Elasticsearch: {"error":{"root_cause":[{"type":"invalid_index_name_exception","reason":"Invalid index name [_ingest], must not start with '_'","index":"_ingest"}],"type":"invalid_index_name_exception","reason":"Invalid index name [_ingest], must not start with '_'","index":"_ingest"},"status":400}`, }, { Test: "Elasticsearch 1.7", Input: `{"error":"InvalidIndexNameException[[_ingest] Invalid index name [_ingest], must not start with '_']","status":400}`, Output: `the Filebeat modules require Elasticsearch >= 5.0. 
This is the response I got from Elasticsearch: {"error":"InvalidIndexNameException[[_ingest] Invalid index name [_ingest], must not start with '_']","status":400}`, }, { Test: "bad json", Input: `blah`, Output: `couldn't load pipeline: test. Additionally, error decoding response body: blah`, }, { Test: "another error", Input: `{"error":{"root_cause":[{"type":"test","reason":""}],"type":"test","reason":""},"status":400}`, Output: "couldn't load pipeline: test. Response body: " + `{"error":{"root_cause":[{"type":"test","reason":""}],"type":"test","reason":""},"status":400}`, }, } for _, test := range tests { t.Run(test.Test, func(t *testing.T) { errResult := interpretError(errors.New("test"), []byte(test.Input)) assert.Equal(t, errResult.Error(), test.Output, test.Test) }) } }
explode_data.jsonl/64760
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 899 }
[ 2830, 3393, 3306, 8043, 1454, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 73866, 256, 914, 198, 197, 66588, 220, 914, 198, 197, 80487, 914, 198, 197, 59403, 197, 197, 515, 298, 73866, 25, 220, 330, 1575, 9006, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDuplicate(t *testing.T) { s, c := makeTestServer() defer s.Shutdown() filename := "test-duplicate" mode := "octet" bs := []byte("lalala") sender, err := c.Send(filename, mode) if err != nil { t.Fatalf("requesting write: %v", err) } buf := bytes.NewBuffer(bs) _, err = sender.ReadFrom(buf) if err != nil { t.Fatalf("send error: %v", err) } sender, err = c.Send(filename, mode) if err == nil { t.Fatalf("file already exists") } t.Logf("sending file that already exists: %v", err) }
explode_data.jsonl/17555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 53979, 1155, 353, 8840, 836, 8, 341, 1903, 11, 272, 1669, 1281, 2271, 5475, 741, 16867, 274, 10849, 18452, 741, 66434, 1669, 330, 1944, 1737, 14070, 698, 60247, 1669, 330, 41692, 295, 698, 93801, 1669, 3056, 3782, 445, 75, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetOrderbook(t *testing.T) { t.Parallel() _, err := b.GetOrderbook(BTCAUD, 2) if err != nil { t.Error("GetTrades() error", err) } }
explode_data.jsonl/33143
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 1949, 4431, 2190, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 293, 2234, 4431, 2190, 5349, 51, 5049, 4656, 11, 220, 17, 340, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 445, 1949, 1282, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestBatchGetPeers(t *testing.T) { once.Do(testSetup) peer0 := &fakePeer{} peer1 := &fakePeer{} peer2 := &fakePeer{} peerList := fakePeers([]ProtoGetter{peer0, peer1, peer2, nil}) const cacheSize = 0 // disabled localHits := 0 getter := func(_ context.Context, keyList []string, destList []Sink) []error { localHits = localHits + len(keyList) errList := make([]error, 0) for index := range keyList { val := "got:" + keyList[index] dest := StringSink(&val) err := dest.SetString("got:"+keyList[index], time.Time{}) if err != nil { errList = append(errList, err) continue } destList[index] = dest } return errList } testGroup := newGroup("TestBatchGetPeers-group", cacheSize, BatchGetterFunc(getter), peerList) run := func(name string, n int, wantSummary string) { // Reset counters localHits = 0 for _, p := range []*fakePeer{peer0, peer1, peer2} { p.hits = 0 } keyList := make([]string, n) wantList := make([]string, n) gotList := make([]string, n) destList := make([]Sink, n) for i := 0; i < n; i++ { key := fmt.Sprintf("key-%d", i) keyList[i] = key want := "got:" + key wantList[i] = want destList[i] = StringSink(&gotList[i]) } errList := testGroup.BatchGet(dummyCtx, keyList, destList) if len(errList) > 0 { for index, err := range errList { t.Errorf("%s: error on key %q: %v", name, keyList[index], err) } } for index, dest := range destList { view, err := dest.view() if err != nil { t.Errorf("%s: error on key %q: %v", name, keyList[index], err) } gotList[index] = view.s } for index, want := range wantList { if gotList[index] != want { t.Errorf("%s: for key %q, got %q; want %q", name, keyList[index], gotList[index], want) } } summary := func() string { return fmt.Sprintf("localHits = %d, peers = %d %d %d", localHits, peer0.hits, peer1.hits, peer2.hits) } if got := summary(); got != wantSummary { t.Errorf("%s: got %q; want %q", name, got, wantSummary) } } resetCacheSize := func(maxBytes int64) { g := testGroup g.cacheBytes = maxBytes g.mainCache = cache{} g.hotCache = cache{} } 
// Base case; peers all up, with no problems. resetCacheSize(1 << 20) run("base", 200, "localHits = 49, peers = 51 49 51") // Verify cache was hit. All localHits and peers are gone as the hotCache has // the data we need run("cached_base", 200, "localHits = 0, peers = 0 0 0") resetCacheSize(0) // With one of the peers being down. // TODO(bradfitz): on a peer number being unavailable, the // consistent hashing should maybe keep trying others to // spread the load out. Currently it fails back to local // execution if the first consistent-hash slot is unavailable. peerList[0] = nil run("one_peer_down", 200, "localHits = 100, peers = 0 49 51") //// Failing peer peerList[0] = peer0 peer0.fail = true run("peer0_failing", 200, "localHits = 100, peers = 51 49 51") }
explode_data.jsonl/62847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1191 }
[ 2830, 3393, 21074, 1949, 10197, 388, 1155, 353, 8840, 836, 8, 341, 197, 13184, 33596, 8623, 21821, 340, 197, 16537, 15, 1669, 609, 30570, 30888, 16094, 197, 16537, 16, 1669, 609, 30570, 30888, 16094, 197, 16537, 17, 1669, 609, 30570, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDeclareFaults(t *testing.T) { periodOffset := abi.ChainEpoch(100) actor := newHarness(t, periodOffset) builder := builderForHarness(actor). WithBalance(bigBalance, big.Zero()) t.Run("declare fault pays fee", func(t *testing.T) { // Get sector into proving state rt := builder.Build(t) actor.constructAndVerify(rt) precommits := actor.commitAndProveSectors(rt, 1, 100, nil) // Skip to end of proving period, cron adds sectors to proving set. completeProvingPeriod(rt, actor, &cronConfig{newSectors: true}) info := actor.getSector(rt, precommits[0].SectorNumber) // Declare the sector as faulted ss, err := info.SealProof.SectorSize() require.NoError(t, err) sectorQAPower := miner.QAPowerForSector(ss, info) totalQAPower := big.NewInt(1 << 52) fee := miner.PledgePenaltyForDeclaredFault(actor.epochReward, totalQAPower, sectorQAPower) actor.declareFaults(rt, totalQAPower, fee, info) }) }
explode_data.jsonl/43146
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 78455, 58780, 82, 1155, 353, 8840, 836, 8, 341, 197, 19304, 6446, 1669, 61050, 98269, 44338, 7, 16, 15, 15, 340, 93410, 1669, 501, 74248, 1155, 11, 4168, 6446, 340, 44546, 1669, 7363, 2461, 74248, 65451, 4292, 197, 197, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWorstPatternFormat(t *testing.T) { in := "short:[%d %t] good:[%D %T] levelPadded:[%-10L] long:%S short:%s xs:%10x Msg:%M Fnc:%P Pkg:%p" out := "short:[2011/10/20 15:39:07] good:[2011-10-20 15:39:07.383] levelPadded:[INFO ] " + "long:/blah/der/some_file.go:7 short:some_file.go:7 xs: some_file Msg:hellooooo nurse! Fnc:hi.Zoot Pkg:hi\n" pf := NewPatFormatter(in) verify(t, in, pf.Format(lr), out) }
explode_data.jsonl/25104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 54, 65296, 15760, 4061, 1155, 353, 8840, 836, 8, 341, 17430, 1669, 330, 8676, 7259, 4, 67, 1018, 83, 60, 1661, 7259, 4, 35, 1018, 51, 60, 2188, 47, 16828, 7259, 42973, 16, 15, 43, 60, 1293, 7533, 50, 2805, 7533, 82, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOSDPlacement(t *testing.T) { clientset := fake.NewSimpleClientset() clusterInfo := &cephclient.ClusterInfo{ Namespace: "ns", CephVersion: cephver.Nautilus, } clusterInfo.SetName("testing") clusterInfo.OwnerInfo = cephclient.NewMinimumOwnerInfo(t) context := &clusterd.Context{Clientset: clientset, ConfigDir: "/var/lib/rook", Executor: &exectest.MockExecutor{}} spec := cephv1.ClusterSpec{ Placement: cephv1.PlacementSpec{ "all": { NodeAffinity: &v1.NodeAffinity{ RequiredDuringSchedulingIgnoredDuringExecution: &v1.NodeSelector{ NodeSelectorTerms: []v1.NodeSelectorTerm{ { MatchExpressions: []v1.NodeSelectorRequirement{{ Key: "role", Operator: v1.NodeSelectorOpIn, Values: []string{"storage-node1"}, }}, }, }, }, }, }, "osd": { NodeAffinity: &v1.NodeAffinity{ RequiredDuringSchedulingIgnoredDuringExecution: &v1.NodeSelector{ NodeSelectorTerms: []v1.NodeSelectorTerm{ { MatchExpressions: []v1.NodeSelectorRequirement{{ Key: "role", Operator: v1.NodeSelectorOpIn, Values: []string{"storage-node1"}, }}, }, }, }, }, }, "prepareosd": { NodeAffinity: &v1.NodeAffinity{ RequiredDuringSchedulingIgnoredDuringExecution: &v1.NodeSelector{ NodeSelectorTerms: []v1.NodeSelectorTerm{ { MatchExpressions: []v1.NodeSelectorRequirement{{ Key: "role", Operator: v1.NodeSelectorOpIn, Values: []string{"storage-node1"}, }}, }, }, }, }, }, }, Storage: cephv1.StorageScopeSpec{ OnlyApplyOSDPlacement: false, }, } osdProps := osdProperties{ pvc: v1.PersistentVolumeClaimVolumeSource{ ClaimName: "pvc1", }, } osdProps.placement = cephv1.Placement{NodeAffinity: &corev1.NodeAffinity{ RequiredDuringSchedulingIgnoredDuringExecution: &corev1.NodeSelector{ NodeSelectorTerms: []corev1.NodeSelectorTerm{ { MatchExpressions: []corev1.NodeSelectorRequirement{ { Key: "role", Operator: v1.NodeSelectorOpIn, Values: []string{"storage-node3"}, }, }, }, }, }, }, } osdProps.preparePlacement = &cephv1.Placement{NodeAffinity: &corev1.NodeAffinity{ RequiredDuringSchedulingIgnoredDuringExecution: &corev1.NodeSelector{ 
NodeSelectorTerms: []corev1.NodeSelectorTerm{ { MatchExpressions: []corev1.NodeSelectorRequirement{ { Key: "role", Operator: v1.NodeSelectorOpIn, Values: []string{"storage-node3"}, }, }, }, }, }, }, } c := New(context, clusterInfo, spec, "rook/rook:myversion") osd := OSDInfo{ ID: 0, CVMode: "raw", } dataPathMap := &provisionConfig{ DataPathMap: opconfig.NewDatalessDaemonDataPathMap(c.clusterInfo.Namespace, "/var/lib/rook"), } // For OSD daemon // When OnlyApplyOSDPlacement false, in case of PVC r, err := c.makeDeployment(osdProps, osd, dataPathMap) assert.NoError(t, err) assert.Equal(t, 2, len(r.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) // For OSD-prepare job job, err := c.makeJob(osdProps, dataPathMap) assert.NoError(t, err) assert.Equal(t, 2, len(job.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) // When OnlyApplyOSDPlacement true, in case of PVC spec.Storage.OnlyApplyOSDPlacement = true c = New(context, clusterInfo, spec, "rook/rook:myversion") r, err = c.makeDeployment(osdProps, osd, dataPathMap) assert.NoError(t, err) assert.Equal(t, 1, len(r.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) // For OSD-prepare job job, err = c.makeJob(osdProps, dataPathMap) assert.NoError(t, err) assert.Equal(t, 1, len(job.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) // When OnlyApplyOSDPlacement false, in case of non-PVC spec.Storage.OnlyApplyOSDPlacement = false osdProps = osdProperties{} c = New(context, clusterInfo, spec, "rook/rook:myversion") r, err = c.makeDeployment(osdProps, osd, dataPathMap) assert.NoError(t, err) assert.Equal(t, 2, 
len(r.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) // For OSD-prepare job job, err = c.makeJob(osdProps, dataPathMap) assert.NoError(t, err) assert.Equal(t, 2, len(job.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) // When OnlyApplyOSDPlacement true, in case of non-PVC spec.Storage.OnlyApplyOSDPlacement = true c = New(context, clusterInfo, spec, "rook/rook:myversion") r, err = c.makeDeployment(osdProps, osd, dataPathMap) assert.NoError(t, err) assert.Equal(t, 1, len(r.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) // For OSD-prepare job job, err = c.makeJob(osdProps, dataPathMap) assert.NoError(t, err) assert.Equal(t, 1, len(job.Spec.Template.Spec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions)) }
explode_data.jsonl/73008
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2336 }
[ 2830, 3393, 3126, 35, 28237, 1155, 353, 8840, 836, 8, 341, 25291, 746, 1669, 12418, 7121, 16374, 2959, 746, 741, 197, 18855, 1731, 1669, 609, 58722, 2972, 72883, 1731, 515, 197, 90823, 25, 256, 330, 4412, 756, 197, 6258, 23544, 5637, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGet(t *testing.T) { var numTimesInitialized int32 expectedTimesInitialized := 2 cache := New("Example_Cache", func(key Key) (interface{}, error) { if key.String() == "error" { return nil, fmt.Errorf("some error") } atomic.AddInt32(&numTimesInitialized, 1) return fmt.Sprintf("Value_for_key_%s", key), nil }) concurrency := 100 var wg sync.WaitGroup wg.Add(concurrency) for i := 0; i < concurrency; i++ { go func() { defer wg.Done() value, err := cache.Get(NewStringKey("Key1")) if err != nil { test.Failf(t, "Error returned: %s", err) } expectedValue := "Value_for_key_Key1" if value != expectedValue { test.Failf(t, "Expecting value [%s] but got [%s]", expectedValue, value) } value, err = cache.Get(NewStringKey("Key2")) if err != nil { test.Failf(t, "Error returned: %s", err) } expectedValue = "Value_for_key_Key2" if value != expectedValue { test.Failf(t, "Expecting value [%s] but got [%s]", expectedValue, value) } _, err = cache.Get(NewStringKey("error")) if err == nil { test.Failf(t, "Expecting error but got none") } }() } wg.Wait() if num := atomic.LoadInt32(&numTimesInitialized); num != int32(expectedTimesInitialized) { t.Fatalf("Expecting initializer to be called %d time(s) but it was called %d time(s)", expectedTimesInitialized, num) } cache.Close() }
explode_data.jsonl/2252
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 553 }
[ 2830, 3393, 1949, 1155, 353, 8840, 836, 8, 341, 2405, 1629, 18889, 22495, 526, 18, 17, 198, 42400, 18889, 22495, 1669, 220, 17, 271, 52680, 1669, 1532, 445, 13314, 920, 1777, 497, 2915, 4857, 5309, 8, 320, 4970, 22655, 1465, 8, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_createBootClone_into_empty_directory_succeeds(t *testing.T) { log.SetOutput(ioutil.Discard) bootDir, err := ioutil.TempDir("", "boot-test") require.NoError(t, err) defer func() { _ = os.RemoveAll(bootDir) }() factory := cmd_mocks.NewMockFactory() commonOpts := opts.NewCommonOptionsWithFactory(factory) commonOpts.BatchMode = true o := BootOptions{ CommonOptions: &commonOpts, Dir: bootDir, } repoPath := filepath.Join(bootDir, "jenkins-x-boot-config") cloneDir, err := o.createBootClone(config.DefaultBootRepository, config.DefaultVersionsRef, repoPath) assert.NoError(t, err) assert.Contains(t, cloneDir, repoPath) }
explode_data.jsonl/63006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 8657, 17919, 37677, 45514, 15124, 14846, 643, 29264, 82, 1155, 353, 8840, 836, 8, 341, 6725, 4202, 5097, 1956, 30158, 909, 47560, 340, 197, 4619, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 4619, 16839, 1138, 17957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIteratorNext(t *testing.T) { const n = 100 l := NewSkiplist(arenaSize, new(y.DefaultKeyComparator)) defer l.DecrRef() it := l.NewIterator() defer it.Close() require.False(t, it.Valid()) it.SeekToFirst() require.False(t, it.Valid()) for i := n - 1; i >= 0; i-- { l.Put(y.KeyWithTs([]byte(fmt.Sprintf("%05d", i)), 0), y.ValueStruct{Value: newValue(i), Meta: 0, UserMeta: 0}) } it.SeekToFirst() for i := 0; i < n; i++ { require.True(t, it.Valid()) v := it.Value() require.EqualValues(t, newValue(i), v.Value) it.Next() } require.False(t, it.Valid()) }
explode_data.jsonl/49654
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 11951, 5847, 1155, 353, 8840, 836, 8, 341, 4777, 308, 284, 220, 16, 15, 15, 198, 8810, 1669, 1532, 50, 6642, 39934, 7, 30527, 1695, 11, 501, 7021, 13275, 1592, 38658, 1171, 16867, 326, 22442, 81, 3945, 741, 23374, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestObjSetSym(t *testing.T) { const SCRIPT = ` 'use strict'; var sym = Symbol(true); var p1 = Object.create(null); var p2 = Object.create(p1); Object.defineProperty(p1, sym, { value: 42 }); Object.defineProperty(p2, sym, { value: 43, writable: true, }); var o = Object.create(p2); o[sym] = 44; o[sym]; ` testScript1(SCRIPT, intToValue(44), t) }
explode_data.jsonl/10515
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 5261, 1649, 27912, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 197, 19176, 7304, 1010, 2405, 7886, 284, 19612, 3715, 317, 2405, 281, 16, 284, 3002, 2520, 4967, 317, 2405, 281, 17, 284, 3002, 2520, 1295, 16, 317,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseValidListLine(t *testing.T) { for _, lt := range listTests { entry, err := parseListLine(lt.line, now, time.UTC) if err != nil { t.Errorf("parseListLine(%v) returned err = %v", lt.line, err) continue } if entry.Name != lt.name { t.Errorf("parseListLine(%v).Name = '%v', want '%v'", lt.line, entry.Name, lt.name) } if entry.Type != lt.entryType { t.Errorf("parseListLine(%v).EntryType = %v, want %v", lt.line, entry.Type, lt.entryType) } if entry.Size != lt.size { t.Errorf("parseListLine(%v).Size = %v, want %v", lt.line, entry.Size, lt.size) } if !entry.Time.Equal(lt.time) { t.Errorf("parseListLine(%v).Time = %v, want %v", lt.line, entry.Time, lt.time) } } }
explode_data.jsonl/54610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 14463, 4088, 852, 2460, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 25175, 1669, 2088, 1140, 18200, 341, 197, 48344, 11, 1848, 1669, 4715, 852, 2460, 2333, 83, 10932, 11, 1431, 11, 882, 87069, 340, 197, 743, 1848, 961, 2092,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAdmissionPluginNames(t *testing.T) { for _, plugin := range CombinedAdmissionControlPlugins { if !strings.HasPrefix(plugin, "openshift.io/") && !kubeAdmissionPlugins.Has(plugin) && !legacyOpenshiftAdmissionPlugins.Has(plugin) { t.Errorf("openshift admission plugins must be prefixed with openshift.io/ %v", plugin) } } }
explode_data.jsonl/49109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 2589, 2728, 11546, 7980, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 9006, 1669, 2088, 57652, 2589, 2728, 3273, 45378, 341, 197, 743, 753, 18594, 94357, 46801, 11, 330, 24175, 47833, 4245, 97112, 1009, 753, 97717, 2589, 2728, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCall_ReserveResourcesStringer(t *testing.T) { popr := math_rand.New(math_rand.NewSource(time.Now().UnixNano())) p := NewPopulatedCall_ReserveResources(popr, false) s1 := p.String() s2 := fmt.Sprintf("%v", p) if s1 != s2 { t.Fatalf("String want %v got %v", s1, s2) } }
explode_data.jsonl/42090
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 7220, 92815, 5852, 11277, 703, 261, 1155, 353, 8840, 836, 8, 341, 3223, 46288, 1669, 6888, 33864, 7121, 37270, 33864, 7121, 3608, 9730, 13244, 1005, 55832, 83819, 12145, 3223, 1669, 1532, 11598, 7757, 7220, 92815, 5852, 11277, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_HookProvider_TransformRequest(t *testing.T) { provider := HookProvider{} t.Log("Unsupported Event Type") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"not:supported"}, "Content-Type": {"application/json; charset=utf-8"}, "X-Attempt-Number": {"1"}, }, } hookTransformResult := provider.TransformRequest(&request) require.False(t, hookTransformResult.ShouldSkip) require.EqualError(t, hookTransformResult.Error, "X-Event-Key is not supported: not:supported") } t.Log("Unsupported Content-Type") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"repo:refs_changed"}, "Content-Type": {"not/supported"}, }, } hookTransformResult := provider.TransformRequest(&request) require.False(t, hookTransformResult.ShouldSkip) require.EqualError(t, hookTransformResult.Error, "Content-Type is not supported: not/supported") } t.Log("No Request Body") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"repo:refs_changed"}, "Content-Type": {"application/json; charset=utf-8"}, }, } hookTransformResult := provider.TransformRequest(&request) require.False(t, hookTransformResult.ShouldSkip) require.EqualError(t, hookTransformResult.Error, "Failed to read content of request body: no or empty request body") } t.Log("Test with Sample Code Push data") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"repo:refs_changed"}, "Content-Type": {"application/json; charset=utf-8"}, }, Body: ioutil.NopCloser(strings.NewReader(sampleCodePushData)), } hookTransformResult := provider.TransformRequest(&request) require.NoError(t, hookTransformResult.Error) require.False(t, hookTransformResult.ShouldSkip) require.Equal(t, []bitriseapi.TriggerAPIParamsModel{ { BuildParams: bitriseapi.BuildParamsModel{ CommitHash: "to-hash-1", Branch: "master", }, }, { BuildParams: bitriseapi.BuildParamsModel{ CommitHash: "to-hash-2", Branch: "a-branch", }, }, }, hookTransformResult.TriggerAPIParams) require.Equal(t, false, 
hookTransformResult.DontWaitForTriggerResponse) } t.Log("Test with Sample Tag Push data") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"repo:refs_changed"}, "Content-Type": {"application/json; charset=utf-8"}, }, Body: ioutil.NopCloser(strings.NewReader(sampleTagPushData)), } hookTransformResult := provider.TransformRequest(&request) require.NoError(t, hookTransformResult.Error) require.False(t, hookTransformResult.ShouldSkip) require.Equal(t, []bitriseapi.TriggerAPIParamsModel{ { BuildParams: bitriseapi.BuildParamsModel{ Tag: "3.0.4", CommitHash: "2943d981c36ca9a241326a8c9520bec15edef8c5", }, }, }, hookTransformResult.TriggerAPIParams) require.Equal(t, false, hookTransformResult.DontWaitForTriggerResponse) } t.Log("Test with Sample Pull Request data") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"pr:opened"}, "Content-Type": {"application/json; charset=utf-8"}, }, Body: ioutil.NopCloser(strings.NewReader(samplePullRequestData)), } hookTransformResult := provider.TransformRequest(&request) require.NoError(t, hookTransformResult.Error) require.False(t, hookTransformResult.ShouldSkip) require.Equal(t, []bitriseapi.TriggerAPIParamsModel{ { BuildParams: bitriseapi.BuildParamsModel{ CommitHash: "ef8755f06ee4b28c96a847a95cb8ec8ed6ddd1ca", CommitMessage: "a new file added", Branch: "a-branch", BranchDest: "master", PullRequestID: pointers.NewIntPtr(1), }, }, }, hookTransformResult.TriggerAPIParams) require.Equal(t, false, hookTransformResult.DontWaitForTriggerResponse) } t.Log("Test with Sample Pull Request modification data") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"pr:modified"}, "Content-Type": {"application/json; charset=utf-8"}, }, Body: ioutil.NopCloser(strings.NewReader(samplePullRequestModifiedData)), } hookTransformResult := provider.TransformRequest(&request) require.NoError(t, hookTransformResult.Error) require.False(t, hookTransformResult.ShouldSkip) require.Equal(t, 
[]bitriseapi.TriggerAPIParamsModel{ { BuildParams: bitriseapi.BuildParamsModel{ CommitHash: "ef8755f06ee4b28c96a847a95cb8ec8ed6ddd1ca", CommitMessage: "a new file added", Branch: "a-branch", BranchDest: "master", PullRequestID: pointers.NewIntPtr(1), }, }, }, hookTransformResult.TriggerAPIParams) require.Equal(t, false, hookTransformResult.DontWaitForTriggerResponse) } t.Log("Test with Sample Pull Request From Ref Updated Data") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"pr:from_ref_updated"}, "Content-Type": {"application/json; charset=utf-8"}, }, Body: ioutil.NopCloser(strings.NewReader(samplePullRequestFromRefUpdatedData)), } hookTransformResult := provider.TransformRequest(&request) require.NoError(t, hookTransformResult.Error) require.False(t, hookTransformResult.ShouldSkip) require.Equal(t, []bitriseapi.TriggerAPIParamsModel{ { BuildParams: bitriseapi.BuildParamsModel{ CommitHash: "ef8755f06ee4b28c96a847a95cb8ec8ed6ddd1ca", CommitMessage: "a new file added", Branch: "a-branch", BranchDest: "master", PullRequestID: pointers.NewIntPtr(1), }, }, }, hookTransformResult.TriggerAPIParams) require.Equal(t, false, hookTransformResult.DontWaitForTriggerResponse) } t.Log("Test with Sample Pull Request merged data") { request := http.Request{ Header: http.Header{ "X-Event-Key": {"pr:merged"}, "Content-Type": {"application/json; charset=utf-8"}, }, Body: ioutil.NopCloser(strings.NewReader(samplePullRequestMergedData)), } hookTransformResult := provider.TransformRequest(&request) require.True(t, hookTransformResult.ShouldSkip) require.EqualError(t, hookTransformResult.Error, "Pull Request state doesn't require a build: MERGED") } }
explode_data.jsonl/62891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2444 }
[ 2830, 3393, 2039, 1941, 5179, 34932, 627, 1900, 1155, 353, 8840, 836, 8, 341, 197, 19979, 1669, 28171, 5179, 31483, 3244, 5247, 445, 41884, 3665, 3990, 1138, 197, 515, 197, 23555, 1669, 1758, 9659, 515, 298, 197, 4047, 25, 1758, 15753, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnumComments(test *testing.T) { var err error egood, err := parseRDLString(` // Comment for TestEnum type TestEnum enum { ONE, // Comment for ONE TWO // Comment for TWO } `) if err != nil { test.Errorf("cannot parse valid RDL: %v", err) } ebad, err := parseRDLString(` // Comment for TestEnum type TestEnum enum { // Comment for ONE ONE, // Comment for TWO TWO } `) if err != nil { test.Errorf("cannot parse valid RDL: %v", err) } type1 := egood.Types[0] //.EnumTypeDef.Elements[0] type2 := ebad.Types[0] //.EnumTypeDef.Elements[0] if !EquivalentTypes(type1, type2) { test.Errorf("Types don't match: %v, %v", type1, type2) } }
explode_data.jsonl/74355
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 10766, 17373, 8623, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 197, 791, 1386, 11, 1848, 1669, 4715, 49, 16524, 703, 61528, 322, 12255, 369, 3393, 10766, 198, 1313, 3393, 10766, 7618, 341, 262, 24038, 11, 442, 12255, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeletePrivateDNS(t *testing.T) { testcases := []struct { name string expectedError string expect func(s *mock_privatedns.MockScopeMockRecorder, m *mock_privatedns.MockclientMockRecorder) }{ { name: "no private dns", expectedError: "", expect: func(s *mock_privatedns.MockScopeMockRecorder, m *mock_privatedns.MockclientMockRecorder) { s.V(gomock.AssignableToTypeOf(2)).AnyTimes().Return(klogr.New()) s.PrivateDNSSpec().Return(nil) }, }, { name: "delete the dns zone", expectedError: "", expect: func(s *mock_privatedns.MockScopeMockRecorder, m *mock_privatedns.MockclientMockRecorder) { s.V(gomock.AssignableToTypeOf(2)).AnyTimes().Return(klogr.New()) s.PrivateDNSSpec().Return(&azure.PrivateDNSSpec{ ZoneName: "my-dns-zone", VNetName: "my-vnet", VNetResourceGroup: "vnet-rg", LinkName: "my-link", Records: []infrav1.AddressRecord{ { Hostname: "hostname-1", IP: "10.0.0.8", }, }, }) s.ResourceGroup().AnyTimes().Return("my-rg") m.DeleteLink(gomockinternal.AContext(), "my-rg", "my-dns-zone", "my-link") m.DeleteZone(gomockinternal.AContext(), "my-rg", "my-dns-zone") }, }, { name: "link already deleted", expectedError: "", expect: func(s *mock_privatedns.MockScopeMockRecorder, m *mock_privatedns.MockclientMockRecorder) { s.V(gomock.AssignableToTypeOf(2)).AnyTimes().Return(klogr.New()) s.PrivateDNSSpec().Return(&azure.PrivateDNSSpec{ ZoneName: "my-dns-zone", VNetName: "my-vnet", VNetResourceGroup: "vnet-rg", LinkName: "my-link", Records: []infrav1.AddressRecord{ { Hostname: "hostname-1", IP: "10.0.0.8", }, }, }) s.ResourceGroup().AnyTimes().Return("my-rg") m.DeleteLink(gomockinternal.AContext(), "my-rg", "my-dns-zone", "my-link"). 
Return(autorest.NewErrorWithResponse("", "", &http.Response{StatusCode: 404}, "Not found")) m.DeleteZone(gomockinternal.AContext(), "my-rg", "my-dns-zone") }, }, { name: "zone already deleted", expectedError: "", expect: func(s *mock_privatedns.MockScopeMockRecorder, m *mock_privatedns.MockclientMockRecorder) { s.V(gomock.AssignableToTypeOf(2)).AnyTimes().Return(klogr.New()) s.PrivateDNSSpec().Return(&azure.PrivateDNSSpec{ ZoneName: "my-dns-zone", VNetName: "my-vnet", VNetResourceGroup: "vnet-rg", LinkName: "my-link", Records: []infrav1.AddressRecord{ { Hostname: "hostname-1", IP: "10.0.0.8", }, }, }) s.ResourceGroup().AnyTimes().Return("my-rg") m.DeleteLink(gomockinternal.AContext(), "my-rg", "my-dns-zone", "my-link"). Return(autorest.NewErrorWithResponse("", "", &http.Response{StatusCode: 404}, "Not found")) m.DeleteZone(gomockinternal.AContext(), "my-rg", "my-dns-zone"). Return(autorest.NewErrorWithResponse("", "", &http.Response{StatusCode: 404}, "Not found")) }, }, { name: "error while trying to delete the link", expectedError: "failed to delete virtual network link my-vnet with zone my-dns-zone in resource group my-rg: #: Internal Server Error: StatusCode=500", expect: func(s *mock_privatedns.MockScopeMockRecorder, m *mock_privatedns.MockclientMockRecorder) { s.V(gomock.AssignableToTypeOf(2)).AnyTimes().Return(klogr.New()) s.PrivateDNSSpec().Return(&azure.PrivateDNSSpec{ ZoneName: "my-dns-zone", VNetName: "my-vnet", VNetResourceGroup: "vnet-rg", LinkName: "my-link", Records: []infrav1.AddressRecord{ { Hostname: "hostname-1", IP: "10.0.0.8", }, }, }) s.ResourceGroup().AnyTimes().Return("my-rg") m.DeleteLink(gomockinternal.AContext(), "my-rg", "my-dns-zone", "my-link"). 
Return(autorest.NewErrorWithResponse("", "", &http.Response{StatusCode: 500}, "Internal Server Error")) }, }, { name: "error while trying to delete the zone", expectedError: "failed to delete private dns zone my-dns-zone in resource group my-rg: #: Internal Server Error: StatusCode=500", expect: func(s *mock_privatedns.MockScopeMockRecorder, m *mock_privatedns.MockclientMockRecorder) { s.V(gomock.AssignableToTypeOf(2)).AnyTimes().Return(klogr.New()) s.PrivateDNSSpec().Return(&azure.PrivateDNSSpec{ ZoneName: "my-dns-zone", VNetName: "my-vnet", VNetResourceGroup: "vnet-rg", LinkName: "my-link", Records: []infrav1.AddressRecord{ { Hostname: "hostname-1", IP: "10.0.0.8", }, }, }) s.ResourceGroup().AnyTimes().Return("my-rg") m.DeleteLink(gomockinternal.AContext(), "my-rg", "my-dns-zone", "my-link") m.DeleteZone(gomockinternal.AContext(), "my-rg", "my-dns-zone"). Return(autorest.NewErrorWithResponse("", "", &http.Response{StatusCode: 500}, "Internal Server Error")) }, }, } for _, tc := range testcases { tc := tc t.Run(tc.name, func(t *testing.T) { g := NewWithT(t) t.Parallel() mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() scopeMock := mock_privatedns.NewMockScope(mockCtrl) clientMock := mock_privatedns.NewMockclient(mockCtrl) tc.expect(scopeMock.EXPECT(), clientMock.EXPECT()) s := &Service{ Scope: scopeMock, client: clientMock, } err := s.Delete(context.TODO()) if tc.expectedError != "" { g.Expect(err).To(HaveOccurred()) g.Expect(err).To(MatchError(tc.expectedError)) } else { g.Expect(err).NotTo(HaveOccurred()) } }) } }
explode_data.jsonl/70330
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2767 }
[ 2830, 3393, 6435, 16787, 61088, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 42400, 1454, 914, 198, 197, 24952, 286, 2915, 1141, 353, 16712, 24726, 657, 4412, 24664, 10803, 11571, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewClient(t *testing.T) { p := &Provider{} gomega.RegisterTestingT(t) // nil store _, err := p.NewClient(context.Background(), nil, nil, "") gomega.Expect(err).To(gomega.HaveOccurred()) // missing provider _, err = p.NewClient(context.Background(), &esv1beta1.SecretStore{}, nil, "") gomega.Expect(err).To(gomega.HaveOccurred()) }
explode_data.jsonl/20411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 3564, 2959, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 609, 5179, 16094, 3174, 32696, 19983, 16451, 51, 1155, 692, 197, 322, 2092, 3553, 198, 197, 6878, 1848, 1669, 281, 7121, 2959, 5378, 19047, 1507, 2092, 11, 2092, 11, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcileCancelledPipelineRun(t *testing.T) { ps := []*v1alpha1.Pipeline{tb.Pipeline("test-pipeline", "foo", tb.PipelineSpec( tb.PipelineTask("hello-world-1", "hello-world", tb.Retries(1)), ))} prs := []*v1alpha1.PipelineRun{tb.PipelineRun("test-pipeline-run-with-timeout", "foo", tb.PipelineRunSpec("test-pipeline", tb.PipelineRunCancelled, ), )} ts := []*v1alpha1.Task{tb.Task("hello-world", "foo")} d := test.Data{ PipelineRuns: prs, Pipelines: ps, Tasks: ts, } testAssets, cancel := getPipelineRunController(t, d) defer cancel() c := testAssets.Controller clients := testAssets.Clients err := c.Reconciler.Reconcile(context.Background(), "foo/test-pipeline-run-with-timeout") if err != nil { t.Errorf("Did not expect to see error when reconciling completed PipelineRun but saw %s", err) } // Check that the PipelineRun was reconciled correctly reconciledRun, err := clients.Pipeline.Tekton().PipelineRuns("foo").Get("test-pipeline-run-with-timeout", metav1.GetOptions{}) if err != nil { t.Fatalf("Somehow had error getting completed reconciled run out of fake client: %s", err) } // The PipelineRun should be still cancelled. if reconciledRun.Status.GetCondition(apis.ConditionSucceeded).Reason != "PipelineRunCancelled" { t.Errorf("Expected PipelineRun to be cancelled, but condition reason is %s", reconciledRun.Status.GetCondition(apis.ConditionSucceeded)) } // Check that no TaskRun is created or run actions := clients.Pipeline.Actions() for _, action := range actions { actionType := fmt.Sprintf("%T", action) if !(actionType == "testing.UpdateActionImpl" || actionType == "testing.GetActionImpl") { t.Errorf("Expected a TaskRun to be get/updated, but it was %s", actionType) } } }
explode_data.jsonl/81292
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 671 }
[ 2830, 3393, 693, 40446, 457, 39473, 34656, 6727, 1155, 353, 8840, 836, 8, 341, 35009, 1669, 29838, 85, 16, 7141, 16, 1069, 8790, 90, 18387, 1069, 8790, 445, 1944, 2268, 8790, 497, 330, 7975, 497, 16363, 1069, 8790, 8327, 1006, 197, 62...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestPersistence(t *testing.T) { db := db.NewMemDB() // Create some random key value pairs records := make(map[string]string) for i := 0; i < 10000; i++ { records[randstr(20)] = randstr(20) } // Construct some tree and save it t1 := NewIAVLTree(0, db) for key, value := range records { t1.Set([]byte(key), []byte(value)) } t1.Save() hash, _ := t1.HashWithCount() // Load a tree t2 := NewIAVLTree(0, db) t2.Load(hash) for key, value := range records { _, t2value, _ := t2.Get([]byte(key)) if string(t2value) != value { t.Fatalf("Invalid value. Expected %v, got %v", value, t2value) } } }
explode_data.jsonl/5016
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 71562, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 2927, 7121, 18816, 3506, 2822, 197, 322, 4230, 1045, 4194, 1376, 897, 13530, 198, 197, 26203, 1669, 1281, 9147, 14032, 30953, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestResourceName(t *testing.T) { assert.Equal(t, "rook-ceph-mon-a", resourceName("rook-ceph-mon-a")) assert.Equal(t, "rook-ceph-mon123", resourceName("rook-ceph-mon123")) assert.Equal(t, "rook-ceph-mon-b", resourceName("b")) }
explode_data.jsonl/39524
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 4783, 675, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 1155, 11, 330, 299, 562, 53212, 759, 77296, 7409, 497, 98485, 445, 299, 562, 53212, 759, 77296, 7409, 5455, 6948, 12808, 1155, 11, 330, 299, 562, 53212, 759, 77296, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewPodCommandExecutor(t *testing.T) { restClientConfig := &rest.Config{Host: "foo"} poster := &mockPoster{} pce := NewPodCommandExecutor(restClientConfig, poster).(*defaultPodCommandExecutor) assert.Equal(t, restClientConfig, pce.restClientConfig) assert.Equal(t, poster, pce.restClient) assert.Equal(t, &defaultStreamExecutorFactory{}, pce.streamExecutorFactory) }
explode_data.jsonl/62113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 3564, 23527, 4062, 25255, 1155, 353, 8840, 836, 8, 341, 197, 3927, 2959, 2648, 1669, 609, 3927, 10753, 90, 9296, 25, 330, 7975, 16707, 197, 45401, 1669, 609, 16712, 95077, 16094, 3223, 346, 1669, 1532, 23527, 4062, 25255, 62...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateMotorcyclePresenter_Handle(t *testing.T) { // ARRANGE roles := map[authorizationrole.AuthorizationRole]bool{ authorizationrole.AdminAuthorizationRole: true, } authService, _ := security.NewAuthService(true, roles) repo, _ := repository.NewMotorcycleRepository() // Insert a motorcycle so we can update it. insertRequest, _ := request.NewInsertMotorcycleRequest("Honda", "Shadow", 2006, "01234567890123456") insertInteractor, _ := interactor.NewInsertMotorcycleInteractor(repo, authService) insertResponse, _ := insertInteractor.Handle(insertRequest) motorcycle, _, _ := repo.FindByID(insertResponse.ID) motorcycle.Vin = "65432109876543210" updateRequest, _ := request.NewUpdateMotorcycleRequest(insertResponse.ID, motorcycle) updateInteractor, _ := interactor.NewUpdateMotorcycleInteractor(repo, authService) updateResponse, _ := updateInteractor.Handle(updateRequest) updatePresenter, _ := NewUpdateMotorcyclePresenter() // ACT viewModel, _ := updatePresenter.Handle(updateResponse) // ASSERT assert.Nil(t, viewModel.Error) }
explode_data.jsonl/78288
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 4289, 33577, 21621, 33849, 42714, 1155, 353, 8840, 836, 8, 1476, 197, 322, 82079, 11503, 198, 197, 14643, 1669, 2415, 58, 39554, 5778, 53786, 9030, 96436, 515, 197, 197, 39554, 5778, 39469, 18124, 9030, 25, 830, 345, 197, 53...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoSources(t *testing.T) { install, _, _ := newInstall() err := install.Install([]string{"no_sources"}) require.Error(t, err) assert.Contains(t, err.Error(), "failed to compile example.com/no_sources: no buildable Go source files") }
explode_data.jsonl/36378
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 2753, 32200, 1155, 353, 8840, 836, 8, 341, 197, 12248, 11, 8358, 716, 1669, 501, 24690, 2822, 9859, 1669, 4582, 71207, 541, 10556, 917, 4913, 2152, 52896, 23625, 17957, 6141, 1155, 11, 1848, 340, 6948, 11545, 1155, 11, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIsErrAndNotRetriable(t *testing.T) { tests := []struct { description string statusCode proto.StatusCode expected bool }{ { description: "rollout status connection error", statusCode: proto.StatusCode_STATUSCHECK_KUBECTL_CONNECTION_ERR, }, { description: "rollout status kubectl command killed", statusCode: proto.StatusCode_STATUSCHECK_KUBECTL_PID_KILLED, expected: true, }, { description: "rollout status random error", statusCode: proto.StatusCode_STATUSCHECK_UNKNOWN, expected: true, }, { description: "rollout status parent context canceled", statusCode: proto.StatusCode_STATUSCHECK_USER_CANCELLED, expected: true, }, { description: "rollout status parent context timed out", statusCode: proto.StatusCode_STATUSCHECK_DEADLINE_EXCEEDED, expected: true, }, { description: "rollout status nil error", statusCode: proto.StatusCode_STATUSCHECK_SUCCESS, expected: true, }, } for _, test := range tests { testutil.Run(t, test.description, func(t *testutil.T) { actual := isErrAndNotRetryAble(test.statusCode) t.CheckDeepEqual(test.expected, actual) }) } }
explode_data.jsonl/81097
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 472 }
[ 2830, 3393, 3872, 7747, 3036, 2623, 12020, 461, 480, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 23847, 2078, 220, 18433, 37828, 198, 197, 42400, 262, 1807, 198, 197, 59403, 197, 197, 515, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestModuleSampler(t *testing.T) { rand.Seed(time.Now().Unix()) for mod := uint64(1); mod <= 100; mod++ { var ( sampler = zipkin.NewModuloSampler(mod) want = uint64(rand.Intn(1000)) max = mod * want found = uint64(0) ) for i := uint64(0); i < max; i++ { if sampler(i) { found++ } } if want, have := max/mod, found; want != have { t.Errorf("expected %d samples, got %d", want, have) } } }
explode_data.jsonl/74307
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 3332, 66048, 1155, 353, 8840, 836, 8, 341, 7000, 437, 5732, 291, 9730, 13244, 1005, 55832, 12367, 2023, 1463, 1669, 2622, 21, 19, 7, 16, 1215, 1463, 2651, 220, 16, 15, 15, 26, 1463, 1027, 341, 197, 2405, 2399, 298, 1903,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStorageSample(t *testing.T) { du, err := disk.Usage("/") if err != nil { t.Skipf("this linux distro can't get the usage statistics: %v", err.Error()) } if du.InodesTotal == 0 { t.Skipf("this linux distro is not supported for inodes: %#v", du) } // GIVEN a Storage Sampler ps := storage.NewSampler(contextMock()) // THAT has already sampled values in the past _, err = ps.Sample() require.NoError(t, err) // WHEN it samples again samples, err := ps.Sample() require.NoError(t, err) // THEN the read samples are of the correct type, with a valid format and non-zero values for those // metrics that can't be zero ss := fullSample(t, samples) assert.Equal(t, "StorageSample", ss.EventType) assert.NotEmpty(t, ss.MountPoint) assert.NotEmpty(t, ss.Device) assert.NotEmpty(t, ss.FileSystemType) assert.NotEmpty(t, ss.Device) assert.NotEmpty(t, ss.IsReadOnly) require.NotNil(t, ss.InodesTotal) require.NotNil(t, ss.InodesFree) require.NotNil(t, ss.InodesUsed) require.NotNil(t, ss.InodesUsedPercent) assert.NotZero(t, *ss.InodesTotal) assert.NotZero(t, *ss.InodesFree) assert.NotZero(t, *ss.InodesUsed) assert.NotZero(t, *ss.InodesUsedPercent) assert.NotNil(t, ss.TotalBytes) assert.NotZero(t, *ss.TotalBytes) assert.NotNil(t, ss.UsedBytes) assert.NotZero(t, *ss.UsedBytes) }
explode_data.jsonl/79672
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 508 }
[ 2830, 3393, 5793, 17571, 1155, 353, 8840, 836, 8, 341, 197, 1054, 11, 1848, 1669, 13364, 85900, 61710, 743, 1848, 961, 2092, 341, 197, 3244, 57776, 69, 445, 574, 36245, 1582, 299, 646, 944, 633, 279, 10431, 13142, 25, 1018, 85, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestManagerHTTP(t *testing.T) { t.Run("case=regenerate csrf on principal change", func(t *testing.T) { _, reg := internal.NewFastRegistryWithMocks(t) mock := new(mockCSRFHandler) reg.WithCSRFHandler(mock) require.NoError(t, reg.SessionManager().IssueCookie(context.Background(), httptest.NewRecorder(), new(http.Request), new(session.Session))) assert.Equal(t, 1, mock.c) }) t.Run("suite=lifecycle", func(t *testing.T) { conf, reg := internal.NewFastRegistryWithMocks(t) conf.MustSet(config.ViperKeySelfServiceLoginUI, "https://www.ory.sh") conf.MustSet(config.ViperKeyDefaultIdentitySchemaURL, "file://./stub/fake-session.schema.json") var s *session.Session rp := x.NewRouterPublic() rp.GET("/session/revoke", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { require.NoError(t, reg.SessionManager().PurgeFromRequest(r.Context(), w, r)) w.WriteHeader(http.StatusOK) }) rp.GET("/session/set", func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { require.NoError(t, reg.SessionManager().CreateAndIssueCookie(r.Context(), w, r, s)) w.WriteHeader(http.StatusOK) }) rp.GET("/session/get", func(w http.ResponseWriter, r *http.Request, p httprouter.Params) { sess, err := reg.SessionManager().FetchFromRequest(r.Context(), r) if err != nil { t.Logf("Got error on lookup: %s %T", err, errors.Unwrap(err)) reg.Writer().WriteError(w, r, err) return } reg.Writer().Write(w, r, sess) }) pts := httptest.NewServer(x.NewTestCSRFHandler(rp, reg)) t.Cleanup(pts.Close) conf.MustSet(config.ViperKeyPublicBaseURL, pts.URL) reg.RegisterPublicRoutes(context.Background(), rp) t.Run("case=valid", func(t *testing.T) { conf.MustSet(config.ViperKeySessionLifespan, "1m") i := identity.Identity{Traits: []byte("{}")} require.NoError(t, reg.PrivilegedIdentityPool().CreateIdentity(context.Background(), &i)) s, _ = session.NewActiveSession(&i, conf, time.Now()) c := testhelpers.NewClientWithCookies(t) testhelpers.MockHydrateCookieClient(t, c, pts.URL+"/session/set") res, err := 
c.Get(pts.URL + "/session/get") require.NoError(t, err) assert.EqualValues(t, http.StatusOK, res.StatusCode) }) t.Run("case=valid and uses x-session-cookie", func(t *testing.T) { conf.MustSet(config.ViperKeySessionLifespan, "1m") i := identity.Identity{Traits: []byte("{}")} require.NoError(t, reg.PrivilegedIdentityPool().CreateIdentity(context.Background(), &i)) s, _ = session.NewActiveSession(&i, conf, time.Now()) c := testhelpers.NewClientWithCookies(t) testhelpers.MockHydrateCookieClient(t, c, pts.URL+"/session/set") cookies := c.Jar.Cookies(urlx.ParseOrPanic(pts.URL)) require.Len(t, cookies, 1) assert.Equal(t, "ory_kratos_session", cookies[0].Name) req, err := http.NewRequest("GET", pts.URL+"/session/get", nil) require.NoError(t, err) req.Header.Set("Cookie", "ory_kratos_session=not-valid") req.Header.Set("X-Session-Cookie", cookies[0].Value) res, err := http.DefaultClient.Do(req) require.NoError(t, err) assert.EqualValues(t, http.StatusOK, res.StatusCode) }) t.Run("case=valid bearer auth as fallback", func(t *testing.T) { conf.MustSet(config.ViperKeySessionLifespan, "1m") i := identity.Identity{Traits: []byte("{}"), State: identity.StateActive} require.NoError(t, reg.PrivilegedIdentityPool().CreateIdentity(context.Background(), &i)) s, err := session.NewActiveSession(&i, conf, time.Now()) require.NoError(t, err) require.NoError(t, reg.SessionPersister().CreateSession(context.Background(), s)) require.NotEmpty(t, s.Token) req, err := http.NewRequest("GET", pts.URL+"/session/get", nil) require.NoError(t, err) req.Header.Set("Authorization", "Bearer "+s.Token) c := http.DefaultClient res, err := c.Do(req) require.NoError(t, err) assert.EqualValues(t, http.StatusOK, res.StatusCode) }) t.Run("case=valid x-session-token auth even if bearer is set", func(t *testing.T) { conf.MustSet(config.ViperKeySessionLifespan, "1m") i := identity.Identity{Traits: []byte("{}"), State: identity.StateActive} require.NoError(t, 
reg.PrivilegedIdentityPool().CreateIdentity(context.Background(), &i)) s, err := session.NewActiveSession(&i, conf, time.Now()) require.NoError(t, err) require.NoError(t, reg.SessionPersister().CreateSession(context.Background(), s)) req, err := http.NewRequest("GET", pts.URL+"/session/get", nil) require.NoError(t, err) req.Header.Set("Authorization", "Bearer invalid") req.Header.Set("X-Session-Token", s.Token) c := http.DefaultClient res, err := c.Do(req) require.NoError(t, err) assert.EqualValues(t, http.StatusOK, res.StatusCode) }) t.Run("case=expired", func(t *testing.T) { conf.MustSet(config.ViperKeySessionLifespan, "1ns") t.Cleanup(func() { conf.MustSet(config.ViperKeySessionLifespan, "1m") }) i := identity.Identity{Traits: []byte("{}")} require.NoError(t, reg.PrivilegedIdentityPool().CreateIdentity(context.Background(), &i)) s, _ = session.NewActiveSession(&i, conf, time.Now()) c := testhelpers.NewClientWithCookies(t) testhelpers.MockHydrateCookieClient(t, c, pts.URL+"/session/set") time.Sleep(time.Nanosecond * 2) res, err := c.Get(pts.URL + "/session/get") require.NoError(t, err) assert.EqualValues(t, http.StatusUnauthorized, res.StatusCode) }) t.Run("case=revoked", func(t *testing.T) { i := identity.Identity{Traits: []byte("{}")} require.NoError(t, reg.PrivilegedIdentityPool().CreateIdentity(context.Background(), &i)) s, _ = session.NewActiveSession(&i, conf, time.Now()) s, _ = session.NewActiveSession(&i, conf, time.Now()) c := testhelpers.NewClientWithCookies(t) testhelpers.MockHydrateCookieClient(t, c, pts.URL+"/session/set") res, err := c.Get(pts.URL + "/session/revoke") require.NoError(t, err) assert.EqualValues(t, http.StatusOK, res.StatusCode) res, err = c.Get(pts.URL + "/session/get") require.NoError(t, err) assert.EqualValues(t, http.StatusUnauthorized, res.StatusCode) }) }) }
explode_data.jsonl/8552
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2480 }
[ 2830, 3393, 2043, 9230, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5638, 28, 1580, 13220, 35522, 389, 12435, 2297, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 6878, 1217, 1669, 5306, 7121, 32174, 15603, 2354, 72577, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAsyncAbortPoisons(t *testing.T) { defer leaktest.AfterTest(t)() // Add a testing request filter which pauses a get request for the // key until after the signal channel is closed. var storeKnobs storage.StoreTestingKnobs keyA, keyB := roachpb.Key("a"), roachpb.Key("b") commitCh := make(chan error, 1) storeKnobs.TestingRequestFilter = func(ba roachpb.BatchRequest) *roachpb.Error { for _, req := range ba.Requests { switch r := req.GetInner().(type) { case *roachpb.EndTransactionRequest: if r.Key.Equal(keyA) { if r.Poison { close(commitCh) } else { commitCh <- fmt.Errorf("EndTransaction didn't have expected Poison flag") } } } } return nil } s, _, _ := serverutils.StartServer(t, base.TestServerArgs{Knobs: base.TestingKnobs{Store: &storeKnobs}}) ctx := context.Background() defer s.Stopper().Stop(ctx) // Setup two userspace ranges: /Min-b, b-/Max. db := s.DB() // Write values to key "a". txn := client.NewTxn(ctx, db, 0 /* gatewayNodeID */, client.RootTxn) b := txn.NewBatch() b.Put(keyA, []byte("value")) if err := txn.Run(ctx, b); err != nil { t.Fatal(err) } // Run a high-priority txn that will abort the previous one. if err := db.Txn(context.TODO(), func(ctx context.Context, txn *client.Txn) error { if err := txn.SetUserPriority(roachpb.MaxUserPriority); err != nil { return err } // Write to keyB first to locate this txn's record on a different key // than the initial txn's record. This allows the request filter to // trivially ignore this transaction. if err := txn.Put(ctx, keyB, []byte("value2")); err != nil { return err } return txn.Put(ctx, keyA, []byte("value2")) }); err != nil { t.Fatal(err) } expErr := regexp.QuoteMeta("TransactionAbortedError(ABORT_REASON_ABORT_SPAN)") if _, err := txn.Get(ctx, keyA); !testutils.IsError(err, expErr) { t.Fatalf("expected %s, got: %v", expErr, err) } if err := <-commitCh; err != nil { t.Fatal(err) } }
explode_data.jsonl/36470
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 789 }
[ 2830, 3393, 6525, 85891, 32904, 19379, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 197, 322, 2691, 264, 7497, 1681, 4051, 892, 85832, 264, 633, 1681, 369, 279, 198, 197, 322, 1376, 3080, 1283, 279, 82...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetTinkerbellTemplateConfig(t *testing.T) { tests := []struct { testName string fileName string wantConfigs map[string]*TinkerbellTemplateConfig wantErr bool }{ { testName: "file doesn't exist", fileName: "testdata/fake_file.yaml", wantConfigs: nil, wantErr: true, }, { testName: "not parseable file", fileName: "testdata/not_parseable_cluster_tinkerbell.yaml", wantConfigs: nil, wantErr: true, }, { testName: "valid tinkerbell template config", fileName: "testdata/cluster_1_21_valid_tinkerbell.yaml", wantConfigs: map[string]*TinkerbellTemplateConfig{ "tink-test": { TypeMeta: metav1.TypeMeta{ Kind: TinkerbellTemplateConfigKind, APIVersion: SchemeBuilder.GroupVersion.String(), }, ObjectMeta: metav1.ObjectMeta{ Name: "tink-test", }, Spec: TinkerbellTemplateConfigSpec{ Template: tinkerbell.Workflow{ Version: "0.1", Name: "tink-test", GlobalTimeout: 6000, ID: "", Tasks: []tinkerbell.Task{ { Name: "tink-test", WorkerAddr: "{{.device_1}}", Volumes: []string{ "/dev:/dev", "/dev/console:/dev/console", "/lib/firmware:/lib/firmware:ro", }, Actions: []tinkerbell.Action{ { Name: "stream-image", Image: "image2disk:v1.0.0", Timeout: 360, Environment: map[string]string{ "IMG_URL": "", "DEST_DISK": "/dev/sda", "COMPRESSED": "true", }, }, }, }, }, }, }, }, }, wantErr: false, }, { testName: "multiple tinkerbell template configs", fileName: "testdata/cluster_1_21_valid_multiple_tinkerbell_templates.yaml", wantConfigs: map[string]*TinkerbellTemplateConfig{ "tink-test-1": { TypeMeta: metav1.TypeMeta{ Kind: TinkerbellTemplateConfigKind, APIVersion: SchemeBuilder.GroupVersion.String(), }, ObjectMeta: metav1.ObjectMeta{ Name: "tink-test-1", }, Spec: TinkerbellTemplateConfigSpec{ Template: tinkerbell.Workflow{ Version: "0.1", Name: "tink-test-1", GlobalTimeout: 6000, ID: "", Tasks: []tinkerbell.Task{ { Name: "tink-test-1", WorkerAddr: "{{.device_1}}", Volumes: []string{ "/dev:/dev", "/dev/console:/dev/console", "/lib/firmware:/lib/firmware:ro", }, Actions: []tinkerbell.Action{ { Name: 
"stream-image", Image: "image2disk:v1.0.0", Timeout: 360, Environment: map[string]string{ "IMG_URL": "", "DEST_DISK": "/dev/sda", "COMPRESSED": "true", }, }, }, }, }, }, }, }, "tink-test-2": { TypeMeta: metav1.TypeMeta{ Kind: TinkerbellTemplateConfigKind, APIVersion: SchemeBuilder.GroupVersion.String(), }, ObjectMeta: metav1.ObjectMeta{ Name: "tink-test-2", }, Spec: TinkerbellTemplateConfigSpec{ Template: tinkerbell.Workflow{ Version: "0.1", Name: "tink-test-2", GlobalTimeout: 6000, ID: "", Tasks: []tinkerbell.Task{ { Name: "tink-test-2", WorkerAddr: "{{.device_1}}", Volumes: []string{ "/dev:/dev", "/dev/console:/dev/console", "/lib/firmware:/lib/firmware:ro", }, Actions: []tinkerbell.Action{ { Name: "stream-image", Image: "image2disk:v1.0.0", Timeout: 360, Environment: map[string]string{ "IMG_URL": "", "DEST_DISK": "/dev/sda", "COMPRESSED": "true", }, }, }, }, }, }, }, }, }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.testName, func(t *testing.T) { g := gomega.NewWithT(t) got, err := GetTinkerbellTemplateConfig(tt.fileName) g.Expect((err != nil)).To(gomega.BeEquivalentTo(tt.wantErr)) g.Expect(got).To(gomega.BeEquivalentTo(tt.wantConfigs)) }) } }
explode_data.jsonl/7373
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2519 }
[ 2830, 3393, 1949, 51, 41112, 17250, 7275, 2648, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 18185, 675, 262, 914, 198, 197, 17661, 675, 262, 914, 198, 197, 50780, 84905, 2415, 14032, 8465, 51, 41112, 17250, 7275, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFluky_Bool_WithOptions(t *testing.T) { mRng := new(RngMock) mRng.On("Float64").Return(0.75).Times(1) mRng.On("Float64").Return(0.25).Times(1) mRng.On("Float64").Return(0.75).Times(1) mRng.On("Float64").Return(0.74999999999999).Times(1) f := NewFluky(mRng) assert.Equal(t, true, f.Bool(WithLikelihood(0.9))) assert.Equal(t, false, f.Bool(WithLikelihood(0.1))) assert.Equal(t, false, f.Bool(WithLikelihood(0.75))) assert.Equal(t, true, f.Bool(WithLikelihood(0.75))) mRng.AssertExpectations(t) }
explode_data.jsonl/77194
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 3882, 3101, 88, 79948, 62, 74238, 1155, 353, 8840, 836, 8, 341, 2109, 49, 968, 1669, 501, 2785, 968, 11571, 340, 2109, 49, 968, 8071, 445, 5442, 21, 19, 1827, 5598, 7, 15, 13, 22, 20, 568, 18889, 7, 16, 340, 2109, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigDefaultFileSettingsS3SSE(t *testing.T) { c1 := Config{} c1.SetDefaults() if *c1.FileSettings.AmazonS3SSE { t.Fatal("FileSettings.AmazonS3SSE should default to false") } }
explode_data.jsonl/50670
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 2648, 3675, 1703, 6086, 50, 18, 50, 925, 1155, 353, 8840, 836, 8, 341, 1444, 16, 1669, 5532, 16094, 1444, 16, 4202, 16273, 2822, 743, 353, 66, 16, 8576, 6086, 875, 76, 5522, 50, 18, 50, 925, 341, 197, 3244, 26133, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestValidNonce(t *testing.T) { wfe, _ := setupWFE(t) // signRequestEmbed with a `nil` nonce.NonceService will result in the // JWS not having a protected nonce header. missingNonceJWS, _, _ := signRequestEmbed(t, nil, "", "", nil) // signRequestEmbed with a badNonceProvider will result in the JWS // having an invalid nonce invalidNonceJWS, _, _ := signRequestEmbed(t, nil, "", "", badNonceProvider{}) goodJWS, _, _ := signRequestEmbed(t, nil, "", "", wfe.nonceService) testCases := []struct { Name string JWS *jose.JSONWebSignature ExpectedResult *probs.ProblemDetails ErrorStatType string }{ { Name: "No nonce in JWS", JWS: missingNonceJWS, ExpectedResult: &probs.ProblemDetails{ Type: probs.BadNonceProblem, Detail: "JWS has no anti-replay nonce", HTTPStatus: http.StatusBadRequest, }, ErrorStatType: "JWSMissingNonce", }, { Name: "Invalid nonce in JWS", JWS: invalidNonceJWS, ExpectedResult: &probs.ProblemDetails{ Type: probs.BadNonceProblem, Detail: "JWS has an invalid anti-replay nonce: \"im-a-nonce\"", HTTPStatus: http.StatusBadRequest, }, ErrorStatType: "JWSInvalidNonce", }, { Name: "Valid nonce in JWS", JWS: goodJWS, ExpectedResult: nil, }, } for _, tc := range testCases { t.Run(tc.Name, func(t *testing.T) { wfe.stats.joseErrorCount.Reset() prob := wfe.validNonce(tc.JWS) if tc.ExpectedResult == nil && prob != nil { t.Fatal(fmt.Sprintf("Expected nil result, got %#v", prob)) } else { test.AssertMarshaledEquals(t, prob, tc.ExpectedResult) } if tc.ErrorStatType != "" { test.AssertEquals(t, test.CountCounterVec( "type", tc.ErrorStatType, wfe.stats.joseErrorCount), 1) } }) } }
explode_data.jsonl/15351
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 811 }
[ 2830, 3393, 4088, 90528, 1155, 353, 8840, 836, 8, 341, 6692, 1859, 11, 716, 1669, 6505, 54, 11419, 1155, 692, 197, 322, 1841, 1900, 25486, 448, 264, 1565, 8385, 63, 39676, 2067, 13184, 1860, 686, 1102, 304, 279, 198, 197, 322, 619, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetStaticScrapeWorkFailure(t *testing.T) { f := func(data string) { t.Helper() sws, err := getStaticScrapeWork([]byte(data), "non-existing-file") if err == nil { t.Fatalf("expecting non-nil error") } if sws != nil { t.Fatalf("expecting nil sws") } } // incorrect yaml f(`foo bar baz`) // Missing job_name f(` scrape_configs: - static_configs: - targets: ["foo"] `) // Invalid scheme f(` scrape_configs: - job_name: x scheme: asdf static_configs: - targets: ["foo"] `) // Missing username in `basic_auth` f(` scrape_configs: - job_name: x basic_auth: password: sss static_configs: - targets: ["a"] `) // Both password and password_file set in `basic_auth` f(` scrape_configs: - job_name: x basic_auth: username: foobar password: sss password_file: sdfdf static_configs: - targets: ["a"] `) // Invalid password_file set in `basic_auth` f(` scrape_configs: - job_name: x basic_auth: username: foobar password_file: /non_existing_file.pass static_configs: - targets: ["a"] `) // Both `bearer_token` and `bearer_token_file` are set f(` scrape_configs: - job_name: x bearer_token: foo bearer_token_file: bar static_configs: - targets: ["a"] `) // Both `basic_auth` and `bearer_token` are set f(` scrape_configs: - job_name: x bearer_token: foo basic_auth: username: foo password: bar static_configs: - targets: ["a"] `) // Invalid `bearer_token_file` f(` scrape_configs: - job_name: x bearer_token_file: non_existing_file.bearer static_configs: - targets: ["a"] `) // non-existing ca_file f(` scrape_configs: - job_name: aa tls_config: ca_file: non/extising/file static_configs: - targets: ["s"] `) // invalid ca_file f(` scrape_configs: - job_name: aa tls_config: ca_file: testdata/prometheus.yml static_configs: - targets: ["s"] `) // non-existing cert_file f(` scrape_configs: - job_name: aa tls_config: cert_file: non/extising/file static_configs: - targets: ["s"] `) // non-existing key_file f(` scrape_configs: - job_name: aa tls_config: key_file: non/extising/file static_configs: - 
targets: ["s"] `) // Invalid regex in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - regex: "(" source_labels: [foo] target_label: bar static_configs: - targets: ["s"] `) // Missing target_label for action=replace in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - action: replace source_labels: [foo] static_configs: - targets: ["s"] `) // Missing source_labels for action=keep in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - action: keep static_configs: - targets: ["s"] `) // Missing source_labels for action=drop in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - action: drop static_configs: - targets: ["s"] `) // Missing source_labels for action=hashmod in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - action: hashmod target_label: bar modulus: 123 static_configs: - targets: ["s"] `) // Missing target for action=hashmod in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - action: hashmod source_labels: [foo] modulus: 123 static_configs: - targets: ["s"] `) // Missing modulus for action=hashmod in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - action: hashmod source_labels: [foo] target_label: bar static_configs: - targets: ["s"] `) // Invalid action in relabel_configs f(` scrape_configs: - job_name: aa relabel_configs: - action: foobar static_configs: - targets: ["s"] `) }
explode_data.jsonl/13544
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1583 }
[ 2830, 3393, 1949, 11690, 3326, 19842, 6776, 17507, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 2915, 2592, 914, 8, 341, 197, 3244, 69282, 741, 197, 1903, 8915, 11, 1848, 1669, 633, 11690, 3326, 19842, 6776, 10556, 3782, 2592, 701, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_client_modifyConfig(t *testing.T) { // setup context gin.SetMode(gin.TestMode) resp := httptest.NewRecorder() _, engine := gin.CreateTestContext(resp) // setup mock server engine.GET("/api/v3/repos/foo/bar/contents/:path", func(c *gin.Context) { c.Header("Content-Type", "application/json") c.Status(http.StatusOK) c.File("testdata/template.json") }) m := &types.Metadata{ Database: &types.Database{ Driver: "foo", Host: "foo", }, Queue: &types.Queue{ Channel: "foo", Driver: "foo", Host: "foo", }, Source: &types.Source{ Driver: "foo", Host: "foo", }, Vela: &types.Vela{ Address: "foo", WebAddress: "foo", }, } want := &yaml.Build{ Version: "1", Metadata: yaml.Metadata{ Template: false, Environment: []string{"steps", "services", "secrets"}, }, Steps: yaml.StepSlice{ &yaml.Step{ Environment: environment(nil, m, nil, nil), Image: "#init", Name: "init", Pull: "not_present", }, &yaml.Step{ Environment: environment(nil, m, nil, nil), Image: "target/vela-git:v0.3.0", Name: "clone", Pull: "not_present", }, &yaml.Step{ Image: "plugins/docker:18.09", Environment: nil, Name: "docker", Pull: "always", Parameters: map[string]interface{}{ "init_options": map[interface{}]interface{}{ "get_plugins": "true", }, }, }, }, } want2 := &yaml.Build{ Version: "1", Metadata: yaml.Metadata{ Template: false, Environment: []string{"steps", "services", "secrets"}, }, Steps: yaml.StepSlice{ &yaml.Step{ Environment: environment(nil, m, nil, nil), Image: "#init", Name: "init", Pull: "not_present", }, &yaml.Step{ Environment: environment(nil, m, nil, nil), Image: "target/vela-git:v0.3.0", Name: "clone", Pull: "not_present", }, &yaml.Step{ Image: "plugins/docker:18.09", Environment: nil, Name: "docker", Pull: "always", Parameters: map[string]interface{}{ "init_options": map[interface{}]interface{}{ "get_plugins": "true", }, }, }, &yaml.Step{ Image: "alpine", Environment: nil, Name: "modification", Pull: "always", Commands: []string{"echo hello from modification"}, }, }, } 
engine.POST("/config/unmodified", func(c *gin.Context) { c.Header("Content-Type", "application/json") response, err := convertResponse(want) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } c.JSON(http.StatusOK, response) }) engine.POST("/config/timeout", func(c *gin.Context) { time.Sleep(3 * time.Second) c.Header("Content-Type", "application/json") response, err := convertResponse(want) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } c.JSON(http.StatusOK, response) }) engine.POST("/config/modified", func(c *gin.Context) { c.Header("Content-Type", "application/json") output := want var steps []*yaml.Step steps = append(steps, want.Steps...) steps = append(steps, &yaml.Step{ Image: "alpine", Environment: nil, Name: "modification", Pull: "always", Commands: []string{"echo hello from modification"}, }) output.Steps = steps response, err := convertResponse(want) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } c.JSON(http.StatusOK, response) }) engine.POST("/config/empty", func(c *gin.Context) { c.Status(http.StatusOK) }) engine.POST("/config/unauthorized", func(c *gin.Context) { c.Header("Content-Type", "application/json") response, err := convertResponse(want) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } c.JSON(http.StatusForbidden, response) }) s := httptest.NewServer(engine) defer s.Close() name := "foo" author := "author" number := 1 type args struct { endpoint string build *yaml.Build libraryBuild *library.Build repo *library.Repo } tests := []struct { name string args args want *yaml.Build wantErr bool }{ {"unmodified", args{ build: want, libraryBuild: &library.Build{Number: &number, Author: &author}, repo: &library.Repo{Name: &name}, endpoint: fmt.Sprintf("%s/%s", s.URL, "config/unmodified"), }, want, false}, {"modified", args{ build: want, libraryBuild: &library.Build{Number: &number, Author: &author}, repo: 
&library.Repo{Name: &name}, endpoint: fmt.Sprintf("%s/%s", s.URL, "config/modified"), }, want2, false}, {"invalid endpoint", args{ build: want, libraryBuild: &library.Build{Number: &number, Author: &author}, repo: &library.Repo{Name: &name}, endpoint: "bad", }, nil, true}, {"unauthorized endpoint", args{ build: want, libraryBuild: &library.Build{Number: &number, Author: &author}, repo: &library.Repo{Name: &name}, endpoint: fmt.Sprintf("%s/%s", s.URL, "config/unauthorized"), }, nil, true}, {"timeout endpoint", args{ build: want, libraryBuild: &library.Build{Number: &number, Author: &author}, repo: &library.Repo{Name: &name}, endpoint: fmt.Sprintf("%s/%s", s.URL, "config/timeout"), }, nil, true}, {"empty payload", args{ build: want, libraryBuild: &library.Build{Number: &number, Author: &author}, repo: &library.Repo{Name: &name}, endpoint: fmt.Sprintf("%s/%s", s.URL, "config/empty"), }, nil, true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { compiler := client{ ModificationService: ModificationConfig{ Timeout: 2 * time.Second, Retries: 2, Endpoint: tt.args.endpoint, }, } got, err := compiler.modifyConfig(tt.args.build, tt.args.libraryBuild, tt.args.repo) if (err != nil) != tt.wantErr { t.Errorf("modifyConfig() error = %v, wantErr %v", err, tt.wantErr) return } if diff := cmp.Diff(tt.want, got); diff != "" { t.Errorf("modifyConfig() mismatch (-want +got):\n%s", diff) } }) } }
explode_data.jsonl/24140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2903 }
[ 2830, 3393, 8179, 57725, 2648, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 2266, 198, 3174, 258, 4202, 3636, 3268, 258, 8787, 3636, 692, 34653, 1669, 54320, 70334, 7121, 47023, 741, 197, 6878, 4712, 1669, 46183, 7251, 2271, 1972, 2026...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPodControllerLookup(t *testing.T) { manager := NewReplicaSetController(clientset.NewForConfigOrDie(&restclient.Config{Host: "", ContentConfig: restclient.ContentConfig{GroupVersion: testapi.Default.GroupVersion()}}), controller.NoResyncPeriodFunc, BurstReplicas, 0) manager.podStoreSynced = alwaysReady testCases := []struct { inRSs []*extensions.ReplicaSet pod *api.Pod outRSName string }{ // pods without labels don't match any ReplicaSets { inRSs: []*extensions.ReplicaSet{ {ObjectMeta: api.ObjectMeta{Name: "basic"}}}, pod: &api.Pod{ObjectMeta: api.ObjectMeta{Name: "foo1", Namespace: api.NamespaceAll}}, outRSName: "", }, // Matching labels, not namespace { inRSs: []*extensions.ReplicaSet{ { ObjectMeta: api.ObjectMeta{Name: "foo"}, Spec: extensions.ReplicaSetSpec{ Selector: &unversioned.LabelSelector{MatchLabels: map[string]string{"foo": "bar"}}, }, }, }, pod: &api.Pod{ ObjectMeta: api.ObjectMeta{ Name: "foo2", Namespace: "ns", Labels: map[string]string{"foo": "bar"}}}, outRSName: "", }, // Matching ns and labels returns the key to the ReplicaSet, not the ReplicaSet name { inRSs: []*extensions.ReplicaSet{ { ObjectMeta: api.ObjectMeta{Name: "bar", Namespace: "ns"}, Spec: extensions.ReplicaSetSpec{ Selector: &unversioned.LabelSelector{MatchLabels: map[string]string{"foo": "bar"}}, }, }, }, pod: &api.Pod{ ObjectMeta: api.ObjectMeta{ Name: "foo3", Namespace: "ns", Labels: map[string]string{"foo": "bar"}}}, outRSName: "bar", }, } for _, c := range testCases { for _, r := range c.inRSs { manager.rsStore.Add(r) } if rs := manager.getPodReplicaSet(c.pod); rs != nil { if c.outRSName != rs.Name { t.Errorf("Got replica set %+v expected %+v", rs.Name, c.outRSName) } } else if c.outRSName != "" { t.Errorf("Expected a replica set %v pod %v, found none", c.outRSName, c.pod.Name) } } }
explode_data.jsonl/10046
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 846 }
[ 2830, 3393, 23527, 2051, 34247, 1155, 353, 8840, 836, 8, 341, 92272, 1669, 1532, 18327, 15317, 1649, 2051, 12805, 746, 7121, 2461, 2648, 2195, 18175, 2099, 3927, 2972, 10753, 90, 9296, 25, 7342, 8883, 2648, 25, 2732, 2972, 12614, 2648, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_IntProvide(t *testing.T) { c := NewLidi(Settings{}) a := rand.Int() err := c.Provide(a) if err != nil { t.Error(err) } err = c.InvokeFunction(func(b int) { if a != b { t.Fatal("Not equal") } }) if err != nil { t.Error(err) } }
explode_data.jsonl/40200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 32054, 60424, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 43, 12278, 57395, 37790, 11323, 1669, 10382, 7371, 741, 9859, 1669, 272, 7763, 19448, 2877, 340, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAgentPoolProfile(t *testing.T) { mockCS := getMockBaseContainerService("1.10") properties := mockCS.Properties properties.OrchestratorProfile.OrchestratorType = Kubernetes properties.MasterProfile.Count = 1 mockCS.SetPropertiesDefaults(false, false) if properties.AgentPoolProfiles[0].ScaleSetPriority != "" { t.Fatalf("AgentPoolProfiles[0].ScaleSetPriority did not have the expected configuration, got %s, expected %s", properties.AgentPoolProfiles[0].ScaleSetPriority, "") } if properties.AgentPoolProfiles[0].ScaleSetEvictionPolicy != "" { t.Fatalf("AgentPoolProfiles[0].ScaleSetEvictionPolicy did not have the expected configuration, got %s, expected %s", properties.AgentPoolProfiles[0].ScaleSetEvictionPolicy, "") } properties.AgentPoolProfiles[0].ScaleSetPriority = ScaleSetPriorityLow mockCS.SetPropertiesDefaults(false, false) if properties.AgentPoolProfiles[0].ScaleSetEvictionPolicy != ScaleSetEvictionPolicyDelete { t.Fatalf("AgentPoolProfile[0].ScaleSetEvictionPolicy did not have the expected configuration, got %s, expected %s", properties.AgentPoolProfiles[0].ScaleSetEvictionPolicy, ScaleSetEvictionPolicyDelete) } }
explode_data.jsonl/33877
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 367 }
[ 2830, 3393, 16810, 10551, 8526, 1155, 353, 8840, 836, 8, 341, 77333, 6412, 1669, 633, 11571, 3978, 4502, 1860, 445, 16, 13, 16, 15, 1138, 86928, 1669, 7860, 6412, 15945, 198, 86928, 90449, 331, 15111, 850, 8526, 90449, 331, 15111, 850, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInsert(t *testing.T) { table := []struct { vals []int expect string }{ {[]int{1}, "B:1(,)"}, {[]int{5, 1, 10}, "B:5(B:1(,),B:10(,))"}, {[]int{5, 1, 10, 7}, "B:5(B:1(,),B:10(R:7(,),))"}, {[]int{5, 1, 10, 7, 6}, "B:5(B:1(,),B:7(R:6(,),R:10(,)))"}, {[]int{1, 5, 6, 7, 10}, "B:5(B:1(,),B:7(R:6(,),R:10(,)))"}, } for _, te := range table { var root *RBNode for _, val := range te.vals { root = root.Insert(val) } if root.String() != te.expect { t.Errorf("Insert() => %q, want %q", root, te.expect) } if _, err := root.Validate(); err != nil { t.Errorf("Insert() invalid: %s", err) t.Logf(" Tree: %s", root) } } }
explode_data.jsonl/9587
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 369 }
[ 2830, 3393, 13780, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 3056, 1235, 341, 197, 19302, 82, 256, 3056, 396, 198, 197, 24952, 914, 198, 197, 59403, 197, 197, 90, 1294, 396, 90, 16, 2137, 330, 33, 25, 16, 7, 46021, 7115, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestJob_Delete(t *testing.T) { type fields struct { session session.ServiceFormatter info Response } tests := []struct { name string fields fields wantErr bool }{ { name: "Passing", fields: fields{ info: Response{ ID: "1234", }, session: &mockSessionFormatter{ url: "https://test.salesforce.com", client: mockHTTPClient(func(req *http.Request) *http.Response { if req.URL.String() != "https://test.salesforce.com/jobs/ingest/1234" { return &http.Response{ StatusCode: 500, Status: "Invalid URL", Body: ioutil.NopCloser(strings.NewReader(req.URL.String())), Header: make(http.Header), } } if req.Method != http.MethodDelete { return &http.Response{ StatusCode: 500, Status: "Invalid Method", Body: ioutil.NopCloser(strings.NewReader(req.Method)), Header: make(http.Header), } } return &http.Response{ StatusCode: http.StatusNoContent, Status: "Good", Body: ioutil.NopCloser(strings.NewReader("")), Header: make(http.Header), } }), }, }, wantErr: false, }, { name: "Fail", fields: fields{ info: Response{ ID: "1234", }, session: &mockSessionFormatter{ url: "https://test.salesforce.com", client: mockHTTPClient(func(req *http.Request) *http.Response { if req.URL.String() != "https://test.salesforce.com/jobs/ingest/1234" { return &http.Response{ StatusCode: 500, Status: "Invalid URL", Body: ioutil.NopCloser(strings.NewReader(req.URL.String())), Header: make(http.Header), } } if req.Method != http.MethodDelete { return &http.Response{ StatusCode: 500, Status: "Invalid Method", Body: ioutil.NopCloser(strings.NewReader(req.Method)), Header: make(http.Header), } } return &http.Response{ StatusCode: http.StatusBadRequest, Status: "Good", Body: ioutil.NopCloser(strings.NewReader("")), Header: make(http.Header), } }), }, }, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { j := &Job{ session: tt.fields.session, info: tt.fields.info, } if err := j.Delete(); (err != nil) != tt.wantErr { t.Errorf("Job.Delete() error = %v, wantErr %v", err, tt.wantErr) } }) } 
}
explode_data.jsonl/19884
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1304 }
[ 2830, 3393, 12245, 57418, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 25054, 3797, 13860, 14183, 198, 197, 27043, 262, 5949, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 55276, 220, 5043, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetDBCodeVersion(t *testing.T) { tests := []struct { desc string storedMigration Migration expectVersion semver.Version expectErr string }{ { desc: "no code version", storedMigration: Migration{}, expectVersion: semver.Version{}, }, { desc: "code version, valid", storedMigration: Migration{CodeVersion: "1.2.3"}, expectVersion: semver.Version{Major: 1, Minor: 2, Patch: 3, Pre: nil, Build: nil}, }, { desc: "code version, invalid", storedMigration: Migration{CodeVersion: "a.2*.3"}, expectErr: "unable to parse code version from DB: Invalid character(s) found in major number \"a\"", }, } for _, tt := range tests { tt := tt // alias loop variable as it is used in the closure t.Run(tt.desc, func(t *testing.T) { retVersion, err := getDBCodeVersion(tt.storedMigration) if tt.expectErr != "" { assert.Equal(t, semver.Version{}, retVersion) assert.Equal(t, tt.expectErr, err.Error()) return } assert.Equal(t, tt.expectVersion, retVersion) assert.NoError(t, err) }) } }
explode_data.jsonl/43222
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 498 }
[ 2830, 3393, 1949, 3506, 2078, 5637, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 41653, 310, 914, 198, 197, 18388, 3018, 20168, 21248, 198, 197, 24952, 5637, 256, 5234, 423, 35842, 198, 197, 24952, 7747, 981, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReadAccount(t *testing.T) { tests := []struct { name string conn Connection exp []byte }{ { name: "should list table output correctly", conn: testNewConnection(t, table.NewWriter()), exp: []byte(testReadAccountTable), }, { name: "should list json output correctly", conn: testNewConnection(t, json.NewWriter()), exp: []byte(testReadAccountJSON), }, } var buf bytes.Buffer for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { tc.conn.writer.SetOutputMirror(&buf) fs := flag.NewFlagSet("id", flag.ExitOnError) ctx := cli.NewContext(nil, fs, nil) if err := tc.conn.ReadAccount(ctx); err != nil { t.Errorf("error running test: %v", err) } got := buf.Bytes() if bytes.Compare(got, tc.exp) != 0 { t.Errorf("unexpected bytes: got %s, exp %s", got, tc.exp) } buf.Reset() }) } }
explode_data.jsonl/64647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 365 }
[ 2830, 3393, 4418, 7365, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 32917, 11032, 198, 197, 48558, 220, 3056, 3782, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 5445, 1140, 1965, 2550...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestJobSpecErrorsController_Delete_InvalidUuid(t *testing.T) { t.Parallel() ethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplication(t, ethClient, ) defer cleanup() require.NoError(t, app.Start()) client := app.NewHTTPClient() resp, cleanup := client.Get("/v2/specs/garbage") defer cleanup() assert.Equal(t, http.StatusUnprocessableEntity, resp.StatusCode, "Response should be unprocessable entity") }
explode_data.jsonl/71931
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 12245, 8327, 13877, 2051, 57418, 62, 7928, 38431, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 769, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1155, 340, 16867,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNumberDataPoint_DoubleVal(t *testing.T) { ms := NewNumberDataPoint() assert.EqualValues(t, float64(0.0), ms.DoubleVal()) testValDoubleVal := float64(17.13) ms.SetDoubleVal(testValDoubleVal) assert.EqualValues(t, testValDoubleVal, ms.DoubleVal()) }
explode_data.jsonl/32725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 2833, 1043, 2609, 84390, 2208, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 2833, 1043, 2609, 741, 6948, 12808, 6227, 1155, 11, 2224, 21, 19, 7, 15, 13, 15, 701, 9829, 36113, 2208, 2398, 18185, 2208, 7378, 2208, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPointerUnmarshal(t *testing.T) { result := pointerMarshalTestStruct{} err := Unmarshal(pointerTestToml, &result) expected := pointerTestData if err != nil { t.Fatal(err) } if !reflect.DeepEqual(result, expected) { t.Errorf("Bad pointer unmarshal: expected %v, got %v", expected, result) } }
explode_data.jsonl/46321
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 9084, 1806, 27121, 1155, 353, 8840, 836, 8, 341, 9559, 1669, 7445, 55438, 2271, 9422, 16094, 9859, 1669, 1230, 27121, 81962, 2271, 24732, 75, 11, 609, 1382, 340, 42400, 1669, 7445, 83920, 198, 743, 1848, 961, 2092, 341, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHandleCommandRespondsToAPanic(t *testing.T) { defer leaktest.Check(t)() ctx, cancel := context.WithCancel(context.Background()) defer cancel() var origReply = _reply defer func() { _reply = origReply }() didPanic := false sentMessage := "" _reply = func(ctx context.Context, tweet *twitter.Tweet, message message.Localized) replier.ReplyResult { // Panic the first time, but let the unknown reply go through if !didPanic { didPanic = true panic("touched the third rail") } sentMessage = string(message) parentTweet := &twitter.Tweet{Id: "123"} return replier.ReplyResult{ ParentTweet: parentTweet, Err: nil, } } mockTwitter := &twitter_test.MockTwitter{T: t} ctx = WithHandleCommand(ctx, mockTwitter) ctx, err := replier.WithReplier(ctx, mockTwitter, false) assert.NoError(t, err) assert.Panics(t, func() { HandleCommand(ctx, "help", &twitter.Tweet{}) }) unknownErr := structured_error.Wrap(errors.New("bad news bears"), structured_error.Unknown) expected := string(message.ErrorMessage(ctx, unknownErr)) assert.Equal(t, expected, sentMessage) }
explode_data.jsonl/10602
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 396 }
[ 2830, 3393, 6999, 4062, 65354, 82, 1249, 2537, 31270, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 10600, 1155, 8, 741, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 741, 2405, 2713, 20841, 284, 716, 210...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEscapeTranslation(t *testing.T) { tests := []struct { in, out string }{ {`"\a"`, `"\a"`}, {`"\b"`, `"\b"`}, {`"\f"`, `"\f"`}, {`"\n"`, `"\n"`}, {`"\r"`, `"\r"`}, {`"\t"`, `"\t"`}, {`"\v"`, `"\v"`}, {`"\d\g\h"`, `"dgh"`}, {`'\d\g\h'`, "`\\d\\g\\h`"}, {`"\""`, `"\""`}, {`"\'"`, `"'"`}, {`'\\'`, "`\\`"}, {`'\''`, "`'`"}, {`"\\\""`, `"\\\""`}, {`%w|x\||`, "`x|`"}, } for i, tt := range tests { if caseNum == 0 || caseNum == i+1 { p, err := ParseString(tt.in) node := p.Statements[0].(*StringNode) if tt.out != node.GoString() { t.Errorf("[%d] Expected %s but got %s", i+1, tt.out, node.GoString()) if err != nil { t.Errorf("[%d] Parse errors: %s", i+1, err) } } } } }
explode_data.jsonl/75152
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 48124, 24412, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 11, 700, 914, 198, 197, 59403, 197, 197, 90, 63, 11934, 64, 1, 7808, 1565, 11934, 64, 39917, 1583, 197, 197, 90, 63, 11934, 65, 1, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestStartCommandLineWithWrongArgumentLength(t *testing.T) { want := "Please pass some command" os.Args = []string{ "--t=123", } err := StartCommandLine(emptyOnlyOneCommand) if err.Error() != want { t.Errorf("Error handling error: want %s, got %s", want, err.Error()) } }
explode_data.jsonl/30297
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 3479, 71885, 2354, 29185, 9171, 4373, 1155, 353, 8840, 836, 8, 341, 50780, 1669, 330, 5501, 1494, 1045, 3210, 698, 25078, 51015, 284, 3056, 917, 515, 197, 197, 74757, 83, 28, 16, 17, 18, 756, 197, 532, 9859, 1669, 5145, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSetEndpointAndPut(t *testing.T) { defer testutil.AfterTest(t) clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 2}) defer clus.Terminate(t) clus.Client(1).SetEndpoints(clus.Members[0].GRPCAddr()) _, err := clus.Client(1).Put(context.TODO(), "foo", "bar") if err != nil && !strings.Contains(err.Error(), "closing") { t.Fatal(err) } }
explode_data.jsonl/30358
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 1649, 27380, 3036, 19103, 1155, 353, 8840, 836, 8, 341, 16867, 1273, 1314, 36892, 2271, 1155, 340, 197, 4163, 1669, 17590, 7121, 28678, 53, 18, 1155, 11, 609, 60168, 72883, 2648, 90, 1695, 25, 220, 17, 3518, 16867, 1185, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInitHoF(t *testing.T) { var conf = NewDefaultGAConfig() conf.NPops = 4 var ga, err = conf.NewGA() if err != nil { t.Errorf("Expected nil, got %v", err) } if err = ga.init(NewVector); err != nil { t.Errorf("Expected nil, got %v", err) } for _, pop := range ga.Populations { for _, indi := range pop.Individuals { if ga.HallOfFame[0].Fitness > indi.Fitness { t.Error("The current best individual is not the overall best") } } } }
explode_data.jsonl/82078
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 3803, 47978, 37, 1155, 353, 8840, 836, 8, 341, 2405, 2335, 284, 1532, 3675, 16128, 2648, 741, 67850, 2067, 47, 3721, 284, 220, 19, 198, 2405, 13510, 11, 1848, 284, 2335, 7121, 16128, 741, 743, 1848, 961, 2092, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestLogger_Log(t *testing.T) { expectedLog := testLogStatement + "\n" // Note that Log always adds a new line. f := func() { logger := NewLogger(DEBUG) logger.Log(testLogStatement) } output := testutil.StdoutOutputForFunc(f) if output != expectedLog { t.Errorf("Stdout mismatch. Expected: %s Got: %s", expectedLog, output) } }
explode_data.jsonl/46806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 7395, 44083, 1155, 353, 8840, 836, 8, 341, 42400, 2201, 1669, 1273, 2201, 8636, 488, 2917, 77, 1, 442, 7036, 429, 2835, 2677, 11367, 264, 501, 1555, 382, 1166, 1669, 2915, 368, 341, 197, 17060, 1669, 1532, 7395, 52792, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAndroidAppLinkType exercises the sdk_version link-type check between an
// android_app and its java_library / android_library dependencies: an app with
// platform_apis (or system_current) may use libraries built against both
// "current" and "system_current", while narrower app sdk_versions are rejected
// with a "consider adjusting sdk_version: OR platform_apis:" error.
func TestAndroidAppLinkType(t *testing.T) {
	// platform_apis app: all dependency sdk_versions are acceptable.
	testJava(t, `
		android_app {
			name: "foo",
			srcs: ["a.java"],
			libs: ["bar"],
			static_libs: ["baz"],
			platform_apis: true,
		}

		java_library {
			name: "bar",
			sdk_version: "current",
			srcs: ["b.java"],
		}

		android_library {
			name: "baz",
			sdk_version: "system_current",
			srcs: ["c.java"],
		}
	`)

	// sdk_version: "current" app cannot statically link a "system_current" lib.
	testJavaError(t, "consider adjusting sdk_version: OR platform_apis:", `
		android_app {
			name: "foo",
			srcs: ["a.java"],
			libs: ["bar"],
			sdk_version: "current",
			static_libs: ["baz"],
		}

		java_library {
			name: "bar",
			sdk_version: "current",
			srcs: ["b.java"],
		}

		android_library {
			name: "baz",
			sdk_version: "system_current",
			srcs: ["c.java"],
		}
	`)

	// system_current app may use both current and system_current deps.
	testJava(t, `
		android_app {
			name: "foo",
			srcs: ["a.java"],
			libs: ["bar"],
			sdk_version: "system_current",
			static_libs: ["baz"],
		}

		java_library {
			name: "bar",
			sdk_version: "current",
			srcs: ["b.java"],
		}

		android_library {
			name: "baz",
			sdk_version: "system_current",
			srcs: ["c.java"],
		}
	`)

	// A dependency with no sdk_version (platform) is too broad for a
	// system_current app.
	testJavaError(t, "consider adjusting sdk_version: OR platform_apis:", `
		android_app {
			name: "foo",
			srcs: ["a.java"],
			libs: ["bar"],
			sdk_version: "system_current",
			static_libs: ["baz"],
		}

		java_library {
			name: "bar",
			sdk_version: "current",
			srcs: ["b.java"],
		}

		android_library {
			name: "baz",
			srcs: ["c.java"],
		}
	`)
}
explode_data.jsonl/58476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 717 }
[ 2830, 3393, 21831, 2164, 3939, 929, 1155, 353, 8840, 836, 8, 341, 18185, 15041, 1155, 11, 22074, 197, 197, 5954, 8191, 341, 298, 11609, 25, 330, 7975, 756, 298, 41144, 82, 25, 4383, 64, 10848, 8097, 298, 197, 35719, 25, 4383, 2257, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGaiaCLICreateValidator drives gaiacli end-to-end: it boots a gaiad node,
// funds the "bar" account from "foo", creates a validator from "bar", then
// begins unbonding a single share — checking account balances, validator
// state, staking parameters and pool invariants at each step.
func TestGaiaCLICreateValidator(t *testing.T) {
	chainID, servAddr, port := initializeFixtures(t)
	flags := fmt.Sprintf("--home=%s --node=%v --chain-id=%v", gaiacliHome, servAddr, chainID)

	// start gaiad server
	proc := tests.GoExecuteTWithStdout(t, fmt.Sprintf("gaiad start --home=%s --rpc.laddr=%v", gaiadHome, servAddr))
	defer proc.Stop(false)
	tests.WaitForTMStart(port)
	tests.WaitForNextNBlocksTM(2, port)

	fooAddr, _ := executeGetAddrPK(t, fmt.Sprintf("gaiacli keys show foo --output=json --home=%s", gaiacliHome))
	barAddr, barPubKey := executeGetAddrPK(t, fmt.Sprintf("gaiacli keys show bar --output=json --home=%s", gaiacliHome))
	barCeshPubKey := sdk.MustBech32ifyValPub(barPubKey)

	// Fund "bar" with 10steak from "foo" so it can self-delegate below.
	executeWrite(t, fmt.Sprintf("gaiacli send %v --amount=10steak --to=%s --from=foo", flags, barAddr), app.DefaultKeyPass)
	tests.WaitForNextNBlocksTM(2, port)

	barAcc := executeGetAccount(t, fmt.Sprintf("gaiacli account %s %v", barAddr, flags))
	require.Equal(t, int64(10), barAcc.GetCoins().AmountOf("steak").Int64())
	fooAcc := executeGetAccount(t, fmt.Sprintf("gaiacli account %s %v", fooAddr, flags))
	require.Equal(t, int64(40), fooAcc.GetCoins().AmountOf("steak").Int64())

	// Mirror the expected pool state locally so it can be compared at the end.
	defaultParams := stake.DefaultParams()
	initialPool := stake.InitialPool()
	initialPool.BondedTokens = initialPool.BondedTokens.Add(sdk.NewDec(100)) // Delegate tx on GaiaAppGenState
	initialPool = initialPool.ProcessProvisions(defaultParams)              // provisions are added to the pool every hour

	// create validator
	cvStr := fmt.Sprintf("gaiacli stake create-validator %v", flags)
	cvStr += fmt.Sprintf(" --from=%s", "bar")
	cvStr += fmt.Sprintf(" --pubkey=%s", barCeshPubKey)
	cvStr += fmt.Sprintf(" --amount=%v", "2steak")
	cvStr += fmt.Sprintf(" --moniker=%v", "bar-vally")

	initialPool.BondedTokens = initialPool.BondedTokens.Add(sdk.NewDec(1))

	executeWrite(t, cvStr, app.DefaultKeyPass)
	tests.WaitForNextNBlocksTM(2, port)

	// "bar" self-bonded 2steak, so 8 remain liquid.
	barAcc = executeGetAccount(t, fmt.Sprintf("gaiacli account %s %v", barAddr, flags))
	require.Equal(t, int64(8), barAcc.GetCoins().AmountOf("steak").Int64(), "%v", barAcc)
	validator := executeGetValidator(t, fmt.Sprintf("gaiacli stake validator %s --output=json %v", barAddr, flags))
	require.Equal(t, validator.Operator, barAddr)
	require.True(sdk.DecEq(t, sdk.NewDec(2), validator.Tokens))

	// unbond a single share
	unbondStr := fmt.Sprintf("gaiacli stake unbond begin %v", flags)
	unbondStr += fmt.Sprintf(" --from=%s", "bar")
	unbondStr += fmt.Sprintf(" --validator=%s", barAddr)
	unbondStr += fmt.Sprintf(" --shares-amount=%v", "1")

	success := executeWrite(t, unbondStr, app.DefaultKeyPass)
	require.True(t, success)
	tests.WaitForNextNBlocksTM(2, port)

	/* // this won't be what we expect because we've only started unbonding, haven't completed
	barAcc = executeGetAccount(t, fmt.Sprintf("gaiacli account %v %v", barCech, flags))
	require.Equal(t, int64(9), barAcc.GetCoins().AmountOf("steak").Int64(), "%v", barAcc)
	*/
	validator = executeGetValidator(t, fmt.Sprintf("gaiacli stake validator %s --output=json %v", barAddr, flags))
	require.Equal(t, "1.0000000000", validator.Tokens.String())

	params := executeGetParams(t, fmt.Sprintf("gaiacli stake parameters --output=json %v", flags))
	require.True(t, defaultParams.Equal(params))
	pool := executeGetPool(t, fmt.Sprintf("gaiacli stake pool --output=json %v", flags))
	require.Equal(t, initialPool.DateLastCommissionReset, pool.DateLastCommissionReset)
	require.Equal(t, initialPool.PrevBondedShares, pool.PrevBondedShares)
	require.Equal(t, initialPool.BondedTokens, pool.BondedTokens)
}
explode_data.jsonl/20994
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1351 }
[ 2830, 3393, 80788, 685, 3140, 1317, 964, 14256, 1155, 353, 8840, 836, 8, 341, 197, 8819, 915, 11, 4853, 13986, 11, 2635, 1669, 9468, 25958, 18513, 1155, 340, 59516, 1669, 8879, 17305, 21549, 5117, 7846, 82, 1177, 3509, 7846, 85, 1177, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInt64DataPointSlice_CopyTo(t *testing.T) { dest := NewInt64DataPointSlice() // Test CopyTo to empty NewInt64DataPointSlice().CopyTo(dest) assert.EqualValues(t, NewInt64DataPointSlice(), dest) // Test CopyTo larger slice generateTestInt64DataPointSlice().CopyTo(dest) assert.EqualValues(t, generateTestInt64DataPointSlice(), dest) // Test CopyTo same size slice generateTestInt64DataPointSlice().CopyTo(dest) assert.EqualValues(t, generateTestInt64DataPointSlice(), dest) }
explode_data.jsonl/19525
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 1072, 21, 19, 1043, 2609, 33236, 77637, 1249, 1155, 353, 8840, 836, 8, 341, 49616, 1669, 1532, 1072, 21, 19, 1043, 2609, 33236, 741, 197, 322, 3393, 14540, 1249, 311, 4287, 198, 197, 3564, 1072, 21, 19, 1043, 2609, 33236, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestClusterSlave scrapes a redis_exporter pointed at a Redis cluster slave
// (address taken from TEST_REDIS_CLUSTER_SLAVE_URI) and verifies that
// slave-specific metrics are present and that the master host/port labels
// occur the expected number of times.
func TestClusterSlave(t *testing.T) {
	if os.Getenv("TEST_REDIS_CLUSTER_SLAVE_URI") == "" {
		t.Skipf("TEST_REDIS_CLUSTER_SLAVE_URI not set - skipping")
	}
	addr := os.Getenv("TEST_REDIS_CLUSTER_SLAVE_URI")
	e, _ := NewRedisExporter(addr, Options{Namespace: "test", Registry: prometheus.NewRegistry()})
	ts := httptest.NewServer(e)
	defer ts.Close()

	// Drain the collector in the background; the channel is closed once
	// Collect returns so the goroutine does not leak.
	chM := make(chan prometheus.Metric, 10000)
	go func() {
		e.Collect(chM)
		close(chM)
	}()

	body := downloadURL(t, ts.URL+"/metrics")
	log.Debugf("slave - body: %s", body)
	// Slave-specific metric families that must appear in the scrape output.
	for _, want := range []string{
		"test_instance_info",
		"test_master_last_io_seconds",
		"test_slave_info",
	} {
		if !strings.Contains(body, want) {
			t.Errorf("Did not find key [%s] \nbody: %s", want, body)
		}
	}

	// Pull the advertised master host (dotted-quad IPv4 label) and master
	// port label out of the scraped text.
	hostReg, _ := regexp.Compile(`master_host="([0,1]?\d{1,2}|2([0-4][0-9]|5[0-5]))(\.([0,1]?\d{1,2}|2([0-4][0-9]|5[0-5]))){3}"`)
	masterHost := hostReg.FindString(string(body))
	portReg, _ := regexp.Compile(`master_port="(\d+)"`)
	masterPort := portReg.FindString(string(body))

	// Each extracted label is expected to occur exactly 5 times in the body.
	for wantedKey, wantedVal := range map[string]int{
		masterHost: 5,
		masterPort: 5,
	} {
		if res := strings.Count(body, wantedKey); res != wantedVal {
			t.Errorf("Result: %s -> %d, Wanted: %d \nbody: %s", wantedKey, res, wantedVal, body)
		}
	}
}
explode_data.jsonl/47005
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 573 }
[ 2830, 3393, 28678, 95960, 1155, 353, 8840, 836, 8, 341, 743, 2643, 64883, 445, 10033, 2192, 21202, 77871, 74418, 23116, 899, 621, 1591, 341, 197, 3244, 57776, 69, 445, 10033, 2192, 21202, 77871, 74418, 23116, 537, 738, 481, 42659, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRuleNoOSCKey(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := [][]string{ // 正面的例子 { "CREATE TABLE tbl (a int, b int)", }, // 反面的例子 { "CREATE TABLE tbl (a int, primary key(`a`))", "CREATE TABLE tbl (a int, unique key(`a`))", }, } for _, sql := range sqls[0] { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleNoOSCKey() if rule.Item != "KEY.002" { t.Error("Rule not match:", rule.Item, "Expect : KEY.002") } } else { t.Error("sqlparser.Parse Error:", err) } } for _, sql := range sqls[1] { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleNoOSCKey() if rule.Item != "OK" { t.Error("Rule not match:", rule.Item, "Expect : OK") } } else { t.Error("sqlparser.Parse Error:", err) } } common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76841
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 431 }
[ 2830, 3393, 11337, 2753, 78961, 1592, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 52931, 917, 515, 197, 197, 322, 71928, 96, 27091, 111564, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7