text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestValidIP4ToReturnTrueOnValidAddress(t *testing.T) { result := ipparser.ValidIP4("1.2.3.4") if result != true { t.Fatalf("Expected ValidIP(1.2.3.4) to be true but got false") } }
explode_data.jsonl/70071
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 4088, 3298, 19, 1249, 5598, 2514, 1925, 4088, 4286, 1155, 353, 8840, 836, 8, 341, 9559, 1669, 5997, 9657, 47156, 3298, 19, 445, 16, 13, 17, 13, 18, 13, 19, 5130, 743, 1102, 961, 830, 341, 197, 3244, 30762, 445, 18896, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOwnerReadPubEK(t *testing.T) { rwc := openTPMOrSkip(t) defer rwc.Close() // This test code assumes that the owner auth is the well-known value. ownerAuth := getAuth(ownerAuthEnvVar) pkb, err := OwnerReadPubEK(rwc, ownerAuth) if err != nil { t.Fatal("Couldn't read the pub EK using owner auth:", err) } pk, err := UnmarshalPubRSAPublicKey(pkb) if err != nil { t.Fatal("Couldn't unmarshal the endorsement key:", err) } if pk.N.BitLen() != 2048 { t.Fatal("Invalid endorsement key: not a 2048-bit RSA key") } }
explode_data.jsonl/75357
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 13801, 4418, 29162, 71133, 1155, 353, 8840, 836, 8, 341, 7000, 24028, 1669, 1787, 4239, 44, 2195, 35134, 1155, 340, 16867, 435, 24028, 10421, 2822, 197, 322, 1096, 1273, 2038, 21484, 429, 279, 6372, 4166, 374, 279, 1632, 213...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSyncNamespace(t *testing.T) { namespaceList := kapi.NamespaceList{ Items: []kapi.Namespace{ { ObjectMeta: kapi.ObjectMeta{Name: "foo", ResourceVersion: "1"}, }, { ObjectMeta: kapi.ObjectMeta{Name: "bar", ResourceVersion: "2"}, }, { ObjectMeta: kapi.ObjectMeta{Name: "car", ResourceVersion: "3"}, }, }, } mockKubeClient := testclient.NewSimpleFake(&namespaceList) reviewer := &mockReviewer{ expectedResults: map[string]*mockReview{ "foo": { users: []string{alice.GetName(), bob.GetName()}, groups: eve.GetGroups(), }, "bar": { users: []string{frank.GetName(), eve.GetName()}, groups: []string{"random"}, }, "car": { users: []string{}, groups: []string{}, }, }, } mockPolicyCache := &MockReadOnlyPolicyClient{} authorizationCache := NewAuthorizationCache(reviewer, mockKubeClient.Namespaces(), mockPolicyCache) // we prime the data we need here since we are not running reflectors for i := range namespaceList.Items { authorizationCache.namespaceStore.Add(&namespaceList.Items[i]) } // synchronize the cache authorizationCache.synchronize() validateList(t, authorizationCache, alice, sets.NewString("foo")) validateList(t, authorizationCache, bob, sets.NewString("foo")) validateList(t, authorizationCache, eve, sets.NewString("foo", "bar")) validateList(t, authorizationCache, frank, sets.NewString("bar")) // modify access rules reviewer.expectedResults["foo"].users = []string{bob.GetName()} reviewer.expectedResults["foo"].groups = []string{"random"} reviewer.expectedResults["bar"].users = []string{alice.GetName(), eve.GetName()} reviewer.expectedResults["bar"].groups = []string{"employee"} reviewer.expectedResults["car"].users = []string{bob.GetName(), eve.GetName()} reviewer.expectedResults["car"].groups = []string{"employee"} // modify resource version on each namespace to simulate a change had occurred to force cache refresh for i := range namespaceList.Items { namespace := namespaceList.Items[i] oldVersion, err := strconv.Atoi(namespace.ResourceVersion) if err != nil { 
t.Errorf("Bad test setup, resource versions should be numbered, %v", err) } newVersion := strconv.Itoa(oldVersion + 1) namespace.ResourceVersion = newVersion authorizationCache.namespaceStore.Add(&namespace) } // now refresh the cache (which is resource version aware) authorizationCache.synchronize() // make sure new rights hold validateList(t, authorizationCache, alice, sets.NewString("bar")) validateList(t, authorizationCache, bob, sets.NewString("foo", "bar", "car")) validateList(t, authorizationCache, eve, sets.NewString("bar", "car")) validateList(t, authorizationCache, frank, sets.NewString()) }
explode_data.jsonl/12032
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 939 }
[ 2830, 3393, 12154, 22699, 1155, 353, 8840, 836, 8, 341, 56623, 852, 1669, 595, 2068, 46011, 852, 515, 197, 197, 4353, 25, 3056, 74, 2068, 46011, 515, 298, 197, 515, 571, 23816, 12175, 25, 595, 2068, 80222, 63121, 25, 330, 7975, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHttpParser_composedHeaders(t *testing.T) { data := "HTTP/1.1 200 OK\r\n" + "Content-Length: 0\r\n" + "Date: Tue, 14 Aug 2012 22:31:45 GMT\r\n" + "Set-Cookie: aCookie=yummy\r\n" + "Set-Cookie: anotherCookie=why%20not\r\n" + "\r\n" http := httpModForTests(nil) http.parserConfig.sendHeaders = true http.parserConfig.sendAllHeaders = true message, ok, complete := testParse(http, data) assert.True(t, ok) assert.True(t, complete) assert.False(t, message.isRequest) assert.Equal(t, 200, int(message.statusCode)) assert.Equal(t, "OK", string(message.statusPhrase)) header, ok := message.headers["set-cookie"] assert.True(t, ok) assert.Equal(t, "aCookie=yummy, anotherCookie=why%20not", string(header)) }
explode_data.jsonl/16517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 2905, 6570, 2965, 3865, 10574, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 330, 9230, 14, 16, 13, 16, 220, 17, 15, 15, 10402, 12016, 1699, 1, 3610, 197, 197, 1, 2762, 52493, 25, 220, 15, 12016, 1699, 1, 3610, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRegisterRPC(t *testing.T) { if testing.Short() { t.SkipNow() } t.Parallel() g := newTestingGateway(t) defer func() { if err := g.Close(); err != nil { t.Fatal(err) } }() g.RegisterRPC("Foo", func(conn modules.PeerConn) error { return nil }) defer func() { if r := recover(); r == nil { t.Error("Registering the same RPC twice did not cause a panic") } }() g.RegisterRPC("Foo", func(conn modules.PeerConn) error { return nil }) }
explode_data.jsonl/74446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 8690, 29528, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 7039, 741, 197, 532, 3244, 41288, 7957, 741, 3174, 1669, 501, 16451, 40709, 1155, 340, 16867, 2915, 368, 341, 197, 743, 1848, 1669, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAvg(t *testing.T) { t.Parallel() testCases := []struct { slice []float64 val float64 }{ { slice: []float64{1, 2, 3, 4}, val: 2.5, }, { slice: []float64{-1, -2, -3, -4}, val: -2.5, }, } r := reducer.Avg() for _, c := range testCases { val, err := r.Reduce(c.slice) require.NoError(t, err) require.Equal(t, c.val, val) } }
explode_data.jsonl/4720
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 39447, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18185, 37302, 1669, 3056, 1235, 341, 197, 1903, 4754, 3056, 3649, 21, 19, 198, 197, 19302, 256, 2224, 21, 19, 198, 197, 59403, 197, 197, 515, 298, 1903, 4754, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_HandleSur(t *testing.T) { task := &Task{ Module: Sur, Input: "../test/sur.xlsx", Output: "../test/surTest.xlsx", } t.Log(task.Run()) }
explode_data.jsonl/25095
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 42714, 23043, 1155, 353, 8840, 836, 8, 341, 49115, 1669, 609, 6262, 515, 197, 197, 3332, 25, 8082, 345, 197, 66588, 25, 220, 7005, 1944, 2687, 324, 46838, 756, 197, 80487, 25, 7005, 1944, 2687, 324, 2271, 46838, 756, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBindUnaryOp(t *testing.T) { env := environment(map[string]interface{}{ "a": NumberType, "b": BoolType, "c": NewOutputType(NumberType), "d": NewOutputType(BoolType), "e": NewPromiseType(NumberType), "f": NewPromiseType(BoolType), }) scope := env.scope() cases := []exprTestCase{ // Standard operations {x: `-a`, t: NumberType}, {x: `!b`, t: BoolType}, // Lifted operations {x: `-c`, t: NewOutputType(NumberType)}, {x: `-e`, t: NewPromiseType(NumberType)}, {x: `!d`, t: NewOutputType(BoolType)}, {x: `!f`, t: NewPromiseType(BoolType)}, } for _, c := range cases { t.Run(c.x, func(t *testing.T) { expr, diags := BindExpressionText(c.x, scope, hcl.Pos{}) assert.Len(t, diags, 0) assert.Equal(t, c.t, expr.Type()) _, ok := expr.(*UnaryOpExpression) assert.True(t, ok) assert.Equal(t, c.x, fmt.Sprintf("%v", expr)) }) } }
explode_data.jsonl/42574
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 9950, 94545, 7125, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 4573, 9147, 14032, 31344, 67066, 197, 197, 56693, 788, 5624, 929, 345, 197, 197, 1, 65, 788, 12608, 929, 345, 197, 197, 96946, 788, 1532, 5097, 929, 42999, 929,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReaderSimple(t *testing.T) { data := "hello world" b := NewReader(strings.NewReader(data)) if s := readBytes(b); s != "hello world" { t.Errorf("simple hello world test failed: got %q", s) } b = NewReader(newRot13Reader(strings.NewReader(data))) if s := readBytes(b); s != "uryyb jbeyq" { t.Errorf("rot13 hello world test failed: got %q", s) } }
explode_data.jsonl/2874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 5062, 16374, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 330, 14990, 1879, 698, 2233, 1669, 1532, 5062, 51442, 68587, 2592, 1171, 743, 274, 1669, 1349, 7078, 1883, 1215, 274, 961, 330, 14990, 1879, 1, 341, 197, 3244, 13080, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStreamDelete(t *testing.T) { s, err := Run() ok(t, err) defer s.Close() c, err := redis.Dial("tcp", s.Addr()) ok(t, err) defer c.Close() _, err = redis.String(c.Do("XGROUP", "CREATE", "planets", "processing", "$", "MKSTREAM")) ok(t, err) _, err = redis.String(c.Do("XADD", "planets", "0-1", "name", "Mercury")) ok(t, err) msgs, err := redis.Values(c.Do("XREADGROUP", "GROUP", "processing", "alice", "STREAMS", "planets", ">")) ok(t, err) equals(t, []interface{}{[]interface{}{[]byte("planets"), []interface{}{[]interface{}{[]byte("0-1"), []interface{}{[]byte("name"), []byte("Mercury")}}}}}, msgs) count, err := redis.Int(c.Do("XDEL", "planets", "0-1")) ok(t, err) equals(t, 1, count) _, err = redis.Values(c.Do("XREADGROUP", "GROUP", "processing", "alice", "STREAMS", "planets", "0-0")) mustFail(t, err, redis.ErrNil.Error()) }
explode_data.jsonl/31885
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 3027, 6435, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 6452, 741, 59268, 1155, 11, 1848, 340, 16867, 274, 10421, 741, 1444, 11, 1848, 1669, 20870, 98462, 445, 27161, 497, 274, 93626, 2398, 59268, 1155, 11, 1848, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTransactionService_GetTransactionStatus(t *testing.T) { mockServer.AddRouter(&mock.Router{ Path: "/transaction/7D354E056A10E7ADAC66741D1021B0E79A57998EAD7E17198821141CE87CF63F/status", RespBody: statusJson, }) cl := mockServer.getTestNetClientUnsafe() txStatus, err := cl.Transaction.GetTransactionStatus(context.Background(), transactionHash) assert.Nilf(t, err, "TransactionService.GetTransactionStatus returned error: %v", err) tests.ValidateStringers(t, status, txStatus) }
explode_data.jsonl/69185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 8070, 1860, 13614, 8070, 2522, 1155, 353, 8840, 836, 8, 341, 77333, 5475, 1904, 9523, 2099, 16712, 31413, 515, 197, 69640, 25, 257, 3521, 13528, 14, 22, 35, 18, 20, 19, 36, 15, 20, 21, 32, 16, 15, 36, 22, 1808, 1706, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResource_Insert(t *testing.T) { type fields struct { update *update query *query insert *insert } type args struct { allOrNone bool records []sobject.Inserter } tests := []struct { name string fields fields args args want []sobject.InsertValue wantErr bool }{ { name: "success", fields: fields{ insert: &insert{ session: &mockSessionFormatter{ url: "something.com", client: mockHTTPClient(func(req *http.Request) *http.Response { if strings.HasPrefix(req.URL.String(), "something.com/composite/sobjects") == false { return &http.Response{ StatusCode: 500, Status: "Bad URL: " + req.URL.String(), Body: ioutil.NopCloser(strings.NewReader("resp")), Header: make(http.Header), } } if req.Method != http.MethodPost { return &http.Response{ StatusCode: 500, Status: "Bad Method", Body: ioutil.NopCloser(strings.NewReader("resp")), Header: make(http.Header), } } resp := ` [ { "success" : false, "errors" : [ { "statusCode" : "DUPLICATES_DETECTED", "message" : "Use one of these records?", "fields" : [ ] } ] }, { "id" : "003RM0000068xVCYAY", "success" : true, "errors" : [ ] } ]` return &http.Response{ StatusCode: http.StatusOK, Status: "Some Status", Body: ioutil.NopCloser(strings.NewReader(resp)), Header: make(http.Header), } }), }, }, }, args: args{ allOrNone: true, records: []sobject.Inserter{ &mockInserter{ sobject: "Account", fields: map[string]interface{}{ "Name": "example.com", "BillingCity": "San Francisco", }, }, &mockInserter{ sobject: "Contact", fields: map[string]interface{}{ "LastName": "Johnson", "FirstName": "Erica", }, }, }, }, want: []sobject.InsertValue{ { Success: false, Errors: []sfdc.Error{ { ErrorCode: "DUPLICATES_DETECTED", Message: "Use one of these records?", Fields: make([]string, 0), }, }, }, { Success: true, ID: "003RM0000068xVCYAY", Errors: make([]sfdc.Error, 0), }, }, wantErr: false, }, { name: "not initialized", fields: fields{}, args: args{}, wantErr: true, }, { name: "no records", fields: fields{ insert: &insert{}, }, args: args{}, wantErr: 
true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { r := &Resource{ update: tt.fields.update, query: tt.fields.query, insert: tt.fields.insert, } got, err := r.Insert(tt.args.allOrNone, tt.args.records) if (err != nil) != tt.wantErr { t.Errorf("Resource.Insert() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("Resource.Insert() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/64168
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1750 }
[ 2830, 3393, 4783, 76417, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 27175, 353, 2386, 198, 197, 27274, 220, 353, 1631, 198, 197, 59847, 353, 4208, 198, 197, 532, 13158, 2827, 2036, 341, 197, 50960, 2195, 4064, 1807, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMember(t *testing.T) { tmpl, err := ParseFile("testdir/test_member.slim") if err != nil { t.Fatal(err) } m := make(map[string]string) m["baz"] = "Baz!" type Baz struct { Fuga string } var buf bytes.Buffer err = tmpl.Execute(&buf, Values{ "foo": struct { Baz []Baz }{ Baz: []Baz{ {Fuga: "hello"}, {Fuga: "world"}, {Fuga: "golang"}, }, }, "bar": m, }) if err != nil { t.Fatal(err) } expect := readFile(t, "testdir/test_member.html") got := buf.String() if expect != got { t.Fatalf("expected %v but %v", expect, got) } }
explode_data.jsonl/80436
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 9366, 1155, 353, 8840, 836, 8, 341, 3244, 54010, 11, 1848, 1669, 14775, 1703, 445, 1944, 3741, 12697, 19388, 74257, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 2109, 1669, 1281, 9147, 14032, 30953...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRemoveFile(t *testing.T) { beforeTest(t) conn := _getConnection(t) defer conn.Close() client := agaveproto.NewSftpRelayClient(conn) // create a random directory name in our test dir tmpTestFilePath, err := _createTempFile("", ".bin") if err != nil { assert.FailNowf(t, err.Error(), "Unable to create temp test file: %s", err.Error()) } err = _updateLocalSharedTestDirOwnership() if err != nil { assert.FailNowf(t, err.Error(), "Unable to change permission on temp test dir: %s", err.Error()) } remoteTestFilePath := _resolveTestPath(tmpTestFilePath, SFTP_SHARED_TEST_DIR) req := &agaveproto.SrvRemoveRequest{ SystemConfig: _createRemoteSystemConfig(), RemotePath: remoteTestFilePath, } grpcResponse, err := client.Remove(context.Background(), req) if err != nil { assert.Nilf(t, err, "Error while invoking remote service: %v", err) } else { // get the test directory stat in the local shared directory _, err := os.Stat(_resolveTestPath(tmpTestFilePath, LocalSharedTestDir)) assert.True(t, os.IsNotExist(err), "File should not be present after calling Remove") assert.Equal(t, "", grpcResponse.Error, "Error message in response should be empty after successfully request") } afterTest(t) }
explode_data.jsonl/32550
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 13021, 1703, 1155, 353, 8840, 836, 8, 341, 63234, 2271, 1155, 692, 32917, 1669, 716, 52414, 1155, 340, 16867, 4534, 10421, 2822, 25291, 1669, 933, 523, 15110, 7121, 50, 25068, 6740, 352, 2959, 20571, 692, 197, 322, 1855, 264...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAckTimeout(t *testing.T) { ctx := context.Background() ctx, cancel := context.WithTimeout(ctx, 100*time.Millisecond) defer cancel() workChan := make(chan *TestWork, 10) go testWorker(workChan) a := ack.NewAckTree(ctx, func() { t.Errorf("expecting timeout error. Completion function should not be called") }, func(err error) { var expected *ack.TimeoutError if !errors.As(err, &expected) { t.Errorf("expecting timeout error, but get %s\n", err.Error()) } }) //send a test work with an ack tree workChan <- &TestWork{ack: a, workTime: 1 * time.Second} a.Wait() if !a.IsDone() { t.Errorf("AckTree should be done") } }
explode_data.jsonl/51888
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 55559, 7636, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 20985, 11, 9121, 1669, 2266, 26124, 7636, 7502, 11, 220, 16, 15, 15, 77053, 71482, 340, 16867, 9121, 2822, 97038, 46019, 1669, 1281, 35190, 353, 2271,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddAddressByIP(t *testing.T) { fmtErr := fmt.Errorf("") addrErr := &net.AddrError{} var tests = []struct { addrIP string err error }{ { someIP + ":8333", nil, }, { someIP, addrErr, }, { someIP[:12] + ":8333", fmtErr, }, { someIP + ":abcd", fmtErr, }, } amgr := addrmgr.New("testaddressbyip", nil) for i, test := range tests { err := amgr.AddAddressByIP(test.addrIP) if test.err != nil && err == nil { t.Errorf("TestGood test %d failed expected an error and got none", i) continue } if test.err == nil && err != nil { t.Errorf("TestGood test %d failed expected no error and got one", i) continue } if reflect.TypeOf(err) != reflect.TypeOf(test.err) { t.Errorf("TestGood test %d failed got %v, want %v", i, reflect.TypeOf(err), reflect.TypeOf(test.err)) continue } } }
explode_data.jsonl/49088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 402 }
[ 2830, 3393, 2212, 4286, 1359, 3298, 1155, 353, 8840, 836, 8, 341, 11009, 7747, 1669, 8879, 13080, 31764, 53183, 7747, 1669, 609, 4711, 93626, 1454, 16094, 2405, 7032, 284, 3056, 1235, 341, 197, 53183, 3298, 914, 198, 197, 9859, 262, 146...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestNodeNetwork(t *testing.T) { node := testSetup() if node.Network().Name() != testNodeNetName { t.Errorf("Expected network: %s, found: %s", testNodeNetName, node.Network().Name()) } }
explode_data.jsonl/23543
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 1955, 12320, 1155, 353, 8840, 836, 8, 341, 20831, 1669, 1273, 21821, 741, 743, 2436, 30149, 1005, 675, 368, 961, 1273, 1955, 6954, 675, 341, 197, 3244, 13080, 445, 18896, 3922, 25, 1018, 82, 11, 1730, 25, 1018, 82, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestGatherHostAddresses(t *testing.T) { type outputRow struct { IP string Preference int } for _, tc := range []struct { Name string Input []string Output []outputRow }{ { Name: "blank", }, { Name: "loopback", Input: []string{ "127.0.0.1", }, }, { Name: "Single IPv4", Input: []string{ "1.1.1.1", }, Output: []outputRow{ {"1.1.1.1", 65535}, }, }, { Name: "IPv4", Input: []string{ "1.1.1.1", "1.1.1.2", }, Output: []outputRow{ {"1.1.1.1", 2}, {"1.1.1.2", 1}, }, }, { Name: "Single IPv6", Input: []string{ "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", }, Output: []outputRow{ {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", 65535}, }, }, { Name: "IPv6", Input: []string{ "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", }, Output: []outputRow{ {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", 2}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", 1}, }, }, { // If a host has two IPv4 addresses and six IPv6 addresses, it will // insert an IPv4 address after four IPv6 addresses by choosing the // appropriate local preference values when calculating the pair // priorities. 
Name: "2xIPv4 and 6xIPv6", Input: []string{ "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa3", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa4", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa5", "2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa6", "1.1.1.1", "1.1.1.2", }, Output: []outputRow{ {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa1", 8}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa2", 7}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa3", 6}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa4", 5}, {"1.1.1.1", 4}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa5", 3}, {"2a03:e2c0:60f:52:cfe1:fdd:daf7:7fa6", 2}, {"1.1.1.2", 1}, }, }, } { t.Run(tc.Name, func(t *testing.T) { gatherAddrs := make([]gather.Addr, len(tc.Input)) for i, ip := range tc.Input { gatherAddrs[i] = gather.Addr{ IP: net.ParseIP(ip), } } expected := make([]HostAddr, len(tc.Output)) for i, row := range tc.Output { expected[i] = HostAddr{ IP: net.ParseIP(row.IP), LocalPreference: row.Preference, } } gotAddr, err := HostAddresses(gatherAddrs) if err != nil { t.Fatal(err) } if len(gotAddr) != len(expected) { t.Fatalf("bad length: %d (got) != %d (expected)", len(gotAddr), len(expected), ) } for i := range gotAddr { got := gotAddr[i] exp := expected[i] if got.LocalPreference != exp.LocalPreference || !got.IP.Equal(exp.IP) { t.Errorf("[%d]: %s, %d (got) != %s, %d (expected)", i, got.IP, got.LocalPreference, exp.IP, exp.LocalPreference, ) } } }) } }
explode_data.jsonl/10789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1728 }
[ 2830, 3393, 38, 1856, 9296, 52290, 1155, 353, 8840, 836, 8, 341, 13158, 2550, 3102, 2036, 341, 197, 197, 3298, 260, 914, 198, 197, 197, 31173, 526, 198, 197, 532, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 21297, 256, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestParseApplyOncePolicy(t *testing.T) { r := require.New(t) app := &v1beta1.Application{Spec: v1beta1.ApplicationSpec{ Policies: []v1beta1.AppPolicy{{Type: "example"}}, }} spec, err := ParseApplyOncePolicy(app) r.NoError(err) r.Nil(spec) app.Spec.Policies = append(app.Spec.Policies, v1beta1.AppPolicy{ Type: "apply-once", Properties: &runtime.RawExtension{Raw: []byte("bad value")}, }) _, err = ParseApplyOncePolicy(app) r.Error(err) policySpec := &v1alpha1.ApplyOncePolicySpec{Enable: true} bs, err := json.Marshal(policySpec) r.NoError(err) app.Spec.Policies[1].Properties.Raw = bs spec, err = ParseApplyOncePolicy(app) r.NoError(err) r.Equal(policySpec, spec) }
explode_data.jsonl/22
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 14463, 28497, 12522, 13825, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 340, 28236, 1669, 609, 85, 16, 19127, 16, 17521, 90, 8327, 25, 348, 16, 19127, 16, 17521, 8327, 515, 197, 10025, 42038, 25, 3056, 85, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntegrationsEmptyAwsGovCloudCredentials(t *testing.T) { var awsData api.AwsIntegrationData credentials := awsData.GetGovCloudCredentials() accountID := awsData.GetAccountID() secretKey := credentials.SecretAccessKey accessID := credentials.AccessKeyID assert.Empty(t, accountID) assert.Empty(t, secretKey) assert.Empty(t, accessID) }
explode_data.jsonl/81111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 1072, 14412, 804, 3522, 47359, 66901, 16055, 27025, 1155, 353, 8840, 836, 8, 341, 2405, 31521, 1043, 6330, 875, 8915, 52464, 1043, 198, 197, 32353, 1669, 31521, 1043, 2234, 66901, 16055, 27025, 741, 86866, 915, 1669, 31521, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSet(t *testing.T) { t.Parallel() hCopy := sample(t) one0, err := net.ResolveIPAddr("ip", "10.0.0.1") ok(t, err) hCopy.Set(*one0, "tendot") equals(t, len(hCopy.records), 4) equals(t, hCopy.records[3].Hostnames["tendot"], true) equals(t, hCopy.records[3].IpAddress.String(), "10.0.0.1") // appending same element shouldn't change anything hCopy.Set(*one0, "tendot") equals(t, len(hCopy.records), 4) one92, err := net.ResolveIPAddr("ip", "192.168.3.7") ok(t, err) hCopy.Set(*one92, "tendot") equals(t, hCopy.records[3].IpAddress.String(), "192.168.3.7") ip6, err := net.ResolveIPAddr("ip", "::1") ok(t, err) hCopy.Set(*ip6, "tendot") equals(t, len(hCopy.records), 5) equals(t, hCopy.records[4].IpAddress.String(), "::1") }
explode_data.jsonl/43703
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 357 }
[ 2830, 3393, 1649, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 9598, 12106, 1669, 6077, 1155, 340, 197, 603, 15, 11, 1848, 1669, 4179, 57875, 3298, 13986, 445, 573, 497, 330, 16, 15, 13, 15, 13, 15, 13, 16, 1138, 59268, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReplaceWords(t *testing.T) { wants := []struct { s string sNew string words []string err bool }{ { s: "hello there my friendy friend", sNew: "howdy there my friendly friends", words: []string{"howdy", "there", "my", "friendly", "friends"}, err: false, }, { s: "hello there my friendy friend", sNew: "howdy there my friendly friends", words: []string{"howdy", "there", "my", "friendly", "friends"}, err: false, }, { s: " hello there my friend ", sNew: " howdy there my friends ", words: []string{"howdy", "there", "my", "friends"}, err: false, }, } for _, w := range wants { f := newFields(w.s) errStr := "nil" if w.err { errStr = "error" } if got, err := f.replaceWords(w.words); got != w.sNew || w.err && err == nil { t.Errorf(` fields.replaceWords(%#v) return %q, %v, wanted %q, %s`, w.words, got, err, w.sNew, errStr) } } }
explode_data.jsonl/26716
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 23107, 23489, 1155, 353, 8840, 836, 8, 341, 6692, 1783, 1669, 3056, 1235, 341, 197, 1903, 257, 914, 198, 197, 1903, 3564, 220, 914, 198, 197, 197, 5761, 3056, 917, 198, 197, 9859, 256, 1807, 198, 197, 59403, 197, 197, 51...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestLazyAcker(t *testing.T) { type ackRequest struct { Events []fleetapi.AckEvent `json:"events"` } log, _ := logger.New("") client := newTestingClient() agentInfo := &testAgentInfo{} acker, err := fleet.NewAcker(log, agentInfo, client) if err != nil { t.Fatal(err) } lacker := NewAcker(acker, log) if acker == nil { t.Fatal("acker not initialized") } testID1 := "ack-test-action-id" testID2 := testID1 + "2" testID3 := testID1 + "3" testAction1 := &fleetapi.ActionUnknown{ActionID: testID1} testAction2 := &actionImmediate{ActionID: testID2} testAction3 := &fleetapi.ActionUnknown{ActionID: testID3} ch := client.Answer(func(headers http.Header, body io.Reader) (*http.Response, error) { content, err := ioutil.ReadAll(body) assert.NoError(t, err) cr := &ackRequest{} err = json.Unmarshal(content, &cr) assert.NoError(t, err) if len(cr.Events) == 0 { t.Fatal("expected events but got none") } if cr.Events[0].ActionID == testID1 { assert.EqualValues(t, 2, len(cr.Events)) assert.EqualValues(t, testID1, cr.Events[0].ActionID) assert.EqualValues(t, testID2, cr.Events[1].ActionID) } else { assert.EqualValues(t, 1, len(cr.Events)) } resp := wrapStrToResp(http.StatusOK, `{ "actions": [] }`) return resp, nil }) go func() { for range ch { } }() c := context.Background() if err := lacker.Ack(c, testAction1); err != nil { t.Fatal(err) } if err := lacker.Ack(c, testAction2); err != nil { t.Fatal(err) } if err := lacker.Ack(c, testAction3); err != nil { t.Fatal(err) } if err := lacker.Commit(c); err != nil { t.Fatal(err) } }
explode_data.jsonl/7597
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 688 }
[ 2830, 3393, 39766, 32, 15160, 1155, 353, 8840, 836, 8, 341, 13158, 10725, 1900, 2036, 341, 197, 197, 7900, 3056, 72698, 2068, 875, 377, 1556, 1565, 2236, 2974, 12389, 8805, 197, 630, 6725, 11, 716, 1669, 5925, 7121, 31764, 25291, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestVPCs_Get(t *testing.T) { setup() defer teardown() svc := client.VPCs path := "/v2/vpcs" want := vTestObj id := "880b7f98-f062-404d-b33c-458d545696f6" jsonBlob := ` { "vpc": ` + vTestJSON + ` } ` mux.HandleFunc(path+"/"+id, func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, http.MethodGet) fmt.Fprint(w, jsonBlob) }) got, _, err := svc.Get(ctx, id) require.NoError(t, err) require.Equal(t, want, got) }
explode_data.jsonl/41239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 53, 4872, 82, 13614, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 1903, 7362, 1669, 2943, 5058, 4872, 82, 198, 26781, 1669, 3521, 85, 17, 5457, 47313, 698, 50780, 1669, 348, 2271, 5261, 198, 15710, 1669, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientConfiguration(t *testing.T) { t.Parallel() t.Run("With empty", func(t *testing.T) { c, err := NewDefaultClient() if err != nil { t.Errorf("Unexpected error: %s", err) } u := c.Transport.(*estransport.Client).URLs()[0].String() if u != defaultURL { t.Errorf("Unexpected URL, want=%s, got=%s", defaultURL, u) } }) t.Run("With URL from Addresses", func(t *testing.T) { c, err := NewClient(Config{Addresses: []string{"http://localhost:8080//"}}) if err != nil { t.Fatalf("Unexpected error: %s", err) } u := c.Transport.(*estransport.Client).URLs()[0].String() if u != "http://localhost:8080" { t.Errorf("Unexpected URL, want=http://localhost:8080, got=%s", u) } }) t.Run("With URL from environment", func(t *testing.T) { os.Setenv("ELASTICSEARCH_URL", "http://example.com") defer func() { os.Setenv("ELASTICSEARCH_URL", "") }() c, err := NewDefaultClient() if err != nil { t.Errorf("Unexpected error: %s", err) } u := c.Transport.(*estransport.Client).URLs()[0].String() if u != "http://example.com" { t.Errorf("Unexpected URL, want=http://example.com, got=%s", u) } }) t.Run("With URL from environment and cfg.Addresses", func(t *testing.T) { os.Setenv("ELASTICSEARCH_URL", "http://example.com") defer func() { os.Setenv("ELASTICSEARCH_URL", "") }() c, err := NewClient(Config{Addresses: []string{"http://localhost:8080//"}}) if err != nil { t.Fatalf("Unexpected error: %s", err) } u := c.Transport.(*estransport.Client).URLs()[0].String() if u != "http://localhost:8080" { t.Errorf("Unexpected URL, want=http://localhost:8080, got=%s", u) } }) t.Run("With URL from environment and cfg.CloudID", func(t *testing.T) { os.Setenv("ELASTICSEARCH_URL", "http://example.com") defer func() { os.Setenv("ELASTICSEARCH_URL", "") }() c, err := NewClient(Config{CloudID: "foo:YmFyLmNsb3VkLmVzLmlvJGFiYzEyMyRkZWY0NTY="}) if err != nil { t.Fatalf("Unexpected error: %s", err) } u := c.Transport.(*estransport.Client).URLs()[0].String() if u != "https://abc123.bar.cloud.es.io" { t.Errorf("Unexpected 
URL, want=https://abc123.bar.cloud.es.io, got=%s", u) } }) t.Run("With cfg.Addresses and cfg.CloudID", func(t *testing.T) { _, err := NewClient(Config{Addresses: []string{"http://localhost:8080//"}, CloudID: "foo:ABC="}) if err == nil { t.Fatalf("Expected error, got: %v", err) } match, _ := regexp.MatchString("both .* are set", err.Error()) if !match { t.Errorf("Expected error when addresses from environment and configuration are used together, got: %v", err) } }) t.Run("With CloudID", func(t *testing.T) { // bar.cloud.es.io$abc123$def456 c, err := NewClient(Config{CloudID: "foo:YmFyLmNsb3VkLmVzLmlvJGFiYzEyMyRkZWY0NTY="}) if err != nil { t.Fatalf("Unexpected error: %s", err) } u := c.Transport.(*estransport.Client).URLs()[0].String() if u != "https://abc123.bar.cloud.es.io" { t.Errorf("Unexpected URL, want=https://abc123.bar.cloud.es.io, got=%s", u) } }) t.Run("With invalid CloudID", func(t *testing.T) { var err error _, err = NewClient(Config{CloudID: "foo:ZZZ==="}) if err == nil { t.Errorf("Expected error for CloudID, got: %v", err) } _, err = NewClient(Config{CloudID: "foo:Zm9v"}) if err == nil { t.Errorf("Expected error for CloudID, got: %v", err) } _, err = NewClient(Config{CloudID: "foo:"}) if err == nil { t.Errorf("Expected error for CloudID, got: %v", err) } }) t.Run("With invalid URL", func(t *testing.T) { u := ":foo" _, err := NewClient(Config{Addresses: []string{u}}) if err == nil { t.Errorf("Expected error for URL %q, got %v", u, err) } }) t.Run("With invalid URL from environment", func(t *testing.T) { os.Setenv("ELASTICSEARCH_URL", ":foobar") defer func() { os.Setenv("ELASTICSEARCH_URL", "") }() c, err := NewDefaultClient() if err == nil { t.Errorf("Expected error, got: %+v", c) } }) }
explode_data.jsonl/24029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1711 }
[ 2830, 3393, 2959, 7688, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 16708, 445, 2354, 4287, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1444, 11, 1848, 1669, 1532, 3675, 2959, 2822, 197, 743, 1848, 961, 2092, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTwoSum(t *testing.T) { var tests = []struct { nums []int target int indices []int }{ {[]int{2, 7, 11, 15}, 9, []int{0, 1}}, {[]int{2, 3, 4, 6}, 6, []int{0, 2}}, {[]int{2, 3, 4, 7}, 8, nil}, {nil, 8, nil}, } for _, tt := range tests { indices := twoSum(tt.nums, tt.target) if reflect.DeepEqual(indices, tt.indices) == false { t.Errorf("twoSum(%v, %v) return %v, want %v", tt.nums, tt.target, indices, tt.indices) } } }
explode_data.jsonl/58633
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 11613, 9190, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 22431, 82, 262, 3056, 396, 198, 197, 28861, 220, 526, 198, 197, 197, 14987, 3056, 396, 198, 197, 59403, 197, 197, 90, 1294, 396, 90, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIsEastAsianLANG(t *testing.T) { env := saveEnv() defer restoreEnv(&env) os.Setenv("LC_ALL", "") os.Setenv("LC_CTYPE", "") testcases := []struct { lcctype string want bool }{ {"ja_JP.UTF-8", true}, {"C", false}, {"POSIX", false}, {"en_US.UTF-8", false}, {"C.UTF-8", false}, } for _, tt := range testcases { os.Setenv("LANG", tt.lcctype) got := IsEastAsian() if got != tt.want { t.Fatalf("IsEastAsian() for LANG=%v should be %v", tt.lcctype, tt.want) } } }
explode_data.jsonl/66385
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 3872, 36340, 67440, 20000, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 3581, 14359, 741, 16867, 14952, 14359, 2099, 3160, 340, 25078, 4202, 3160, 445, 8556, 16269, 497, 14676, 25078, 4202, 3160, 445, 8556, 920, 9502, 497, 85617...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRangeMetaKey(t *testing.T) { testCases := []struct { key, expKey roachpb.RKey }{ { key: roachpb.RKey{}, expKey: roachpb.RKeyMin, }, { key: roachpb.RKey("\x03\x04zonefoo"), expKey: roachpb.RKey("\x02\x04zonefoo"), }, { key: roachpb.RKey("\x02\x04zonefoo"), expKey: roachpb.RKeyMin, }, { key: roachpb.RKey("foo"), expKey: roachpb.RKey("\x03foo"), }, { key: roachpb.RKey("\x03foo"), expKey: roachpb.RKey("\x02foo"), }, { key: roachpb.RKey("\x02foo"), expKey: roachpb.RKeyMin, }, } for i, test := range testCases { result := RangeMetaKey(test.key) if !bytes.Equal(result, test.expKey) { t.Errorf("%d: expected range meta for key %q doesn't match %q (%q)", i, test.key, test.expKey, result) } } }
explode_data.jsonl/56590
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 419 }
[ 2830, 3393, 6046, 12175, 1592, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 23634, 11, 1343, 1592, 926, 610, 16650, 2013, 1592, 198, 197, 59403, 197, 197, 515, 298, 23634, 25, 262, 926, 610, 16650, 2013, 159...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMailMessageAttachmentsEndpoint_GetPrimary(t *testing.T) { var want types.GUID n := &MailMessageAttachments{ID: &want} if got := n.GetPrimary(); !reflect.DeepEqual(*got, want) { t.Errorf("MailMessageAttachmentsEndpoint.GetPrimary() failed, got: %v, want: %v", *got, want) } }
explode_data.jsonl/53375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 16702, 2052, 75740, 27380, 13614, 15972, 1155, 353, 8840, 836, 8, 341, 2405, 1366, 4494, 1224, 6463, 198, 9038, 1669, 609, 16702, 2052, 75740, 90, 915, 25, 609, 52657, 630, 743, 2684, 1669, 308, 2234, 15972, 2129, 753, 34913...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_hasRoleWhenRbacEnabled(t *testing.T) { helmChartParser := NewHelmConfigParser( NewHelmTest(t, helmChartRelativePath, map[string]string{ "traefik.enabled": "true", "traefik.rbac.enabled": "true", }), ) require.True(t, helmChartParser.Contains(SearchResourceOption{ Name: "pega-traefik", Kind: "ClusterRole", })) require.True(t, helmChartParser.Contains(SearchResourceOption{ Name: "pega-traefik", Kind: "ServiceAccount", })) require.True(t, helmChartParser.Contains(SearchResourceOption{ Name: "pega-traefik", Kind: "ClusterRoleBinding", })) }
explode_data.jsonl/14600
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 21778, 9030, 4498, 49, 55877, 5462, 1155, 353, 8840, 836, 8, 341, 9598, 23162, 14488, 6570, 1669, 1532, 39, 23162, 2648, 6570, 1006, 197, 197, 3564, 39, 23162, 2271, 1155, 11, 33765, 14488, 28442, 1820, 11, 2415, 14032, 3095...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTraceparent_SetTraceFlags(t *testing.T) { type fields struct { traceFlags byte } type args struct { flags byte } tests := []struct { name string fields fields args args want byte }{ {"ok", fields{0x00}, args{0x01}, 0x01}, {"ok", fields{0xff}, args{0x00}, 0x00}, {"ok", fields{0x00}, args{0xfa}, 0xfa}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tp := &Traceparent{ traceFlags: tt.fields.traceFlags, } tp.SetTraceFlags(tt.args.flags) if tp.traceFlags != tt.want { t.Errorf("Traceparent.SetTraceFlags() = %v, want %v", tp.traceFlags, tt.want) } }) } }
explode_data.jsonl/35205
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 6550, 3765, 14812, 6550, 9195, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 65058, 9195, 4922, 198, 197, 532, 13158, 2827, 2036, 341, 197, 59516, 4922, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetRawData(t *testing.T) { database := testdb{ inventory: map[string]interface{}{ "key_cache_block_size": 10, "key_buffer_size": 10, "version_comment": "mysql", "version": "5.4.3", }, metrics: map[string]interface{}{}, replica: map[string]interface{}{}, } inventory, metrics, err := getRawData(database) if err != nil { t.Error() } if metrics == nil { t.Error() } if inventory == nil { t.Error() } }
explode_data.jsonl/13418
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 1949, 20015, 1043, 1155, 353, 8840, 836, 8, 341, 2698, 2211, 1669, 1273, 1999, 515, 197, 17430, 7223, 25, 2415, 14032, 31344, 67066, 298, 197, 1, 792, 11529, 7113, 2368, 788, 220, 16, 15, 345, 298, 197, 1, 792, 7776, 236...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGC_TrackSwitchedTags_DoesNothingIfTriggerDisabled(t *testing.T) { require.NoError(t, testutil.TruncateAllTables(suite.db)) enable, err := testutil.GCTrackSwitchedTagsTrigger.Disable(suite.db) require.NoError(t, err) defer enable() // disable other triggers that also insert on gc_manifest_review_queue so that they don't interfere with this test enable, err = testutil.GCTrackManifestUploadsTrigger.Disable(suite.db) require.NoError(t, err) defer enable() // create repo r := randomRepository(t) rs := datastore.NewRepositoryStore(suite.db) r, err = rs.CreateByPath(suite.ctx, r.Path) require.NoError(t, err) // create manifest ms := datastore.NewManifestStore(suite.db) m := randomManifest(t, r, nil) err = ms.Create(suite.ctx, m) require.NoError(t, err) // tag manifest ts := datastore.NewTagStore(suite.db) err = ts.CreateOrUpdate(suite.ctx, &models.Tag{ Name: "latest", NamespaceID: r.NamespaceID, RepositoryID: r.ID, ManifestID: m.ID, }) require.NoError(t, err) // create another manifest m2 := randomManifest(t, r, nil) err = ms.Create(suite.ctx, m2) require.NoError(t, err) // switch tag to new manifest err = ts.CreateOrUpdate(suite.ctx, &models.Tag{ Name: "latest", NamespaceID: r.NamespaceID, RepositoryID: r.ID, ManifestID: m2.ID, }) require.NoError(t, err) // check that no review records were created mrs := datastore.NewGCManifestTaskStore(suite.db) count, err := mrs.Count(suite.ctx) require.NoError(t, err) require.Zero(t, count) }
explode_data.jsonl/48575
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 609 }
[ 2830, 3393, 22863, 21038, 473, 16837, 291, 15930, 1557, 7072, 23780, 2679, 17939, 25907, 1155, 353, 8840, 836, 8, 341, 17957, 35699, 1155, 11, 1273, 1314, 8240, 26900, 2403, 21670, 89516, 7076, 4390, 197, 12552, 11, 1848, 1669, 1273, 1314...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTodo_TableName(t *testing.T) { // way 1 todo := Todo{} assert.Equal(t, "todos", todo.TableName()) // way 2 todoPtr := &Todo{} assert.Equal(t, "todos", todoPtr.TableName()) }
explode_data.jsonl/61583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 24176, 42544, 675, 1155, 353, 8840, 836, 8, 341, 197, 322, 1616, 220, 16, 198, 3244, 6004, 1669, 25404, 16094, 6948, 12808, 1155, 11, 330, 49188, 497, 11804, 86783, 12367, 197, 322, 1616, 220, 17, 198, 3244, 6004, 5348, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPipePair(t *testing.T) { cmd := NewCommand(&oldcmds.Command{Type: "string"}) req, err := cmds.NewRequest(context.TODO(), nil, nil, nil, nil, cmd) if err != nil { t.Fatal(err) } r, w := io.Pipe() re := cmds.NewWriterResponseEmitter(w, req, cmds.Encoders[cmds.JSON]) res := cmds.NewReaderResponse(r, cmds.JSON, req) wait := make(chan interface{}) expect := "abc" go func() { err := re.Emit(expect) if err != nil { t.Fatal(err) } err = re.Close() if err != nil { t.Fatal(err) } close(wait) }() v, err := res.Next() if err != nil { t.Fatal(err) } str, ok := v.(*string) if !ok { t.Fatalf("expected type %T but got %T", expect, v) } if *str != expect { t.Fatalf("expected value %#v but got %#v", expect, v) } _, err = res.Next() if err != io.EOF { t.Fatal("expected io.EOF, got:", err) } <-wait }
explode_data.jsonl/71794
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 398 }
[ 2830, 3393, 34077, 12443, 1155, 353, 8840, 836, 8, 341, 25920, 1669, 1532, 4062, 2099, 813, 92407, 12714, 90, 929, 25, 330, 917, 1, 8824, 24395, 11, 1848, 1669, 47657, 75274, 5378, 90988, 1507, 2092, 11, 2092, 11, 2092, 11, 2092, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMessageSend(t *testing.T) { t.Parallel() d := th.NewDaemon( t, th.DefaultAddress(fixtures.TestAddresses[0]), th.KeyFile(fixtures.KeyFilePaths()[1]), // must include same-index KeyFilePath when configuring with a TestMiner. th.WithMiner(fixtures.TestMiners[0]), th.KeyFile(fixtures.KeyFilePaths()[0]), ).Start() defer d.ShutdownSuccess() d.RunSuccess("mining", "once") from := d.GetDefaultAddress() // this should = fixtures.TestAddresses[0] t.Log("[failure] invalid target") d.RunFail( address.ErrUnknownNetwork.Error(), "message", "send", "--from", from, "--gas-price", "0", "--gas-limit", "300", "--value=10", "xyz", ) t.Log("[success] with from") d.RunSuccess("message", "send", "--from", from, "--gas-price", "0", "--gas-limit", "300", fixtures.TestAddresses[3], ) t.Log("[success] with from and value") d.RunSuccess("message", "send", "--from", from, "--gas-price", "0", "--gas-limit", "300", "--value=10", fixtures.TestAddresses[3], ) }
explode_data.jsonl/74070
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 421 }
[ 2830, 3393, 2052, 11505, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2698, 1669, 270, 7121, 89177, 1006, 197, 3244, 345, 197, 70479, 13275, 4286, 955, 941, 18513, 8787, 52290, 58, 15, 17036, 197, 70479, 9610, 1703, 955, 941, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLocale(t *testing.T) { trans := New() expected := "kam" if trans.Locale() != expected { t.Errorf("Expected '%s' Got '%s'", expected, trans.Locale()) } }
explode_data.jsonl/26541
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 19231, 1155, 353, 8840, 836, 8, 1476, 72453, 1669, 1532, 741, 42400, 1669, 330, 74, 309, 1837, 743, 1356, 59094, 368, 961, 3601, 341, 197, 3244, 13080, 445, 18896, 7677, 82, 6, 24528, 7677, 82, 22772, 3601, 11, 1356, 59094...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestPublishBlobsDownloadFail(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdx := bm.exchange.(*dataexchangemocks.Plugin) blobHash := fftypes.NewRandB32() dataID := fftypes.NewUUID() ctx := context.Background() mdx.On("DownloadBLOB", ctx, "blob/1").Return(nil, fmt.Errorf("pop")) _, err := bm.publishBlobsAndSend(ctx, &fftypes.Message{}, []*fftypes.DataAndBlob{ { Data: &fftypes.Data{ ID: dataID, Blob: &fftypes.BlobRef{ Hash: blobHash, }, }, Blob: &fftypes.Blob{ Hash: blobHash, PayloadRef: "blob/1", }, }, }, false) assert.Regexp(t, "FF10240", err) mdi.AssertExpectations(t) }
explode_data.jsonl/12110
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 332 }
[ 2830, 3393, 50145, 33, 68164, 11377, 19524, 1155, 353, 8840, 836, 8, 341, 2233, 76, 11, 9121, 1669, 501, 2271, 43362, 1155, 340, 16867, 9121, 741, 2109, 8579, 1669, 34868, 15062, 41399, 67, 2096, 300, 336, 25183, 64378, 340, 2109, 12719...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAfterRetryWithContext(t *testing.T) { c := awstesting.NewClient() req := c.NewRequest(&request.Operation{Name: "Operation"}, nil, nil) ctx := &awstesting.FakeContext{DoneCh: make(chan struct{})} req.SetContext(ctx) req.Error = fmt.Errorf("some error") req.Retryable = aws.Bool(true) req.HTTPResponse = &http.Response{ StatusCode: 500, } corehandlers.AfterRetryHandler.Fn(req) if req.Error != nil { t.Fatalf("expect no error, got %v", req.Error) } if e, a := 1, req.RetryCount; e != a { t.Errorf("expect retry count to be %d, got %d", e, a) } }
explode_data.jsonl/44096
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 6025, 51560, 91101, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1360, 267, 59855, 7121, 2959, 2822, 24395, 1669, 272, 75274, 2099, 2035, 56297, 63121, 25, 330, 8432, 14345, 2092, 11, 2092, 692, 20985, 1669, 609, 672, 267, 5985...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAccPermissionsJobs(t *testing.T) { permissionsTestHelper(t, func(permissionsAPI PermissionsAPI, user, group string, ef func(string) PermissionsEntity) { ctx := context.Background() jobsAPI := compute.NewJobsAPI(ctx, permissionsAPI.client) job, err := jobsAPI.Create(compute.JobSettings{ NewCluster: &compute.Cluster{ NumWorkers: 2, SparkVersion: "6.4.x-scala2.11", NodeTypeID: compute.NewClustersAPI( ctx, permissionsAPI.client).GetSmallestNodeType( compute.NodeTypeRequest{ LocalDisk: true, }), }, NotebookTask: &compute.NotebookTask{ NotebookPath: "/Production/Featurize", }, Name: group, }) require.NoError(t, err) defer func() { assert.NoError(t, jobsAPI.Delete(job.ID())) }() objectID := fmt.Sprintf("/jobs/%s", job.ID()) require.NoError(t, permissionsAPI.Update(objectID, AccessControlChangeList{ AccessControlList: []AccessControlChange{ { UserName: user, PermissionLevel: "IS_OWNER", }, { GroupName: group, PermissionLevel: "CAN_MANAGE_RUN", }, }, })) entity := ef(objectID) assert.Equal(t, "job", entity.ObjectType) assert.Len(t, entity.AccessControlList, 2) require.NoError(t, permissionsAPI.Delete(objectID)) entity = ef(objectID) assert.Len(t, entity.AccessControlList, 0) }) }
explode_data.jsonl/50890
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 574 }
[ 2830, 3393, 14603, 23851, 40667, 1155, 353, 8840, 836, 8, 341, 197, 29900, 2271, 5511, 1155, 11, 2915, 7, 29900, 7082, 53357, 7082, 11, 1196, 11, 1874, 914, 345, 197, 197, 823, 2915, 3609, 8, 53357, 3030, 8, 341, 197, 20985, 1669, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnixDomain(t *testing.T) { assert := assert.New(t) // Normal addr := fmt.Sprintf("127.0.0.1:%d", consts.TEST_UNIX_DOMAIN_FRP_PORT) res, err := util.SendTcpMsg(addr, consts.TEST_UNIX_DOMAIN_STR) if assert.NoError(err) { assert.Equal(consts.TEST_UNIX_DOMAIN_STR, res) } }
explode_data.jsonl/79662
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 55832, 13636, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 197, 322, 18437, 198, 53183, 1669, 8879, 17305, 445, 16, 17, 22, 13, 15, 13, 15, 13, 16, 7533, 67, 497, 95432, 73501, 82754, 33729, 14220, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWrap(t *testing.T) { f := Yaml{} yaml := ` apiVersion: batch/v1 kind: Job metadata: name: pi spec: template: metadata: name: pi spec: containers: - name: pi image: perl command: ["perl", "-Mbignum=bpi", "-wle", "print bpi(2000)"] restartPolicy: Never` wrapped, err := f.Wrap(yaml) if err != nil { t.Fatal(err) } expected := `apiVersion: appcontroller.k8s/v1alpha1 kind: Definition metadata: name: job-pi job: apiVersion: batch/v1 kind: Job metadata: name: pi spec: template: metadata: name: pi spec: containers: - name: pi image: perl command: ["perl", "-Mbignum=bpi", "-wle", "print bpi(2000)"] restartPolicy: Never` if wrapped != expected { t.Errorf("wrapped doesn't match expected output\nExpected:\n%s\nAactual:\n%s", expected, wrapped) } }
explode_data.jsonl/34053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 440 }
[ 2830, 3393, 26787, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 809, 9467, 16094, 14522, 9467, 1669, 1565, 220, 6330, 5637, 25, 7162, 5457, 16, 198, 220, 3093, 25, 12011, 198, 220, 11160, 510, 262, 829, 25, 8938, 198, 220, 1398, 510, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParseClassDeclarationStatements(t *testing.T) { checkParseStatement( t, ` class Point { public x: int public y: int } `, "(class-declaration Point ((class-field public x int) (class-field public y int)))", ) /* checkParseStatement( t, ` class Point { public x: int public y: int public func f(self, x: int) { x } } `, "(class-declaration Point ((class-field public x int) (class-field public y int) (class-method public f ((function-param x int)) void (block (expression-statement x)))))", ) */ }
explode_data.jsonl/33517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 14463, 1957, 24489, 93122, 1155, 353, 8840, 836, 8, 341, 25157, 14463, 8636, 1006, 197, 3244, 345, 197, 197, 3989, 7847, 536, 5126, 341, 298, 1219, 856, 25, 526, 198, 298, 1219, 379, 25, 526, 198, 7847, 456, 197, 197, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSmallNegativeInt(t *testing.T) { i := int16(-1) v := ValueOf(i) if v.Int() != -1 { t.Errorf("int16(-1).Int() returned %v", v.Int()) } }
explode_data.jsonl/29575
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 25307, 38489, 1072, 1155, 353, 8840, 836, 8, 341, 8230, 1669, 526, 16, 21, 4080, 16, 340, 5195, 1669, 5162, 2124, 1956, 340, 743, 348, 7371, 368, 961, 481, 16, 341, 197, 3244, 13080, 445, 396, 16, 21, 4080, 16, 568, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func Test_profileName(t *testing.T) { tests := []struct { profileType appstoreconnect.ProfileType bundleID string want string wantErr bool }{ { profileType: appstoreconnect.IOSAppDevelopment, bundleID: "io.bitrise.app", want: "Bitrise iOS development - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.IOSAppStore, bundleID: "io.bitrise.app", want: "Bitrise iOS app-store - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.IOSAppAdHoc, bundleID: "io.bitrise.app", want: "Bitrise iOS ad-hoc - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.IOSAppInHouse, bundleID: "io.bitrise.app", want: "Bitrise iOS enterprise - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.TvOSAppDevelopment, bundleID: "io.bitrise.app", want: "Bitrise tvOS development - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.TvOSAppStore, bundleID: "io.bitrise.app", want: "Bitrise tvOS app-store - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.TvOSAppAdHoc, bundleID: "io.bitrise.app", want: "Bitrise tvOS ad-hoc - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.TvOSAppInHouse, bundleID: "io.bitrise.app", want: "Bitrise tvOS enterprise - (io.bitrise.app)", wantErr: false, }, { profileType: appstoreconnect.ProfileType("unknown"), bundleID: "io.bitrise.app", want: "", wantErr: true, }, } for _, tt := range tests { t.Run(string(tt.profileType), func(t *testing.T) { got, err := ProfileName(tt.profileType, tt.bundleID) if (err != nil) != tt.wantErr { t.Errorf("profileName() error = %v, wantErr %v", err, tt.wantErr) return } if got != tt.want { t.Errorf("profileName() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/59051
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1011 }
[ 2830, 3393, 13789, 675, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 197, 5365, 929, 906, 4314, 6459, 59872, 929, 198, 197, 2233, 4206, 915, 262, 914, 198, 197, 50780, 286, 914, 198, 197, 50780, 7747, 257, 1807, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNormalizeGDPR(t *testing.T) { tests := []struct { description string gdprDefaultValue string giveSignal Signal wantSignal Signal }{ { description: "Don't normalize - Signal No and gdprDefaultValue 1", gdprDefaultValue: "1", giveSignal: SignalNo, wantSignal: SignalNo, }, { description: "Don't normalize - Signal No and gdprDefaultValue 0", gdprDefaultValue: "0", giveSignal: SignalNo, wantSignal: SignalNo, }, { description: "Don't normalize - Signal Yes and gdprDefaultValue 1", gdprDefaultValue: "1", giveSignal: SignalYes, wantSignal: SignalYes, }, { description: "Don't normalize - Signal Yes and gdprDefaultValue 0", gdprDefaultValue: "0", giveSignal: SignalYes, wantSignal: SignalYes, }, { description: "Normalize - Signal Ambiguous and gdprDefaultValue 1", gdprDefaultValue: "1", giveSignal: SignalAmbiguous, wantSignal: SignalYes, }, { description: "Normalize - Signal Ambiguous and gdprDefaultValue 0", gdprDefaultValue: "0", giveSignal: SignalAmbiguous, wantSignal: SignalNo, }, } for _, tt := range tests { perms := permissionsImpl{ cfg: config.GDPR{ DefaultValue: tt.gdprDefaultValue, }, } if tt.gdprDefaultValue == "0" { perms.gdprDefaultValue = SignalNo } else { perms.gdprDefaultValue = SignalYes } normalizedSignal := perms.normalizeGDPR(tt.giveSignal) assert.Equal(t, tt.wantSignal, normalizedSignal, tt.description) } }
explode_data.jsonl/31100
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 720 }
[ 2830, 3393, 87824, 40849, 6480, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 414, 914, 198, 197, 3174, 67, 649, 41533, 914, 198, 197, 3174, 533, 26810, 981, 27230, 198, 197, 50780, 26810, 981, 27230, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestUpdateQCHigh exercises the HotStuff UpdateQCHigh rule: a QC for a newer
// block must advance both the leaf block and qcHigh, while a QC for an older
// block than the current qcHigh must be rejected.
func TestUpdateQCHigh(t *testing.T) {
	key, _ := GeneratePrivateKey()
	hs := New(NewConfig(1, key, nil))
	// Extend the chain one block past genesis and certify it.
	block1 := CreateLeaf(hs.genesis, []Command{Command("command1")}, hs.qcHigh, hs.genesis.Height+1)
	hs.Blocks.Put(block1)
	qc1 := CreateQuorumCert(block1)
	if hs.UpdateQCHigh(qc1) {
		// Accepting qc1 must move the leaf to block1 ...
		if hs.bLeaf.Hash() != block1.Hash() {
			t.Error("UpdateQCHigh failed to update the leaf block")
		}
		// ... and replace qcHigh with qc1.
		if !bytes.Equal(hs.qcHigh.ToBytes(), qc1.ToBytes()) {
			t.Error("UpdateQCHigh failed to update qcHigh")
		}
	} else {
		t.Error("UpdateQCHigh failed to complete")
	}
	// Advance once more so qc1 becomes stale.
	block2 := CreateLeaf(block1, []Command{Command("command2")}, qc1, block1.Height+1)
	hs.Blocks.Put(block2)
	qc2 := CreateQuorumCert(block2)
	hs.UpdateQCHigh(qc2)
	// Re-submitting the now-outdated qc1 must be a no-op (return false).
	if hs.UpdateQCHigh(qc1) {
		t.Error("UpdateQCHigh updated with outdated state given as input.")
	}
}
explode_data.jsonl/1119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 4289, 48, 2149, 1090, 1155, 353, 8840, 836, 8, 341, 23634, 11, 716, 1669, 19813, 75981, 741, 81692, 1669, 1532, 35063, 2648, 7, 16, 11, 1376, 11, 2092, 1171, 47996, 16, 1669, 4230, 31461, 3203, 82, 22822, 13774, 11, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFinalizePulse_NotNextPulse(t *testing.T) { ctx := inslogger.TestContext(t) testPulse := insolar.PulseNumber(pulse.MinTimePulse) jk := executor.NewJetKeeperMock(t) jk.HasAllJetConfirmsMock.Return(true) jk.TopSyncPulseMock.Return(testPulse) pc := insolarPulse.NewCalculatorMock(t) pc.ForwardsMock.Return(insolar.Pulse{PulseNumber: testPulse}, nil) executor.FinalizePulse(ctx, pc, nil, jk, nil, testPulse+10, testBadgerGCInfo()) }
explode_data.jsonl/68988
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 185 }
[ 2830, 3393, 19357, 551, 47, 17217, 60816, 5847, 47, 17217, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1640, 9786, 8787, 1972, 1155, 692, 18185, 47, 17217, 1669, 1640, 7417, 1069, 17217, 2833, 1295, 17217, 17070, 1462, 47, 17217, 692, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunAsAnyGenerateSingle(t *testing.T) { s, err := NewRunAsAny() if err != nil { t.Fatalf("unexpected error initializing NewRunAsAny %v", err) } group, err := s.GenerateSingle(nil) if group != nil { t.Errorf("expected empty but got %v", group) } if err != nil { t.Errorf("unexpected error generating groups: %v", err) } }
explode_data.jsonl/34882
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 6727, 2121, 8610, 31115, 10888, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 1532, 6727, 2121, 8610, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 53859, 1465, 57060, 1532, 6727, 2121, 8610, 1018, 85, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestQueryWithRowFiltering runs a battery of WHERE-clause queries against a
// freshly seeded table: constant predicates (false, true, comparisons),
// type-mismatch predicates that must fail with ErrNotComparableValues,
// column-based filters, and NULL-comparison semantics.
func TestQueryWithRowFiltering(t *testing.T) {
	catalogStore, err := store.Open("catalog_where", store.DefaultOptions())
	require.NoError(t, err)
	defer os.RemoveAll("catalog_where")
	dataStore, err := store.Open("sqldata_where", store.DefaultOptions())
	require.NoError(t, err)
	defer os.RemoveAll("sqldata_where")
	engine, err := NewEngine(catalogStore, dataStore, DefaultOptions().WithPrefix(sqlPrefix))
	require.NoError(t, err)
	_, err = engine.ExecStmt("CREATE DATABASE db1", nil, true)
	require.NoError(t, err)
	err = engine.UseDatabase("db1")
	require.NoError(t, err)
	_, err = engine.ExecStmt("CREATE TABLE table1 (id INTEGER, title VARCHAR, active BOOLEAN, payload BLOB, PRIMARY KEY id)", nil, true)
	require.NoError(t, err)
	// Seed rowCount rows; active alternates true/false with even ids active.
	rowCount := 10
	for i := 0; i < rowCount; i++ {
		encPayload := hex.EncodeToString([]byte(fmt.Sprintf("blob%d", i)))
		_, err = engine.ExecStmt(fmt.Sprintf(` UPSERT INTO table1 (id, title, active, payload) VALUES (%d, 'title%d', %v, x'%s') `, i, i, i%2 == 0, encPayload), nil, true)
		require.NoError(t, err)
	}
	// WHERE false: no rows at all.
	r, err := engine.QueryStmt("SELECT id, title, active FROM table1 WHERE false", nil, true)
	require.NoError(t, err)
	_, err = r.Read()
	require.Equal(t, ErrNoMoreRows, err)
	err = r.Close()
	require.NoError(t, err)
	// WHERE false OR true: every row.
	r, err = engine.QueryStmt("SELECT id, title, active FROM table1 WHERE false OR true", nil, true)
	require.NoError(t, err)
	for i := 0; i < rowCount; i++ {
		_, err := r.Read()
		require.NoError(t, err)
	}
	err = r.Close()
	require.NoError(t, err)
	// Constant comparison that is always true: every row.
	r, err = engine.QueryStmt("SELECT id, title, active FROM table1 WHERE 1 < 2", nil, true)
	require.NoError(t, err)
	for i := 0; i < rowCount; i++ {
		_, err := r.Read()
		require.NoError(t, err)
	}
	err = r.Close()
	require.NoError(t, err)
	// Constant comparison that is always false: no rows.
	r, err = engine.QueryStmt("SELECT id, title, active FROM table1 WHERE 1 >= 2", nil, true)
	require.NoError(t, err)
	_, err = r.Read()
	require.Equal(t, ErrNoMoreRows, err)
	err = r.Close()
	require.NoError(t, err)
	// Integer compared to boolean: surfaced as ErrNotComparableValues at read time.
	r, err = engine.QueryStmt("SELECT id, title, active FROM table1 WHERE 1 = true", nil, true)
	require.NoError(t, err)
	_, err = r.Read()
	require.Equal(t, ErrNotComparableValues, err)
	err = r.Close()
	require.NoError(t, err)
	// NOT active matches the odd-id half of the seeded rows.
	r, err = engine.QueryStmt("SELECT id, title, active FROM table1 WHERE NOT table1.active", nil, true)
	require.NoError(t, err)
	for i := 0; i < rowCount/2; i++ {
		_, err := r.Read()
		require.NoError(t, err)
	}
	_, err = r.Read()
	require.Equal(t, ErrNoMoreRows, err)
	err = r.Close()
	require.NoError(t, err)
	// id > 4 also matches exactly half of the ten seeded rows (ids 5..9).
	r, err = engine.QueryStmt("SELECT id, title, active FROM table1 WHERE table1.id > 4", nil, true)
	require.NoError(t, err)
	for i := 0; i < rowCount/2; i++ {
		_, err := r.Read()
		require.NoError(t, err)
	}
	_, err = r.Read()
	require.Equal(t, ErrNoMoreRows, err)
	err = r.Close()
	require.NoError(t, err)
	// Insert a row that leaves active and payload unset (NULL).
	_, err = engine.ExecStmt(fmt.Sprintf("UPSERT INTO table1 (id, title) VALUES (%d, 'title%d')", rowCount, rowCount), nil, true)
	require.NoError(t, err)
	// NULL = NULL comparisons against unset columns select that row.
	r, err = engine.QueryStmt("SELECT id, title FROM table1 WHERE active = null AND payload = null", nil, true)
	require.NoError(t, err)
	_, err = r.Read()
	require.NoError(t, err)
	err = r.Close()
	require.NoError(t, err)
	// Comparing two columns of different types (BOOLEAN vs BLOB) must fail.
	r, err = engine.QueryStmt("SELECT id, title FROM table1 WHERE active = null AND payload = null AND active = payload", nil, true)
	require.NoError(t, err)
	_, err = r.Read()
	require.Equal(t, ErrNotComparableValues, err)
	err = r.Close()
	require.NoError(t, err)
	err = engine.Close()
	require.NoError(t, err)
}
explode_data.jsonl/64074
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1395 }
[ 2830, 3393, 2859, 2354, 3102, 5632, 287, 1155, 353, 8840, 836, 8, 341, 1444, 7750, 6093, 11, 1848, 1669, 3553, 12953, 445, 26539, 36814, 497, 3553, 13275, 3798, 2398, 17957, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 445, 26539, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestNewFactory covers every construction path of the page factory by
// patching the package-level listDirFunc/mapFileFunc hooks: directory-listing
// failure, malformed page file name, mmap failure, and the happy path where
// an existing page file is reopened and indexed by its sequence number.
func TestNewFactory(t *testing.T) {
	defer func() {
		_ = fileutil.RemoveDir(testPath)
		// Restore the patched package-level function hooks for other tests.
		listDirFunc = fileutil.ListDir
		mapFileFunc = fileutil.RWMap
	}()
	// case 1: list page files err
	listDirFunc = func(path string) ([]string, error) {
		return nil, fmt.Errorf("err")
	}
	fct, err := NewFactory(testPath, 128)
	assert.Error(t, err)
	assert.Nil(t, fct)
	// case 2: list page files parse file sequence err
	listDirFunc = func(path string) ([]string, error) {
		return []string{"a.bat"}, nil
	}
	fct, err = NewFactory(testPath, 128)
	assert.Error(t, err)
	assert.Nil(t, fct)
	// case 3: create page err
	listDirFunc = func(path string) ([]string, error) {
		return []string{"10.bat"}, nil
	}
	mapFileFunc = func(filePath string, size int) ([]byte, error) {
		return nil, fmt.Errorf("err")
	}
	fct, err = NewFactory(testPath, 128)
	assert.Error(t, err)
	assert.Nil(t, fct)
	// case 4: reopen page file
	listDirFunc = func(path string) ([]string, error) {
		return []string{"10.bat"}, nil
	}
	mapFileFunc = fileutil.RWMap
	fct, err = NewFactory(testPath, 128)
	assert.NoError(t, err)
	assert.NotNil(t, fct)
	fct1 := fct.(*factory)
	// The reopened page must be registered under its file sequence number (10).
	page, ok := fct1.pages[10]
	assert.True(t, ok)
	assert.NotNil(t, page)
}
explode_data.jsonl/53070
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 493 }
[ 2830, 3393, 3564, 4153, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 197, 62, 284, 1034, 1314, 13270, 6184, 8623, 1820, 340, 197, 14440, 6184, 9626, 284, 1034, 1314, 5814, 6184, 198, 197, 19567, 1703, 9626, 284, 1034, 131...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSocketWriter_Write_err forces a write failure by closing both ends of
// an established TCP connection and checks that Write reports the error and
// drops the dead connection (sw.Conn becomes nil).
func TestSocketWriter_Write_err(t *testing.T) {
	// Port 0 lets the OS pick a free port for the throwaway listener.
	listener, err := net.Listen("tcp", "127.0.0.1:0")
	require.NoError(t, err)
	sw := newSocketWriter()
	sw.Address = "tcp://" + listener.Addr().String()
	err = sw.Connect()
	require.NoError(t, err)
	// Shrink the kernel buffers so the failed write is detected promptly.
	sw.Conn.(*net.TCPConn).SetReadBuffer(256)
	lconn, err := listener.Accept()
	require.NoError(t, err)
	lconn.(*net.TCPConn).SetWriteBuffer(256)
	metrics := []telegraf.Metric{testutil.TestMetric(1, "testerr")}
	// close the socket to generate an error
	lconn.Close()
	sw.Conn.Close()
	err = sw.Write(metrics)
	require.Error(t, err)
	// The writer must discard the broken connection so the next Write reconnects.
	assert.Nil(t, sw.Conn)
}
explode_data.jsonl/34021
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 10286, 6492, 31825, 9266, 1155, 353, 8840, 836, 8, 341, 14440, 798, 11, 1848, 1669, 4179, 68334, 445, 27161, 497, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 15, 1138, 17957, 35699, 1155, 11, 1848, 692, 77295, 1669, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWhereInt(t *testing.T) { v := &Value{data: []int{int(1), int(1), int(1), int(1), int(1), int(1)}} selected := v.WhereInt(func(i int, val int) bool { return i%2 == 0 }).MustIntSlice() assert.Equal(t, 3, len(selected)) }
explode_data.jsonl/23420
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 9064, 1072, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 3056, 396, 90, 396, 7, 16, 701, 526, 7, 16, 701, 526, 7, 16, 701, 526, 7, 16, 701, 526, 7, 16, 701, 526, 7, 16, 9139, 630, 70631, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListAuthChecker_String(t *testing.T) { defer func() { if r := recover(); r != nil { t.Fatalf("Panic detected: %v", r) } }() options := DefaultListAuthCheckerOptions() options.AuthMode = AuthDenylist c := NewListAuthChecker(options) c.Set("1", "2", "3") // Make sure it doesn't crash _ = c.String() c.SetMode(AuthAllowlist) // Make sure it doesn't crash _ = c.String() }
explode_data.jsonl/38961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 852, 5087, 35188, 31777, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 743, 435, 1669, 11731, 2129, 435, 961, 2092, 341, 298, 3244, 30762, 445, 47, 31270, 16507, 25, 1018, 85, 497, 435, 340, 197, 197, 532, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTunnelServerEndpointEds registers a tunnel-capable (H2) endpoint in the
// fake discovery server and verifies EDS output for two proxies: one that
// requests H2 tunneling (gets the tunnel port 15009) and one that does not
// (gets the plain service port 80).
func TestTunnelServerEndpointEds(t *testing.T) {
	s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{})
	s.Discovery.MemRegistry.AddHTTPService(edsIncSvc, edsIncVip, 8080)
	s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "", []*model.IstioEndpoint{
		{
			Address:         "127.0.0.1",
			ServicePortName: "http-main",
			EndpointPort:    80,
			// Labels: map[string]string{"version": version},
			ServiceAccount: "hello-sa",
			TunnelAbility:  networking.MakeTunnelAbility(networking.H2Tunnel),
		},
	})
	t.Run("TestClientWantsTunnelEndpoints", func(t *testing.T) {
		t.Helper()
		// Proxy metadata opts into H2 tunneling.
		adscConn1 := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.10"}, Metadata: &model.NodeMetadata{
			ProxyConfig: &model.NodeMetaProxyConfig{
				ProxyMetadata: map[string]string{
					"tunnel": networking.H2TunnelTypeName,
				},
			},
		}}, nil, watchAll)
		testTunnelEndpoints("127.0.0.1", 15009, adscConn1, t)
	})
	t.Run("TestClientWantsNoTunnelEndpoints", func(t *testing.T) {
		t.Helper()
		// No tunnel metadata: the proxy should see the untunneled port.
		adscConn2 := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.11"}, Metadata: &model.NodeMetadata{
			ProxyConfig: &model.NodeMetaProxyConfig{},
		}}, nil, watchAll)
		testTunnelEndpoints("127.0.0.1", 80, adscConn2, t)
	})
}
explode_data.jsonl/7947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 548 }
[ 2830, 3393, 51, 40292, 5475, 27380, 2715, 82, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 856, 5356, 7121, 52317, 67400, 5475, 1155, 11, 856, 5356, 991, 726, 3798, 37790, 1903, 10166, 7449, 1321, 336, 15603, 1904, 9230, 1860, 7, 6767, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestList exercises the KV-store-backed List: Push/Len/Get round-trips,
// in-place Set, Delete leaving Len unchanged (lazy deletion), and Iterate
// for both reading and mutating elements.
func TestList(t *testing.T) {
	key := sdk.NewKVStoreKey("test")
	ctx, cdc := defaultComponents(key)
	store := ctx.KVStore(key)
	lm := NewList(cdc, store)
	val := S{1, true}
	var res S
	// Push the first element and read it back.
	lm.Push(val)
	require.Equal(t, uint64(1), lm.Len())
	lm.Get(uint64(0), &res)
	require.Equal(t, val, res)
	// Overwrite index 0 in place.
	val = S{2, false}
	lm.Set(uint64(0), val)
	lm.Get(uint64(0), &res)
	require.Equal(t, val, res)
	// Push a second element.
	val = S{100, false}
	lm.Push(val)
	require.Equal(t, uint64(2), lm.Len())
	lm.Get(uint64(1), &res)
	require.Equal(t, val, res)
	// Delete index 1; Len stays 2 because indices are not compacted.
	lm.Delete(uint64(1))
	require.Equal(t, uint64(2), lm.Len())
	// Iteration must skip the deleted index 1.
	lm.Iterate(&res, func(index uint64) (brk bool) {
		var temp S
		lm.Get(index, &temp)
		require.Equal(t, temp, res)
		require.True(t, index != 1)
		return
	})
	// Iterate may also mutate: bump I and flip B on every remaining element.
	lm.Iterate(&res, func(index uint64) (brk bool) {
		lm.Set(index, S{res.I + 1, !res.B})
		return
	})
	lm.Get(uint64(0), &res)
	require.Equal(t, S{3, true}, res)
}
explode_data.jsonl/7809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 852, 1155, 353, 8840, 836, 8, 341, 23634, 1669, 45402, 7121, 82707, 6093, 1592, 445, 1944, 1138, 20985, 11, 272, 7628, 1669, 1638, 10443, 4857, 340, 57279, 1669, 5635, 11352, 53, 6093, 4857, 340, 8810, 76, 1669, 1532, 852, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChannelConfig(t *testing.T) { cc, err := NewChannelConfig(&cb.ConfigGroup{ Groups: map[string]*cb.ConfigGroup{ "UnknownGroupKey": {}, }, }) assert.Error(t, err) assert.Nil(t, cc) }
explode_data.jsonl/30621
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 9629, 2648, 1155, 353, 8840, 836, 8, 341, 63517, 11, 1848, 1669, 1532, 9629, 2648, 2099, 7221, 10753, 2808, 515, 197, 197, 22173, 25, 2415, 14032, 8465, 7221, 10753, 2808, 515, 298, 197, 1, 13790, 2808, 1592, 788, 14573, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestCalculateHeadersToPropagate(t *testing.T) { for i, tc := range []struct { cfg [][]string claims map[string]interface{} expected map[string]string }{ { cfg: [][]string{{"a", "x-a"}, {"b", "x-b"}, {"c", "x-c"}, {"d", "x-d"}}, claims: map[string]interface{}{ "a": 1, "b": "foo", "c": []interface{}{"one", "two"}, "d": map[string]interface{}{ "a": 1, "b": "foo", "c": []interface{}{"one", "two"}, }, }, expected: map[string]string{"x-a": "1", "x-b": "foo", "x-c": "one,two", "x-d": `{"a":1,"b":"foo","c":["one","two"]}`}, }, } { res, err := CalculateHeadersToPropagate(tc.cfg, tc.claims) if err != nil { t.Errorf("tc-%d: unexpected error: %v", i, err) continue } if !reflect.DeepEqual(tc.expected, res) { t.Errorf("tc-%d: unexpected response: %v", i, res) } } }
explode_data.jsonl/67491
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 422 }
[ 2830, 3393, 47866, 10574, 1249, 2008, 46836, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17130, 1669, 2088, 3056, 1235, 341, 197, 50286, 414, 52931, 917, 198, 197, 197, 48561, 256, 2415, 14032, 31344, 16094, 197, 42400, 2415, 14032, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestRankingRepository_RankingsForContest stores global rankings for one
// contest plus noise rows (other contests, other languages) and verifies that
// RankingsForContest returns only the contest's global rankings, ordered by
// amount descending.
func TestRankingRepository_RankingsForContest(t *testing.T) {
	sqlHandler, cleanup := setupTestingSuite(t)
	defer cleanup()
	repo := repositories.NewRankingRepository(sqlHandler)
	contestID := uint64(1)
	users := createTestUsers(t, sqlHandler, 3)
	type testCase struct {
		contestID       uint64
		userID          uint64
		userDisplayName string
		language        domain.LanguageCode
		amount          float32
	}
	// Expected result order: highest amount first.
	expected := []testCase{
		{contestID, users[2].ID, "FOO 3", domain.Global, 30},
		{contestID, users[1].ID, "FOO 2", domain.Global, 20},
		{contestID, users[0].ID, "FOO 1", domain.Global, 10},
	}
	// Correct rankings
	{
		// Insert in reverse order to prove ordering comes from the query.
		for _, data := range []testCase{expected[2], expected[1], expected[0]} {
			ranking := &domain.Ranking{
				ContestID: data.contestID,
				UserID:    data.userID,
				Language:  data.language,
				Amount:    data.amount,
			}
			err := repo.Store(*ranking)
			assert.NoError(t, err)
		}
	}
	// Create unrelated rankings to check if it is really working
	{
		for _, data := range []testCase{
			{contestID + 1, 1, "", domain.Global, 50},
			{contestID, 1, "", domain.Japanese, 250},
			{contestID, 2, "", domain.Korean, 150},
			{contestID + 1, 3, "", domain.Global, 200},
		} {
			ranking := &domain.Ranking{
				ContestID: data.contestID,
				UserID:    data.userID,
				Language:  data.language,
				Amount:    0,
			}
			err := repo.Store(*ranking)
			assert.NoError(t, err)
		}
	}
	rankings, err := repo.RankingsForContest(contestID, domain.Global)
	assert.NoError(t, err)
	assert.Equal(t, len(expected), len(rankings))
	for i, expected := range expected {
		// This assumption should work as the order of the rankings should be fixed
		ranking := rankings[i]
		assert.Equal(t, expected.amount, ranking.Amount)
		assert.Equal(t, contestID, ranking.ContestID)
		assert.Equal(t, expected.userID, ranking.UserID)
	}
}
explode_data.jsonl/18795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 754 }
[ 2830, 3393, 22550, 287, 4624, 2568, 1180, 819, 2461, 818, 477, 1155, 353, 8840, 836, 8, 341, 30633, 3050, 11, 21290, 1669, 6505, 16451, 28000, 1155, 340, 16867, 21290, 2822, 17200, 5368, 1669, 49657, 7121, 22550, 287, 4624, 13148, 3050, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTableNameConcurrently(t *testing.T) { DB := DB.Model("") if DB.NewScope(Order{}).TableName() != "orders" { t.Errorf("Order's table name should be orders") } var wg sync.WaitGroup wg.Add(10) for i := 1; i <= 10; i++ { go func(db *gorm.DB) { DB.SingularTable(true) wg.Done() }(DB) } wg.Wait() if DB.NewScope(Order{}).TableName() != "order" { t.Errorf("Order's singular table name should be order") } DB.SingularTable(false) }
explode_data.jsonl/28039
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 33227, 1109, 58202, 1155, 353, 8840, 836, 8, 341, 45409, 1669, 5952, 5659, 31764, 743, 5952, 7121, 10803, 39692, 6257, 568, 33227, 368, 961, 330, 7917, 1, 341, 197, 3244, 13080, 445, 4431, 594, 1965, 829, 1265, 387, 10163, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMultiFilters wires three OTel trace filters — two per-route on one
// WebService and one service-wide on another — and asserts that each routed
// request is traced by exactly the tracer attached to its route/service.
func TestMultiFilters(t *testing.T) {
	tracer1 := mocktrace.NewTracer("tracer1")
	tracer2 := mocktrace.NewTracer("tracer2")
	tracer3 := mocktrace.NewTracer("tracer3")
	// wrappedFunc returns a handler that asserts the request's span was
	// produced by the tracer with the given name.
	wrappedFunc := func(tracerName string) restful.RouteFunction {
		return func(req *restful.Request, resp *restful.Response) {
			span := oteltrace.SpanFromContext(req.Request.Context())
			_, ok := span.(*mocktrace.Span)
			assert.True(t, ok)
			spanTracer := span.Tracer()
			mockTracer, ok := spanTracer.(*mocktrace.Tracer)
			require.True(t, ok)
			assert.Equal(t, tracerName, mockTracer.Name)
			resp.WriteHeader(http.StatusOK)
		}
	}
	// ws1: filters attached per route.
	ws1 := &restful.WebService{}
	ws1.Path("/user")
	ws1.Route(ws1.GET("/{id}").
		Filter(restfultrace.OTelFilter("my-service", restfultrace.WithTracer(tracer1))).
		To(wrappedFunc("tracer1")))
	ws1.Route(ws1.GET("/{id}/books").
		Filter(restfultrace.OTelFilter("book-service", restfultrace.WithTracer(tracer2))).
		To(wrappedFunc("tracer2")))
	// ws2: one filter attached at the service level.
	ws2 := &restful.WebService{}
	ws2.Path("/library")
	ws2.Filter(restfultrace.OTelFilter("library-service", restfultrace.WithTracer(tracer3)))
	ws2.Route(ws2.GET("/{name}").To(wrappedFunc("tracer3")))
	container := restful.NewContainer()
	container.Add(ws1)
	container.Add(ws2)
	// Drive one request through each route; assertions run inside the handlers.
	r := httptest.NewRequest("GET", "/user/123", nil)
	w := httptest.NewRecorder()
	container.ServeHTTP(w, r)
	r = httptest.NewRequest("GET", "/user/123/books", nil)
	w = httptest.NewRecorder()
	container.ServeHTTP(w, r)
	r = httptest.NewRequest("GET", "/library/metropolitan", nil)
	w = httptest.NewRecorder()
	container.ServeHTTP(w, r)
}
explode_data.jsonl/50856
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 649 }
[ 2830, 3393, 20358, 28351, 1155, 353, 8840, 836, 8, 341, 25583, 9584, 16, 1669, 7860, 15067, 7121, 1282, 9584, 445, 94941, 16, 1138, 25583, 9584, 17, 1669, 7860, 15067, 7121, 1282, 9584, 445, 94941, 17, 1138, 25583, 9584, 18, 1669, 7860,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_fsimpl_CheckExistsOrWrite table-tests CheckExistsOrWrite against a
// mocked FS: file already present, Stat failure, successful create-on-miss,
// and create failure. beforeFn programs the mock expectations per case.
func Test_fsimpl_CheckExistsOrWrite(t *testing.T) {
	type args struct {
		path string
		data []byte
	}
	tests := map[string]struct {
		args     args
		want     bool
		wantErr  string
		beforeFn func(m *mocks.FS)
	}{
		"should exists": {
			args: args{path: "/usr/bar", data: []byte{}},
			want: true,
			beforeFn: func(m *mocks.FS) {
				// Stat succeeding means the file exists; no write happens.
				m.On("Stat", "/usr/bar").Return(nil, nil)
			},
		},
		"should error on fail check": {
			args:    args{path: "/usr/bar", data: []byte{}},
			wantErr: "failed to check if file exists on repo at '/usr/bar': some error",
			beforeFn: func(m *mocks.FS) {
				m.On("Stat", "/usr/bar").Return(nil, fmt.Errorf("some error"))
			},
		},
		"should write to file if not exists and write sucsseded": {
			args: args{path: "/usr/bar", data: []byte{}},
			want: false,
			beforeFn: func(m *mocks.FS) {
				mfile := &mocks.File{}
				mfile.On("Write", mock.AnythingOfType("[]uint8")).Return(1, nil)
				mfile.On("Close").Return(nil)
				// ErrNotExist triggers the create-and-write path.
				m.On("Stat", "/usr/bar").Return(nil, os.ErrNotExist)
				m.On("OpenFile", "/usr/bar", mock.AnythingOfType("int"), mock.AnythingOfType("FileMode")).Return(mfile, nil)
			},
		},
		"should fail if WriteFile failed": {
			args:    args{path: "/usr/bar", data: []byte{}},
			want:    false,
			wantErr: "failed to create file at '/usr/bar': " + os.ErrPermission.Error(),
			beforeFn: func(m *mocks.FS) {
				m.On("Stat", "/usr/bar").Return(nil, os.ErrNotExist)
				m.On("OpenFile", "/usr/bar", mock.AnythingOfType("int"), mock.AnythingOfType("FileMode")).Return(nil, os.ErrPermission)
			},
		},
	}
	for tname, tt := range tests {
		t.Run(tname, func(t *testing.T) {
			mockedFS := &mocks.FS{}
			tt.beforeFn(mockedFS)
			fs := Create(mockedFS)
			got, err := fs.CheckExistsOrWrite(tt.args.path, tt.args.data)
			if err != nil {
				// Only cases that declared wantErr may fail.
				if tt.wantErr != "" {
					assert.EqualError(t, err, tt.wantErr)
				} else {
					t.Errorf("prepare() error = %v", err)
				}
				return
			}
			if got != tt.want {
				t.Errorf("fsimpl.CheckExistsOrWrite() = %v, want %v", got, tt.want)
			}
		})
	}
}
explode_data.jsonl/58980
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 938 }
[ 2830, 3393, 34470, 6383, 28188, 15575, 2195, 7985, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 26781, 914, 198, 197, 8924, 3056, 3782, 198, 197, 532, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 31215, 257, 2827, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAttachmentsUnmarshalJSON checks JSON decoding of Attachments: an empty
// object yields an empty map, and a stub attachment gets its Filename and
// ContentType populated (Content is drained and nil'd before comparison).
func TestAttachmentsUnmarshalJSON(t *testing.T) {
	tests := []struct {
		name     string
		input    string
		expected Attachments
		err      string
	}{
		{
			name:     "no attachments",
			input:    "{}",
			expected: Attachments{},
		},
		{
			name: "one attachment",
			input: `{ "foo.txt": { "content_type": "text/plain", "data": "dGVzdCBhdHRhY2htZW50Cg==" } }`,
			expected: Attachments{
				"foo.txt": &Attachment{
					Filename:    "foo.txt",
					ContentType: "text/plain",
				},
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			var att Attachments
			err := json.Unmarshal([]byte(test.input), &att)
			testy.Error(t, test.err, err)
			// Close and drop the content readers so DeepEqual-style diffing
			// only compares the metadata fields.
			for _, v := range att {
				_ = v.Content.Close()
				v.Content = nil
			}
			if d := diff.Interface(test.expected, att); d != nil {
				t.Error(d)
			}
		})
	}
}
explode_data.jsonl/28905
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 426 }
[ 2830, 3393, 75740, 1806, 27121, 5370, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 22427, 914, 271, 197, 42400, 48384, 1368, 198, 197, 9859, 414, 914, 198, 197, 59403, 197, 197, 515, 298,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWriteTheNextLineAtTheEndOfTheFile(t *testing.T) { logFile := outputs.NewSimpleLogFileWithFolderPath("./", "test3") _, _ = logFile.Write([]byte("Dumb log line")) _, writeErr := logFile.Write([]byte("Second log line")) if writeErr != nil { t.Errorf("Could not write in the log file : %v", writeErr) } content, readErr := ioutil.ReadFile(logFile.Path()) if readErr != nil { t.Errorf("Could not read the log file : %v", readErr) } if string(content) != "Dumb log line\nSecond log line\n" { t.Errorf("The log file content does not match with what was written: %v", string(content)) } removeFile(logFile.Path()) }
explode_data.jsonl/22140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 7985, 785, 5847, 2460, 1655, 785, 3727, 2124, 785, 1703, 1155, 353, 8840, 836, 8, 341, 6725, 1703, 1669, 16275, 7121, 16374, 98857, 2354, 90597, 13988, 497, 330, 1944, 18, 5130, 197, 6878, 716, 284, 1487, 1703, 4073, 10556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAssignField(t *testing.T) { t.Parallel() tests := []struct { name string dest interface{} src interface{} expected interface{} }{ { name: "same types", dest: int8(0), src: int8(100), expected: int8(100), }, { name: "same types - more source pointers", dest: int8(0), src: func() interface{} { i := int8(100) return &i }(), expected: int8(100), }, { name: "same types - more dest pointers", dest: func() interface{} { i := int8(0) return &i }(), src: int8(100), expected: int8(100), }, { name: "convertible types - more source pointers", dest: int16(0), src: func() interface{} { i := int8(100) return &i }(), expected: int16(100), }, { name: "convertible types - both pointers", dest: func() interface{} { i := int8(0) return &i }(), src: func() interface{} { i := int16(100) return &i }(), expected: int8(100), }, { name: "convertible types - int16 -> int8", dest: int8(0), src: int16(100), expected: int8(100), }, { name: "convertible types - int16 -> uint8", dest: uint8(0), src: int16(100), expected: uint8(100), }, { name: "convertible types - uint16 -> int8", dest: int8(0), src: uint16(100), expected: int8(100), }, { name: "convertible types - uint16 -> uint8", dest: uint8(0), src: uint16(100), expected: uint8(100), }, { name: "convertible types - float32 -> float64", dest: float64(0), src: float32(1.5), expected: float64(1.5), }, { name: "convertible types - float64 -> float32", dest: float32(0), src: float64(1.5), expected: float32(1.5), }, { name: "convertible types - string -> bool", dest: false, src: "true", expected: true, }, { name: "convertible types - string -> int8", dest: int8(0), src: "100", expected: int8(100), }, { name: "convertible types - string -> uint8", dest: uint8(0), src: "100", expected: uint8(100), }, { name: "convertible types - string -> float32", dest: float32(0), src: "1.5", expected: float32(1.5), }, { name: "convertible types - typecase string -> string", dest: "", src: func() interface{} { type foo string return foo("foo") }(), 
expected: "foo", }, { name: "convertible types - string -> array", dest: [2]string{}, src: `["test","test2"]`, expected: [2]string{"test", "test2"}, }, { name: "convertible types - string -> slice", dest: []string{}, src: `["test","test2"]`, expected: []string{"test", "test2"}, }, { name: "convertible types - string -> struct", dest: struct{ A int }{}, src: `{"A":100}`, expected: struct{ A int }{100}, }, { name: "convertible types - string -> map", dest: map[string]float64{}, src: `{"1Address":1.5}`, expected: map[string]float64{"1Address": 1.5}, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { dst := reflect.New(reflect.TypeOf(test.dest)).Elem() src := reflect.ValueOf(test.src) err := btcjson.TstAssignField(1, "testField", dst, src) if err != nil { t.Errorf("Test #%d (%s) unexpected error: %v", i, test.name, err) continue } // Inidirect through to the base types to ensure their values // are the same. for dst.Kind() == reflect.Ptr { dst = dst.Elem() } if !reflect.DeepEqual(dst.Interface(), test.expected) { t.Errorf("Test #%d (%s) unexpected value - got %v, "+ "want %v", i, test.name, dst.Interface(), test.expected) continue } } }
explode_data.jsonl/5030
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1891 }
[ 2830, 3393, 28933, 1877, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 49616, 257, 3749, 16094, 197, 41144, 414, 3749, 16094, 197, 42400, 3749, 16094, 197, 59403, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShuffleStrings(t *testing.T) { ShuffleStrings([]string{"a"}) ShuffleStrings(nil) a := []string{"a", "b", "c", "d", "e", "f", "g", "h"} b := make([]string, len(a)) copy(b, a) ShuffleStrings(a) if equalSliceString(a, b) { t.Errorf("shuffle resulted in the same permutation, the odds are slim") } }
explode_data.jsonl/82368
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 2016, 13208, 20859, 1155, 353, 8840, 836, 8, 341, 197, 2016, 13208, 20859, 10556, 917, 4913, 64, 23625, 197, 2016, 13208, 20859, 27907, 692, 11323, 1669, 3056, 917, 4913, 64, 497, 330, 65, 497, 330, 66, 497, 330, 67, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWSIsControlFrame(t *testing.T) { for _, test := range []struct { name string code wsOpCode isControl bool }{ {"binary", wsBinaryMessage, false}, {"text", wsTextMessage, false}, {"ping", wsPingMessage, true}, {"pong", wsPongMessage, true}, {"close", wsCloseMessage, true}, } { t.Run(test.name, func(t *testing.T) { if res := wsIsControlFrame(test.code); res != test.isControl { t.Fatalf("Expected %q isControl to be %v, got %v", test.name, test.isControl, res) } }) } }
explode_data.jsonl/42685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 223 }
[ 2830, 3393, 7433, 3872, 3273, 4369, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 43343, 414, 17624, 7125, 2078, 198, 197, 19907, 3273, 1807, 198, 197, 59403, 197, 197, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCarver_DetectHorizontalEnergySeam(t *testing.T) { var seams [][]Seam var totalEnergySeams int img := image.NewNRGBA(image.Rect(0, 0, ImgWidth, ImgHeight)) draw.Draw(img, img.Bounds(), &image.Uniform{image.White}, image.ZP, draw.Src) // Replace the pixel colors in a single row from 0xff to 0xdd. 5 is an arbitrary value. // The seam detector should recognize that line as being of low energy density // and should perform the seam computation process. // This way we'll make sure, that the seam detector correctly detects one and only one line. dx, dy := img.Bounds().Dx(), img.Bounds().Dy() for x := 0; x < dx; x++ { img.Pix[(5*dx+x)*4+0] = 0xdd img.Pix[(5*dx+x)*4+1] = 0xdd img.Pix[(5*dx+x)*4+2] = 0xdd img.Pix[(5*dx+x)*4+3] = 0xdd } var c = NewCarver(dx, dy) for x := 0; x < ImgWidth; x++ { width, height := img.Bounds().Max.X, img.Bounds().Max.Y c = NewCarver(width, height) c.ComputeSeams(img, p) les := c.FindLowestEnergySeams() seams = append(seams, les) } for i := 0; i < len(seams); i++ { for s := 0; s < len(seams[i]); s++ { totalEnergySeams += seams[i][s].X } } if totalEnergySeams == 0 { t.Errorf("The seam detector should have detected a horizontal energy seam") } }
explode_data.jsonl/8687
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 505 }
[ 2830, 3393, 8852, 423, 1557, 13176, 15837, 32675, 1514, 309, 1155, 353, 8840, 836, 8, 341, 2405, 73688, 52931, 1514, 309, 198, 2405, 2790, 32675, 1514, 4122, 526, 271, 39162, 1669, 2168, 7121, 45, 58927, 10075, 32153, 7, 15, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestUserDataMerge(t *testing.T) { for testName, testCase := range map[string]struct { provision userData custom userData expected userData expectError bool }{ "Succeeds": { provision: userData{ Options: userdata.Options{ Directive: userdata.ShellScript + "/bin/bash", Content: "echo foo", }, }, custom: userData{ Options: userdata.Options{ Directive: userdata.ShellScript + "/bin/bash", Content: "echo bar", }, }, expected: userData{ Options: userdata.Options{ Directive: userdata.ShellScript + "/bin/bash", Content: "echo foo\necho bar", }, }, }, "FailsForIncompatibleUserDataDirectives": { provision: userData{ Options: userdata.Options{ Directive: userdata.CloudConfig, Content: "runcmd:\n - echo foo", }, }, custom: userData{ Options: userdata.Options{ Directive: userdata.ShellScript + "/bin/bash", Content: "echo bar", }, }, expectError: true, }, "FailsForUnspecifiedDirective": { provision: userData{ Options: userdata.Options{ Content: "echo foo", }, }, custom: userData{ Options: userdata.Options{ Content: "echo bar", }, }, expectError: true, }, "PersistsIfBaseUserDataPersists": { provision: userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo foo", Persist: true, }, }, custom: userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo bar", }, }, expected: userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo foo\necho bar", Persist: true, }, }, }, "PersistsIfOtherUserDataPersists": { provision: userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo foo", }, }, custom: userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo bar", Persist: true, }, }, expected: userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo foo\necho bar", Persist: true, }, }, }, "FailsIfPersistingForInvalidDirective": { provision: userData{ Options: userdata.Options{ Directive: 
userdata.ShellScript + "/bin/bash", Content: "echo foo", Persist: true, }, }, custom: userData{ Options: userdata.Options{ Directive: userdata.ShellScript + "/bin/bash", Content: "echo bar", Persist: true, }, }, expectError: true, }, } { t.Run(testName, func(t *testing.T) { merged, err := testCase.provision.merge(&testCase.custom) if !testCase.expectError { require.NoError(t, err) assert.Equal(t, testCase.expected.Directive, merged.Directive) assert.Equal(t, testCase.expected.Content, merged.Content) assert.Equal(t, testCase.expected.Persist, merged.Persist) } else { assert.Error(t, err) assert.Nil(t, merged) } }) } }
explode_data.jsonl/3805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1416 }
[ 2830, 3393, 39485, 52096, 1155, 353, 8840, 836, 8, 341, 2023, 94396, 11, 54452, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 197, 776, 13013, 256, 34385, 198, 197, 1444, 1450, 414, 34385, 198, 197, 42400, 262, 34385, 198, 197, 24952, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestManagementClient_Endpoint_Failures(t *testing.T) { //Create a new api client client, err := NewManagementClient(configManagementTest.HTTP.BaseURL) if !assert.NoError(t, err, "error while creating a new api client") { return } //Set username and password if configManagementTest.HTTP.AuthUsername != "" && configManagementTest.HTTP.AuthPassword != "" { err = client.SetUsernameAndPassword(configManagementTest.HTTP.AuthUsername, configManagementTest.HTTP.AuthPassword) if !assert.NoError(t, err, "error while creating a new api client") { return } } //Create Endpoint with invalid input _, err = client.CreateEndpointWithTag("test-Endpoint_Failures-endpoint1", "noAddress", "no valid protocol", configManagementTest.TestTagID) if assert.Error(t, err, "no error when an endpoint with invalid params was created") { if err, ok := err.(HTTPError); assert.True(t, ok, "error is not a http error", err.Error()) { assert.True(t, err.StatusCode == 400, "error != 400") } } //Get Invalid Endpoint _, err = client.GetEndpoint(-1) if assert.Error(t, err, "no error when trying to get an invalid endpoint") { if err, ok := err.(HTTPError); assert.True(t, ok, "error is not a http error", err.Error()) { assert.True(t, err.StatusCode == 404, "error != 404") } } //Delete invalid endpoint err = client.DeleteEndpoint(-1) if assert.Error(t, err, "no error when a non existent endpoint was deleted") { if err, ok := err.(HTTPError); assert.True(t, ok, "error is not a http error", err.Error()) { assert.True(t, err.StatusCode == 404, "error != 404") } } //create valid endpoint endpoint, err := createEndpointAndCheckForSuccess(t, client, "test-Endpoint_Failures-endpoint1", "1.1.1.1:9753", "udpv4") if err != nil { return } defer func() { _ = deleteEndpointAndCheckForSuccess(t, client, endpoint) }() _, err = client.CreateEndpoint("test-Endpoint_Failures-endpoint1", "1.1.1.1:9753", "udpv4") if assert.Error(t, err, "no error when creating a endpoint twice") { if err, ok := err.(HTTPError); 
assert.True(t, ok, "error is not a http error", err.Error()) { assert.True(t, err.StatusCode == 400, "error != 400") } } }
explode_data.jsonl/82179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 750 }
[ 2830, 3393, 22237, 2959, 49953, 2768, 1400, 604, 1413, 1155, 353, 8840, 836, 8, 341, 197, 322, 4021, 264, 501, 6330, 2943, 198, 25291, 11, 1848, 1669, 1532, 22237, 2959, 8754, 22237, 2271, 27358, 13018, 3144, 340, 743, 753, 2207, 35699,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRepository_SaveAddAffiliate(t *testing.T) { db, err := sqlx.Connect("pgx", "host=localhost port=5432 user=postgres dbname=juno password=postgres sslmode=disable") if err != nil { t.Fatal(err) } // Create the codec codec := simapp.MakeTestEncodingConfig() type args struct { msg []*accountstypes.MsgAddAffiliate } tests := []struct { name string args args wantErr bool }{ { name: "valid", args: args{[]*accountstypes.MsgAddAffiliate{ { Creator: "vcg1ljs7p2p9ae3en8knr3d3ke8srsfcj2zjvefv0g", AccountHash: "a935ea2c467d7f666ea2a67870564f2efb902c05f0a2bb4b6202832aedd26cd1", AffiliationHash: "a935ea2c467d7f666ea2a67870564f2efb902c05f0a2bb4b6202832aedd26cd2", Affiliation: accountstypes.AFFILIATION_KIND_REFERRAL, Extras: []*extratypes.Extra{ { Kind: extratypes.EXTRA_KIND_EMAIL, Data: "retg@mail", }, { Kind: extratypes.EXTRA_KIND_PHONE, Data: "+380685548", }, }, }, { Creator: "vcg1ljs7p2p9ae3en8knr3d3ke8srsfcj2zjvefv1g", AccountHash: "a935ea2c467d7f666ea2a67870564f2efb902c05f0a2bb4b6202832aedd26cd3", AffiliationHash: "a935ea2c467d7f666ea2a67870564f2efb902c05f0a2bb4b6202832aedd26cd4", Affiliation: accountstypes.AFFILIATION_KIND_REFERRAL, Extras: []*extratypes.Extra{ { Kind: extratypes.EXTRA_KIND_EMAIL, Data: "rete@gmail", }, { Kind: extratypes.EXTRA_KIND_PHONE, Data: "+380685456", }, }, }, }, }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { r := accountsdb.NewRepository(db, codec.Marshaler) if err := r.SaveAddAffiliate(tt.args.msg...); (err != nil) != tt.wantErr { t.Errorf("Repository.SaveAddAffiliate() error = %v, wantErr %v", err, tt.wantErr) } }) } }
explode_data.jsonl/25488
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1018 }
[ 2830, 3393, 4624, 78746, 2212, 25841, 34850, 1155, 353, 8840, 836, 8, 341, 20939, 11, 1848, 1669, 5704, 87, 43851, 445, 3517, 87, 497, 330, 3790, 84551, 2635, 28, 20, 19, 18, 17, 1196, 28, 43070, 83492, 45612, 16311, 3552, 28, 43070, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStrToParquetType(t *testing.T) { testData := []struct { StrData string GoData interface{} PT *parquet.Type CT *parquet.ConvertedType Length int Scale int }{ {"false", bool(false), parquet.TypePtr(parquet.Type_BOOLEAN), nil, 0, 0}, {"1", int32(1), parquet.TypePtr(parquet.Type_INT32), nil, 0, 0}, {"0", int64(0), parquet.TypePtr(parquet.Type_INT64), nil, 0, 0}, {"12345", StrIntToBinary("12345", "LittleEndian", 12, true), parquet.TypePtr(parquet.Type_INT96), nil, 0, 0}, {"0.1", float32(0.1), parquet.TypePtr(parquet.Type_FLOAT), nil, 0, 0}, {"0.1", float64(0.1), parquet.TypePtr(parquet.Type_DOUBLE), nil, 0, 0}, {"abc bcd", string("abc bcd"), parquet.TypePtr(parquet.Type_BYTE_ARRAY), nil, 0, 0}, {"abc bcd", string("abc bcd"), parquet.TypePtr(parquet.Type_FIXED_LEN_BYTE_ARRAY), nil, 0, 0}, {"abc bcd", string("abc bcd"), parquet.TypePtr(parquet.Type_BYTE_ARRAY), parquet.ConvertedTypePtr(parquet.ConvertedType_UTF8), 0, 0}, {"1", int32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_INT_8), 0, 0}, {"1", int32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_INT_16), 0, 0}, {"1", int32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_INT_32), 0, 0}, {"1", int64(1), parquet.TypePtr(parquet.Type_INT64), parquet.ConvertedTypePtr(parquet.ConvertedType_INT_64), 0, 0}, {"1", uint32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_UINT_8), 0, 0}, {"1", uint32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_UINT_16), 0, 0}, {"1", uint32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_UINT_32), 0, 0}, {"1", uint64(1), parquet.TypePtr(parquet.Type_INT64), parquet.ConvertedTypePtr(parquet.ConvertedType_UINT_64), 0, 0}, {"1", int32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_DATE), 0, 0}, {"1", 
int32(1), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_TIME_MILLIS), 0, 0}, {"1", int64(1), parquet.TypePtr(parquet.Type_INT64), parquet.ConvertedTypePtr(parquet.ConvertedType_TIME_MICROS), 0, 0}, {"1", int64(1), parquet.TypePtr(parquet.Type_INT64), parquet.ConvertedTypePtr(parquet.ConvertedType_TIMESTAMP_MICROS), 0, 0}, {"1", int64(1), parquet.TypePtr(parquet.Type_INT64), parquet.ConvertedTypePtr(parquet.ConvertedType_TIMESTAMP_MILLIS), 0, 0}, {"123456789", StrIntToBinary("123456789", "LittleEndian", 12, false), parquet.TypePtr(parquet.Type_FIXED_LEN_BYTE_ARRAY), parquet.ConvertedTypePtr(parquet.ConvertedType_INTERVAL), 0, 0}, {"123.45", int32(12345), parquet.TypePtr(parquet.Type_INT32), parquet.ConvertedTypePtr(parquet.ConvertedType_DECIMAL), 12, 2}, {"123.45", int64(12345), parquet.TypePtr(parquet.Type_INT64), parquet.ConvertedTypePtr(parquet.ConvertedType_DECIMAL), 12, 2}, {"123.45", StrIntToBinary("12345", "BigEndian", 12, true), parquet.TypePtr(parquet.Type_FIXED_LEN_BYTE_ARRAY), parquet.ConvertedTypePtr(parquet.ConvertedType_DECIMAL), 12, 2}, {"373.1145", StrIntToBinary("373114500000000000000", "BigEndian", 16, true), parquet.TypePtr(parquet.Type_FIXED_LEN_BYTE_ARRAY), parquet.ConvertedTypePtr(parquet.ConvertedType_DECIMAL), 16, 18}, {"123.45", StrIntToBinary("12345", "BigEndian", 0, true), parquet.TypePtr(parquet.Type_BYTE_ARRAY), parquet.ConvertedTypePtr(parquet.ConvertedType_DECIMAL), 12, 2}, {"373.1145", StrIntToBinary("373114500000000000000", "BigEndian", 0, true), parquet.TypePtr(parquet.Type_BYTE_ARRAY), parquet.ConvertedTypePtr(parquet.ConvertedType_DECIMAL), 16, 18}, } for _, data := range testData { res := fmt.Sprintf("%v", StrToParquetType(data.StrData, data.PT, data.CT, data.Length, data.Scale)) expect := fmt.Sprintf("%v", data.GoData) if res != expect { t.Errorf("StrToParquetType err %v-%v, expect %s, got %s", data.PT, data.CT, expect, res) } } }
explode_data.jsonl/29861
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1660 }
[ 2830, 3393, 2580, 1249, 4272, 23300, 929, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 3056, 1235, 341, 197, 197, 2580, 1043, 914, 198, 197, 9600, 78, 1043, 220, 3749, 16094, 197, 197, 2828, 414, 353, 1732, 23300, 10184, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMetaAddress_Receive_Segwit_Address(t *testing.T) { path := NewDerivationPath(BaseCoinBip84MainNet, 0, 0) wallet := NewHDWalletFromWords(w, BaseCoinBip84MainNet) usableAddress, err := newUsableAddressWithDerivationPath(wallet, path) assert.Nil(t, err) meta, err := usableAddress.MetaAddress() assert.Nil(t, err) expectedAddr := "bc1qcr8te4kr609gcawutmrza0j4xv80jy8z306fyu" expectedPubkey := "0430d54fd0dd420a6e5f8d3624f5f3482cae350f79d5f0753bf5beef9c2d91af3c04717159ce0828a7f686c2c7510b7aa7d4c685ebc2051642ccbebc7099e2f679" assert.Equal(t, expectedAddr, meta.Address) assert.Equal(t, path, meta.DerivationPath) assert.Equal(t, expectedPubkey, meta.UncompressedPublicKey) }
explode_data.jsonl/64002
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 296 }
[ 2830, 3393, 12175, 4286, 62, 14742, 1098, 791, 88519, 64899, 1155, 353, 8840, 836, 8, 341, 26781, 1669, 1532, 22171, 39127, 1820, 22225, 41180, 33, 573, 23, 19, 6202, 6954, 11, 220, 15, 11, 220, 15, 340, 6692, 7464, 1669, 1532, 19147,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsJSONType(t *testing.T) { for _, test := range []struct { input string expect bool }{ {"application/json", true}, {"application/xml+json", true}, {"application/vnd.foo+json", true}, {"application/json; charset=utf-8", true}, {"application/vnd.foo+json; charset=utf-8", true}, {"text/json", true}, {"text/xml+json", true}, {"text/vnd.foo+json", true}, {"application/foo-json", false}, {"application/foo.json", false}, {"application/vnd.foo-json", false}, {"application/vnd.foo.json", false}, {"application/json+xml", false}, {"text/foo-json", false}, {"text/foo.json", false}, {"text/vnd.foo-json", false}, {"text/vnd.foo.json", false}, {"text/json+xml", false}, } { result := IsJSONType(test.input) if result != test.expect { t.Errorf("failed on %q: want %v, got %v", test.input, test.expect, result) } } }
explode_data.jsonl/51176
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 3872, 5370, 929, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 22427, 220, 914, 198, 197, 24952, 1807, 198, 197, 59403, 197, 197, 4913, 5132, 8931, 497, 830, 1583, 197, 197, 4913, 5132, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBoxFillWriterFillsBox(t *testing.T) { bfw := createBoxFillWriter(NewRect(1, 1, 5, 5), termbox.ColorCyan) // test left/right margin for y := 0; y < 7; y++ { _, outputAttribute := bfw(termbox.ColorWhite, termbox.ColorBlack, point{0, y}) assert.Equal(t, termbox.ColorBlack, outputAttribute, "Expected (%d,%d) to not have its colour changed", 0, y) _, outputAttribute = bfw(termbox.ColorWhite, termbox.ColorBlack, point{7, y}) assert.Equal(t, termbox.ColorBlack, outputAttribute, "Expected (%d,%d) to not have its colour changed", 7, y) } // test top/bottom margin for x := 0; x < 7; x++ { _, outputAttribute := bfw(termbox.ColorWhite, termbox.ColorBlack, point{x, 0}) assert.Equal(t, termbox.ColorBlack, outputAttribute, "Expected (%d,%d) to not have its colour changed", x, 0) _, outputAttribute = bfw(termbox.ColorWhite, termbox.ColorBlack, point{x, 7}) assert.Equal(t, termbox.ColorBlack, outputAttribute, "Expected (%d,%d) to not have its colour changed", x, 7) } // test filled in square for x := 1; x <= 6; x++ { for y := 1; y <= 6; y++ { _, outputAttribute := bfw(termbox.ColorWhite, termbox.ColorBlack, point{x, y}) assert.Equal(t, termbox.ColorCyan, outputAttribute, "Expected (%d,%d) to have its colour changed to form the box", x, y) } } }
explode_data.jsonl/30727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 523 }
[ 2830, 3393, 1611, 14449, 6492, 37, 3305, 1611, 1155, 353, 8840, 836, 8, 341, 2233, 20564, 1669, 1855, 1611, 14449, 6492, 35063, 4415, 7, 16, 11, 220, 16, 11, 220, 20, 11, 220, 20, 701, 4647, 2011, 6669, 34, 8339, 692, 197, 322, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_podTracker_Stop(t *testing.T) { // setup types logger := logrus.NewEntry(logrus.StandardLogger()) clientset := fake.NewSimpleClientset() tests := []struct { name string pod *v1.Pod started bool }{ { name: "started", pod: _pod, started: true, }, { name: "not started", pod: _pod, started: false, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { tracker, err := newPodTracker(logger, clientset, test.pod, 0*time.Second) if err != nil { t.Errorf("newPodTracker() error = %v", err) return } if test.started { tracker.Start(context.Background()) } tracker.Stop() }) } }
explode_data.jsonl/62942
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 85337, 31133, 80308, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 4494, 198, 17060, 1669, 1487, 20341, 7121, 5874, 12531, 20341, 53615, 7395, 2398, 25291, 746, 1669, 12418, 7121, 16374, 2959, 746, 2822, 78216, 1669, 3056, 123...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestShouldIgnoreAllowedPattern(t *testing.T) { results := helpers.NewDetectionResults() content := []byte("\"key\" : \"This is an allowed keyword\"\npassword=y0uw1lln3v3rgu3ssmyP@55w0rd") filename := "allowed_pattern.txt" additions := []gitrepo.Addition{gitrepo.NewAddition(filename, content)} fileIgnoreConfig := talismanrc.FileIgnoreConfig{filename, "", []string{}, []string{"key"}} ignores := &talismanrc.TalismanRC{FileIgnoreConfig: []talismanrc.FileIgnoreConfig{fileIgnoreConfig}, AllowedPatterns: []string{"password"}} NewPatternDetector(customPatterns).Test(helpers.NewChecksumCompare(nil, utility.DefaultSHA256Hasher{}, talismanrc.NewTalismanRC(nil)), additions, ignores, results, func() {}) assert.True(t, results.Successful(), "Expected keywords %s to be ignored by Talisman", append(fileIgnoreConfig.AllowedPatterns, ignores.AllowedPatterns...)) }
explode_data.jsonl/82120
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 14996, 12497, 35382, 15760, 1155, 353, 8840, 836, 8, 341, 55497, 1669, 30187, 7121, 54817, 9801, 741, 27751, 1669, 3056, 3782, 38915, 792, 2105, 549, 7245, 1986, 374, 458, 5420, 16174, 22245, 77, 3833, 29368, 15, 42110, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_reconstructQueue(t *testing.T) { type args struct { people [][]int } tests := []struct { name string args args want [][]int }{ { args: args{ people: [][]int{ {7, 0}, {4, 4}, {7, 1}, {5, 0}, {6, 1}, {5, 2}, }, }, want: [][]int{ {5, 0}, {7, 0}, {5, 2}, {6, 1}, {4, 4}, {7, 1}, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := reconstructQueue(tt.args.people); !reflect.DeepEqual(got, tt.want) { t.Errorf("reconstructQueue() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/7968
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 348 }
[ 2830, 3393, 1288, 7596, 7554, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 197, 16069, 52931, 396, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 52931, 396, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUserEntityUpdateWRONGIMAGE(t *testing.T) { resp, _ := sendPut("http://localhost:8080/TESTING/YourAccount&q=Update", UserEntityUpdateWRONGIMAGE, auth.Header.Get("Authorization")) response := responseToString(resp) compareResults(t, response, HyperText.CustomResponses["wrong-validation"]) }
explode_data.jsonl/59341
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 1474, 3030, 4289, 17925, 7539, 29926, 1155, 353, 8840, 836, 8, 341, 34653, 11, 716, 1669, 3624, 19103, 445, 1254, 1110, 8301, 25, 23, 15, 23, 15, 14, 10033, 1718, 14, 7771, 7365, 62735, 28, 4289, 497, 2657, 3030, 4289, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNilPointer(t *testing.T) { var name *string = nil eq := Eq{"name": name} sql, args, err := eq.ToSql() assert.NoError(t, err) assert.Empty(t, args) assert.Equal(t, "name IS NULL", sql) neq := NotEq{"name": name} sql, args, err = neq.ToSql() assert.NoError(t, err) assert.Empty(t, args) assert.Equal(t, "name IS NOT NULL", sql) var ids *[]int = nil eq = Eq{"id": ids} sql, args, err = eq.ToSql() assert.NoError(t, err) assert.Empty(t, args) assert.Equal(t, "id IS NULL", sql) neq = NotEq{"id": ids} sql, args, err = neq.ToSql() assert.NoError(t, err) assert.Empty(t, args) assert.Equal(t, "id IS NOT NULL", sql) var ida *[3]int = nil eq = Eq{"id": ida} sql, args, err = eq.ToSql() assert.NoError(t, err) assert.Empty(t, args) assert.Equal(t, "id IS NULL", sql) neq = NotEq{"id": ida} sql, args, err = neq.ToSql() assert.NoError(t, err) assert.Empty(t, args) assert.Equal(t, "id IS NOT NULL", sql) }
explode_data.jsonl/44173
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 19064, 9084, 1155, 353, 8840, 836, 8, 341, 2405, 829, 353, 917, 284, 2092, 198, 7727, 80, 1669, 33122, 4913, 606, 788, 829, 532, 30633, 11, 2827, 11, 1848, 1669, 8939, 3274, 8269, 2822, 6948, 35699, 1155, 11, 1848, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUntarHardlinkToSymlink(t *testing.T) { // TODO Windows. There may be a way of running this, but turning off for now if runtime.GOOS == "windows" { t.Skip("hardlinks on Windows") } for i, headers := range [][]*tar.Header{ { { Name: "symlink1", Typeflag: tar.TypeSymlink, Linkname: "regfile", Mode: 0644, }, { Name: "symlink2", Typeflag: tar.TypeLink, Linkname: "symlink1", Mode: 0644, }, { Name: "regfile", Typeflag: tar.TypeReg, Mode: 0644, }, }, } { if err := testBreakout("untar", "docker-TestUntarHardlinkToSymlink", headers); err != nil { t.Fatalf("i=%d. %v", i, err) } } }
explode_data.jsonl/82000
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 20250, 277, 26907, 2080, 1249, 34667, 44243, 1155, 353, 8840, 836, 8, 341, 197, 322, 5343, 5515, 13, 2619, 1231, 387, 264, 1616, 315, 4303, 419, 11, 714, 13054, 1007, 369, 1431, 198, 743, 15592, 97574, 3126, 621, 330, 2707...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBundledResourcesWhenMultipleOutputFormats(t *testing.T) { t.Parallel() b := newTestSitesBuilder(t).Running().WithConfigFile("toml", ` baseURL = "https://example.org" [outputs] # This looks odd, but it triggers the behaviour in #5858 # The total output formats list gets sorted, so CSS before HTML. home = [ "CSS" ] `) b.WithContent("mybundle/index.md", ` --- title: Page date: 2017-01-15 --- `, "mybundle/data.json", "MyData", ) b.CreateSites().Build(BuildCfg{}) b.AssertFileContent("public/mybundle/data.json", "MyData") // Change the bundled JSON file and make sure it gets republished. b.EditFiles("content/mybundle/data.json", "My changed data") b.Build(BuildCfg{}) b.AssertFileContent("public/mybundle/data.json", "My changed data") }
explode_data.jsonl/68132
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 33, 1241, 832, 11277, 4498, 32089, 5097, 44599, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2233, 1669, 501, 2271, 93690, 3297, 1155, 568, 18990, 1005, 2354, 2648, 1703, 445, 37401, 75, 497, 22074, 3152, 3144, 284...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateS3BucketLifecycleExpirationDays(t *testing.T) { validDays := []int{ 1, 31, 1024, } for _, v := range validDays { _, errors := validateS3BucketLifecycleExpirationDays(v, "days") if len(errors) != 0 { t.Fatalf("%q should be valid days: %q", v, errors) } } invalidDays := []int{ -1, 0, } for _, v := range invalidDays { _, errors := validateS3BucketLifecycleExpirationDays(v, "date") if len(errors) == 0 { t.Fatalf("%q should be invalid days", v) } } }
explode_data.jsonl/78573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 17926, 50, 18, 36018, 62731, 66301, 20557, 1155, 353, 8840, 836, 8, 341, 56322, 20557, 1669, 3056, 396, 515, 197, 197, 16, 345, 197, 197, 18, 16, 345, 197, 197, 16, 15, 17, 19, 345, 197, 630, 2023, 8358, 348, 1669, 208...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCallLessArgsDynamicLocalVar(t *testing.T) { const SCRIPT = ` function f(param) { var a = 42; if (false) { eval(""); } return a; } f(); ` testScript1(SCRIPT, intToValue(42), t) }
explode_data.jsonl/75215
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 7220, 27451, 4117, 21752, 7319, 3962, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 7527, 282, 9743, 8, 341, 197, 2405, 264, 284, 220, 19, 17, 280, 197, 743, 320, 3849, 8, 341, 298, 93413, 13056, 197, 197, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDoErr(t *testing.T) { is := is.New(t) var calls int srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { calls++ is.Equal(r.Method, http.MethodPost) query := r.FormValue("query") is.Equal(query, `query {}`) io.WriteString(w, `{ "errors": [{ "message": "Something went wrong" }] }`) })) defer srv.Close() ctx := context.Background() client := NewClient(srv.URL, UseMultipartForm()) ctx, cancel := context.WithTimeout(ctx, 1*time.Second) defer cancel() var responseData map[string]interface{} err := client.Run(ctx, &Request{q: "query {}"}, &responseData) is.True(err != nil) is.Equal(err.Error(), "graphql: Something went wrong") }
explode_data.jsonl/53422
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 5404, 7747, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 374, 7121, 1155, 340, 2405, 6738, 526, 198, 1903, 10553, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMySQLBackupWrapper(t *testing.T) { logf.SetLogger(logf.ZapLoggerTo(GinkgoWriter, true)) RegisterFailHandler(Fail) RunSpecs(t, "MySQLBackup wrapper unit tests") }
explode_data.jsonl/29986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 59224, 56245, 11542, 1155, 353, 8840, 836, 8, 341, 6725, 69, 4202, 7395, 12531, 69, 13476, 391, 7395, 1249, 6699, 766, 3346, 6492, 11, 830, 4390, 79096, 19524, 3050, 7832, 604, 340, 85952, 8327, 82, 1155, 11, 330, 59224, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestTxExecutorRollbackRedoFail(t *testing.T) { txe, tsv, db := newTestTxExecutor(t) defer db.Close() defer tsv.StopService() txid := newTxForPrep(tsv) // Allow all additions to redo logs to succeed db.AddQueryPattern("insert into _vt\\.redo_state.*", &sqltypes.Result{}) err := txe.Prepare(txid, "bb") require.NoError(t, err) err = txe.RollbackPrepared("bb", txid) require.Error(t, err) require.Contains(t, err.Error(), "is not supported") }
explode_data.jsonl/25170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 31584, 25255, 32355, 1419, 6033, 78, 19524, 1155, 353, 8840, 836, 8, 341, 3244, 8371, 11, 259, 3492, 11, 2927, 1669, 501, 2271, 31584, 25255, 1155, 340, 16867, 2927, 10421, 741, 16867, 259, 3492, 30213, 1860, 741, 46237, 307...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoReadPath(t *testing.T) { t.Parallel() g := gomega.NewGomegaWithT(t) // Set the logger to development mode for verbose logs. logf.SetLogger(zap.New(zap.UseDevMode(true))) namespaced := types.NamespacedName{ Name: "read-test", Namespace: "default", } application := &app.FybrikApplication{} g.Expect(readObjectFromFile("../../testdata/unittests/data-usage.yaml", application)).NotTo(gomega.HaveOccurred()) application.Spec.Data[0] = app.DataContext{ DataSetID: "db2/allow-dataset", Requirements: app.DataRequirements{Interface: app.InterfaceDetails{Protocol: app.JdbcDb2, DataFormat: app.Table}}, } application.SetGeneration(1) // Objects to track in the fake client. objs := []runtime.Object{ application, } // Register operator types with the runtime scheme. s := utils.NewScheme(g) // Create a fake client to mock API calls. cl := fake.NewFakeClientWithScheme(s, objs...) // Read module readModule := &app.FybrikModule{} g.Expect(readObjectFromFile("../../testdata/unittests/module-read-parquet.yaml", readModule)).NotTo(gomega.HaveOccurred()) readModule.Namespace = utils.GetControllerNamespace() g.Expect(cl.Create(context.TODO(), readModule)).NotTo(gomega.HaveOccurred(), "the read module could not be created") // Create a FybrikApplicationReconciler object with the scheme and fake client. r := createTestFybrikApplicationController(cl, s) req := reconcile.Request{ NamespacedName: namespaced, } _, err := r.Reconcile(context.Background(), req) g.Expect(err).To(gomega.BeNil()) err = cl.Get(context.TODO(), req.NamespacedName, application) g.Expect(err).To(gomega.BeNil(), "Cannot fetch fybrikapplication") // Expect an error g.Expect(getErrorMessages(application)).NotTo(gomega.BeEmpty()) }
explode_data.jsonl/45017
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 637 }
[ 2830, 3393, 2753, 4418, 1820, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 197, 322, 2573, 279, 5925, 311, 4401, 3856, 369, 13694, 18422, 624, 6725, 69, 4202, 7395...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScopesAllMatcher(t *testing.T) { for _, v := range []struct { name string scopesKey string claims map[string]interface{} requiredScopes []string expected bool }{ { name: "all_simple_success", scopesKey: "scope", claims: map[string]interface{}{"scope": "a b"}, requiredScopes: []string{"a", "b"}, expected: true, }, { name: "all_simple_fail", scopesKey: "scope", claims: map[string]interface{}{"scope": "a b"}, requiredScopes: []string{"c"}, expected: false, }, { name: "all_missingone_fail", scopesKey: "scope", claims: map[string]interface{}{"scope": "a b"}, requiredScopes: []string{"a", "b", "c"}, expected: false, }, { name: "all_one_simple_success", scopesKey: "scope", claims: map[string]interface{}{"scope": "a b"}, requiredScopes: []string{"b"}, expected: true, }, { name: "all_no_req_scopes_success", scopesKey: "scope", claims: map[string]interface{}{"scope": "a b"}, requiredScopes: []string{}, expected: true, }, { name: "all_struct_success", scopesKey: "data.scope", claims: map[string]interface{}{"data": map[string]interface{}{"scope": "a b"}}, requiredScopes: []string{"a", "b"}, expected: true, }, { name: "all_deep_struct_success", scopesKey: "data.data.data.data.data.data.data.scope", claims: map[string]interface{}{ "data": map[string]interface{}{ "data": map[string]interface{}{ "data": map[string]interface{}{ "data": map[string]interface{}{ "data": map[string]interface{}{ "data": map[string]interface{}{ "data": map[string]interface{}{ "scope": "a b", }, }, }, }, }, }, }, }, requiredScopes: []string{"a", "b"}, expected: true, }, } { t.Run(v.name, func(t *testing.T) { if res := ScopesAllMatcher(v.scopesKey, v.claims, v.requiredScopes); res != v.expected { t.Errorf("'%s' have %v, want %v", v.name, res, v.expected) } }) } }
explode_data.jsonl/67489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1161 }
[ 2830, 3393, 3326, 18523, 2403, 37554, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 348, 1669, 2088, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 29928, 18523, 1592, 414, 914, 198, 197, 197, 48561, 260, 2415, 14032, 31344, 16094, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestToHex(t *testing.T) { if h := ToHex(refClr); h != "8E0CF2" { t.Errorf("result color hex was %s", h) } if h := ToHex(&color.RGBA{0, 0, 0, 255}); h != "000000" { t.Errorf("result color hex was %s", h) } }
explode_data.jsonl/37826
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 1249, 20335, 1155, 353, 8840, 836, 8, 341, 743, 305, 1669, 2014, 20335, 13321, 90431, 1215, 305, 961, 330, 23, 36, 15, 9650, 17, 1, 341, 197, 3244, 13080, 445, 1382, 1894, 12371, 572, 1018, 82, 497, 305, 340, 197, 630, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStorePointerToStruct(t *testing.T) { tc := New(DefaultExpiration, 0) tc.Set(v1, &TestStruct{Num: 1}, DefaultExpiration) x, found := tc.Get(v1) if !found { t.Fatal("*TestStruct was not found for foo") } foo := x.(*TestStruct) foo.Num++ y, found := tc.Get(v1) if !found { t.Fatal("*TestStruct was not found for foo (second time)") } bar := y.(*TestStruct) if bar.Num != 2 { t.Fatal("TestStruct.Num is not 2") } }
explode_data.jsonl/63758
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 6093, 9084, 1249, 9422, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 1532, 87874, 66301, 11, 220, 15, 340, 78255, 4202, 3747, 16, 11, 609, 2271, 9422, 90, 4651, 25, 220, 16, 2137, 7899, 66301, 340, 10225, 11, 1730, 1669, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPoolError2(t *testing.T) { assert := assert.New(t) var ( panicCount = uint32(0) panicHandler = func(alias string, err interface{}) { atomic.AddUint32(&panicCount, 1) // Do work panic assert.Equal(alias, fmt.Sprintf("%s-%d", poolName, 1)) } pool = New(poolName, numWorkers, panicHandler) executed uint32 ) pool.Submit(func() { pool.wg.Done() atomic.AddUint32(&executed, 1) }, 1+rand.Float64()) time.Sleep(1 * time.Second) pool.ShutDown() assert.Greater(atomic.LoadUint32(&panicCount), uint32(0)) }
explode_data.jsonl/73115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 10551, 1454, 17, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 2405, 2399, 197, 30764, 2507, 256, 284, 2622, 18, 17, 7, 15, 340, 197, 30764, 3050, 284, 2915, 79965, 914, 11, 1848, 3749, 28875, 341, 298...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExtendSectorExpiration(t *testing.T) { periodOffset := abi.ChainEpoch(100) actor := newHarness(t, periodOffset) precommitEpoch := abi.ChainEpoch(1) builder := builderForHarness(actor). WithEpoch(precommitEpoch). WithBalance(bigBalance, big.Zero()) commitSector := func(t *testing.T, rt *mock.Runtime) *miner.SectorOnChainInfo { actor.constructAndVerify(rt) sectorInfo := actor.commitAndProveSectors(rt, 1, 100, nil) return sectorInfo[0] } t.Run("rejects negative extension", func(t *testing.T) { rt := builder.Build(t) sector := commitSector(t, rt) // attempt to shorten epoch newExpiration := sector.Expiration - abi.ChainEpoch(miner.WPoStProvingPeriod) params := &miner.ExtendSectorExpirationParams{ SectorNumber: sector.SectorNumber, NewExpiration: newExpiration, } rt.ExpectAbort(exitcode.ErrIllegalArgument, func() { actor.extendSector(rt, sector, 0, params) }) }) t.Run("rejects extension to invalid epoch", func(t *testing.T) { rt := builder.Build(t) sector := commitSector(t, rt) // attempt to extend to an epoch that is not a multiple of the proving period + the commit epoch extension := 42*miner.WPoStProvingPeriod + 1 newExpiration := sector.Expiration - abi.ChainEpoch(extension) params := &miner.ExtendSectorExpirationParams{ SectorNumber: sector.SectorNumber, NewExpiration: newExpiration, } rt.ExpectAbort(exitcode.ErrIllegalArgument, func() { actor.extendSector(rt, sector, extension, params) }) }) t.Run("rejects extension too far in future", func(t *testing.T) { rt := builder.Build(t) sector := commitSector(t, rt) // extend by even proving period after max rt.SetEpoch(sector.Expiration) extension := miner.WPoStProvingPeriod * (miner.MaxSectorExpirationExtension/miner.WPoStProvingPeriod + 1) newExpiration := rt.Epoch() + extension params := &miner.ExtendSectorExpirationParams{ SectorNumber: sector.SectorNumber, NewExpiration: newExpiration, } rt.ExpectAbort(exitcode.ErrIllegalArgument, func() { actor.extendSector(rt, sector, extension, params) }) }) 
t.Run("rejects extension past max for seal proof", func(t *testing.T) { rt := builder.Build(t) sector := commitSector(t, rt) rt.SetEpoch(sector.Expiration) maxLifetime := sector.SealProof.SectorMaximumLifetime() // extend sector until just below threshold extension := miner.WPoStProvingPeriod * (miner.MaxSectorExpirationExtension/miner.WPoStProvingPeriod - 1) expiration := rt.Epoch() + extension for ; expiration-sector.Activation < maxLifetime; expiration += extension { params := &miner.ExtendSectorExpirationParams{ SectorNumber: sector.SectorNumber, NewExpiration: expiration, } actor.extendSector(rt, sector, extension, params) rt.SetEpoch(expiration) } // next extension fails because it extends sector past max lifetime params := &miner.ExtendSectorExpirationParams{ SectorNumber: sector.SectorNumber, NewExpiration: expiration, } rt.ExpectAbort(exitcode.ErrIllegalArgument, func() { actor.extendSector(rt, sector, extension, params) }) }) t.Run("updates expiration with valid params", func(t *testing.T) { rt := builder.Build(t) oldSector := commitSector(t, rt) extension := 42 * miner.WPoStProvingPeriod newExpiration := oldSector.Expiration + extension params := &miner.ExtendSectorExpirationParams{ SectorNumber: oldSector.SectorNumber, NewExpiration: newExpiration, } actor.extendSector(rt, oldSector, extension, params) // assert sector expiration is set to the new value st := getState(rt) newSector := actor.getSector(rt, oldSector.SectorNumber) assert.Equal(t, newExpiration, newSector.Expiration) // assert that an expiration exists at the target epoch expirations, err := st.GetSectorExpirations(rt.AdtStore(), newExpiration) require.NoError(t, err) exists, err := expirations.IsSet(uint64(newSector.SectorNumber)) require.NoError(t, err) assert.True(t, exists) // assert that the expiration has been removed from the old epoch expirations, err = st.GetSectorExpirations(rt.AdtStore(), oldSector.Expiration) require.NoError(t, err) exists, err = 
expirations.IsSet(uint64(newSector.SectorNumber)) require.NoError(t, err) assert.False(t, exists) }) }
explode_data.jsonl/43147
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1597 }
[ 2830, 3393, 72136, 68188, 66301, 1155, 353, 8840, 836, 8, 341, 197, 19304, 6446, 1669, 61050, 98269, 44338, 7, 16, 15, 15, 340, 93410, 1669, 501, 74248, 1155, 11, 4168, 6446, 340, 40346, 17413, 44338, 1669, 61050, 98269, 44338, 7, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOCMProvider_ApplyResources(t *testing.T) { type fields struct { ocmClient ocm.Client } type args struct { clusterSpec *types.ClusterSpec resources types.ResourceSet } internalId := "test-internal-id" spec := &types.ClusterSpec{ InternalID: internalId, ExternalID: "", Status: "", AdditionalInfo: nil, } name := "test-resource-set" resources := types.ResourceSet{ Name: name, Resources: []interface{}{sampleProjectCR(), sampleOperatorGroup()}, } tests := []struct { name string fields fields args args want *types.ResourceSet wantErr bool }{ { name: "should create resource set", fields: fields{ ocmClient: &ocm.ClientMock{ GetSyncSetFunc: func(clusterID string, syncSetID string) (*clustersmgmtv1.Syncset, error) { return nil, apiErrors.NotFound("not found error") }, CreateSyncSetFunc: func(clusterID string, syncset *clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { Expect(syncset.ID()).To(Equal(resources.Name)) Expect(syncset.Resources()).To(Equal(resources.Resources)) return nil, nil }, UpdateSyncSetFunc: func(clusterID string, syncSetID string, syncset *clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { return nil, errors.Errorf("UpdateSyncSet should not be called") }, }, }, args: args{ clusterSpec: spec, resources: resources, }, want: &types.ResourceSet{ Name: name, Resources: []interface{}{sampleProjectCR(), sampleOperatorGroup()}, }, wantErr: false, }, { name: "should update resource set if ResourceSet is changed", fields: fields{ ocmClient: &ocm.ClientMock{ GetSyncSetFunc: func(clusterID string, syncSetID string) (*clustersmgmtv1.Syncset, error) { p, _ := runtime.DefaultUnstructuredConverter.ToUnstructured(sampleProjectCR()) return clustersmgmtv1.NewSyncset().ID(name).Resources(p).Build() }, CreateSyncSetFunc: func(clusterID string, syncset *clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { return nil, errors.New("CreateSyncSet should not be called") }, UpdateSyncSetFunc: func(clusterID string, syncSetID string, syncset 
*clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { Expect(syncset.Resources()).To(Equal(resources.Resources)) return nil, nil }, }, }, args: args{ clusterSpec: spec, resources: resources, }, want: &types.ResourceSet{ Name: name, Resources: []interface{}{sampleProjectCR(), sampleOperatorGroup()}, }, wantErr: false, }, { name: "should not update resource set if ResourceSet is not changed", fields: fields{ ocmClient: &ocm.ClientMock{ GetSyncSetFunc: func(clusterID string, syncSetID string) (*clustersmgmtv1.Syncset, error) { p, _ := runtime.DefaultUnstructuredConverter.ToUnstructured(sampleProjectCR()) g, _ := runtime.DefaultUnstructuredConverter.ToUnstructured(sampleOperatorGroup()) return clustersmgmtv1.NewSyncset().ID(name).Resources(p, g).Build() }, CreateSyncSetFunc: func(clusterID string, syncset *clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { return nil, errors.New("CreateSyncSet should not be called") }, UpdateSyncSetFunc: func(clusterID string, syncSetID string, syncset *clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { return nil, errors.New("UpdateSyncSetFunc should not be called") }, }, }, args: args{ clusterSpec: spec, resources: resources, }, want: &types.ResourceSet{ Name: name, Resources: []interface{}{sampleProjectCR(), sampleOperatorGroup()}, }, wantErr: false, }, { name: "should return error when get resources failed", fields: fields{ ocmClient: &ocm.ClientMock{ GetSyncSetFunc: func(clusterID string, syncSetID string) (*clustersmgmtv1.Syncset, error) { return nil, errors.Errorf("error") }, CreateSyncSetFunc: func(clusterID string, syncset *clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { return nil, errors.Errorf("CreateSyncSet should not be called") }, UpdateSyncSetFunc: func(clusterID string, syncSetID string, syncset *clustersmgmtv1.Syncset) (*clustersmgmtv1.Syncset, error) { return nil, errors.Errorf("UpdateSyncSet should not be called") }, }, }, args: args{ clusterSpec: spec, resources: resources, }, want: 
nil, wantErr: false, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { RegisterTestingT(t) p := newOCMProvider(test.fields.ocmClient, nil, &ocm.OCMConfig{}) resp, err := p.ApplyResources(test.args.clusterSpec, test.args.resources) Expect(resp).To(Equal(test.want)) if test.wantErr { Expect(err).NotTo(BeNil()) } }) } }
explode_data.jsonl/4835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2036 }
[ 2830, 3393, 7612, 44, 5179, 36117, 398, 11277, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 197, 509, 76, 2959, 297, 6226, 11716, 198, 197, 532, 13158, 2827, 2036, 341, 197, 197, 18855, 8327, 353, 9242, 72883, 8327, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFrom(t *testing.T) { b := &Builder{flags: &BFlags{}, runConfig: &container.Config{}, disableCommit: true} err := from(b, []string{"scratch"}, nil, "") if runtime.GOOS == "windows" { if err == nil { t.Fatalf("Error not set on Windows") } expectedError := "Windows does not support FROM scratch" if !strings.Contains(err.Error(), expectedError) { t.Fatalf("Error message not correct on Windows. Should be: %s, got: %s", expectedError, err.Error()) } } else { if err != nil { t.Fatalf("Error when executing from: %s", err.Error()) } if b.image != "" { t.Fatalf("Image shoule be empty, got: %s", b.image) } if b.noBaseImage != true { t.Fatalf("Image should not have any base image, got: %v", b.noBaseImage) } } }
explode_data.jsonl/28276
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 293 }
[ 2830, 3393, 3830, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 609, 3297, 90, 11161, 25, 609, 33, 9195, 22655, 1598, 2648, 25, 609, 3586, 10753, 22655, 11156, 33441, 25, 830, 630, 9859, 1669, 504, 1883, 11, 3056, 917, 4913, 54055, 14345,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestManifestGenerateGateway(t *testing.T) { runTestGroup(t, testGroup{ { desc: "ingressgateway_k8s_settings", diffSelect: "Deployment:*:istio-ingressgateway, Service:*:istio-ingressgateway", }, }) }
explode_data.jsonl/47911
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 38495, 31115, 40709, 1155, 353, 8840, 836, 8, 341, 56742, 2271, 2808, 1155, 11, 1273, 2808, 515, 197, 197, 515, 298, 41653, 25, 981, 330, 287, 673, 46473, 4698, 23, 82, 10853, 756, 298, 80564, 3379, 25, 330, 75286, 53386, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunFailsWithImpossibleTest(t *testing.T) { logs := []string{} config := &Config{ args: []string{".binary", "0"}, logf: func(f string, a ...interface{}) { fmt.Printf(f+"\n", a...) logs = append(logs, fmt.Sprintf(f, a...)) }, puzzles: testPuzzles(), } err := Run(config) require.Error(t, err) assert.Contains(t, err.Error(), "you cannot specify test 0 or under") }
explode_data.jsonl/67965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 6727, 37, 6209, 2354, 89655, 2271, 1155, 353, 8840, 836, 8, 341, 6725, 82, 1669, 3056, 917, 16094, 25873, 1669, 609, 2648, 515, 197, 31215, 25, 3056, 917, 90, 3263, 25891, 497, 330, 15, 7115, 197, 6725, 69, 25, 2915, 955...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAccKeycloakOpenidClient_baseUrl(t *testing.T) { realmName := "terraform-" + acctest.RandString(10) clientId := "terraform-" + acctest.RandString(10) baseUrl := "https://www.example.com" resource.Test(t, resource.TestCase{ ProviderFactories: testAccProviderFactories, PreCheck: func() { testAccPreCheck(t) }, CheckDestroy: testAccCheckKeycloakOpenidClientDestroy(), Steps: []resource.TestStep{ { Config: testKeycloakOpenidClient_baseUrl(realmName, clientId, baseUrl), Check: testAccCheckKeycloakOpenidClientBaseUrl("keycloak_openid_client.client", baseUrl), }, }, }) }
explode_data.jsonl/52129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 14603, 1592, 88751, 5002, 307, 2959, 7651, 2864, 1155, 353, 8840, 836, 8, 341, 17200, 7673, 675, 1669, 330, 61385, 27651, 488, 1613, 67880, 2013, 437, 703, 7, 16, 15, 340, 25291, 764, 1669, 330, 61385, 27651, 488, 1613, 67...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewGoogleAuthenticator_Garbage(t *testing.T) { asserter := assert.New(t) reader := rand.Reader bitSize := 2048 start := time.Now() expires := time.Now().Add(-time.Second) clientId := "testyMcTesterson" keyOne, err := rsa.GenerateKey(reader, bitSize) if err != nil { panic(err) } certFetcher := func(ctx context.Context) (GooglePublicCerts, error) { return GooglePublicCerts{ Certs: []*x509.Certificate{ createCert(keyOne, 123, start, expires), }, Expiration: expires, }, nil } getRoles := RoleOracle(func(ctx context.Context, emailAddress string) []Role { asserter.Fail("should not be here") return make([]Role, 0) }) testInstance := NewGoogleAuthenticator(clientId, certFetcher, getRoles) _, err = testInstance(context.Background(), "wtfisthis?") asserter.EqualError(err, "garbage token: format (wtfisthis?)") _, err = testInstance(context.Background(), "YWJj.YWJj.###") asserter.EqualError(err, "garbage token: signature malformed (YWJj.YWJj.###)") _, err = testInstance(context.Background(), "YWJj.###.YWJj") asserter.EqualError(err, "garbage token: payload malformed (YWJj.###.YWJj)") }
explode_data.jsonl/7228
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 3564, 14444, 5087, 61393, 2646, 277, 20652, 1155, 353, 8840, 836, 8, 341, 197, 33758, 465, 1669, 2060, 7121, 1155, 692, 61477, 1669, 10382, 47431, 198, 79980, 1695, 1669, 220, 17, 15, 19, 23, 271, 21375, 1669, 882, 13244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBigInt(t *testing.T) { tests := []string{ // 16 0xFFFFFFFF 32-bit uintptrs strings.Repeat("FFFFFFFF", 16), // 17 32-bit uintptrs, minimum value which enters loop on 32-bit "01" + strings.Repeat("00000000", 16), // 32 0xFFFFFFFF 32-bit uintptrs, maximum value which enters loop exactly once on 32-bit strings.Repeat("FFFFFFFF", 32), // 33 32-bit uintptrs, minimum value which enters loop twice on 32-bit "01" + strings.Repeat("00000000", 32), // 16 0xFFFFFFFFFFFFFFFF 64-bit uintptrs strings.Repeat("FFFFFFFFFFFFFFFF", 16), // 17 64-bit uintptrs, minimum value which enters loop on 64-bit "01" + strings.Repeat("0000000000000000", 16), // 32 0xFFFFFFFFFFFFFFFF 64-bit uintptrs, maximum value which enters loop exactly once on 64-bit strings.Repeat("FFFFFFFFFFFFFFFF", 32), // 33 64-bit uintptrs, minimum value which enters loop twice on 64-bit "01" + strings.Repeat("0000000000000000", 32), } for i, s := range tests { v, ok := new(big.Int).SetString(s, 16) if !ok { t.Errorf("Test %d includes invalid hex number %s", i, s) continue } BigInt(v) err := checkZeroWords(v.Bits()) if err != nil { t.Errorf("Test %d (s=%s) failed: %v", i, s, err) continue } if v.Cmp(bigZero) != 0 { t.Errorf("Test %d (s=%s) zeroed big.Int represents non-zero number %v", i, s, v) continue } } }
explode_data.jsonl/62541
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 511 }
[ 2830, 3393, 87474, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 917, 515, 197, 197, 322, 220, 16, 21, 220, 15, 22620, 220, 18, 17, 15257, 38190, 82, 198, 197, 11355, 819, 2817, 10979, 445, 98843, 497, 220, 16, 21, 18459, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5