text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func Test_New(t *testing.T) { t.Parallel() a := assert.New(t) provider := googleProvider() a.Equal(provider.ClientKey, domain.Env.GoogleKey) a.Equal(provider.Secret, domain.Env.GoogleSecret) a.Equal(provider.CallbackURL, "/foo") }
explode_data.jsonl/19855
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 39582, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 11323, 1669, 2060, 7121, 1155, 692, 197, 19979, 1669, 11558, 5179, 741, 11323, 12808, 50886, 11716, 1592, 11, 7947, 81214, 60393, 1592, 340, 11323, 12808, 50886, 747...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestOpen(t *testing.T) { wb, err := document.Open("testdata/simple-1.docx") if err != nil { t.Errorf("error opening document: %s", err) } got := bytes.Buffer{} if err := wb.Validate(); err != nil { t.Errorf("created an invalid document: %s", err) } wb.Save(&got) testhelper.CompareZip(t, "simple-1.docx", got.Bytes(), true) }
explode_data.jsonl/61205
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 5002, 1155, 353, 8840, 836, 8, 341, 6692, 65, 11, 1848, 1669, 2197, 12953, 445, 92425, 67195, 12, 16, 23671, 87, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 841, 8568, 2197, 25, 1018, 82, 497, 1848, 340, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_Token_TextNotEquals_CN(t *testing.T) { token := Token{ text: []Text{ []byte("中国"), []byte("文字"), }, } assert.False(t, token.TextEquals("中国文字1")) }
explode_data.jsonl/78430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 1139, 1679, 20550, 2623, 4315, 56621, 1155, 353, 8840, 836, 8, 341, 43947, 1669, 9660, 515, 197, 15425, 25, 3056, 1178, 515, 298, 197, 1294, 3782, 445, 58695, 4461, 298, 197, 1294, 3782, 445, 87335, 4461, 197, 197, 1583, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestListOrgManagers(t *testing.T) { Convey("Get Org Managers for an org", t, func() { setup(MockRoute{"GET", "/v2/organizations/foo/managers", []string{listOrgPeoplePayload}, "", 200, "", nil}, t) defer teardown() c := &Config{ ApiAddress: server.URL, Token: "foobar", } client, err := NewClient(c) So(err, ShouldBeNil) managers, err := client.ListOrgManagers("foo") So(err, ShouldBeNil) So(len(managers), ShouldEqual, 2) So(managers[0].Username, ShouldEqual, "user1") So(managers[1].Username, ShouldEqual, "user2") }) }
explode_data.jsonl/4431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 852, 42437, 1658, 11218, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 1949, 33706, 61434, 369, 458, 1240, 497, 259, 11, 2915, 368, 341, 197, 84571, 66436, 4899, 4913, 3806, 497, 3521, 85, 17, 14, 69253, 60555, 72192, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestObserverNotify(t *testing.T) { conn := &MockNotifyConn{} evt, err := NewEvent([]byte(`{"kind": "test", "created": "2017-01-01T00:00:00Z", "meta": {}}`)) if err != nil { t.Fatal(err) } observer, err := NewObserver([]byte(`{"events": ["test"], "id": "123"}`), conn) if err != nil { t.Fatal(err) } if err = observer.Notify(evt); err != nil { t.Fatal(err) } if conn.Called != true { t.Fatal("expected observer write message to have been called") } }
explode_data.jsonl/27814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 17151, 28962, 1155, 353, 8840, 836, 8, 341, 32917, 1669, 609, 11571, 28962, 9701, 16094, 197, 28734, 11, 1848, 1669, 1532, 1556, 10556, 3782, 5809, 4913, 15314, 788, 330, 1944, 497, 330, 7120, 788, 330, 17, 15, 16, 22, 12,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTransferPlayback(t *testing.T) { client, server := testClientString(http.StatusNoContent, "") defer server.Close() err := client.TransferPlayback("newdevice", true) if err != nil { t.Error(err) } }
explode_data.jsonl/80122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 21970, 87125, 1155, 353, 8840, 836, 8, 341, 25291, 11, 3538, 1669, 1273, 2959, 703, 19886, 10538, 2753, 2762, 11, 14676, 16867, 3538, 10421, 2822, 9859, 1669, 2943, 95802, 87125, 445, 931, 6111, 497, 830, 340, 743, 1848, 961...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestBinaryCondition(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockFilter := index.NewMockSeriesIDsFilter(ctrl) // and query, _ := sql.Parse("select f from cpu " + "where ip='1.1.1.1' and path='/data' and time>'20190410 00:00:00' and time<'20190410 10:00:00'") mockFilter.EXPECT(). FindSeriesIDsByExpr(uint32(1), &stmt.EqualsExpr{Key: "ip", Value: "1.1.1.1"}, query.TimeRange). Return(mockSeriesIDSet(int64(11), roaring.BitmapOf(1, 2, 3, 4)), nil) mockFilter.EXPECT(). FindSeriesIDsByExpr(uint32(1), &stmt.EqualsExpr{Key: "path", Value: "/data"}, query.TimeRange). Return(mockSeriesIDSet(int64(11), roaring.BitmapOf(3, 5)), nil) search := newSeriesSearch(1, mockFilter, query) resultSet, _ := search.Search() assert.Equal(t, *mockSeriesIDSet(int64(11), roaring.BitmapOf(3)), *resultSet) // or mockFilter2 := index.NewMockSeriesIDsFilter(ctrl) query, _ = sql.Parse("select f from cpu " + "where ip='1.1.1.1' or path='/data' and time>'20190410 00:00:00' and time<'20190410 10:00:00'") mockFilter2.EXPECT(). FindSeriesIDsByExpr(uint32(1), &stmt.EqualsExpr{Key: "ip", Value: "1.1.1.1"}, query.TimeRange). Return(mockSeriesIDSet(int64(11), roaring.BitmapOf(1, 2, 3, 4)), nil) mockFilter2.EXPECT(). FindSeriesIDsByExpr(uint32(1), &stmt.EqualsExpr{Key: "path", Value: "/data"}, query.TimeRange). Return(mockSeriesIDSet(int64(11), roaring.BitmapOf(3, 5)), nil) search = newSeriesSearch(1, mockFilter2, query) resultSet, _ = search.Search() assert.Equal(t, *mockSeriesIDSet(int64(11), roaring.BitmapOf(1, 2, 3, 4, 5)), *resultSet) // error mockFilter3 := index.NewMockSeriesIDsFilter(ctrl) mockFilter3.EXPECT(). FindSeriesIDsByExpr(uint32(1), &stmt.EqualsExpr{Key: "ip", Value: "1.1.1.1"}, query.TimeRange). 
Return(nil, errors.New("left error")) search = newSeriesSearch(1, mockFilter3, query) resultSet, err := search.Search() assert.Nil(t, resultSet) assert.NotNil(t, err) mockFilter4 := index.NewMockSeriesIDsFilter(ctrl) query, _ = sql.Parse("select f from cpu " + "where ip='1.1.1.1' or path='/data' and time>'20190410 00:00:00' and time<'20190410 10:00:00'") mockFilter4.EXPECT(). FindSeriesIDsByExpr(uint32(1), &stmt.EqualsExpr{Key: "ip", Value: "1.1.1.1"}, query.TimeRange). Return(mockSeriesIDSet(int64(11), roaring.BitmapOf(1, 2, 3, 4)), nil) mockFilter4.EXPECT(). FindSeriesIDsByExpr(uint32(1), &stmt.EqualsExpr{Key: "path", Value: "/data"}, query.TimeRange). Return(nil, errors.New("right error")) search = newSeriesSearch(1, mockFilter4, query) resultSet, err = search.Search() assert.Nil(t, resultSet) assert.NotNil(t, err) }
explode_data.jsonl/31352
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1089 }
[ 2830, 3393, 21338, 10547, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 741, 77333, 5632, 1669, 1922, 7121, 11571, 25544, 30466, 5632, 62100, 692, 197, 322, 323, 198, 27274, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_server_ReadyHandler(t *testing.T) { type fields struct { externalAlive chan bool isAlive bool isReady bool pingChannel chan bool pingInterval time.Duration updateReady chan bool server *http.Server } type args struct { method string path string } tests := []struct { name string fields fields args args want int }{ { name: "ReadyHandler_IsReady", fields: fields{isReady: true}, args: args{method: "GET", path: "/ready"}, want: http.StatusOK, }, { name: "ReadyHandler_IsNotReady", fields: fields{isReady: false}, args: args{method: "GET", path: "/ready"}, want: http.StatusServiceUnavailable, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { s := &server{ externalAlive: tt.fields.externalAlive, isAlive: tt.fields.isAlive, isReady: tt.fields.isReady, pingChannel: tt.fields.pingChannel, pingInterval: tt.fields.pingInterval, updateReady: tt.fields.updateReady, server: tt.fields.server, } req, err := http.NewRequest(tt.args.method, tt.args.path, nil) if err != nil { t.Fatal(err) } rr := httptest.NewRecorder() handler := http.HandlerFunc(s.ReadyHandler) handler.ServeHTTP(rr, req) if status := rr.Code; status != tt.want { t.Errorf("handler returned wrong status code: got %v want %v", status, tt.want) } }) } }
explode_data.jsonl/58773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 651 }
[ 2830, 3393, 12015, 62, 19202, 3050, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 197, 20921, 32637, 26023, 1807, 198, 197, 19907, 32637, 981, 1807, 198, 197, 19907, 19202, 981, 1807, 198, 197, 3223, 287, 9629, 256, 26023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIsUTF8(t *testing.T) { var cases = []struct { name string want bool }{ {"unicode", true}, {"utf-8", true}, {"utf_8", true}, {"UTF-8", true}, {"UTF8", true}, {"utf8", true}, {"u n ic_ode", true}, {"ut_f%8", true}, {"ubf8", false}, {"punycode", false}, } for _, test := range cases { if g := isUTF8(test.name); g != test.want { t.Errorf("isUTF8(%q) = %v want %v", test.name, g, test.want) } } }
explode_data.jsonl/63447
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 3872, 8561, 23, 1155, 353, 8840, 836, 8, 341, 2405, 5048, 284, 3056, 1235, 341, 197, 11609, 914, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 4913, 24519, 497, 830, 1583, 197, 197, 4913, 4762, 12, 23, 497, 830, 1583...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidateMixerService(t *testing.T) { cases := []struct { name string in *mccpb.IstioService valid bool }{ { name: "no name and service", in: &mccpb.IstioService{}, }, { name: "specify both name and service", in: &mccpb.IstioService{Service: "test-service-service", Name: "test-service-name"}, }, { name: "specify both namespace and service", in: &mccpb.IstioService{Service: "test-service-service", Namespace: "test-service-namespace"}, }, { name: "specify both domain and service", in: &mccpb.IstioService{Service: "test-service-service", Domain: "test-service-domain"}, }, { name: "invalid name label", in: &mccpb.IstioService{Name: strings.Repeat("x", 64)}, }, { name: "invalid namespace label", in: &mccpb.IstioService{Name: "test-service-name", Namespace: strings.Repeat("x", 64)}, }, { name: "invalid domian or labels", in: &mccpb.IstioService{Name: "test-service-name", Domain: strings.Repeat("x", 256)}, }, { name: "valid", in: validService, valid: true, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { if got := ValidateMixerService(c.in); (got == nil) != c.valid { t.Errorf("ValidateMixerService(%v): got(%v) != want(%v): %v", c.name, got == nil, c.valid, got) } }) } }
explode_data.jsonl/56935
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 594 }
[ 2830, 3393, 17926, 44, 39014, 1860, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 17430, 262, 353, 76, 638, 16650, 2447, 267, 815, 1860, 198, 197, 56322, 1807, 198, 197, 59403, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestResponseRouterFatalErrors(t *testing.T) { fatalErrors := []error{ amqp.ErrLinkClosed, amqp.ErrLinkDetached, amqp.ErrConnClosed, amqp.ErrSessionClosed, } for _, fatalError := range fatalErrors { t.Run(fatalError.Error(), func(t *testing.T) { receiver := &fakeReceiver{ Responses: []rpcResponse{ {nil, fatalError}, }, } sentinelCh := make(chan rpcResponse, 1) link := &Link{ responseMap: map[string]chan rpcResponse{ "sentinel": sentinelCh, }, receiver: receiver, } link.startResponseRouter() require.Empty(t, receiver.Responses) // also, we should have broadcasted that the link is closed to anyone else // that had not yet received a response but was still waiting. select { case rpcResponse := <-sentinelCh: require.Error(t, rpcResponse.err, fatalError.Error()) require.Nil(t, rpcResponse.message) case <-time.After(time.Second * 5): require.Fail(t, "sentinel channel should have received a message") } }) } }
explode_data.jsonl/57204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 419 }
[ 2830, 3393, 2582, 9523, 62396, 13877, 1155, 353, 8840, 836, 8, 341, 1166, 4212, 13877, 1669, 3056, 841, 515, 197, 197, 309, 32763, 27862, 3939, 26884, 345, 197, 197, 309, 32763, 27862, 3939, 17076, 3854, 345, 197, 197, 309, 32763, 27862...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_toTitle(t *testing.T) { tests := []struct { s string want string }{ { s: " lorem! IPSUM. doLOR, sIT& aMeT_ ", want: " Lorem! Ipsum. Dolor, Sit& Amet_ ", }, { s: "hello World", want: "Hello World", }, { s: " FOO BAR", want: " Foo Bar", }, } for _, tt := range tests { t.Run(tt.s, func(t *testing.T) { if got := toTitle(tt.s); got != tt.want { t.Errorf("toTitle() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/67559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 2346, 3851, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 1903, 262, 914, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 515, 298, 1903, 25, 262, 330, 92385, 0, 63157, 2794, 13, 653, 43, 868, 11, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFakeKafkaProducer_WaitForKey(t *testing.T) { log := logrus.New() c, p := KafkaPipe(log) defer p.Close() defer c.Close() ctx := context.Background() err := p.Produce(ctx, &kafkalib.Message{ Key: []byte(`key1`), Value: []byte(`val1`), }) require.NoError(t, err) err = p.Produce(ctx, &kafkalib.Message{ Key: []byte(`key2`), Value: []byte(`val2`), }) require.NoError(t, err) msg, err := c.FetchMessage(ctx) require.NoError(t, err) require.Equal(t, "key1", string(msg.Key)) require.Equal(t, "val1", string(msg.Value)) msg, err = c.FetchMessage(ctx) require.NoError(t, err) require.Equal(t, "key2", string(msg.Key)) require.Equal(t, "val2", string(msg.Value)) require.NoError(t, c.CommitMessage(msg)) require.True(t, p.WaitForKey([]byte(`key2`))) }
explode_data.jsonl/35469
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 52317, 42, 21883, 45008, 2763, 1315, 14954, 1155, 353, 8840, 836, 8, 341, 6725, 1669, 1487, 20341, 7121, 741, 1444, 11, 281, 1669, 57025, 34077, 12531, 340, 16867, 281, 10421, 741, 16867, 272, 10421, 2822, 20985, 1669, 2266, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateSession_DontSwallowError(t *testing.T) { t.Skip("This test is bad, and the resultant error from cassandra changes between versions") cluster := createCluster() cluster.ProtoVersion = 0x100 session, err := cluster.CreateSession() if err == nil { session.Close() t.Fatal("expected to get an error for unsupported protocol") } if flagCassVersion.Major < 3 { // TODO: we should get a distinct error type here which include the underlying // cassandra error about the protocol version, for now check this here. if !strings.Contains(err.Error(), "Invalid or unsupported protocol version") { t.Fatalf(`expcted to get error "unsupported protocol version" got: %q`, err) } } else { if !strings.Contains(err.Error(), "unsupported response version") { t.Fatalf(`expcted to get error "unsupported response version" got: %q`, err) } } }
explode_data.jsonl/11190
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 273 }
[ 2830, 3393, 4021, 5283, 1557, 544, 13218, 7183, 1454, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 1986, 1273, 374, 3873, 11, 323, 279, 83813, 1465, 504, 44224, 23274, 4344, 1948, 10795, 1138, 197, 18855, 1669, 1855, 28678, 741, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestNewNoHysteresis(t *testing.T) { exp := fmt.Errorf("my bad %d", 42) err := errstack.New(exp) if err == nil { t.Fatalf("expected a non-nil error. got=%#v\n", err) } errs := err.(*errstack.Error) n := len(errs.Stack) err = newerr(err) if err == nil { t.Fatalf("expected a non-nil error. got=%#v\n", err) } errs = err.(*errstack.Error) if n != len(errs.Stack) { t.Fatalf("hysteresis detected:\nold-stack=%d\nnew-stack=%d\n", n, len(errs.Stack), ) } err = newerr(errs.Err) errs = err.(*errstack.Error) if n == len(errs.Stack) { t.Fatalf("hysteresis error:\nold-stack=%d\nnew-stack=%d\n%v\n", n, len(errs.Stack), errs, ) } }
explode_data.jsonl/39238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 3564, 2753, 39, 597, 12917, 285, 1155, 353, 8840, 836, 8, 341, 48558, 1669, 8879, 13080, 445, 2408, 3873, 1018, 67, 497, 220, 19, 17, 692, 9859, 1669, 1848, 7693, 7121, 25865, 340, 743, 1848, 621, 2092, 341, 197, 3244, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestContributorStats_String(t *testing.T) { v := ContributorStats{ Author: &Contributor{}, Total: Int(0), } want := `github.ContributorStats{Author:github.Contributor{}, Total:0}` if got := v.String(); got != want { t.Errorf("ContributorStats.String = %v, want %v", got, want) } }
explode_data.jsonl/33231
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 52984, 4831, 16635, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 64724, 16635, 515, 197, 197, 7133, 25, 609, 52984, 4831, 38837, 197, 197, 7595, 25, 220, 1333, 7, 15, 1326, 197, 532, 50780, 1669, 1565, 5204, 4801, 1332...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewTensor(t *testing.T) { var tests = []struct { shape []int64 value interface{} }{ {nil, bool(true)}, {nil, int8(5)}, {nil, int16(5)}, {nil, int32(5)}, {nil, int64(5)}, {nil, uint8(5)}, {nil, uint16(5)}, {nil, uint32(5)}, {nil, uint64(5)}, {nil, float32(5)}, {nil, float64(5)}, {nil, complex(float32(5), float32(6))}, {nil, complex(float64(5), float64(6))}, {nil, "a string"}, {[]int64{1}, []uint32{1}}, {[]int64{1}, []uint64{1}}, {[]int64{2}, []bool{true, false}}, {[]int64{1}, []float64{1}}, {[]int64{1}, [1]float64{1}}, {[]int64{1, 1}, [1][1]float64{{1}}}, {[]int64{1, 1, 1}, [1][1][]float64{{{1}}}}, {[]int64{1, 1, 2}, [1][][2]float64{{{1, 2}}}}, {[]int64{1, 1, 1, 1}, [1][][1][]float64{{{{1}}}}}, {[]int64{2}, []string{"string", "slice"}}, {[]int64{2}, [2]string{"string", "array"}}, {[]int64{3, 2}, [][]float64{{1, 2}, {3, 4}, {5, 6}}}, {[]int64{2, 3}, [2][3]float64{{1, 2, 3}, {3, 4, 6}}}, {[]int64{4, 3, 2}, [][][]float64{ {{1, 2}, {3, 4}, {5, 6}}, {{7, 8}, {9, 10}, {11, 12}}, {{0, -1}, {-2, -3}, {-4, -5}}, {{-6, -7}, {-8, -9}, {-10, -11}}, }}, {[]int64{2, 0}, [][]int64{{}, {}}}, {[]int64{2, 2}, [][]string{{"row0col0", "row0,col1"}, {"row1col0", "row1,col1"}}}, {[]int64{2, 3}, [2][3]string{ {"row0col0", "row0,col1", "row0,col2"}, {"row1col0", "row1,col1", "row1,col2"}, }}, } var errorTests = []interface{}{ struct{ a int }{5}, new(int32), new([]int32), // native ints not supported int(5), []int{5}, // Mismatched dimensions [][]float32{{1, 2, 3}, {4}}, // Mismatched dimensions. Should return "mismatched slice lengths" error instead of "BUG" [][][]float32{{{1, 2}, {3, 4}}, {{1}, {3}}}, // Mismatched dimensions. 
Should return error instead of valid tensor [][][]float32{{{1, 2}, {3, 4}}, {{1}, {3}}, {{1, 2, 3}, {2, 3, 4}}}, // Mismatched dimensions for strings [][]string{{"abc"}, {"abcd", "abcd"}}, } for _, test := range tests { tensor, err := NewTensor(test.value) if err != nil { t.Errorf("NewTensor(%v): %v", test.value, err) continue } if !reflect.DeepEqual(test.shape, tensor.Shape()) { t.Errorf("Tensor.Shape(): got %v, want %v", tensor.Shape(), test.shape) } // Test that encode and decode gives the same value. We skip arrays because // they're returned as slices. if reflect.TypeOf(test.value).Kind() != reflect.Array { got := tensor.Value() if !reflect.DeepEqual(test.value, got) { t.Errorf("encode/decode: got %v, want %v", got, test.value) } } } for _, test := range errorTests { tensor, err := NewTensor(test) if err == nil { t.Errorf("NewTensor(%v): %v", test, err) } if tensor != nil { t.Errorf("NewTensor(%v) = %v, want nil", test, tensor) } } }
explode_data.jsonl/45865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1367 }
[ 2830, 3393, 3564, 25336, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 197, 12231, 3056, 396, 21, 19, 198, 197, 16309, 3749, 16094, 197, 59403, 197, 197, 90, 8385, 11, 1807, 3715, 39781, 197, 197, 90, 8385, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestCaptivePrepareRange_ErrCatchup(t *testing.T) { mockRunner := &stellarCoreRunnerMock{} mockRunner.On("catchup", uint32(100), uint32(192)).Return(errors.New("transient error")).Once() mockRunner.On("close").Return(nil).Once() mockArchive := &historyarchive.MockArchive{} mockArchive. On("GetRootHAS"). Return(historyarchive.HistoryArchiveState{ CurrentLedger: uint32(192), }, nil) ctx := context.Background() cancelCalled := false captiveBackend := CaptiveStellarCore{ archive: mockArchive, stellarCoreRunnerFactory: func(_ stellarCoreRunnerMode) (stellarCoreRunnerInterface, error) { return mockRunner, nil }, cancel: context.CancelFunc(func() { cancelCalled = true }), } err := captiveBackend.PrepareRange(ctx, BoundedRange(100, 192)) assert.EqualError(t, err, "error starting prepare range: opening subprocess: error running stellar-core: transient error") // make sure we can Close without errors assert.NoError(t, captiveBackend.Close()) assert.True(t, cancelCalled) mockArchive.AssertExpectations(t) mockRunner.AssertExpectations(t) }
explode_data.jsonl/7319
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 382 }
[ 2830, 3393, 34, 27781, 50590, 6046, 93623, 57760, 454, 1155, 353, 8840, 836, 8, 341, 77333, 19486, 1669, 609, 77293, 5386, 19486, 11571, 16094, 77333, 19486, 8071, 445, 7173, 454, 497, 2622, 18, 17, 7, 16, 15, 15, 701, 2622, 18, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypeLoader(t *testing.T) { gopClTest(t, `import "fmt" func (p *Point) String() string { return fmt.Sprintf("%v-%v",p.X,p.Y) } type Point struct { X int Y int } `, `package main import fmt "fmt" type Point struct { X int Y int } func (p *Point) String() string { return fmt.Sprintf("%v-%v", p.X, p.Y) } `) }
explode_data.jsonl/73709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 929, 9181, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 1565, 474, 330, 12501, 1837, 2830, 320, 79, 353, 2609, 8, 923, 368, 914, 341, 853, 8879, 17305, 4430, 85, 11069, 85, 497, 79, 4338, 7237, 7507, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypeSystem_EnumTypesMustBeWellDefined_RejectsAnEnumTypeWithoutValues(t *testing.T) { _, err := schemaWithFieldType(graphql.NewEnum(graphql.EnumConfig{ Name: "SomeEnum", })) expectedError := `SomeEnum values must be an object with value names as keys.` if err == nil || err.Error() != expectedError { t.Fatalf("Expected error: %v, got %v", expectedError, err) } }
explode_data.jsonl/79174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 929, 2320, 62, 10766, 4173, 31776, 3430, 11395, 29361, 50693, 583, 82, 2082, 10766, 929, 26040, 6227, 1155, 353, 8840, 836, 8, 1476, 197, 6878, 1848, 1669, 10802, 2354, 63733, 24312, 1470, 7121, 10766, 24312, 1470, 43225, 2648...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateTaskRejectsInvalidName(t *testing.T) { serv, client := setUp(t) defer tearDown(t, serv) createdQueue := createTestQueue(t, client) createTaskRequest := taskspb.CreateTaskRequest{ Parent: createdQueue.GetName(), Task: &taskspb.Task{ Name: "is-this-a-name", MessageType: &taskspb.Task_HttpRequest{ HttpRequest: &taskspb.HttpRequest{ Url: "http://www.google.com", }, }, }, } createdTask, err := client.CreateTask(context.Background(), &createTaskRequest) assert.Nil(t, createdTask) if assert.Error(t, err, "Should return error") { rsp, ok := grpcStatus.FromError(err) assert.True(t, ok, "Should be grpc error") assert.Regexp(t, "^Task name must be formatted", rsp.Message()) assert.Equal(t, grpcCodes.InvalidArgument, rsp.Code()) } }
explode_data.jsonl/72417
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 309 }
[ 2830, 3393, 4021, 6262, 78413, 82, 7928, 675, 1155, 353, 8840, 836, 8, 341, 1903, 648, 11, 2943, 1669, 18620, 1155, 340, 16867, 32825, 1155, 11, 4853, 692, 197, 7120, 7554, 1669, 1855, 2271, 7554, 1155, 11, 2943, 692, 39263, 6262, 190...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReadCsv2Dict(t *testing.T) { csvFile, err := ioutil.TempFile("", "csv2json") check(err) csvFile.WriteString("field1,field2\n") csvFile.WriteString("value1,value2\n") defer os.Remove(csvFile.Name()) expected := map[string]string{"field1": "value1", "field2": "value2"} json := ReadCsv2Dict(csvFile.Name()) if len(json) != 1 || !reflect.DeepEqual(json[0], expected) { t.Errorf("expect: [%s], but got: %s", expected, json) } }
explode_data.jsonl/62020
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 4418, 94826, 17, 13448, 1155, 353, 8840, 836, 8, 341, 1444, 3492, 1703, 11, 1848, 1669, 43144, 65009, 1703, 19814, 330, 18104, 17, 2236, 1138, 25157, 3964, 340, 1444, 3492, 1703, 44747, 445, 2566, 16, 11, 2566, 17, 1699, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCompareAndSwapHashedKey(t *testing.T) { m := &HashMap{} elephant := &Animal{"elephant"} monkey := &Animal{"monkey"} m.SetHashedKey(1<<(strconv.IntSize-2), elephant) if m.Len() != 1 { t.Error("map should contain exactly one element.") } if !m.CasHashedKey(1<<(strconv.IntSize-2), elephant, monkey) { t.Error("Cas should success if expectation met") } if m.CasHashedKey(1<<(strconv.IntSize-2), elephant, monkey) { t.Error("Cas should fail if expectation didn't meet") } item, ok := m.GetHashedKey(1 << (strconv.IntSize - 2)) if !ok { t.Error("ok should be true for item stored within the map.") } if item != monkey { t.Error("wrong item returned.") } }
explode_data.jsonl/24433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 27374, 3036, 46179, 6370, 291, 1592, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 609, 18497, 16094, 7727, 273, 26924, 1669, 609, 41415, 4913, 10068, 26924, 16707, 197, 96016, 1669, 609, 41415, 4913, 96016, 63159, 2109, 4202, 637...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTransportReuseConnEmptyResponseBody(t *testing.T) { defer afterTest(t) cst := newClientServerTest(t, h1Mode, HandlerFunc(func(w ResponseWriter, r *Request) { w.Header().Set("X-Addr", r.RemoteAddr) // Empty response body. })) defer cst.close() n := 100 if testing.Short() { n = 10 } var firstAddr string for i := 0; i < n; i++ { res, err := cst.c.Get(cst.ts.URL) if err != nil { log.Fatal(err) } addr := res.Header.Get("X-Addr") if i == 0 { firstAddr = addr } else if addr != firstAddr { t.Fatalf("On request %d, addr %q != original addr %q", i+1, addr, firstAddr) } res.Body.Close() } }
explode_data.jsonl/14152
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 27560, 38081, 9701, 3522, 29637, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 1444, 267, 1669, 501, 2959, 5475, 2271, 1155, 11, 305, 16, 3636, 11, 19954, 9626, 18552, 3622, 5949, 6492, 11, 435, 353, 1900, 8,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRaceCondition regression-tests the blocking iterator against a
// concurrent Append: a reader waiting in it.Next() for a block that does not
// exist yet must be released once that block is appended, with SUCCESS
// status.
func TestRaceCondition(t *testing.T) {
	tev, fl := initialize(t)
	defer tev.tearDown()

	// Seek to block number 1, which has not been appended yet, so the
	// goroutine's Next() call below blocks.
	it, _ := fl.Iterator(&ab.SeekPosition{Type: &ab.SeekPosition_Specified{Specified: &ab.SeekSpecified{Number: 1}}})

	var block *cb.Block
	var status cb.Status

	// complete is closed once the blocked reader has been released.
	complete := make(chan struct{})

	go func() {
		block, status = it.Next()
		close(complete)
	}()

	// Appending block 1 should wake the goroutine above.
	fl.Append(ledger.CreateNextBlock(fl, []*cb.Envelope{{Payload: []byte("My Data")}}))
	<-complete

	assert.Equal(t, cb.Status_SUCCESS, status, "Expected to successfully read the block")
}
explode_data.jsonl/35030
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 55991, 10547, 1155, 353, 8840, 836, 8, 341, 197, 665, 85, 11, 1320, 1669, 9468, 1155, 340, 16867, 1013, 85, 31853, 59342, 2822, 23374, 11, 716, 1669, 1320, 40846, 2099, 370, 76465, 3812, 90, 929, 25, 609, 370, 76465, 3812,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxExecutorPreparePoolFail(t *testing.T) { txe, tsv, db := newTestTxExecutor(t) defer db.Close() defer tsv.StopService() txid1 := newTxForPrep(tsv) txid2 := newTxForPrep(tsv) err := txe.Prepare(txid1, "aa") require.NoError(t, err) defer txe.RollbackPrepared("aa", 0) err = txe.Prepare(txid2, "bb") require.Error(t, err) require.Contains(t, err.Error(), "prepared transactions exceeded limit") }
explode_data.jsonl/25162
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 31584, 25255, 50590, 10551, 19524, 1155, 353, 8840, 836, 8, 341, 3244, 8371, 11, 259, 3492, 11, 2927, 1669, 501, 2271, 31584, 25255, 1155, 340, 16867, 2927, 10421, 741, 16867, 259, 3492, 30213, 1860, 741, 46237, 307, 16, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGossipDataMerge exercises the silence-merge rules: a new key is added,
// an entry with an older UpdatedAt is dropped, and a newer UpdatedAt
// overwrites. It checks both Merge (full state) and mergeDelta (changes
// only), and that the argument state is never mutated.
func TestGossipDataMerge(t *testing.T) {
	now := utcNow()

	// We only care about key names and timestamps for the
	// merging logic.
	newSilence := func(ts time.Time) *pb.MeshSilence {
		return &pb.MeshSilence{
			Silence: &pb.Silence{UpdatedAt: ts},
		}
	}
	cases := []struct {
		a, b         gossipData
		final, delta gossipData
	}{
		{
			a: gossipData{
				"a1": newSilence(now),
				"a2": newSilence(now),
				"a3": newSilence(now),
			},
			b: gossipData{
				"b1": newSilence(now),                   // new key, should be added
				"a2": newSilence(now.Add(-time.Minute)), // older timestamp, should be dropped
				"a3": newSilence(now.Add(time.Minute)),  // newer timestamp, should overwrite
			},
			final: gossipData{
				"a1": newSilence(now),
				"a2": newSilence(now),
				"a3": newSilence(now.Add(time.Minute)),
				"b1": newSilence(now),
			},
			delta: gossipData{
				"b1": newSilence(now),
				"a3": newSilence(now.Add(time.Minute)),
			},
		},
	}

	for _, c := range cases {
		// First pass: Merge returns (and applies) the merged state.
		ca, cb := c.a.clone(), c.b.clone()

		res := ca.Merge(cb)
		require.Equal(t, c.final, res, "Merge result should match expectation")
		require.Equal(t, c.final, ca, "Merge should apply changes to original state")
		require.Equal(t, c.b, cb, "Merged state should remain unmodified")

		// Second pass on fresh clones: mergeDelta returns only the entries
		// that actually changed.
		ca, cb = c.a.clone(), c.b.clone()

		delta := ca.mergeDelta(cb)
		require.Equal(t, c.delta, delta, "Merge delta should match expectation")
		require.Equal(t, c.final, ca, "Merge should apply changes to original state")
		require.Equal(t, c.b, cb, "Merged state should remain unmodified")
	}
}
explode_data.jsonl/2695
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 683 }
[ 2830, 3393, 38, 41473, 1043, 52096, 1155, 353, 8840, 836, 8, 341, 80922, 1669, 69596, 7039, 2822, 197, 322, 1205, 1172, 2453, 911, 1376, 5036, 323, 48781, 369, 279, 198, 197, 322, 53377, 12218, 624, 8638, 27571, 763, 1669, 2915, 35864, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFindHost checks that findHost requires the "host" entry in the list of
// signed headers: ErrUnsignedHeaders when it is absent, ErrNone once present.
func TestFindHost(t *testing.T) {
	// doesn't contain "host".
	signedHeaders := []string{"x-amz-content-sha256", "x-amz-date"}
	errCode := findHost(signedHeaders)
	// expected to error out with code ErrUnsignedHeaders .
	if errCode != ErrUnsignedHeaders {
		t.Fatalf("Expected the APIErrorCode to be %d, but got %d", ErrUnsignedHeaders, errCode)
	}
	// adding "host".
	signedHeaders = append(signedHeaders, "host")
	// expected to pass.
	errCode = findHost(signedHeaders)
	if errCode != ErrNone {
		t.Fatalf("Expected the APIErrorCode to be %d, but got %d", ErrNone, errCode)
	}
}
explode_data.jsonl/81629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 9885, 9296, 1155, 353, 8840, 836, 8, 341, 197, 322, 3171, 944, 6644, 330, 3790, 22956, 1903, 1542, 10574, 1669, 3056, 917, 4913, 87, 32217, 89, 6808, 7514, 64, 17, 20, 21, 497, 330, 87, 32217, 89, 18413, 16707, 9859, 207...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestFranchisesCreate drives POST /franchises end to end against mocked
// dependencies: the token is decoded, the user looked up, the franchise
// inserted, and the created record is returned in the response body.
func TestFranchisesCreate(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	franchiseModel := fixtures.NewFranchiseModelMock(ctrl)
	userModel := fixtures.NewUserModelMock(ctrl)
	authenticator := fixtures.NewAuthenticatorMock(ctrl)

	srv := newServer(t, &Options{
		Authenticator:  authenticator,
		UserModel:      userModel,
		FranchiseModel: franchiseModel,
	})

	// The request token decodes to a known user...
	authenticator.EXPECT().
		DecodeToken(gomock.Eq(token)).
		Return(user, nil)
	// ...who is then looked up by that same token.
	userModel.
		EXPECT().
		GetUserByToken(token).
		Return(user, nil)
	// The posted franchise is inserted; the mock returns the stored record.
	franchiseModel.EXPECT().
		Insert(&models.Franchise{Name: "Batman"}).
		Return(&franchiseBatman, nil)

	w := httptest.NewRecorder()
	r := httptest.NewRequest(http.MethodPost, "/franchises", fixtures.Marshal(t, models.Franchise{Name: "Batman"}))
	r.Header.Set(models.XAuthToken, token)

	srv.ServeHTTP(w, r)

	var res models.Franchise
	fixtures.Decode(t, w.Body, &res)

	gassert.StatusOK(t, w)
	require.Equal(t, franchiseBatman, res)
}
explode_data.jsonl/60310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 372 }
[ 2830, 3393, 22560, 3497, 4909, 4021, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 97714, 65690, 1712, 1669, 37664, 7121, 22560, 65690, 1712, 11571, 62100, 340, 19060, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_getMemoryLimit_Swarm(t *testing.T) { tests := []struct { title string memoryLimit string expectedLimit string }{ { title: "Kubernetes environment variables missing and limit is set", memoryLimit: "30", expectedLimit: "30m", }, { title: "Kubernetes environment variables missing and limit is unset", memoryLimit: "", expectedLimit: "128m", }, } envVar := "function_memory_limit_mb" for _, test := range tests { t.Run(test.title, func(t *testing.T) { os.Setenv(envVar, test.memoryLimit) limit := getMemoryLimit() if limit != test.expectedLimit { t.Errorf("Test failed! Expected: `%v` got: `%v`.", test.expectedLimit, limit) } }) } }
explode_data.jsonl/11978
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 3062, 10642, 16527, 1098, 82597, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 24751, 260, 914, 198, 197, 2109, 4731, 16527, 256, 914, 198, 197, 42400, 16527, 914, 198, 197, 59403, 197, 197, 515, 298, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestRoleCreateNotUnique ensures role creation is rejected when either the
// handle or the name collides with an already existing role.
func TestRoleCreateNotUnique(t *testing.T) {
	h := newHelper(t)
	helpers.AllowMe(h, types.ComponentRbacResource(), "role.create")

	role := h.repoMakeRole()

	// Fresh (random) name but duplicate handle => "role handle not unique".
	h.apiInit().
		Post("/roles/").
		Header("Accept", "application/json").
		FormData("name", rs()).
		FormData("handle", role.Handle).
		Expect(t).
		Status(http.StatusOK).
		Assert(helpers.AssertError("role handle not unique")).
		End()

	// Duplicate name but fresh handle => "role name not unique".
	h.apiInit().
		Post("/roles/").
		Header("Accept", "application/json").
		FormData("name", role.Name).
		FormData("handle", "handle_"+rs()).
		Expect(t).
		Status(http.StatusOK).
		Assert(helpers.AssertError("role name not unique")).
		End()
}
explode_data.jsonl/8337
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 253 }
[ 2830, 3393, 9030, 4021, 2623, 22811, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 501, 5511, 1155, 340, 197, 21723, 29081, 7823, 3203, 11, 4494, 5119, 49, 55877, 4783, 1507, 330, 5778, 2520, 5130, 197, 5778, 1669, 305, 46169, 8078, 9030, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestProposerJoinAndLeave walks a 7-seat committee through six heights of
// validators joining and leaving, asserting the proposer number and the
// exact committee membership after every Update call. The diagrams below
// describe the rotation rules being exercised.
func TestProposerJoinAndLeave(t *testing.T) {
	_, pub1, _ := crypto.GenerateTestKeyPair()
	_, pub2, _ := crypto.GenerateTestKeyPair()
	_, pub3, _ := crypto.GenerateTestKeyPair()
	_, pub4, _ := crypto.GenerateTestKeyPair()
	_, pub5, _ := crypto.GenerateTestKeyPair()
	_, pub6, _ := crypto.GenerateTestKeyPair()
	_, pub7, _ := crypto.GenerateTestKeyPair()
	_, pub8, _ := crypto.GenerateTestKeyPair()
	_, pub9, _ := crypto.GenerateTestKeyPair()
	_, pubA, _ := crypto.GenerateTestKeyPair()
	_, pubB, _ := crypto.GenerateTestKeyPair()
	_, pubC, _ := crypto.GenerateTestKeyPair()
	_, pubD, _ := crypto.GenerateTestKeyPair()

	// Validator numbers 1..13; letters A..D in the diagrams map to 10..13.
	val1 := validator.NewValidator(pub1, 1)
	val2 := validator.NewValidator(pub2, 2)
	val3 := validator.NewValidator(pub3, 3)
	val4 := validator.NewValidator(pub4, 4)
	val5 := validator.NewValidator(pub5, 5)
	val6 := validator.NewValidator(pub6, 6)
	val7 := validator.NewValidator(pub7, 7)
	val8 := validator.NewValidator(pub8, 8)
	val9 := validator.NewValidator(pub9, 9)
	valA := validator.NewValidator(pubA, 10)
	valB := validator.NewValidator(pubB, 11)
	valC := validator.NewValidator(pubC, 12)
	valD := validator.NewValidator(pubD, 13)

	committee, err := NewCommittee([]*validator.Validator{val1, val2, val3, val4, val5, val6, val7}, 7, val1.Address())
	assert.NoError(t, err)

	// How committee moves when new validator(s) join(s)?
	//
	// Example:
	//
	// Imagine validators `1` to `7` are in the committee, and `1` is the oldest and also proposer.
	// +=+-+-+-+-+-+-+
	// |1|2|3|4|5|6|7|
	// +=+-+-+-+-+-+-+
	//
	// New validator joins and sits before proposer.
	// In this example `8` sits before `1` (current proposer):
	// +*+=+-+-+-+-+-+-+
	// |8|1|2|3|4|5|6|7|
	// +*+=+-+-+-+-+-+-+
	//
	// Now committee should be adjusted and the oldest validator should leave.
	// In this example `1` is the oldest validator:
	// +-+-+-+-+-+-+-+
	// |8|2|3|4|5|6|7|
	// +-+-+-+-+-+-+-+
	//
	// Now we move to the next proposer.
	// In this example next proposer is `2`:
	// +-+=+-+-+-+-+-+
	// |8|2|3|4|5|6|7|
	// +-+=+-+-+-+-+-+
	//
	//
	// In this test we are covering these cases:
	//
	// +=+-+-+-+-+-+-+     +-+=+-+-+-+-+-+     +-+-+-+-+-+=+-+     +-+-+-+-+-+-+=+     +=+-+-+-+-+-+-+     +-+-+-+=+-+-+-+     +=+-+-+-+-+-+-+
	// |1|2|3|4|5|6|7| ==> |8|2|3|4|5|6|7| ==> |8|2|3|4|5|6|7| ==> |8|4|5|9|A|6|7| ==> |8|5|9|A|6|B|7| ==> |C|D|8|9|A|B|7| ==> |C|D|8|1|9|A|B|
	// +=+-+-+-+-+-+-+     +-+=+-+-+-+-+-+     +-+-+-+-+-+=+-+     +-+-+-+-+-+-+=+     +=+-+-+-+-+-+-+     +-+-+-+=+-+-+-+     +=+-+-+-+-+-+-+
	//

	// Height 1
	val8.UpdateLastJoinedHeight(1)
	assert.NoError(t, committee.Update(0, []*validator.Validator{val8}))
	assert.Equal(t, committee.Proposer(0).Number(), 2)
	assert.Equal(t, committee.Validators(), []*validator.Validator{val8, val2, val3, val4, val5, val6, val7})

	// Height 2
	assert.NoError(t, committee.Update(3, nil))
	assert.Equal(t, committee.Proposer(0).Number(), 6)

	// Height 3
	val9.UpdateLastJoinedHeight(3)
	valA.UpdateLastJoinedHeight(3)
	assert.NoError(t, committee.Update(0, []*validator.Validator{val9, valA}))
	assert.Equal(t, committee.Proposer(0).Number(), 7)
	assert.Equal(t, committee.Validators(), []*validator.Validator{val8, val4, val5, val9, valA, val6, val7})

	// Height 4
	valB.UpdateLastJoinedHeight(4)
	assert.NoError(t, committee.Update(0, []*validator.Validator{valB}))
	assert.Equal(t, committee.Proposer(0).Number(), 8)
	assert.Equal(t, committee.Proposer(1).Number(), 5)
	assert.Equal(t, committee.Proposer(2).Number(), 9)
	assert.Equal(t, committee.Validators(), []*validator.Validator{val8, val5, val9, valA, val6, valB, val7})

	// Height 5
	valC.UpdateLastJoinedHeight(5)
	valD.UpdateLastJoinedHeight(5)
	assert.NoError(t, committee.Update(0, []*validator.Validator{valC, valD}))
	assert.Equal(t, committee.Proposer(0).Number(), 9)
	assert.Equal(t, committee.Proposer(1).Number(), 10)
	assert.Equal(t, committee.Proposer(2).Number(), 11)
	assert.Equal(t, committee.Validators(), []*validator.Validator{valC, valD, val8, val9, valA, valB, val7})

	// Height 6
	val1.UpdateLastJoinedHeight(6)
	assert.NoError(t, committee.Update(2, []*validator.Validator{val1}))
	assert.Equal(t, committee.Proposer(0).Number(), 12)
	assert.Equal(t, committee.Proposer(1).Number(), 13)
	assert.Equal(t, committee.Proposer(2).Number(), 8)
	assert.Equal(t, committee.Proposer(3).Number(), 1)
	assert.Equal(t, committee.Proposer(4).Number(), 9)
	assert.Equal(t, committee.Proposer(5).Number(), 10)
	assert.Equal(t, committee.Proposer(6).Number(), 11)
	assert.Equal(t, committee.Validators(), []*validator.Validator{valC, valD, val8, val1, val9, valA, valB})
}
explode_data.jsonl/35321
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1900 }
[ 2830, 3393, 2008, 23438, 12292, 3036, 21833, 1155, 353, 8840, 836, 8, 341, 197, 6878, 6675, 16, 11, 716, 1669, 19028, 57582, 2271, 1592, 12443, 741, 197, 6878, 6675, 17, 11, 716, 1669, 19028, 57582, 2271, 1592, 12443, 741, 197, 6878, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWriteHumanReadablePrimitiveValues checks the human-readable rendering
// of primitive values: booleans, numbers (including scientific notation),
// and strings with escaping of whitespace, control, and non-ASCII bytes.
func TestWriteHumanReadablePrimitiveValues(t *testing.T) {
	assertWriteHRSEqual(t, "true", Bool(true))
	assertWriteHRSEqual(t, "false", Bool(false))

	assertWriteHRSEqual(t, "0", Number(0))
	assertWriteHRSEqual(t, "42", Number(42))
	assertWriteHRSEqual(t, "-42", Number(-42))

	assertWriteHRSEqual(t, "3.1415926535", Number(3.1415926535))
	assertWriteHRSEqual(t, "314159.26535", Number(3.1415926535e5))
	assertWriteHRSEqual(t, "3.1415926535e+20", Number(3.1415926535e20))

	assertWriteHRSEqual(t, `"abc"`, String("abc"))
	assertWriteHRSEqual(t, `" "`, String(" "))
	// Whitespace is escaped whether written as an escape sequence or as a
	// literal character (the raw-string argument below holds a real tab /
	// newline — NOTE(review): reconstructed from collapsed source, confirm
	// against upstream).
	assertWriteHRSEqual(t, `"\t"`, String("\t"))
	assertWriteHRSEqual(t, `"\t"`, String("	"))
	assertWriteHRSEqual(t, `"\n"`, String("\n"))
	assertWriteHRSEqual(t, `"\n"`, String(`
`))
	assertWriteHRSEqual(t, `"\r"`, String("\r"))
	assertWriteHRSEqual(t, `"\r\n"`, String("\r\n"))
	// Non-UTF-8 byte is rendered as a hex escape.
	assertWriteHRSEqual(t, `"\xff"`, String("\xff"))
	// Valid multi-byte UTF-8 passes through unescaped.
	assertWriteHRSEqual(t, `"💩"`, String("\xf0\x9f\x92\xa9"))
	assertWriteHRSEqual(t, `"💩"`, String("💩"))
	assertWriteHRSEqual(t, `"\a"`, String("\007"))
	assertWriteHRSEqual(t, `"☺"`, String("\u263a"))
}
explode_data.jsonl/60896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 481 }
[ 2830, 3393, 7985, 33975, 57938, 33313, 6227, 1155, 353, 8840, 836, 8, 341, 6948, 7985, 17088, 925, 1751, 1155, 11, 330, 1866, 497, 12608, 3715, 1171, 6948, 7985, 17088, 925, 1751, 1155, 11, 330, 3849, 497, 12608, 3576, 4390, 6948, 7985,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogURIGenerator(t *testing.T) { for _, tc := range []struct { scheme string path string args map[string]string expected string err string }{ { scheme: "fifo", path: "/full/path/pipe.fifo", expected: "fifo:///full/path/pipe.fifo", }, { scheme: "file", path: "/full/path/file.txt", args: map[string]string{ "maxSize": "100MB", }, expected: "file:///full/path/file.txt?maxSize=100MB", }, { scheme: "binary", path: "/full/path/bin", args: map[string]string{ "id": "testing", }, expected: "binary:///full/path/bin?id=testing", }, { scheme: "unknown", path: "nowhere", err: "absolute path needed", }, } { uri, err := LogURIGenerator(tc.scheme, tc.path, tc.args) if err != nil { assert.Error(t, err, tc.err) continue } assert.Equal(t, tc.expected, uri.String()) } }
explode_data.jsonl/32779
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 436 }
[ 2830, 3393, 2201, 1511, 1914, 15312, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 1903, 8058, 256, 914, 198, 197, 26781, 257, 914, 198, 197, 31215, 257, 2415, 14032, 30953, 198, 197, 42400, 914, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_NewSession_FinishSession(t *testing.T) { repo, err := repository.Initialize(dir) defer repo.CloseAllQueues() assert.Nil(t, err) mockTCPConn := NewMockTCPConn() c := NewSession(mockTCPConn, repo) assert.Equal(t, uint64(1), repo.Stats.CurrentConnections) assert.Equal(t, uint64(1), repo.Stats.TotalConnections) c.FinishSession() assert.Equal(t, uint64(0), repo.Stats.CurrentConnections) }
explode_data.jsonl/1416
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 39582, 5283, 1400, 18176, 5283, 1155, 353, 8840, 836, 8, 341, 17200, 5368, 11, 1848, 1669, 12542, 45829, 14161, 340, 16867, 15867, 10421, 2403, 25776, 1137, 741, 6948, 59678, 1155, 11, 1848, 692, 77333, 49896, 9701, 1669, 1532...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestProcessEvent feeds several add/update/delete event sequences (including
// out-of-order and reversed delivery) through the GraphBuilder one event at a
// time, checking the owner graph's invariants after every step.
func TestProcessEvent(t *testing.T) {
	var testScenarios = []struct {
		name string
		// a series of events that will be supplied to the
		// GraphBuilder.eventQueue.
		events []event
	}{
		{
			name: "test1",
			events: []event{
				createEvent(addEvent, "1", []string{}),
				createEvent(addEvent, "2", []string{"1"}),
				createEvent(addEvent, "3", []string{"1", "2"}),
			},
		},
		{
			name: "test2",
			events: []event{
				createEvent(addEvent, "1", []string{}),
				createEvent(addEvent, "2", []string{"1"}),
				createEvent(addEvent, "3", []string{"1", "2"}),
				createEvent(addEvent, "4", []string{"2"}),
				createEvent(deleteEvent, "2", []string{"doesn't matter"}),
			},
		},
		{
			name: "test3",
			events: []event{
				createEvent(addEvent, "1", []string{}),
				createEvent(addEvent, "2", []string{"1"}),
				createEvent(addEvent, "3", []string{"1", "2"}),
				createEvent(addEvent, "4", []string{"3"}),
				createEvent(updateEvent, "2", []string{"4"}),
			},
		},
		{
			name: "reverse test2",
			events: []event{
				createEvent(addEvent, "4", []string{"2"}),
				createEvent(addEvent, "3", []string{"1", "2"}),
				createEvent(addEvent, "2", []string{"1"}),
				createEvent(addEvent, "1", []string{}),
				createEvent(deleteEvent, "2", []string{"doesn't matter"}),
			},
		},
	}

	// A pre-closed channel signals "informers already started" so the
	// builder never waits on it.
	alwaysStarted := make(chan struct{})
	close(alwaysStarted)
	for _, scenario := range testScenarios {
		// Fresh builder per scenario so graphs don't bleed between cases.
		dependencyGraphBuilder := &GraphBuilder{
			informersStarted: alwaysStarted,
			graphChanges:     workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter()),
			uidToNode: &concurrentUIDToNode{
				uidToNodeLock: sync.RWMutex{},
				uidToNode:     make(map[types.UID]*node),
			},
			attemptToDelete:  workqueue.NewRateLimitingQueue(workqueue.DefaultControllerRateLimiter()),
			absentOwnerCache: NewUIDCache(2),
		}
		// Process events one by one, validating invariants after each.
		for i := 0; i < len(scenario.events); i++ {
			dependencyGraphBuilder.graphChanges.Add(&scenario.events[i])
			dependencyGraphBuilder.processGraphChanges()
			verifyGraphInvariants(scenario.name, dependencyGraphBuilder.uidToNode.uidToNode, t)
		}
	}
}
explode_data.jsonl/1189
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 857 }
[ 2830, 3393, 75002, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 3326, 60494, 284, 3056, 1235, 341, 197, 11609, 914, 198, 197, 197, 322, 264, 4013, 315, 4357, 429, 686, 387, 17221, 311, 279, 198, 197, 197, 322, 12165, 3297, 5773, 7554, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestStuckPodDetector_ReturnsLeaseAndDeletesRetryableStuckPod verifies the
// two-phase handling of a retryable stuck pod: the first detection pass
// deletes the pod but does not report it done or return its lease; the
// second pass returns the lease so the job can be retried elsewhere.
func TestStuckPodDetector_ReturnsLeaseAndDeletesRetryableStuckPod(t *testing.T) {
	retryableStuckPod := makeRetryableStuckPod()
	fakeClusterContext, mockLeaseService, stuckPodDetector := makeStuckPodDetectorWithTestDoubles()

	addPod(t, fakeClusterContext, retryableStuckPod)

	// First pass: pod is deleted, lease kept.
	stuckPodDetector.HandleStuckPods()
	// Not done as can be retried
	assert.Equal(t, 1, mockLeaseService.reportDoneCalls)
	assert.Equal(t, []*v1.Pod{}, mockLeaseService.reportDoneArg)
	// Not returning lease yet
	assert.Equal(t, 0, mockLeaseService.returnLeaseCalls)
	// Still deletes pod
	remainingActivePods := getActivePods(t, fakeClusterContext)
	assert.Equal(t, []*v1.Pod{}, remainingActivePods)

	// Second pass: lease is handed back for retry.
	stuckPodDetector.HandleStuckPods()
	// Not done as can be retried
	assert.Equal(t, 2, mockLeaseService.reportDoneCalls)
	assert.Equal(t, []*v1.Pod{}, mockLeaseService.reportDoneArg)
	// Return lease for retry
	assert.Equal(t, 1, mockLeaseService.returnLeaseCalls)
	assert.Equal(t, retryableStuckPod, mockLeaseService.returnLeaseArg)
}
explode_data.jsonl/64401
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 370 }
[ 2830, 3393, 623, 1942, 23527, 31606, 53316, 82, 2304, 519, 3036, 61317, 51560, 480, 623, 1942, 23527, 1155, 353, 8840, 836, 8, 341, 17200, 1539, 480, 623, 1942, 23527, 1669, 1281, 51560, 480, 623, 1942, 23527, 2822, 1166, 726, 28678, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRespondentCloseSocketRecv(t *testing.T) { s := GetSocket(t, NewSocket) p := GetSocket(t, surveyor.NewSocket) ConnectPair(t, s, p) MustSucceed(t, s.SetOption(mangos.OptionReadQLen, 1)) // Fill the pipe for i := 0; i < 10; i++ { // These all will work, but the back-pressure will go all the // way to the sender. MustSucceed(t, p.Send([]byte(""))) } MustSucceed(t, s.Close()) }
explode_data.jsonl/57397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 65354, 306, 7925, 10286, 63483, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 2126, 10286, 1155, 11, 1532, 10286, 340, 3223, 1669, 2126, 10286, 1155, 11, 10572, 269, 7121, 10286, 340, 197, 14611, 12443, 1155, 11, 274, 11, 281, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFileOpen(t *testing.T) { r := fstest.NewRun(t) defer r.Finalise() _, file, _ := fileCreate(t, r, vfscommon.CacheModeOff) fd, err := file.Open(os.O_RDONLY) require.NoError(t, err) _, ok := fd.(*ReadFileHandle) assert.True(t, ok) require.NoError(t, fd.Close()) fd, err = file.Open(os.O_WRONLY) assert.NoError(t, err) _, ok = fd.(*WriteFileHandle) assert.True(t, ok) require.NoError(t, fd.Close()) fd, err = file.Open(os.O_RDWR) assert.NoError(t, err) _, ok = fd.(*WriteFileHandle) assert.True(t, ok) _, err = file.Open(3) assert.Equal(t, EPERM, err) }
explode_data.jsonl/9738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 272 }
[ 2830, 3393, 1703, 5002, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 48434, 477, 7121, 6727, 1155, 340, 16867, 435, 991, 977, 1064, 741, 197, 6878, 1034, 11, 716, 1669, 1034, 4021, 1155, 11, 435, 11, 92941, 5464, 46130, 3636, 4596, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCloudTasksListQueues (generated-style GAPIC test) stubs the mock Cloud
// Tasks server with a single-page ListQueues response, issues the request
// through the client, and checks both the captured request proto and the
// first returned queue against the expectations.
func TestCloudTasksListQueues(t *testing.T) {
	var nextPageToken string = ""
	var queuesElement *taskspb.Queue = &taskspb.Queue{}
	var queues = []*taskspb.Queue{queuesElement}
	var expectedResponse = &taskspb.ListQueuesResponse{
		NextPageToken: nextPageToken,
		Queues:        queues,
	}

	// Reset the shared mock and queue the canned response.
	mockCloudTasks.err = nil
	mockCloudTasks.reqs = nil

	mockCloudTasks.resps = append(mockCloudTasks.resps[:0], expectedResponse)

	var formattedParent string = fmt.Sprintf("projects/%s/locations/%s", "[PROJECT]", "[LOCATION]")
	var request = &taskspb.ListQueuesRequest{
		Parent: formattedParent,
	}

	c, err := NewClient(context.Background(), clientOpt)
	if err != nil {
		t.Fatal(err)
	}

	resp, err := c.ListQueues(context.Background(), request).Next()

	if err != nil {
		t.Fatal(err)
	}

	// The mock must have received exactly the request we sent.
	if want, got := request, mockCloudTasks.reqs[0]; !proto.Equal(want, got) {
		t.Errorf("wrong request %q, want %q", got, want)
	}

	// Compare protos with proto.Equal, anything else with ==.
	want := (interface{})(expectedResponse.Queues[0])
	got := (interface{})(resp)
	var ok bool

	switch want := (want).(type) {
	case proto.Message:
		ok = proto.Equal(want, got.(proto.Message))
	default:
		ok = want == got
	}
	if !ok {
		t.Errorf("wrong response %q, want %q)", got, want)
	}
}
explode_data.jsonl/30839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 466 }
[ 2830, 3393, 16055, 25449, 852, 25776, 1137, 1155, 353, 8840, 836, 8, 341, 2405, 83595, 3323, 914, 284, 8389, 2405, 48094, 1691, 353, 8202, 43467, 50251, 284, 609, 8202, 43467, 50251, 16094, 2405, 48094, 284, 29838, 8202, 43467, 50251, 90,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLoadPresidents(t *testing.T) { db, cleanup := createDB(t) defer cleanup() txt := ` create table Presidents ( ID integer not null primary key, Name text, Term1 int, Term2 int );` if _, err := db.Exec(txt); err != nil { t.Fatal(err) } f, err := Load(db, "Presidents") if err != nil { t.Fatal(err) } frametest.LoadPresidents(t, f) }
explode_data.jsonl/20592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 5879, 14367, 6880, 1155, 353, 8840, 836, 8, 341, 20939, 11, 21290, 1669, 1855, 3506, 1155, 340, 16867, 21290, 2822, 68272, 1669, 22074, 39263, 1965, 78639, 2399, 197, 29580, 7546, 537, 845, 6028, 1376, 345, 197, 21297, 1467, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCPU(t *testing.T) { tc := testCase{ replicas: 3, desiredResourceValues: PodResourceInfo{ "test-pod-0": 5000, "test-pod-1": 5000, "test-pod-2": 5000, }, resourceName: v1.ResourceCPU, targetTimestamp: 1, reportedPodMetrics: [][]int64{{5000}, {5000}, {5000}}, } tc.runTest(t) }
explode_data.jsonl/66345
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 31615, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 54452, 515, 197, 73731, 52210, 25, 220, 18, 345, 197, 52912, 2690, 4783, 6227, 25, 16821, 4783, 1731, 515, 298, 197, 1, 1944, 2268, 347, 12, 15, 788, 220, 20, 15, 15, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGCSPreDownload(t *testing.T) { testCases := []struct { testcase gcsTestCase output error }{ { testcase: gcsTestCase{ client: validClient, conf: validConfig, }, output: nil, }, { testcase: gcsTestCase{ client: &gcsMockClient{ err: errTest, }, conf: validConfig, }, output: nil, }, } for idx, c := range testCases { b := &GoogleCloudStorageBackend{} if err := b.Init(context.Background(), c.testcase.conf, WithGCSClient(c.testcase.client)); err != nil { t.Errorf("%d: error setting up backend - %v", idx, err) } else { err = b.PreDownload(context.Background(), nil) if err != c.output { t.Errorf("%d: Expected %v, got %v", idx, c.output, err) } } } }
explode_data.jsonl/66363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 341 }
[ 2830, 3393, 22863, 4592, 265, 11377, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 18185, 5638, 342, 4837, 16458, 198, 197, 21170, 256, 1465, 198, 197, 59403, 197, 197, 515, 298, 18185, 5638, 25, 342, 4837, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetRequestHeaderValue(t *testing.T) { c, _ := CreateTestContext(httptest.NewRecorder()) c.Request, _ = http.NewRequest("GET", "/chat", nil) c.Request.Header.Set("Gin-Version", "1.0.0") assert.Equal(t, "1.0.0", c.GetHeader("Gin-Version")) assert.Empty(t, c.GetHeader("Connection")) }
explode_data.jsonl/26835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 1949, 1900, 97721, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 4230, 2271, 1972, 73392, 83, 70334, 7121, 47023, 2398, 1444, 9659, 11, 716, 284, 1758, 75274, 445, 3806, 497, 3521, 9686, 497, 2092, 340, 1444, 9659, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateProjectID(t *testing.T) { t.Parallel() Convey(`Test ValidateProjectID`, t, func() { Convey(`With valid project IDs`, func() { So(ValidateProjectID(`project-id-foo`), ShouldBeNil) }) Convey(`W/o a starting, lowercase ASCII letters`, func() { e := `must start with a lowercase ASCII letter` So(ValidateProjectID(`0123456`), ShouldErrLike, e) So(ValidateProjectID(`ProjectID`), ShouldErrLike, e) So(ValidateProjectID(`-project-id`), ShouldErrLike, e) So(ValidateProjectID(`ö-project-id`), ShouldErrLike, e) }) Convey(`With a trailing hyphen`, func() { e := `must not have a trailing hyphen` So(ValidateProjectID(`project-id-`), ShouldErrLike, e) So(ValidateProjectID(`project--id--`), ShouldErrLike, e) }) Convey(`With len() < 6 or > 30`, func() { e := `must contain 6 to 30 ASCII letters, digits, or hyphens` So(ValidateProjectID(``), ShouldErrLike, e) So(ValidateProjectID(`pro`), ShouldErrLike, e) So(ValidateProjectID(`project-id-1234567890-1234567890`), ShouldErrLike, e) }) Convey(`With non-ascii letters`, func() { e := `invalid letter` So(ValidateProjectID(`project-✅-id`), ShouldErrLike, e) So(ValidateProjectID(`project-✅-👀`), ShouldErrLike, e) So(ValidateProjectID(`project-id-🍀🍀🍀🍀s`), ShouldErrLike, e) }) }) }
explode_data.jsonl/54591
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 556 }
[ 2830, 3393, 17926, 7849, 915, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 93070, 5617, 5809, 2271, 23282, 7849, 915, 7808, 259, 11, 2915, 368, 341, 197, 93070, 5617, 5809, 2354, 2697, 2390, 28360, 7808, 2915, 368, 341, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTemplate_ExcludeTemplateTypes(t *testing.T) { steps := ParallelSteps{ []WorkflowStep{ { Name: "Test", Template: "testtmpl", }, }, } tmpl := Template{ Name: "step", Steps: []ParallelSteps{steps}, Script: &ScriptTemplate{Source: "test"}, Container: &corev1.Container{Name: "container"}, DAG: &DAGTemplate{FailFast: pointer.BoolPtr(true)}, Resource: &ResourceTemplate{Action: "Create"}, Data: &Data{Source: DataSource{ArtifactPaths: &ArtifactPaths{}}}, Suspend: &SuspendTemplate{Duration: "10s"}, } t.Run("StepTemplateType", func(t *testing.T) { stepTmpl := tmpl.DeepCopy() stepTmpl.SetType(TemplateTypeSteps) assert.NotNil(t, stepTmpl.Steps) assert.Nil(t, stepTmpl.Script) assert.Nil(t, stepTmpl.Resource) assert.Nil(t, stepTmpl.Data) assert.Nil(t, stepTmpl.DAG) assert.Nil(t, stepTmpl.Container) assert.Nil(t, stepTmpl.Suspend) }) t.Run("DAGTemplateType", func(t *testing.T) { dagTmpl := tmpl.DeepCopy() dagTmpl.SetType(TemplateTypeDAG) assert.NotNil(t, dagTmpl.DAG) assert.Nil(t, dagTmpl.Script) assert.Nil(t, dagTmpl.Resource) assert.Nil(t, dagTmpl.Data) assert.Len(t, dagTmpl.Steps, 0) assert.Nil(t, dagTmpl.Container) assert.Nil(t, dagTmpl.Suspend) }) t.Run("ScriptTemplateType", func(t *testing.T) { scriptTmpl := tmpl.DeepCopy() scriptTmpl.SetType(TemplateTypeScript) assert.NotNil(t, scriptTmpl.Script) assert.Nil(t, scriptTmpl.DAG) assert.Nil(t, scriptTmpl.Resource) assert.Nil(t, scriptTmpl.Data) assert.Len(t, scriptTmpl.Steps, 0) assert.Nil(t, scriptTmpl.Container) assert.Nil(t, scriptTmpl.Suspend) }) t.Run("ResourceTemplateType", func(t *testing.T) { resourceTmpl := tmpl.DeepCopy() resourceTmpl.SetType(TemplateTypeResource) assert.NotNil(t, resourceTmpl.Resource) assert.Nil(t, resourceTmpl.Script) assert.Nil(t, resourceTmpl.DAG) assert.Nil(t, resourceTmpl.Data) assert.Len(t, resourceTmpl.Steps, 0) assert.Nil(t, resourceTmpl.Container) assert.Nil(t, resourceTmpl.Suspend) }) t.Run("ContainerTemplateType", func(t *testing.T) { containerTmpl := tmpl.DeepCopy() 
containerTmpl.SetType(TemplateTypeContainer) assert.NotNil(t, containerTmpl.Container) assert.Nil(t, containerTmpl.Script) assert.Nil(t, containerTmpl.DAG) assert.Nil(t, containerTmpl.Data) assert.Len(t, containerTmpl.Steps, 0) assert.Nil(t, containerTmpl.Resource) assert.Nil(t, containerTmpl.Suspend) }) t.Run("DataTemplateType", func(t *testing.T) { dataTmpl := tmpl.DeepCopy() dataTmpl.SetType(TemplateTypeData) assert.NotNil(t, dataTmpl.Data) assert.Nil(t, dataTmpl.Script) assert.Nil(t, dataTmpl.DAG) assert.Nil(t, dataTmpl.Container) assert.Len(t, dataTmpl.Steps, 0) assert.Nil(t, dataTmpl.Resource) assert.Nil(t, dataTmpl.Suspend) }) t.Run("SuspendTemplateType", func(t *testing.T) { suspendTmpl := tmpl.DeepCopy() suspendTmpl.SetType(TemplateTypeSuspend) assert.NotNil(t, suspendTmpl.Suspend) assert.Nil(t, suspendTmpl.Script) assert.Nil(t, suspendTmpl.DAG) assert.Nil(t, suspendTmpl.Container) assert.Len(t, suspendTmpl.Steps, 0) assert.Nil(t, suspendTmpl.Resource) assert.Nil(t, suspendTmpl.Data) }) }
explode_data.jsonl/26058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1497 }
[ 2830, 3393, 7275, 62531, 857, 7275, 4173, 1155, 353, 8840, 836, 8, 341, 18388, 7124, 1669, 49272, 33951, 515, 197, 197, 1294, 62768, 8304, 515, 298, 197, 515, 571, 21297, 25, 257, 330, 2271, 756, 571, 197, 7275, 25, 330, 1944, 71359, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCrudVariantsWithFailures(t *testing.T) { var res middleware.Responder db := entity.NewTestDB() c := &crud{} defer db.Close() defer gostub.StubFunc(&getDB, db).Reset() c.CreateFlag(flag.CreateFlagParams{ Body: &models.CreateFlagRequest{ Description: util.StringPtr("funny flag"), }, }) c.CreateVariant(variant.CreateVariantParams{ FlagID: int64(1), Body: &models.CreateVariantRequest{ Key: util.StringPtr("control"), }, }) t.Run("CreateVariant - r2e MapAttachment error", func(t *testing.T) { defer gostub.StubFunc(&r2eMapAttachment, nil, fmt.Errorf("r2e MapAttachment error")).Reset() res = c.CreateVariant(variant.CreateVariantParams{ FlagID: int64(1), Body: &models.CreateVariantRequest{ Key: util.StringPtr("control"), }, }) assert.NotZero(t, res.(*variant.CreateVariantDefault).Payload) }) t.Run("CreateVariant - creation validation error", func(t *testing.T) { res = c.CreateVariant(variant.CreateVariantParams{ FlagID: int64(1), Body: &models.CreateVariantRequest{ Key: util.StringPtr(" 123_invalid_key"), }, }) assert.NotZero(t, res.(*variant.CreateVariantDefault).Payload) }) t.Run("CreateVariant - db generic error", func(t *testing.T) { db.Error = fmt.Errorf("db generic error") res = c.CreateVariant(variant.CreateVariantParams{ FlagID: int64(1), Body: &models.CreateVariantRequest{ Key: util.StringPtr("key"), }, }) assert.NotZero(t, res.(*variant.CreateVariantDefault).Payload) db.Error = nil }) t.Run("FindVariants - db generic error", func(t *testing.T) { db.Error = fmt.Errorf("db generic error") res = c.FindVariants(variant.FindVariantsParams{ FlagID: int64(1), }) assert.NotZero(t, res.(*variant.FindVariantsDefault).Payload) db.Error = nil }) t.Run("PutVariant - put on a non-existing variant", func(t *testing.T) { res = c.PutVariant(variant.PutVariantParams{ FlagID: int64(1), VariantID: int64(999999), Body: &models.PutVariantRequest{ Key: util.StringPtr("another_control"), }, }) assert.NotZero(t, *res.(*variant.PutVariantDefault).Payload) }) t.Run("PutVariant - 
put validation error", func(t *testing.T) { res = c.PutVariant(variant.PutVariantParams{ FlagID: int64(1), VariantID: int64(1), Body: &models.PutVariantRequest{ Key: util.StringPtr(" spaces in key 123_invalid_key"), }, }) assert.NotZero(t, *res.(*variant.PutVariantDefault).Payload) }) t.Run("PutVariant - validatePutVariantForDistributions error", func(t *testing.T) { defer gostub.StubFunc(&validatePutVariantForDistributions, NewError(500, "validatePutVariantForDistributions error")).Reset() res = c.PutVariant(variant.PutVariantParams{ FlagID: int64(1), VariantID: int64(1), Body: &models.PutVariantRequest{ Key: util.StringPtr("key"), }, }) assert.NotZero(t, *res.(*variant.PutVariantDefault).Payload) }) t.Run("DeleteVariant - validateDeleteVariant error", func(t *testing.T) { defer gostub.StubFunc(&validateDeleteVariant, NewError(500, "validateDeleteVariant error")).Reset() res = c.DeleteVariant(variant.DeleteVariantParams{ FlagID: int64(1), VariantID: int64(1), }) assert.NotZero(t, res.(*variant.DeleteVariantDefault).Payload) }) t.Run("DeleteVariant - db generic error", func(t *testing.T) { db.Error = fmt.Errorf("db generic error") res = c.DeleteVariant(variant.DeleteVariantParams{ FlagID: int64(1), VariantID: int64(1), }) assert.NotZero(t, res.(*variant.DeleteVariantDefault).Payload) db.Error = nil }) }
explode_data.jsonl/19456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1466 }
[ 2830, 3393, 92061, 56135, 1783, 2354, 19524, 1413, 1155, 353, 8840, 836, 8, 341, 2405, 592, 29679, 8377, 20328, 198, 20939, 1669, 5387, 7121, 2271, 3506, 741, 1444, 1669, 609, 53569, 31483, 16867, 2927, 10421, 741, 16867, 67934, 392, 7758...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFakeSpan(t *testing.T) { ctx := context.Background() // It should be safe to call all the usual methods as if a plugin were installed. span1, ctx := NewSpan(ctx, "label") span1.Finish() span2, ctx := NewSpan(ctx, "label") span2.Annotate("key", 42) span2.Finish() span3, _ := NewSpan(ctx, "label") span3.Annotate("key", 42) span3.Finish() }
explode_data.jsonl/9102
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 52317, 12485, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 197, 322, 1084, 1265, 387, 6092, 311, 1618, 678, 279, 13475, 5413, 438, 421, 264, 9006, 1033, 10275, 624, 197, 1480, 16, 11, 5635, 1669, 1532, 124...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDashboardIDFormatter(t *testing.T) { actual := NewDashboardID("12345678-1234-9876-4563-123456789012", "group1", "dashboard1").ID() expected := "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/group1/providers/Microsoft.Portal/dashboards/dashboard1" if actual != expected { t.Fatalf("Expected %q but got %q", expected, actual) } }
explode_data.jsonl/40730
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 26947, 915, 14183, 1155, 353, 8840, 836, 8, 341, 88814, 1669, 1532, 26947, 915, 445, 16, 17, 18, 19, 20, 21, 22, 23, 12, 16, 17, 18, 19, 12, 24, 23, 22, 21, 12, 19, 20, 21, 18, 12, 16, 17, 18, 19, 20, 21, 22, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReconnectExits(t *testing.T) { server := newTestServer("localhost:8899") // random addr server.Start() defer server.Stop() time.Sleep(time.Millisecond) client, err := NewRedialableAgentClient("localhost:8899", grpc.WithInsecure()) if err != nil { t.Fatal(err) } err = client.Send(&agent.Packet{ Type: agent.PacketType_DIAL_REQ, }) if err != nil { t.Error(err) } client1 := make(chan bool) go func() { _, err := client.Recv() if err != nil { if err2, ok := err.(*ReconnectError); ok { err2.Wait() client1 <- true } } }() client2 := make(chan bool) go func() { _, err := client.Recv() if err != nil { if err2, ok := err.(*ReconnectError); ok { err2.Wait() client2 <- true } } }() client.interrupt() var got1 bool var got2 bool select { case got1 = <-client1: case <-time.After(time.Second): } select { case got2 = <-client2: case <-time.After(time.Second): } if !got1 || !got2 { t.Errorf("expect both clients get unblocked; not they don't (%t %t)", got1, got2) } }
explode_data.jsonl/69215
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 693, 6459, 840, 1199, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 501, 2271, 5475, 445, 8301, 25, 23, 23, 24, 24, 899, 442, 4194, 10789, 198, 41057, 12101, 741, 16867, 3538, 30213, 2822, 21957, 31586, 9730, 71482, 692, 2529...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRenderList(t *testing.T) { type scenario struct { input interface{} expectedString string expectedError error } scenarios := []scenario{ { []*myDisplayable{ {[]string{"aa", "b"}}, {[]string{"c", "d"}}, }, "aa b\nc d", nil, }, { []*myStruct{ {}, {}, }, "", errors.New("item does not implement the Displayable interface"), }, { &myStruct{}, "", errors.New("RenderList given a non-slice type"), }, } for _, s := range scenarios { str, err := RenderList(s.input) assert.EqualValues(t, s.expectedString, str) assert.EqualValues(t, s.expectedError, err) } }
explode_data.jsonl/11577
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 298 }
[ 2830, 3393, 6750, 852, 1155, 353, 8840, 836, 8, 341, 13158, 15048, 2036, 341, 197, 22427, 688, 3749, 16094, 197, 42400, 703, 914, 198, 197, 42400, 1454, 220, 1465, 198, 197, 630, 29928, 60494, 1669, 3056, 61422, 515, 197, 197, 515, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAESSIV_KeySizes(t *testing.T) { keyStr := "198371900187498172316311acf81d238ff7619873a61983d619c87b63a1987f" + "987131819803719b847126381cd763871638aa71638176328761287361231321" + "812731321de508761437195ff231765aa4913219873ac6918639816312130011" + "abc900bba11400187984719827431246bbab1231eb4145215ff7141436616beb" + "9817298148712fed3aab61000ff123313e" key, _ := hex.DecodeString(keyStr) for i := 0; i < len(key); i++ { _, err := subtle.NewAESSIV(key[:i]) if i == subtle.AESSIVKeySize && err != nil { t.Errorf("Rejected valid key size: %v, %v", i, err) } if i != subtle.AESSIVKeySize && err == nil { t.Errorf("Allowed invalid key size: %v", i) } } }
explode_data.jsonl/7234
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 335 }
[ 2830, 3393, 32, 9996, 3090, 35253, 34930, 1155, 353, 8840, 836, 8, 341, 23634, 2580, 19687, 197, 197, 1, 16, 24, 23, 18, 22, 16, 24, 15, 15, 16, 23, 22, 19, 24, 23, 16, 22, 17, 18, 16, 21, 18, 16, 16, 62594, 23, 16, 67, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestAlternativeOperatorScenarios(t *testing.T) { for _, tt := range alternativeOperatorScenarios { testScenario(t, &tt) } documentScenarios(t, "alternative-default-value", alternativeOperatorScenarios) }
explode_data.jsonl/79118
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 75763, 18461, 3326, 60494, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 10555, 18461, 3326, 60494, 341, 197, 18185, 54031, 1155, 11, 609, 5566, 340, 197, 532, 17470, 3326, 60494, 1155, 11, 330, 76995, 13672, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestStreamRecvWithErrorAndResolvedGoBack(t *testing.T) { if !util.FailpointBuild { t.Skip("skip when this is not a failpoint build") } ctx, cancel := context.WithCancel(context.Background()) wg := &sync.WaitGroup{} var requestID uint64 ch1 := make(chan *cdcpb.ChangeDataEvent, 10) srv1 := newMockChangeDataService(t, ch1) srv1.recvLoop = func(server cdcpb.ChangeData_EventFeedServer) { for { req, err := server.Recv() if err != nil { log.Error("mock server error", zap.Error(err)) return } atomic.StoreUint64(&requestID, req.RequestId) } } server1, addr1 := newMockService(ctx, t, srv1, wg) defer func() { close(ch1) server1.Stop() wg.Wait() }() rpcClient, cluster, pdClient, err := testutils.NewMockTiKV("", mockcopr.NewCoprRPCHandler()) require.Nil(t, err) pdClient = &mockPDClient{Client: pdClient, versionGen: defaultVersionGen} kvStorage, err := tikv.NewTestTiKVStore(rpcClient, pdClient, nil, nil, 0) require.Nil(t, err) defer kvStorage.Close() //nolint:errcheck regionID := uint64(3) cluster.AddStore(1, addr1) cluster.Bootstrap(regionID, []uint64{1}, []uint64{4}, 4) baseAllocatedID := currentRequestID() lockResolver := txnutil.NewLockerResolver(kvStorage, model.DefaultChangeFeedID("changefeed-test"), util.RoleTester) isPullInit := &mockPullerInit{} grpcPool := NewGrpcPoolImpl(ctx, &security.Credential{}) defer grpcPool.Close() regionCache := tikv.NewRegionCache(pdClient) defer regionCache.Close() cdcClient := NewCDCClient( ctx, pdClient, grpcPool, regionCache, pdutil.NewClock4Test(), model.DefaultChangeFeedID(""), config.GetDefaultServerConfig().KVClient) eventCh := make(chan model.RegionFeedEvent, 50) wg.Add(1) go func() { defer wg.Done() defer close(eventCh) err := cdcClient.EventFeed(ctx, regionspan.ComparableSpan{Start: []byte("a"), End: []byte("b")}, 100, lockResolver, isPullInit, eventCh) require.Equal(t, context.Canceled, errors.Cause(err)) }() // wait request id allocated with: new session, new request waitRequestID(t, baseAllocatedID+1) err = 
retry.Do(context.Background(), func() error { if atomic.LoadUint64(&requestID) == currentRequestID() { return nil } return errors.Errorf("request is not received, requestID: %d, expected: %d", atomic.LoadUint64(&requestID), currentRequestID()) }, retry.WithBackoffBaseDelay(50), retry.WithMaxTries(10)) require.Nil(t, err) initialized1 := mockInitializedEvent(regionID, currentRequestID()) ch1 <- initialized1 err = retry.Do(context.Background(), func() error { if len(ch1) == 0 { return nil } return errors.New("message is not sent") }, retry.WithBackoffBaseDelay(200), retry.WithBackoffMaxDelay(60*1000), retry.WithMaxTries(10)) require.Nil(t, err) resolved := &cdcpb.ChangeDataEvent{Events: []*cdcpb.Event{ { RegionId: regionID, RequestId: currentRequestID(), Event: &cdcpb.Event_ResolvedTs{ResolvedTs: 120}, }, }} ch1 <- resolved err = retry.Do(context.Background(), func() error { if len(ch1) == 0 { return nil } return errors.New("message is not sent") }, retry.WithBackoffBaseDelay(200), retry.WithBackoffMaxDelay(60*1000), retry.WithMaxTries(10)) require.Nil(t, err) err = failpoint.Enable("github.com/pingcap/tiflow/cdc/kv/kvClientStreamRecvError", "1*return(\"\")") require.Nil(t, err) defer func() { _ = failpoint.Disable("github.com/pingcap/tiflow/cdc/kv/kvClientStreamRecvError") }() ch1 <- resolved // another stream will be established, so we notify and wait the first // EventFeed loop exits. 
callback := srv1.notifyExit(0) select { case <-callback: case <-time.After(time.Second * 3): require.Fail(t, "event feed loop can't exit") } // wait request id allocated with: new session, new request*2 waitRequestID(t, baseAllocatedID+2) err = retry.Do(context.Background(), func() error { if atomic.LoadUint64(&requestID) == currentRequestID() { return nil } return errors.Errorf("request is not received, requestID: %d, expected: %d", atomic.LoadUint64(&requestID), currentRequestID()) }, retry.WithBackoffBaseDelay(50), retry.WithMaxTries(10)) require.Nil(t, err) initialized2 := mockInitializedEvent(regionID, currentRequestID()) ch1 <- initialized2 err = retry.Do(context.Background(), func() error { if len(ch1) == 0 { return nil } return errors.New("message is not sent") }, retry.WithBackoffBaseDelay(200), retry.WithBackoffMaxDelay(60*1000), retry.WithMaxTries(10)) require.Nil(t, err) resolved = &cdcpb.ChangeDataEvent{Events: []*cdcpb.Event{ { RegionId: regionID, RequestId: currentRequestID(), Event: &cdcpb.Event_ResolvedTs{ResolvedTs: 130}, }, }} ch1 <- resolved received := make([]model.RegionFeedEvent, 0, 4) defer cancel() ReceiveLoop: for { select { case event, ok := <-eventCh: if !ok { break ReceiveLoop } received = append(received, event) if event.Resolved.ResolvedTs == 130 { break ReceiveLoop } case <-time.After(time.Second): require.Fail(t, "event received timeout") } } var lastResolvedTs uint64 for _, e := range received { if lastResolvedTs > e.Resolved.ResolvedTs { require.Fail(t, fmt.Sprintf("the resolvedTs is back off %#v", resolved)) } } }
explode_data.jsonl/32869
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2045 }
[ 2830, 3393, 3027, 63483, 66102, 3036, 65394, 10850, 3707, 1155, 353, 8840, 836, 8, 341, 743, 753, 1314, 57243, 2768, 11066, 341, 197, 3244, 57776, 445, 20599, 979, 419, 374, 537, 264, 3690, 2768, 1936, 1138, 197, 532, 20985, 11, 9121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAtNamespace(t *testing.T) { expectPrinted(t, "@namespace\"http://www.com\";", "@namespace \"http://www.com\";\n") expectPrinted(t, "@namespace \"http://www.com\";", "@namespace \"http://www.com\";\n") expectPrinted(t, "@namespace \"http://www.com\" ;", "@namespace \"http://www.com\";\n") expectPrinted(t, "@namespace url();", "@namespace \"\";\n") expectPrinted(t, "@namespace url(http://www.com);", "@namespace \"http://www.com\";\n") expectPrinted(t, "@namespace url(http://www.com) ;", "@namespace \"http://www.com\";\n") expectPrinted(t, "@namespace url(\"http://www.com\");", "@namespace \"http://www.com\";\n") expectPrinted(t, "@namespace url(\"http://www.com\") ;", "@namespace \"http://www.com\";\n") expectPrinted(t, "@namespace ns\"http://www.com\";", "@namespace ns \"http://www.com\";\n") expectPrinted(t, "@namespace ns \"http://www.com\";", "@namespace ns \"http://www.com\";\n") expectPrinted(t, "@namespace ns \"http://www.com\" ;", "@namespace ns \"http://www.com\";\n") expectPrinted(t, "@namespace ns url();", "@namespace ns \"\";\n") expectPrinted(t, "@namespace ns url(http://www.com);", "@namespace ns \"http://www.com\";\n") expectPrinted(t, "@namespace ns url(http://www.com) ;", "@namespace ns \"http://www.com\";\n") expectPrinted(t, "@namespace ns url(\"http://www.com\");", "@namespace ns \"http://www.com\";\n") expectPrinted(t, "@namespace ns url(\"http://www.com\") ;", "@namespace ns \"http://www.com\";\n") expectParseError(t, "@namespace;", "<stdin>: warning: Expected URL token but found \";\"\n") expectParseError(t, "@namespace \"http://www.com\"", "<stdin>: warning: Expected \";\" but found end of file\n") expectParseError(t, "@namespace url(\"http://www.com\";", "<stdin>: warning: Expected \")\" but found \";\"\n") expectParseError(t, "@namespace noturl(\"http://www.com\");", "<stdin>: warning: Expected URL token but found \"noturl(\"\n") expectParseError(t, "@namespace url(", `<stdin>: warning: Expected URL token but found bad URL token 
<stdin>: error: Expected ")" to end URL token <stdin>: warning: Expected ";" but found end of file `) expectParseError(t, "@namespace ns;", "<stdin>: warning: Expected URL token but found \";\"\n") expectParseError(t, "@namespace ns \"http://www.com\"", "<stdin>: warning: Expected \";\" but found end of file\n") expectParseError(t, "@namespace ns url(\"http://www.com\";", "<stdin>: warning: Expected \")\" but found \";\"\n") expectParseError(t, "@namespace ns noturl(\"http://www.com\");", "<stdin>: warning: Expected URL token but found \"noturl(\"\n") expectParseError(t, "@namespace ns url(", `<stdin>: warning: Expected URL token but found bad URL token <stdin>: error: Expected ")" to end URL token <stdin>: warning: Expected ";" but found end of file `) expectParseError(t, "@namespace \"http://www.com\" {}", `<stdin>: warning: Expected ";" <stdin>: warning: Unexpected "{" `) }
explode_data.jsonl/32627
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1087 }
[ 2830, 3393, 1655, 22699, 1155, 353, 8840, 836, 8, 341, 24952, 8994, 291, 1155, 11, 8428, 2231, 2105, 1254, 1110, 2136, 905, 2105, 32503, 8428, 2231, 7245, 1254, 1110, 2136, 905, 2105, 17882, 77, 1138, 24952, 8994, 291, 1155, 11, 8428, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPutFoundInPrevious(t *testing.T) { dir, err := ioutil.TempDir("", "dcrtimed.test") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) fs, err := internalNew(dir) if err != nil { t.Fatal(err) } // Set testing flag. fs.testing = true // Put batch success in current time var hashes [][sha256.Size]byte count := 10 for i := 0; i < count; i++ { hash := [sha256.Size]byte{} hash[0] = byte(i) hashes = append(hashes, hash) } timestamp, me, err := fs.Put(hashes) if err != nil { t.Fatal(err) } if len(me) != count { t.Fatalf("expected %v multi error", count) } // Verify all return codes for _, m := range me { if m.ErrorCode != backend.ErrorOK { t.Fatalf("expected ErrorCode %v got %v", backend.ErrorOK, m.ErrorCode) } } // Override Now() function and move time 1 duration forward. This // causes Put to use the next timestamp container. fs.myNow = func() time.Time { return time.Unix(timestamp, 0).Add(fs.duration) } // Try again, now we expect count ErrorExists from previous // container(foundPrevious). timestamp, me, err = fs.Put(hashes) if err != nil { t.Fatal(err) } if len(me) != count { t.Fatalf("expected %v multi error", count) } // Verify all return codes for _, m := range me { if m.ErrorCode != foundPrevious { t.Fatalf("expected ErrorCode %v got %v", foundPrevious, m.ErrorCode) } } }
explode_data.jsonl/14960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 551 }
[ 2830, 3393, 19103, 6650, 641, 21291, 1155, 353, 8840, 836, 8, 341, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 7628, 3342, 75485, 5958, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 16867, 2643, 8442...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKubernetesStore_StoreHook(t *testing.T) { f := fake.Clientset{} store := kubernetesStore{ namespace: "dummy", client: &f, } // return a hook with a generated hook name as the fake f.AddReactor("create", "hooks", func(action clienttesting.Action) (handled bool, ret runtime.Object, err error) { hook := action.(clienttesting.UpdateAction).GetObject().(*v1alpha12.Hook) assert.Equal(t, hook.Name, "") assert.Equal(t, hook.GenerateName, "hook-") assert.Equal(t, hook.Spec.ForwardURL, "http://thing.com") assert.Equal(t, hook.Spec.Body, "OK") assert.Equal(t, hook.Spec.Headers, make(map[string][]string)) assert.Equal(t, hook.Status.Phase, v1alpha12.HookPhasePending) hook.ObjectMeta.Name = "generatedHookName" return true, hook, nil }) hookName, err := store.StoreHook("http://thing.com", []byte("OK"), make(http.Header)) assert.NoError(t, err) assert.Equal(t, "generatedHookName", hookName) assert.Equal(t, 1, len(f.Actions())) assert.Equal(t, "create", f.Actions()[0].GetVerb()) assert.Equal(t, "hooks", f.Actions()[0].GetResource().Resource) assert.Equal(t, "v1alpha1", f.Actions()[0].GetResource().Version) assert.Equal(t, "captainhook.io", f.Actions()[0].GetResource().Group) }
explode_data.jsonl/23004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 498 }
[ 2830, 3393, 42, 29827, 6093, 92684, 31679, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 12418, 11716, 746, 31483, 57279, 1669, 595, 29827, 6093, 515, 197, 56623, 25, 330, 31390, 756, 197, 25291, 25, 262, 609, 69, 345, 197, 630, 197, 322,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStoreAndRetrieveHTTP(t *testing.T) { target := core.NewBuildTarget(core.NewBuildLabel("pkg/name", "label_name")) target.AddOutput("testfile2") config := core.DefaultConfiguration() config.Cache.HTTPURL = "http://127.0.0.1:8989" config.Cache.HTTPWriteable = true cache := newHTTPCache(config) key := []byte("test_key") cache.Store(target, key, target.Outputs()) b, err := ioutil.ReadFile("plz-out/gen/pkg/name/testfile2") assert.NoError(t, err) // Remove the file before we retrieve metadata := cache.Retrieve(target, key, nil) assert.NotNil(t, metadata) b2, err := ioutil.ReadFile("plz-out/gen/pkg/name/testfile2") assert.NoError(t, err) assert.Equal(t, b, b2) }
explode_data.jsonl/46756
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 6093, 3036, 87665, 9230, 1155, 353, 8840, 836, 8, 341, 28861, 1669, 6200, 7121, 11066, 6397, 47867, 7121, 11066, 2476, 445, 30069, 75992, 497, 330, 1502, 1269, 5455, 28861, 1904, 5097, 445, 1944, 1192, 17, 1138, 25873, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStyle256 exercises the 256-color style type: construction with
// fg/bg codes, adding options, the generated ANSI escape codes, and the
// Print/Printf/Println output paths captured via a redirected renderer.
func TestStyle256(t *testing.T) {
	is := assert.New(t)
	ForceColor()
	// fg 192, bg 38
	s := S256(192, 38)
	s.Println("style 256 colored text")
	is.Equal("\x1b[38;5;192;48;5;38m MSG \x1b[0m", s.Sprint(" MSG "))
	s.SetOpts(Opts{OpUnderscore})
	s.Println("style 256 colored text - with option OpUnderscore")
	is.Equal("\x1b[38;5;192;48;5;38;4m MSG \x1b[0m", s.Sprint(" MSG "))
	s.AddOpts(OpBold)
	s.Println("style 256 colored text - add option OpBold")
	is.Equal("\x1b[38;5;192;48;5;38;4;1m MSG \x1b[0m", s.Sprint(" MSG "))
	// Redirect color output into a buffer so Print* results can be asserted.
	buf := forceOpenColorRender()
	defer resetColorRender()
	// empty
	s = S256()
	is.Equal("", s.String())
	is.Equal("MSG", s.Sprint("MSG"))
	// only fg
	s = S256(132)
	is.Equal("38;5;132", s.String())
	is.Equal("\x1b[38;5;132mMSG\x1b[0m", s.Sprint("MSG"))
	is.Equal("\x1b[38;5;132mMSG\x1b[0m", s.Sprintf("%s", "MSG"))
	// only bg
	// NOTE(review): S256(132) actually sets only the foreground, so this
	// case duplicates the "only fg" one above — confirm intent.
	s = S256(132)
	is.Equal("38;5;132", s.String())
	is.Equal("\x1b[38;5;132mMSG\x1b[0m", s.Sprint("MSG"))
	// fg and bg
	s = S256(132, 23)
	is.Equal("38;5;132;48;5;23", s.String())
	is.Equal("\x1b[38;5;132;48;5;23mMSG\x1b[0m", s.Sprint("MSG"))
	s = S256().Set(132, 23)
	is.Equal("38;5;132;48;5;23", s.String())
	is.Equal("\x1b[38;5;132;48;5;23mMSG\x1b[0m", s.Sprint("MSG"))
	s = S256().Set(132, 23, OpStrikethrough)
	is.Equal("38;5;132;48;5;23;9", s.String())
	s = S256().SetFg(132).SetBg(23)
	is.Equal("38;5;132;48;5;23", s.String())
	is.Equal("\x1b[38;5;132;48;5;23mMSG\x1b[0m", s.Sprint("MSG"))
	s = S256(132)
	// Color256.Print
	s.Print("MSG")
	str := buf.String()
	buf.Reset()
	is.Equal("\x1b[38;5;132mMSG\x1b[0m", str)
	// Color256.Printf
	s.Printf("A %s", "MSG")
	str = buf.String()
	buf.Reset()
	is.Equal("\x1b[38;5;132mA MSG\x1b[0m", str)
	// Color256.Println — appends a trailing newline
	s.Println("MSG")
	str = buf.String()
	buf.Reset()
	is.Equal("\x1b[38;5;132mMSG\x1b[0m\n", str)
}
explode_data.jsonl/7007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 972 }
[ 2830, 3393, 2323, 17, 20, 21, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 2060, 7121, 1155, 692, 197, 18573, 1636, 741, 1903, 1669, 328, 17, 20, 21, 7, 16, 24, 17, 11, 220, 18, 23, 340, 1903, 12419, 445, 3528, 220, 17, 20, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCompletionFile(t *testing.T) { defer func() { if r := recover(); r != nil { t.Errorf("Generating the completion_file panicked: %v", r) } }() g := &gcli{ root: "Root", format: true, } g.genCompletionCmdFile() if g.response.GetError() != "" { t.Errorf("Error generating the completion_file: %s", g.response.GetError()) return } file := g.response.File[0] if file.GetName() != "completion.go" { t.Errorf("(%s).genCompletionCmdFile() = %s, want %s", g.root, file.GetName(), "completion.go") } txtdiff.Diff(t, "completion_file", file.GetContent(), filepath.Join("testdata", "completion_file.want")) }
explode_data.jsonl/28900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 33190, 1703, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 743, 435, 1669, 11731, 2129, 435, 961, 2092, 341, 298, 3244, 13080, 445, 73314, 279, 9755, 2458, 93010, 25, 1018, 85, 497, 435, 340, 197, 197, 532, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEscapes(t *testing.T) { dec := json.NewDecoder(strings.NewReader(testCases)) var tcs []testCase if err := dec.Decode(&tcs); err != nil { t.Fatal(err) } for _, tc := range tcs { en := escape(tc.Raw) if !(en == tc.Full || en == tc.Min) { t.Errorf("encode %q: got %q, want %q or %q", tc.Raw, en, tc.Min, tc.Full) } m, err := unescape(tc.Min) if err != nil { t.Errorf("decode %q: %v", tc.Min, err) } if m != tc.Raw { t.Errorf("decode %q: got %q, want %q", tc.Min, m, tc.Raw) } f, err := unescape(tc.Full) if err != nil { t.Errorf("decode %q: %v", tc.Full, err) } if f != tc.Raw { t.Errorf("decode %q: got %q, want %q", tc.Full, f, tc.Raw) } } }
explode_data.jsonl/46956
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 348 }
[ 2830, 3393, 36121, 9337, 1155, 353, 8840, 836, 8, 341, 197, 8169, 1669, 2951, 7121, 20732, 51442, 68587, 8623, 37302, 1171, 2405, 259, 4837, 3056, 66194, 198, 743, 1848, 1669, 1622, 56372, 2099, 83, 4837, 1215, 1848, 961, 2092, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestGithub_BuildName_Fallback(t *testing.T) { name, _ := ioutil.TempDir(os.TempDir(), "build-tools") defer func() { _ = os.RemoveAll(name) }() oldpwd, _ := os.Getwd() defer func() { _ = os.Chdir(oldpwd) }() _ = os.Chdir(name) ci := &Github{Common: &Common{}} assert.Equal(t, filepath.Base(name), ci.BuildName()) }
explode_data.jsonl/53500
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 78717, 96686, 675, 1400, 3420, 1155, 353, 8840, 836, 8, 341, 11609, 11, 716, 1669, 43144, 65009, 6184, 9638, 65009, 6184, 1507, 330, 5834, 44646, 1138, 16867, 2915, 368, 314, 716, 284, 2643, 84427, 3153, 8, 50746, 61828, 255...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestKeyDelimiter verifies that a custom "::" key delimiter works for
// reads (GetString), defaults (SetDefault), and struct Unmarshal — in
// particular that keys containing "." (domains, annotation names) are
// not split when the delimiter is changed.
func TestKeyDelimiter(t *testing.T) {
	v := NewWithOptions(KeyDelimiter("::"))
	v.SetConfigType("yaml")
	r := strings.NewReader(string(yamlExampleWithDot))
	err := v.unmarshalReader(r, v.config)
	require.NoError(t, err)
	// Default value whose inner keys contain dots that must stay intact.
	values := map[string]interface{}{
		"image": map[string]interface{}{
			"repository": "someImage",
			"tag":        "1.0.0",
		},
		"ingress": map[string]interface{}{
			"annotations": map[string]interface{}{
				"traefik.frontend.rule.type":                 "PathPrefix",
				"traefik.ingress.kubernetes.io/ssl-redirect": "true",
			},
		},
	}
	v.SetDefault("charts::values", values)
	assert.Equal(t, "leather", v.GetString("clothing::jacket"))
	// The e-mail address contains dots but is a single key segment.
	assert.Equal(t, "01/02/03", v.GetString("emails::steve@hacker.com::created"))
	type config struct {
		Charts struct {
			Values map[string]interface{}
		}
	}
	expected := config{
		Charts: struct {
			Values map[string]interface{}
		}{
			Values: values,
		},
	}
	var actual config
	// Unmarshal must round-trip the default set above.
	assert.NoError(t, v.Unmarshal(&actual))
	assert.Equal(t, expected, actual)
}
explode_data.jsonl/9927
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 444 }
[ 2830, 3393, 1592, 91098, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 1532, 74238, 21358, 91098, 445, 486, 5455, 5195, 4202, 2648, 929, 445, 41466, 1138, 7000, 1669, 9069, 68587, 3609, 7021, 9467, 13314, 2354, 34207, 4390, 9859, 1669, 348, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPronoun(t *testing.T) { source := lexer.FromBytes([]byte("he")) tokens := lexer.Scan(source) program := Pronoun(tokens.Advance(), tokens) assert.Equal(t, "Pronoun: he", program.String()) }
explode_data.jsonl/52118
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 47, 2248, 1624, 1155, 353, 8840, 836, 8, 341, 47418, 1669, 53259, 11439, 7078, 10556, 3782, 445, 383, 5455, 3244, 9713, 1669, 53259, 54874, 12437, 340, 197, 14906, 1669, 86915, 1624, 34052, 17865, 85, 681, 1507, 11211, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestBuildPipelines_BuildVarious builds a logs pipeline end-to-end from an
// example config, starts its processors, pushes one Logs payload through,
// and verifies every exporter of the pipeline receives it. It also checks
// that an unknown data type fails at the exporter-build stage.
func TestBuildPipelines_BuildVarious(t *testing.T) {
	factories := createTestFactories()
	tests := []struct {
		dataType   string
		shouldFail bool
	}{
		{
			dataType:   "logs",
			shouldFail: false,
		},
		{
			dataType:   "nosuchdatatype",
			shouldFail: true,
		},
	}
	for _, test := range tests {
		t.Run(test.dataType, func(t *testing.T) {
			dataType := test.dataType
			cfg := createExampleConfig(dataType)
			// Build the exporters for the pipeline.
			allExporters, err := BuildExporters(componenttest.NewNopTelemetrySettings(), component.NewDefaultBuildInfo(), cfg, factories.Exporters)
			if test.shouldFail {
				// Unknown data type: building exporters must fail; nothing more to check.
				assert.Error(t, err)
				return
			}
			require.NoError(t, err)
			require.EqualValues(t, 1, len(allExporters))
			pipelineProcessors, err := BuildPipelines(componenttest.NewNopTelemetrySettings(), component.NewDefaultBuildInfo(), cfg, allExporters, factories.Processors)
			assert.NoError(t, err)
			require.NotNil(t, pipelineProcessors)
			err = pipelineProcessors.StartProcessors(context.Background(), componenttest.NewNopHost())
			assert.NoError(t, err)
			pipelineName := dataType
			processor := pipelineProcessors[cfg.Service.Pipelines[pipelineName]]
			// Ensure pipeline has its fields correctly populated:
			// a logs pipeline must have only the logs consumer set.
			require.NotNil(t, processor)
			assert.Nil(t, processor.firstTC)
			assert.Nil(t, processor.firstMC)
			assert.NotNil(t, processor.firstLC)
			// Compose the list of created exporters.
			exporterIDs := []config.ComponentID{config.NewComponentID("exampleexporter")}
			var exporters []*builtExporter
			for _, expID := range exporterIDs {
				// Ensure exporter is created.
				exp := allExporters[expID]
				require.NotNil(t, exp)
				exporters = append(exporters, exp)
			}
			// Send Logs via processor and verify that all exporters of the pipeline receive it.
			// First check that there are no logs in the exporters yet.
			var exporterConsumers []*testcomponents.ExampleExporterConsumer
			for _, exporter := range exporters {
				expConsumer := exporter.getLogExporter().(*testcomponents.ExampleExporterConsumer)
				exporterConsumers = append(exporterConsumers, expConsumer)
				require.Equal(t, len(expConsumer.Logs), 0)
			}
			// Send one custom data.
			log := pdata.Logs{}
			require.NoError(t, processor.firstLC.ConsumeLogs(context.Background(), log))
			// Now verify received data.
			for _, expConsumer := range exporterConsumers {
				// Check that the log payload is received by the exporter.
				require.Equal(t, 1, len(expConsumer.Logs))
				// Verify that the payload is delivered unchanged.
				assert.EqualValues(t, log, expConsumer.Logs[0])
			}
			err = pipelineProcessors.ShutdownProcessors(context.Background())
			assert.NoError(t, err)
		})
	}
}
explode_data.jsonl/24781
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 989 }
[ 2830, 3393, 11066, 47, 93997, 96686, 72641, 1155, 353, 8840, 836, 8, 1476, 1166, 52893, 1669, 1855, 2271, 17417, 2433, 2822, 78216, 1669, 3056, 1235, 341, 197, 59254, 256, 914, 198, 197, 197, 5445, 19524, 1807, 198, 197, 59403, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLogsExporter_NilLogger(t *testing.T) { le, err := NewLogsExporter(&fakeLogsExporterConfig, component.ExporterCreateSettings{}, newPushLogsData(nil)) require.Nil(t, le) require.Equal(t, errNilLogger, err) }
explode_data.jsonl/59602
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 51053, 88025, 1604, 321, 7395, 1155, 353, 8840, 836, 8, 341, 197, 273, 11, 1848, 1669, 1532, 51053, 88025, 2099, 30570, 51053, 88025, 2648, 11, 3692, 81077, 261, 4021, 6086, 22655, 501, 16644, 51053, 1043, 27907, 1171, 17957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_Hoverfly_SetMiddleware_WontSetMiddlewareIfCannotRunScript(t *testing.T) { RegisterTestingT(t) unit := NewHoverflyWithConfiguration(&Configuration{}) err := unit.SetMiddleware("python", "ewfaet4rafgre", "") Expect(err).ToNot(BeNil()) Expect(unit.Cfg.Middleware.Binary).To(Equal("")) script, _ := unit.Cfg.Middleware.GetScript() Expect(script).To(Equal("")) }
explode_data.jsonl/45390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 2039, 1975, 21642, 14812, 24684, 2763, 544, 1649, 24684, 2679, 17444, 6727, 5910, 1155, 353, 8840, 836, 8, 341, 79096, 16451, 51, 1155, 692, 81189, 1669, 1532, 34379, 21642, 2354, 7688, 2099, 7688, 6257, 692, 9859, 1669, 4982,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInboxServerVersion checks that bumping the server cache version
// invalidates the on-disk inbox (Read returns MissError) and that a
// subsequent Merge re-persists the inbox stamped with the new version.
func TestInboxServerVersion(t *testing.T) {
	tc, inbox, _ := setupInboxTest(t, "basic")
	// Create an inbox with a bunch of convos, merge it and read it back out
	numConvs := 10
	var convs []types.RemoteConversation
	for i := numConvs - 1; i >= 0; i-- {
		convs = append(convs, makeConvo(gregor1.Time(i), 1, 1))
	}
	require.NoError(t, inbox.Merge(context.TODO(), 1, utils.PluckConvs(convs), nil, nil))
	_, res, _, err := inbox.Read(context.TODO(), nil, nil)
	require.NoError(t, err)
	require.Equal(t, numConvs, len(res))
	// Increase server version — this should invalidate the cached inbox.
	cerr := tc.Context().ServerCacheVersions.Set(context.TODO(), chat1.ServerCacheVers{
		InboxVers: 5,
	})
	require.NoError(t, cerr)
	// The stale cache must now miss rather than return old data.
	_, res, _, err = inbox.Read(context.TODO(), nil, nil)
	require.Error(t, err)
	require.IsType(t, MissError{}, err)
	// Re-merging repopulates the disk inbox under the new server version.
	require.NoError(t, inbox.Merge(context.TODO(), 1, utils.PluckConvs(convs), nil, nil))
	idata, err := inbox.readDiskInbox(context.TODO())
	require.NoError(t, err)
	require.Equal(t, 5, idata.ServerVersion)
}
explode_data.jsonl/16810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 402 }
[ 2830, 3393, 641, 2011, 5475, 5637, 1155, 353, 8840, 836, 8, 341, 78255, 11, 22883, 11, 716, 1669, 6505, 641, 2011, 2271, 1155, 11, 330, 22342, 5130, 197, 322, 4230, 458, 22883, 448, 264, 15493, 315, 5686, 436, 11, 10880, 432, 323, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestFFTSettings_ZeroPolyViaMultiplication_Python checks
// ZeroPolyViaMultiplication for a fixed 16-entry missing-index pattern
// against golden evaluation and coefficient values produced by a Python
// reference implementation.
func TestFFTSettings_ZeroPolyViaMultiplication_Python(t *testing.T) {
	fs := NewFFTSettings(4)
	// true = data present, false = missing; the zero poly vanishes at the
	// missing positions.
	exists := []bool{
		true, false, false, true,
		false, true, true, false,
		false, false, true, true,
		false, true, false, true,
	}
	var missingIndices []uint64
	for i, v := range exists {
		if !v {
			missingIndices = append(missingIndices, uint64(i))
		}
	}
	zeroEval, zeroPoly := fs.ZeroPolyViaMultiplication(missingIndices, uint64(len(exists)))
	// produced from python implementation, check it's exactly correct.
	// Zeros appear exactly at the missing indices.
	expectedEval := []bls.Fr{
		bls.ToFr("14588039771402811141309184187446855981335438080893546259057924963590957391610"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("25282314916481609559521954076339682473205592322878335865825728051159013479404"),
		bls.ToFr("0"),
		bls.ToFr("9734294374130760583715448090686447252507379360428151468094660312309164340954"),
		bls.ToFr("46174059940592560972885266237294437331033682990367334129313899533918398326759"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("19800438175532257114364592658377771559959372488282871075375645402573059163542"),
		bls.ToFr("51600792158839053735333095261675086809225297622863271039022341472045686698468"),
		bls.ToFr("0"),
		bls.ToFr("30826826002656394595578928901119179510733149506206612508564887111870905005459"),
		bls.ToFr("0"),
		bls.ToFr("15554185610546001233857357261484634664347627247695018404843511652636226542123"),
	}
	for i := range zeroEval {
		if !bls.EqualFr(&expectedEval[i], &zeroEval[i]) {
			t.Errorf("at eval %d, expected: %s, got: %s", i, bls.FrStr(&expectedEval[i]), bls.FrStr(&zeroEval[i]))
		}
	}
	// Golden coefficients; the poly has degree = number of missing indices
	// (leading coefficient 1), higher coefficients are zero.
	expectedPoly := []bls.Fr{
		bls.ToFr("16624801632831727463500847948913128838752380757508923660793891075002624508302"),
		bls.ToFr("657600938076390596890050185197950209451778703253960215879283709261059409858"),
		bls.ToFr("3323305725086409462431021445881322078102454991213853012292210556336005043908"),
		bls.ToFr("28834633028751086963335689622252225417970192887686504864119125368464893106943"),
		bls.ToFr("13240145897582070561550318352041568075426755012978281815272419515864405431856"),
		bls.ToFr("29207346592337407428161116115756746704727357067233245260187026881605970530301"),
		bls.ToFr("26541641805327388562620144855073374836076680779273352463774100034531024896251"),
		bls.ToFr("1030314501662711061715476678702471496208942882800700611947185222402136833216"),
		bls.ToFr("1"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("0"),
		bls.ToFr("0"),
	}
	for i := range zeroPoly {
		if !bls.EqualFr(&expectedPoly[i], &zeroPoly[i]) {
			t.Errorf("at poly %d, expected: %s, got: %s", i, bls.FrStr(&expectedPoly[i]), bls.FrStr(&zeroPoly[i]))
		}
	}
}
explode_data.jsonl/6183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1271 }
[ 2830, 3393, 86414, 6086, 97672, 38164, 54428, 57251, 1693, 1088, 26895, 1155, 353, 8840, 836, 8, 341, 53584, 1669, 1532, 86414, 6086, 7, 19, 692, 8122, 1671, 1669, 3056, 2641, 515, 197, 42808, 11, 895, 11, 895, 11, 830, 345, 197, 3601...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestReversiAnz16(t *testing.T) { r := NewReversiAnz() if r.GetEdgeSideThreeCnt() != 0 { t.Errorf("NG") } }
explode_data.jsonl/23039
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 57 }
[ 2830, 3393, 693, 3004, 72, 2082, 89, 16, 21, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 693, 3004, 72, 2082, 89, 741, 743, 435, 2234, 11656, 16384, 19641, 33747, 368, 961, 220, 15, 341, 197, 3244, 13080, 445, 6140, 1138, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestSettingsFromConfigAndArgs verifies how Tiltfile config settings are
// resolved from CLI args, from a user config file, and from both — with
// args taking precedence over the config file for the same key, and
// positional args binding to the args=True setting ('c').
func TestSettingsFromConfigAndArgs(t *testing.T) {
	for _, tc := range []struct {
		name     string
		args     []string
		config   map[string][]string
		expected map[string][]string
	}{
		{
			name:   "args only",
			args:   []string{"--a", "1", "--a", "2", "--b", "3", "--a", "4", "5", "6"},
			config: nil,
			expected: map[string][]string{
				"a": {"1", "2", "4"},
				"b": {"3"},
				"c": {"5", "6"},
			},
		},
		{
			name: "config only",
			args: nil,
			config: map[string][]string{
				"b": {"7", "8"},
				"c": {"9"},
			},
			expected: map[string][]string{
				"b": {"7", "8"},
				"c": {"9"},
			},
		},
		{
			// args win for keys present in both ('c' comes from the
			// positional args, not the config file).
			name: "args trump config",
			args: []string{"--a", "1", "--a", "2", "--a", "4", "5", "6"},
			config: map[string][]string{
				"b": {"7", "8"},
				"c": {"9"},
			},
			expected: map[string][]string{
				"a": {"1", "2", "4"},
				"b": {"7", "8"},
				"c": {"5", "6"},
			},
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			f := NewFixture(t, model.NewUserConfigState(tc.args))
			defer f.TearDown()
			// Tiltfile defines three string-list settings and prints each
			// (or 'missing') so the assertions below can grep the output.
			f.File("Tiltfile", `
config.define_string_list('a')
config.define_string_list('b')
config.define_string_list('c', args=True)
cfg = config.parse()
print("a=", cfg.get('a', 'missing'))
print("b=", cfg.get('b', 'missing'))
print("c=", cfg.get('c', 'missing'))
`)
			if tc.config != nil {
				// Write the user config as JSON, as the real file would be.
				b := &bytes.Buffer{}
				err := json.NewEncoder(b).Encode(tc.config)
				require.NoError(t, err)
				f.File(UserConfigFileName, b.String())
			}
			_, err := f.ExecFile("Tiltfile")
			require.NoError(t, err)
			for _, arg := range []string{"a", "b", "c"} {
				// Reconstruct the printed representation of the expected list.
				expected := "missing"
				if vs, ok := tc.expected[arg]; ok {
					var s []string
					for _, v := range vs {
						s = append(s, fmt.Sprintf(`"%s"`, v))
					}
					expected = fmt.Sprintf("[%s]", strings.Join(s, ", "))
				}
				require.Contains(t, f.PrintOutput(), fmt.Sprintf("%s= %s", arg, expected))
			}
		})
	}
}
explode_data.jsonl/65237
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 943 }
[ 2830, 3393, 6086, 3830, 2648, 3036, 4117, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 31215, 257, 3056, 917, 198, 197, 25873, 256, 2415, 14032, 45725, 917, 198, 197, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestRProcessBlockSuccess drives the processor FSM with rProcessBlock
// events: no-ops when there is nothing to process or blocks are ahead of
// the current height, block processing when H+1 and H+2 are queued, and
// the draining path that finishes once the queue can no longer advance.
func TestRProcessBlockSuccess(t *testing.T) {
	tests := []testFields{
		{
			name: "noop - no blocks over current height",
			steps: []pcFsmMakeStateValues{
				{
					currentState:  &params{},
					event:         rProcessBlock{},
					wantState:     &params{},
					wantNextEvent: noOp,
				},
			},
		},
		{
			name: "noop - high new blocks",
			steps: []pcFsmMakeStateValues{
				{
					// Queued blocks (30, 31) are far above height 5, so
					// nothing can be processed yet.
					currentState:  &params{height: 5, items: []pcBlock{{"P1", 30}, {"P2", 31}}},
					event:         rProcessBlock{},
					wantState:     &params{height: 5, items: []pcBlock{{"P1", 30}, {"P2", 31}}},
					wantNextEvent: noOp,
				},
			},
		},
		{
			name: "blocks H+1 and H+2 present",
			steps: []pcFsmMakeStateValues{
				{
					// Both next blocks available: process block 1, advance
					// height, and report it as processed from peer P1.
					currentState:  &params{items: []pcBlock{{"P1", 1}, {"P2", 2}}},
					event:         rProcessBlock{},
					wantState:     &params{height: 1, items: []pcBlock{{"P2", 2}}, blocksSynced: 1},
					wantNextEvent: pcBlockProcessed{height: 1, peerID: "P1"},
				},
			},
		},
		{
			name: "blocks H+1 and H+2 present after draining",
			steps: []pcFsmMakeStateValues{
				{
					// some contiguous blocks - on stop check draining is set
					currentState:  &params{items: []pcBlock{{"P1", 1}, {"P2", 2}, {"P1", 4}}},
					event:         scFinishedEv{},
					wantState:     &params{items: []pcBlock{{"P1", 1}, {"P2", 2}, {"P1", 4}}, draining: true},
					wantNextEvent: noOp,
				},
				{
					event:         rProcessBlock{},
					wantState:     &params{height: 1, items: []pcBlock{{"P2", 2}, {"P1", 4}}, blocksSynced: 1, draining: true},
					wantNextEvent: pcBlockProcessed{height: 1, peerID: "P1"},
				},
				{
					// finish when H+1 or/and H+2 are missing
					event:         rProcessBlock{},
					wantState:     &params{height: 1, items: []pcBlock{{"P2", 2}, {"P1", 4}}, blocksSynced: 1, draining: true},
					wantNextEvent: pcFinished{tmState: tmState.State{LastBlockHeight: 1}, blocksSynced: 1},
				},
			},
		},
	}
	executeProcessorTests(t, tests)
}
explode_data.jsonl/30582
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 857 }
[ 2830, 3393, 49, 7423, 4713, 7188, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1944, 8941, 515, 197, 197, 515, 298, 11609, 25, 330, 40162, 481, 902, 10010, 916, 1482, 2608, 756, 298, 18388, 7124, 25, 3056, 3992, 37, 3563, 8078, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCAConfigSecurityProviderCase(t *testing.T) { // we expect the following values const expectedPkcs11Value = "pkcs11" const expectedSwValue = "sw" // map key represents what we will input providerTestValues := map[string]string{ // all upper case "SW": expectedSwValue, "PKCS11": expectedPkcs11Value, // all lower case "sw": expectedSwValue, "pkcs11": expectedPkcs11Value, // mixed case "Sw": expectedSwValue, "Pkcs11": expectedPkcs11Value, } for inputValue, expectedValue := range providerTestValues { // set the input value, overriding what's in file os.Setenv("FABRIC_SDK_CLIENT_BCCSP_SECURITY_DEFAULT_PROVIDER", inputValue) backend, err := config.FromFile(configTestFilePath)() if err != nil { t.Fatal("Failed to get config backend") } customBackend := getCustomBackend(backend...) cryptoConfig := ConfigFromBackend(customBackend).(*Config) // expected values should be uppercase if expectedValue != cryptoConfig.SecurityProvider() { t.Fatalf( "Incorrect BCCSP SecurityProvider - input:%s actual:%s, expected:%s", inputValue, cryptoConfig.SecurityProvider(), expectedValue, ) } } }
explode_data.jsonl/58383
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 5049, 2648, 15352, 5179, 4207, 1155, 353, 8840, 836, 8, 1476, 197, 322, 582, 1720, 279, 2701, 2750, 198, 4777, 3601, 58415, 4837, 16, 16, 1130, 284, 330, 20819, 4837, 16, 16, 698, 4777, 3601, 13218, 1130, 284, 330, 2280, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestExifEditor_IfdBuilder(t *testing.T) { je := getJpegEditor(LeicaImg, t) je.Exif().SetDirty() //force write of software tag builder, changed := je.Exif().IfdBuilder() if builder == nil { t.Fatalf("IfdBuilder is nil") } if !changed { t.Fatalf("Expected builder to have changed") } je.Exif().SetDirty() //force jpegEditor to use the IfdBuilder b, err := je.Bytes() if err != nil { t.Fatalf("Error getting bytes") } md, err := NewMetaData(b) if err != nil { t.Fatalf("Could not open metadata") } softwareTag := "" if err = md.exifData.ScanIfdRoot(IFD_Software, &softwareTag); err != nil { t.Fatalf("Expected IFD_Software Tag got error: %v", err) } if softwareTag != ExifEditorSoftware { t.Errorf("Expected %s got %s", ExifEditorSoftware, softwareTag) } }
explode_data.jsonl/79879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 840, 333, 9410, 62, 2679, 67, 3297, 1155, 353, 8840, 836, 8, 341, 197, 3756, 1669, 633, 41, 10311, 9410, 7, 2304, 3001, 13033, 11, 259, 340, 197, 3756, 5121, 333, 1005, 1649, 36485, 368, 442, 8833, 3270, 315, 3162, 4772,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestMinimalMode verifies that vstream rejects binlog events written with
// binlog_row_image=minimal: the stream must fail with a "partial row image"
// error and deliver no events.
func TestMinimalMode(t *testing.T) {
	if testing.Short() {
		t.Skip()
	}
	execStatements(t, []string{
		"create table t1(id int, val1 varbinary(128), val2 varbinary(128), primary key(id))",
		"insert into t1 values(1, 'aaa', 'bbb')",
	})
	defer execStatements(t, []string{
		"drop table t1",
	})
	engine.se.Reload(context.Background())
	// Record position before the next few statements.
	pos := primaryPosition(t)
	// Produce one update under minimal row image, then restore full.
	execStatements(t, []string{
		"set @@session.binlog_row_image='minimal'",
		"update t1 set val1='bbb' where id=1",
		"set @@session.binlog_row_image='full'",
	})
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	ch := make(chan []*binlogdatapb.VEvent)
	// Any event delivered is a failure — the stream is expected to error
	// out before emitting anything.
	go func() {
		for evs := range ch {
			t.Errorf("received: %v", evs)
		}
	}()
	defer close(ch)
	err := vstream(ctx, t, pos, nil, nil, ch)
	want := "partial row image encountered"
	if err == nil || !strings.Contains(err.Error(), want) {
		t.Errorf("err: %v, must contain '%s'", err, want)
	}
}
explode_data.jsonl/10425
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 88328, 3636, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 630, 67328, 93122, 1155, 11, 3056, 917, 515, 197, 197, 1, 3182, 1965, 259, 16, 3724, 526, 11, 1044, 16, 762, 25891, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAccKeycloakOpenidClientAuthorizationRolePolicy_basic(t *testing.T) { t.Parallel() clientId := acctest.RandomWithPrefix("tf-acc") roleName := acctest.RandomWithPrefix("tf-acc") resource.Test(t, resource.TestCase{ ProviderFactories: testAccProviderFactories, PreCheck: func() { testAccPreCheck(t) }, CheckDestroy: testResourceKeycloakOpenidClientAuthorizationRolePolicyDestroy(), Steps: []resource.TestStep{ { Config: testResourceKeycloakOpenidClientAuthorizationRolePolicy_basic(roleName, clientId), Check: testResourceKeycloakOpenidClientAuthorizationRolePolicyExists("keycloak_openid_client_role_policy.test"), }, }, }) }
explode_data.jsonl/60776
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 14603, 1592, 88751, 5002, 307, 2959, 18124, 9030, 13825, 34729, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 25291, 764, 1669, 1613, 67880, 26709, 2354, 14335, 445, 8935, 12, 4475, 1138, 197, 5778, 675, 1669, 1613, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSubmitCancelContextAfterFirstSubmission(t *testing.T) { if testing.Short() { t.Skip("skip test in short mode") } sess, err := NewSession() if err != nil { t.Fatal(err) } ctx := sess.NewContext() if err := DoSubmission(ctx, sess); err != nil { t.Fatal(err) } ctx.Cancel() // fail second submission err = DoSubmission(ctx, sess) if err == nil || !strings.HasPrefix(err.Error(), "session_test.go: submit failed") { t.Fatalf("not the error we expected: %+v", err) } if !errors.Is(err, context.Canceled) { t.Fatalf("not the error we expected: %+v", err) } }
explode_data.jsonl/78955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 8890, 9269, 1972, 6025, 5338, 86621, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 20599, 1273, 304, 2805, 3856, 1138, 197, 532, 1903, 433, 11, 1848, 1669, 1532, 5283, 741, 743, 1848, 961...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestSyncPodBackoff(t *testing.T) { var fakeClock = clock.NewFakeClock(time.Now()) startTime := fakeClock.Now() dm, fakeDocker := newTestDockerManager() containers := []api.Container{ {Name: "good"}, {Name: "bad"}, } pod := makePod("podfoo", &api.PodSpec{ Containers: containers, }) stableId := "k8s_bad." + strconv.FormatUint(kubecontainer.HashContainer(&containers[1]), 16) + "_podfoo_new_12345678" dockerContainers := []*FakeContainer{ { ID: "9876", Name: "/k8s_POD." + strconv.FormatUint(generatePodInfraContainerHash(pod), 16) + "_podfoo_new_12345678_0", StartedAt: startTime, Running: true, }, { ID: "1234", Name: "/k8s_good." + strconv.FormatUint(kubecontainer.HashContainer(&containers[0]), 16) + "_podfoo_new_12345678_0", StartedAt: startTime, Running: true, }, { ID: "5678", Name: "/k8s_bad." + strconv.FormatUint(kubecontainer.HashContainer(&containers[1]), 16) + "_podfoo_new_12345678_0", ExitCode: 42, StartedAt: startTime, FinishedAt: fakeClock.Now(), }, } startCalls := []string{"create", "start", "inspect_container"} backOffCalls := []string{} startResult := &kubecontainer.SyncResult{Action: kubecontainer.StartContainer, Target: "bad", Error: nil, Message: ""} backoffResult := &kubecontainer.SyncResult{Action: kubecontainer.StartContainer, Target: "bad", Error: kubecontainer.ErrCrashLoopBackOff, Message: ""} tests := []struct { tick int backoff int killDelay int result []string expectErr bool }{ {1, 1, 1, startCalls, false}, {2, 2, 2, startCalls, false}, {3, 2, 3, backOffCalls, true}, {4, 4, 4, startCalls, false}, {5, 4, 5, backOffCalls, true}, {6, 4, 6, backOffCalls, true}, {7, 4, 7, backOffCalls, true}, {8, 8, 129, startCalls, false}, {130, 1, 0, startCalls, false}, } backOff := flowcontrol.NewBackOff(time.Second, time.Minute) backOff.Clock = fakeClock for _, c := range tests { fakeDocker.SetFakeContainers(dockerContainers) fakeClock.SetTime(startTime.Add(time.Duration(c.tick) * time.Second)) result := runSyncPod(t, dm, fakeDocker, pod, backOff, c.expectErr) 
verifyCalls(t, fakeDocker, c.result) // Verify whether the correct sync pod result is generated if c.expectErr { verifySyncResults(t, []*kubecontainer.SyncResult{backoffResult}, result) } else { verifySyncResults(t, []*kubecontainer.SyncResult{startResult}, result) } if backOff.Get(stableId) != time.Duration(c.backoff)*time.Second { t.Errorf("At tick %s expected backoff=%s got=%s", time.Duration(c.tick)*time.Second, time.Duration(c.backoff)*time.Second, backOff.Get(stableId)) } if len(fakeDocker.Created) > 0 { // pretend kill the container fakeDocker.Created = nil dockerContainers[2].FinishedAt = startTime.Add(time.Duration(c.killDelay) * time.Second) } } }
explode_data.jsonl/31170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1199 }
[ 2830, 3393, 12154, 23527, 3707, 1847, 1155, 353, 8840, 836, 8, 341, 2405, 12418, 26104, 284, 8866, 7121, 52317, 26104, 9730, 13244, 2398, 21375, 1462, 1669, 12418, 26104, 13244, 2822, 2698, 76, 11, 12418, 35, 13659, 1669, 501, 2271, 35, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestComputeProposalBinding(t *testing.T) { expectedDigestHex := "5093dd4f4277e964da8f4afbde0a9674d17f2a6a5961f0670fc21ae9b67f2983" expectedDigest, _ := hex.DecodeString(expectedDigestHex) chdr, _ := proto.Marshal(&common.ChannelHeader{ Epoch: uint64(10), }) shdr, _ := proto.Marshal(&common.SignatureHeader{ Nonce: []byte("nonce"), Creator: []byte("creator"), }) hdr, _ := proto.Marshal(&common.Header{ ChannelHeader: chdr, SignatureHeader: shdr, }) prop := &pb.Proposal{ Header: hdr, } binding, _ := utils.ComputeProposalBinding(prop) assert.Equal(t, expectedDigest, binding, "Binding does not match expected digest") }
explode_data.jsonl/39988
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 270 }
[ 2830, 3393, 46254, 98637, 15059, 1155, 353, 8840, 836, 8, 341, 42400, 45217, 20335, 1669, 330, 20, 15, 24, 18, 631, 19, 69, 19, 17, 22, 22, 68, 24, 21, 19, 3235, 23, 69, 19, 96834, 450, 15, 64, 24, 21, 22, 19, 67, 16, 22, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGormMysql(t *testing.T) { t.Run("test gorm mysql", func(t *testing.T) { gormDb := gormMysql() if gormDb == nil { t.Error("gorm db is nil") } }) }
explode_data.jsonl/19487
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 38, 493, 44, 14869, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1944, 342, 493, 10564, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3174, 493, 7994, 1669, 342, 493, 44, 14869, 741, 197, 743, 342, 493, 7994, 621, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestModifierDoubleQuotes(t *testing.T) { josn := `{ "data": [ { "name": "Product P4", "productId": "1bb3", "vendorId": "10de" }, { "name": "Product P4", "productId": "1cc3", "vendorId": "20de" }, { "name": "Product P4", "productId": "1dd3", "vendorId": "30de" } ] }` AddModifier("string", func(josn, arg string) string { return strconv.Quote(josn) }) res := Get(josn, "data.#.{name,value:{productId,vendorId}.@string.@ugly}") assert(t, res.Raw == `[`+ `{"name":"Product P4","value":"{\"productId\":\"1bb3\",\"vendorId\":\"10de\"}"},`+ `{"name":"Product P4","value":"{\"productId\":\"1cc3\",\"vendorId\":\"20de\"}"},`+ `{"name":"Product P4","value":"{\"productId\":\"1dd3\",\"vendorId\":\"30de\"}"}`+ `]`) }
explode_data.jsonl/43486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 34405, 7378, 43780, 1155, 353, 8840, 836, 8, 341, 12428, 436, 77, 1669, 1565, 515, 197, 197, 97115, 788, 2278, 7847, 341, 298, 197, 31486, 788, 330, 4816, 393, 19, 756, 298, 197, 1, 46166, 788, 330, 16, 6066, 18, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDupParams(t *testing.T) { const SCRIPT = ` function F(x, y, x) { return x; } F(1, 2); ` testScript1(SCRIPT, _undefined, t) }
explode_data.jsonl/75289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 85713, 4870, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 7527, 434, 2075, 11, 379, 11, 856, 8, 341, 197, 853, 856, 280, 197, 630, 12727, 7, 16, 11, 220, 17, 317, 197, 19324, 18185, 5910, 16, 7, 24787, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestClient_Stderr(t *testing.T) { stderr := new(bytes.Buffer) process := helperProcess("stderr") c := NewClient(&ClientConfig{ Cmd: process, Stderr: stderr, HandshakeConfig: testHandshake, Plugins: testPluginMap, }) defer c.Kill() if _, err := c.Start(); err != nil { t.Fatalf("err: %s", err) } for !c.Exited() { time.Sleep(10 * time.Millisecond) } if c.killed() { t.Fatal("process failed to exit gracefully") } if !strings.Contains(stderr.String(), "HELLO\n") { t.Fatalf("bad log data: '%s'", stderr.String()) } if !strings.Contains(stderr.String(), "WORLD\n") { t.Fatalf("bad log data: '%s'", stderr.String()) } }
explode_data.jsonl/57847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 2959, 62, 22748, 615, 1155, 353, 8840, 836, 8, 341, 6736, 615, 1669, 501, 23158, 22622, 340, 53314, 1669, 13137, 7423, 445, 36422, 1138, 1444, 1669, 1532, 2959, 2099, 2959, 2648, 515, 197, 6258, 2277, 25, 1797, 1882, 345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCertificateDelete(t *testing.T) { withTestClient(t, func(config *CmdConfig, tm *tcMocks) { cID := "892071a0-bb95-49bc-8021-3afd67a210bf" tm.certificates.On("Delete", cID).Return(nil) config.Args = append(config.Args, cID) config.Doit.Set(config.NS, doctl.ArgForce, true) err := RunCertificateDelete(config) assert.NoError(t, err) }) }
explode_data.jsonl/4821
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 33202, 6435, 1155, 353, 8840, 836, 8, 341, 46948, 2271, 2959, 1155, 11, 2915, 8754, 353, 15613, 2648, 11, 17333, 353, 10413, 72577, 8, 341, 197, 1444, 915, 1669, 330, 23, 24, 17, 15, 22, 16, 64, 15, 1455, 65, 24, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetHistoricalDataAt(t *testing.T) { clientCertTemDir := testutils.GenerateTestCrypto(t, []string{"admin", "alice", "server"}) testServer, _, _, err := SetupTestServerWithParams(t, clientCertTemDir, time.Second, 5, false, false) defer testServer.Stop() require.NoError(t, err) _, _, aliceSession := startServerConnectOpenAdminCreateUserAndUserSession(t, testServer, clientCertTemDir, "alice") // 5 blocks, 5 tx each for i := 0; i < 5; i++ { keys := make([]string, 0) values := make([]string, 0) for j := 0; j < 5; j++ { keys = append(keys, fmt.Sprintf("key%d", j)) values = append(values, fmt.Sprintf("value%d_%d", i, j)) } putMultipleKeysAndValues(t, keys, values, "alice", aliceSession) } tests := []struct { name string key string version *types.Version want *types.ValueWithMetadata wantErr bool }{ { name: "key0, block 3, index 0", key: "key0", version: &types.Version{ BlockNum: 3, TxNum: 0, }, want: &types.ValueWithMetadata{ Value: []byte("value0_0"), Metadata: &types.Metadata{ Version: &types.Version{ BlockNum: 3, TxNum: 0, }, AccessControl: &types.AccessControl{ ReadUsers: map[string]bool{"alice": true}, ReadWriteUsers: map[string]bool{"alice": true}, }, }, }, wantErr: false, }, { name: "key1, block 5, index 1", key: "key1", version: &types.Version{ BlockNum: 5, TxNum: 1, }, want: &types.ValueWithMetadata{ Value: []byte("value2_1"), Metadata: &types.Metadata{ Version: &types.Version{ BlockNum: 5, TxNum: 1, }, AccessControl: &types.AccessControl{ ReadUsers: map[string]bool{"alice": true}, ReadWriteUsers: map[string]bool{"alice": true}, }, }, }, wantErr: false, }, { name: "key2, block 10, block not exist", key: "key1", version: &types.Version{ BlockNum: 12, TxNum: 2, }, want: nil, wantErr: false, }, { name: "key2, block 5, index 12, index not exist", key: "key1", version: &types.Version{ BlockNum: 5, TxNum: 12, }, want: nil, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { p, err := aliceSession.Provenance() require.NoError(t, 
err) got, err := p.GetHistoricalDataAt("bdb", tt.key, tt.version) if !tt.wantErr { require.NoError(t, err) gotStr, err := json.Marshal(got) require.NoError(t, err) wantStr, err := json.Marshal(tt.want) require.NoError(t, err) require.True(t, proto.Equal(tt.want, got), fmt.Sprintf("expected \n%s, got \n%s ", wantStr, gotStr)) } else { require.Error(t, err) require.Nil(t, got) } }) } }
explode_data.jsonl/47195
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1311 }
[ 2830, 3393, 1949, 48983, 938, 1043, 1655, 1155, 353, 8840, 836, 8, 341, 25291, 36934, 21988, 6184, 1669, 1273, 6031, 57582, 2271, 58288, 1155, 11, 3056, 917, 4913, 2882, 497, 330, 63195, 497, 330, 4030, 23625, 18185, 5475, 11, 8358, 835...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSingleBucketMapStringKeys_NoDupLen(t *testing.T) { testMapLookups(t, map[string]string{ "x": "x1val", "xx": "x2val", "foo": "fooval", "xxxx": "x4val", "xxxxx": "x5val", "xxxxxx": "x6val", strings.Repeat("x", 128): "longval", }) }
explode_data.jsonl/19917
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 10888, 36018, 2227, 703, 8850, 36989, 85713, 11271, 1155, 353, 8840, 836, 8, 341, 18185, 2227, 10380, 8602, 1155, 11, 2415, 14032, 30953, 515, 197, 197, 65438, 788, 2549, 330, 87, 16, 831, 756, 197, 197, 1, 4146, 788, 3824...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_findSelectedCategoryFilters(t *testing.T) { filterSortDataTypeCategories := []FilterSortDataTypeCategory{ { FilterSortCategory: FilterSortCategoryTypeFacility, FilterSort: FacilityFilterDataTypes, }, } want1 := FilterSortDataTypeCategory{ FilterSortCategory: FilterSortCategoryTypeFacility, FilterSort: FacilityFilterDataTypes, } type args struct { filterCategories []FilterSortDataTypeCategory categoryInput FilterSortCategoryType } tests := []struct { name string args args want bool want1 *FilterSortDataTypeCategory }{ { name: "valid type", args: args{ filterCategories: filterSortDataTypeCategories, categoryInput: FilterSortCategoryTypeFacility, }, want: true, want1: &want1, }, { name: "invalid country type", args: args{ filterCategories: filterSortDataTypeCategories, categoryInput: FilterSortCategoryType("invalid"), }, want: false, want1: nil, }, { name: "invalid country list type", args: args{ filterCategories: []FilterSortDataTypeCategory{}, categoryInput: FilterSortCategoryTypeFacility, }, want: false, want1: nil, }, { name: "empty args", args: args{}, want: false, want1: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, got1 := findSelectedCategoryFilters(tt.args.filterCategories, tt.args.categoryInput) if got != tt.want { t.Errorf("findSelectedCategoryFilters() got = %v, want %v", got, tt.want) } if !reflect.DeepEqual(got1, tt.want1) { t.Errorf("findSelectedCategoryFilters() got1 = %v, want %v", got1, tt.want1) } }) } }
explode_data.jsonl/58360
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 691 }
[ 2830, 3393, 21814, 6316, 6746, 28351, 1155, 353, 8840, 836, 8, 1476, 50108, 10231, 22653, 20970, 1669, 3056, 5632, 10231, 22653, 6746, 515, 197, 197, 515, 298, 197, 5632, 10231, 6746, 25, 12339, 10231, 6746, 929, 28702, 1403, 345, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStackSetAvailableCongestionControl(t *testing.T) { c := context.New(t, 1500) defer c.Cleanup() s := c.Stack() // Setting AvailableCongestionControlOption should fail. aCC := tcpip.TCPAvailableCongestionControlOption("xyz") if err := s.SetTransportProtocolOption(tcp.ProtocolNumber, &aCC); err == nil { t.Fatalf("s.SetTransportProtocolOption(%d, &%T(%s)) = nil, want non-nil", tcp.ProtocolNumber, aCC, aCC) } // Verify that we still get the expected list of congestion control options. var cc tcpip.TCPAvailableCongestionControlOption if err := s.TransportProtocolOption(tcp.ProtocolNumber, &cc); err != nil { t.Fatalf("s.TransportProtocolOptio(%d, &%T(%s)): %s", tcp.ProtocolNumber, cc, cc, err) } if got, want := cc, tcpip.TCPAvailableCongestionControlOption("reno cubic"); got != want { t.Fatalf("got tcpip.TCPAvailableCongestionControlOption = %s, want = %s", got, want) } }
explode_data.jsonl/76003
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 333 }
[ 2830, 3393, 4336, 1649, 16485, 30421, 42498, 3273, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2266, 7121, 1155, 11, 220, 16, 20, 15, 15, 340, 16867, 272, 727, 60639, 2822, 1903, 1669, 272, 58646, 2822, 197, 322, 20037, 16136, 30421, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRestDslTraitApplicability(t *testing.T) { catalog, err := camel.DefaultCatalog() assert.Nil(t, err) e := &Environment{ CamelCatalog: catalog, } trait := newOpenAPITrait() enabled, err := trait.Configure(e) assert.Nil(t, err) assert.False(t, enabled) e.Integration = &v1.Integration{ Status: v1.IntegrationStatus{ Phase: v1.IntegrationPhaseNone, }, } enabled, err = trait.Configure(e) assert.Nil(t, err) assert.False(t, enabled) resource := v1.ResourceSpec{ Type: v1.ResourceTypeOpenAPI, } e.Integration.Spec.Resources = append(e.Integration.Spec.Resources, resource) enabled, err = trait.Configure(e) assert.Nil(t, err) assert.False(t, enabled) e.Integration.Status.Phase = v1.IntegrationPhaseInitialization enabled, err = trait.Configure(e) assert.Nil(t, err) assert.True(t, enabled) }
explode_data.jsonl/41411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 324 }
[ 2830, 3393, 12416, 35, 3226, 49257, 2164, 415, 2897, 1155, 353, 8840, 836, 8, 341, 1444, 7750, 11, 1848, 1669, 49152, 13275, 41606, 741, 6948, 59678, 1155, 11, 1848, 692, 7727, 1669, 609, 12723, 515, 197, 6258, 35562, 41606, 25, 16403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPlayerDevices(t *testing.T) { client, server := testClientFile(http.StatusOK, "test_data/player_available_devices.txt") defer server.Close() list, err := client.PlayerDevices() if err != nil { t.Error(err) return } if len(list) != 2 { t.Error("Expected two devices") } if list[0].Volume != 100 { t.Error("Expected volume to be 100%") } if list[1].Volume != 0 { t.Error("Expected null becomes 0") } }
explode_data.jsonl/80124
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 4476, 40835, 1155, 353, 8840, 836, 8, 341, 25291, 11, 3538, 1669, 1273, 2959, 1703, 19886, 52989, 11, 330, 1944, 1769, 59503, 26962, 41334, 3909, 1138, 16867, 3538, 10421, 2822, 14440, 11, 1848, 1669, 2943, 23756, 40835, 741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_postWorkflowGroupWithLessThanRWXProjectHandler(t *testing.T) { api, db, router, end := newTestAPI(t, bootstrap.InitiliazeDB) defer end() u, pass := assets.InsertAdminUser(api.mustDB()) key := sdk.RandomString(10) proj := assets.InsertTestProject(t, db, api.Cache, key, key, u) //First pipeline pip := sdk.Pipeline{ ProjectID: proj.ID, ProjectKey: proj.Key, Name: "pip1", } test.NoError(t, pipeline.InsertPipeline(api.mustDB(), api.Cache, proj, &pip, u)) w := sdk.Workflow{ Name: sdk.RandomString(10), WorkflowData: &sdk.WorkflowData{ Node: sdk.Node{ Name: "root", Type: sdk.NodeTypePipeline, Context: &sdk.NodeContext{ PipelineID: pip.ID, }, }, }, ProjectID: proj.ID, ProjectKey: proj.Key, } (&w).RetroMigrate() proj2, errP := project.Load(api.mustDB(), api.Cache, proj.Key, u, project.LoadOptions.WithPipelines, project.LoadOptions.WithGroups) test.NoError(t, errP) test.NoError(t, workflow.Insert(api.mustDB(), api.Cache, &w, proj2, u)) t.Logf("%+v\n", proj) newGrp := assets.InsertTestGroup(t, db, sdk.RandomString(10)) test.NoError(t, group.InsertGroupInProject(db, proj.ID, newGrp.ID, permission.PermissionReadWriteExecute)) //Prepare request vars := map[string]string{ "key": proj.Key, "permWorkflowName": w.Name, } reqG := sdk.GroupPermission{ Permission: 4, Group: *newGrp, } uri := router.GetRoute("POST", api.postWorkflowGroupHandler, vars) test.NotEmpty(t, uri) req := assets.NewAuthentifiedRequest(t, u, pass, "POST", uri, reqG) //Do the request rec := httptest.NewRecorder() router.Mux.ServeHTTP(rec, req) assert.Equal(t, 400, rec.Code) }
explode_data.jsonl/64784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 730 }
[ 2830, 3393, 6333, 62768, 2808, 2354, 27451, 26067, 56368, 55, 7849, 3050, 1155, 353, 8840, 836, 8, 341, 54299, 11, 2927, 11, 9273, 11, 835, 1669, 501, 2271, 7082, 1155, 11, 26925, 26849, 24078, 2986, 3506, 340, 16867, 835, 741, 10676, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Problem709(t *testing.T) { qs := []question709{ { para709{"Hello"}, ans709{"hello"}, }, { para709{"here"}, ans709{"here"}, }, { para709{"LOVELY"}, ans709{"lovely"}, }, } fmt.Printf("------------------------Leetcode Problem 709------------------------\n") for _, q := range qs { _, p := q.ans709, q.para709 fmt.Printf("【input】:%v 【output】:%v\n", p, toLowerCase(p.one)) } fmt.Printf("\n\n\n") }
explode_data.jsonl/27130
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 16670, 10121, 22, 15, 24, 1155, 353, 8840, 836, 8, 1476, 18534, 82, 1669, 3056, 7841, 22, 15, 24, 4257, 197, 197, 515, 298, 197, 14794, 22, 15, 24, 4913, 9707, 7115, 298, 43579, 22, 15, 24, 4913, 14990, 7115, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestApplicationV1alpha2ToV1beta1(t *testing.T) { r := require.New(t) expected := &v1beta1.Application{} ApplicationV1alpha2ToV1beta1(&app, expected) r.Equal(expected, &v1beta1.Application{ Spec: v1beta1.ApplicationSpec{ Components: []common.ApplicationComponent{{ Name: "test-component", Type: "worker", Properties: &runtime.RawExtension{}, Traits: []common.ApplicationTrait{}, Scopes: map[string]string{}, }}, }, }) }
explode_data.jsonl/7609
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 4988, 53, 16, 7141, 17, 1249, 53, 16, 19127, 16, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 340, 42400, 1669, 609, 85, 16, 19127, 16, 17521, 16094, 78329, 53, 16, 7141, 17, 1249, 53, 16, 19127, 16, 209...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApp_Run_CommandWithSubcommandHasHelpTopic(t *testing.T) { var subcommandHelpTopics = [][]string{ {"command", "foo", "--help"}, {"command", "foo", "-h"}, {"command", "foo", "help"}, } for _, flagSet := range subcommandHelpTopics { t.Logf("==> checking with flags %v", flagSet) app := &App{} buf := new(bytes.Buffer) app.Writer = buf subCmdBar := &Command{ Name: "bar", Usage: "does bar things", } subCmdBaz := &Command{ Name: "baz", Usage: "does baz things", } cmd := &Command{ Name: "foo", Description: "descriptive wall of text about how it does foo things", Subcommands: []*Command{subCmdBar, subCmdBaz}, Action: func(c *Context) error { return nil }, } app.Commands = []*Command{cmd} err := app.Run(flagSet) if err != nil { t.Error(err) } output := buf.String() t.Logf("output: %q\n", buf.Bytes()) if strings.Contains(output, "No help topic for") { t.Errorf("expect a help topic, got none: \n%q", output) } for _, shouldContain := range []string{ cmd.Name, cmd.Description, subCmdBar.Name, subCmdBar.Usage, subCmdBaz.Name, subCmdBaz.Usage, } { if !strings.Contains(output, shouldContain) { t.Errorf("want help to contain %q, did not: \n%q", shouldContain, output) } } } }
explode_data.jsonl/52583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 550 }
[ 2830, 3393, 2164, 84158, 66751, 2354, 3136, 5631, 10281, 12689, 26406, 1155, 353, 8840, 836, 8, 341, 2405, 1186, 5631, 12689, 45003, 284, 52931, 917, 515, 197, 197, 4913, 5631, 497, 330, 7975, 497, 14482, 8653, 7115, 197, 197, 4913, 563...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHttpPost(t *testing.T) { var test = test{Obj: "{\"getwayMac\":\"39FFD505474D383737780643\"}"} info, err := json.Marshal(test) if err != nil { fmt.Println(err) } s, err := HttpPost("http://124.193.136.53:6200//LedService.svc/GetDeviceGateWayDetail", "application/json", string(info)) if err != nil { fmt.Println(err) } fmt.Println(s) }
explode_data.jsonl/60469
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 23214, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 284, 1273, 90, 5261, 25, 54734, 455, 3117, 19552, 23488, 18, 24, 1748, 35, 20, 15, 20, 19, 22, 19, 35, 18, 23, 18, 22, 18, 22, 22, 23, 15, 21, 19, 18, 2105, 9863, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRoundTrip(t *testing.T) { // Some form of AWS credentials must be set up for tests to succeed awsCreds := fetchMockCredentials() defaultRoundTripper := (http.RoundTripper)(http.DefaultTransport.(*http.Transport).Clone()) errorRoundTripper := &ErrorRoundTripper{} tests := []struct { name string rt http.RoundTripper shouldError bool authConfig AuthConfig }{ { "valid_round_tripper", defaultRoundTripper, false, AuthConfig{Region: "region", Service: "service"}, }, { "round_tripper_error", errorRoundTripper, true, AuthConfig{Region: "region", Service: "service", RoleArn: "arn:aws:iam::123456789012:role/IAMRole"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { _, err := v4.GetSignedRequestSignature(r) assert.NoError(t, err) w.WriteHeader(200) })) defer server.Close() serverURL, _ := url.Parse(server.URL) authConfig := AuthConfig{Region: "region", Service: "service"} rt, err := newSigningRoundTripperWithCredentials(authConfig, awsCreds, tt.rt) assert.NoError(t, err) req, err := http.NewRequest("POST", serverURL.String(), strings.NewReader("")) assert.NoError(t, err) res, err := rt.RoundTrip(req) if tt.shouldError { assert.Nil(t, res) assert.Error(t, err) return } assert.NoError(t, err) assert.Equal(t, res.StatusCode, 200) }) } }
explode_data.jsonl/19762
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 627 }
[ 2830, 3393, 27497, 56352, 1155, 353, 8840, 836, 8, 341, 197, 322, 4329, 1352, 315, 23245, 16387, 1969, 387, 738, 705, 369, 7032, 311, 11996, 198, 197, 8635, 34, 53369, 1669, 7807, 11571, 27025, 2822, 11940, 27497, 21884, 6922, 1669, 320...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadClusterConfigs(t *testing.T) { cases := []struct { name string kubeconfig string kubeconfigDir string projectedTokenFile string expected map[string]rest.Config expectedErr bool }{ { name: "load from kubeconfig", kubeconfig: filepath.Join(filepath.Join("testdata", "load_from_kubeconfig"), "kubeconfig"), expected: map[string]rest.Config{ "": { Host: "https://api.ci.l2s4.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, "app.ci": { Host: "https://api.ci.l2s4.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, "build01": { Host: "https://api.build01.ci.devcluster.openshift.com:6443", BearerToken: "REDACTED", }, "build02": { Host: "https://api.build02.gcp.ci.openshift.org:6443", BearerToken: "REDACTED", }, "default": { Host: "https://api.ci.l2s4.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, "hive": { Host: "https://api.hive.9xw5.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, }, }, { name: "load from kubeconfigDir", kubeconfigDir: filepath.Join("testdata", "load_from_kubeconfigDir"), expected: map[string]rest.Config{ "": { Host: "https://api.build02.gcp.ci.openshift.org:6443", BearerToken: "REDACTED", }, "app.ci": { Host: "https://api.ci.l2s4.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, "build01": { Host: "https://api.build01.ci.devcluster.openshift.com:6443", BearerToken: "REDACTED", }, "build02": { Host: "https://api.build02.gcp.ci.openshift.org:6443", BearerToken: "REDACTED", }, "default": { Host: "https://api.build02.gcp.ci.openshift.org:6443", BearerToken: "REDACTED", }, "hive": { Host: "https://api.hive.9xw5.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, }, }, { name: "load from kubeconfigDir having contexts with the same name", kubeconfigDir: filepath.Join("testdata", "load_from_kubeconfigDir_having_contexts_with_the_same_name"), expectedErr: true, }, { name: "load from kubeconfig and kubeconfigDir", kubeconfig: filepath.Join(filepath.Join("testdata", "load_from_kubeconfig"), "kubeconfig2"), 
kubeconfigDir: filepath.Join("testdata", "load_from_kubeconfig_and_kubeconfigDir"), expected: map[string]rest.Config{ "": { Host: "https://api.build01.ci.devcluster.openshift.com:6443", BearerToken: "REDACTED", }, "app.ci": { Host: "https://api.ci.l2s4.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, "build01": { Host: "https://api.build01.ci.devcluster.openshift.com:6443", BearerToken: "REDACTED", }, "build02": { Host: "https://api.build02.gcp.ci.openshift.org:6443", BearerToken: "REDACTED", }, "default": { Host: "https://api.build01.ci.devcluster.openshift.com:6443", BearerToken: "REDACTED", }, "hive": { Host: "https://api.hive.9xw5.p1.openshiftapps.com:6443", BearerToken: "REDACTED", }, }, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { actual, actualErr := LoadClusterConfigs(NewConfig(ConfigFile(tc.kubeconfig), ConfigDir(tc.kubeconfigDir), ConfigProjectedTokenFile(tc.projectedTokenFile))) if tc.expectedErr != (actualErr != nil) { t.Errorf("%s: actualErr %v does not match expectedErr %v", tc.name, actualErr, tc.expectedErr) return } if diff := cmp.Diff(tc.expected, actual, cmpopts.IgnoreFields(rest.Config{}, "UserAgent")); !tc.expectedErr && diff != "" { t.Errorf("%s: actual does not match expected, diff: %s", tc.name, diff) } }) } }
explode_data.jsonl/80358
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1981 }
[ 2830, 3393, 5879, 28678, 84905, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 2290, 914, 198, 197, 16463, 3760, 1676, 260, 914, 198, 197, 16463, 3760, 1676, 6184, 414, 914, 198, 197, 72470, 291, 3323, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestValidateAccessTokens(t *testing.T) { errs := ValidateAccessToken(&oapi.OAuthAccessToken{ ObjectMeta: metav1.ObjectMeta{Name: "accessTokenNameWithMinimumLength"}, ClientName: "myclient", UserName: "myusername", UserUID: "myuseruid", }) if len(errs) != 0 { t.Errorf("expected success: %v", errs) } errorCases := map[string]struct { Token oapi.OAuthAccessToken T field.ErrorType F string }{ "zero-length name": { Token: oapi.OAuthAccessToken{ ClientName: "myclient", UserName: "myusername", UserUID: "myuseruid", }, T: field.ErrorTypeRequired, F: "metadata.name", }, "disallowed namespace": { Token: oapi.OAuthAccessToken{ ObjectMeta: metav1.ObjectMeta{Name: "accessTokenNameWithMinimumLength", Namespace: "foo"}, ClientName: "myclient", UserName: "myusername", UserUID: "myuseruid", }, T: field.ErrorTypeForbidden, F: "metadata.namespace", }, "no scope handler": { Token: oapi.OAuthAccessToken{ ObjectMeta: metav1.ObjectMeta{Name: "accessTokenNameWithMinimumLength"}, ClientName: "myclient", UserName: "myusername", UserUID: "myuseruid", Scopes: []string{"invalid"}, }, T: field.ErrorTypeInvalid, F: "scopes[0]", }, "bad scope": { Token: oapi.OAuthAccessToken{ ObjectMeta: metav1.ObjectMeta{Name: "accessTokenNameWithMinimumLength"}, ClientName: "myclient", UserName: "myusername", UserUID: "myuseruid", Scopes: []string{"user:dne"}, }, T: field.ErrorTypeInvalid, F: "scopes[0]", }, } for k, v := range errorCases { errs := ValidateAccessToken(&v.Token) if len(errs) == 0 { t.Errorf("expected failure %s for %v", k, v.Token) continue } for i := range errs { if errs[i].Type != v.T { t.Errorf("%s: expected errors to have type %s: %v", k, v.T, errs[i]) } if errs[i].Field != v.F { t.Errorf("%s: expected errors to have field %s: %v", k, v.F, errs[i]) } } } }
explode_data.jsonl/78249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 890 }
[ 2830, 3393, 17926, 6054, 29300, 1155, 353, 8840, 836, 8, 341, 9859, 82, 1669, 23282, 37649, 2099, 78, 2068, 8382, 5087, 37649, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 63121, 25, 330, 41167, 675, 2354, 28695, 4373, 7115, 197, 71724...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGzippedFileUnzipped(t *testing.T) { filePath := "file.gz" fileContent := "File content" gzipFileContent, err := gzipString(fileContent) if err != nil { t.Fatalf("gzipString(%s) returned an error: %v", fileContent, err) } fileChecks := []*ipb.FileCheck{&ipb.FileCheck{ FilesToCheck: []*ipb.FileSet{testconfigcreator.SingleFileWithPath(filePath)}, CheckType: &ipb.FileCheck_Content{Content: &ipb.ContentCheck{Content: "Different content"}}, }} expectedResult := &apb.ComplianceResult{ Id: "id", ComplianceOccurrence: &cpb.ComplianceOccurrence{ NonCompliantFiles: []*cpb.NonCompliantFile{ &cpb.NonCompliantFile{ Path: filePath, Reason: fmt.Sprintf("Got content %q, expected \"Different content\"", fileContent), }, }, }, } check := createFileCheckBatch(t, "id", fileChecks, newFakeAPI(withFileContent(gzipFileContent))) resultMap, err := check.Exec() if err != nil { t.Fatalf("check.Exec() returned an error: %v", err) } result, gotSingleton := singleComplianceResult(resultMap) if !gotSingleton { t.Fatalf("check.Exec() expected to return 1 result, got %d", len(resultMap)) } if diff := cmp.Diff(expectedResult, result, protocmp.Transform()); diff != "" { t.Errorf("check.Exec() returned unexpected diff (-want +got):\n%s", diff) } }
explode_data.jsonl/24476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 502 }
[ 2830, 3393, 38, 89, 6450, 1703, 1806, 89, 6450, 1155, 353, 8840, 836, 8, 341, 17661, 1820, 1669, 330, 1192, 20963, 698, 17661, 2762, 1669, 330, 1703, 2213, 698, 3174, 9964, 1703, 2762, 11, 1848, 1669, 57795, 703, 4866, 2762, 340, 743,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_addSubscriber_WithSeqHashHeight(t *testing.T) { chain, mock33 := createBlockChain(t) defer mock33.Close() blockSeq, err := chain.blockStore.GetBlockSequence(5) assert.Equal(t, err, nil) header, err := chain.blockStore.GetBlockHeaderByHash(blockSeq.Hash) assert.Equal(t, err, nil) subscribe := new(types.PushSubscribeReq) subscribe.Name = "push-test" subscribe.URL = "http://localhost" subscribe.LastSequence = 5 subscribe.LastHeight = header.Height subscribe.LastBlockHash = common.ToHex(blockSeq.Hash) key := calcPushKey(subscribe.Name) _, err = chain.push.store.GetKey(key) assert.NotEqual(t, err, nil) err = chain.push.addSubscriber(subscribe) assert.Equal(t, err, nil) subInfo, err := chain.push.store.GetKey(key) assert.Equal(t, err, nil) assert.NotEqual(t, subInfo, nil) var originSubInfo types.PushWithStatus err = types.Decode(subInfo, &originSubInfo) assert.Equal(t, err, nil) assert.Equal(t, originSubInfo.Push.URL, subscribe.URL) pushes, _ := chain.ProcListPush() assert.Equal(t, subscribe.Name, pushes.Pushes[0].Name) }
explode_data.jsonl/61718
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 2891, 40236, 62, 2354, 20183, 6370, 3640, 1155, 353, 8840, 836, 8, 341, 197, 8819, 11, 7860, 18, 18, 1669, 1855, 4713, 18837, 1155, 340, 16867, 7860, 18, 18, 10421, 2822, 47996, 20183, 11, 1848, 1669, 8781, 15697, 6093, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1