text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestObjectStat(t *testing.T) { obj := "QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V" is := is.New(t) s := NewShell(shellUrl) stat, err := s.ObjectStat("QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V") is.Nil(err) is.Equal(stat.Hash, obj) is.Equal(stat.LinksSize, 3) is.Equal(stat.CumulativeSize, 1688) }
explode_data.jsonl/61088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 1190, 15878, 1155, 353, 8840, 836, 8, 341, 22671, 1669, 330, 48, 76, 57, 2378, 20, 65, 4672, 48, 35, 22, 66, 37, 70, 32350, 88735, 57, 35, 62893, 365, 16, 54, 80, 45124, 8940, 17, 661, 24, 48, 80, 38, 8041, 74, 42,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHostClientPost(t *testing.T) { t.Parallel() skipIfNotUnix(t) addr := "./TestHostClientPost.unix" s := startEchoServer(t, "unix", addr) defer s.Stop() c := createEchoClient(t, "unix", addr) testHostClientPost(t, c, 100) }
explode_data.jsonl/79382
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 9296, 2959, 4133, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1903, 13389, 2679, 2623, 55832, 1155, 340, 53183, 1669, 5924, 2271, 9296, 2959, 4133, 6307, 941, 698, 1903, 1669, 1191, 74994, 5475, 1155, 11, 330, 566...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPersistenceLayer_Query(t *testing.T) { tests := []struct { name string db *mockQueryEventDatabase expectedResult EventsResult expectError bool argAssertions []assertion }{ { "find accounts error", &mockQueryEventDatabase{ findAccountsErr: errors.New("did not work"), }, EventsResult{}, true, []assertion{ func(q interface{}) error { if _, ok := q.(FindAccountsQueryAllAccounts); ok { return nil } return fmt.Errorf("unexpected argument %v", q) }, }, }, { "find events error", &mockQueryEventDatabase{ findAccountsResult: []Account{ {UserSalt: "LEWtq55DKObqPK+XEQbnZA=="}, {UserSalt: "kxwkHp6yPBd0tQ85XlayDg=="}, }, findEventsErr: errors.New("did not work"), }, EventsResult{}, true, []assertion{ func(q interface{}) error { if _, ok := q.(FindAccountsQueryAllAccounts); ok { return nil } return fmt.Errorf("unexpected argument %v", q) }, func(q interface{}) error { if query, ok := q.(FindEventsQueryForSecretIDs); ok { if query.Since != "yesterday" { return fmt.Errorf("unexpected since value: %v", query.Since) } if len(query.SecretIDs) != 2 { return fmt.Errorf("unexpected number of user ids: %d", len(query.SecretIDs)) } return nil } return fmt.Errorf("unexpected argument %v", q) }, }, }, { "ok", &mockQueryEventDatabase{ findAccountsResult: []Account{ {AccountID: "account-a", UserSalt: "LEWtq55DKObqPK+XEQbnZA=="}, {AccountID: "account-b", UserSalt: "kxwkHp6yPBd0tQ85XlayDg=="}, }, findEventsResult: []Event{ {AccountID: "account-a", EventID: "event-a", Payload: "payload-a"}, {AccountID: "account-b", EventID: "event-b", Payload: "payload-b"}, }, }, EventsResult{ Events: &EventsByAccountID{ "account-a": []EventResult{ {AccountID: "account-a", Payload: "payload-a", EventID: "event-a"}, }, "account-b": []EventResult{ {AccountID: "account-b", Payload: "payload-b", EventID: "event-b"}, }, }, }, false, []assertion{ func(q interface{}) error { if _, ok := q.(FindAccountsQueryAllAccounts); ok { return nil } return fmt.Errorf("unexpected argument %v", q) }, func(q interface{}) 
error { if query, ok := q.(FindEventsQueryForSecretIDs); ok { if query.Since != "yesterday" { return fmt.Errorf("unexpected since value: %v", query.Since) } if len(query.SecretIDs) != 2 { return fmt.Errorf("unexpected number of user ids: %d", len(query.SecretIDs)) } return nil } return fmt.Errorf("unexpected argument %v", q) }, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { p := &persistenceLayer{ dal: test.db, } result, err := p.Query(Query{ UserID: "user-id", Since: "yesterday", }) if (err != nil) != test.expectError { t.Errorf("Unexpected error value %v", err) } if !reflect.DeepEqual(test.expectedResult, result) { t.Errorf("Expected %v, got %v", test.expectedResult, result) } if expected, found := len(test.argAssertions), len(test.db.methodArgs); expected != found { t.Fatalf("Number of assertions did not match number of calls, expected %d and found %d", expected, found) } for i, a := range test.argAssertions { if err := a(test.db.methodArgs[i]); err != nil { t.Errorf("Assertion error when checking arguments: %v", err) } } }) } }
explode_data.jsonl/45975
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1644 }
[ 2830, 3393, 71562, 9188, 48042, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 20939, 1797, 353, 16712, 2859, 1556, 5988, 198, 197, 42400, 2077, 17627, 2077, 198, 197, 24952, 1454, 262, 1807...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewStrArrayFromCopy(t *testing.T) { gtest.C(t, func(t *gtest.T) { a1 := []string{"0", "1", "2", "3", "4", "5", "6"} a2 := garray.NewStrArrayFromCopy(a1) a3 := garray.NewStrArrayFromCopy(a1, true) t.Assert(a2.Contains("1"), true) t.Assert(a2.Len(), 7) t.Assert(a2, a3) }) }
explode_data.jsonl/53102
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 3564, 2580, 1857, 3830, 12106, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 11323, 16, 1669, 3056, 917, 4913, 15, 497, 330, 16, 497, 330, 17, 497, 330, 18, 497, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEvents(t *testing.T) { expectedURL := "/events" filters := filters.NewArgs() filters.Add("label", "label1") filters.Add("label", "label2") expectedFiltersJSON := `{"label":{"label1":true,"label2":true}}` eventsCases := []struct { options types.EventsOptions expectedQueryParams map[string]string }{ { options: types.EventsOptions{ Since: "invalid but valid", }, expectedQueryParams: map[string]string{ "since": "invalid but valid", }, }, { options: types.EventsOptions{ Until: "invalid but valid", }, expectedQueryParams: map[string]string{ "until": "invalid but valid", }, }, { options: types.EventsOptions{ Filters: filters, }, expectedQueryParams: map[string]string{ "filters": expectedFiltersJSON, }, }, } for _, eventsCase := range eventsCases { client := &Client{ transport: newMockClient(nil, func(req *http.Request) (*http.Response, error) { if !strings.HasPrefix(req.URL.Path, expectedURL) { return nil, fmt.Errorf("Expected URL '%s', got '%s'", expectedURL, req.URL) } query := req.URL.Query() for key, expected := range eventsCase.expectedQueryParams { actual := query.Get(key) if actual != expected { return nil, fmt.Errorf("%s not set in URL query properly. Expected '%s', got %s", key, expected, actual) } } return &http.Response{ StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewReader([]byte("response"))), }, nil }), } body, err := client.Events(context.Background(), eventsCase.options) if err != nil { t.Fatal(err) } defer body.Close() content, err := ioutil.ReadAll(body) if err != nil { t.Fatal(err) } if string(content) != "response" { t.Fatalf("expected response to contain 'response', got %s", string(content)) } } }
explode_data.jsonl/29018
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 771 }
[ 2830, 3393, 7900, 1155, 353, 8840, 836, 8, 341, 42400, 3144, 1669, 3521, 12389, 1837, 1166, 8612, 1669, 13406, 7121, 4117, 741, 1166, 8612, 1904, 445, 1502, 497, 330, 1502, 16, 1138, 1166, 8612, 1904, 445, 1502, 497, 330, 1502, 17, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIssue11594(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec(`drop table if exists t1;`) tk.MustExec("CREATE TABLE t1 (v bigint(20) UNSIGNED NOT NULL);") tk.MustExec("INSERT INTO t1 VALUES (1), (2);") tk.MustQuery("SELECT SUM(IF(v > 1, v, -v)) FROM t1;").Check(testkit.Rows("1")) tk.MustQuery("SELECT sum(IFNULL(cast(null+rand() as unsigned), -v)) FROM t1;").Check(testkit.Rows("-3")) tk.MustQuery("SELECT sum(COALESCE(cast(null+rand() as unsigned), -v)) FROM t1;").Check(testkit.Rows("-3")) tk.MustQuery("SELECT sum(COALESCE(cast(null+rand() as unsigned), v)) FROM t1;").Check(testkit.Rows("3")) }
explode_data.jsonl/65498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 42006, 16, 16, 20, 24, 19, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenesis(t *testing.T) { const genesis = ` { "msgfee": [ { "msg_path": "foo/bar", "fee": {"whole": 1, "fractional": 2, "ticker": "DOGE"} }, { "msg_path": "a/b", "fee": {"whole": 2, "fractional": 0, "ticker": "ETH"} } ] } ` var opts weave.Options if err := json.Unmarshal([]byte(genesis), &opts); err != nil { t.Fatalf("cannot unmarshal genesis: %s", err) } db := store.MemStore() migration.MustInitPkg(db, "msgfee") var ini Initializer if err := ini.FromGenesis(opts, weave.GenesisParams{}, db); err != nil { t.Fatalf("cannot load genesis: %s", err) } bucket := NewMsgFeeBucket() fee, err := bucket.MessageFee(db, "foo/bar") if err != nil { t.Fatalf("cannot fetch fee: %s", err) } if !fee.Equals(coin.NewCoin(1, 2, "DOGE")) { t.Fatalf("got an unexpected fee value: %s", fee) } fee, err = bucket.MessageFee(db, "a/b") if err != nil { t.Fatalf("cannot fetch fee: %s", err) } if !fee.Equals(coin.NewCoin(2, 0, "ETH")) { t.Fatalf("got an unexpected fee value: %s", fee) } }
explode_data.jsonl/46511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 472 }
[ 2830, 3393, 84652, 1155, 353, 8840, 836, 8, 341, 4777, 59366, 284, 22074, 515, 197, 1, 3236, 30017, 788, 2278, 197, 197, 515, 298, 197, 1, 3236, 2638, 788, 330, 7975, 49513, 756, 298, 197, 1, 30017, 788, 5212, 66633, 788, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRouterMultiLevelCatchAll(t *testing.T) { // Create empty handler h := new(Handler) // Create empty context c := new(Context) c.Params = Params{} // Create route r := Route("/a/b/*", h) // Matching routes rs := []string{"/a/b/c", "a/b/c", "/a/b/c/d/e", "a/b/c/d"} // Check for _, s := range rs { if !r.Match(s, c) { t.Errorf("'%s' should match against '/a/b/*", s) } } }
explode_data.jsonl/35797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 9523, 20358, 4449, 57760, 2403, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 4287, 7013, 198, 9598, 1669, 501, 7, 3050, 692, 197, 322, 4230, 4287, 2266, 198, 1444, 1669, 501, 14001, 340, 1444, 58268, 284, 34352, 31483, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUploadHeapTryUpdate(t *testing.T) { if testing.Short() { t.SkipNow() } t.Parallel() // Create renter and define shorter named helper for uploadHeap rt, err := newRenterTester(t.Name()) if err != nil { t.Fatal(err) } defer func() { if err := rt.renter.Close(); err != nil { t.Fatal(err) } }() uh := &rt.renter.uploadHeap // Define test cases var buf []byte sr := NewStreamShard(bytes.NewReader(buf), buf) var tests = []struct { name string ct chunkType existsUnstuck bool // Indicates if there should be an existing chunk in the unstuck map existsStuck bool // Indicates if there should be an existing chunk in the stuck map existsRepairing bool // Indicates if there should be an existing chunk in the repair map existingChunkSR io.ReadCloser newChunkSR io.ReadCloser existAfterUpdate bool // Indicates if tryUpdate will cancel and remove the chunk from the heap pushAfterUpdate bool // Indicates if the push to the heap should succeed after tryUpdate }{ // Pushing a chunkTypeLocalChunk should always be a no-op regardless of the // start of the chunk in the heap. 
{"PushLocalChunk_EmptyHeap", chunkTypeLocalChunk, false, false, false, nil, nil, false, true}, // no chunk in heap {"PushLocalChunk_UnstuckChunkNoSRInHeap", chunkTypeLocalChunk, true, false, false, nil, nil, true, false}, // chunk in unstuck map {"PushLocalChunk_UnstuckChunkWithSRInHeap", chunkTypeLocalChunk, true, false, false, sr, nil, true, false}, // chunk in unstuck map with sourceReader {"PushLocalChunk_StuckChunkNoSRInHeap", chunkTypeLocalChunk, false, true, false, nil, nil, true, false}, // chunk in stuck map {"PushLocalChunk_StuckChunkWithSRInHeap", chunkTypeLocalChunk, false, true, false, sr, nil, true, false}, // chunk in stuck map with sourceReader {"PushLocalChunk_RepairingChunkNoSRInHeap", chunkTypeLocalChunk, false, false, true, nil, nil, true, false}, // chunk in repair map {"PushLocalChunk_RepairingChunkWithSRInHeap", chunkTypeLocalChunk, false, false, true, sr, nil, true, false}, // chunk in repair map with sourceReader // Pushing a chunkTypeStreamChunk tests {"PushStreamChunk_EmptyHeap", chunkTypeStreamChunk, false, false, false, nil, sr, false, true}, // no chunk in heap {"PushStreamChunk_UnstuckChunkNoSRInHeap", chunkTypeStreamChunk, true, false, false, nil, sr, false, true}, // chunk in unstuck map {"PushStreamChunk_UnstuckChunkWithSRInHeap", chunkTypeStreamChunk, true, false, false, sr, sr, true, true}, // chunk in unstuck map with sourceReader {"PushStreamChunk_StuckChunkNoSRInHeap", chunkTypeStreamChunk, false, true, false, nil, sr, false, true}, // chunk in stuck map {"PushStreamChunk_StuckChunkWithSRInHeap", chunkTypeStreamChunk, false, true, false, sr, sr, true, true}, // chunk in stuck map with sourceReader {"PushStreamChunk_RepairingChunkNoSRInHeap", chunkTypeStreamChunk, false, false, true, nil, sr, false, true}, // chunk in repair map {"PushStreamChunk_RepairingChunkWithSRInHeap", chunkTypeStreamChunk, false, false, true, sr, sr, true, false}, // chunk in repair map with sourceReader } // Create a test file for the chunks entry, err 
:= rt.renter.newRenterTestFile() if err != nil { t.Fatal(err) } defer func() { if err := entry.Close(); err != nil { t.Fatal(err) } }() // Run test cases for i, test := range tests { // Initialize chunks and heap based on test parameters existingChunk := &unfinishedUploadChunk{ id: uploadChunkID{ fileUID: siafile.SiafileUID(test.name), index: uint64(i), }, fileEntry: entry.Copy(), sourceReader: test.existingChunkSR, piecesRegistered: 1, // This is so the chunk is viewed as incomplete staticMemoryManager: rt.renter.repairMemoryManager, } if test.existsUnstuck { uh.unstuckHeapChunks[existingChunk.id] = existingChunk } if test.existsStuck { existingChunk.stuck = true uh.stuckHeapChunks[existingChunk.id] = existingChunk } if test.existsRepairing { uh.repairingChunks[existingChunk.id] = existingChunk } newChunk := &unfinishedUploadChunk{ id: existingChunk.id, sourceReader: test.newChunkSR, piecesRegistered: 1, // This is so the chunk is viewed as incomplete staticMemoryManager: rt.renter.repairMemoryManager, } // Try and Update the Chunk in the Heap err := uh.managedTryUpdate(newChunk, test.ct) if err != nil { t.Fatalf("Error with TryUpdate for test %v; err: %v", test.name, err) } // Check to see if the chunk is still in the heap if test.existAfterUpdate != uh.managedExists(existingChunk.id) { t.Errorf("Chunk should exist after update %v for test %v", test.existAfterUpdate, test.name) } // Push the new chunk onto the heap if test.pushAfterUpdate != uh.managedPush(newChunk, test.ct) { t.Errorf("Chunk should have been pushed %v for test %v", test.pushAfterUpdate, test.name) } } }
explode_data.jsonl/3315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1854 }
[ 2830, 3393, 13844, 27909, 21453, 4289, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 7039, 741, 197, 532, 3244, 41288, 7957, 2822, 197, 322, 4230, 8016, 261, 323, 6979, 23327, 6941, 13137, 369, 8135, 27909,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetBalance(t *testing.T) { const responseBody = `[ { "currency_code": "JPY", "amount": 1024078, "available": 508000 }, { "currency_code": "BTC", "amount": 10.24, "available": 4.12 }, { "currency_code": "ETH", "amount": 20.48, "available": 16.38 } ]` const ( apikey = "longlongaccesskey" apisecret = "longlonglongapisecret" ) srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet { t.Fatalf("request method is unexpected: %s != %s", r.Method, http.MethodGet) } if got := r.Header.Get("ACCESS-KEY"); got != apikey { t.Fatalf("ACCESS-KEY header is unexpected: %s != %s", got, apikey) } // TODO: test sign w.WriteHeader(http.StatusOK) w.Write([]byte(responseBody)) })) defer srv.Close() c := New(apikey, "apisecret") c.httpEndpoint = srv.URL got, err := c.GetBalance(context.Background()) if err != nil { t.Fatal(err) } want := []Balance{ { CurrencyCode: "JPY", Amount: 1024078, Available: 508000, }, { CurrencyCode: "BTC", Amount: 10.24, Available: 4.12, }, { CurrencyCode: "ETH", Amount: 20.48, Available: 16.38, }, } if diff := cmp.Diff(got, want); diff != "" { t.Fatal(diff) } }
explode_data.jsonl/54012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 610 }
[ 2830, 3393, 1949, 21190, 1155, 353, 8840, 836, 8, 341, 4777, 98065, 284, 1565, 9640, 220, 341, 262, 330, 15973, 4136, 788, 330, 27188, 56, 756, 262, 330, 6045, 788, 220, 16, 15, 17, 19, 15, 22, 23, 345, 262, 330, 10334, 788, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnpack(t *testing.T) { headerBuffer := make([]byte, 10) tests := []struct { name string header []byte }{ { name: "UnpackTest", header: headerBuffer, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { h := &PackageHeader{} h.Unpack(tt.header) }) } }
explode_data.jsonl/19768
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 1806, 4748, 1155, 353, 8840, 836, 8, 341, 20883, 4095, 1669, 1281, 10556, 3782, 11, 220, 16, 15, 340, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 20883, 3056, 3782, 198, 197, 59403, 197, 197, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAssignDefaultAddonImages(t *testing.T) { customImage := "myimage" defaultAddonImages := map[string]string{ TillerAddonName: "gcr.io/kubernetes-helm/tiller:v2.11.0", ACIConnectorAddonName: "microsoft/virtual-kubelet:latest", ClusterAutoscalerAddonName: "k8s.gcr.io/cluster-autoscaler:v1.2.5", BlobfuseFlexVolumeAddonName: "mcr.microsoft.com/k8s/flexvolume/blobfuse-flexvolume:1.0.8", SMBFlexVolumeAddonName: "mcr.microsoft.com/k8s/flexvolume/smb-flexvolume:1.0.2", KeyVaultFlexVolumeAddonName: "mcr.microsoft.com/k8s/flexvolume/keyvault-flexvolume:v0.0.7", DashboardAddonName: "k8s.gcr.io/kubernetes-dashboard-amd64:v1.10.1", ReschedulerAddonName: "k8s.gcr.io/rescheduler:v0.3.1", MetricsServerAddonName: "k8s.gcr.io/metrics-server-amd64:v0.2.1", NVIDIADevicePluginAddonName: "nvidia/k8s-device-plugin:1.10", ContainerMonitoringAddonName: "mcr.microsoft.com/azuremonitor/containerinsights/ciprod:ciprod07092019", IPMASQAgentAddonName: "k8s.gcr.io/ip-masq-agent-amd64:v2.3.0", AzureCNINetworkMonitoringAddonName: "mcr.microsoft.com/containernetworking/networkmonitor:v0.0.6", DNSAutoscalerAddonName: "k8s.gcr.io/cluster-proportional-autoscaler-amd64:1.1.1", HeapsterAddonName: "k8s.gcr.io/heapster-amd64:v1.5.4", CalicoAddonName: "calico/typha:v3.7.2", AzureNetworkPolicyAddonName: "mcr.microsoft.com/containernetworking/azure-npm:v1.0.24", AADPodIdentityAddonName: "mcr.microsoft.com/k8s/aad-pod-identity/nmi:1.2", } customAddonImages := make(map[string]string) for k := range defaultAddonImages { customAddonImages[k] = customImage } cases := []struct { name string myAddons []KubernetesAddon isUpdate bool expectedImages map[string]string }{ { name: "default", myAddons: getFakeAddons(defaultAddonImages, ""), isUpdate: false, expectedImages: defaultAddonImages, }, { name: "create scenario", myAddons: getFakeAddons(defaultAddonImages, customImage), isUpdate: false, expectedImages: customAddonImages, // Image should not be overridden in create scenarios. 
}, { name: "upgrade + scale scenario", myAddons: getFakeAddons(defaultAddonImages, customImage), isUpdate: true, expectedImages: defaultAddonImages, // Image should be overridden in update scenarios. }, } for _, c := range cases { c := c t.Run(c.name, func(t *testing.T) { t.Parallel() mockCS := getMockBaseContainerService("1.10.8") mockCS.Properties.OrchestratorProfile.OrchestratorType = Kubernetes mockCS.Properties.OrchestratorProfile.KubernetesConfig.Addons = c.myAddons mockCS.setOrchestratorDefaults(c.isUpdate, c.isUpdate) resultAddons := mockCS.Properties.OrchestratorProfile.KubernetesConfig.Addons for _, result := range resultAddons { if len(result.Containers) > 0 && result.Containers[0].Image != c.expectedImages[result.Name] { t.Errorf("expected setDefaults to set Image to \"%s\" in addon %s, but got \"%s\"", c.expectedImages[result.Name], result.Name, result.Containers[0].Image) } } }) } }
explode_data.jsonl/33863
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1567 }
[ 2830, 3393, 28933, 3675, 84312, 14228, 1155, 353, 8840, 836, 8, 341, 1444, 1450, 1906, 1669, 330, 2408, 1805, 698, 11940, 84312, 14228, 1669, 2415, 14032, 30953, 515, 197, 10261, 15252, 84312, 675, 25, 503, 330, 70, 5082, 4245, 79587, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNewEndpoints(t *testing.T) { t.Run("disabled", func(t *testing.T) { e := newEndpoints(&config.AgentConfig{Enabled: false}, "") _, ok := e[0].(*nullEndpoint) assert.True(t, ok) }) t.Run("panic", func(t *testing.T) { for name, tt := range map[string]struct { cfg *config.AgentConfig err string }{ "key": {&config.AgentConfig{Enabled: true}, "must have at least one endpoint with key"}, "key2": {&config.AgentConfig{Enabled: true, Endpoints: []*config.Endpoint{{Host: "123"}}}, "must have at least one endpoint with key"}, "endpoint": {&config.AgentConfig{Enabled: true, Endpoints: []*config.Endpoint{{APIKey: "123"}}}, "must have at least one endpoint with key"}, } { t.Run(name, func(t *testing.T) { defer func() { if e, ok := recover().(error); !ok || e == nil { t.Fatal("expected panic") } else { if e.Error() != tt.err { t.Fatalf("invalid error, got %q", e.Error()) } } }() newEndpoints(tt.cfg, "") }) } }) t.Run("ok", func(t *testing.T) { for name, tt := range map[string]struct { cfg *config.AgentConfig path string exp []*datadogEndpoint }{ "main": { cfg: &config.AgentConfig{Enabled: true, Endpoints: []*config.Endpoint{{Host: "host1", APIKey: "key1"}}}, path: "/api/trace", exp: []*datadogEndpoint{{host: "host1", apiKey: "key1", path: "/api/trace"}}, }, "additional": { cfg: &config.AgentConfig{ Enabled: true, Endpoints: []*config.Endpoint{ {Host: "host1", APIKey: "key1"}, {Host: "host2", APIKey: "key2"}, {Host: "host3", APIKey: "key3"}, {Host: "host4", APIKey: "key4"}, }, }, path: "/api/trace", exp: []*datadogEndpoint{ {host: "host1", apiKey: "key1", path: "/api/trace"}, {host: "host2", apiKey: "key2", path: "/api/trace"}, {host: "host3", apiKey: "key3", path: "/api/trace"}, {host: "host4", apiKey: "key4", path: "/api/trace"}, }, }, } { t.Run(name, func(t *testing.T) { assert := assert.New(t) e := newEndpoints(tt.cfg, tt.path) for i, want := range tt.exp { got := e[i].(*datadogEndpoint) assert.Equal(want.host, got.host) assert.Equal(want.apiKey, got.apiKey) 
assert.Equal(want.path, got.path) } }) } }) t.Run("proxy", func(t *testing.T) { assert := assert.New(t) proxyURL, err := url.Parse("test_url") if err != nil { t.Fatal(err) } e := newEndpoints(&config.AgentConfig{ Enabled: true, ProxyURL: proxyURL, Endpoints: []*config.Endpoint{ {Host: "host1", APIKey: "key1"}, {Host: "host2", APIKey: "key2"}, {Host: "host3", APIKey: "key3", NoProxy: true}, }, }, "/api/trace") // proxy ok for _, i := range []int{0, 1} { tr := e[i].(*datadogEndpoint).client.Transport.(*http.Transport) p, _ := tr.Proxy(nil) assert.Equal("test_url", p.String()) } // proxy skipped tr := e[2].(*datadogEndpoint).client.Transport.(*http.Transport) assert.Nil(tr.Proxy) }) }
explode_data.jsonl/1532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1395 }
[ 2830, 3393, 3564, 80786, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 11978, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 7727, 1669, 501, 80786, 2099, 1676, 88869, 2648, 90, 5462, 25, 895, 2137, 14676, 197, 197, 6878, 5394, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequestHandlerWithIllegalRouterRule(t *testing.T) { meta := middleware.Metadata{Properties: map[string]string{ "rule": "^[A-Za-z0-9/._-]+$", }} log := logger.NewLogger("routerchecker.test") rchecker := NewMiddleware(log) handler, err := rchecker.GetHandler(meta) assert.Nil(t, err) var ctx fasthttp.RequestCtx ctx.Request.SetHost("localhost:5001") ctx.Request.SetRequestURI("/v1.0/invoke/qcg.default/method/ cat password") ctx.Request.Header.SetMethod("GET") output := new(RouterOutput) handler(output.handle)(&ctx) assert.Equal(t, fasthttp.StatusBadRequest, ctx.Response.Header.StatusCode()) }
explode_data.jsonl/34015
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 1900, 3050, 2354, 33713, 9523, 11337, 1155, 353, 8840, 836, 8, 341, 84004, 1669, 29679, 46475, 90, 7903, 25, 2415, 14032, 30953, 515, 197, 197, 1, 12937, 788, 330, 27736, 32, 39279, 9141, 15, 12, 24, 14, 1436, 12, 65778, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_NewFileStore(t *testing.T) { storedir := "/tmp/TestStore/_new" f := NewFileStore(storedir) if f.opened { t.Fatalf("filestore was opened without opening it") } if f.directory != storedir { t.Fatalf("filestore directory is wrong") } // storedir might exist or might not, just like with a real client // the point is, we don't care, we just want it to exist after it is // opened }
explode_data.jsonl/37283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 39582, 1703, 6093, 1155, 353, 8840, 836, 8, 341, 18388, 3018, 404, 1669, 3521, 5173, 80527, 6093, 19632, 931, 698, 1166, 1669, 1532, 1703, 6093, 5895, 3018, 404, 340, 743, 282, 5826, 291, 341, 197, 3244, 30762, 445, 1192, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestApplyDiff verifies t3c-apply's diff behavior on records.config:
// a trailing comment must NOT trigger a file replacement (comments diff
// clean), while a non-comment line MUST trigger one.
// NOTE(review): drives the external t3c binary and a live Traffic Ops
// test environment via tcd; not runnable in isolation.
func TestApplyDiff(t *testing.T) {
	tcd.WithObjs(t, []tcdata.TCObj{
		tcdata.CDNs, tcdata.Types, tcdata.Tenants, tcdata.Parameters,
		tcdata.Profiles, tcdata.ProfileParameters, tcdata.Divisions,
		tcdata.Regions, tcdata.PhysLocations, tcdata.CacheGroups,
		tcdata.Servers, tcdata.Topologies, tcdata.DeliveryServices}, func() {
		// badass to get initial config files
		if out, code := t3cUpdateReload(DefaultCacheHostName, "badass"); code != 0 {
			t.Fatalf("t3c apply badass failed with exit code %d, output: %s", code, out)
		}
		if !util.FileExists(RecordsConfigFileName) {
			t.Fatalf("missing config file '%s' needed to test", RecordsConfigFileName)
		}
		t.Run("verify comment is unchanged", func(t *testing.T) {
			// Append a comment-only line to the generated config.
			f, err := os.OpenFile(RecordsConfigFileName, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
			if err != nil {
				t.Fatalf("opening file '%s': %v", RecordsConfigFileName, err)
			}
			defer f.Close()
			_, err = f.Write([]byte(" #mycomment\n"))
			if err != nil {
				t.Fatalf("writing comment to file '%s': %v", RecordsConfigFileName, err)
			}
			// queue and syncds to get changes
			err = tcd.QueueUpdatesForServer(DefaultCacheHostName, true)
			if err != nil {
				t.Fatalf("failed to queue updates: %v", err)
			}
			out, code := t3cUpdateReload(DefaultCacheHostName, "syncds")
			if code != 0 {
				t.Fatalf("t3c apply failed with exit code %d, output: %s", code, out)
			}
			// verify the file wasn't overwritten, as it would be if there were a diff
			recordsDotConfig, err := ioutil.ReadFile(RecordsConfigFileName)
			if err != nil {
				t.Fatalf("reading %s: %v", RecordsConfigFileName, err)
			}
			if !bytes.Contains(recordsDotConfig, []byte("#mycomment")) {
				t.Fatalf("expected records.config to diff clean and not be replaced with comment difference, actual: '%s' t3c-apply output: %s", string(recordsDotConfig), out)
			}
		})
		t.Run("verify non-comment is overwritten", func(t *testing.T) {
			// Append a real (non-comment) line; this must produce a diff.
			f, err := os.OpenFile(RecordsConfigFileName, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
			if err != nil {
				t.Fatalf("opening file '%s': %v", RecordsConfigFileName, err)
			}
			_, err = f.Write([]byte("\nmynocomment this line isn't a comment\n"))
			f.Close()
			if err != nil {
				t.Fatalf("writing line to file '%s': %v", RecordsConfigFileName, err)
			}
			// queue and syncds to get changes
			err = tcd.QueueUpdatesForServer(DefaultCacheHostName, true)
			if err != nil {
				t.Fatalf("failed to queue updates: %v", err)
			}
			out, code := t3cUpdateReload(DefaultCacheHostName, "syncds")
			if code != 0 {
				t.Fatalf("t3c apply failed with exit code %d, output: %s", code, out)
			}
			t.Logf("t3c apply output: %s", out)
			recordsDotConfig, err := ioutil.ReadFile(RecordsConfigFileName)
			if err != nil {
				t.Fatalf("reading %s: %v", RecordsConfigFileName, err)
			}
			content := string(recordsDotConfig)
			// Either leftover string means the regenerated file did not
			// replace our modified copy.
			if strings.Contains(content, "#mycomment") {
				t.Fatalf("expected records.config to have a diff and be replaced with a non-comment difference, actual: %s", content)
			} else if strings.Contains(content, "mynocomment") {
				t.Fatalf("expected records.config to have a diff and be replaced with a non-comment difference, actual: %s", content)
			}
		})
	})
}
explode_data.jsonl/13549
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1274 }
[ 2830, 3393, 28497, 21751, 1155, 353, 8840, 836, 8, 341, 3244, 4385, 26124, 4121, 2519, 1155, 11, 3056, 10413, 691, 836, 34, 5261, 515, 197, 78255, 691, 727, 31264, 82, 11, 17130, 691, 29147, 11, 17130, 691, 836, 268, 1783, 11, 17130, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestWithDiscoveryFilter exercises the channel endorser-discovery service:
// a discovery-service error must surface to the caller, peers reported by
// discovery but absent from the membership list are excluded, and a caller
// supplied peer filter further narrows the endorser set.
// NOTE(review): mutates the package-level clientProvider — not parallel-safe.
func TestWithDiscoveryFilter(t *testing.T) {
	ctx := mocks.NewMockContext(mspmocks.NewMockSigningIdentity("test", mspID1))
	config := &config{
		EndpointConfig: mocks.NewMockEndpointConfig(),
		peers:          channelPeers,
	}
	ctx.SetEndpointConfig(config)
	discClient := clientmocks.NewMockDiscoveryClient()
	// Route all discovery-client creation through the mock.
	clientProvider = func(ctx contextAPI.Client) (discoveryClient, error) {
		return discClient, nil
	}
	discClient.SetResponses(
		&clientmocks.MockDiscoverEndpointResponse{
			PeerEndpoints: []*discmocks.MockDiscoveryPeerEndpoint{
				peer2Org1Endpoint, peer2Org3Endpoint, peer2Org2Endpoint,
				peer1Org1Endpoint, peer1Org2Endpoint, peer1Org3Endpoint,
			},
		},
	)
	t.Run("Error", func(t *testing.T) {
		expectedDiscoveryErrMsg := "simulated discovery service error"
		service, err := New(
			ctx, channelID,
			mocks.NewMockDiscoveryService(fmt.Errorf(expectedDiscoveryErrMsg)),
			WithRefreshInterval(500*time.Millisecond),
			WithResponseTimeout(2*time.Second),
		)
		require.NoError(t, err)
		defer service.Close()
		// The simulated discovery failure must be propagated.
		_, err = service.GetEndorsersForChaincode([]*fab.ChaincodeCall{cc1ChaincodeCall})
		assert.Truef(t, strings.Contains(err.Error(), expectedDiscoveryErrMsg), "expected error due to discovery error")
	})
	t.Run("Peers Down", func(t *testing.T) {
		// Only four of the six discovered peers are in the membership list.
		service, err := New(
			ctx, channelID,
			mocks.NewMockDiscoveryService(nil, peer1Org1, peer2Org1, peer2Org2, peer2Org3),
			WithRefreshInterval(500*time.Millisecond),
			WithResponseTimeout(2*time.Second),
		)
		require.NoError(t, err)
		defer service.Close()
		endorsers, err := service.GetEndorsersForChaincode([]*fab.ChaincodeCall{cc1ChaincodeCall})
		assert.NoError(t, err)
		assert.Equalf(t, 4, len(endorsers), "Expecting 4 endorser")
	})
	t.Run("Peer Filter", func(t *testing.T) {
		service, err := New(
			ctx, channelID,
			mocks.NewMockDiscoveryService(nil, peer1Org1, peer2Org1, peer2Org2, peer2Org3),
			WithRefreshInterval(500*time.Millisecond),
			WithResponseTimeout(2*time.Second),
		)
		require.NoError(t, err)
		defer service.Close()
		// Keep only peers above block height 1001.
		endorsers, err := service.GetEndorsersForChaincode([]*fab.ChaincodeCall{cc1ChaincodeCall}, options.WithPeerFilter(func(peer fab.Peer) bool {
			return peer.(fab.PeerState).BlockHeight() > 1001
		}))
		assert.NoError(t, err)
		assert.Equalf(t, 2, len(endorsers), "Expecting 2 endorser but got")
	})
}
explode_data.jsonl/43217
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 924 }
[ 2830, 3393, 2354, 67400, 5632, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 68909, 7121, 11571, 1972, 35680, 5187, 25183, 7121, 11571, 93358, 18558, 445, 1944, 497, 296, 2154, 915, 16, 1171, 25873, 1669, 609, 1676, 515, 197, 197, 27380, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindTestsTesting(t *testing.T) { test := ` package foo // foo does not import "testing", but defines Examples. func Example() {} func ExampleA() {} ` pkgs := create(t, test) _, tests, benchmarks, examples := ssa.FindTests(pkgs) if len(tests) > 0 { t.Errorf("FindTests.tests = %s, want none", tests) } if len(benchmarks) > 0 { t.Errorf("FindTests.benchmarks = %s, want none", benchmarks) } sort.Sort(funcsByPos(examples)) if got, want := fmt.Sprint(examples), "[foo.Example foo.ExampleA]"; got != want { t.Errorf("FindTests examples = %s, want %s", got, want) } }
explode_data.jsonl/24604
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 9885, 18200, 16451, 1155, 353, 8840, 836, 8, 341, 18185, 1669, 22074, 1722, 15229, 271, 322, 15229, 1558, 537, 1159, 330, 8840, 497, 714, 18653, 25311, 382, 2830, 13383, 368, 5613, 2830, 13383, 32, 368, 5613, 3989, 3223, 74,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUnArchiveConversation(t *testing.T) { http.HandleFunc("/conversations.unarchive", okJSONHandler) once.Do(startServer) api := New("testing-token", OptionAPIURL("http://"+serverAddr+"/")) err := api.UnArchiveConversation("CXXXXXXXX") if err != nil { t.Errorf("Unexpected error: %s", err) return } }
explode_data.jsonl/78540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 1806, 42502, 60313, 1155, 353, 8840, 836, 8, 341, 28080, 63623, 4283, 443, 72995, 6307, 16019, 497, 5394, 5370, 3050, 340, 197, 13184, 33596, 10639, 5475, 340, 54299, 1669, 1532, 445, 8840, 34841, 497, 6959, 7082, 3144, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInvalidBoolsSlice(t *testing.T) { type config struct { BadBools []bool `env:"BADBOOLS"` } os.Setenv("BADBOOLS", "t,f,TRUE,faaaalse") cfg := &config{} assert.Error(t, env.Parse(cfg)) }
explode_data.jsonl/7486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 7928, 1233, 3069, 33236, 1155, 353, 8840, 836, 8, 341, 13158, 2193, 2036, 341, 197, 12791, 329, 1233, 3069, 3056, 2641, 1565, 3160, 2974, 53572, 10395, 50, 8805, 197, 630, 25078, 4202, 3160, 445, 53572, 10395, 50, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStaticDatasetQueryUnHappy covers StaticDatasetQuery failure paths:
// a client built without a GraphQL client (recoverable 503), and a GraphQL
// response carrying a table-level error (mapped to 400 Bad Request).
func TestStaticDatasetQueryUnHappy(t *testing.T) {
	Convey("Given the graphQL Client is not configured", t, func() {
		testCtx := context.Background()
		mockHttpClient := &dphttp.ClienterMock{}
		// nil GraphQL client: the query cannot be executed at all.
		cantabularClient := cantabular.NewClient(
			cantabular.Config{
				Host: "cantabular.host",
			},
			mockHttpClient,
			nil,
		)
		Convey("When the StaticDatasetQuery method is called", func() {
			req := cantabular.StaticDatasetQueryRequest{}
			_, err := cantabularClient.StaticDatasetQuery(testCtx, req)
			So(err, ShouldNotBeNil)
			Convey("Status Code 503 Service Unavailable should be recoverable from error", func() {
				_, err := cantabularClient.StaticDatasetQuery(testCtx, req)
				So(dperrors.StatusCode(err), ShouldEqual, http.StatusServiceUnavailable)
			})
		})
	})
	Convey("Given a GraphQL error from the /graphql endpoint", t, func() {
		testCtx := context.Background()
		mockHttpClient := &dphttp.ClienterMock{}
		mockGQLClient := &mock.GraphQLClientMock{
			// Inject an error into the decoded response's table instead of
			// failing the query itself.
			QueryFunc: func(ctx context.Context, query interface{}, vars map[string]interface{}) error {
				if q, ok := query.(*cantabular.StaticDatasetQuery); ok {
					q.Dataset.Table.Error = "I am error response"
					return nil
				}
				return errors.New("query could not be cast to correct type")
			},
		}
		cantabularClient := cantabular.NewClient(
			cantabular.Config{
				Host:       "cantabular.host",
				ExtApiHost: "cantabular.ext.host",
			},
			mockHttpClient,
			mockGQLClient,
		)
		Convey("When the StaticDatasetQuery method is called", func() {
			req := cantabular.StaticDatasetQueryRequest{}
			_, err := cantabularClient.StaticDatasetQuery(testCtx, req)
			Convey("An error should be returned with status code 400 Bad Request", func() {
				So(err, ShouldNotBeNil)
				So(dperrors.StatusCode(err), ShouldEqual, http.StatusBadRequest)
			})
		})
	})
}
explode_data.jsonl/8456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 724 }
[ 2830, 3393, 11690, 33363, 2859, 1806, 32847, 1155, 353, 8840, 836, 8, 1476, 93070, 5617, 445, 22043, 279, 4771, 3588, 8423, 374, 537, 19755, 497, 259, 11, 2915, 368, 341, 197, 18185, 23684, 1669, 2266, 19047, 2822, 197, 77333, 26316, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPdfWithImageHolder(t *testing.T) { err := initTesting() if err != nil { t.Error(err) return } pdf := setupDefaultA4PDF(t) pdf.AddPage() bytesOfImg, err := ioutil.ReadFile("./test/res/PNG_transparency_demonstration_1.png") if err != nil { t.Error(err) return } imgH, err := ImageHolderByBytes(bytesOfImg) if err != nil { t.Error(err) return } err = pdf.ImageByHolder(imgH, 20.0, 20, nil) if err != nil { t.Error(err) return } err = pdf.ImageByHolder(imgH, 20.0, 200, nil) if err != nil { t.Error(err) return } pdf.SetX(250) pdf.SetY(200) pdf.Cell(nil, "gopher and gopher") pdf.WritePdf("./test/out/image_test.pdf") }
explode_data.jsonl/60941
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 309 }
[ 2830, 3393, 67596, 2354, 1906, 8589, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 2930, 16451, 741, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 853, 198, 197, 630, 3223, 2940, 1669, 6505, 3675, 32, 19, 23424, 1155, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDefaultReplicas(t *testing.T) { cluster := &MySQLCluster{} cluster.EnsureDefaults() if cluster.Spec.Replicas != defaultReplicas { t.Errorf("Expected default replicas to be %d but got %d", defaultReplicas, cluster.Spec.Replicas) } }
explode_data.jsonl/22107
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 3675, 18327, 52210, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 609, 59224, 28678, 16094, 197, 18855, 22834, 19098, 16273, 2822, 743, 10652, 36473, 2817, 79, 52210, 961, 1638, 18327, 52210, 341, 197, 3244, 13080, 445, 1889...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestParseMatchStatements checks the parser's S-expression output for a
// match statement with a Some(x) case and a None case.
func TestParseMatchStatements(t *testing.T) {
	input := `
match optional {
case Some(x) {
print(x)
}
case None {
print("None")
}
}
`
	expected := `(match optional (match-case (Some x) (block (expression-statement (call print x)))) (match-case None (block (expression-statement (call print "None")))))`
	checkParseStatement(t, input, expected)
}
explode_data.jsonl/33527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 14463, 8331, 93122, 1155, 353, 8840, 836, 8, 341, 25157, 14463, 8636, 1006, 197, 3244, 345, 197, 197, 3989, 197, 47706, 10101, 341, 298, 2722, 4329, 2075, 8, 341, 571, 6900, 2075, 340, 298, 197, 532, 298, 2722, 2240, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_KeepAlive(t *testing.T) { endpoint := os.Getenv("ETCD_ENDPOINT") if endpoint == "" { t.Error("not found env ETCD_ENDPOINT or ETCD_ENDPOINT is empty string") return } fmt.Printf("endpoint=[%s]\n", endpoint) Init(endpoint) defer Close() k := "foo" v := "bar" leaseID, err := GrantLease(5) err = PutWithLease(k, v, leaseID) if err != nil { t.Error(err) return } // the key 'foo' will be kept forever ch, err := KeepAliveLease(leaseID) if err != nil { t.Error(err) return } ka := <-ch fmt.Println("ttl:", ka.TTL) time.Sleep(5 * time.Second) }
explode_data.jsonl/27606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 262 }
[ 2830, 3393, 62, 19434, 32637, 1155, 353, 8840, 836, 8, 341, 6246, 2768, 1669, 2643, 64883, 445, 1348, 6484, 48756, 1138, 743, 14887, 621, 1591, 341, 197, 3244, 6141, 445, 1921, 1730, 6105, 17768, 6484, 48756, 476, 17768, 6484, 48756, 37...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestParametricRoutes(t *testing.T) { router := MakeRouter() router.GET("/activity/:user", writeData) router.POST("/activity/:user/:activity", writeData) router.GET("/activity/:user/:activity/comments/:comment", writeData) RunRequest(router, "GET", "/activity/raccoon", 200, "raccoon--", t) RunRequest(router, "POST", "/activity/raccoon/123", 200, "raccoon-123-", t) RunRequest(router, "GET", "/activity/raccoon/123/comments/456", 200, "raccoon-123-456", t) }
explode_data.jsonl/15099
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 2001, 16340, 26653, 1155, 353, 8840, 836, 8, 341, 67009, 1669, 7405, 9523, 741, 67009, 17410, 4283, 7175, 11315, 872, 497, 3270, 1043, 340, 67009, 14721, 4283, 7175, 11315, 872, 11315, 7175, 497, 3270, 1043, 340, 67009, 17410,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWhoisParsing(t *testing.T) { for _, tt := range []struct { domain string err string }{ {domain: "google.ai", err: "could not parse whois response"}, {domain: "domreg.lt", err: ""}, {domain: "fakedomain.foo", err: "could not parse whois response: Domain not found"}, {domain: "google.cn", err: ""}, {domain: "google.com", err: ""}, {domain: "google.de", err: "could not parse whois response"}, {domain: "nic.ua", err: ""}, {domain: "google.com.tw", err: ""}, {domain: "bbc.co.uk", err: ""}, {domain: "google.sk", err: ""}, {domain: "google.ro", err: ""}, // {domain: "google.pt", err: ""}, // timeouts all the time {domain: "google.it", err: ""}, {domain: "watchub.pw", err: ""}, {domain: "google.co.id", err: ""}, {domain: "google.kr", err: ""}, {domain: "google.jp", err: ""}, {domain: "microsoft.im", err: ""}, {domain: "google.rs", err: ""}, } { tt := tt t.Run(tt.domain, func(t *testing.T) { t.Parallel() expiry, err := NewClient().ExpireTime(context.Background(), tt.domain) if tt.err == "" { require.NoError(t, err) require.True(t, time.Since(expiry).Hours() < 0, "domain must not be expired") } else { require.Error(t, err) require.Contains(t, err.Error(), tt.err) } }) } }
explode_data.jsonl/69564
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 548 }
[ 2830, 3393, 15191, 285, 68839, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 2698, 3121, 914, 198, 197, 9859, 262, 914, 198, 197, 59403, 197, 197, 90, 12204, 25, 330, 17485, 40383, 497, 1848, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLoggerFatalExpected(t *testing.T) { checkMessages(t, zapcore.DebugLevel, nil, zapcore.FatalLevel, []string{ "hello", "world", "foo", }, func(logger *Logger) { logger.Fatal("hello") logger.Fatalf("world") logger.Fatalln("foo") }) }
explode_data.jsonl/58462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 7395, 62396, 18896, 1155, 353, 8840, 836, 8, 341, 25157, 15820, 1155, 11, 32978, 2153, 20345, 4449, 11, 2092, 11, 32978, 2153, 26133, 4449, 11, 3056, 917, 515, 197, 197, 1, 14990, 756, 197, 197, 1, 14615, 756, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsFree(t *testing.T) { s := NewScope() items := []ScopeItem{{10, 80}, {100, 400}, {550, 250}, {900, 600}} for _, v := range items { s.Insert(v.Address, v.Size) } type node struct { addr, size uint64 free bool } nodes := []node{ {5, 4, true}, {5, 5, true}, {10, 80, false}, {90, 10, true}, {95, 2, true}, {90, 30, false}, {120, 400, false}, {90, 450, false}, {1500, 10, true}, {1510, 10, true}, } for _, v := range nodes { free := s.IsFree(v.addr, v.size) if free != v.free { t.Errorf("%v %v free should be: %v, be: %v\n", v.addr, v.size, v.free, free) break } } }
explode_data.jsonl/17765
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 283 }
[ 2830, 3393, 3872, 10940, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 10803, 741, 46413, 1669, 3056, 10803, 1234, 2979, 16, 15, 11, 220, 23, 15, 2137, 314, 16, 15, 15, 11, 220, 19, 15, 15, 2137, 314, 20, 20, 15, 11, 220, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBuild_WithTags(t *testing.T) { // Dependency has several tags checkBuildWithOptionalFields(t, "with-tags", chart.Dependency{ Tags: []string{"tag1", "tag2"}, }) }
explode_data.jsonl/24405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 11066, 62, 2354, 15930, 1155, 353, 8840, 836, 8, 341, 197, 322, 32977, 702, 3807, 9492, 198, 25157, 11066, 2354, 15309, 8941, 1155, 11, 330, 4197, 63814, 497, 9487, 49918, 515, 197, 10261, 2032, 25, 3056, 917, 4913, 4578, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestOpenRenamedFileFails(t *testing.T) { testCases := []struct { name string // The original file is renamed if changeFile is true. changeFile bool // The Merkle tree file is renamed if changeMerkleFile is true. changeMerkleFile bool }{ { name: "FileOnly", changeFile: true, changeMerkleFile: false, }, { name: "MerkleOnly", changeFile: false, changeMerkleFile: true, }, { name: "FileAndMerkle", changeFile: true, changeMerkleFile: true, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { vfsObj, root, ctx, err := newVerityRoot(t, SHA256) if err != nil { t.Fatalf("newVerityRoot: %v", err) } filename := "verity-test-file" fd, _, err := newFileFD(ctx, t, vfsObj, root, filename, 0644) if err != nil { t.Fatalf("newFileFD: %v", err) } // Enable verity on the file. enableVerity(ctx, t, fd) newFilename := "renamed-test-file" if tc.changeFile { if err := dentryFromVD(t, root).renameLowerAt(ctx, vfsObj, filename, newFilename); err != nil { t.Fatalf("RenameAt: %v", err) } } if tc.changeMerkleFile { if err := dentryFromVD(t, root).renameLowerMerkleAt(ctx, vfsObj, filename, newFilename); err != nil { t.Fatalf("UnlinkAt: %v", err) } } // Ensure reopening the verity enabled file fails. if _, err = openVerityAt(ctx, vfsObj, root, filename, linux.O_RDONLY, linux.ModeRegular); err != syserror.EIO { t.Errorf("got OpenAt error: %v, expected EIO", err) } }) } }
explode_data.jsonl/56768
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 733 }
[ 2830, 3393, 5002, 34625, 3606, 1703, 37, 6209, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 197, 322, 576, 4024, 1034, 374, 27280, 421, 2297, 1703, 374, 830, 624, 197, 68380, 1703, 1807...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestSessionBindRoutingKey(t *testing.T) { cluster := createCluster() cluster.PoolConfig.HostSelectionPolicy = TokenAwareHostPolicy(RoundRobinHostPolicy()) session := createSessionFromCluster(cluster, t) defer session.Close() if err := createTable(session, `CREATE TABLE gocql_test.test_bind_routing_key ( key varchar, value int, PRIMARY KEY (key) )`); err != nil { t.Fatal(err) } const ( key = "routing-key" value = 5 ) fn := func(info *QueryInfo) ([]interface{}, error) { return []interface{}{key, value}, nil } q := session.Bind("INSERT INTO test_bind_routing_key(key, value) VALUES(?, ?)", fn) if err := q.Exec(); err != nil { t.Fatal(err) } }
explode_data.jsonl/11185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 277 }
[ 2830, 3393, 5283, 9950, 24701, 1592, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 1855, 28678, 741, 197, 18855, 89701, 2648, 29840, 11177, 13825, 284, 9660, 58793, 9296, 13825, 2785, 795, 76671, 9296, 13825, 12367, 25054, 1669, 1855, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHookMessageWillBePosted(t *testing.T) { t.Run("rejected", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) { return nil, "rejected" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, th.App.NewPluginAPI) defer tearDown() post := &model.Post{ UserId: th.BasicUser.Id, ChannelId: th.BasicChannel.Id, Message: "message_", CreateAt: model.GetMillis() - 10000, } _, err := th.App.CreatePost(post, th.BasicChannel, false) if assert.NotNil(t, err) { assert.Equal(t, "Post rejected by plugin. rejected", err.Message) } }) t.Run("rejected, returned post ignored", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) { post.Message = "ignored" return post, "rejected" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, th.App.NewPluginAPI) defer tearDown() post := &model.Post{ UserId: th.BasicUser.Id, ChannelId: th.BasicChannel.Id, Message: "message_", CreateAt: model.GetMillis() - 10000, } _, err := th.App.CreatePost(post, th.BasicChannel, false) if assert.NotNil(t, err) { assert.Equal(t, "Post rejected by plugin. 
rejected", err.Message) } }) t.Run("allowed", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) { return nil, "" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, th.App.NewPluginAPI) defer tearDown() post := &model.Post{ UserId: th.BasicUser.Id, ChannelId: th.BasicChannel.Id, Message: "message", CreateAt: model.GetMillis() - 10000, } post, err := th.App.CreatePost(post, th.BasicChannel, false) if err != nil { t.Fatal(err) } assert.Equal(t, "message", post.Message) retrievedPost, errSingle := th.App.Srv.Store.Post().GetSingle(post.Id) require.Nil(t, errSingle) assert.Equal(t, "message", retrievedPost.Message) }) t.Run("updated", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) { post.Message = post.Message + "_fromplugin" return post, "" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, th.App.NewPluginAPI) defer tearDown() post := &model.Post{ UserId: th.BasicUser.Id, ChannelId: th.BasicChannel.Id, Message: "message", CreateAt: model.GetMillis() - 10000, } post, err := th.App.CreatePost(post, th.BasicChannel, false) if err != nil { t.Fatal(err) } assert.Equal(t, "message_fromplugin", post.Message) if retrievedPost, errSingle := th.App.Srv.Store.Post().GetSingle(post.Id); err != nil { t.Fatal(errSingle) } else { assert.Equal(t, "message_fromplugin", 
retrievedPost.Message) } }) t.Run("multiple updated", func(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) { post.Message = "prefix_" + post.Message return post, "" } func main() { plugin.ClientMain(&MyPlugin{}) } `, ` package main import ( "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) MessageWillBePosted(c *plugin.Context, post *model.Post) (*model.Post, string) { post.Message = post.Message + "_suffix" return post, "" } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, th.App.NewPluginAPI) defer tearDown() post := &model.Post{ UserId: th.BasicUser.Id, ChannelId: th.BasicChannel.Id, Message: "message", CreateAt: model.GetMillis() - 10000, } post, err := th.App.CreatePost(post, th.BasicChannel, false) if err != nil { t.Fatal(err) } assert.Equal(t, "prefix_message_suffix", post.Message) }) }
explode_data.jsonl/30303
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2402 }
[ 2830, 3393, 31679, 2052, 9945, 3430, 17372, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 95353, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 197, 16867, 270, 836, 682, 4454, 2822, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGlobalVarsOnExit(t *testing.T) { wf := unmarshalWF(globalVarsOnExit) wftmpl := unmarshalWFTmpl(wftmplGlobalVarsOnExit) cancel, controller := newController(wf, wftmpl) defer cancel() woc := newWorkflowOperationCtx(wf, controller) ctx := context.Background() woc.operate(ctx) node := woc.wf.Status.Nodes["hello-world-6gphm-8n22g-3224262006"] if assert.NotNil(t, node) && assert.NotNil(t, node.Inputs) && assert.NotEmpty(t, node.Inputs.Parameters) { assert.Equal(t, "nononono", node.Inputs.Parameters[0].Value.String()) } }
explode_data.jsonl/71025
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 11646, 28305, 1925, 15339, 1155, 353, 8840, 836, 8, 341, 6692, 69, 1669, 650, 27121, 32131, 31951, 28305, 1925, 15339, 340, 6692, 723, 54010, 1669, 650, 27121, 54, 3994, 54010, 3622, 723, 54010, 11646, 28305, 1925, 15339, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestKeylevelValidationLedgerFailures(t *testing.T) { // Scenario: we validate a transaction that updates // the key-level validation parameters for a key. // we simulate the case where we fail to retrieve // the validation parameters from the ledger with // both deterministic and non-deterministic errors rwsb := rwsetBytes(t, "cc") prp := []byte("barf") t.Run("CollConfigNotDefinedError", func(t *testing.T) { mr := &mockState{GetStateMetadataErr: &ledger.CollConfigNotDefinedError{Ns: "mycc"}} ms := &mockStateFetcher{FetchStateRv: mr} pm := &KeyLevelValidationParameterManagerImpl{PolicyTranslator: &mockTranslator{}, StateFetcher: ms} validator := NewKeyLevelValidator(NewV13Evaluator(&mockPolicyEvaluator{}, pm), pm) err := validator.Validate("cc", 1, 0, rwsb, prp, []byte("CCEP"), []*pb.Endorsement{}) assert.NoError(t, err) }) t.Run("InvalidCollNameError", func(t *testing.T) { mr := &mockState{GetStateMetadataErr: &ledger.InvalidCollNameError{Ns: "mycc", Coll: "mycoll"}} ms := &mockStateFetcher{FetchStateRv: mr} pm := &KeyLevelValidationParameterManagerImpl{PolicyTranslator: &mockTranslator{}, StateFetcher: ms} validator := NewKeyLevelValidator(NewV13Evaluator(&mockPolicyEvaluator{}, pm), pm) err := validator.Validate("cc", 1, 0, rwsb, prp, []byte("CCEP"), []*pb.Endorsement{}) assert.NoError(t, err) }) t.Run("I/O error", func(t *testing.T) { mr := &mockState{GetStateMetadataErr: fmt.Errorf("some I/O error")} ms := &mockStateFetcher{FetchStateRv: mr} pm := &KeyLevelValidationParameterManagerImpl{PolicyTranslator: &mockTranslator{}, StateFetcher: ms} validator := NewKeyLevelValidator(NewV13Evaluator(&mockPolicyEvaluator{}, pm), pm) err := validator.Validate("cc", 1, 0, rwsb, prp, []byte("CCEP"), []*pb.Endorsement{}) assert.Error(t, err) assert.IsType(t, &errors.VSCCExecutionFailureError{}, err) }) }
explode_data.jsonl/80490
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 718 }
[ 2830, 3393, 6608, 967, 889, 13799, 60850, 1389, 19524, 1413, 1155, 353, 8840, 836, 8, 341, 197, 322, 58663, 25, 582, 9593, 264, 7745, 429, 8837, 198, 197, 322, 279, 1376, 11591, 10519, 5029, 369, 264, 1376, 624, 197, 322, 582, 37453, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGeneratePackage(t *testing.T) { generatePackage := func(tool string, pkg *schema.Package, files map[string][]byte) (map[string][]byte, error) { for f := range files { t.Logf("Ignoring extraFile %s", f) } return GeneratePackage(tool, pkg) } test.TestSDKCodegen(t, &test.SDKCodegenOptions{ Language: "go", GenPackage: generatePackage, Checks: map[string]test.CodegenCheck{ "go/compile": typeCheckGeneratedPackage, "go/test": testGeneratedPackage, }, }) }
explode_data.jsonl/22934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 31115, 13100, 1155, 353, 8840, 836, 8, 341, 3174, 13220, 13100, 1669, 2915, 48950, 914, 11, 24793, 353, 17349, 49834, 11, 3542, 2415, 14032, 45725, 3782, 8, 320, 2186, 14032, 45725, 3782, 11, 1465, 8, 1476, 197, 2023, 282, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestProcessAssociationUnableToLoadAssociationDetail(t *testing.T) { processor := createProcessor() svcMock := service.NewMockDefault() assocRawData := createAssociationRawData() parserMock := parserMock{} sys = &systemStub{} complianceUploader := complianceUploader.NewMockDefault() // Arrange processor.assocSvc = svcMock processor.complianceUploader = complianceUploader assocParser = &parserMock // Mock service svcMock.On("CreateNewServiceIfUnHealthy", mock.AnythingOfType("*log.Mock")) svcMock.On( "ListInstanceAssociations", mock.AnythingOfType("*log.Mock"), mock.AnythingOfType("string")).Return(assocRawData, nil) svcMock.On( "LoadAssociationDetail", mock.AnythingOfType("*log.Mock"), mock.AnythingOfType("*model.InstanceAssociation")).Return(errors.New("unable to load detail")) svcMock.On( "UpdateInstanceAssociationStatus", mock.AnythingOfType("*log.Mock"), mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("*ssm.InstanceAssociationExecutionResult")) complianceUploader.On("CreateNewServiceIfUnHealthy", mock.AnythingOfType("*log.Mock")) complianceUploader.On( "UpdateAssociationCompliance", mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("string"), mock.AnythingOfType("time.Time")).Return(nil) // Act processor.ProcessAssociation() // Assert assert.True(t, svcMock.AssertNumberOfCalls(t, "CreateNewServiceIfUnHealthy", 1)) assert.True(t, svcMock.AssertNumberOfCalls(t, "ListInstanceAssociations", 1)) assert.True(t, svcMock.AssertNumberOfCalls(t, "LoadAssociationDetail", 1)) assert.True(t, svcMock.AssertNumberOfCalls(t, "UpdateInstanceAssociationStatus", 1)) }
explode_data.jsonl/74692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 649 }
[ 2830, 3393, 7423, 63461, 17075, 1249, 5879, 63461, 10649, 1155, 353, 8840, 836, 8, 341, 197, 29474, 1669, 1855, 22946, 741, 1903, 7362, 11571, 1669, 2473, 7121, 11571, 3675, 741, 197, 46877, 20015, 1043, 1669, 1855, 63461, 20015, 1043, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIAVLReverseIterator(t *testing.T) { db := dbm.NewMemDB() tree, err := iavl.NewMutableTree(db, cacheSize) require.NoError(t, err) iavlStore := UnsafeNewStore(tree) iavlStore.Set([]byte{0x00}, []byte("0")) iavlStore.Set([]byte{0x00, 0x00}, []byte("0 0")) iavlStore.Set([]byte{0x00, 0x01}, []byte("0 1")) iavlStore.Set([]byte{0x00, 0x02}, []byte("0 2")) iavlStore.Set([]byte{0x01}, []byte("1")) var testReverseIterator = func(t *testing.T, start []byte, end []byte, expected []string) { iter := iavlStore.ReverseIterator(start, end) var i int for i = 0; iter.Valid(); iter.Next() { expectedValue := expected[i] value := iter.Value() require.EqualValues(t, string(value), expectedValue) i++ } require.Equal(t, len(expected), i) } testReverseIterator(t, nil, nil, []string{"1", "0 2", "0 1", "0 0", "0"}) testReverseIterator(t, []byte{0x00}, nil, []string{"1", "0 2", "0 1", "0 0", "0"}) testReverseIterator(t, []byte{0x00}, []byte{0x00, 0x01}, []string{"0 0", "0"}) testReverseIterator(t, []byte{0x00}, []byte{0x01}, []string{"0 2", "0 1", "0 0", "0"}) testReverseIterator(t, []byte{0x00, 0x01}, []byte{0x01}, []string{"0 2", "0 1"}) testReverseIterator(t, nil, []byte{0x01}, []string{"0 2", "0 1", "0 0", "0"}) }
explode_data.jsonl/38063
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 560 }
[ 2830, 3393, 5863, 30698, 45695, 11951, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 2927, 76, 7121, 18816, 3506, 2822, 51968, 11, 1848, 1669, 600, 67311, 7121, 11217, 6533, 9791, 11, 6500, 1695, 340, 17957, 35699, 1155, 11, 1848, 692, 823...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVolumeValidate(t *testing.T) { tests := []struct { volumemount *Volume shouldError bool }{ { nil, false, }, { &Volume{ Name: "", Source: Source{ Secret: map[string]string{ "a": "this is a test", }, }, }, true, }, { &Volume{ Name: "a", Source: Source{ Secret: map[string]string{ "b": "this is a test", }, }, }, false, }, { &Volume{ Name: "test123-_", Source: Source{ Secret: map[string]string{ "b": "this is a test", }, }, }, false, }, { &Volume{ Name: "test/.", Source: Source{ Secret: map[string]string{ "b": "this is a test", }, }, }, true, }, } for _, test := range tests { err := test.volumemount.Validate() if test.shouldError && err == nil { t.Fatalf("Expected volume: %v to error but it didn't", test.volumemount) } if !test.shouldError && err != nil { t.Fatalf("volume: %v shouldn't have errored, but it did; err: %v", test.volumemount, err) } } }
explode_data.jsonl/27199
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 547 }
[ 2830, 3393, 18902, 17926, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 5195, 1132, 336, 629, 353, 18902, 198, 197, 197, 5445, 1454, 1807, 198, 197, 59403, 197, 197, 515, 298, 84131, 345, 298, 36012, 345, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestSharedMemory(t *testing.T) { assert := assert.New(t) chainID0 := ids.GenerateTestID() chainID1 := ids.GenerateTestID() for _, test := range SharedMemoryTests { m := Memory{} baseDB := memdb.New() memoryDB := prefixdb.New([]byte{0}, baseDB) testDB := prefixdb.New([]byte{1}, baseDB) err := m.Initialize(logging.NoLog{}, memoryDB) assert.NoError(err) sm0 := m.NewSharedMemory(chainID0) sm1 := m.NewSharedMemory(chainID1) test(t, chainID0, chainID1, sm0, sm1, testDB) } }
explode_data.jsonl/45650
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 16997, 10642, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 197, 8819, 915, 15, 1669, 14151, 57582, 2271, 915, 741, 197, 8819, 915, 16, 1669, 14151, 57582, 2271, 915, 2822, 2023, 8358, 1273, 1669, 2088, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEatBodyChunked(t *testing.T) { logp.TestingSetup(logp.WithSelectors("http", "httpdetailed")) msgs := [][]byte{ []byte("03\r"), []byte("\n123\r\n03\r\n123\r"), []byte("\n0\r\n\r\n"), } st := &stream{ data: msgs[0], parseOffset: 0, bodyReceived: 0, parseState: stateBodyChunkedStart, } msg := &message{ chunkedLength: 5, contentLength: 0, } parser := newParser(&testParserConfig) cont, ok, complete := parser.parseBodyChunkedStart(st, msg) if cont != false || ok != true || complete != false { t.Errorf("Wrong return values") } assert.Equal(t, 0, st.parseOffset) st.data = append(st.data, msgs[1]...) cont, ok, complete = parser.parseBodyChunkedStart(st, msg) assert.True(t, cont) assert.Equal(t, 3, msg.chunkedLength) assert.Equal(t, 4, st.parseOffset) assert.Equal(t, stateBodyChunked, st.parseState) cont, ok, complete = parser.parseBodyChunked(st, msg) assert.True(t, cont) assert.Equal(t, stateBodyChunkedStart, st.parseState) assert.Equal(t, 9, st.parseOffset) cont, ok, complete = parser.parseBodyChunkedStart(st, msg) assert.True(t, cont) assert.Equal(t, 3, msg.chunkedLength) assert.Equal(t, 13, st.parseOffset) assert.Equal(t, stateBodyChunked, st.parseState) cont, ok, complete = parser.parseBodyChunked(st, msg) assert.False(t, cont) assert.True(t, ok) assert.False(t, complete) assert.Equal(t, 13, st.parseOffset) assert.Equal(t, 0, st.bodyReceived) assert.Equal(t, stateBodyChunked, st.parseState) st.data = append(st.data, msgs[2]...) cont, ok, complete = parser.parseBodyChunked(st, msg) assert.True(t, cont) assert.Equal(t, 18, st.parseOffset) assert.Equal(t, stateBodyChunkedStart, st.parseState) cont, ok, complete = parser.parseBodyChunkedStart(st, msg) assert.False(t, cont) assert.True(t, ok) assert.True(t, complete) }
explode_data.jsonl/16504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 764 }
[ 2830, 3393, 88611, 5444, 28304, 291, 1155, 353, 8840, 836, 8, 341, 6725, 79, 8787, 287, 21821, 12531, 79, 26124, 96995, 445, 1254, 497, 330, 1254, 67, 10111, 28075, 21169, 82, 1669, 52931, 3782, 515, 197, 197, 1294, 3782, 445, 15, 18,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGauge(t *testing.T) { // just for test originPath := types.MosnConfigPath types.MosnConfigPath = "." defer func() { types.MosnConfigPath = originPath }() zone := InitMetricsZone("TestGauge", 10*1024) defer func() { zone.Detach() Reset() }() entry, err := defaultZone.alloc("TestGauge") if err != nil { t.Error(err) } gauge := ShmGauge(unsafe.Pointer(&entry.value)) // update gauge.Update(5) // value if gauge.Value() != 5 { t.Error("gauge ops failed") } gauge.Update(123) if gauge.Value() != 123 { t.Error("gauge ops failed") } }
explode_data.jsonl/70928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 38, 19392, 1155, 353, 8840, 836, 8, 341, 197, 322, 1101, 369, 1273, 198, 197, 8611, 1820, 1669, 4494, 1321, 436, 77, 2648, 1820, 198, 98785, 1321, 436, 77, 2648, 1820, 284, 330, 2217, 16867, 2915, 368, 341, 197, 98785, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCandidateRecvNewEntry(t *testing.T) { ctx := startup_candidate_test(t) defer teardown_candidate_test(t, ctx) ctx.svr.election_timeout = 3 * time.Second for _, o := range ctx.svr.cluster_info { oo, _ := o.client.(*MockClient) // holy shit oo.ReplaceVoteFunctor(func(id string) func(ctx context.Context, req *pb.RequestVoteReq) (*pb.RequestVoteRes, error) { return func(ctx context.Context, req *pb.RequestVoteReq) (*pb.RequestVoteRes, error) { // mock timeout time.Sleep(3 * time.Second) rsp := new(pb.RequestVoteRes) rsp.Header = new(pb.ResHeader) rsp.Term = req.Term rsp.VoteGranted = id return rsp, nil } }(o.id)) } go ctx.svr.Elect() req := new(pb.AppendEntriesReq) req.Header = new(pb.ReqHeader) req.Term = int64(0) req.LeaderId = "id1" req.LeaderCommit = -1 req.PrevLogIndex = -1 req.PrevLogTerm = -1 ctx.svr.new_entry_pair.input <- req output := <-ctx.svr.new_entry_pair.output if output.Result != int32(AE_SMALL_TERM) { t.Errorf("bad entry result:%v", output) } req.Term = int64(1) ctx.svr.new_entry_pair.input <- req output = <-ctx.svr.new_entry_pair.output if output.Result != int32(AE_RETRY) { t.Errorf("bad entry result:%v", output) } }
explode_data.jsonl/59091
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 532 }
[ 2830, 3393, 63901, 63483, 3564, 5874, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 20567, 62360, 4452, 1155, 340, 16867, 49304, 62360, 4452, 1155, 11, 5635, 340, 20985, 514, 18920, 1734, 1170, 20537, 284, 220, 18, 353, 882, 32435, 198, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAPI(t *testing.T) { require := require.New(t) c := New(DefaultCoherentConfig) k1, k2 := [20]byte{1}, [20]byte{2} db := memdb.NewTestDB(t) get := func(key [20]byte, expectTxnID uint64) (res [1]chan []byte) { wg := sync.WaitGroup{} for i := 0; i < len(res); i++ { wg.Add(1) res[i] = make(chan []byte) go func(out chan []byte) { require.NoError(db.View(context.Background(), func(tx kv.Tx) error { if expectTxnID != tx.ViewID() { panic(fmt.Sprintf("epxected: %d, got: %d", expectTxnID, tx.ViewID())) } wg.Done() cacheView, err := c.View(context.Background(), tx) view := cacheView.(*CoherentView) if err != nil { panic(err) } v, err := c.Get(key[:], tx, view.viewID) if err != nil { panic(err) } out <- common.Copy(v) return nil })) }(res[i]) } wg.Wait() // ensure that all goroutines started their transactions return res } put := func(k, v []byte) uint64 { var txID uint64 require.NoError(db.Update(context.Background(), func(tx kv.RwTx) error { _ = tx.Put(kv.PlainState, k, v) txID = tx.ViewID() return nil })) return txID } // block 1 - represents existing state (no notifications about this data will come to client) txID1 := put(k2[:], []byte{42}) wg := sync.WaitGroup{} res1, res2 := get(k1, txID1), get(k2, txID1) // will return immediately wg.Add(1) go func() { defer wg.Done() for i := range res1 { require.Nil(<-res1[i]) } for i := range res2 { require.Equal([]byte{42}, <-res2[i]) } fmt.Printf("done1: \n") }() txID2 := put(k1[:], []byte{2}) fmt.Printf("-----1 %d, %d\n", txID1, txID2) res3, res4 := get(k1, txID2), get(k2, txID2) // will see View of transaction 2 txID3 := put(k1[:], []byte{3}) // even if core already on block 3 c.OnNewBlock(&remote.StateChangeBatch{ DatabaseViewID: txID2, PendingBlockBaseFee: 1, ChangeBatch: []*remote.StateChange{ { Direction: remote.Direction_FORWARD, BlockHeight: 2, BlockHash: gointerfaces.ConvertHashToH256([32]byte{}), Changes: []*remote.AccountChange{{ Action: remote.Action_UPSERT, Address: gointerfaces.ConvertAddressToH160(k1), Data: 
[]byte{2}, }}, }, }, }) wg.Add(1) go func() { defer wg.Done() for i := range res3 { require.Equal([]byte{2}, <-res3[i]) } for i := range res4 { require.Equal([]byte{42}, <-res4[i]) } fmt.Printf("done2: \n") }() fmt.Printf("-----2\n") res5, res6 := get(k1, txID3), get(k2, txID3) // will see View of transaction 3, even if notification has not enough changes c.OnNewBlock(&remote.StateChangeBatch{ DatabaseViewID: txID3, PendingBlockBaseFee: 1, ChangeBatch: []*remote.StateChange{ { Direction: remote.Direction_FORWARD, BlockHeight: 3, BlockHash: gointerfaces.ConvertHashToH256([32]byte{}), Changes: []*remote.AccountChange{{ Action: remote.Action_UPSERT, Address: gointerfaces.ConvertAddressToH160(k1), Data: []byte{3}, }}, }, }, }) wg.Add(1) go func() { defer wg.Done() for i := range res5 { require.Equal([]byte{3}, <-res5[i]) } fmt.Printf("-----21\n") for i := range res6 { require.Equal([]byte{42}, <-res6[i]) } fmt.Printf("done3: \n") }() fmt.Printf("-----3\n") txID4 := put(k1[:], []byte{2}) _ = txID4 c.OnNewBlock(&remote.StateChangeBatch{ DatabaseViewID: txID4, PendingBlockBaseFee: 1, ChangeBatch: []*remote.StateChange{ { Direction: remote.Direction_UNWIND, BlockHeight: 2, BlockHash: gointerfaces.ConvertHashToH256([32]byte{}), Changes: []*remote.AccountChange{{ Action: remote.Action_UPSERT, Address: gointerfaces.ConvertAddressToH160(k1), Data: []byte{2}, }}, }, }, }) fmt.Printf("-----4\n") txID5 := put(k1[:], []byte{4}) // reorg to new chain c.OnNewBlock(&remote.StateChangeBatch{ DatabaseViewID: txID4, PendingBlockBaseFee: 1, ChangeBatch: []*remote.StateChange{ { Direction: remote.Direction_FORWARD, BlockHeight: 3, BlockHash: gointerfaces.ConvertHashToH256([32]byte{2}), Changes: []*remote.AccountChange{{ Action: remote.Action_UPSERT, Address: gointerfaces.ConvertAddressToH160(k1), Data: []byte{4}, }}, }, }, }) fmt.Printf("-----5\n") res7, res8 := get(k1, txID5), get(k2, txID5) // will see View of transaction 3, even if notification has not enough changes wg.Add(1) go 
func() { defer wg.Done() for i := range res7 { require.Equal([]byte{4}, <-res7[i]) } for i := range res8 { require.Equal([]byte{42}, <-res8[i]) } fmt.Printf("done4: \n") }() err := db.View(context.Background(), func(tx kv.Tx) error { _, err := AssertCheckValues(context.Background(), tx, c) require.NoError(err) return nil }) require.NoError(err) wg.Wait() }
explode_data.jsonl/11840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2296 }
[ 2830, 3393, 7082, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 1444, 1669, 1532, 87874, 7339, 37538, 2648, 340, 16463, 16, 11, 595, 17, 1669, 508, 17, 15, 90184, 90, 16, 2137, 508, 17, 15, 90184, 90, 17, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFewItemsNoExtend(t *testing.T) { q := New(3) q.Push(0) q.Push(1) q.Push(2) if q.Len() != 3 { t.Fatal("Size does not reflect correct number of items") } q.Pop() if item, ok := q.Pop(); item != 1 || !ok { t.Fatal("Invalid pop return", item, "expected", 1) } q.Push(3) q.Push(4) if q.Len() != 3 { t.Fatal("Size not correct after push/pop operations") } if item, ok := q.Pop(); item != 2 || !ok { t.Fatal("Invalid pop return", item, "expected", 2) } }
explode_data.jsonl/7365
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 71104, 4353, 2753, 72136, 1155, 353, 8840, 836, 8, 341, 18534, 1669, 1532, 7, 18, 340, 18534, 34981, 7, 15, 340, 18534, 34981, 7, 16, 340, 18534, 34981, 7, 17, 340, 743, 2804, 65819, 368, 961, 220, 18, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func Test_SetterNotFound(t *testing.T) { c1 := NewLidi(Settings{}) type A struct{} type B struct { a *A `lidi:"inject(MySetter)"` } if err := c1.Provide(&A{}); err != nil { t.Fatal(err) } if err := c1.Provide(&B{}); err != nil { if err.Error() != "lidi: setter method 'MySetter' not found" { t.Fatal("Not Equal") } } else { t.Fatal(err) } }
explode_data.jsonl/40207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 14812, 465, 10372, 1155, 353, 8840, 836, 8, 341, 1444, 16, 1669, 1532, 43, 12278, 57395, 6257, 692, 13158, 362, 2036, 16094, 13158, 425, 2036, 341, 197, 11323, 353, 32, 1565, 75, 12278, 2974, 32133, 37485, 44294, 8, 8805, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCancelWorkflow_RecoversFromManuallyClosedChannel(t *testing.T) { w := testWorkflow() if w.isCanceled { t.Error("Didn't expect workflow to be canceled.") } close(w.Cancel) w.CancelWorkflow() if !w.isCanceled { t.Error("Expect workflow to be canceled.") } }
explode_data.jsonl/3868
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 9269, 62768, 62, 3820, 8969, 3830, 1658, 1832, 26884, 9629, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 1273, 62768, 741, 743, 289, 2079, 63263, 341, 197, 3244, 6141, 445, 86519, 944, 1720, 28288, 311, 387, 33446, 13053, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCACommon_GenerateRSAPKCS1PrivateKey(t *testing.T) { if _, errCA = RSAGeneratePriKey(512, pathcarsapksc1512, caPriKeyFileName, "PRIVATE KEY", RSAPKSC1()); nil != errCA { t.Error(errCA) } priData, errCA = ioutil.ReadFile(filepath.Join(pathcarsapksc1512, caPriKeyFileName)) if nil != errCA { t.Error(errCA) } if _, errCA = CAGenerateRSACertificateRequest(&CertRequest{ PrivateKeyData: priData, CertificateRequestFilePath: filepath.Join(pathcarsapksc1512, caCertificateRequestFileName), SignatureAlgorithm: x509.SHA256WithRSAPSS, Subject: CAMockSubject, }, "PRIVATE KEY", RSAPKSC1()); nil != errCA { t.Error(errCA) } if _, errCA = RSAGeneratePriKeyWithPass(1024, pathcarsapksc11024, caPriKeyFileName, "123456", "PRIVATE KEY", x509.PEMCipher3DES, RSAPKSC1()); nil != errCA { t.Error(errCA) } priData, errCA = ioutil.ReadFile(filepath.Join(pathcarsapksc11024, caPriKeyFileName)) if nil != errCA { t.Error(errCA) } if _, errCA = CAGenerateRSACertificateRequestWithPass(&CertRequest{ PrivateKeyData: priData, CertificateRequestFilePath: filepath.Join(pathcarsapksc11024, caCertificateRequestFileName), SignatureAlgorithm: x509.SHA384WithRSAPSS, Subject: CAMockSubject, }, "123456", "PRIVATE KEY", RSAPKSC1()); nil != errCA { t.Error(errCA) } }
explode_data.jsonl/24074
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 596 }
[ 2830, 3393, 5049, 10839, 2646, 13220, 11451, 2537, 42, 6412, 16, 75981, 1155, 353, 8840, 836, 8, 341, 743, 8358, 1848, 5049, 284, 23229, 1890, 13220, 92878, 1592, 7, 20, 16, 17, 11, 1815, 50708, 93812, 2388, 16, 20, 16, 17, 11, 2162...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestWebhookManagement(t *testing.T) { framework. NewTest(t). Run(func(ctx framework.TestContext) { ctx.Skip("TODO(github.com/istio/istio/issues/20289)") // Test that webhook configurations are enabled through istioctl successfully. args := []string{"experimental", "post-install", "webhook", "enable", "--validation", "--webhook-secret", "dns.istio-galley-service-account", "--namespace", "istio-system", "--validation-path", "./config/galley-webhook.yaml", "--injection-path", "./config/sidecar-injector-webhook.yaml"} istioCtl := istioctl.NewOrFail(ctx, ctx, istioctl.Config{}) output, fErr := istioCtl.Invoke(args) if fErr != nil { t.Fatalf("error returned for 'istioctl %s': %v", strings.Join(args, " "), fErr) } // Check that the webhook configurations are successful expectedRegexps := []*regexp.Regexp{ regexp.MustCompile(`finished reading cert`), regexp.MustCompile(`create webhook configuration istio-galley`), regexp.MustCompile(`create webhook configuration istio-sidecar-injector`), regexp.MustCompile(`webhook configurations have been enabled`), } for _, regexp := range expectedRegexps { if !regexp.MatchString(output) { t.Fatalf("output didn't match for 'istioctl %s'\n got %v\nwant: %v", strings.Join(args, " "), output, regexp) } } // Test that webhook statuses returned by running istioctl are as expected. 
args = []string{"experimental", "post-install", "webhook", "status"} istioCtl = istioctl.NewOrFail(ctx, ctx, istioctl.Config{}) output, fErr = istioCtl.Invoke(args) if fErr != nil { t.Fatalf("error returned for 'istioctl %s': %v", strings.Join(args, " "), fErr) } // Check that the webhook statuses are as expected expectedRegexps = []*regexp.Regexp{ regexp.MustCompile(`ValidatingWebhookConfiguration istio-galley is`), regexp.MustCompile(`MutatingWebhookConfiguration istio-sidecar-injector is`), } for _, regexp := range expectedRegexps { if !regexp.MatchString(output) { t.Fatalf("output didn't match for 'istioctl %s'\n got %v\nwant: %v", strings.Join(args, " "), output, regexp) } } // Currently, unable to test disabling webhooks because the disable command requires // user interaction: "Are you sure to delete webhook configuration(s)?", the deletion // will only proceed after user entering "yes". }) }
explode_data.jsonl/46393
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 950 }
[ 2830, 3393, 5981, 20873, 22237, 1155, 353, 8840, 836, 8, 341, 1166, 5794, 624, 197, 197, 3564, 2271, 1155, 4292, 197, 85952, 18552, 7502, 12626, 8787, 1972, 8, 341, 298, 20985, 57776, 445, 14732, 3268, 3827, 905, 14, 380, 815, 14, 380...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestValidateInValidBodyRequest(t *testing.T) { body := invalidWebhookEvent{ ID: "1337", } req := getRequest() JSONBody, _ := json.Marshal(body) req.Body = ioutil.NopCloser(strings.NewReader(string(JSONBody))) client := NewWebhookRequestHandler(req) _, err := client.ValidateAndGetData() assert.Error(t, err) }
explode_data.jsonl/25842
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 17926, 641, 4088, 5444, 1900, 1155, 353, 8840, 836, 8, 1476, 35402, 1669, 8318, 5981, 20873, 1556, 515, 197, 29580, 25, 330, 16, 18, 18, 22, 756, 197, 630, 24395, 1669, 78731, 2822, 197, 5370, 5444, 11, 716, 1669, 2951, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetSenderServiceTagEvent(t *testing.T) { resetAggregator() InitAggregator(nil, "testhostname") senderMetricSampleChan := make(chan senderMetricSample, 10) serviceCheckChan := make(chan metrics.ServiceCheck, 10) eventChan := make(chan metrics.Event, 10) bucketChan := make(chan senderHistogramBucket, 10) orchestratorChan := make(chan senderOrchestratorMetadata, 10) checkSender := newCheckSender(checkID1, "", senderMetricSampleChan, serviceCheckChan, eventChan, bucketChan, orchestratorChan) checkTags := []string{"check:tag1", "check:tag2"} event := metrics.Event{ Title: "title", Host: "testhostname", Ts: time.Now().Unix(), Text: "text", Tags: checkTags, } // only tags added by the check checkSender.SetCheckService("") checkSender.FinalizeCheckServiceTag() checkSender.Event(event) e := <-eventChan assert.Equal(t, checkTags, e.Tags) // only last call is added as a tag checkSender.SetCheckService("service1") checkSender.SetCheckService("service2") checkSender.FinalizeCheckServiceTag() checkSender.Event(event) e = <-eventChan assert.Equal(t, append(checkTags, "service:service2"), e.Tags) }
explode_data.jsonl/78301
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 399 }
[ 2830, 3393, 1949, 20381, 1860, 5668, 1556, 1155, 353, 8840, 836, 8, 341, 70343, 9042, 58131, 741, 98762, 9042, 58131, 27907, 11, 330, 1944, 27806, 5130, 1903, 1659, 54310, 17571, 46019, 1669, 1281, 35190, 4646, 54310, 17571, 11, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApixu_ReadResponseBodyError(t *testing.T) { a := &apixu{ config: Config{}, httpClient: &httpClientMock{ response: &http.Response{ StatusCode: http.StatusOK, Body: &bodyMock{}, }, err: nil, }, read: func(r io.Reader) ([]byte, error) { return []byte{}, errors.New("error") }, } res, err := a.Search("query") assert.Nil(t, res) assert.Error(t, err) }
explode_data.jsonl/14948
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 10611, 941, 84, 38381, 29637, 1454, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 609, 391, 941, 84, 515, 197, 25873, 25, 5532, 38837, 197, 28080, 2959, 25, 609, 83417, 11571, 515, 298, 21735, 25, 609, 1254, 12574, 515, 571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTombstonedNodes registers a topic against a running nsqlookupd, then
// tombstones the producer's node through the HTTP API and checks that the
// producer lookup flips Tombstoned from false to true.
func TestTombstonedNodes(t *testing.T) {
	opts := NewOptions()
	opts.Logger = newTestLogger(t)
	tcpAddr, httpAddr, nsqlookupd := mustStartLookupd(opts)
	defer nsqlookupd.Exit()

	lookupdHTTPAddrs := []string{fmt.Sprintf("%s", httpAddr)}

	topicName := "inactive_nodes"

	// Identify as a fake producer node and register one topic/channel.
	conn := mustConnectLookupd(t, tcpAddr)
	defer conn.Close()

	identify(t, conn, "ip.address", 5000, 5555, "fake-version")

	nsq.Register(topicName, "channel1").WriteTo(conn)
	_, err := nsq.ReadResponse(conn)
	equal(t, err, nil)

	// Before tombstoning: one producer with one topic, not tombstoned.
	producers, _ := lookuputil.GetLookupdProducers(lookupdHTTPAddrs)
	equal(t, len(producers), 1)
	equal(t, len(producers[0].Topics), 1)
	equal(t, producers[0].Topics[0].Topic, topicName)
	equal(t, producers[0].Topics[0].Tombstoned, false)

	// Tombstone the node via the lookupd HTTP endpoint.
	endpoint := fmt.Sprintf("http://%s/topic/tombstone?topic=%s&node=%s", httpAddr, topicName, "ip.address:5555")
	_, err = http_api.NegotiateV1("POST", endpoint, nil)
	equal(t, err, nil)

	// After tombstoning: same producer/topic now reports Tombstoned == true.
	producers, _ = lookuputil.GetLookupdProducers(lookupdHTTPAddrs)
	equal(t, len(producers), 1)
	equal(t, len(producers[0].Topics), 1)
	equal(t, producers[0].Topics[0].Topic, topicName)
	equal(t, producers[0].Topics[0].Tombstoned, true)
}
explode_data.jsonl/12603
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 482 }
[ 2830, 3393, 51, 2855, 7720, 291, 12288, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 1532, 3798, 741, 64734, 12750, 284, 501, 2271, 7395, 1155, 340, 3244, 4672, 13986, 11, 1758, 13986, 11, 12268, 80, 21020, 67, 1669, 1969, 3479, 34247, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateReminder(t *testing.T) { testActorsRuntime := newTestActorsRuntime() actorType, actorID := getTestActorTypeAndID() err := testActorsRuntime.CreateReminder(&CreateReminderRequest{ ActorID: actorID, ActorType: actorType, Name: "reminder1", Period: "1s", DueTime: "1s", Data: nil, }) assert.Nil(t, err) }
explode_data.jsonl/12877
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 4021, 95359, 1155, 353, 8840, 836, 8, 341, 18185, 2414, 1087, 15123, 1669, 501, 2271, 2414, 1087, 15123, 741, 93410, 929, 11, 12089, 915, 1669, 633, 2271, 18870, 929, 3036, 915, 741, 9859, 1669, 1273, 2414, 1087, 15123, 7251...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListWatchedCMD(t *testing.T) { testTelegramClientInst, answerChan, _ := NewTestMovieBot("./test_data/test_data.sql") updates := make(chan tgbotapi.Update) go testTelegramClientInst.GetWatchedFilms(updates) updates <- tgbotapi.Update{Message: &tgbotapi.Message{Text: "", Chat: &tgbotapi.Chat{ID: 1}}} answer := <-answerChan expectedAnswer := "Список просмотренных фильмов:\nWatchedFilm\n" if answer != expectedAnswer { t.Errorf(fmt.Sprintf("Not expected bot answer: %s, expected: %s", answer, expectedAnswer)) return } t.Logf("TestListWatchedCMD complete") }
explode_data.jsonl/19226
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 852, 14247, 291, 38680, 1155, 353, 8840, 836, 8, 341, 18185, 72244, 2959, 8724, 11, 4226, 46019, 11, 716, 1669, 1532, 2271, 19668, 23502, 13988, 1944, 1769, 12697, 1769, 10045, 1138, 197, 49661, 1669, 1281, 35190, 53188, 6331,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_Hoverfly_GetSimulation_ReturnsASingleRequestResponsePair seeds a single
// request/response pair (exact destination match) into a Hoverfly unit and
// checks GetSimulation returns it with every unconfigured matcher nil/empty
// and the response fields intact.
func Test_Hoverfly_GetSimulation_ReturnsASingleRequestResponsePair(t *testing.T) {
	RegisterTestingT(t)

	unit := NewHoverflyWithConfiguration(&Configuration{})
	unit.Simulation.AddPair(&models.RequestMatcherResponsePair{
		RequestMatcher: models.RequestMatcher{
			Destination: []models.RequestFieldMatchers{
				{
					Matcher: matchers.Exact,
					Value:   "test.com",
				},
			},
		},
		Response: models.ResponseDetails{
			Status: 200,
			Body:   "test-body",
		},
	})

	simulation, err := unit.GetSimulation()
	Expect(err).To(BeNil())

	// Exactly the one pair we added, with its exact-destination matcher.
	Expect(simulation.RequestResponsePairs).To(HaveLen(1))
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.Destination).To(HaveLen(1))
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.Destination[0].Matcher).To(Equal("exact"))
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.Destination[0].Value).To(Equal("test.com"))

	// Matchers we never configured must stay nil/empty.
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.Path).To(BeNil())
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.Method).To(BeNil())
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.DeprecatedQuery).To(BeNil())
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.Scheme).To(BeNil())
	Expect(simulation.RequestResponsePairs[0].RequestMatcher.Headers).To(HaveLen(0))

	// The response round-trips unencoded.
	Expect(simulation.RequestResponsePairs[0].Response.Status).To(Equal(200))
	Expect(simulation.RequestResponsePairs[0].Response.EncodedBody).To(BeFalse())
	Expect(simulation.RequestResponsePairs[0].Response.Body).To(Equal("test-body"))
	Expect(simulation.RequestResponsePairs[0].Response.Headers).To(HaveLen(0))

	Expect(nil).To(BeNil())
}
explode_data.jsonl/45367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 592 }
[ 2830, 3393, 2039, 1975, 21642, 13614, 64554, 53316, 82, 1911, 2173, 1900, 2582, 12443, 1155, 353, 8840, 836, 8, 341, 79096, 16451, 51, 1155, 692, 81189, 1669, 1532, 34379, 21642, 2354, 7688, 2099, 7688, 6257, 692, 81189, 808, 60361, 1904,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiMap(t *testing.T) { e := &Env{} e.Set("foo", "bar") e.Set("bar", "baz") e.Set("hello", "world") m := e.MultiMap() e2 := &Env{} e2.Set("old_key", "something something something") e2.InitMultiMap(m) if v := e2.Get("old_key"); v != "" { t.Fatalf("%#v", v) } if v := e2.Get("bar"); v != "baz" { t.Fatalf("%#v", v) } if v := e2.Get("hello"); v != "world" { t.Fatalf("%#v", v) } }
explode_data.jsonl/15391
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 20358, 2227, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 609, 14359, 16094, 7727, 4202, 445, 7975, 497, 330, 2257, 1138, 7727, 4202, 445, 2257, 497, 330, 42573, 1138, 7727, 4202, 445, 14990, 497, 330, 14615, 1138, 2109, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMutableTree_LoadVersion_Empty(t *testing.T) { memDB := db.NewMemDB() tree, err := NewMutableTree(memDB, 0) require.NoError(t, err) version, err := tree.LoadVersion(0) require.NoError(t, err) assert.EqualValues(t, 0, version) version, err = tree.LoadVersion(-1) require.NoError(t, err) assert.EqualValues(t, 0, version) _, err = tree.LoadVersion(3) require.Error(t, err) }
explode_data.jsonl/23775
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 11217, 6533, 19553, 5637, 76060, 1595, 1155, 353, 8840, 836, 8, 341, 14145, 3506, 1669, 2927, 7121, 18816, 3506, 741, 51968, 11, 1848, 1669, 1532, 11217, 6533, 39908, 3506, 11, 220, 15, 340, 17957, 35699, 1155, 11, 1848, 692...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSharedAutoprovisionVolume runs a task with a shared, auto-provisioned
// docker volume to completion and verifies the volume survives task cleanup
// (shared volumes must not be removed together with the task).
func TestSharedAutoprovisionVolume(t *testing.T) {
	taskEngine, done, _ := setupWithDefaultConfig(t)
	defer done()
	stateChangeEvents := taskEngine.StateChangeEvents()
	// Set the task clean up duration to speed up the test
	taskEngine.(*DockerTaskEngine).cfg.TaskCleanupWaitDuration = 1 * time.Second
	testTask, tmpDirectory, err := createVolumeTask("shared", "TestSharedAutoprovisionVolume", "TestSharedAutoprovisionVolume", true)
	defer os.Remove(tmpDirectory)
	require.NoError(t, err, "creating test task failed")

	go taskEngine.AddTask(testTask)

	// Drive the task through running and stopped, then check its outcome.
	verifyTaskIsRunning(stateChangeEvents, testTask)
	verifyTaskIsStopped(stateChangeEvents, testTask)
	assert.Equal(t, *testTask.Containers[0].GetKnownExitCode(), 0)
	assert.Equal(t, testTask.ResourcesMapUnsafe["dockerVolume"][0].(*taskresourcevolume.VolumeResource).VolumeConfig.DockerVolumeName, "TestSharedAutoprovisionVolume", "task volume name is not the same as specified in task definition")
	// Wait for task to be cleaned up
	testTask.SetSentStatus(apitaskstatus.TaskStopped)
	waitForTaskCleanup(t, taskEngine, testTask.Arn, 5)
	// The shared volume must still be inspectable after cleanup.
	client := taskEngine.(*DockerTaskEngine).client
	response := client.InspectVolume(context.TODO(), "TestSharedAutoprovisionVolume", 1*time.Second)
	assert.NoError(t, response.Error, "expect shared volume not removed")
	cleanVolumes(testTask, taskEngine)
}
explode_data.jsonl/39473
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 16997, 19602, 45926, 13013, 18902, 1155, 353, 8840, 836, 8, 341, 49115, 4571, 11, 2814, 11, 716, 1669, 6505, 2354, 3675, 2648, 1155, 340, 16867, 2814, 741, 24291, 4072, 7900, 1669, 3383, 4571, 18942, 4072, 7900, 741, 197, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetData(t *testing.T) { var dataPath = "../test/data/bes3tint.dta" var cfgPath = "../test/data/bes3tint.dsc" var testEprFile, _ = NewEprFile(dataPath, cfgPath) testEprFile.cfg["BSEQ"] = "LIT" data1, _ := testEprFile.GetData() testEprFile.cfg["BSEQ"] = "BIG" data2, _ := testEprFile.GetData() assert.NotEqual(t, data1, data2) }
explode_data.jsonl/5233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 68957, 1155, 353, 8840, 836, 8, 341, 2405, 821, 1820, 284, 7005, 1944, 13167, 3470, 288, 18, 83, 396, 950, 2565, 698, 2405, 13286, 1820, 284, 7005, 1944, 13167, 3470, 288, 18, 83, 396, 950, 2388, 698, 2405, 1273, 36, 649...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrepositions(t *testing.T) { if pass := prepositions("written_by"); !pass { t.Error("Given \"written_by\", prepositions() returned false, expected true") } if pass := prepositions("all_except"); !pass { t.Error("Given \"all_except\", prepositions() returned false, expected true") } if pass := prepositions("process_after"); !pass { t.Error("Given \"process_after\", prepositions() returned false, expected true") } if pass := prepositions("between_rocks_by_shore"); !pass { t.Error("Given \"between_rocks_by_shore\", prepositions() returned false, expected true") } if pass := prepositions("no_preps_here"); pass { t.Error("Given \"magic\", prepositions() returned true, expected false") } }
explode_data.jsonl/45807
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 4703, 27436, 1155, 353, 8840, 836, 8, 341, 743, 1494, 1669, 855, 27436, 445, 25569, 3710, 5038, 753, 6385, 341, 197, 3244, 6141, 445, 22043, 7245, 25569, 3710, 16215, 855, 27436, 368, 5927, 895, 11, 3601, 830, 1138, 197, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestUnmarshalWithoutNameType(t *testing.T) { var x TestThree if err := Unmarshal([]byte(withoutNameTypeData), &x); err != nil { t.Fatalf("Unmarshal: %s", err) } if x.Attr != OK { t.Fatalf("have %v\nwant %v", x.Attr, OK) } }
explode_data.jsonl/25292
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 1806, 27121, 26040, 675, 929, 1155, 353, 8840, 836, 8, 341, 2405, 856, 3393, 19641, 198, 743, 1848, 1669, 1230, 27121, 10556, 3782, 16980, 411, 675, 929, 1043, 701, 609, 87, 1215, 1848, 961, 2092, 341, 197, 3244, 30762, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestPathExpr evaluates document path expressions (field access,
// backtick-quoted field names, array indexing, nesting) against a fixed JSON
// document and checks each resolves to the expected value — plus that any
// lookup fails when the environment holds no document.
func TestPathExpr(t *testing.T) {
	tests := []struct {
		expr  string         // path expression under test
		res   document.Value // expected result value
		fails bool           // whether evaluation is expected to error
	}{
		{"a", document.NewIntegerValue(1), false},
		// Expected value built by decoding the same JSON fragment the
		// document contains under "b".
		{"b", func() document.Value {
			fb := document.NewFieldBuffer()
			err := json.Unmarshal([]byte(`{"foo bar": [1, 2]}`), fb)
			require.NoError(t, err)
			return document.NewDocumentValue(fb)
		}(), false},
		{"b.`foo bar`[0]", document.NewIntegerValue(1), false},
		{"_v.b.`foo bar`[0]", document.NewNullValue(), false},
		{"b.`foo bar`[1]", document.NewIntegerValue(2), false},
		// Out-of-range / type-mismatched lookups resolve to null.
		{"b.`foo bar`[2]", nullLitteral, false},
		{"b[0]", nullLitteral, false},
		{"c[0]", document.NewIntegerValue(1), false},
		{"c[1].foo", document.NewTextValue("bar"), false},
		{"c.foo", nullLitteral, false},
		{"d", nullLitteral, false},
	}

	// Shared document every expression is evaluated against.
	d := document.NewFromJSON([]byte(`{ "a": 1, "b": {"foo bar": [1, 2]}, "c": [1, {"foo": "bar"}, [1, 2]] }`))

	for _, test := range tests {
		t.Run(test.expr, func(t *testing.T) {
			testExpr(t, test.expr, expr.NewEnvironment(d), test.res, test.fails)
		})
	}

	// With no document in the environment, any path lookup must fail.
	t.Run("empty env", func(t *testing.T) {
		testExpr(t, "a", &expr.Environment{}, nullLitteral, true)
	})
}
explode_data.jsonl/70245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 511 }
[ 2830, 3393, 1820, 16041, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 8122, 649, 220, 914, 198, 197, 10202, 256, 2197, 6167, 198, 197, 1166, 6209, 1807, 198, 197, 59403, 197, 197, 4913, 64, 497, 2197, 7121, 3486, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPhantomValidation checks phantom-read validation of range-query results:
// a transaction must be invalidated when keys in its read range were added,
// removed, or had their version changed between simulation and validation.
func TestPhantomValidation(t *testing.T) {
	testDBEnv := testEnvs[levelDBtestEnvName]
	testDBEnv.Init(t)
	defer testDBEnv.Cleanup()
	db := testDBEnv.GetDBHandle("TestDB")

	//populate db with initial data
	batch := privacyenabledstate.NewUpdateBatch()
	batch.PubUpdates.Put("ns1", "key1", []byte("value1"), version.NewHeight(1, 0))
	batch.PubUpdates.Put("ns1", "key2", []byte("value2"), version.NewHeight(1, 1))
	batch.PubUpdates.Put("ns1", "key3", []byte("value3"), version.NewHeight(1, 2))
	batch.PubUpdates.Put("ns1", "key4", []byte("value4"), version.NewHeight(1, 3))
	batch.PubUpdates.Put("ns1", "key5", []byte("value5"), version.NewHeight(1, 4))
	db.ApplyPrivacyAwareUpdates(batch, version.NewHeight(1, 4))

	testValidator := &validator{db: db, hashFunc: testHashFunc}

	//rwset1 should be valid
	// Range [key2, key4) read with matching versions — no phantom, valid.
	rwsetBuilder1 := rwsetutil.NewRWSetBuilder()
	rqi1 := &kvrwset.RangeQueryInfo{StartKey: "key2", EndKey: "key4", ItrExhausted: true}
	rwsetutil.SetRawReads(rqi1, []*kvrwset.KVRead{
		rwsetutil.NewKVRead("key2", version.NewHeight(1, 1)),
		rwsetutil.NewKVRead("key3", version.NewHeight(1, 2))})
	rwsetBuilder1.AddToRangeQuerySet("ns1", rqi1)
	checkValidation(t, testValidator, getTestPubSimulationRWSet(t, rwsetBuilder1), []int{})

	//rwset2 should not be valid - Version of key4 changed
	rwsetBuilder2 := rwsetutil.NewRWSetBuilder()
	rqi2 := &kvrwset.RangeQueryInfo{StartKey: "key2", EndKey: "key4", ItrExhausted: false}
	rwsetutil.SetRawReads(rqi2, []*kvrwset.KVRead{
		rwsetutil.NewKVRead("key2", version.NewHeight(1, 1)),
		rwsetutil.NewKVRead("key3", version.NewHeight(1, 2)),
		rwsetutil.NewKVRead("key4", version.NewHeight(1, 2))})
	rwsetBuilder2.AddToRangeQuerySet("ns1", rqi2)
	checkValidation(t, testValidator, getTestPubSimulationRWSet(t, rwsetBuilder2), []int{0})

	//rwset3 should not be valid - simulate key3 got committed to db
	// (key3 present in db but missing from the recorded reads).
	rwsetBuilder3 := rwsetutil.NewRWSetBuilder()
	rqi3 := &kvrwset.RangeQueryInfo{StartKey: "key2", EndKey: "key4", ItrExhausted: false}
	rwsetutil.SetRawReads(rqi3, []*kvrwset.KVRead{
		rwsetutil.NewKVRead("key2", version.NewHeight(1, 1)),
		rwsetutil.NewKVRead("key4", version.NewHeight(1, 3))})
	rwsetBuilder3.AddToRangeQuerySet("ns1", rqi3)
	checkValidation(t, testValidator, getTestPubSimulationRWSet(t, rwsetBuilder3), []int{0})
	//
	//Remove a key in rwset4 and rwset5 should become invalid
	rwsetBuilder4 := rwsetutil.NewRWSetBuilder()
	rwsetBuilder4.AddToWriteSet("ns1", "key3", nil)
	rwsetBuilder5 := rwsetutil.NewRWSetBuilder()
	rqi5 := &kvrwset.RangeQueryInfo{StartKey: "key2", EndKey: "key4", ItrExhausted: false}
	rwsetutil.SetRawReads(rqi5, []*kvrwset.KVRead{
		rwsetutil.NewKVRead("key2", version.NewHeight(1, 1)),
		rwsetutil.NewKVRead("key3", version.NewHeight(1, 2)),
		rwsetutil.NewKVRead("key4", version.NewHeight(1, 3))})
	rwsetBuilder5.AddToRangeQuerySet("ns1", rqi5)
	checkValidation(t, testValidator, getTestPubSimulationRWSet(t, rwsetBuilder4, rwsetBuilder5), []int{1})

	//Add a key in rwset6 and rwset7 should become invalid
	rwsetBuilder6 := rwsetutil.NewRWSetBuilder()
	rwsetBuilder6.AddToWriteSet("ns1", "key2_1", []byte("value2_1"))
	rwsetBuilder7 := rwsetutil.NewRWSetBuilder()
	rqi7 := &kvrwset.RangeQueryInfo{StartKey: "key2", EndKey: "key4", ItrExhausted: false}
	rwsetutil.SetRawReads(rqi7, []*kvrwset.KVRead{
		rwsetutil.NewKVRead("key2", version.NewHeight(1, 1)),
		rwsetutil.NewKVRead("key3", version.NewHeight(1, 2)),
		rwsetutil.NewKVRead("key4", version.NewHeight(1, 3))})
	rwsetBuilder7.AddToRangeQuerySet("ns1", rqi7)
	checkValidation(t, testValidator, getTestPubSimulationRWSet(t, rwsetBuilder6, rwsetBuilder7), []int{1})
}
explode_data.jsonl/65028
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1464 }
[ 2830, 3393, 3357, 30002, 13799, 1155, 353, 8840, 836, 8, 341, 18185, 3506, 14359, 1669, 1273, 1702, 11562, 64586, 3506, 1944, 14359, 675, 921, 18185, 3506, 14359, 26849, 1155, 340, 16867, 1273, 3506, 14359, 727, 60639, 741, 20939, 1669, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCloudHypervisorStartSandbox(t *testing.T) { assert := assert.New(t) clhConfig, err := newClhConfig() assert.NoError(err) store, err := persist.GetDriver() assert.NoError(err) clhConfig.VMStorePath = store.RunVMStoragePath() clhConfig.RunStorePath = store.RunStoragePath() clh := &cloudHypervisor{ config: clhConfig, APIClient: &clhClientMock{}, virtiofsDaemon: &virtiofsdMock{}, } err = clh.StartVM(context.Background(), 10) assert.NoError(err) }
explode_data.jsonl/68502
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 16055, 39, 1082, 31396, 3479, 50, 31536, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 39407, 71, 2648, 11, 1848, 1669, 501, 5066, 71, 2648, 741, 6948, 35699, 3964, 692, 57279, 11, 1848, 1669, 22334, 223...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCandidateVoteSuccess stubs every cluster peer to grant its vote and
// checks the candidate transitions to leader after an election round.
func TestCandidateVoteSuccess(t *testing.T) {
	ctx := startup_candidate_test(t)
	defer teardown_candidate_test(t, ctx)
	// Replace each peer's RequestVote RPC so the vote is always granted.
	for _, o := range ctx.svr.cluster_info {
		oo, _ := o.client.(*MockClient)
		oo.ReplaceVoteFunctor(func(ctx context.Context, req *pb.RequestVoteReq) (*pb.RequestVoteRes, error) {
			rsp := new(pb.RequestVoteRes)
			rsp.Header = new(pb.ResHeader)
			rsp.Term = int64(1)
			rsp.VoteGranted = "id0"
			return rsp, nil
		})
	}
	go ctx.svr.Elect()
	// NOTE(review): fixed 1s sleep assumes the election completes in time;
	// potentially flaky on slow machines — consider polling instead.
	time.Sleep(1 * time.Second)
	if ctx.svr.state != pb.PeerState_Leader {
		t.Errorf("peer should become leader after elect succ:%v", ctx.svr.state)
	}
}
explode_data.jsonl/59088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 268 }
[ 2830, 3393, 63901, 41412, 7188, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 20567, 62360, 4452, 1155, 340, 16867, 49304, 62360, 4452, 1155, 11, 5635, 340, 2023, 8358, 297, 1669, 2088, 5635, 514, 18920, 40501, 3109, 341, 197, 197, 2624, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAgentConnectAuthorize_defaultDeny checks that with ACLs enabled and no
// explicit intentions, the connect authorize endpoint falls back to the
// default policy and denies the request.
func TestAgentConnectAuthorize_defaultDeny(t *testing.T) {
	t.Parallel()

	assert := assert.New(t)
	a := NewTestAgent(t.Name(), TestACLConfig())
	defer a.Shutdown()

	// Authorize "web" (identified by its SPIFFE cert URI) to reach "foo".
	args := &structs.ConnectAuthorizeRequest{
		Target:        "foo",
		ClientCertURI: connect.TestSpiffeIDService(t, "web").URI().String(),
	}
	req, _ := http.NewRequest("POST", "/v1/agent/connect/authorize?token=root", jsonReader(args))
	resp := httptest.NewRecorder()
	respRaw, err := a.srv.AgentConnectAuthorize(resp, req)
	assert.Nil(err)
	assert.Equal(200, resp.Code)

	// The call itself succeeds (HTTP 200) but authorization is denied by
	// the default policy, not by a configured intention.
	obj := respRaw.(*connectAuthorizeResp)
	assert.False(obj.Authorized)
	assert.Contains(obj.Reason, "Default behavior")
}
explode_data.jsonl/33674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 16810, 14611, 37483, 9993, 23619, 88, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 6948, 1669, 2060, 7121, 1155, 340, 11323, 1669, 1532, 2271, 16810, 1155, 2967, 1507, 3393, 55393, 2648, 2398, 16867, 264, 10849, 1845...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMessageWorkerStopQueue(t *testing.T) { s1 := newTestSignaler() m1 := message{context: Context{Signal: s1}} s2 := newTestSignaler() m2 := message{context: Context{Signal: s2}} qu := make(chan message, 2) qu <- m1 qu <- m2 stopQueue(qu) assert.False(t, s1.wait()) assert.False(t, s2.wait()) }
explode_data.jsonl/72809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 2052, 21936, 10674, 7554, 1155, 353, 8840, 836, 8, 341, 1903, 16, 1669, 501, 2271, 7264, 13111, 741, 2109, 16, 1669, 1943, 90, 2147, 25, 9608, 90, 26810, 25, 274, 16, 47449, 1903, 17, 1669, 501, 2271, 7264, 13111, 741, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTxWithPvtdataMetadata runs the private-data metadata transaction
// scenario against every configured test environment, using a sample BTL
// policy of 1000 blocks for the ("ns", "coll") collection.
func TestTxWithPvtdataMetadata(t *testing.T) {
	ledgerid, ns, coll := "testtxwithpvtdatametadata", "ns", "coll"
	btlPolicy := btltestutil.SampleBTLPolicy(
		map[[2]string]uint64{
			{"ns", "coll"}: 1000,
		},
	)
	// Re-run the same scenario for each backing store environment.
	for _, testEnv := range testEnvs {
		t.Logf("Running test for TestEnv = %s", testEnv.getName())
		testEnv.init(t, ledgerid, btlPolicy)
		testTxWithPvtdataMetadata(t, testEnv, ns, coll)
		testEnv.cleanup()
	}
}
explode_data.jsonl/63624
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 31584, 2354, 47, 9708, 691, 14610, 1155, 353, 8840, 836, 8, 341, 197, 50704, 307, 11, 12268, 11, 4530, 1669, 330, 1944, 3998, 4197, 30168, 1296, 266, 309, 7603, 497, 330, 4412, 497, 330, 17222, 698, 2233, 11544, 13825, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDequeueLeavesTimeOrdering verifies DequeueLeaves honors queue-timestamp
// order: a batch queued with an earlier timestamp is dequeued before a batch
// queued (inserted) earlier but timestamped later.
func TestDequeueLeavesTimeOrdering(t *testing.T) {
	// Queue two small batches of leaves at different timestamps. Do two separate dequeue
	// transactions and make sure the returned leaves are respecting the time ordering of the
	// queue.
	ctx := context.Background()
	cleanTestDB(t, db)
	as := NewAdminStorage(db)
	tree := mustCreateTree(ctx, t, as, testonly.LogTree)
	s := NewLogStorage(db, nil)
	mustSignAndStoreLogRoot(ctx, t, s, tree, 0)

	batchSize := 2
	leaves := createTestLeaves(int64(batchSize), 0)
	leaves2 := createTestLeaves(int64(batchSize), int64(batchSize))

	if _, err := s.QueueLeaves(ctx, tree, leaves, fakeQueueTime); err != nil {
		t.Fatalf("QueueLeaves(1st batch) = %v", err)
	}
	// These are one second earlier so should be dequeued first
	if _, err := s.QueueLeaves(ctx, tree, leaves2, fakeQueueTime.Add(-time.Second)); err != nil {
		t.Fatalf("QueueLeaves(2nd batch) = %v", err)
	}

	// Now try to dequeue two leaves and we should get the second batch
	runLogTX(t, s, tree, func(ctx context.Context, tx2 storage.LogTreeTX) error {
		dequeue1, err := tx2.DequeueLeaves(ctx, batchSize, fakeQueueTime)
		if err != nil {
			t.Fatalf("DequeueLeaves(1st) = %v", err)
		}
		if got, want := len(dequeue1), batchSize; got != want {
			t.Fatalf("Dequeue count mismatch (1st) got: %d, want: %d", got, want)
		}
		ensureAllLeavesDistinct(t, dequeue1)
		// Ensure this is the second batch queued by comparing leaf hashes (must be distinct as
		// the leaf data was).
		if !leafInBatch(dequeue1[0], leaves2) || !leafInBatch(dequeue1[1], leaves2) {
			t.Fatalf("Got leaf from wrong batch (1st dequeue): %v", dequeue1)
		}
		// Mark the dequeued batch as sequenced so the next dequeue sees
		// only the remaining (first-inserted) batch.
		iTimestamp := timestamppb.Now()
		for i, l := range dequeue1 {
			l.IntegrateTimestamp = iTimestamp
			l.LeafIndex = int64(i)
		}
		if err := tx2.UpdateSequencedLeaves(ctx, dequeue1); err != nil {
			t.Fatalf("UpdateSequencedLeaves(): %v", err)
		}
		return nil
	})

	// Try to dequeue again and we should get the batch that was queued first, though at a later time
	runLogTX(t, s, tree, func(ctx context.Context, tx3 storage.LogTreeTX) error {
		dequeue2, err := tx3.DequeueLeaves(ctx, batchSize, fakeQueueTime)
		if err != nil {
			t.Fatalf("DequeueLeaves(2nd) = %v", err)
		}
		if got, want := len(dequeue2), batchSize; got != want {
			t.Fatalf("Dequeue count mismatch (2nd) got: %d, want: %d", got, want)
		}
		ensureAllLeavesDistinct(t, dequeue2)
		// Ensure this is the first batch by comparing leaf hashes.
		if !leafInBatch(dequeue2[0], leaves) || !leafInBatch(dequeue2[1], leaves) {
			t.Fatalf("Got leaf from wrong batch (2nd dequeue): %v", dequeue2)
		}
		return nil
	})
}
explode_data.jsonl/30690
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 985 }
[ 2830, 3393, 1912, 4584, 2304, 4693, 1462, 4431, 287, 1155, 353, 8840, 836, 8, 341, 197, 322, 18745, 1378, 2613, 44792, 315, 10901, 518, 2155, 48781, 13, 3155, 1378, 8651, 83873, 198, 197, 322, 14131, 323, 1281, 2704, 279, 5927, 10901, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestConsumer_Metadata runs ConsumeTraceData over a set of Jaeger-flavored
// TraceData fixtures and approval-tests the metadata attached to the
// published request for each case.
func TestConsumer_Metadata(t *testing.T) {
	for _, tc := range []struct {
		name string                  // case name; also keys the approval file
		td   consumerdata.TraceData  // input trace data fixture
	}{
		{name: "jaeger", td: consumerdata.TraceData{
			SourceFormat: "jaeger",
			Node: &commonpb.Node{
				Identifier: &commonpb.ProcessIdentifier{
					HostName:       "host-foo",
					Pid:            107892,
					StartTimestamp: testStartTime()},
				LibraryInfo: &commonpb.LibraryInfo{ExporterVersion: "Jaeger-C++-3.2.1"},
				ServiceInfo: &commonpb.ServiceInfo{Name: "foo"},
				Attributes:  map[string]string{"client-uuid": "xxf0", "ip": "17.0.10.123", "foo": "bar"}},
			Resource: &resourcepb.Resource{
				Type:   "request",
				Labels: map[string]string{"a": "b", "c": "d"},
			}}},
		{name: "jaeger-version", td: consumerdata.TraceData{SourceFormat: "jaeger", Node: &commonpb.Node{LibraryInfo: &commonpb.LibraryInfo{
			Language: 7, ExporterVersion: "Jaeger-3.4.12"}}}},
		{name: "jaeger-no-language", td: consumerdata.TraceData{SourceFormat: "jaeger", Node: &commonpb.Node{LibraryInfo: &commonpb.LibraryInfo{ExporterVersion: "Jaeger-3.4.12"}}}},
		{name: "jaeger_minimal", td: consumerdata.TraceData{
			SourceFormat: "jaeger",
			Node: &commonpb.Node{
				Identifier:  &commonpb.ProcessIdentifier{},
				LibraryInfo: &commonpb.LibraryInfo{},
				ServiceInfo: &commonpb.ServiceInfo{},
			}}},
		{name: "minimal", td: consumerdata.TraceData{SourceFormat: "foo"}},
	} {
		t.Run(tc.name, func(t *testing.T) {
			// Capture the metadata the consumer hands to the publisher and
			// compare it against the stored approval file for this case.
			reporter := func(ctx context.Context, req publish.PendingReq) error {
				metadata := req.Tcontext.Metadata
				out, err := json.Marshal(metadata)
				require.NoError(t, err)
				approvals.AssertApproveResult(t, file("metadata_"+tc.name), out)
				return nil
			}
			require.NoError(t, (&Consumer{Reporter: reporter}).ConsumeTraceData(context.Background(), tc.td))
		})
	}
}
explode_data.jsonl/33582
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 811 }
[ 2830, 3393, 29968, 62219, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 914, 198, 197, 76373, 256, 11502, 691, 46920, 1043, 198, 197, 59403, 197, 197, 47006, 25, 330, 5580, 1878, 756, 298, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWRBuffer round-trips a table of ROOT objects — TFree segments (small and
// big-file offsets), TKey (rvers 4 small-file and 1004 big-file), TDirectory and
// TDirectoryFile in both variants — through MarshalROOT into a write buffer and
// back via UnmarshalROOT. For each case it also pre-seeds the buffers with
// io.EOF to check that the error is surfaced from both marshal and unmarshal
// paths, and finally deep-compares the decoded object with the original.
func TestWRBuffer(t *testing.T) { for _, tc := range []struct { name string want rtests.ROOTer }{ { name: "TFree", want: &freeSegment{ first: 21, last: 24, }, }, { name: "TFree", want: &freeSegment{ first: 21, last: kStartBigFile + 24, }, }, { name: "TKey", want: &Key{ nbytes: 1024, rvers: 4, // small file objlen: 10, datetime: datime2time(1576331001), keylen: 12, cycle: 2, seekkey: 1024, seekpdir: 2048, class: "MyClass", name: "my-key", title: "my key title", }, }, { name: "TKey", want: &Key{ nbytes: 1024, rvers: 1004, // big file objlen: 10, datetime: datime2time(1576331001), keylen: 12, cycle: 2, seekkey: 1024, seekpdir: 2048, class: "MyClass", name: "my-key", title: "my key title", }, }, { name: "TDirectory", want: &tdirectory{ rvers: 4, // small file named: *rbase.NewNamed("my-name", "my-title"), uuid: rbase.UUID{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, }, }, }, { name: "TDirectory", want: &tdirectory{ rvers: 1004, // big file named: *rbase.NewNamed("my-name", "my-title"), uuid: rbase.UUID{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, }, }, }, { name: "TDirectoryFile", want: &tdirectoryFile{ dir: tdirectory{ rvers: 4, // small file named: *rbase.NewNamed("", ""), uuid: rbase.UUID{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, }, }, ctime: datime2time(1576331001), mtime: datime2time(1576331010), nbyteskeys: 1, nbytesname: 2, seekdir: 3, seekparent: 4, seekkeys: 5, }, }, { name: "TDirectoryFile", want: &tdirectoryFile{ dir: tdirectory{ rvers: 1004, // big file named: *rbase.NewNamed("", ""), uuid: rbase.UUID{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, }, }, ctime: datime2time(1576331001), mtime: datime2time(1576331010), nbyteskeys: 1, nbytesname: 2, seekdir: 3, seekparent: 4, seekkeys: 5, }, }, } { t.Run(tc.name, func(t *testing.T) { { wbuf := rbytes.NewWBuffer(nil, nil, 0, nil) wbuf.SetErr(io.EOF) _, err := tc.want.MarshalROOT(wbuf) if err == nil { t.Fatalf("expected an error") } if err != io.EOF { t.Fatalf("got=%v, 
want=%v", err, io.EOF) } } wbuf := rbytes.NewWBuffer(nil, nil, 0, nil) _, err := tc.want.MarshalROOT(wbuf) if err != nil { t.Fatalf("could not marshal ROOT: %v", err) } rbuf := rbytes.NewRBuffer(wbuf.Bytes(), nil, 0, nil) class := tc.want.Class() obj := rtypes.Factory.Get(class)().Interface().(rbytes.Unmarshaler) { rbuf.SetErr(io.EOF) err = obj.UnmarshalROOT(rbuf) if err == nil { t.Fatalf("expected an error") } if err != io.EOF { t.Fatalf("got=%v, want=%v", err, io.EOF) } rbuf.SetErr(nil) } err = obj.UnmarshalROOT(rbuf) if err != nil { t.Fatalf("could not unmarshal ROOT: %v", err) } if !reflect.DeepEqual(obj, tc.want) { t.Fatalf("error\ngot= %+v\nwant=%+v\n", obj, tc.want) } }) } }
explode_data.jsonl/2118
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1876 }
[ 2830, 3393, 17925, 4095, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 914, 198, 197, 50780, 435, 23841, 13, 23888, 261, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 10808, 765, 756,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestAddWiredTigerStats builds MongodbData from a StatLine whose storage engine
// is "wiredTiger", flushes the default stats into an accumulator, and asserts:
// every key in wiredTigerStats is present as a float field of measurement
// "mongodb"; every key in wiredTigerExtStats is present as either a float or an
// int64 field; and "page_faults" is recorded as an int64 field.
func TestAddWiredTigerStats(t *testing.T) { d := NewMongodbData( &StatLine{ StorageEngine: "wiredTiger", CacheDirtyPercent: 0, CacheUsedPercent: 0, TrackedDirtyBytes: 0, CurrentCachedBytes: 0, MaxBytesConfigured: 0, AppThreadsPageReadCount: 0, AppThreadsPageReadTime: 0, AppThreadsPageWriteCount: 0, BytesWrittenFrom: 0, BytesReadInto: 0, PagesEvictedByAppThread: 0, PagesQueuedForEviction: 0, PagesWrittenFromCache: 1247, ServerEvictingPages: 0, WorkerThreadEvictingPages: 0, FaultsCnt: 204, }, tags, ) var acc testutil.Accumulator d.AddDefaultStats() d.flush(&acc) for key := range wiredTigerStats { require.True(t, acc.HasFloatField("mongodb", key), key) } for key := range wiredTigerExtStats { require.True(t, acc.HasFloatField("mongodb", key) || acc.HasInt64Field("mongodb", key), key) } require.True(t, acc.HasInt64Field("mongodb", "page_faults")) }
explode_data.jsonl/35775
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 499 }
[ 2830, 3393, 2212, 54, 2690, 51, 7272, 16635, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 1532, 44, 21225, 1043, 1006, 197, 197, 5, 15878, 2460, 515, 298, 197, 5793, 4571, 25, 1797, 330, 86, 2690, 51, 7272, 756, 298, 6258, 1777, 36485,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetComment(t *testing.T) { // 初始化请求地址 uri := "/comments" // 发起Get请求 body := util.Get(uri, r) fmt.Printf("response:%v\n", string(body)) // 判断响应是否和预期一致 // if string(body) != "success" { // t.Errorf("响应字符串不符, body:%v\n", string(body)) // } }
explode_data.jsonl/32308
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 1949, 10677, 1155, 353, 8840, 836, 8, 341, 197, 322, 76090, 34859, 46477, 198, 197, 6070, 1669, 3521, 14727, 1837, 197, 322, 69425, 71618, 1949, 34859, 198, 35402, 1669, 4094, 2234, 25797, 11, 435, 340, 11009, 19367, 445, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMerge_WAN feeds serf member lists to wanMergeDelegate.NotifyMerge.
// A member built with makeNode(..., false) (not a server) must be rejected
// with an error containing "not a server"; a list of two well-formed server
// nodes in different datacenters must merge without error. An empty expect
// string means "no error expected".
func TestMerge_WAN(t *testing.T) { cases := []struct { members []*serf.Member expect string }{ // Not a server { members: []*serf.Member{ makeNode("dc2", "node1", "96430788-246f-4379-94ce-257f7429e340", false), }, expect: "not a server", }, // Good cluster. { members: []*serf.Member{ makeNode("dc2", "node1", "6185913b-98d7-4441-bd8f-f7f7d854a4af", true), makeNode("dc3", "node2", "cda916bc-a357-4a19-b886-59419fcee50c", true), }, expect: "", }, } delegate := &wanMergeDelegate{} for i, c := range cases { if err := delegate.NotifyMerge(c.members); c.expect == "" { if err != nil { t.Fatalf("case %d: err: %v", i+1, err) } } else { if err == nil || !strings.Contains(err.Error(), c.expect) { t.Fatalf("case %d: err: %v", i+1, err) } } } }
explode_data.jsonl/82642
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 52096, 2763, 1093, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 2109, 7062, 29838, 799, 69, 46404, 198, 197, 24952, 220, 914, 198, 197, 59403, 197, 197, 322, 2806, 264, 3538, 198, 197, 197, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestAssigningSubscriberIgnoreOutstandingAcks simulates an assignment stream
// that first grants partition 1 (two messages are delivered; one is acked, one
// left outstanding) and then revokes all partitions. It verifies that the
// single-partition subscriber terminates before the second assignment is
// acked, and that the ack issued after revocation (ack2) is discarded rather
// than committed — only commitReq(12) for the first message is expected.
func TestAssigningSubscriberIgnoreOutstandingAcks(t *testing.T) { const subscription = "projects/123456/locations/us-central1-b/subscriptions/my-sub" receiver := newTestMessageReceiver(t) msg1 := seqMsgWithOffsetAndSize(11, 100) msg2 := seqMsgWithOffsetAndSize(22, 200) verifiers := test.NewVerifiers(t) // Assignment stream asnStream := test.NewRPCVerifier(t) asnStream.Push(initAssignmentReq(subscription, fakeUUID[:]), assignmentResp([]int64{1}), nil) assignmentBarrier1 := asnStream.PushWithBarrier(assignmentAckReq(), assignmentResp([]int64{}), nil) assignmentBarrier2 := asnStream.PushWithBarrier(assignmentAckReq(), nil, nil) verifiers.AddAssignmentStream(subscription, asnStream) // Partition 1 subStream := test.NewRPCVerifier(t) subStream.Push(initSubReqCommit(subscriptionPartition{Path: subscription, Partition: 1}), initSubResp(), nil) subStream.Push(initFlowControlReq(), msgSubResp(msg1, msg2), nil) verifiers.AddSubscribeStream(subscription, 1, subStream) cmtStream := test.NewRPCVerifier(t) cmtStream.Push(initCommitReq(subscriptionPartition{Path: subscription, Partition: 1}), initCommitResp(), nil) cmtStream.Push(commitReq(12), commitResp(1), nil) verifiers.AddCommitStream(subscription, 1, cmtStream) mockServer.OnTestStart(verifiers) defer mockServer.OnTestEnd() sub := newTestAssigningSubscriber(t, receiver.onMessage, noopReassignmentHandler, subscription) if gotErr := sub.WaitStarted(); gotErr != nil { t.Errorf("Start() got err: (%v)", gotErr) } // Partition assignments are initially {1}. receiver.ValidateMsg(msg1).Ack() ack2 := receiver.ValidateMsg(msg2) subscribers := sub.Subscribers() // Partition assignments will now be {}. assignmentBarrier1.Release() assignmentBarrier2.ReleaseAfter(func() { // Verify that the assignment is acked after the subscriber has terminated. 
if got, want := len(subscribers), 1; got != want { t.Errorf("singlePartitionSubcriber count: got %d, want %d", got, want) return } if got, want := subscribers[0].Status(), serviceTerminated; got != want { t.Errorf("singlePartitionSubcriber status: got %v, want %v", got, want) } }) // Partition 1 has already been unassigned, so this ack is discarded. ack2.Ack() sub.Stop() if gotErr := sub.WaitStopped(); gotErr != nil { t.Errorf("Stop() got err: (%v)", gotErr) } }
explode_data.jsonl/31657
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 827 }
[ 2830, 3393, 28933, 287, 40236, 12497, 2662, 10070, 32, 14553, 1155, 353, 8840, 836, 8, 341, 4777, 15142, 284, 330, 17161, 14, 16, 17, 18, 19, 20, 21, 14, 31309, 62431, 84081, 16, 1455, 37885, 29966, 34198, 17967, 698, 17200, 12862, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestFSGapsInSequenceWithSliceMaxMsgsLimits stores messages with sequences
// 1, 2, 5, 6, 7, 8 (a gap at 3-4) in a file store configured with at most 3
// messages per slice, for every option set returned by
// testFSGetOptionsForGapsTests. It verifies that gaps still count toward the
// message total (5 after the first three stores, 8 at the end), that
// first/last sequences are 1/8, and that the three file slices hold the
// first/last ranges 1-4, 5-7 and 8-8 respectively.
func TestFSGapsInSequenceWithSliceMaxMsgsLimits(t *testing.T) { opts := testFSGetOptionsForGapsTests() for _, o := range opts { t.Run(o.name, func(t *testing.T) { cleanupFSDatastore(t) defer cleanupFSDatastore(t) s := createDefaultFileStore(t, o.opt, SliceConfig(3, 0, 0, "")) defer s.Close() cs := storeCreateChannel(t, s, "foo") payload := []byte("msg") storeMsg(t, cs, "foo", 1, payload) storeMsg(t, cs, "foo", 2, payload) storeMsg(t, cs, "foo", 5, payload) n, _ := msgStoreState(t, cs.Msgs) // Gaps are still counted as messages if n != 5 { t.Fatalf("Expected 5 messages, got %v", n) } storeMsg(t, cs, "foo", 6, payload) storeMsg(t, cs, "foo", 7, payload) storeMsg(t, cs, "foo", 8, payload) n, _ = msgStoreState(t, cs.Msgs) if n != 8 { t.Fatalf("Expected 8 messages, got %v", n) } first, last := msgStoreFirstAndLastSequence(t, cs.Msgs) if first != 1 || last != 8 { t.Fatalf("Unexpected first/last: %v/%v", first, last) } ms := cs.Msgs.(*FileMsgStore) ms.Lock() numSlices := len(ms.files) if numSlices != 3 { ms.Unlock() t.Fatalf("Expected 3 file slices, got %v", numSlices) } // The first slice will have 1, 2, [3, 4] // The second will have 5, 6, 7 // THe third will have 8 type firstLast struct { first, last uint64 } expected := make(map[int]firstLast) expected[1] = firstLast{1, 4} expected[2] = firstLast{5, 7} expected[3] = firstLast{8, 8} for i := ms.firstFSlSeq; i <= ms.lastFSlSeq; i++ { sl := ms.files[i] first := sl.firstSeq last := sl.lastSeq if first != expected[i].first || last != expected[i].last { ms.Unlock() t.Fatalf("Expected first/last to be %v/%v for slice %d, got %v/%v", expected[i].first, expected[i].last, i, first, last) } } ms.Unlock() }) } }
explode_data.jsonl/7784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 871 }
[ 2830, 3393, 37, 7783, 2625, 641, 14076, 2354, 33236, 5974, 6611, 82, 94588, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 1273, 8485, 1949, 3798, 2461, 38, 2625, 18200, 741, 2023, 8358, 297, 1669, 2088, 12185, 341, 197, 3244, 16708, 10108,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
// TestPaymentControlSwitchDoubleSend checks double-send protection in the
// payment control: InitPayment moves a payment to StatusInFlight; re-initiating
// the same payment hash returns ErrPaymentInFlight both before and after an
// attempt is registered; and once SettleAttempt succeeds (StatusSucceeded), a
// further InitPayment for that hash yields ErrAlreadyPaid.
func TestPaymentControlSwitchDoubleSend(t *testing.T) { t.Parallel() db, err := initDB() if err != nil { t.Fatalf("unable to init db: %v", err) } pControl := NewPaymentControl(db) info, attempt, preimg, err := genInfo() if err != nil { t.Fatalf("unable to generate htlc message: %v", err) } // Sends base htlc message which initiate base status and move it to // StatusInFlight and verifies that it was changed. err = pControl.InitPayment(info.PaymentHash, info) if err != nil { t.Fatalf("unable to send htlc message: %v", err) } assertPaymentStatus(t, pControl, info.PaymentHash, StatusInFlight) assertPaymentInfo( t, pControl, info.PaymentHash, info, nil, nil, ) // Try to initiate double sending of htlc message with the same // payment hash, should result in error indicating that payment has // already been sent. err = pControl.InitPayment(info.PaymentHash, info) if err != ErrPaymentInFlight { t.Fatalf("payment control wrong behaviour: " + "double sending must trigger ErrPaymentInFlight error") } // Record an attempt. _, err = pControl.RegisterAttempt(info.PaymentHash, attempt) if err != nil { t.Fatalf("unable to send htlc message: %v", err) } assertPaymentStatus(t, pControl, info.PaymentHash, StatusInFlight) htlc := &htlcStatus{ HTLCAttemptInfo: attempt, } assertPaymentInfo( t, pControl, info.PaymentHash, info, nil, htlc, ) // Sends base htlc message which initiate StatusInFlight. err = pControl.InitPayment(info.PaymentHash, info) if err != ErrPaymentInFlight { t.Fatalf("payment control wrong behaviour: " + "double sending must trigger ErrPaymentInFlight error") } // After settling, the error should be ErrAlreadyPaid. 
_, err = pControl.SettleAttempt( info.PaymentHash, attempt.AttemptID, &HTLCSettleInfo{ Preimage: preimg, }, ) if err != nil { t.Fatalf("error shouldn't have been received, got: %v", err) } assertPaymentStatus(t, pControl, info.PaymentHash, StatusSucceeded) htlc.settle = &preimg assertPaymentInfo(t, pControl, info.PaymentHash, info, nil, htlc) err = pControl.InitPayment(info.PaymentHash, info) if err != ErrAlreadyPaid { t.Fatalf("unable to send htlc message: %v", err) } }
explode_data.jsonl/73789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 788 }
[ 2830, 3393, 20188, 3273, 16837, 7378, 11505, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20939, 11, 1848, 1669, 2930, 3506, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 45928, 311, 2930, 2927, 25, 1018, 85, 497, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
// TestEmptyGoPrefix creates a temporary workspace with packages foo and bar
// (bar imports both "fmt" and "foo"), runs gazelle with -go_prefix "", and
// checks the generated BUILD file for bar: with an empty prefix the importpath
// must stay "bar" and the dependency on foo must resolve to
// "//foo:go_default_library".
func TestEmptyGoPrefix(t *testing.T) { files := []fileSpec{ {path: "WORKSPACE"}, { path: "foo/foo.go", content: "package foo", }, { path: "bar/bar.go", content: ` package bar import ( _ "fmt" _ "foo" ) `, }, } dir, err := createFiles(files) if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) args := []string{"-go_prefix", ""} if err := runGazelle(dir, args); err != nil { t.Fatal(err) } checkFiles(t, dir, []fileSpec{{ path: filepath.Join("bar", config.DefaultValidBuildFileNames[0]), content: ` load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = ["bar.go"], importpath = "bar", visibility = ["//visibility:public"], deps = ["//foo:go_default_library"], ) `, }}) }
explode_data.jsonl/40394
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 3522, 10850, 14335, 1155, 353, 8840, 836, 8, 341, 74075, 1669, 3056, 1192, 8327, 515, 197, 197, 90, 2343, 25, 330, 18470, 44641, 7115, 197, 197, 515, 298, 26781, 25, 262, 330, 7975, 60555, 18002, 756, 298, 27751, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateEc2(t *testing.T) { terraformOptions := &terraform.Options{ TerraformDir: "../examples/ec2-test", Vars: map[string]interface{}{}, } defer terraform.Destroy(t, terraformOptions) terraform.InitAndApply(t, terraformOptions) instanceURL := terraform.Output(t, terraformOptions, "instance-url") maxRetries := 30 timeBetweenRetries := 5 * time.Second instanceText := "Welcome to nginx on Amazon Linux" http_helper.HttpGetWithRetry(t, instanceURL, 200, instanceText, maxRetries, timeBetweenRetries) }
explode_data.jsonl/78321
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 4021, 50730, 17, 1155, 353, 8840, 836, 8, 1476, 197, 61385, 3798, 1669, 609, 61385, 22179, 515, 197, 10261, 13886, 627, 6184, 25, 7005, 51668, 73058, 17, 16839, 756, 197, 17446, 1561, 25, 260, 2415, 14032, 31344, 6257, 38837...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapProxy_PutTransientWithNilKey(t *testing.T) { testValue := "testingValue" err := mp.PutTransient(nil, testValue, 1, time.Millisecond) AssertErrorNotNil(t, err, "putTransient did not return an error for nil key") mp.Clear() }
explode_data.jsonl/56974
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 2227, 16219, 1088, 332, 48183, 2354, 19064, 1592, 1155, 353, 8840, 836, 8, 341, 18185, 1130, 1669, 330, 8840, 1130, 698, 9859, 1669, 10490, 39825, 48183, 27907, 11, 1273, 1130, 11, 220, 16, 11, 882, 71482, 340, 18017, 1454, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestDownloadBatchDownloadDataReadFail stubs the shared-storage plugin so that
// DownloadData for "ref1" returns a reader whose Read always fails, then calls
// downloadBatch and asserts the failure surfaces as error code FF10376. Mock
// expectations are verified at the end.
func TestDownloadBatchDownloadDataReadFail(t *testing.T) { dm, cancel := newTestDownloadManager(t) defer cancel() reader := ioutil.NopCloser(iotest.ErrReader(fmt.Errorf("read failed"))) mss := dm.sharedstorage.(*sharedstoragemocks.Plugin) mss.On("DownloadData", mock.Anything, "ref1").Return(reader, nil) _, _, err := dm.downloadBatch(dm.ctx, downloadBatchData{ Namespace: "ns1", PayloadRef: "ref1", }) assert.Regexp(t, "FF10376", err) mss.AssertExpectations(t) }
explode_data.jsonl/45627
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 11377, 21074, 11377, 1043, 4418, 19524, 1155, 353, 8840, 836, 8, 1476, 2698, 76, 11, 9121, 1669, 501, 2271, 11377, 2043, 1155, 340, 16867, 9121, 2822, 61477, 1669, 43144, 2067, 453, 51236, 799, 1956, 354, 477, 27862, 5062, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatBorderInterpolate(t *testing.T) { n := BorderInterpolate(1, 5, 1) if n == 0 { t.Error("TestBorderInterpolate dst should not be 0.") } }
explode_data.jsonl/81729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 60 }
[ 2830, 3393, 11575, 10691, 3306, 45429, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 13992, 3306, 45429, 7, 16, 11, 220, 20, 11, 220, 16, 340, 743, 308, 621, 220, 15, 341, 197, 3244, 6141, 445, 2271, 10691, 3306, 45429, 10648, 1265, 537...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestDecodeCmdMessageGetStreamLength decodes an AMF0 body consisting of a nil
// marker (0x05) followed by the string "abc" (marker 0x02, length 3) through
// CmdBodyDecoderFor("getStreamLength", 42), and expects a
// NetStreamGetStreamLength whose StreamName is "abc".
func TestDecodeCmdMessageGetStreamLength(t *testing.T) { bin := []byte{ // nil 0x05, // string: abc 0x02, 0x00, 0x03, 0x61, 0x62, 0x63, } r := bytes.NewReader(bin) d := amf0.NewDecoder(r) var v AMFConvertible err := CmdBodyDecoderFor("getStreamLength", 42)(r, d, &v) assert.Nil(t, err) assert.Equal(t, &NetStreamGetStreamLength{ StreamName: "abc", }, v) }
explode_data.jsonl/7730
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 32564, 15613, 2052, 1949, 3027, 4373, 1155, 353, 8840, 836, 8, 341, 2233, 258, 1669, 3056, 3782, 515, 197, 197, 322, 2092, 198, 197, 197, 15, 87, 15, 20, 345, 197, 197, 322, 914, 25, 39022, 198, 197, 197, 15, 87, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLeakingBody sends a POST through an HTTP client whose round tripper signs
// requests with AWS SigV4 (mock credentials), against a stub server that
// verifies the request signature. It asserts that the request body reader
// (wrapped in checkCloser) is closed after client.Do, i.e. the signing
// transport does not leak the body. GetBody re-creates the reader so the
// request can be replayed during signing.
func TestLeakingBody(t *testing.T) { // Some form of AWS credentials must be set up for tests to succeed awsCreds := fetchMockCredentials() authConfig := AuthConfig{Region: "region", Service: "service"} server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { _, err := v4.GetSignedRequestSignature(r) assert.NoError(t, err) w.WriteHeader(200) })) defer server.Close() serverURL, err := url.Parse(server.URL) assert.NoError(t, err) setting := confighttp.HTTPClientSettings{ Endpoint: serverURL.String(), TLSSetting: configtls.TLSClientSetting{}, ReadBufferSize: 0, WriteBufferSize: 0, Timeout: 0, CustomRoundTripper: func(next http.RoundTripper) (http.RoundTripper, error) { return newSigningRoundTripperWithCredentials(authConfig, awsCreds, next) }, } client, _ := setting.ToClient() checker := &checkCloser{Reader: strings.NewReader("a=1&b=2")} req, err := http.NewRequest("POST", setting.Endpoint, checker) assert.NoError(t, err) req.GetBody = func() (io.ReadCloser, error) { checker.Reader = strings.NewReader("a=1&b=2") return checker, nil } _, err = client.Do(req) assert.NoError(t, err) assert.True(t, checker.closed) }
explode_data.jsonl/19760
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 2304, 1765, 5444, 1155, 353, 8840, 836, 8, 341, 197, 322, 4329, 1352, 315, 23245, 16387, 1969, 387, 738, 705, 369, 7032, 311, 11996, 198, 197, 8635, 34, 53369, 1669, 7807, 11571, 27025, 741, 78011, 2648, 1669, 7366, 2648, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouter_getSetup(t *testing.T) { t.Run("empty", func(t *testing.T) { rt := router{ config: &config.Config{}, db: &mockGetSetupDatabase{result: true}, } m := gin.New() m.GET("/", rt.getSetup) r := httptest.NewRequest(http.MethodGet, "/", nil) w := httptest.NewRecorder() m.ServeHTTP(w, r) if w.Code != http.StatusNoContent { t.Errorf("Unexpected status code %v", w.Code) } }) t.Run("not empty", func(t *testing.T) { rt := router{ config: &config.Config{}, db: &mockGetSetupDatabase{result: false}, } m := gin.New() m.GET("/", rt.getSetup) r := httptest.NewRequest(http.MethodGet, "/", nil) w := httptest.NewRecorder() m.ServeHTTP(w, r) if w.Code != http.StatusForbidden { t.Errorf("Unexpected status code %v", w.Code) } }) }
explode_data.jsonl/11626
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 9523, 3062, 21821, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 3194, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 55060, 1669, 9273, 515, 298, 25873, 25, 609, 1676, 10753, 38837, 298, 20939, 25, 257, 609, 16712, 1949,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestStoreWins posts a win for player "Pepper" to the player server and checks
// that the stub store recorded exactly one win call with that player, and that
// the server responded 202 Accepted. Note the StubPlayerStore literal uses
// positional fields (scores map, then two nil fields).
func TestStoreWins(t *testing.T) { store := StubPlayerStore{ map[string]int{}, nil,nil, } server := NewPlayerServer(&store) t.Run("it records wins on POST", func(t *testing.T) { player := "Pepper" request := newPostWinRequest(player) response := httptest.NewRecorder() server.ServeHTTP(response, request) assertStatus(t, response.Code, http.StatusAccepted) if len(store.winCalls) != 1 { t.Fatalf("got %d calls to RecordWin want %d", len(store.winCalls), 1) } if store.winCalls[0] != player { t.Errorf("did not store correct winner got %q want %q", store.winCalls[0], player) } }) }
explode_data.jsonl/29294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 241 }
[ 2830, 3393, 6093, 96186, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 66611, 4476, 6093, 515, 197, 19567, 14032, 63025, 38837, 197, 84131, 37760, 345, 197, 532, 41057, 1669, 1532, 4476, 5475, 2099, 4314, 692, 3244, 16708, 445, 275, 7424, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestKernelMemoryUsage(t *testing.T) { c, err := NewContainer(ContainerName) if err != nil { t.Errorf(err.Error()) } if _, err := c.KernelMemoryUsage(); err != nil { t.Errorf(err.Error()) } }
explode_data.jsonl/2779
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 26343, 10642, 14783, 1155, 353, 8840, 836, 8, 341, 1444, 11, 1848, 1669, 1532, 4502, 75145, 675, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 3964, 6141, 2398, 197, 630, 743, 8358, 1848, 1669, 272, 11352, 5454, 10642, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestJoinArgType(t *testing.T) { p := asm.NewPlan([]interface{}{ []interface{}{"join", 1, "x"}, }) err := p.Execute(map[string]interface{}{}) tt.NotNil(t, err) }
explode_data.jsonl/39853
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 12292, 2735, 929, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 32828, 7121, 20485, 10556, 4970, 67066, 197, 197, 1294, 4970, 6257, 4913, 5987, 497, 220, 16, 11, 330, 87, 7115, 197, 3518, 9859, 1669, 281, 13827, 9147, 14032, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestRouterParamBacktraceNotFound registers the routes /:param1, /:param1/foo,
// /:param1/bar and /:param1/bar/:param2, then checks that Find resolves /a,
// /a/foo, /a/bar and /a/bar/b with the correct param values (backtracking from
// the static children to the param node), and that the unmatched /a/bbbbb
// resolves to a handler returning an HTTPError with status 404.
func TestRouterParamBacktraceNotFound(t *testing.T) { e := New() r := e.router // Add r.Add(http.MethodGet, "/:param1", func(c Context) error { return nil }) r.Add(http.MethodGet, "/:param1/foo", func(c Context) error { return nil }) r.Add(http.MethodGet, "/:param1/bar", func(c Context) error { return nil }) r.Add(http.MethodGet, "/:param1/bar/:param2", func(c Context) error { return nil }) c := e.NewContext(nil, nil).(*context) //Find r.Find(http.MethodGet, "/a", c) assert.Equal(t, "a", c.Param("param1")) c = e.NewContext(nil, nil).(*context) r.Find(http.MethodGet, "/a/foo", c) assert.Equal(t, "a", c.Param("param1")) c = e.NewContext(nil, nil).(*context) r.Find(http.MethodGet, "/a/bar", c) assert.Equal(t, "a", c.Param("param1")) c = e.NewContext(nil, nil).(*context) r.Find(http.MethodGet, "/a/bar/b", c) assert.Equal(t, "a", c.Param("param1")) assert.Equal(t, "b", c.Param("param2")) c = e.NewContext(nil, nil).(*context) r.Find(http.MethodGet, "/a/bbbbb", c) he := c.handler(c).(*HTTPError) assert.Equal(t, http.StatusNotFound, he.Code) }
explode_data.jsonl/47131
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 475 }
[ 2830, 3393, 9523, 2001, 3707, 15067, 10372, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1532, 741, 7000, 1669, 384, 22125, 271, 197, 322, 2691, 198, 7000, 1904, 19886, 20798, 1949, 11, 3521, 25, 903, 16, 497, 2915, 1337, 9608, 8, 1465, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_WithoutConfig_MigrationDirectoryExists(t *testing.T) { args := os.Args defer func() { os.Args = args }() os.Args = []string{""} initializeDefaults() _, err := migrationDirectoryExists() if err != nil { t.Error("failed....") t.Log(err) } }
explode_data.jsonl/22638
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 62, 26040, 2648, 1245, 5033, 9310, 15575, 1155, 353, 8840, 836, 8, 341, 31215, 1669, 2643, 51015, 198, 16867, 2915, 368, 314, 2643, 51015, 284, 2827, 50746, 25078, 51015, 284, 3056, 917, 90, 3014, 532, 97129, 16273, 741, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMailGetMails(t *testing.T) { teardown := setup(t, nil) defer teardown(t) mails := testDb.GetMails() if len(mails) > 0 { t.Errorf("testDb.GetMails() should be 0") } }
explode_data.jsonl/66562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 16702, 1949, 44, 6209, 1155, 353, 8840, 836, 8, 341, 197, 665, 37496, 1669, 6505, 1155, 11, 2092, 340, 16867, 49304, 1155, 692, 2109, 6209, 1669, 1273, 7994, 2234, 44, 6209, 741, 743, 2422, 1255, 6209, 8, 861, 220, 15, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestMsgTransferNFTGetSignBytesMethod pins the canonical (key-sorted) JSON
// produced by MsgTransferNFT.GetSignBytes for a fixed denom/id/token-URI/
// addresses fixture against an exact golden string.
func TestMsgTransferNFTGetSignBytesMethod(t *testing.T) { newMsgTransferNFT := types.NewMsgTransferNFT(denomID, denom, id, tokenURI, uriHash, tokenData, address.String(), address2.String()) sortedBytes := newMsgTransferNFT.GetSignBytes() expected := `{"type":"irismod/nft/MsgTransferNFT","value":{"data":"https://google.com/token-1.json","denom_id":"denom","id":"denom","name":"id1","recipient":"cosmos15ky9du8a2wlstz6fpx3p4mqpjyrm5cgp0ctjdj","sender":"cosmos15ky9du8a2wlstz6fpx3p4mqpjyrm5cgqjwl8sq","uri":"https://google.com/token-1.json","uri_hash":"uriHash"}}` require.Equal(t, expected, string(sortedBytes)) }
explode_data.jsonl/28171
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 6611, 21970, 45, 3994, 1949, 7264, 7078, 3523, 1155, 353, 8840, 836, 8, 341, 8638, 6611, 21970, 45, 3994, 1669, 4494, 7121, 6611, 21970, 45, 3994, 1500, 268, 316, 915, 11, 49744, 11, 877, 11, 3950, 10301, 11, 13071, 6370, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAccKeycloakDefaultGroups_import is a Terraform acceptance test: it
// applies a randomized realm with one default group, checks the group is in
// the realm's default set, then imports keycloak_default_groups.group_default
// by realm name and verifies the imported state matches.
func TestAccKeycloakDefaultGroups_import(t *testing.T) { realmName := "terraform-" + acctest.RandString(10) groupName := "terraform-group-" + acctest.RandString(10) resource.Test(t, resource.TestCase{ ProviderFactories: testAccProviderFactories, PreCheck: func() { testAccPreCheck(t) }, Steps: []resource.TestStep{ { Config: testKeycloakDefaultGroups_basic(realmName, groupName), Check: testAccCheckGroupsAreDefault("keycloak_default_groups.group_default", []string{groupName}), }, { ResourceName: "keycloak_default_groups.group_default", ImportState: true, ImportStateVerify: true, ImportStateId: realmName, }, }, }) }
explode_data.jsonl/28422
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 14603, 1592, 88751, 3675, 22173, 18434, 1155, 353, 8840, 836, 8, 341, 17200, 7673, 675, 1669, 330, 61385, 27651, 488, 1613, 67880, 2013, 437, 703, 7, 16, 15, 340, 44260, 675, 1669, 330, 61385, 4351, 27651, 488, 1613, 67880, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFilterAndTransform(t *testing.T) { carotte := "carotte" ghLabelCarotte := &github.Label{ Name: &carotte, } courgette := "courgette" ghLabelCourgette := &github.Label{ Name: &courgette, } tomate := "tomate" ghLabelTomate := &github.Label{ Name: &tomate, } legumes := []*github.Label{ghLabelCarotte, ghLabelCourgette, ghLabelTomate} names := FilterAndTransform(legumes, All, NameIdentity) expected := []string{carotte, courgette, tomate} assert.EqualValues(t, expected, names) }
explode_data.jsonl/16580
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 5632, 3036, 8963, 1155, 353, 8840, 836, 8, 341, 1444, 277, 50011, 1669, 330, 6918, 50011, 698, 197, 866, 2476, 8852, 50011, 1669, 609, 5204, 4679, 515, 197, 21297, 25, 609, 6918, 50011, 345, 197, 630, 1444, 413, 455, 665, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadLinesFromByteBuffer(t *testing.T) { testFn := func(byteArray []byte, expected []string) { index := 0 readIndex := 0 for ; readIndex < len(byteArray); index++ { line, n := utiliptables.ReadLine(readIndex, byteArray) readIndex = n if expected[index] != line { t.Errorf("expected:%q, actual:%q", expected[index], line) } } // for if readIndex < len(byteArray) { t.Errorf("Byte buffer was only partially read. Buffer length is:%d, readIndex is:%d", len(byteArray), readIndex) } if index < len(expected) { t.Errorf("All expected strings were not compared. expected arr length:%d, matched count:%d", len(expected), index-1) } } byteArray1 := []byte("\n Line 1 \n\n\n L ine4 \nLine 5 \n \n") expected1 := []string{"", "Line 1", "", "", "L ine4", "Line 5", ""} testFn(byteArray1, expected1) byteArray1 = []byte("") expected1 = []string{} testFn(byteArray1, expected1) byteArray1 = []byte("\n\n") expected1 = []string{"", ""} testFn(byteArray1, expected1) }
explode_data.jsonl/9283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 401 }
[ 2830, 3393, 4418, 16794, 3830, 79229, 1155, 353, 8840, 836, 8, 341, 18185, 24911, 1669, 2915, 19738, 1857, 3056, 3782, 11, 3601, 3056, 917, 8, 341, 197, 26327, 1669, 220, 15, 198, 197, 37043, 1552, 1669, 220, 15, 198, 197, 2023, 2587,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestServer_ModifyTx_corrupt(t *testing.T) { scheme := "unix" srcAddr, dstAddr := newUnixAddr(), newUnixAddr() defer func() { os.RemoveAll(srcAddr) os.RemoveAll(dstAddr) }() ln := listen(t, scheme, dstAddr, transport.TLSInfo{}) defer ln.Close() p := NewServer(ServerConfig{ Logger: testLogger, From: url.URL{Scheme: scheme, Host: srcAddr}, To: url.URL{Scheme: scheme, Host: dstAddr}, }) <-p.Ready() defer p.Close() p.ModifyTx(func(d []byte) []byte { d[len(d)/2]++ return d }) data := []byte("Hello World!") send(t, data, scheme, srcAddr, transport.TLSInfo{}) if d := receive(t, ln); bytes.Equal(d, data) { t.Fatalf("expected corrupted data, got %q", string(d)) } p.UnmodifyTx() send(t, data, scheme, srcAddr, transport.TLSInfo{}) if d := receive(t, ln); !bytes.Equal(d, data) { t.Fatalf("expected uncorrupted data, got %q", string(d)) } }
explode_data.jsonl/63064
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 5475, 71485, 1437, 31584, 14734, 6585, 1155, 353, 8840, 836, 8, 341, 1903, 8058, 1669, 330, 56646, 698, 41144, 13986, 11, 10648, 13986, 1669, 501, 55832, 13986, 1507, 501, 55832, 13986, 741, 16867, 2915, 368, 341, 197, 25078, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTempUrl verifies swift temporary URLs end to end: it uploads an
// object, sets the account temp-url-key, and checks that a GET on the
// signed URL returns the object's contents while a POST to the same
// GET-signed URL is rejected with 401.
func TestTempUrl(t *testing.T) {
	ctx := context.Background()
	c, rollback := makeConnectionWithContainer(t)
	defer rollback()
	err := c.ObjectPutBytes(ctx, CONTAINER, OBJECT, []byte(CONTENTS), "")
	if err != nil {
		t.Fatal(err)
	}
	defer func() {
		err = c.ObjectDelete(ctx, CONTAINER, OBJECT)
		if err != nil {
			t.Fatal(err)
		}
	}()
	// temp-url-key is the account-level shared secret used to sign URLs.
	m := swift.Metadata{}
	m["temp-url-key"] = SECRET_KEY
	err = c.AccountUpdate(ctx, m.AccountHeaders())
	if err != nil {
		t.Fatal(err)
	}
	expiresTime := time.Now().Add(20 * time.Minute)
	tempUrl := c.ObjectTempUrl(CONTAINER, OBJECT, SECRET_KEY, "GET", expiresTime)
	resp, err := http.Get(tempUrl)
	if err != nil {
		t.Fatal("Failed to retrieve file from temporary url")
	}
	defer func() {
		err := resp.Body.Close()
		if err != nil {
			t.Error("Close failed", err)
		}
	}()
	if resp.StatusCode == 401 {
		// Some servers do not enable the tempurl middleware; don't fail.
		t.Log("Server doesn't support tempurl")
	} else if resp.StatusCode != 200 {
		t.Fatal("HTTP Error retrieving file from temporary url", resp.StatusCode)
	} else {
		var content []byte
		if content, err = ioutil.ReadAll(resp.Body); err != nil || string(content) != CONTENTS {
			t.Error("Bad content", err)
		}
		// A URL signed for GET must not authorize other HTTP methods.
		// NOTE: resp is reassigned here; the deferred Close above now refers
		// to this new response (original behavior preserved).
		resp, err = http.Post(tempUrl, "image/jpeg", bytes.NewReader([]byte(CONTENTS)))
		if err != nil {
			t.Fatal("Failed to retrieve file from temporary url")
		}
		defer func() {
			err := resp.Body.Close()
			if err != nil {
				t.Error("Close failed", err)
			}
		}()
		if resp.StatusCode != 401 {
			t.Fatal("Expecting server to forbid access to object")
		}
	}
}
explode_data.jsonl/12718
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 607 }
[ 2830, 3393, 12151, 2864, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 11, 60414, 1669, 1281, 4526, 2354, 4502, 1155, 340, 16867, 60414, 741, 9859, 1669, 272, 8348, 19103, 7078, 7502, 11, 16120, 34521, 11, 39786, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVPCs_Create(t *testing.T) { setup() defer teardown() svc := client.VPCs path := "/v2/vpcs" want := vTestObj req := &VPCCreateRequest{ Name: "my-new-vpc", RegionSlug: "s2r7", } jsonBlob := ` { "vpc": ` + vTestJSON + ` } ` mux.HandleFunc(path, func(w http.ResponseWriter, r *http.Request) { c := new(VPCCreateRequest) err := json.NewDecoder(r.Body).Decode(c) if err != nil { t.Fatal(err) } testMethod(t, r, http.MethodPost) require.Equal(t, c, req) fmt.Fprint(w, jsonBlob) }) got, _, err := svc.Create(ctx, req) require.NoError(t, err) require.Equal(t, want, got) }
explode_data.jsonl/41241
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 53, 4872, 82, 34325, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 1903, 7362, 1669, 2943, 5058, 4872, 82, 198, 26781, 1669, 3521, 85, 17, 5457, 47313, 698, 50780, 1669, 348, 2271, 5261, 198, 24395, 1669, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMembersIntersect(t *testing.T) { members1 := Members{ {PKIid: common.PKIidType("p0"), Endpoint: "p0"}, {PKIid: common.PKIidType("p1"), Endpoint: "p1"}, } members2 := Members{ {PKIid: common.PKIidType("p1"), Endpoint: "p1"}, {PKIid: common.PKIidType("p2"), Endpoint: "p2"}, } assert.Equal(t, Members{{PKIid: common.PKIidType("p1"), Endpoint: "p1"}}, members1.Intersect(members2)) }
explode_data.jsonl/62279
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 24371, 3306, 9687, 1155, 353, 8840, 836, 8, 341, 2109, 7062, 16, 1669, 16954, 515, 197, 197, 90, 22242, 40, 307, 25, 4185, 1069, 80971, 307, 929, 445, 79, 15, 3975, 47269, 25, 330, 79, 15, 7115, 197, 197, 90, 22242, 40...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1