text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestServer_Response_Data_SniffLenType(t *testing.T) { const msg = "<html>this is HTML." testServerResponse(t, func(w http.ResponseWriter, r *http.Request) error { io.WriteString(w, msg) return nil }, func(st *serverTester) { getSlash(st) hf := st.wantHeaders() if hf.StreamEnded() { t.Fatal("don't want END_STREAM, expecting data") } if !hf.HeadersEnded() { t.Fatal("want END_HEADERS flag") } goth := st.decodeHeader(hf.HeaderBlockFragment()) wanth := [][2]string{ {":status", "200"}, {"content-type", "text/html; charset=utf-8"}, {"content-length", strconv.Itoa(len(msg))}, } if !reflect.DeepEqual(goth, wanth) { t.Errorf("Got headers %v; want %v", goth, wanth) } df := st.wantData() if !df.StreamEnded() { t.Error("expected DATA to have END_STREAM flag") } if got := string(df.Data()); got != msg { t.Errorf("got DATA %q; want %q", got, msg) } }) }
explode_data.jsonl/71667
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 401 }
[ 2830, 3393, 5475, 65873, 17817, 1098, 77, 3092, 11271, 929, 1155, 353, 8840, 836, 8, 341, 4777, 3750, 284, 4055, 1551, 89810, 374, 9308, 10040, 18185, 5475, 2582, 1155, 11, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 1465, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOpenIdImplicitFlowRejectsTokenWithoutAnAllowedDomainInEmailClaim(t *testing.T) { clockTime := time.Date(2021, 12, 1, 0, 0, 0, 0, time.UTC) util.Clock = util.ClockMock{Time: clockTime} cfg := config.NewConfig() cfg.LoginToken.SigningKey = "kiali67890123456" cfg.LoginToken.ExpirationSeconds = 1 cfg.Auth.OpenId.AllowedDomains = []string{ "foo.com", } config.Set(cfg) stateHash := sha256.Sum224([]byte(fmt.Sprintf("%s+%s+%s", "nonceString", clockTime.UTC().Format("060102150405"), config.GetSigningKey()))) // Same as openIdTestToken, but with an added email=jdoe@domain.com claim oidcToken := "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJqZG9lQGRvbWFpbi5jb20iLCJlbWFpbCI6Impkb2VAZG9tYWluLmNvbSIsIm5hbWUiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMiwibm9uY2UiOiIxYmE5YjgzNGQwOGFjODFmZWIzNGUyMDg0MDJlYjE4ZTkwOWJlMDg0NTE4YzMyODUxMDk0MDE4NCIsImV4cCI6MTYzODMxNjgwMX0.8oA-SgrQveJgmzCVOCrAQyQlswYwlWMAuUvGMJ8T748" requestBody := strings.NewReader(fmt.Sprintf("id_token=%s&state=%x-%s", oidcToken, stateHash, clockTime.UTC().Format("060102150405"))) request := httptest.NewRequest(http.MethodPost, "/api/authenticate", requestBody) request.Header.Add("Content-Type", "application/x-www-form-urlencoded") request.AddCookie(&http.Cookie{ Name: OpenIdNonceCookieName, Value: "nonceString", }) controller := NewOpenIdAuthController(CookieSessionPersistor{}, func(authInfo *api.AuthInfo) (*business.Layer, error) { assert.Failf(t, "business instantiator shouldn't have been called", "") return nil, nil }) rr := httptest.NewRecorder() sData, err := controller.Authenticate(request, rr) assert.Equal(t, "domain domain.com not allowed to login", err.Error()) assert.Nil(t, sData) // nonce cookie cleanup response := rr.Result() assert.Len(t, response.Cookies(), 1) assert.Equal(t, OpenIdNonceCookieName, response.Cookies()[0].Name) assert.True(t, clockTime.After(response.Cookies()[0].Expires)) }
explode_data.jsonl/72699
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 833 }
[ 2830, 3393, 5002, 764, 59558, 18878, 78413, 82, 3323, 26040, 2082, 35382, 13636, 641, 4781, 45544, 1155, 353, 8840, 836, 8, 341, 84165, 1462, 1669, 882, 8518, 7, 17, 15, 17, 16, 11, 220, 16, 17, 11, 220, 16, 11, 220, 15, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetDupSuppressProto(t *testing.T) { once.Do(testSetup) // Start two getters. The first should block (waiting reading // from stringc) and the second should latch on to the first // one. resc := make(chan *testpb.TestMessage, 2) for i := 0; i < 2; i++ { go func() { tm := new(testpb.TestMessage) if err := protoGroup.Get(dummyCtx, fromChan, ProtoSink(tm)); err != nil { tm.Name = proto.String("ERROR:" + err.Error()) } resc <- tm }() } // Wait a bit so both goroutines get merged together via // singleflight. // TODO(bradfitz): decide whether there are any non-offensive // debug/test hooks that could be added to singleflight to // make a sleep here unnecessary. time.Sleep(250 * time.Millisecond) // Unblock the first getter, which should unblock the second // as well. stringc <- "Fluffy" want := &testpb.TestMessage{ Name: proto.String("ECHO:Fluffy"), City: proto.String("SOME-CITY"), } for i := 0; i < 2; i++ { select { case v := <-resc: if !reflect.DeepEqual(v, want) { t.Errorf(" Got: %v\nWant: %v", proto.CompactTextString(v), proto.CompactTextString(want)) } case <-time.After(5 * time.Second): t.Errorf("timeout waiting on getter #%d of 2", i+1) } } }
explode_data.jsonl/62842
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 1949, 85713, 17670, 31549, 1155, 353, 8840, 836, 8, 341, 197, 13184, 33596, 8623, 21821, 340, 197, 322, 5145, 1378, 52894, 13, 576, 1156, 1265, 2504, 320, 49534, 5290, 198, 197, 322, 504, 914, 66, 8, 323, 279, 2086, 1265, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPackageCompletion(t *testing.T) { testenv.NeedsGo1Point(t, 14) const files = ` -- go.mod -- module mod.com go 1.12 -- fruits/apple.go -- package apple fun apple() int { return 0 } -- fruits/testfile.go -- // this is a comment /* this is a multiline comment */ import "fmt" func test() {} -- fruits/testfile2.go -- package -- fruits/testfile3.go -- pac -- 123f_r.u~its-123/testfile.go -- package -- .invalid-dir@-name/testfile.go -- package ` var ( testfile4 = "" testfile5 = "/*a comment*/ " testfile6 = "/*a comment*/\n" ) for _, tc := range []struct { name string filename string content *string triggerRegexp string want []string editRegexp string }{ { name: "package completion at valid position", filename: "fruits/testfile.go", triggerRegexp: "\n()", want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, editRegexp: "\n()", }, { name: "package completion in a comment", filename: "fruits/testfile.go", triggerRegexp: "th(i)s", want: nil, }, { name: "package completion in a multiline comment", filename: "fruits/testfile.go", triggerRegexp: `\/\*\n()`, want: nil, }, { name: "package completion at invalid position", filename: "fruits/testfile.go", triggerRegexp: "import \"fmt\"\n()", want: nil, }, { name: "package completion after keyword 'package'", filename: "fruits/testfile2.go", triggerRegexp: "package()", want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, editRegexp: "package\n", }, { name: "package completion with 'pac' prefix", filename: "fruits/testfile3.go", triggerRegexp: "pac()", want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, editRegexp: "pac", }, { name: "package completion for empty file", filename: "fruits/testfile4.go", triggerRegexp: "^$", content: &testfile4, want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, 
editRegexp: "^$", }, { name: "package completion without terminal newline", filename: "fruits/testfile5.go", triggerRegexp: `\*\/ ()`, content: &testfile5, want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, editRegexp: `\*\/ ()`, }, { name: "package completion on terminal newline", filename: "fruits/testfile6.go", triggerRegexp: `\*\/\n()`, content: &testfile6, want: []string{"package apple", "package apple_test", "package fruits", "package fruits_test", "package main"}, editRegexp: `\*\/\n()`, }, // Issue golang/go#44680 { name: "package completion for dir name with punctuation", filename: "123f_r.u~its-123/testfile.go", triggerRegexp: "package()", want: []string{"package fruits123", "package fruits123_test", "package main"}, editRegexp: "package\n", }, { name: "package completion for invalid dir name", filename: ".invalid-dir@-name/testfile.go", triggerRegexp: "package()", want: []string{"package main"}, editRegexp: "package\n", }, } { t.Run(tc.name, func(t *testing.T) { Run(t, files, func(t *testing.T, env *Env) { if tc.content != nil { env.WriteWorkspaceFile(tc.filename, *tc.content) env.Await( env.DoneWithChangeWatchedFiles(), ) } env.OpenFile(tc.filename) completions := env.Completion(tc.filename, env.RegexpSearch(tc.filename, tc.triggerRegexp)) // Check that the completion item suggestions are in the range // of the file. 
lineCount := len(strings.Split(env.Editor.BufferText(tc.filename), "\n")) for _, item := range completions.Items { if start := int(item.TextEdit.Range.Start.Line); start >= lineCount { t.Fatalf("unexpected text edit range start line number: got %d, want less than %d", start, lineCount) } if end := int(item.TextEdit.Range.End.Line); end >= lineCount { t.Fatalf("unexpected text edit range end line number: got %d, want less than %d", end, lineCount) } } if tc.want != nil { start, end := env.RegexpRange(tc.filename, tc.editRegexp) expectedRng := protocol.Range{ Start: fake.Pos.ToProtocolPosition(start), End: fake.Pos.ToProtocolPosition(end), } for _, item := range completions.Items { gotRng := item.TextEdit.Range if expectedRng != gotRng { t.Errorf("unexpected completion range for completion item %s: got %v, want %v", item.Label, gotRng, expectedRng) } } } diff := compareCompletionResults(tc.want, completions.Items) if diff != "" { t.Error(diff) } }) }) } }
explode_data.jsonl/50188
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2281 }
[ 2830, 3393, 13100, 33190, 1155, 353, 8840, 836, 8, 341, 18185, 3160, 2067, 68, 6767, 10850, 16, 2609, 1155, 11, 220, 16, 19, 340, 4777, 3542, 284, 22074, 313, 728, 10929, 39514, 4352, 1463, 905, 271, 3346, 220, 16, 13, 16, 17, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestDiv(t *testing.T) { t.Parallel() s1 := []float64{5, 12, 27} s2 := []float64{1, 2, 3} ans := []float64{5, 6, 9} Div(s1, s2) if !EqualApprox(s1, ans, EqTolerance) { t.Errorf("Div doesn't give correct answer") } s1short := []float64{1} if !Panics(func() { Div(s1short, s2) }) { t.Errorf("Did not panic with unequal lengths") } s2short := []float64{1} if !Panics(func() { Div(s1, s2short) }) { t.Errorf("Did not panic with unequal lengths") } }
explode_data.jsonl/1216
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 12509, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1903, 16, 1669, 3056, 3649, 21, 19, 90, 20, 11, 220, 16, 17, 11, 220, 17, 22, 532, 1903, 17, 1669, 3056, 3649, 21, 19, 90, 16, 11, 220, 17, 11, 220, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseSnapListAllExeTest(t *testing.T) { baseDir, err := ioutil.TempDir("", t.Name()) require.NoError(t, err) defer os.RemoveAll(baseDir) repoDir, err := ioutil.TempDir(baseDir, "repo") require.NoError(t, err) sourceDir, err := ioutil.TempDir(baseDir, "source") require.NoError(t, err) ks, err := NewKopiaSnapshotter(repoDir) if errors.Is(err, ErrExeVariableNotSet) { t.Skip("KOPIA_EXE not set, skipping test") } require.NoError(t, err) err = ks.ConnectOrCreateFilesystem(repoDir) require.NoError(t, err) // Empty snapshot list snapIDListSnap, err := ks.snapIDsFromSnapListAll() require.NoError(t, err) if got, want := len(snapIDListSnap), 0; got != want { t.Errorf("Snapshot list (len %d) should be empty", got) } fmt.Println(snapIDIsLastInList("asdf", snapIDListSnap)) const numSnapsToTest = 5 for snapCount := 0; snapCount < numSnapsToTest; snapCount++ { snapID, err := ks.CreateSnapshot(sourceDir) require.NoError(t, err) // Validate the list against kopia snapshot list --all snapIDListSnap, err := ks.snapIDsFromSnapListAll() require.NoError(t, err) if got, want := len(snapIDListSnap), snapCount+1; got != want { t.Errorf("Snapshot list len (%d) does not match expected number of snapshots (%d)", got, want) } if !snapIDIsLastInList(snapID, snapIDListSnap) { t.Errorf("Snapshot ID that was just created %s was not in the snapshot list", snapID) } // Validate the list against kopia snapshot list --all snapIDListMan, err := ks.snapIDsFromManifestList() require.NoError(t, err) if got, want := len(snapIDListMan), snapCount+1; got != want { t.Errorf("Snapshot list len (%d) does not match expected number of snapshots (%d)", got, want) } if !snapIDIsLastInList(snapID, snapIDListSnap) { t.Errorf("Snapshot ID that was just created %s was not in the manifest list", snapID) } } }
explode_data.jsonl/21781
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 705 }
[ 2830, 3393, 14463, 61871, 852, 2403, 840, 68, 2271, 1155, 353, 8840, 836, 8, 341, 24195, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 259, 2967, 2398, 17957, 35699, 1155, 11, 1848, 692, 16867, 2643, 84427, 12663, 6184, 692, 17200, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestConnParameters(t *testing.T) { must := require.New(t) at := assert.New(t) tests := []struct { para ConnParameters out string }{ { ConnParameters{ time.Second * 10, time.Second * 5, "vCcJKmYQcIf801WDAAAB", []string{"websocket", "polling"}, }, "{\"sid\":\"vCcJKmYQcIf801WDAAAB\",\"upgrades\":[\"websocket\",\"polling\"],\"pingInterval\":10000,\"pingTimeout\":5000}\n", }, } for _, test := range tests { buf := bytes.NewBuffer(nil) n, err := test.para.WriteTo(buf) must.Nil(err) at.Equal(int64(len(test.out)), n) at.Equal(test.out, buf.String()) conn, err := ReadConnParameters(buf) must.Nil(err) at.Equal(test.para, conn) } }
explode_data.jsonl/67218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 319 }
[ 2830, 3393, 9701, 9706, 1155, 353, 8840, 836, 8, 341, 2109, 590, 1669, 1373, 7121, 1155, 340, 35447, 1669, 2060, 7121, 1155, 692, 78216, 1669, 3056, 1235, 341, 197, 197, 14794, 18213, 9706, 198, 197, 13967, 220, 914, 198, 197, 59403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGeneralPredicates(t *testing.T) { resourceTests := []struct { pod *v1.Pod nodeInfo *schedulerframework.NodeInfo node *v1.Node fits bool name string wErr error reasons []PredicateFailureReason }{ { pod: &v1.Pod{}, nodeInfo: schedulerframework.NewNodeInfo( newResourcePod(schedulerframework.Resource{MilliCPU: 9, Memory: 19})), node: &v1.Node{ ObjectMeta: metav1.ObjectMeta{Name: "machine1"}, Status: v1.NodeStatus{Capacity: makeResources(10, 20, 32, 0, 0, 0).Capacity, Allocatable: makeAllocatableResources(10, 20, 32, 0, 0, 0)}, }, fits: true, wErr: nil, name: "no resources/port/host requested always fits", }, { pod: newResourcePod(schedulerframework.Resource{MilliCPU: 8, Memory: 10}), nodeInfo: schedulerframework.NewNodeInfo( newResourcePod(schedulerframework.Resource{MilliCPU: 5, Memory: 19})), node: &v1.Node{ ObjectMeta: metav1.ObjectMeta{Name: "machine1"}, Status: v1.NodeStatus{Capacity: makeResources(10, 20, 32, 0, 0, 0).Capacity, Allocatable: makeAllocatableResources(10, 20, 32, 0, 0, 0)}, }, fits: false, wErr: nil, reasons: []PredicateFailureReason{ &InsufficientResourceError{ResourceName: v1.ResourceCPU, Requested: 8, Used: 5, Capacity: 10}, &InsufficientResourceError{ResourceName: v1.ResourceMemory, Requested: 10, Used: 19, Capacity: 20}, }, name: "not enough cpu and memory resource", }, { pod: &v1.Pod{ Spec: v1.PodSpec{ NodeName: "machine2", }, }, nodeInfo: schedulerframework.NewNodeInfo(), node: &v1.Node{ ObjectMeta: metav1.ObjectMeta{Name: "machine1"}, Status: v1.NodeStatus{Capacity: makeResources(10, 20, 32, 0, 0, 0).Capacity, Allocatable: makeAllocatableResources(10, 20, 32, 0, 0, 0)}, }, fits: false, wErr: nil, reasons: []PredicateFailureReason{&PredicateFailureError{nodename.Name, nodename.ErrReason}}, name: "host not match", }, { pod: newPodWithPort(123), nodeInfo: schedulerframework.NewNodeInfo(newPodWithPort(123)), node: &v1.Node{ ObjectMeta: metav1.ObjectMeta{Name: "machine1"}, Status: v1.NodeStatus{Capacity: makeResources(10, 20, 32, 0, 0, 
0).Capacity, Allocatable: makeAllocatableResources(10, 20, 32, 0, 0, 0)}, }, fits: false, wErr: nil, reasons: []PredicateFailureReason{&PredicateFailureError{nodeports.Name, nodeports.ErrReason}}, name: "hostport conflict", }, } for _, test := range resourceTests { t.Run(test.name, func(t *testing.T) { test.nodeInfo.SetNode(test.node) reasons, err := GeneralPredicates(test.pod, test.nodeInfo) fits := len(reasons) == 0 && err == nil if err != nil { t.Errorf("unexpected error: %v", err) } if !fits && !reflect.DeepEqual(reasons, test.reasons) { t.Errorf("unexpected failure reasons: %v, want: %v", reasons, test.reasons) } if fits != test.fits { t.Errorf("expected: %v got %v", test.fits, fits) } }) } }
explode_data.jsonl/42
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1288 }
[ 2830, 3393, 15415, 50925, 24821, 1155, 353, 8840, 836, 8, 341, 50346, 18200, 1669, 3056, 1235, 341, 197, 3223, 347, 414, 353, 85, 16, 88823, 198, 197, 20831, 1731, 353, 63122, 3794, 21714, 1731, 198, 197, 20831, 257, 353, 85, 16, 2171...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDaoReportLogList(t *testing.T) { var ( c = context.TODO() sql = "" start = int32(0) end = int32(0) ) convey.Convey("ReportLogList", t, func(ctx convey.C) { res, tids, err := d.ReportLogList(c, sql, start, end) ctx.Convey("Then err should be nil.res,tids should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(tids, convey.ShouldHaveLength, 0) ctx.So(res, convey.ShouldHaveLength, 0) }) }) }
explode_data.jsonl/51304
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 12197, 10361, 2201, 852, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 257, 284, 2266, 90988, 741, 197, 30633, 256, 284, 8389, 197, 21375, 284, 526, 18, 17, 7, 15, 340, 197, 6246, 256, 284, 526, 18, 17, 7, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapProxy_KeySetWihPredicate(t *testing.T) { expected := "5" for i := 0; i < 10; i++ { mp.Put(strconv.Itoa(i), int32(i)) } keySet, _ := mp.KeySetWithPredicate(Equal("this", "5")) if len(keySet) != 1 || keySet[0].(string) != expected { t.Fatalf("map KeySetWithPredicate failed") } }
explode_data.jsonl/57016
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 2227, 16219, 35253, 1649, 54, 6996, 36329, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 330, 20, 698, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 26, 600, 1027, 341, 197, 53230, 39825, 4199, 12027, 64109, 1956, 70...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHappysMax(t *testing.T) { start := int64(math.MaxInt64 - 1000) happys := gomath.Happys(start) found := false happy, ok := happys.Next() for ; ok; happy, ok = happys.Next() { assertTrue(t, happy >= start) found = true } assertTrue(t, found) }
explode_data.jsonl/23247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 39, 676, 1047, 5974, 1155, 353, 8840, 836, 8, 341, 21375, 1669, 526, 21, 19, 37270, 14535, 1072, 21, 19, 481, 220, 16, 15, 15, 15, 340, 9598, 676, 1047, 1669, 342, 316, 587, 3839, 676, 1047, 10639, 340, 58102, 1669, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestZscan(t *testing.T) { testRaw(t, func(c *client) { // No set yet c.Do("ZSCAN", "h", "0") c.Do("ZADD", "h", "1.0", "key1") c.Do("ZSCAN", "h", "0") c.Do("ZSCAN", "h", "0", "COUNT", "12") c.Do("ZSCAN", "h", "0", "cOuNt", "12") c.Do("ZADD", "h", "2.0", "anotherkey") c.Do("ZSCAN", "h", "0", "MATCH", "anoth*") c.Do("ZSCAN", "h", "0", "MATCH", "anoth*", "COUNT", "100") c.Do("ZSCAN", "h", "0", "COUNT", "100", "MATCH", "anoth*") // Can't really test multiple keys. // c.Do("SET", "key2", "value2") // c.Do("SCAN", "0") // Error cases c.Error("wrong number", "ZSCAN") c.Error("wrong number", "ZSCAN", "noint") c.Error("not an integer", "ZSCAN", "h", "0", "COUNT", "noint") c.Error("syntax error", "ZSCAN", "h", "0", "COUNT") c.Error("syntax error", "ZSCAN", "h", "0", "MATCH") c.Error("syntax error", "ZSCAN", "h", "0", "garbage") c.Error("syntax error", "ZSCAN", "h", "0", "COUNT", "12", "MATCH", "foo", "garbage") // c.Do("ZSCAN", "nosuch", "0", "COUNT", "garbage") c.Do("SET", "str", "1") c.Error("wrong kind", "ZSCAN", "str", "0") }) }
explode_data.jsonl/23351
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 553 }
[ 2830, 3393, 57, 16405, 1155, 353, 8840, 836, 8, 341, 18185, 20015, 1155, 11, 2915, 1337, 353, 2972, 8, 341, 197, 197, 322, 2308, 738, 3602, 198, 197, 1444, 33596, 445, 57, 92559, 497, 330, 71, 497, 330, 15, 5130, 197, 1444, 33596, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVersionExtraBytes(t *testing.T) { extraBytes := []byte("junk") h1 := NewHeight(10, 100) b := h1.ToBytes() b1 := append(b, extraBytes...) h2, n := NewHeightFromBytes(b1) testutil.AssertEquals(t, h2, h1) testutil.AssertEquals(t, n, len(b)) testutil.AssertEquals(t, b1[n:], extraBytes) }
explode_data.jsonl/20999
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 5637, 11612, 7078, 1155, 353, 8840, 836, 8, 341, 8122, 2172, 7078, 1669, 3056, 3782, 445, 73, 3122, 1138, 9598, 16, 1669, 1532, 3640, 7, 16, 15, 11, 220, 16, 15, 15, 340, 2233, 1669, 305, 16, 3274, 7078, 741, 2233, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFQDNValidation(t *testing.T) { tests := []struct { param string expected bool }{ {"test.example.com", true}, {"example.com", true}, {"example24.com", true}, {"test.example24.com", true}, {"test24.example24.com", true}, {"test.example.com.", true}, {"example.com.", true}, {"example24.com.", true}, {"test.example24.com.", true}, {"test24.example24.com.", true}, {"test24.example24.com..", false}, {"example", false}, {"192.168.0.1", false}, {"email@example.com", false}, {"2001:cdba:0000:0000:0000:0000:3257:9652", false}, {"2001:cdba:0:0:0:0:3257:9652", false}, {"2001:cdba::3257:9652", false}, {"", false}, } validate := New() for i, test := range tests { errs := validate.Var(test.param, "fqdn") if test.expected { if !IsEqual(errs, nil) { t.Fatalf("Index: %d fqdn failed Error: %v", i, errs) } } else { if IsEqual(errs, nil) { t.Fatalf("Index: %d fqdn failed Error: %v", i, errs) } else { val := getError(errs, "", "") if val.Tag() != "fqdn" { t.Fatalf("Index: %d fqdn failed Error: %v", i, errs) } } } } }
explode_data.jsonl/77354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 516 }
[ 2830, 3393, 37, 48, 31264, 13799, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 36037, 262, 914, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 4913, 1944, 7724, 905, 497, 830, 1583, 197, 197, 4913, 8687, 905, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestRejectUnhealthyRemove(t *testing.T) { defer testutil.AfterTest(t) c := NewCluster(t, 5) for _, m := range c.Members { m.ServerConfig.StrictReconfigCheck = true } c.Launch(t) defer c.Terminate(t) // make cluster unhealthy and wait for downed peer; (3 up, 2 down) c.Members[0].Stop(t) c.Members[1].Stop(t) c.WaitLeader(t) // reject remove active member since (3,2)-(1,0) => (2,2) lacks quorum err := c.removeMember(t, uint64(c.Members[2].s.ID())) if err == nil { t.Fatalf("should reject quorum breaking remove") } // TODO: client should return more descriptive error codes for internal errors if !strings.Contains(err.Error(), "has no leader") { t.Errorf("unexpected error (%v)", err) } // member stopped after launch; wait for missing heartbeats time.Sleep(time.Duration(electionTicks * int(tickDuration))) // permit remove dead member since (3,2) - (0,1) => (3,1) has quorum if err = c.removeMember(t, uint64(c.Members[0].s.ID())); err != nil { t.Fatalf("should accept removing down member") } // bring cluster to (4,1) c.Members[0].Restart(t) // restarted member must be connected for a HealthInterval before remove is accepted time.Sleep((3 * etcdserver.HealthInterval) / 2) // accept remove member since (4,1)-(1,0) => (3,1) has quorum if err = c.removeMember(t, uint64(c.Members[0].s.ID())); err != nil { t.Fatalf("expected to remove member, got error %v", err) } }
explode_data.jsonl/16305
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 522 }
[ 2830, 3393, 78413, 1806, 37028, 13021, 1155, 353, 8840, 836, 8, 341, 16867, 1273, 1314, 36892, 2271, 1155, 340, 1444, 1669, 1532, 28678, 1155, 11, 220, 20, 340, 2023, 8358, 296, 1669, 2088, 272, 91758, 341, 197, 2109, 22997, 2648, 77428...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidateCryptoKeyConfig(t *testing.T) { var testcases = []struct { name string config *CryptoKeyConfig shouldErr bool err error }{ { name: "default shared key in default context for verify", config: &CryptoKeyConfig{ ID: "0", Usage: "verify", TokenName: "foobar token", Source: "config", Algorithm: "hmac", Secret: "foobar", TokenLifetime: 900, parsed: true, }, }, { name: "invalid key usage", config: &CryptoKeyConfig{ ID: "0", Usage: "both", TokenName: "foobar token", Source: "config", Algorithm: "hmac", Secret: "foobar", TokenLifetime: 900, parsed: true, }, shouldErr: true, err: fmt.Errorf("key usage %q is invalid", "both"), }, { name: "empty key usage", config: &CryptoKeyConfig{ ID: "0", TokenName: "foobar token", Source: "config", Algorithm: "hmac", Secret: "foobar", TokenLifetime: 900, parsed: true, }, shouldErr: true, err: fmt.Errorf("key usage is not set"), }, { name: "invalid key source", config: &CryptoKeyConfig{ ID: "0", Usage: "verify", TokenName: "foobar token", Source: "foo", Algorithm: "hmac", Secret: "foobar", TokenLifetime: 900, parsed: true, }, shouldErr: true, err: fmt.Errorf("key source %q is invalid", "foo"), }, { name: "empty key source", config: &CryptoKeyConfig{ ID: "0", Usage: "verify", TokenName: "foobar token", Algorithm: "hmac", Secret: "foobar", TokenLifetime: 900, parsed: true, }, shouldErr: true, err: fmt.Errorf("key source not found"), }, { name: "invalid key algo", config: &CryptoKeyConfig{ ID: "0", Usage: "verify", TokenName: "foobar token", Source: "config", Algorithm: "foo", Secret: "foobar", TokenLifetime: 900, parsed: true, }, shouldErr: true, err: fmt.Errorf("key algorithm %q is invalid", "foo"), }, { name: "empty source type for env", config: &CryptoKeyConfig{ ID: "cb315f43c868", Usage: "verify", Source: "env", EnvVarName: "JWT_SECRET_KEY", TokenName: "access_token", TokenLifetime: 900, parsed: true, validated: true, }, shouldErr: true, err: fmt.Errorf("key source type for env not set"), }, { name: "invalid source type 
for env", config: &CryptoKeyConfig{ ID: "cb315f43c868", Usage: "verify", Source: "env", EnvVarName: "JWT_SECRET_KEY", EnvVarType: "foo", TokenName: "access_token", TokenLifetime: 900, parsed: true, validated: true, }, shouldErr: true, err: fmt.Errorf("key source type %q for env is invalid", "foo"), }, } for _, tc := range testcases { t.Run(tc.name, func(t *testing.T) { msgs := []string{fmt.Sprintf("test name: %s", tc.name)} msgs = append(msgs, fmt.Sprintf("config: %v", tc.config)) err := tc.config.validate() if tests.EvalErrWithLog(t, err, nil, tc.shouldErr, tc.err, msgs) { return } }) } }
explode_data.jsonl/49056
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1872 }
[ 2830, 3393, 17926, 58288, 1592, 2648, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 23910, 284, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 25873, 262, 353, 58288, 1592, 2648, 198, 197, 197, 5445, 7747, 1807, 198, 197, 9859, 981, 146...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNonZero(t *testing.T) { var empty string tests := []struct { desc string v interface{} err error }{ {"nil", nil, errors.New(`"x" is nil`)}, {"zero bool", false, errors.New(`"x" is zero value`)}, {"zero string", "", errors.New(`"x" is zero value`)}, {"zero int", int(0), errors.New(`"x" is zero value`)}, {"zero int8", int8(0), errors.New(`"x" is zero value`)}, {"zero int16", int16(0), errors.New(`"x" is zero value`)}, {"zero int32", int32(0), errors.New(`"x" is zero value`)}, {"zero int64", int64(0), errors.New(`"x" is zero value`)}, {"zero uint", uint(0), errors.New(`"x" is zero value`)}, {"zero uint8", uint8(0), errors.New(`"x" is zero value`)}, {"zero uint16", uint16(0), errors.New(`"x" is zero value`)}, {"zero uint32", uint32(0), errors.New(`"x" is zero value`)}, {"zero uint64", uint64(0), errors.New(`"x" is zero value`)}, {"zero float32", float32(0), errors.New(`"x" is zero value`)}, {"zero float64", float64(0), errors.New(`"x" is zero value`)}, {"ptr to zero value", &empty, errors.New(`"*x" is zero value`)}, {"empty slice", []string{}, errors.New(`"x" is empty slice`)}, {"slice with zero value", []string{""}, errors.New(`"x[0]" is zero value`)}, {"empty map", map[string]string{}, errors.New(`"x" is empty map`)}, {"map with zero value key", map[string]string{"": "y"}, errors.New(`"x" has zero value map key`)}, {"map with zero value elem", map[string]string{"y": ""}, errors.New(`"x[y]" is zero value`)}, {"struct with nil field", struct{ Y *int }{}, errors.New(`"x.Y" is nil`)}, {"struct with zero value field", struct{ Y string }{}, errors.New(`"x.Y" is zero value`)}, {"struct with empty array", struct{ Y []string }{}, errors.New(`"x.Y" is empty slice`)}, } for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { if got, want := nonZero("x", nil, tt.v), tt.err; !reflect.DeepEqual(got, want) { t.Fatalf("got error %v want %v", got, want) } }) } }
explode_data.jsonl/67907
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 823 }
[ 2830, 3393, 8121, 17999, 1155, 353, 8840, 836, 8, 341, 2405, 4287, 914, 271, 78216, 1669, 3056, 1235, 341, 197, 41653, 914, 198, 197, 5195, 262, 3749, 16094, 197, 9859, 220, 1465, 198, 197, 59403, 197, 197, 4913, 8385, 497, 2092, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMetaAddress_Receive_LegacySegwit_Address(t *testing.T) { path := NewDerivationPath(BaseCoinBip49MainNet, 0, 0) wallet := NewHDWalletFromWords(w, BaseCoinBip49MainNet) usableAddress, err := newUsableAddressWithDerivationPath(wallet, path) assert.Nil(t, err) meta, err := usableAddress.MetaAddress() assert.Nil(t, err) expectedAddr := "37VucYSaXLCAsxYyAPfbSi9eh4iEcbShgf" expectedPubkey := "049b3b694b8fc5b5e07fb069c783cac754f5d38c3e08bed1960e31fdb1dda35c2449bdd1f0ae7d37a04991d4f5927efd359c13189437d9eae0faf7d003ffd04c89" assert.Equal(t, expectedAddr, meta.Address) assert.Equal(t, path, meta.DerivationPath) assert.Equal(t, expectedPubkey, meta.UncompressedPublicKey) }
explode_data.jsonl/64005
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 12175, 4286, 62, 14742, 2351, 791, 2757, 10998, 88519, 64899, 1155, 353, 8840, 836, 8, 341, 26781, 1669, 1532, 22171, 39127, 1820, 22225, 41180, 33, 573, 19, 24, 6202, 6954, 11, 220, 15, 11, 220, 15, 340, 6692, 7464, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncodeAssuredCalls(t *testing.T) { resp := httptest.NewRecorder() expected, err := ioutil.ReadFile("../testdata/calls.json") require.NoError(t, err) err = encodeAssuredCall(ctx, resp, []*Call{testCall1(), testCall2(), testCall3()}) require.NoError(t, err) require.Equal(t, "application/json", resp.HeaderMap.Get("Content-Type")) require.JSONEq(t, string(expected), resp.Body.String()) }
explode_data.jsonl/20261
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 32535, 5615, 3073, 55292, 1155, 353, 8840, 836, 8, 341, 34653, 1669, 54320, 70334, 7121, 47023, 741, 42400, 11, 1848, 1669, 43144, 78976, 17409, 92425, 2899, 5583, 4323, 1138, 17957, 35699, 1155, 11, 1848, 340, 9859, 284, 1616...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIssue26989(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("set names utf8mb4 collate utf8mb4_general_ci;") tk.MustQuery("select position('a' in 'AA');").Check(testkit.Rows("0")) tk.MustQuery("select locate('a', 'AA');").Check(testkit.Rows("0")) tk.MustQuery("select locate('a', 'a');").Check(testkit.Rows("1")) }
explode_data.jsonl/65539
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 42006, 17, 21, 24, 23, 24, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApiHandler(t *testing.T) { req, err := http.NewRequest("GET", "/api", nil) test.Assert(t, err == nil, "Unable to create request") rr := httptest.NewRecorder() handler := http.HandlerFunc(APIHandler) handler.ServeHTTP(rr, req) test.Assert(t, rr.Code == http.StatusOK, "Unexpected status code") test.Assert(t, rr.Header().Get("Content-Type") == "application/json", fmt.Sprintf("Unexpected content type %s", rr.Header().Get("Content-Type"))) var entity map[string]string json.Unmarshal(rr.Body.Bytes(), &entity) test.Assert(t, entity["name"] == "go-api-server-template", "Unexpected name") test.Assert(t, entity["message"] == "You have reached the /api endpoint!", "Unexpeted message") }
explode_data.jsonl/70906
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 6563, 3050, 1155, 353, 8840, 836, 8, 341, 24395, 11, 1848, 1669, 1758, 75274, 445, 3806, 497, 3521, 2068, 497, 2092, 340, 18185, 11711, 1155, 11, 1848, 621, 2092, 11, 330, 17075, 311, 1855, 1681, 5130, 197, 634, 1669, 5432...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIncident_ResponderRequest(t *testing.T) { setup() defer teardown() id := "1" mux.HandleFunc("/incidents/"+id+"/responder_requests", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") _, _ = w.Write([]byte(`{ "responder_request": { "requester": { "id": "PL1JMK5", "type": "user_reference" }, "message": "Help", "responder_request_targets": [{ "responder_request_target": { "id": "PJ25ZYX", "type": "user_reference", "incident_responders": { "state": "pending", "user": { "id": "PJ25ZYX" } } } }] } }`)) }) client := defaultTestClient(server.URL, "foo") from := "foo@bar.com" r := ResponderRequestTarget{} r.ID = "PJ25ZYX" r.Type = "user_reference" targets := []ResponderRequestTargets{ ResponderRequestTargets{Target: r}, } input := ResponderRequestOptions{ From: from, Message: "help", RequesterID: "PL1JMK5", Targets: targets, } user := User{} user.ID = "PL1JMK5" user.Type = "user_reference" target := ResponderRequestTarget{} target.ID = "PJ25ZYX" target.Type = "user_reference" target.Responders.State = "pending" target.Responders.User.ID = "PJ25ZYX" targets = []ResponderRequestTargets{ ResponderRequestTargets{Target: target}, } want := &ResponderRequestResponse{ ResponderRequest: ResponderRequest{ Incident: Incident{}, Requester: user, Message: "Help", Targets: targets, }, } res, err := client.ResponderRequest(id, input) if err != nil { t.Fatal(err) } testEqual(t, want, res) }
explode_data.jsonl/76402
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 712 }
[ 2830, 3393, 39245, 1713, 62, 30884, 1900, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 15710, 1669, 330, 16, 698, 2109, 2200, 63623, 4283, 2840, 6880, 33778, 307, 27569, 416, 20328, 37216, 497, 2915, 3622, 1758, 37508, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSyncFileFromIndexErrorsInExportRoot(t *testing.T) { t.Parallel() ctx := project.TestContext(t) testDB, _ := testDatabaseInstance.NewDatabase(t) exportImportDB := exportimportdb.New(testDB) fromTime := time.Now().UTC().Add(-1 * time.Second) config := &model.ExportImport{ IndexFile: "index.txt", ExportRoot: "%zzzzz", Region: "US", From: fromTime, Thru: nil, } if err := exportImportDB.AddConfig(ctx, config); err != nil { t.Fatal(err) } // test data ensures that URL parsing strips extra slashes. index := strings.Join([]string{"a.zip", "/b.zip", "//c.zip", ""}, "\n") _, _, err := syncFilesFromIndex(ctx, exportImportDB, config, index) errcmp.MustMatch(t, err, "invalid URL escape") }
explode_data.jsonl/15394
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 12154, 1703, 3830, 1552, 13877, 641, 16894, 8439, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 1669, 2390, 8787, 1972, 1155, 340, 18185, 3506, 11, 716, 1669, 1273, 5988, 2523, 7121, 5988, 1155, 340, 59440, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReset(t *testing.T) { config := must.Config(rdb.ParseConfigURL(testConnectionString)) config.PoolInitCapacity = 1 config.PoolMaxCapacity = 1 config.ResetQuery = `set XACT_ABORT on;` db := must.Open(config) defer db.Close() cmd := &rdb.Command{Sql: `select 16384 & @@OPTIONS;`} for i := range [100]struct{}{} { res := db.Query(cmd) v := 0 res.Scan(&v) res.Close() if v == 0 { t.Fail() t.Logf("Run %d: Should always be 1, but value is 0", i+1) } } }
explode_data.jsonl/74086
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 14828, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 1969, 10753, 2601, 1999, 8937, 2648, 3144, 8623, 40431, 1171, 25873, 89701, 3803, 29392, 284, 220, 16, 198, 25873, 89701, 5974, 29392, 284, 220, 16, 198, 25873, 36660, 2859, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuildConfigWithImageSource(t *testing.T) { source := &SourceRef{ Name: "binarybuild", SourceImage: &ImageRef{ Reference: reference.DockerImageReference{ Name: "foo", Registry: "bar", }, }, } build := &BuildRef{Source: source, Binary: false} config, err := build.BuildConfig() if err != nil { t.Fatalf("unexpected error: %v", err) } foundICT := false foundCCT := false for _, trigger := range config.Spec.Triggers { if trigger.Type == buildv1.ImageChangeBuildTriggerType { foundICT = true } if trigger.Type == buildv1.ConfigChangeBuildTriggerType { foundCCT = true } } if !foundICT { t.Fatalf("expected to find an imagechangetrigger on the build") } if !foundCCT { t.Fatalf("expected to find a configchangetrigger on the build") } }
explode_data.jsonl/17584
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 11066, 2648, 2354, 1906, 3608, 1155, 353, 8840, 836, 8, 341, 47418, 1669, 609, 3608, 3945, 515, 197, 21297, 25, 330, 25891, 5834, 756, 197, 197, 3608, 1906, 25, 609, 1906, 3945, 515, 298, 197, 8856, 25, 5785, 909, 13659, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAccessors(t *testing.T) { require := require.New(t) // Root domain name, err := NewName(".") require.Nil(err) require.True(name.IsRoot()) require.True(name.IsFQDN()) // Simple FQDN name, err = NewName("example.com.") require.Nil(err) require.False(name.IsRoot()) require.True(name.IsFQDN()) // Simple domain name, err = NewName("example.com") require.Nil(err) require.False(name.IsRoot()) require.False(name.IsFQDN()) // FQDN conversion name = name.ToFQDN() require.Equal("example.com.", name.String()) require.False(name.IsRoot()) require.True(name.IsFQDN()) }
explode_data.jsonl/47253
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 6054, 1087, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 197, 322, 18854, 7947, 198, 11609, 11, 1848, 1669, 1532, 675, 5680, 1138, 17957, 59678, 3964, 340, 17957, 32443, 3153, 4506, 8439, 2398, 17957, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMemoryStoreSequentialAccess(t *testing.T) { tests.TestStoreSequentialAccess(t, memory.NewStoreWithOptions(limiter.StoreOptions{ Prefix: "limiter:memory:sequential", CleanUpInterval: 30 * time.Second, })) }
explode_data.jsonl/1375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 10642, 6093, 22046, 6054, 1155, 353, 8840, 836, 8, 341, 78216, 8787, 6093, 22046, 6054, 1155, 11, 4938, 7121, 6093, 74238, 2333, 17700, 38047, 3798, 515, 197, 10025, 5060, 25, 688, 330, 4659, 2015, 25, 17269, 25, 92350, 756,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestUntarInvalidHardlink(t *testing.T) { // TODO Windows. There may be a way of running this, but turning off for now if runtime.GOOS == "windows" { t.Skip("hardlinks on Windows") } for i, headers := range [][]*tar.Header{ { // try reading victim/hello (../) { Name: "dotdot", Typeflag: tar.TypeLink, Linkname: "../victim/hello", Mode: 0644, }, }, { // try reading victim/hello (/../) { Name: "slash-dotdot", Typeflag: tar.TypeLink, // Note the leading slash Linkname: "/../victim/hello", Mode: 0644, }, }, { // try writing victim/file { Name: "loophole-victim", Typeflag: tar.TypeLink, Linkname: "../victim", Mode: 0755, }, { Name: "loophole-victim/file", Typeflag: tar.TypeReg, Mode: 0644, }, }, { // try reading victim/hello (hardlink, symlink) { Name: "loophole-victim", Typeflag: tar.TypeLink, Linkname: "../victim", Mode: 0755, }, { Name: "symlink", Typeflag: tar.TypeSymlink, Linkname: "loophole-victim/hello", Mode: 0644, }, }, { // Try reading victim/hello (hardlink, hardlink) { Name: "loophole-victim", Typeflag: tar.TypeLink, Linkname: "../victim", Mode: 0755, }, { Name: "hardlink", Typeflag: tar.TypeLink, Linkname: "loophole-victim/hello", Mode: 0644, }, }, { // Try removing victim directory (hardlink) { Name: "loophole-victim", Typeflag: tar.TypeLink, Linkname: "../victim", Mode: 0755, }, { Name: "loophole-victim", Typeflag: tar.TypeReg, Mode: 0644, }, }, } { if err := testBreakout("untar", "docker-TestUntarInvalidHardlink", headers); err != nil { t.Fatalf("i=%d. %v", i, err) } } }
explode_data.jsonl/82001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 930 }
[ 2830, 3393, 20250, 277, 7928, 26907, 2080, 1155, 353, 8840, 836, 8, 341, 197, 322, 5343, 5515, 13, 2619, 1231, 387, 264, 1616, 315, 4303, 419, 11, 714, 13054, 1007, 369, 1431, 198, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 341, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// Test_wagerService_Buy covers wagerService.Buy with mocked repositories and
// a mocked DB adapter: the happy path plus wager-not-found, repository
// read/buy failures, an over-limit buying price, and transaction-creation
// errors. Error cases only assert that an error occurred, not its content.
// NOTE(review): the mock expectations are shared across all subtests, so
// adding cases may require revisiting the On(...) setup ordering.
func Test_wagerService_Buy(t *testing.T) { type fields struct { wagerRepository repositories.WagerRepository wagerTransactionRepository repositories.WagerTransactionRepository db database.DBAdapter } type args struct { ctx context.Context req *dtos.BuyWagerRequest } var ( wagerRepository = &mocksRepo.WagerRepository{} errWagerRepository = &mocksRepo.WagerRepository{} wagerTransactionRepository = &mocksRepo.WagerTransactionRepository{} errWagerTransactionRepository = &mocksRepo.WagerTransactionRepository{} db = &mocksDB.DBAdapter{} reqs map[string]*dtos.BuyWagerRequest = map[string]*dtos.BuyWagerRequest{ "good": { WagerID: 1, BuyingPrice: 10, }, "wager not found": { WagerID: 0, BuyingPrice: 10, }, "get wager error": {}, "buy error": { WagerID: 2, BuyingPrice: 10, }, "cannot buy more": { WagerID: 1, BuyingPrice: 1200, }, } wagers map[string]*models.Wager = map[string]*models.Wager{ "good": { ID: 1, CurrentSellingPrice: 1000, }, "buy error": { ID: 2, CurrentSellingPrice: 1000, }, } ) wagerRepository.On("Get", mock.Anything, int64(1)).Return(wagers["good"], nil) wagerRepository.On("Get", mock.Anything, int64(2)).Return(wagers["buy error"], nil) wagerRepository.On("Get", mock.Anything, int64(0)).Return(nil, nil) wagerRepository.On("Buy", mock.Anything, wagers["good"], mock.Anything, mock.Anything).Return(nil) wagerRepository.On("Buy", mock.Anything, wagers["buy error"], mock.Anything, mock.Anything).Return(errors.New("just an error")) errWagerRepository.On("Get", mock.Anything, mock.Anything).Return(nil, errors.New("just an error")) wagerTransactionRepository.On("Create", mock.Anything, mock.Anything).Return(nil) errWagerTransactionRepository.On("Create", mock.Anything, mock.Anything).Return(errors.New("just an error")) db.On("Begin", mock.Anything).Return(db) db.On("RollbackUselessCommitted").Return() db.On("Commit").Return() tests := []struct { name string fields fields args args wantErr bool }{ { name: "good", fields: fields{ wagerRepository: wagerRepository, 
wagerTransactionRepository: wagerTransactionRepository, db: db, }, args: args{ ctx: context.Background(), req: reqs["good"], }, wantErr: false, }, { name: "wager not found", fields: fields{ wagerRepository: wagerRepository, wagerTransactionRepository: wagerTransactionRepository, db: db, }, args: args{ ctx: context.Background(), req: reqs["wager not found"], }, wantErr: true, }, { name: "get wager error", fields: fields{ wagerRepository: errWagerRepository, wagerTransactionRepository: errWagerTransactionRepository, db: db, }, args: args{ ctx: context.Background(), req: reqs["get wager error"], }, wantErr: true, }, { name: "buy error", fields: fields{ wagerRepository: wagerRepository, wagerTransactionRepository: errWagerTransactionRepository, db: db, }, args: args{ ctx: context.Background(), req: reqs["buy error"], }, wantErr: true, }, { name: "cannot buy more", fields: fields{ wagerRepository: wagerRepository, wagerTransactionRepository: errWagerTransactionRepository, db: db, }, args: args{ ctx: context.Background(), req: reqs["cannot buy more"], }, wantErr: true, }, { name: "create transaction error", fields: fields{ wagerRepository: wagerRepository, wagerTransactionRepository: errWagerTransactionRepository, db: db, }, args: args{ ctx: context.Background(), req: reqs["good"], }, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { s := &wagerService{ wagerRepository: tt.fields.wagerRepository, wagerTransactionRepository: tt.fields.wagerTransactionRepository, db: tt.fields.db, } _, err := s.Buy(tt.args.ctx, tt.args.req) if (err != nil) != tt.wantErr { t.Errorf("Buy() error = %v, wantErr %v", err, tt.wantErr) return } if tt.wantErr { return } }) } }
explode_data.jsonl/47644
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2340 }
[ 2830, 3393, 1670, 1409, 1860, 1668, 4076, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 6692, 1409, 4624, 310, 49657, 1175, 1409, 4624, 198, 197, 6692, 1409, 8070, 4624, 49657, 1175, 1409, 8070, 4624, 198, 197, 20939, 5108,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestInstanceUpdate checks ServiceInstance PrepareForUpdate semantics: which
// spec changes bump metadata.Generation and which additionally clear the
// resolved ClusterServicePlanRef. Per the table: a no-op update changes
// neither; an UpdateRequests bump only increments the generation; external or
// k8s plan renames increment the generation AND clear the plan ref.
func TestInstanceUpdate(t *testing.T) { cases := []struct { name string older *servicecatalog.ServiceInstance newer *servicecatalog.ServiceInstance shouldGenerationIncrement bool shouldPlanRefClear bool }{ { name: "no spec change", older: getTestInstance(), newer: getTestInstance(), }, { name: "UpdateRequest increment", older: func() *servicecatalog.ServiceInstance { i := getTestInstance() i.Spec.UpdateRequests = 1 return i }(), newer: func() *servicecatalog.ServiceInstance { i := getTestInstance() i.Spec.UpdateRequests = 2 return i }(), shouldGenerationIncrement: true, }, { name: "external plan name change", older: getTestInstance(), newer: func() *servicecatalog.ServiceInstance { i := getTestInstance() i.Spec.ClusterServicePlanExternalName = "new-plan" return i }(), shouldGenerationIncrement: true, shouldPlanRefClear: true, }, { name: "external plan id change", older: func() *servicecatalog.ServiceInstance { i := getTestInstance() i.Spec.ClusterServiceClassExternalName = "" i.Spec.ClusterServicePlanExternalName = "" i.Spec.ClusterServiceClassExternalID = "test-serviceclass" i.Spec.ClusterServicePlanExternalID = "test-plan" return i }(), newer: func() *servicecatalog.ServiceInstance { i := getTestInstance() i.Spec.ClusterServiceClassExternalName = "" i.Spec.ClusterServicePlanExternalName = "" i.Spec.ClusterServiceClassExternalID = "test-serviceclass" i.Spec.ClusterServicePlanExternalID = "new plan" return i }(), shouldGenerationIncrement: true, shouldPlanRefClear: true, }, { name: "k8s plan change", older: func() *servicecatalog.ServiceInstance { i := getTestInstance() i.Spec.ClusterServiceClassExternalName = "" i.Spec.ClusterServicePlanExternalName = "" i.Spec.ClusterServiceClassName = "test-serviceclass" i.Spec.ClusterServicePlanName = "test-plan" return i }(), newer: func() *servicecatalog.ServiceInstance { i := getTestInstance() i.Spec.ClusterServiceClassExternalName = "" i.Spec.ClusterServicePlanExternalName = "" i.Spec.ClusterServiceClassName = 
"test-serviceclass" i.Spec.ClusterServicePlanName = "new plan" return i }(), shouldGenerationIncrement: true, shouldPlanRefClear: true, }, } for _, tc := range cases { instanceRESTStrategies.PrepareForUpdate(nil, tc.newer, tc.older) expectedGeneration := tc.older.Generation if tc.shouldGenerationIncrement { expectedGeneration = expectedGeneration + 1 } if e, a := expectedGeneration, tc.newer.Generation; e != a { t.Errorf("%v: expected %v, got %v for generation", tc.name, e, a) continue } if tc.shouldPlanRefClear { if tc.newer.Spec.ClusterServicePlanRef != nil { t.Errorf("%v: expected ServicePlanRef to be nil", tc.name) } } else { if tc.newer.Spec.ClusterServicePlanRef == nil { t.Errorf("%v: expected ServicePlanRef to not be nil", tc.name) } } } }
explode_data.jsonl/67001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1275 }
[ 2830, 3393, 2523, 4289, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 2549, 914, 198, 197, 197, 2018, 3824, 353, 7936, 26539, 13860, 2523, 198, 197, 8638, 261, 3824, 353, 7936, 26539, 13860, 2523, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNilParams(t *testing.T) { firstMember := createMember(t) _, err := signedRequestWithEmptyRequestRef(t, firstMember, "member.transfer", nil) require.Error(t, err) require.Contains(t, err.Error(), "call params are nil") }
explode_data.jsonl/26359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 19064, 4870, 1155, 353, 8840, 836, 8, 341, 42190, 9366, 1669, 1855, 9366, 1155, 692, 197, 6878, 1848, 1669, 8499, 1900, 2354, 3522, 1900, 3945, 1155, 11, 1156, 9366, 11, 330, 9597, 49428, 497, 2092, 340, 17957, 6141, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestServerBaseContext(t *testing.T) { r := NewRouter() r.Get("/", func(w http.ResponseWriter, r *http.Request) { baseYes := r.Context().Value(ctxKey{"base"}).(string) if _, ok := r.Context().Value(http.ServerContextKey).(*http.Server); !ok { panic("missing server context") } if _, ok := r.Context().Value(http.LocalAddrContextKey).(net.Addr); !ok { panic("missing local addr context") } w.Write([]byte(baseYes)) }) // Setup http Server with a base context ctx := context.WithValue(context.Background(), ctxKey{"base"}, "yes") ts := httptest.NewUnstartedServer(r) ts.Config.BaseContext = func(_ net.Listener) context.Context { return ctx } ts.Start() defer ts.Close() if _, body := testRequest(t, ts, "GET", "/", nil); body != "yes" { t.Fatalf(body) } }
explode_data.jsonl/42896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 5475, 93824, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 9523, 741, 7000, 2234, 35460, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 24195, 9454, 1669, 435, 9328, 1005, 1130, 7502, 1592, 4913, 3152, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTabletServerConcludeTransaction(t *testing.T) { _, tsv, db := newTestTxExecutor(t) defer tsv.StopService() defer db.Close() target := querypb.Target{TabletType: topodatapb.TabletType_PRIMARY} db.AddQuery("delete from _vt.dt_state where dtid = 'aa'", &sqltypes.Result{}) db.AddQuery("delete from _vt.dt_participant where dtid = 'aa'", &sqltypes.Result{}) err := tsv.ConcludeTransaction(ctx, &target, "aa") require.NoError(t, err) }
explode_data.jsonl/79987
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 2556, 83, 5475, 1109, 857, 8070, 1155, 353, 8840, 836, 8, 341, 197, 6878, 259, 3492, 11, 2927, 1669, 501, 2271, 31584, 25255, 1155, 340, 16867, 259, 3492, 30213, 1860, 741, 16867, 2927, 10421, 741, 28861, 1669, 3239, 16650, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_MongoDataSource_GetData(t *testing.T) { mongoDs := mongodatasource.NewMongoDataSource("localhost", "data") security, err := entity.ParseSecurity("000001.SZ") util.Assert(err == nil, "") util.Assert(security != nil, "") err, period1 := period.PeriodFromString("M1") util.Assert(err == nil, "") start := util.Tick() err, data := mongoDs.GetData(security, period1) fmt.Printf("time cost: %dms\n", util.Tick() - start) util.Assert(err == nil, "") fmt.Println(len(data)) fmt.Printf("%+v\n", &data[0]) fmt.Printf("%+v\n", &data[len(data) - 1]) err, r := mongoDs.GetLastRecord(security, period1) util.Assert(err == nil, fmt.Sprintf("%+v", err)) fmt.Printf("%+v\n", r) err, data = mongoDs.GetDataEx(security, period1, 1423704660000, 100) util.Assert(err == nil, "") util.Assert(len(data) == 100, "") err = mongoDs.RemoveData(security, period1, 0, 0) util.Assert(err == nil, "") err, data = mongoDs.GetData(security, period1) util.Assert(err == nil, "") util.Assert(len(data) == 0, "") }
explode_data.jsonl/61192
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 432 }
[ 2830, 3393, 1245, 6363, 17173, 13614, 1043, 1155, 353, 8840, 836, 8, 341, 2109, 6363, 66950, 1669, 74542, 347, 19346, 919, 7121, 54998, 17173, 445, 8301, 497, 330, 691, 1138, 197, 17039, 11, 1848, 1669, 5387, 8937, 15352, 445, 15, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetIterator(t *testing.T) { s := New(WithGoroutineSafe(), WithKeyComparator(comparator.IntComparator)) for i := 0; i < 10; i++ { s.Insert(i) } iter := s.Begin() assert.True(t, iter.Equal(iter.Clone())) assert.False(t, iter.Equal(nil)) assert.False(t, iter.Equal(s.Last())) iter = s.Find(5) assert.Equal(t, 5, iter.Value()) s.Clear() assert.Equal(t, 0, s.Size()) }
explode_data.jsonl/71964
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 1649, 11951, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 7, 2354, 38, 269, 14159, 25663, 1507, 3085, 1592, 38658, 14094, 12356, 7371, 38658, 1171, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 26, 600, 1027, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestResourceReconcile(t *testing.T) { t.Run("test resource reconcile without eventbus", func(t *testing.T) { cl := fake.NewClientBuilder().Build() args := &AdaptorArgs{ Image: testImage, Sensor: sensorObj, Labels: testLabels, } err := Reconcile(cl, nil, args, logging.NewArgoEventsLogger()) assert.Error(t, err) assert.False(t, sensorObj.Status.IsReady()) }) t.Run("test resource reconcile with eventbus", func(t *testing.T) { ctx := context.TODO() cl := fake.NewClientBuilder().Build() testBus := fakeEventBus.DeepCopy() testBus.Status.MarkDeployed("test", "test") testBus.Status.MarkConfigured() err := cl.Create(ctx, testBus) assert.Nil(t, err) args := &AdaptorArgs{ Image: testImage, Sensor: sensorObj, Labels: testLabels, } err = Reconcile(cl, testBus, args, logging.NewArgoEventsLogger()) assert.Nil(t, err) assert.True(t, sensorObj.Status.IsReady()) deployList := &appv1.DeploymentList{} err = cl.List(ctx, deployList, &client.ListOptions{ Namespace: testNamespace, }) assert.NoError(t, err) assert.Equal(t, 1, len(deployList.Items)) svcList := &corev1.ServiceList{} err = cl.List(ctx, svcList, &client.ListOptions{ Namespace: testNamespace, }) assert.NoError(t, err) assert.Equal(t, 0, len(svcList.Items)) }) }
explode_data.jsonl/74851
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 534 }
[ 2830, 3393, 4783, 693, 40446, 457, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1944, 5101, 63408, 2041, 1538, 10338, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 39407, 1669, 12418, 7121, 2959, 3297, 1005, 11066, 741, 197, 31215, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetMachingEndpointIndex(t *testing.T) { endpoints := Endpoints{ { Request: Request{ RegexRoute: regexp.MustCompile("GET/list/1/$"), }, }, { Request: Request{ RegexRoute: regexp.MustCompile("GET/list/[^/&]+?/name/$"), }, }, { Request: Request{ RegexRoute: regexp.MustCompile("GET/list\\?id=[^/&]+?$"), }, }, { Request: Request{ RegexRoute: regexp.MustCompile("GET/search\\?ei=[^/&]+?&q=[^/&]+?$"), }, }, } index := endpoints.GetMachingEndpointIndex("GET", "/list/1/") if index == -1 { t.Errorf("GetMachingEndpointIndex(method, path) expected: %v, got: %v", 0, index) } index = endpoints.GetMachingEndpointIndex("GET", "/list/{:id}/name/") if index == -1 { t.Errorf("GetMachingEndpointIndex(method, path) expected: %v, got: %v", 1, index) } index = endpoints.GetMachingEndpointIndex("GET", "/list?id={:id}") if index == -1 { t.Errorf("GetMachingEndpointIndex(method, path) expected: %v, got: %v", 2, index) } index = endpoints.GetMachingEndpointIndex("GET", "/search?ei={:ei}&q={:q}") if index == -1 { t.Errorf("GetMachingEndpointIndex(method, path) expected: %v, got: %v", 3, index) } index = endpoints.GetMachingEndpointIndex("GET", "/search?ei={:ei}&q={:q}&hoge=11") if index != -1 { t.Errorf("GetMachingEndpointIndex(method, path) expected: %v, got: %v", -1, index) } }
explode_data.jsonl/28344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 596 }
[ 2830, 3393, 1949, 44, 11829, 27380, 1552, 1155, 353, 8840, 836, 8, 341, 6246, 7706, 1669, 3972, 7706, 515, 197, 197, 515, 298, 73806, 25, 6145, 515, 571, 197, 32464, 4899, 25, 41877, 98626, 445, 3806, 20936, 14, 16, 10749, 4461, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestShouldFailToConvertApplicationConfigStateToDataModelWhenMatchSpecificationIsNotValid(t *testing.T) { testHelper := NewTestHelper(t) resourceHandle := NewApplicationConfigResourceHandle() resourceData := testHelper.CreateEmptyResourceDataForResourceHandle(resourceHandle) resourceData.SetId(applicationConfigID) resourceData.Set(ApplicationConfigFieldFullLabel, defaultLabel) resourceData.Set(ApplicationConfigFieldMatchSpecification, "INVALID") resourceData.Set(ApplicationConfigFieldScope, string(restapi.ApplicationConfigScopeIncludeNoDownstream)) resourceData.Set(ApplicationConfigFieldBoundaryScope, string(restapi.BoundaryScopeAll)) _, err := resourceHandle.MapStateToDataObject(resourceData, testHelper.ResourceFormatter()) require.NotNil(t, err) }
explode_data.jsonl/64948
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 14996, 19524, 1249, 12012, 4988, 2648, 1397, 1249, 1043, 1712, 4498, 8331, 56139, 3872, 2623, 4088, 1155, 353, 8840, 836, 8, 341, 18185, 5511, 1669, 1532, 2271, 5511, 1155, 340, 50346, 6999, 1669, 1532, 4988, 2648, 4783, 6999,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_AssertExpectationsForObjects_Helper_Failed(t *testing.T) { var mockedService1 = new(TestExampleImplementation) var mockedService2 = new(TestExampleImplementation) var mockedService3 = new(TestExampleImplementation) mockedService1.On("Test_AssertExpectationsForObjects_Helper_Failed", 1).Return() mockedService2.On("Test_AssertExpectationsForObjects_Helper_Failed", 2).Return() mockedService3.On("Test_AssertExpectationsForObjects_Helper_Failed", 3).Return() mockedService1.Called(1) mockedService3.Called(3) tt := new(testing.T) assert.False(t, AssertExpectationsForObjects(tt, &mockedService1.Mock, &mockedService2.Mock, &mockedService3.Mock)) assert.False(t, AssertExpectationsForObjects(tt, mockedService1, mockedService2, mockedService3)) }
explode_data.jsonl/8598
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 62222, 529, 17536, 804, 2461, 11543, 67828, 1400, 5687, 1155, 353, 8840, 836, 8, 8022, 2405, 46149, 1860, 16, 284, 501, 31159, 13314, 36850, 1218, 2405, 46149, 1860, 17, 284, 501, 31159, 13314, 36850, 1218, 2405, 46149, 1860, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthenticationHook(t *testing.T) { defer leaktest.AfterTest(t)() testCases := []struct { insecure bool tls *tls.ConnectionState username string buildHookSuccess bool publicHookSuccess bool privateHookSuccess bool }{ // Insecure mode, empty username. {true, nil, "", true, false, false}, // Insecure mode, non-empty username. {true, nil, "foo", true, true, false}, // Secure mode, no TLS state. {false, nil, "", false, false, false}, // Secure mode, bad user. {false, makeFakeTLSState([]string{"foo"}, []int{1}), "node", true, false, false}, // Secure mode, node user. {false, makeFakeTLSState([]string{security.NodeUser}, []int{1}), "node", true, true, true}, // Secure mode, root user. {false, makeFakeTLSState([]string{security.RootUser}, []int{1}), "node", true, false, false}, } for tcNum, tc := range testCases { hook, err := security.UserAuthCertHook(tc.insecure, tc.tls) if (err == nil) != tc.buildHookSuccess { t.Fatalf("#%d: expected success=%t, got err=%v", tcNum, tc.buildHookSuccess, err) } if err != nil { continue } err = hook(tc.username, true /*public*/) if (err == nil) != tc.publicHookSuccess { t.Fatalf("#%d: expected success=%t, got err=%v", tcNum, tc.publicHookSuccess, err) } err = hook(tc.username, false /*not public*/) if (err == nil) != tc.privateHookSuccess { t.Fatalf("#%d: expected success=%t, got err=%v", tcNum, tc.privateHookSuccess, err) } } }
explode_data.jsonl/23538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 609 }
[ 2830, 3393, 19297, 31679, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 18185, 37302, 1669, 3056, 1235, 341, 197, 17430, 25132, 1843, 1807, 198, 197, 3244, 4730, 394, 353, 34488, 17463, 1397, 198, 197, 72...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestKustomizeDeclarativeInvalidApp(t *testing.T) { Given(t). Path("invalid-kustomize"). When(). Declarative("declarative-apps/app.yaml"). Then(). Expect(Success("")). Expect(HealthIs(health.HealthStatusHealthy)). Expect(SyncStatusIs(SyncStatusCodeUnknown)). Expect(Condition(ApplicationConditionComparisonError, "invalid-kustomize/does-not-exist.yaml: no such file or directory")) }
explode_data.jsonl/37134
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 42, 1450, 551, 1912, 12821, 1388, 7928, 2164, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 69640, 445, 11808, 12646, 1450, 551, 38609, 197, 197, 4498, 25829, 197, 197, 1912, 12821, 1388, 445, 56305, 1388, 20023...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTopologySelectorRequirementsAsSelector(t *testing.T) { mustParse := func(s string) labels.Selector { out, e := labels.Parse(s) if e != nil { panic(e) } return out } tc := []struct { in []v1.TopologySelectorLabelRequirement out labels.Selector expectErr bool }{ {in: nil, out: labels.Nothing()}, {in: []v1.TopologySelectorLabelRequirement{}, out: labels.Nothing()}, { in: []v1.TopologySelectorLabelRequirement{{ Key: "foo", Values: []string{"bar", "baz"}, }}, out: mustParse("foo in (baz,bar)"), }, { in: []v1.TopologySelectorLabelRequirement{{ Key: "foo", Values: []string{}, }}, expectErr: true, }, { in: []v1.TopologySelectorLabelRequirement{ { Key: "foo", Values: []string{"bar", "baz"}, }, { Key: "invalid", Values: []string{}, }, }, expectErr: true, }, { in: []v1.TopologySelectorLabelRequirement{{ Key: "/invalidkey", Values: []string{"bar", "baz"}, }}, expectErr: true, }, } for i, tc := range tc { out, err := TopologySelectorRequirementsAsSelector(tc.in) if err == nil && tc.expectErr { t.Errorf("[%v]expected error but got none.", i) } if err != nil && !tc.expectErr { t.Errorf("[%v]did not expect error but got: %v", i, err) } if !reflect.DeepEqual(out, tc.out) { t.Errorf("[%v]expected:\n\t%+v\nbut got:\n\t%+v", i, tc.out, out) } } }
explode_data.jsonl/25712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 704 }
[ 2830, 3393, 60954, 5877, 59202, 2121, 5877, 1155, 353, 8840, 836, 8, 341, 2109, 590, 14463, 1669, 2915, 1141, 914, 8, 9201, 14752, 269, 341, 197, 13967, 11, 384, 1669, 9201, 8937, 1141, 340, 197, 743, 384, 961, 2092, 341, 298, 30764, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseErrors(t *testing.T) { for _, th := range testParseErrors { v, err := StrToTime(th.in) assert.NotEqual(t, nil, err, "%v for %v", v, th.in) } }
explode_data.jsonl/32328
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 14463, 13877, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 270, 1669, 2088, 1273, 14463, 13877, 341, 197, 5195, 11, 1848, 1669, 4509, 1249, 1462, 24365, 1858, 340, 197, 6948, 15000, 2993, 1155, 11, 2092, 11, 1848, 11, 5962, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestLoggingError(t *testing.T) { r := NewMock() err := errors.New("hello") got := r.Error(err, "", "context", "nothing") if got != err { t.Errorf("Error(%q, \"\") == %q but expected %q", err, got, err) } got = r.Error(err, "hey", "context", "nothing") expected := "hey: hello" if got.Error() != expected { t.Errorf("Error(%q, \"hey\") == %q but expected %q", err, got, expected) } }
explode_data.jsonl/82378
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 34575, 1454, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 11571, 741, 9859, 1669, 5975, 7121, 445, 14990, 1138, 3174, 354, 1669, 435, 6141, 3964, 11, 7342, 330, 2147, 497, 330, 41212, 1138, 743, 2684, 961, 1848, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestResponseWriter(t *testing.T) { tests := []struct { format tchannel.Format apply func(responseWriter) arg2 []byte arg3 []byte applicationError bool headerCase headerCase }{ { format: tchannel.Raw, apply: func(w responseWriter) { headers := transport.HeadersFromMap(map[string]string{"foo": "bar"}) w.AddHeaders(headers) _, err := w.Write([]byte("hello ")) require.NoError(t, err) _, err = w.Write([]byte("world")) require.NoError(t, err) }, arg2: []byte{ 0x00, 0x01, 0x00, 0x03, 'f', 'o', 'o', 0x00, 0x03, 'b', 'a', 'r', }, arg3: []byte("hello world"), }, { format: tchannel.Raw, apply: func(w responseWriter) { headers := transport.HeadersFromMap(map[string]string{"FoO": "bAr"}) w.AddHeaders(headers) _, err := w.Write([]byte("hello ")) require.NoError(t, err) _, err = w.Write([]byte("world")) require.NoError(t, err) }, arg2: []byte{ 0x00, 0x01, 0x00, 0x03, 'F', 'o', 'O', 0x00, 0x03, 'b', 'A', 'r', }, arg3: []byte("hello world"), headerCase: originalHeaderCase, }, { format: tchannel.Raw, apply: func(w responseWriter) { _, err := w.Write([]byte("foo")) require.NoError(t, err) _, err = w.Write([]byte("bar")) require.NoError(t, err) }, arg2: []byte{0x00, 0x00}, arg3: []byte("foobar"), }, { format: tchannel.JSON, apply: func(w responseWriter) { headers := transport.HeadersFromMap(map[string]string{"foo": "bar"}) w.AddHeaders(headers) _, err := w.Write([]byte("{}")) require.NoError(t, err) }, arg2: []byte(`{"foo":"bar"}` + "\n"), arg3: []byte("{}"), }, { format: tchannel.JSON, apply: func(w responseWriter) { headers := transport.HeadersFromMap(map[string]string{"FoO": "bAr"}) w.AddHeaders(headers) _, err := w.Write([]byte("{}")) require.NoError(t, err) }, arg2: []byte(`{"FoO":"bAr"}` + "\n"), arg3: []byte("{}"), headerCase: originalHeaderCase, }, { format: tchannel.JSON, apply: func(w responseWriter) { _, err := w.Write([]byte("{}")) require.NoError(t, err) }, arg2: []byte("{}\n"), arg3: []byte("{}"), }, { format: tchannel.Raw, apply: func(w responseWriter) { 
w.SetApplicationError() _, err := w.Write([]byte("hello")) require.NoError(t, err) }, arg2: []byte{0x00, 0x00}, arg3: []byte("hello"), applicationError: true, }, } for _, tt := range tests { call := &fakeInboundCall{format: tt.format} resp := newResponseRecorder() call.resp = resp w := newHandlerWriter(call.Response(), call.Format(), tt.headerCase) tt.apply(w) assert.NoError(t, w.Close()) assert.Nil(t, resp.systemErr) assert.Equal(t, tt.arg2, resp.arg2.Bytes()) assert.Equal(t, tt.arg3, resp.arg3.Bytes()) if tt.applicationError { assert.True(t, resp.applicationError, "expected an application error") } } }
explode_data.jsonl/53839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1474 }
[ 2830, 3393, 2582, 6492, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 59416, 1843, 259, 10119, 9978, 198, 197, 197, 10280, 310, 2915, 5684, 6492, 340, 197, 47903, 17, 1797, 3056, 3782, 198, 197, 47903, 18, 1797, 305...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStdLogger(t *testing.T) { buf := bytes.NewBuffer(nil) DefaultLogger = NewLogger(buf, "prefix: ", log.Lshortfile, LvlTrace) Info("msg1", "k1", "v1") Log(LvlInfo, 0, "msg2", "k2", "v2") StdLogger("stdlog: ", LvlDebug).Print("msg3") Infof("msg4: %s=%s", "k3", "v3") IfErr(errors.New("error"), "msg5") expects := []string{ `prefix: std_test.go:29: msg1; level=info; k1=v1`, `prefix: std_test.go:30: msg2; level=info; k2=v2`, `stdlog: std_test.go:31: msg3`, `prefix: std_test.go:32: msg4: k3=v3; level=info`, `prefix: std_test.go:33: msg5; level=error; err=error`, ``, } results := strings.Split(buf.String(), "\n") if len(expects) != len(results) { t.Errorf("expect %d line logs, but got %d", len(expects), len(results)) } else { for i, line := range expects { if results[i] != line { t.Errorf("%d: expect '%s', but got '%s'", i, line, results[i]) } } } }
explode_data.jsonl/75127
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 420 }
[ 2830, 3393, 22748, 7395, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 5820, 7121, 4095, 27907, 340, 91084, 7395, 284, 1532, 7395, 10731, 11, 330, 11849, 25, 3670, 1487, 1214, 8676, 1192, 11, 444, 14536, 6550, 692, 197, 1731, 445, 3236, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMountUFS(t *testing.T) { type fields struct { runtime *datav1alpha1.GooseFSRuntime dataset *datav1alpha1.Dataset name string namespace string Log logr.Logger Client client.Client } tests := []struct { name string fields fields wantErr bool }{ { name: "test", fields: fields{ runtime: &datav1alpha1.GooseFSRuntime{ ObjectMeta: v1.ObjectMeta{ Name: "spark", Namespace: "default", }, }, dataset: &datav1alpha1.Dataset{ ObjectMeta: v1.ObjectMeta{ Name: "spark", Namespace: "default", }, Spec: datav1alpha1.DatasetSpec{ Mounts: []datav1alpha1.Mount{ { Name: "test0", MountPoint: "cos://test0", Path: "/spec", }, }, }, Status: datav1alpha1.DatasetStatus{ Mounts: []datav1alpha1.Mount{ { Name: "test0", MountPoint: "cos://test0", Path: "/status", }, }, }, }, name: "spark", namespace: "default", Log: fake.NullLogger(), }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { testObjs := []runtime.Object{} testObjs = append(testObjs, tt.fields.runtime, tt.fields.dataset) client := fake.NewFakeClientWithScheme(testScheme, testObjs...) e := &GooseFSEngine{ runtime: tt.fields.runtime, name: tt.fields.name, namespace: tt.fields.namespace, Log: tt.fields.Log, Client: client, } var goosefsFileUtils operations.GooseFSFileUtils patch1 := ApplyMethod(reflect.TypeOf(goosefsFileUtils), "Ready", func(_ operations.GooseFSFileUtils) bool { return true }) defer patch1.Reset() patch2 := ApplyMethod(reflect.TypeOf(goosefsFileUtils), "Mount", func(_ operations.GooseFSFileUtils, goosefsPath string, ufsPath string, options map[string]string, readOnly bool, shared bool) error { return nil }) defer patch2.Reset() patch3 := ApplyMethod(reflect.TypeOf(goosefsFileUtils), "IsMounted", func(_ operations.GooseFSFileUtils, goosefsPath string, ) (bool, error) { return false, nil }) defer patch3.Reset() if err := e.mountUFS(); (err != nil) != tt.wantErr { t.Errorf("GooseFSEngine.mountUFS() error = %v, wantErr %v", err, tt.wantErr) } }) } }
explode_data.jsonl/70302
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1182 }
[ 2830, 3393, 16284, 52, 8485, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 7000, 4466, 256, 353, 5911, 402, 16, 7141, 16, 1224, 13752, 8485, 15123, 198, 197, 2698, 8369, 256, 353, 5911, 402, 16, 7141, 16, 79356, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenesisGood(t *testing.T) { // test a good one by raw json genDocBytes := []byte( `{ "genesis_time": "0001-01-01T00:00:00Z", "chain_id": "test-chain-QDKdJr", "initial_height": "1000", "consensus_params": null, "validators": [{ "pub_key":{"type":"tendermint/PubKeyEd25519","value":"AT/+aaL1eB0477Mud9JMm8Sh8BIvOYlPGC9KkIUmFaE="}, "power":"10", "name":"" }], "app_hash":"", "app_state":{"account_owner": "Bob"} }`, ) _, err := GenesisDocFromJSON(genDocBytes) assert.NoError(t, err, "expected no error for good genDoc json") pubkey := ed25519.GenPrivKey().PubKey() // create a base gendoc from struct baseGenDoc := &GenesisDoc{ ChainID: "abc", Validators: []GenesisValidator{{pubkey.Address(), pubkey, 10, "myval"}}, } genDocBytes, err = tmjson.Marshal(baseGenDoc) assert.NoError(t, err, "error marshalling genDoc") // test base gendoc and check consensus params were filled genDoc, err := GenesisDocFromJSON(genDocBytes) assert.NoError(t, err, "expected no error for valid genDoc json") assert.NotNil(t, genDoc.ConsensusParams, "expected consensus params to be filled in") // check validator's address is filled assert.NotNil(t, genDoc.Validators[0].Address, "expected validator's address to be filled in") // create json with consensus params filled genDocBytes, err = tmjson.Marshal(genDoc) assert.NoError(t, err, "error marshalling genDoc") genDoc, err = GenesisDocFromJSON(genDocBytes) assert.NoError(t, err, "expected no error for valid genDoc json") // test with invalid consensus params genDoc.ConsensusParams.Block.MaxBytes = 0 genDocBytes, err = tmjson.Marshal(genDoc) assert.NoError(t, err, "error marshalling genDoc") _, err = GenesisDocFromJSON(genDocBytes) assert.Error(t, err, "expected error for genDoc json with block size of 0") // Genesis doc from raw json missingValidatorsTestCases := [][]byte{ []byte(`{"chain_id":"mychain"}`), // missing validators []byte(`{"chain_id":"mychain","validators":[]}`), // missing validators 
[]byte(`{"chain_id":"mychain","validators":null}`), // nil validator []byte(`{"chain_id":"mychain"}`), // missing validators } for _, tc := range missingValidatorsTestCases { _, err := GenesisDocFromJSON(tc) assert.NoError(t, err) } }
explode_data.jsonl/61610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 883 }
[ 2830, 3393, 84652, 15216, 1155, 353, 8840, 836, 8, 341, 197, 322, 1273, 264, 1661, 825, 553, 7112, 2951, 198, 82281, 9550, 7078, 1669, 3056, 3782, 1006, 197, 197, 63, 515, 298, 197, 1, 77894, 3009, 788, 330, 15, 15, 15, 16, 12, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDirMgrCollection_CopyOut_02(t *testing.T) { dMgrs := DirMgrCollection{} _, err := dMgrs.CopyOut() if err == nil { t.Errorf("Expected Error return from 'dMgrs' because the collection\n" + "has zero members in the collection. However, NO ERROR WAS RETURNED!!!") } }
explode_data.jsonl/61227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 6184, 25567, 6482, 77637, 2662, 62, 15, 17, 1155, 353, 8840, 836, 8, 1476, 220, 294, 25567, 82, 1669, 30094, 25567, 6482, 31483, 220, 8358, 1848, 1669, 294, 25567, 82, 31770, 2662, 2822, 220, 421, 1848, 621, 2092, 341, 262...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDefaultVolumeSnapshotLocations(t *testing.T) { namespace := "heptio-ark" arkClient := fakeclientset.NewSimpleClientset() location := &v1.VolumeSnapshotLocation{ObjectMeta: metav1.ObjectMeta{Name: "location1"}, Spec: v1.VolumeSnapshotLocationSpec{Provider: "provider1"}} arkClient.ArkV1().VolumeSnapshotLocations(namespace).Create(location) defaultVolumeSnapshotLocations := make(map[string]string) // No defaults volumeSnapshotLocations, err := getDefaultVolumeSnapshotLocations(arkClient, namespace, defaultVolumeSnapshotLocations) assert.Equal(t, 0, len(volumeSnapshotLocations)) assert.NoError(t, err) // Bad location defaultVolumeSnapshotLocations["provider1"] = "badlocation" volumeSnapshotLocations, err = getDefaultVolumeSnapshotLocations(arkClient, namespace, defaultVolumeSnapshotLocations) assert.Equal(t, 0, len(volumeSnapshotLocations)) assert.Error(t, err) // Bad provider defaultVolumeSnapshotLocations["provider2"] = "badlocation" volumeSnapshotLocations, err = getDefaultVolumeSnapshotLocations(arkClient, namespace, defaultVolumeSnapshotLocations) assert.Equal(t, 0, len(volumeSnapshotLocations)) assert.Error(t, err) // Good provider, good location delete(defaultVolumeSnapshotLocations, "provider2") defaultVolumeSnapshotLocations["provider1"] = "location1" volumeSnapshotLocations, err = getDefaultVolumeSnapshotLocations(arkClient, namespace, defaultVolumeSnapshotLocations) assert.Equal(t, 1, len(volumeSnapshotLocations)) assert.NoError(t, err) location2 := &v1.VolumeSnapshotLocation{ObjectMeta: metav1.ObjectMeta{Name: "location2"}, Spec: v1.VolumeSnapshotLocationSpec{Provider: "provider2"}} arkClient.ArkV1().VolumeSnapshotLocations(namespace).Create(location2) // Mutliple Provider/Location 1 good, 1 bad defaultVolumeSnapshotLocations["provider2"] = "badlocation" volumeSnapshotLocations, err = getDefaultVolumeSnapshotLocations(arkClient, namespace, defaultVolumeSnapshotLocations) assert.Error(t, err) location21 := &v1.VolumeSnapshotLocation{ObjectMeta: 
metav1.ObjectMeta{Name: "location2-1"}, Spec: v1.VolumeSnapshotLocationSpec{Provider: "provider2"}} arkClient.ArkV1().VolumeSnapshotLocations(namespace).Create(location21) location11 := &v1.VolumeSnapshotLocation{ObjectMeta: metav1.ObjectMeta{Name: "location1-1"}, Spec: v1.VolumeSnapshotLocationSpec{Provider: "provider1"}} arkClient.ArkV1().VolumeSnapshotLocations(namespace).Create(location11) // Mutliple Provider/Location all good defaultVolumeSnapshotLocations["provider2"] = "location2" volumeSnapshotLocations, err = getDefaultVolumeSnapshotLocations(arkClient, namespace, defaultVolumeSnapshotLocations) assert.Equal(t, 2, len(volumeSnapshotLocations)) assert.NoError(t, err) assert.Equal(t, volumeSnapshotLocations["provider1"].ObjectMeta.Name, "location1") assert.Equal(t, volumeSnapshotLocations["provider2"].ObjectMeta.Name, "location2") }
explode_data.jsonl/40907
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 903 }
[ 2830, 3393, 3675, 18902, 15009, 43037, 1155, 353, 8840, 836, 8, 341, 56623, 1669, 330, 383, 417, 815, 12, 838, 698, 197, 838, 2959, 1669, 12418, 2972, 746, 7121, 16374, 2959, 746, 2822, 53761, 1669, 609, 85, 16, 79106, 15009, 4707, 90...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChunkStore_getMetricNameChunks(t *testing.T) { ctx := context.Background() now := model.Now() chunk1 := dummyChunkFor(now, labels.Labels{ {Name: labels.MetricName, Value: "foo"}, {Name: "bar", Value: "baz"}, {Name: "flip", Value: "flop"}, {Name: "toms", Value: "code"}, }) chunk2 := dummyChunkFor(now, labels.Labels{ {Name: labels.MetricName, Value: "foo"}, {Name: "bar", Value: "beep"}, {Name: "toms", Value: "code"}, }) testCases := []struct { query string expect []Chunk }{ { `foo`, []Chunk{chunk1, chunk2}, }, { `foo{flip=""}`, []Chunk{chunk2}, }, { `foo{bar="baz"}`, []Chunk{chunk1}, }, { `foo{bar="beep"}`, []Chunk{chunk2}, }, { `foo{toms="code"}`, []Chunk{chunk1, chunk2}, }, { `foo{bar!="baz"}`, []Chunk{chunk2}, }, { `foo{bar=~"beep|baz"}`, []Chunk{chunk1, chunk2}, }, { `foo{toms="code", bar=~"beep|baz"}`, []Chunk{chunk1, chunk2}, }, { `foo{toms="code", bar="baz"}`, []Chunk{chunk1}, }, } for _, schema := range schemas { for _, storeCase := range stores { storeCfg := storeCase.configFn() store := newTestChunkStoreConfig(t, schema, storeCfg) defer store.Stop() if err := store.Put(ctx, []Chunk{chunk1, chunk2}); err != nil { t.Fatal(err) } for _, tc := range testCases { t.Run(fmt.Sprintf("%s / %s / %s", tc.query, schema, storeCase.name), func(t *testing.T) { t.Log("========= Running query", tc.query, "with schema", schema) matchers, err := parser.ParseMetricSelector(tc.query) if err != nil { t.Fatal(err) } chunks, err := store.Get(ctx, userID, now.Add(-time.Hour), now, matchers...) require.NoError(t, err) if !reflect.DeepEqual(tc.expect, chunks) { t.Fatalf("%s: wrong chunks - %s", tc.query, test.Diff(tc.expect, chunks)) } }) } } } }
explode_data.jsonl/43819
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 924 }
[ 2830, 3393, 28304, 6093, 3062, 54310, 675, 89681, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 80922, 1669, 1614, 13244, 741, 23049, 3122, 16, 1669, 17292, 28304, 2461, 32263, 11, 9201, 4679, 82, 515, 197, 197, 63121, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUpdateJobs_WhenTransactionAlwaysFailsForOneBatch_ReturnsErrorForThatBatch_OtherChangesWork(t *testing.T) { withRepository(func(r *RedisJobRepository) { job1 := addTestJobWithClientId(t, r, "queue1", "my-job-1") job2 := addTestJobWithClientId(t, r, "queue2", "my-job-1") job3 := addTestJobWithClientId(t, r, "queue3", "my-job-1") newSchedName := "custom" results := r.updateJobs([]string{job1.Id, job2.Id, job3.Id}, func(jobs []*api.Job) { assert.Equal(t, 1, len(jobs)) job := jobs[0] if job.Id == job2.Id { results2, err := r.UpdateJobs([]string{job2.Id}, func(jobs []*api.Job) {}) // 2nd update in middle of transaction if err != nil { t.Fatalf("expected no error but got: %s", err) } assert.Equal(t, 1, len(results2)) assert.Nil(t, results2[0].Error) } job.PodSpec.SchedulerName = newSchedName }, 1, 3, time.Microsecond) assert.Equal(t, 3, len(results)) assert.Equal(t, job1.Id, results[0].JobId) assert.Equal(t, job2.Id, results[1].JobId) assert.Equal(t, job3.Id, results[2].JobId) assert.Equal(t, job1.Id, results[0].Job.Id) assert.Nil(t, results[1].Job) assert.Equal(t, job3.Id, results[2].Job.Id) assert.Equal(t, newSchedName, results[0].Job.PodSpec.SchedulerName) assert.Equal(t, newSchedName, results[2].Job.PodSpec.SchedulerName) assert.Nil(t, results[0].Error) assert.Equal(t, redis.TxFailedErr, results[1].Error) assert.Nil(t, results[2].Error) reloadedJobs, err := r.GetExistingJobsByIds([]string{job1.Id, job2.Id, job3.Id}) assert.Nil(t, err) assert.Equal(t, 3, len(reloadedJobs)) assert.Equal(t, newSchedName, reloadedJobs[0].PodSpec.SchedulerName) assert.Equal(t, "", reloadedJobs[1].PodSpec.SchedulerName) assert.Equal(t, newSchedName, reloadedJobs[2].PodSpec.SchedulerName) }) }
explode_data.jsonl/32072
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 801 }
[ 2830, 3393, 4289, 40667, 62, 4498, 8070, 37095, 37, 6209, 2461, 3966, 21074, 53316, 82, 1454, 2461, 4792, 21074, 2232, 696, 11317, 6776, 1155, 353, 8840, 836, 8, 341, 46948, 4624, 18552, 2601, 353, 48137, 12245, 4624, 8, 341, 197, 68577...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPointBls12377G2Neg(t *testing.T) { bls12377G2 := BLS12377G1() g := bls12377G2.Point.Generator().Neg() require.True(t, g.Neg().Equal(bls12377G2.Point.Generator())) require.True(t, bls12377G2.Point.Identity().Neg().Equal(bls12377G2.Point.Identity())) }
explode_data.jsonl/15768
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 2609, 33, 4730, 16, 17, 18, 22, 22, 38, 17, 47800, 1155, 353, 8840, 836, 8, 341, 96421, 82, 16, 17, 18, 22, 22, 38, 17, 1669, 425, 7268, 16, 17, 18, 22, 22, 38, 16, 741, 3174, 1669, 1501, 82, 16, 17, 18, 22, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_setJavaExecutableFile(t *testing.T) { pipelineId := uuid.New() lc, _ := fs_tool.NewLifeCycle(pb.Sdk_SDK_JAVA, pipelineId, filepath.Join(os.Getenv("APP_WORK_DIR"), pipelinesFolder)) lc.Paths.ExecutableName = fakeExecutableName executorBuilder := executors.NewExecutorBuilder().WithRunner().WithCommand("fake cmd").ExecutorBuilder type args struct { lc *fs_tool.LifeCycle id uuid.UUID service cache.Cache ctx context.Context executorBuilder *executors.ExecutorBuilder dir string } tests := []struct { name string args args want executors.Executor wantErr bool }{ { name: "set executable name to runner", args: args{ lc: lc, id: pipelineId, service: cacheService, ctx: context.Background(), executorBuilder: &executorBuilder, dir: pipelinesFolder, }, want: executors.NewExecutorBuilder(). WithExecutableFileName(fileName). WithRunner(). WithCommand("fake cmd"). WithTestRunner(). Build(), wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := setJavaExecutableFile(tt.args.lc.Paths, tt.args.id, tt.args.service, tt.args.ctx, tt.args.executorBuilder, tt.args.dir) if (err != nil) != tt.wantErr { t.Errorf("setJavaExecutableFile() error = %v, wantErr %v", err, tt.wantErr) } if !reflect.DeepEqual(got, tt.want) { t.Errorf("setJavaExecutableFile() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/27
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 732 }
[ 2830, 3393, 2602, 15041, 94772, 1703, 1155, 353, 8840, 836, 8, 341, 3223, 8790, 764, 1669, 16040, 7121, 741, 8810, 66, 11, 716, 1669, 8619, 22785, 7121, 25749, 44820, 76878, 97113, 84197, 10598, 35610, 11, 15301, 764, 11, 26054, 22363, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCounterWaitGroup(t *testing.T) { var mutex sync.Mutex var wait sync.WaitGroup counter := 0 for i := 0; i < 5000; i++ { wait.Add(1) go func() { defer func() { mutex.Unlock() }() mutex.Lock() counter++ wait.Done() }() } wait.Wait() t.Logf("counter = %d", counter) }
explode_data.jsonl/34247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 14099, 14190, 2808, 1155, 353, 8840, 836, 8, 341, 2405, 30863, 12811, 99014, 198, 2405, 3783, 12811, 28384, 2808, 198, 58261, 1669, 220, 15, 198, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 20, 15, 15, 15, 26, 600, 1027...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMigrate_upApplyOne(t *testing.T) { db := dbtest.OpenWithoutMigrations(t) session, err := dbpkg.Open(db.DSN) require.NoError(t, err) n, err := Migrate(session, migrate.Up, 1) require.NoError(t, err) assert.Equal(t, 1, n) ids := []string{} err = session.Select(&ids, `SELECT id FROM gorp_migrations`) require.NoError(t, err) wantIDs := []string{ "20200309000000-initial-1.sql", } assert.Equal(t, wantIDs, ids) }
explode_data.jsonl/39323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 44, 34479, 8237, 28497, 3966, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 2927, 1944, 12953, 26040, 44, 17824, 1155, 340, 25054, 11, 1848, 1669, 2927, 30069, 12953, 9791, 909, 18966, 340, 17957, 35699, 1155, 11, 1848, 692, 90...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBatchStats(t *testing.T) { session := createSession(t) defer session.Close() if session.cfg.ProtoVersion == 1 { t.Skip("atomic batches not supported. Please use Cassandra >= 2.0") } if err := createTable(session, "CREATE TABLE gocql_test.batchStats (id int, PRIMARY KEY (id))"); err != nil { t.Fatalf("failed to create table with error '%v'", err) } b := session.NewBatch(LoggedBatch) b.Query("INSERT INTO batchStats (id) VALUES (?)", 1) b.Query("INSERT INTO batchStats (id) VALUES (?)", 2) if err := session.ExecuteBatch(b); err != nil { t.Fatalf("query failed. %v", err) } else { if b.Attempts() < 1 { t.Fatal("expected at least 1 attempt, but got 0") } if b.Latency() <= 0 { t.Fatalf("expected latency to be greater than 0, but got %v instead.", b.Latency()) } } }
explode_data.jsonl/11168
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 21074, 16635, 1155, 353, 8840, 836, 8, 341, 25054, 1669, 1855, 5283, 1155, 340, 16867, 3797, 10421, 2822, 743, 3797, 30481, 7763, 983, 5637, 621, 220, 16, 341, 197, 3244, 57776, 445, 6618, 44792, 537, 7248, 13, 5209, 990, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestStreamGroup(t *testing.T) { s, err := Run() ok(t, err) defer s.Close() c, err := redis.Dial("tcp", s.Addr()) ok(t, err) defer c.Close() _, err = redis.String(c.Do("XGROUP", "CREATE", "s", "processing", "$")) mustFail(t, err, "ERR stream s not exists") _, err = redis.String(c.Do("XGROUP", "CREATE", "s", "processing", "$", "MKSTREAM")) ok(t, err) count, err := redis.Int(c.Do("XLEN", "s")) ok(t, err) equals(t, 0, count) }
explode_data.jsonl/31883
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 3027, 2808, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 6452, 741, 59268, 1155, 11, 1848, 340, 16867, 274, 10421, 741, 1444, 11, 1848, 1669, 20870, 98462, 445, 27161, 497, 274, 93626, 2398, 59268, 1155, 11, 1848, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMaxInactivityOnConnectionClose(t *testing.T) { o := GetDefaultOptions() o.MaxInactivity = 250 * time.Millisecond o.ClientHBInterval = 100 * time.Millisecond o.ClientHBTimeout = 50 * time.Millisecond o.ClientHBFailCount = 2 s := runServerWithOpts(t, o, nil) defer s.Shutdown() sc := NewDefaultConnection(t) defer sc.Close() if _, err := sc.Subscribe("foo", func(_ *stan.Msg) {}); err != nil { t.Fatalf("Error on subscribe: %v", err) } if s.channels.get("foo") == nil { t.Fatalf("Channel should exit") } // Close connection without closing subscription sc.Close() // Wait for server to remove... waitForNumClients(t, s, 0) // Wait for channel to be removed waitFor(t, 2*time.Second, 100*time.Millisecond, func() error { if s.channels.get("foo") != nil { return fmt.Errorf("Channel should have been removed") } return nil }) sc.Close() sc = NewDefaultConnection(t) defer sc.Close() if _, err := sc.Subscribe("foo", func(_ *stan.Msg) {}); err != nil { t.Fatalf("Error on subscribe: %v", err) } if s.channels.get("foo") == nil { t.Fatalf("Channel should exit") } // Close NATS connection to cause server to remove it sc.NatsConn().Close() // Wait for server to remove... waitForNumClients(t, s, 0) // Wait for channel to be removed waitFor(t, 2*time.Second, 100*time.Millisecond, func() error { if s.channels.get("foo") != nil { return fmt.Errorf("Channel should have been removed") } return nil }) }
explode_data.jsonl/23100
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 541 }
[ 2830, 3393, 5974, 641, 7175, 1925, 4526, 7925, 1155, 353, 8840, 836, 8, 341, 22229, 1669, 2126, 3675, 3798, 741, 22229, 14535, 641, 7175, 284, 220, 17, 20, 15, 353, 882, 71482, 198, 22229, 11716, 30725, 10256, 284, 220, 16, 15, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTransportFlushesBodyChunks(t *testing.T) { defer afterTest(t) resBody := make(chan io.Reader, 1) connr, connw := io.Pipe() // connection pipe pair lw := &logWritesConn{ rch: resBody, w: connw, } tr := &Transport{ Dial: func(network, addr string) (net.Conn, error) { return lw, nil }, } bodyr, bodyw := io.Pipe() // body pipe pair go func() { defer bodyw.Close() for i := 0; i < 3; i++ { fmt.Fprintf(bodyw, "num%d\n", i) } }() resc := make(chan *Response) go func() { req, _ := NewRequest("POST", "http://localhost:8080", bodyr) req.Header.Set("User-Agent", "x") // known value for test res, err := tr.RoundTrip(req) if err != nil { t.Errorf("RoundTrip: %v", err) close(resc) return } resc <- res }() // Fully consume the request before checking the Write log vs. want. req, err := ReadRequest(bufio.NewReader(connr)) if err != nil { t.Fatal(err) } io.Copy(ioutil.Discard, req.Body) // Unblock the transport's roundTrip goroutine. resBody <- strings.NewReader("HTTP/1.1 204 No Content\r\nConnection: close\r\n\r\n") res, ok := <-resc if !ok { return } defer res.Body.Close() want := []string{ "POST / HTTP/1.1\r\nHost: localhost:8080\r\nUser-Agent: x\r\nTransfer-Encoding: chunked\r\nAccept-Encoding: gzip\r\n\r\n", "5\r\nnum0\n\r\n", "5\r\nnum1\n\r\n", "5\r\nnum2\n\r\n", "0\r\n\r\n", } if !reflect.DeepEqual(lw.writes, want) { t.Errorf("Writes differed.\n Got: %q\nWant: %q\n", lw.writes, want) } }
explode_data.jsonl/14140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 693 }
[ 2830, 3393, 27560, 46874, 288, 5444, 89681, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 10202, 5444, 1669, 1281, 35190, 6399, 47431, 11, 220, 16, 340, 32917, 81, 11, 4534, 86, 1669, 6399, 1069, 3444, 368, 442, 3633, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestState_UpdateJoin(t *testing.T) { t.Parallel() st := setupNewState() ev := &irc.Event{ Name: irc.JOIN, Sender: users[0], Args: []string{channels[0]}, } st.addChannel(channels[0]) if st.IsOn(users[0], channels[0]) { t.Errorf("Expected %v to not be on %v", users[0], channels[0]) } u := st.Update(ev) if len(u.Seen) != 1 || u.Seen[0] != users[0] { t.Errorf("Expected %v to be seen, got: %v", users[0], u.Seen) } if !st.IsOn(users[0], channels[0]) { t.Errorf("Expected %v to be on %v", users[0], channels[0]) } st = setupNewState() st.addChannel(channels[0]) if st.IsOn(users[0], channels[0]) { t.Errorf("Expected %v to not be on %v", users[0], channels[0]) } u = st.Update(ev) if len(u.Seen) != 1 || u.Seen[0] != users[0] { t.Errorf("Expected %v to be seen, got: %v", users[0], u.Seen) } if !st.IsOn(users[0], channels[0]) { t.Errorf("Expected %v to be on %v", users[0], channels[0]) } }
explode_data.jsonl/32093
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 440 }
[ 2830, 3393, 1397, 47393, 12292, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18388, 1669, 6505, 3564, 1397, 741, 74837, 1669, 609, 2437, 6904, 515, 197, 21297, 25, 256, 79923, 3503, 46, 687, 345, 197, 7568, 1659, 25, 3847, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestErrors(t *testing.T) { initTest(ModeMulti, defaultTestOpts(), &testopts{}, t) tests := []struct { SQL string Err string }{ { SQL: "INVALID SQL", Err: "vtexplain execute error in 'INVALID SQL': syntax error at position 8 near 'INVALID'", }, { SQL: "SELECT * FROM THIS IS NOT SQL", Err: "vtexplain execute error in 'SELECT * FROM THIS IS NOT SQL': syntax error at position 22 near 'IS'", }, { SQL: "SELECT * FROM table_not_in_vschema", Err: "vtexplain execute error in 'SELECT * FROM table_not_in_vschema': table table_not_in_vschema not found", }, { SQL: "SELECT * FROM table_not_in_schema", Err: "unknown error: unable to resolve table name table_not_in_schema", }, } for _, test := range tests { t.Run(test.SQL, func(t *testing.T) { _, err := Run(test.SQL) require.Error(t, err) require.Contains(t, err.Error(), test.Err) }) } }
explode_data.jsonl/12769
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 371 }
[ 2830, 3393, 13877, 1155, 353, 8840, 836, 8, 341, 28248, 2271, 3189, 534, 20358, 11, 1638, 2271, 43451, 1507, 609, 1944, 10518, 22655, 259, 692, 78216, 1669, 3056, 1235, 341, 197, 197, 6688, 914, 198, 197, 197, 7747, 914, 198, 197, 594...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNumericNormalize(t *testing.T) { testutil.TestSuccessfulNormalizeEqFunc(t, []testutil.NormalizeTest{ { SQL: "select '0'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "0"), Status: pgtype.Present}, }, { SQL: "select '1'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "1"), Status: pgtype.Present}, }, { SQL: "select '10.00'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "10.00"), Status: pgtype.Present}, }, { SQL: "select '1e-3'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "0.001"), Status: pgtype.Present}, }, { SQL: "select '-1'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "-1"), Status: pgtype.Present}, }, { SQL: "select '10000'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "10000"), Status: pgtype.Present}, }, { SQL: "select '3.14'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "3.14"), Status: pgtype.Present}, }, { SQL: "select '1.1'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "1.1"), Status: pgtype.Present}, }, { SQL: "select '100010001'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "100010001"), Status: pgtype.Present}, }, { SQL: "select '100010001.0001'::numeric", Value: &shopspring.Numeric{Decimal: mustParseDecimal(t, "100010001.0001"), Status: pgtype.Present}, }, { SQL: "select '4237234789234789289347892374324872138321894178943189043890124832108934.43219085471578891547854892438945012347981'::numeric", Value: &shopspring.Numeric{ Decimal: mustParseDecimal(t, "4237234789234789289347892374324872138321894178943189043890124832108934.43219085471578891547854892438945012347981"), Status: pgtype.Present, }, }, { SQL: "select '0.8925092023480223478923478978978937897879595901237890234789243679037419057877231734823098432903527585734549035904590854890345905434578345789347890402348952348905890489054234237489234987723894789234'::numeric", Value: &shopspring.Numeric{ Decimal: 
mustParseDecimal(t, "0.8925092023480223478923478978978937897879595901237890234789243679037419057877231734823098432903527585734549035904590854890345905434578345789347890402348952348905890489054234237489234987723894789234"), Status: pgtype.Present, }, }, { SQL: "select '0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000123'::numeric", Value: &shopspring.Numeric{ Decimal: mustParseDecimal(t, "0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000123"), Status: pgtype.Present, }, }, }, func(aa, bb interface{}) bool { a := aa.(shopspring.Numeric) b := bb.(shopspring.Numeric) return a.Status == b.Status && a.Decimal.Equal(b.Decimal) }) }
explode_data.jsonl/39593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1275 }
[ 2830, 3393, 36296, 87824, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 8787, 36374, 87824, 27312, 9626, 1155, 11, 3056, 1944, 1314, 69523, 2271, 515, 197, 197, 515, 298, 197, 6688, 25, 256, 330, 1742, 364, 15, 6, 486, 19600, 756, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestManifestInitCorruptedBlock(t *testing.T) { ctx := testlogging.Context(t) data := blobtesting.DataMap{} st := blobtesting.NewMapStorage(data, nil, nil) f := &content.FormattingOptions{ Hash: "HMAC-SHA256-128", Encryption: encryption.NoneAlgorithm, MaxPackSize: 100000, Version: 1, } // write some data to storage bm, err := content.NewManager(ctx, st, f, content.CachingOptions{}, nil) if err != nil { t.Fatalf("err: %v", err) } mgr, err := NewManager(ctx, bm) if err != nil { t.Fatalf("err: %v", err) } mgr.Put(ctx, map[string]string{"type": "foo"}, map[string]string{"some": "value"}) //nolint:errcheck mgr.Flush(ctx) bm.Flush(ctx) // corrupt data at the storage level. for blobID, v := range data { for _, prefix := range content.PackBlobIDPrefixes { if strings.HasPrefix(string(blobID), string(prefix)) { for i := 0; i < len(v); i++ { v[i] ^= 1 } } } } // make a new content manager based on corrupted data. bm, err = content.NewManager(ctx, st, f, content.CachingOptions{}, nil) if err != nil { t.Fatalf("err: %v", err) } mgr, err = NewManager(ctx, bm) if err != nil { t.Fatalf("err: %v", err) } cases := []struct { desc string f func() error }{ {"GetRaw", func() error { _, err := mgr.GetRaw(ctx, "anything"); return err }}, {"GetMetadata", func() error { _, err := mgr.GetMetadata(ctx, "anything"); return err }}, {"Get", func() error { return mgr.Get(ctx, "anything", nil) }}, {"Delete", func() error { return mgr.Delete(ctx, "anything") }}, {"Find", func() error { _, err := mgr.Find(ctx, nil); return err }}, {"Put", func() error { _, err := mgr.Put(ctx, map[string]string{ "type": "foo", }, map[string]string{ "some": "value", }) return err }}, } for _, tc := range cases { tc := tc t.Run(tc.desc, func(t *testing.T) { err := tc.f() if err == nil || !strings.Contains(err.Error(), "invalid checksum") { t.Errorf("invalid error when initializing malformed manifest manager: %v", err) } }) } }
explode_data.jsonl/77797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 855 }
[ 2830, 3393, 38495, 3803, 10580, 85954, 4713, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1273, 25263, 9328, 1155, 340, 8924, 1669, 23404, 8840, 3336, 2227, 16094, 18388, 1669, 23404, 8840, 7121, 2227, 5793, 2592, 11, 2092, 11, 2092, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateBuilderIncludeDesiredInfoIfEnabled(t *testing.T) { var tests = map[string]struct { IncludeInfo map[types.IncludeInfoKey]bool expectedDesiredCount int }{ "include all": { IncludeInfo: map[types.IncludeInfoKey]bool{ types.IncludeAllInfo: true, }, expectedDesiredCount: 1, }, "exclude all": { IncludeInfo: map[types.IncludeInfoKey]bool{ types.IncludeAllInfo: false, }, expectedDesiredCount: 0, }, "include *": { IncludeInfo: map[types.IncludeInfoKey]bool{ "*": true, }, expectedDesiredCount: 1, }, "exclude *": { IncludeInfo: map[types.IncludeInfoKey]bool{ "*": false, }, expectedDesiredCount: 0, }, "include none": { IncludeInfo: map[types.IncludeInfoKey]bool{}, expectedDesiredCount: 0, }, "include nil": { expectedDesiredCount: 0, }, "include desired": { IncludeInfo: map[types.IncludeInfoKey]bool{ types.IncludeDesiredInfo: true, }, expectedDesiredCount: 1, }, "exclude desired": { IncludeInfo: map[types.IncludeInfoKey]bool{ types.IncludeDesiredInfo: false, }, expectedDesiredCount: 0, }, } for name, mock := range tests { name := name mock := mock t.Run(name, func(t *testing.T) { b := &UpdateStatesBuilder{ Request: UpdateRequest{ IncludeInfo: mock.IncludeInfo, }, Result: &types.Result{}, } b.includeDesiredInfoIfEnabled(&unstructured.Unstructured{ Object: map[string]interface{}{ "kind": "Pod", "apiVersion": "v1", "metadata": map[string]interface{}{ "name": "my-pod", "namespace": "my-ns", }, }, }, "Desired info") if mock.expectedDesiredCount != len(b.Result.DesiredResourcesInfo) { t.Fatalf( "Expected desired count %d got %d", mock.expectedDesiredCount, len(b.Result.DesiredResourcesInfo), ) } }) } }
explode_data.jsonl/54334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 853 }
[ 2830, 3393, 4289, 3297, 22283, 4896, 2690, 1731, 2679, 5462, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 2415, 14032, 60, 1235, 341, 197, 197, 22283, 1731, 688, 2415, 58, 9242, 55528, 1731, 1592, 96436, 198, 197, 42400, 4896, 2690, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidAwsCredentialsAreAccepted(t *testing.T) { driver := NewCustomTestDriver(&fakeEC2WithLogin{}) driver.awsCredentialsFactory = NewValidAwsCredentials options := &commandstest.FakeFlagger{ Data: map[string]interface{}{ "name": "test", "amazonec2-region": "us-east-1", "amazonec2-zone": "e", }, } err := driver.SetConfigFromFlags(options) assert.NoError(t, err) }
explode_data.jsonl/7419
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 4088, 47359, 27025, 11526, 65906, 1155, 353, 8840, 836, 8, 341, 33652, 1669, 1532, 10268, 2271, 11349, 2099, 30570, 7498, 17, 2354, 6231, 37790, 33652, 35166, 27025, 4153, 284, 1532, 4088, 47359, 27025, 198, 35500, 1669, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpgradeKymaOperationManager_OperationSucceeded(t *testing.T) { // given memory := storage.NewMemoryStorage() operations := memory.Operations() opManager := NewUpgradeKymaOperationManager(operations) op := fixUpgradeKymaOperation() err := operations.InsertUpgradeKymaOperation(op) require.NoError(t, err) // when op, when, err := opManager.OperationSucceeded(op, "task succeeded", logrus.New()) // then assert.NoError(t, err) assert.Equal(t, domain.Succeeded, op.State) assert.Equal(t, time.Duration(0), when) }
explode_data.jsonl/73200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 43861, 42, 1600, 64, 8432, 2043, 2232, 91774, 50, 43805, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 2109, 4731, 1669, 5819, 7121, 10642, 5793, 741, 197, 38163, 1669, 4938, 13, 35120, 741, 39703, 2043, 1669, 1532, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWithoutPostings(t *testing.T) { var cases = []struct { base Postings drop Postings res Postings }{ { base: EmptyPostings(), drop: EmptyPostings(), res: EmptyPostings(), }, { base: EmptyPostings(), drop: newListPostings(1, 2), res: EmptyPostings(), }, { base: newListPostings(1, 2), drop: EmptyPostings(), res: newListPostings(1, 2), }, { base: newListPostings(), drop: newListPostings(), res: newListPostings(), }, { base: newListPostings(1, 2, 3), drop: newListPostings(), res: newListPostings(1, 2, 3), }, { base: newListPostings(1, 2, 3), drop: newListPostings(4, 5, 6), res: newListPostings(1, 2, 3), }, { base: newListPostings(1, 2, 3), drop: newListPostings(3, 4, 5), res: newListPostings(1, 2), }, } for _, c := range cases { t.Run("", func(t *testing.T) { if c.res == nil { t.Fatal("without result expectancy cannot be nil") } expected, err := ExpandPostings(c.res) testutil.Ok(t, err) w := Without(c.base, c.drop) if c.res == EmptyPostings() { testutil.Equals(t, EmptyPostings(), w) return } if w == EmptyPostings() { t.Fatal("without unexpected result: EmptyPostings sentinel") } res, err := ExpandPostings(w) testutil.Ok(t, err) testutil.Equals(t, expected, res) }) } }
explode_data.jsonl/13133
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 622 }
[ 2830, 3393, 26040, 4133, 819, 1155, 353, 8840, 836, 8, 341, 2405, 5048, 284, 3056, 1235, 341, 197, 24195, 3877, 819, 198, 197, 2698, 887, 3877, 819, 271, 197, 10202, 3877, 819, 198, 197, 59403, 197, 197, 515, 298, 24195, 25, 22228, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestX5C_Init(t *testing.T) { type ProvisionerValidateTest struct { p *X5C err error extraValid func(*X5C) error } tests := map[string]func(*testing.T) ProvisionerValidateTest{ "fail/empty": func(t *testing.T) ProvisionerValidateTest { return ProvisionerValidateTest{ p: &X5C{}, err: errors.New("provisioner type cannot be empty"), } }, "fail/empty-name": func(t *testing.T) ProvisionerValidateTest { return ProvisionerValidateTest{ p: &X5C{ Type: "X5C", }, err: errors.New("provisioner name cannot be empty"), } }, "fail/empty-type": func(t *testing.T) ProvisionerValidateTest { return ProvisionerValidateTest{ p: &X5C{Name: "foo"}, err: errors.New("provisioner type cannot be empty"), } }, "fail/empty-key": func(t *testing.T) ProvisionerValidateTest { return ProvisionerValidateTest{ p: &X5C{Name: "foo", Type: "bar"}, err: errors.New("provisioner root(s) cannot be empty"), } }, "fail/no-valid-root-certs": func(t *testing.T) ProvisionerValidateTest { return ProvisionerValidateTest{ p: &X5C{Name: "foo", Type: "bar", Roots: []byte("foo")}, err: errors.Errorf("no x509 certificates found in roots attribute for provisioner 'foo'"), } }, "fail/invalid-duration": func(t *testing.T) ProvisionerValidateTest { p, err := generateX5C(nil) assert.FatalError(t, err) p.Claims = &Claims{DefaultTLSDur: &Duration{0}} return ProvisionerValidateTest{ p: p, err: errors.New("claims: MinTLSCertDuration must be greater than 0"), } }, "ok": func(t *testing.T) ProvisionerValidateTest { p, err := generateX5C(nil) assert.FatalError(t, err) return ProvisionerValidateTest{ p: p, } }, "ok/root-chain": func(t *testing.T) ProvisionerValidateTest { p, err := generateX5C([]byte(`-----BEGIN CERTIFICATE----- MIIBtjCCAVygAwIBAgIQNr+f4IkABY2n4wx4sLOMrTAKBggqhkjOPQQDAjAUMRIw EAYDVQQDEwlyb290LXRlc3QwIBcNMTkxMDAyMDI0MDM0WhgPMjExOTA5MDgwMjQw MzJaMBwxGjAYBgNVBAMTEWludGVybWVkaWF0ZS10ZXN0MFkwEwYHKoZIzj0CAQYI KoZIzj0DAQcDQgAEflfRhPjgJXv4zsPWahXjM2UU61aRFErN0iw88ZPyxea22fxl 
qN9ezntTXxzsS+mZiWapl8B40ACJgvP+WLQBHKOBhTCBgjAOBgNVHQ8BAf8EBAMC AQYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUnJAxiZcy2ibHcuvfFx99 oDwzKXMwHwYDVR0jBBgwFoAUpHS7FfaQ5bCrTxUeu6R2ZC3VGOowHAYDVR0RBBUw E4IRaW50ZXJtZWRpYXRlLXRlc3QwCgYIKoZIzj0EAwIDSAAwRQIgII8XpQ8ezDO1 2xdq3hShf155C5X/5jO8qr0VyEJgzlkCIQCTqph1Gwu/dmuf6dYLCfQqJyb371LC lgsqsR63is+0YQ== -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- MIIBhTCCASqgAwIBAgIRAMalM7pKi0GCdKjO6u88OyowCgYIKoZIzj0EAwIwFDES MBAGA1UEAxMJcm9vdC10ZXN0MCAXDTE5MTAwMjAyMzk0OFoYDzIxMTkwOTA4MDIz OTQ4WjAUMRIwEAYDVQQDEwlyb290LXRlc3QwWTATBgcqhkjOPQIBBggqhkjOPQMB BwNCAAS29QTCXUu7cx9sa9wZPpRSFq/zXaw8Ai3EIygayrBsKnX42U2atBUjcBZO BWL6A+PpLzU9ja867U5SYNHERS+Oo1swWTAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0T AQH/BAgwBgEB/wIBATAdBgNVHQ4EFgQUpHS7FfaQ5bCrTxUeu6R2ZC3VGOowFAYD VR0RBA0wC4IJcm9vdC10ZXN0MAoGCCqGSM49BAMCA0kAMEYCIQC2vgqwla0u8LHH 1MHob14qvS5o76HautbIBW7fcHzz5gIhAIx5A2+wkJYX4026kqaZCk/1sAwTxSGY M46l92gdOozT -----END CERTIFICATE-----`)) assert.FatalError(t, err) return ProvisionerValidateTest{ p: p, extraValid: func(p *X5C) error { // nolint:staticcheck // We don't have a different way to // check the number of certificates in the pool. numCerts := len(p.rootPool.Subjects()) if numCerts != 2 { return errors.Errorf("unexpected number of certs: want 2, but got %d", numCerts) } return nil }, } }, } config := Config{ Claims: globalProvisionerClaims, Audiences: testAudiences, } for name, get := range tests { t.Run(name, func(t *testing.T) { tc := get(t) err := tc.p.Init(config) if err != nil { if assert.NotNil(t, tc.err) { assert.Equals(t, tc.err.Error(), err.Error()) } } else { if assert.Nil(t, tc.err) { assert.Equals(t, *tc.p.ctl.Audiences, config.Audiences.WithFragment(tc.p.GetID())) if tc.extraValid != nil { assert.Nil(t, tc.extraValid(tc.p)) } } } }) } }
explode_data.jsonl/44602
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2119 }
[ 2830, 3393, 55, 20, 34, 15644, 1155, 353, 8840, 836, 8, 341, 13158, 71573, 261, 17926, 2271, 2036, 341, 197, 3223, 688, 353, 55, 20, 34, 198, 197, 9859, 286, 1465, 198, 197, 8122, 2172, 4088, 2915, 4071, 55, 20, 34, 8, 1465, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSplitComment_UnderMax(t *testing.T) { comment := "comment under max size" split := common.SplitComment(comment, len(comment)+1, "sepEnd", "sepStart") Equals(t, []string{comment}, split) }
explode_data.jsonl/67900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 20193, 10677, 6665, 900, 5974, 1155, 353, 8840, 836, 8, 341, 96268, 1669, 330, 6182, 1212, 1932, 1379, 698, 1903, 2292, 1669, 4185, 19823, 10677, 39327, 11, 2422, 39327, 7257, 16, 11, 330, 28036, 3727, 497, 330, 28036, 3479,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestMintTokensFromMobile(t *testing.T) { scriptHash := "9121e89e8a0849857262d67c8408601b5e8e0524" // encryptedKey := "" // passphrase := "" // wif, _ := neoutils.NEP2Decrypt(encryptedKey, passphrase) wif := "" wallet, err := neoutils.GenerateFromWIF(wif) if err != nil { log.Printf("%v", err) t.Fail() return } log.Printf("address = %v\n address hash = %x", wallet.Address, wallet.HashedSignature) // neo := string(smartcontract.NEO) gas := string(smartcontract.GAS) amount := float64(1) remark := "FIRST! APISIT FROM O3 :D" network := "main" networkFeeAmountInGAS := float64(0) tx, err := neoutils.MintTokensRawTransactionMobile(network, scriptHash, wif, gas, amount, remark, networkFeeAmountInGAS) if err != nil { log.Printf("%v", err) t.Fail() return } log.Printf("txID =%v", tx.TXID) log.Printf("tx = %x", tx.Data) }
explode_data.jsonl/22989
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 44, 396, 29300, 3830, 18370, 1155, 353, 8840, 836, 8, 341, 86956, 6370, 1669, 330, 24, 16, 17, 16, 68, 23, 24, 68, 23, 64, 15, 23, 19, 24, 23, 20, 22, 17, 21, 17, 67, 21, 22, 66, 23, 19, 15, 23, 21, 15, 16, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestQuotedStringArg(t *testing.T) { cl, err := client.NewURI(tcpAddr) require.Nil(t, err) // should NOT be unquoted val := "\"abc\"" got, err := echoViaHTTP(cl, val) require.Nil(t, err) assert.Equal(t, got, val) }
explode_data.jsonl/62869
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 2183, 9253, 703, 2735, 1155, 353, 8840, 836, 8, 341, 39407, 11, 1848, 1669, 2943, 7121, 10301, 98203, 13986, 340, 17957, 59678, 1155, 11, 1848, 340, 197, 322, 1265, 4183, 387, 650, 63725, 198, 19302, 1669, 15898, 13683, 2105...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRecurseManifestsInDir(t *testing.T) { service := newService(".") src := argoappv1.ApplicationSource{Path: "./testdata/recurse", Directory: &argoappv1.ApplicationSourceDirectory{Recurse: true}} q := apiclient.ManifestRequest{Repo: &argoappv1.Repository{}, ApplicationSource: &src} res1, err := service.GenerateManifest(context.Background(), &q) assert.Nil(t, err) assert.Equal(t, 2, len(res1.Manifests)) }
explode_data.jsonl/5665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 693, 2352, 325, 38495, 82, 641, 6184, 1155, 353, 8840, 836, 8, 341, 52934, 1669, 501, 1860, 445, 31225, 41144, 1669, 1392, 78, 676, 85, 16, 17521, 3608, 90, 1820, 25, 5924, 92425, 10758, 2352, 325, 497, 18033, 25, 609, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestManageDataValidateName(t *testing.T) { kp0 := newKeypair0() sourceAccount := NewSimpleAccount(kp0.Address(), int64(3556091187167235)) manageData := ManageData{ Name: "This is a very long name for a field that only accepts 64 characters", Value: []byte(""), } _, err := NewTransaction( TransactionParams{ SourceAccount: &sourceAccount, IncrementSequenceNum: false, Operations: []Operation{&manageData}, BaseFee: MinBaseFee, Timebounds: NewInfiniteTimeout(), }, ) if assert.Error(t, err) { expected := "validation failed for *txnbuild.ManageData operation: Field: Name, Error: maximum length is 64 characters" assert.Contains(t, err.Error(), expected) } }
explode_data.jsonl/62883
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 33076, 1043, 17926, 675, 1155, 353, 8840, 836, 8, 341, 16463, 79, 15, 1669, 501, 6608, 1082, 1310, 15, 741, 47418, 7365, 1669, 1532, 16374, 7365, 5969, 79, 15, 26979, 1507, 526, 21, 19, 7, 18, 20, 20, 21, 15, 24, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConjInvolutiveInt64(t *testing.T) { f := func(x *Int64) bool { // t.Logf("x = %v", x) l := new(Int64) l.Conj(l.Conj(x)) return l.Equals(x) } if err := quick.Check(f, nil); err != nil { t.Error(err) } }
explode_data.jsonl/29671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 1109, 73, 641, 12536, 6704, 1072, 21, 19, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 2915, 2075, 353, 1072, 21, 19, 8, 1807, 341, 197, 197, 322, 259, 98954, 445, 87, 284, 1018, 85, 497, 856, 340, 197, 8810, 1669, 501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoProjectID(t *testing.T) { client, err := NewClient(context.Background(), &internal.MessagingConfig{}) if client != nil || err == nil { t.Errorf("NewClient() = (%v, %v); want = (nil, error)", client, err) } }
explode_data.jsonl/3627
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 2753, 7849, 915, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 1532, 2959, 5378, 19047, 1507, 609, 10481, 88969, 2648, 37790, 743, 2943, 961, 2092, 1369, 1848, 621, 2092, 341, 197, 3244, 13080, 445, 3564, 2959, 368, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
// TestAssigningSubscriberPermanentError checks that a permanent error on the
// partition-assignment stream terminates the assigning subscriber with that
// error, while messages received before the failure are delivered and their
// commits are still flushed.
func TestAssigningSubscriberPermanentError(t *testing.T) {
	const subscription = "projects/123456/locations/us-central1-b/subscriptions/my-sub"
	receiver := newTestMessageReceiver(t)
	msg1 := seqMsgWithOffsetAndSize(11, 100)
	msg2 := seqMsgWithOffsetAndSize(22, 200)
	// The server-side failure injected into the assignment stream.
	serverErr := status.Error(codes.FailedPrecondition, "failed")
	verifiers := test.NewVerifiers(t)
	// Assignment stream: assigns partitions 1 and 2, then fails with
	// serverErr once the barrier is released below.
	asnStream := test.NewRPCVerifier(t)
	asnStream.Push(initAssignmentReq(subscription, fakeUUID[:]), assignmentResp([]int64{1, 2}), nil)
	errBarrier := asnStream.PushWithBarrier(assignmentAckReq(), nil, serverErr)
	verifiers.AddAssignmentStream(subscription, asnStream)
	// Partition 1: subscribe stream delivers msg1; commit stream expects the
	// corresponding cursor (offset 11 -> commit 12).
	subStream1 := test.NewRPCVerifier(t)
	subStream1.Push(initSubReqCommit(subscriptionPartition{Path: subscription, Partition: 1}), initSubResp(), nil)
	subStream1.Push(initFlowControlReq(), msgSubResp(msg1), nil)
	verifiers.AddSubscribeStream(subscription, 1, subStream1)
	cmtStream1 := test.NewRPCVerifier(t)
	cmtStream1.Push(initCommitReq(subscriptionPartition{Path: subscription, Partition: 1}), initCommitResp(), nil)
	cmtStream1.Push(commitReq(12), commitResp(1), nil)
	verifiers.AddCommitStream(subscription, 1, cmtStream1)
	// Partition 2: same shape as partition 1 but for msg2 (offset 22 -> 23).
	subStream2 := test.NewRPCVerifier(t)
	subStream2.Push(initSubReqCommit(subscriptionPartition{Path: subscription, Partition: 2}), initSubResp(), nil)
	subStream2.Push(initFlowControlReq(), msgSubResp(msg2), nil)
	verifiers.AddSubscribeStream(subscription, 2, subStream2)
	cmtStream2 := test.NewRPCVerifier(t)
	cmtStream2.Push(initCommitReq(subscriptionPartition{Path: subscription, Partition: 2}), initCommitResp(), nil)
	cmtStream2.Push(commitReq(23), commitResp(1), nil)
	verifiers.AddCommitStream(subscription, 2, cmtStream2)
	mockServer.OnTestStart(verifiers)
	defer mockServer.OnTestEnd()
	sub := newTestAssigningSubscriber(t, receiver.onMessage, noopReassignmentHandler, subscription)
	if gotErr := sub.WaitStarted(); gotErr != nil {
		t.Errorf("Start() got err: (%v)", gotErr)
	}
	// Both partitions' messages must arrive before the failure is injected.
	receiver.ValidateMsgs(join(partitionMsgs(1, msg1), partitionMsgs(2, msg2)))
	// Permanent assignment stream error should terminate subscriber. Commits are
	// still flushed.
	errBarrier.Release()
	if gotErr := sub.WaitStopped(); !test.ErrorEqual(gotErr, serverErr) {
		t.Errorf("Final error got: (%v), want: (%v)", gotErr, serverErr)
	}
}
explode_data.jsonl/31656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 833 }
[ 2830, 3393, 28933, 287, 40236, 78793, 1454, 1155, 353, 8840, 836, 8, 341, 4777, 15142, 284, 330, 17161, 14, 16, 17, 18, 19, 20, 21, 14, 31309, 62431, 84081, 16, 1455, 37885, 29966, 34198, 17967, 698, 17200, 12862, 1669, 501, 2271, 205...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestConcurrentT_StageN exercises the StageN synchronization primitive:
// goroutines entering a named stage proceed only once exactly N of them have
// arrived. Covers the happy path, many stages/goroutines, fatal propagation,
// deadlock when too few goroutines arrive, panics for extra or inconsistent
// participants, and a panicking stage body.
func TestConcurrentT_StageN(t *testing.T) {
	t.Run("happy case", func(t *testing.T) {
		ct := test.NewConcurrent(t)
		var executed, returned sync.WaitGroup
		executed.Add(2)
		returned.Add(2)
		// Two goroutines enter the same 2-party stage; both must execute the
		// stage body and return within the timeout.
		for i := 0; i < 2; i++ {
			go func() {
				ct.StageN("stage", 2, func(t test.ConcT) {
					executed.Done()
				})
				returned.Done()
			}()
		}
		ctxtest.AssertTerminates(t, timeout, executed.Wait)
		ctxtest.AssertTerminates(t, timeout, returned.Wait)
	})
	t.Run("n*m happy", func(t *testing.T) {
		N := 100
		M := 100
		ct := test.NewConcurrent(t)
		// Even-numbered goroutines participate in each stage (N/2 parties);
		// odd-numbered ones only wait for the stage to complete.
		for g := 0; g < N; g++ {
			go func(g int) {
				for stage := 0; stage < M; stage++ {
					if g&1 == 0 {
						ct.StageN(strconv.Itoa(stage), N/2, func(t test.ConcT) {
						})
					} else {
						ct.Wait(strconv.Itoa(stage))
					}
				}
			}(g)
		}
	})
	t.Run("n*m sad", func(t *testing.T) {
		N := 100
		M := 100
		// One goroutine calls FailNow inside a stage; the failure must surface
		// as a fatal on the wrapped test.
		test.AssertFatal(t, func(t test.T) {
			ct := test.NewConcurrent(t)
			var wg sync.WaitGroup
			wg.Add(N)
			for g := 0; g < N; g++ {
				go func(g int) {
					defer wg.Done()
					for stage := 0; stage < M; stage++ {
						ct.StageN(strconv.Itoa(stage), N, func(t test.ConcT) {
							if g == N/2 {
								t.FailNow()
							}
						})
					}
				}(g)
			}
			wg.Wait()
		})
	})
	t.Run("too few goroutines", func(t *testing.T) {
		ct := test.NewConcurrent(t)
		// Only one of the required two parties arrives: StageN must block.
		ctxtest.AssertNotTerminates(t, timeout, func() {
			ct.StageN("stage", 2, func(test.ConcT) {})
		})
	})
	t.Run("too many goroutines", func(t *testing.T) {
		ct := test.NewConcurrent(t)
		go ct.StageN("stage", 2, func(test.ConcT) {})
		ct.StageN("stage", 2, func(test.ConcT) {})
		// A third participant in a 2-party stage must panic.
		assert.Panics(t, func() {
			ct.StageN("stage", 2, func(test.ConcT) {})
		})
	})
	t.Run("inconsistent N", func(t *testing.T) {
		ct := test.NewConcurrent(t)
		var created sync.WaitGroup
		created.Add(1)
		go ct.StageN("stage", 2, func(test.ConcT) {
			created.Done()
		})
		created.Wait()
		// Joining the same stage with a different party count must panic.
		assert.Panics(t, func() {
			ct.StageN("stage", 3, func(test.ConcT) {})
		})
	})
	t.Run("panic", func(t *testing.T) {
		// A panic inside a stage body must be converted into a fatal failure.
		test.AssertFatal(t, func(t test.T) {
			ct := test.NewConcurrent(t)
			ct.Stage("stage", func(test.ConcT) {
				panic(nil)
			})
		})
	})
}
explode_data.jsonl/69826
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1093 }
[ 2830, 3393, 1109, 3231, 51, 62, 19398, 45, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 56521, 1142, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 89216, 1669, 1273, 7121, 1109, 3231, 1155, 340, 197, 2405, 15695, 11, 5927, 12811, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReconcileResponsiveGC is a table test for the Route reconciler's
// lastPinned bookkeeping. With responsive GC disabled, a Revision whose
// lastPinned annotation is stale must be re-stamped via a patch; with
// responsive GC enabled, no patch is expected.
func TestReconcileResponsiveGC(t *testing.T) {
	table := TableTest{{
		Name: "Update stale lastPinned",
		// Responsive GC disabled: the reconciler maintains lastPinned itself.
		Ctx: setResponsiveGCFeature(context.Background(), cfgmap.Disabled),
		Objects: []runtime.Object{
			Route("default", "stale-lastpinned", WithConfigTarget("config"),
				WithURL, WithAddress, WithRouteConditionsAutoTLSDisabled,
				MarkTrafficAssigned, MarkIngressReady, WithRouteFinalizer,
				WithRouteGeneration(1), WithRouteObservedGeneration,
				WithStatusTraffic(
					v1.TrafficTarget{
						RevisionName:   "config-00001",
						Percent:        ptr.Int64(100),
						LatestRevision: ptr.Bool(true),
					})),
			cfg("default", "config",
				WithConfigGeneration(1), WithLatestCreated("config-00001"), WithLatestReady("config-00001"),
				// The Route controller attaches our label to this Configuration.
				WithConfigLabel("serving.knative.dev/route", "stale-lastpinned"),
			),
			// The Revision's lastPinned is 10 minutes old — stale.
			rev("default", "config", 1, MarkRevisionReady,
				WithRevName("config-00001"),
				WithLastPinned(fakeCurTime.Add(-10*time.Minute))),
			simpleReadyIngress(
				Route("default", "stale-lastpinned", WithConfigTarget("config"), WithURL),
				&traffic.Config{
					RevisionTargets: traffic.RevisionTargets{{
						TrafficTarget: v1.TrafficTarget{
							ConfigurationName: "config",
							RevisionName:      "config-00001",
							Percent:           ptr.Int64(100),
							LatestRevision:    ptr.Bool(true),
						},
					}},
					Targets: map[string]traffic.RevisionTargets{
						traffic.DefaultTarget: {{
							TrafficTarget: v1.TrafficTarget{
								RevisionName: "config-00001",
								Percent:      ptr.Int64(100),
							},
							Active: true,
						}},
					},
				},
			),
			simpleK8sService(Route("default", "stale-lastpinned", WithConfigTarget("config"))),
		},
		// The stale annotation must be refreshed via a patch.
		WantPatches: []clientgotesting.PatchActionImpl{
			patchLastPinned("default", "config-00001"),
		},
		Key: "default/stale-lastpinned",
	}, {
		Name: "lastPinned update disabled",
		// Responsive GC enabled: lastPinned stamping is turned off.
		Ctx: setResponsiveGCFeature(context.Background(), cfgmap.Enabled),
		Objects: []runtime.Object{
			Route("default", "stale-lastpinned", WithConfigTarget("config"),
				WithURL, WithAddress, WithRouteConditionsAutoTLSDisabled,
				MarkTrafficAssigned, MarkIngressReady, WithRouteFinalizer,
				WithRouteGeneration(1), WithRouteObservedGeneration,
				WithStatusTraffic(
					v1.TrafficTarget{
						RevisionName:   "config-00001",
						Percent:        ptr.Int64(100),
						LatestRevision: ptr.Bool(true),
					})),
			cfg("default", "config",
				WithConfigGeneration(1), WithLatestCreated("config-00001"), WithLatestReady("config-00001"),
				// The Route controller attaches our label to this Configuration.
				WithConfigLabel("serving.knative.dev/route", "stale-lastpinned"),
			),
			rev("default", "config", 1, MarkRevisionReady, WithRevName("config-00001")),
			simpleReadyIngress(
				Route("default", "stale-lastpinned", WithConfigTarget("config"), WithURL),
				&traffic.Config{
					RevisionTargets: traffic.RevisionTargets{{
						TrafficTarget: v1.TrafficTarget{
							ConfigurationName: "config",
							RevisionName:      "config-00001",
							Percent:           ptr.Int64(100),
							LatestRevision:    ptr.Bool(true),
						},
					}},
					Targets: map[string]traffic.RevisionTargets{
						traffic.DefaultTarget: {{
							TrafficTarget: v1.TrafficTarget{
								// Use the Revision name from the config.
								RevisionName: "config-00001",
								Percent:      ptr.Int64(100),
							},
							Active: true,
						}},
					},
				},
			),
			simpleK8sService(Route("default", "stale-lastpinned", WithConfigTarget("config"))),
		},
		// WantPatches: Expecting no patch for when disabled.
		Key: "default/stale-lastpinned",
	}}
	// Build the reconciler under test with fake listers and a fixed clock.
	table.Test(t, MakeFactory(func(ctx context.Context, listers *Listers, cmw configmap.Watcher) controller.Reconciler {
		r := &Reconciler{
			kubeclient:          kubeclient.Get(ctx),
			client:              servingclient.Get(ctx),
			netclient:           networkingclient.Get(ctx),
			configurationLister: listers.GetConfigurationLister(),
			revisionLister:      listers.GetRevisionLister(),
			serviceLister:       listers.GetK8sServiceLister(),
			ingressLister:       listers.GetIngressLister(),
			tracker:             ctx.Value(TrackerKey).(tracker.Interface),
			clock:               FakeClock{Time: fakeCurTime},
		}
		return routereconciler.NewReconciler(ctx, logging.FromContext(ctx), servingclient.Get(ctx), listers.GetRouteLister(), controller.GetEventRecorder(ctx), r, controller.Options{ConfigStore: &testConfigStore{config: ReconcilerTestConfig(false)}})
	}))
}
explode_data.jsonl/7814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1940 }
[ 2830, 3393, 693, 40446, 457, 78795, 22863, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 6633, 2271, 90, 515, 197, 21297, 25, 330, 4289, 50351, 1537, 47, 20561, 756, 197, 6258, 3998, 25, 220, 738, 78795, 22863, 13859, 5378, 19047, 1507, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChequebookIssueErrorSend verifies that when the send callback passed to
// Issue fails, Issue returns the error and the cheque is not persisted:
// LastCheque must still report ErrNoCheque afterwards.
func TestChequebookIssueErrorSend(t *testing.T) {
	address := common.HexToAddress("0xabcd")
	beneficiary := common.HexToAddress("0xdddd")
	ownerAdress := common.HexToAddress("0xfff")
	store := storemock.NewStateStore()
	amount := big.NewInt(20)
	sig := common.Hex2Bytes("0xffff")
	chequeSigner := &chequeSignerMock{}
	chequebookService, err := chequebook.New(
		transactionmock.New(),
		address,
		ownerAdress,
		store,
		chequeSigner,
		erc20mock.New(),
	)
	if err != nil {
		t.Fatal(err)
	}
	// The signer succeeds — the failure is injected in the send step below.
	chequeSigner.sign = func(cheque *chequebook.Cheque) ([]byte, error) {
		return sig, nil
	}
	_, err = chequebookService.Issue(context.Background(), beneficiary, amount, func(cheque *chequebook.SignedCheque) error {
		return errors.New("err")
	})
	if err == nil {
		t.Fatal("expected error")
	}
	// verify the cheque was not saved
	_, err = chequebookService.LastCheque(beneficiary)
	if !errors.Is(err, chequebook.ErrNoCheque) {
		t.Fatalf("wrong error. wanted %v, got %v", chequebook.ErrNoCheque, err)
	}
}
explode_data.jsonl/41444
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 398 }
[ 2830, 3393, 26843, 591, 2190, 42006, 1454, 11505, 1155, 353, 8840, 836, 8, 341, 63202, 1669, 4185, 91538, 1249, 4286, 445, 15, 52616, 4385, 1138, 2233, 50465, 26813, 1669, 4185, 91538, 1249, 4286, 445, 15, 53432, 631, 1138, 197, 8118, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRepeatableBatchSource(t *testing.T) { batch := coldata.NewMemBatch([]coltypes.T{coltypes.Int64}) batchLen := uint16(10) batch.SetLength(batchLen) input := NewRepeatableBatchSource(batch) b := input.Next(context.Background()) b.SetLength(0) b.SetSelection(true) b = input.Next(context.Background()) if b.Length() != batchLen { t.Fatalf("expected RepeatableBatchSource to reset batch length to %d, found %d", batchLen, b.Length()) } if b.Selection() != nil { t.Fatalf("expected RepeatableBatchSource to reset selection vector, found %+v", b.Selection()) } }
explode_data.jsonl/63467
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 38718, 480, 21074, 3608, 1155, 353, 8840, 836, 8, 341, 2233, 754, 1669, 1375, 691, 7121, 18816, 21074, 10556, 2074, 9242, 836, 90, 2074, 9242, 7371, 21, 19, 3518, 2233, 754, 11271, 1669, 2622, 16, 21, 7, 16, 15, 340, 223...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestInsightStore_StampBackfill verifies that StampBackfill marks a series'
// backfill as complete: afterwards the series no longer matches a
// BackfillIncomplete query but is still returned by an unfiltered query.
func TestInsightStore_StampBackfill(t *testing.T) {
	timescale, cleanup := insightsdbtesting.TimescaleDB(t)
	defer cleanup()
	// Truncate to microseconds to match database timestamp precision.
	now := time.Now().Round(0).Truncate(time.Microsecond)
	ctx := context.Background()
	store := NewInsightStore(timescale)
	// Pin the store's clock so recording timestamps are deterministic.
	store.Now = func() time.Time { return now }
	series := types.InsightSeries{
		SeriesID:              "unique-1",
		Query:                 "query-1",
		OldestHistoricalAt:    now.Add(-time.Hour * 24 * 365),
		LastRecordedAt:        now.Add(-time.Hour * 24 * 365),
		NextRecordingAfter:    now,
		LastSnapshotAt:        now,
		NextSnapshotAfter:     now,
		RecordingIntervalDays: 4,
	}
	created, err := store.CreateSeries(ctx, series)
	if err != nil {
		t.Fatal(err)
	}
	_, err = store.StampBackfill(ctx, created)
	if err != nil {
		t.Fatal(err)
	}
	t.Run("test only incomplete", func(t *testing.T) {
		// After stamping, no series should remain backfill-incomplete.
		got, err := store.GetDataSeries(ctx, GetDataSeriesArgs{
			BackfillIncomplete: true,
		})
		if err != nil {
			t.Fatal(err)
		}
		want := 0
		if diff := cmp.Diff(want, len(got)); diff != "" {
			t.Errorf("mismatched updated backfill_stamp count want/got: %v", diff)
		}
	})
	t.Run("test get all", func(t *testing.T) {
		// The series itself must still exist.
		got, err := store.GetDataSeries(ctx, GetDataSeriesArgs{})
		if err != nil {
			t.Fatal(err)
		}
		want := 1
		if diff := cmp.Diff(want, len(got)); diff != "" {
			t.Errorf("mismatched updated backfill_stamp count want/got: %v", diff)
		}
	})
}
explode_data.jsonl/33316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 609 }
[ 2830, 3393, 15474, 491, 6093, 62, 20906, 3707, 7559, 1155, 353, 8840, 836, 8, 341, 3244, 1733, 2246, 11, 21290, 1669, 25709, 1999, 8840, 836, 1733, 2246, 3506, 1155, 340, 16867, 21290, 741, 80922, 1669, 882, 13244, 1005, 27497, 7, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAll is the integration entry point: it brings up a test cluster,
// installs the base-test chart (which includes the chart-assignment
// controller), waits for that controller's deployment to become ready, and
// then runs the individual ChartAssignment scenarios against it.
func TestAll(t *testing.T) {
	env := kubetest.New(t, kubetest.Config{
		Clusters: []kubetest.ClusterConfig{
			{Name: robotClusterName},
		},
		SchemeFunc: crcapps.AddToScheme,
	})
	defer env.Teardown()
	env.InstallChartArchive(
		robotClusterName,
		"base-test",
		"default",
		"src/app_charts/base/base-test-0.0.1.tgz",
		map[string]string{
			"robot.name":      robotClusterName,
			"registry":        os.Getenv("REGISTRY"),
			"webhook.enabled": "false",
		},
	)
	// Poll (up to 40 x 3s) for the controller deployment to become ready.
	if err := backoff.Retry(
		func() error {
			return kubetest.DeploymentReady(env.Ctx(), env.Client(robotClusterName), "default", "chart-assignment-controller")
		},
		backoff.WithMaxRetries(backoff.NewConstantBackOff(3*time.Second), 40),
	); err != nil {
		t.Errorf("wait for chart-assignment-controller: %s", err)
		t.Fatalf("maybe REGISTRY or ACCESS_TOKEN is not set?")
	}
	// Individual scenarios share the environment set up above.
	env.Run(
		testCreateChartAssignment_WithChartReference_Works,
		testCreateChartAssignment_WithInlineChart_BecomesReady,
		testCreateChartAssignment_WithBadDeployment_BecomesFailed,
		testUpdateChartAssignment_WithFixedDeployment_BecomesReady,
		testUpdateChartAssignment_WithFixedJob_BecomesReady,
	)
}
explode_data.jsonl/64747
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 461 }
[ 2830, 3393, 2403, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 595, 392, 57824, 7121, 1155, 11, 595, 392, 57824, 10753, 515, 197, 197, 94992, 25, 3056, 74, 392, 57824, 72883, 2648, 515, 298, 197, 63121, 25, 12305, 28678, 675, 1583, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_CrawlerBadGet(t *testing.T) { t.Parallel() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.Method { case http.MethodHead: w.Header().Add(contentType, contentHTML) case http.MethodGet: w.WriteHeader(http.StatusInternalServerError) } })) defer ts.Close() c := New("", 1, 1, time.Millisecond*50, false, false, RobotsIgnore) if err := c.Run(ts.URL, nil); err != nil { t.Error("run - error") } }
explode_data.jsonl/2968
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 920, 33369, 17082, 1949, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 8961, 435, 20798, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInvokeNOKDuplicateNs(t *testing.T) { t.Run("1.2Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV12Capabilities(t) defer cleanup() testInvokeNOKDuplicateNs(t, l, v) }) t.Run("1.3Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV13Capabilities(t) defer cleanup() testInvokeNOKDuplicateNs(t, l, v) }) }
explode_data.jsonl/47798
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 17604, 45, 3925, 53979, 47360, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16, 13, 17, 63746, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 8810, 11, 348, 11, 21290, 1669, 6505, 60850, 1389, 3036, 14256, 2354, 53, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUnicast spins up a bootnode plus n agents, has every agent unicast a
// one-byte payload (its own index) to each of its neighbors, and waits until
// each neighbor set reports the expected delivery count and the last sender
// recorded matches the sending agent.
func TestUnicast(t *testing.T) {
	ctx := context.Background()
	n := 10
	agents := make([]*Agent, 0)
	// Stop all agents on exit. NOTE(review): only the last Stop error is
	// checked, earlier failures are overwritten — confirm this is intended.
	defer func() {
		var err error
		for _, agent := range agents {
			err = agent.Stop(ctx)
		}
		require.NoError(t, err)
	}()
	// counts tracks deliveries per payload byte; src records the most recent
	// sender. Both are shared across handler goroutines, hence the mutex.
	counts := make(map[uint8]int)
	var src string
	var mutex sync.RWMutex
	// b: broadcast handler (unused here); u: unicast handler under test.
	b := func(_ context.Context, _ uint32, _ string, _ proto.Message) {}
	u := func(_ context.Context, _ uint32, peer peerstore.PeerInfo, msg proto.Message) {
		mutex.Lock()
		defer mutex.Unlock()
		testMsg, ok := msg.(*testingpb.TestPayload)
		require.True(t, ok)
		idx := testMsg.MsgBody[0]
		if _, ok = counts[idx]; ok {
			counts[idx]++
		} else {
			counts[idx] = 1
		}
		src = peer.ID.Pretty()
	}
	// The bootnode anchors peer discovery for all other agents.
	bootnode := NewAgent(config.Config{
		Network: config.Network{Host: "127.0.0.1", Port: testutil.RandomPort()},
	}, b, u)
	require.NoError(t, bootnode.Start(ctx))
	addrs, err := bootnode.Self()
	require.NoError(t, err)
	for i := 0; i < n; i++ {
		cfg := config.Config{
			Network: config.Network{
				Host:           "127.0.0.1",
				Port:           testutil.RandomPort(),
				BootstrapNodes: []string{addrs[0].String()},
			},
		}
		agent := NewAgent(cfg, b, u)
		require.NoError(t, agent.Start(ctx))
		agents = append(agents, agent)
	}
	for i := 0; i < n; i++ {
		neighbors, err := agents[i].Neighbors(ctx)
		require.NoError(t, err)
		require.True(t, len(neighbors) > 0)
		// Send agent i's index byte to every neighbor.
		for _, neighbor := range neighbors {
			require.NoError(t, agents[i].UnicastOutbound(WitContext(ctx, Context{ChainID: 1}), neighbor, &testingpb.TestPayload{
				MsgBody: []byte{uint8(i)},
			}))
		}
		// Poll until every neighbor delivery is counted and the recorded
		// sender is agent i.
		require.NoError(t, testutil.WaitUntil(100*time.Millisecond, 20*time.Second, func() (bool, error) {
			mutex.RLock()
			defer mutex.RUnlock()
			info, err := agents[i].Info()
			if err != nil {
				return false, err
			}
			return counts[uint8(i)] == len(neighbors) && src == info.ID.Pretty(), nil
		}))
	}
}
explode_data.jsonl/64179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 824 }
[ 2830, 3393, 1806, 35446, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 9038, 1669, 220, 16, 15, 198, 197, 54875, 1669, 1281, 85288, 16810, 11, 220, 15, 340, 16867, 2915, 368, 341, 197, 2405, 1848, 1465, 198, 197, 2023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRenderNodeHookCode(t *testing.T) { t.Parallel() tests := []string{ "a\n```go\ncode\n```\nb", "<p>a</p>\ncode_replacement\n<p>b</p>\n", } opts := html.RendererOptions{ RenderNodeHook: renderHookCodeBlock, } params := TestParams{ RendererOptions: opts, extensions: parser.CommonExtensions, } doTestsParam(t, tests, params) }
explode_data.jsonl/72528
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 164 }
[ 2830, 3393, 6750, 1955, 31679, 2078, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 917, 515, 197, 197, 56693, 1699, 73594, 3346, 59, 1016, 534, 1699, 13874, 61069, 18080, 756, 197, 197, 22476, 79, 43875, 522, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadURLFailure(t *testing.T) { contentTypes := []string{ "application/pdf", "application/json", "application/tlsrpt+gzip", "application/vnd.3gpp.pic-bw-small", "application/vnd.collabio.xodocuments.document-template", "application/vnd.ctc-posml", "application/vnd.gov.sk.e-form+zip", "audio/mp4", "audio/vnd.sealedmedia.softseal.mpeg", "image/png", "image/vnd.adobe.photoshop", "message/example", "message/vnd.wfa.wsc", "model/vnd.usdz+zip", "model/vnd.valve.source.compiled-map", "multipart/signed", "text/css", "text/html", "video/quicktime", "video/JPEG", } for _, contentType := range contentTypes { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", contentType) })) defer server.Close() _, err := LoadURL(server.URL) if err != nil && err.Error() == fmt.Sprintf("invalid XML document(%s)", contentType) { return } t.Fatalf("Want invalid XML document(%s), got %v", contentType, err) } }
explode_data.jsonl/18838
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 440 }
[ 2830, 3393, 5879, 3144, 17507, 1155, 353, 8840, 836, 8, 341, 27751, 4173, 1669, 3056, 917, 515, 197, 197, 1, 5132, 47751, 756, 197, 197, 1, 5132, 8931, 756, 197, 197, 1, 5132, 5523, 4730, 81, 417, 10, 70996, 756, 197, 197, 1, 5132...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSm4Cipher_Encrypt(t *testing.T) { for _, data := range testData { c, err := NewCipher(data.key) if err != nil { t.Error(err.Error()) return } encrypter := cipher.NewCBCEncrypter(c, data.iv) result := make([]byte, len(data.out)) encrypter.CryptBlocks(result, util.PKCS5Padding(data.in, BlockSize)) fmt.Printf("encrypt result:%s\n", hex.EncodeToString(result)) /* if !bytes.Equal(result, data.out) { t.Error("encrypt result not equal expected") return } */ decrypter := cipher.NewCBCDecrypter(c, data.iv) plain := make([]byte, len(result)) decrypter.CryptBlocks(plain, result) fmt.Printf("decrypt result:%s\n", hex.EncodeToString(plain)) plain = util.PKCS5UnPadding(plain) /* if !bytes.Equal(plain, data.in) { t.Error("decrypt result not equal expected") return } */ } key_str := "abdcskdjlelrmfng" ivstr := "oisdpoekmnmfjgkl" key := []byte(key_str) iv := []byte(ivstr) c, err := NewCipher(key) if err != nil { t.Error(err.Error()) return } first := "就算打发了看见啊撒发的立刻就阿三地方" buf := util.PKCS5Padding([]byte(first), BlockSize) fmt.Printf("encrypt before:%s\n", hex.EncodeToString(buf)) ///fmt.Println(buf, len(buf)) encrypter := cipher.NewCBCEncrypter(c, iv) result := make([]byte, len(buf)) encrypter.CryptBlocks(result, buf) fmt.Printf("encrypt result:%s\n", hex.EncodeToString(result)) decrypter := cipher.NewCBCDecrypter(c, iv) plain := make([]byte, len(result)) decrypter.CryptBlocks(plain, result) fmt.Printf("decrypt result:%s\n", hex.EncodeToString(plain)) plain = util.PKCS5UnPadding(plain) fmt.Printf("decrypt result:%s\n", string(plain)) }
explode_data.jsonl/53523
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 726 }
[ 2830, 3393, 10673, 19, 79460, 93529, 3571, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 821, 1669, 2088, 67348, 341, 197, 1444, 11, 1848, 1669, 1532, 79460, 2592, 4735, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244, 6141, 3964, 6141, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUpdate(t *testing.T) { dictionary := Dictionary{"test": "this is just a test"} t.Run("existing word", func(t *testing.T) { err := dictionary.Update("test", "new test") assert.Equal(t, err, nil) }) t.Run("not exist word", func(t *testing.T) { err := dictionary.Update("foo", "bar") assert.NotEqual(t, err, nil) }) t.Run("test search", func(t *testing.T) { got, err := dictionary.Search("test") want := "new test" assert.Equal(t, err, nil) assert.Equal(t, got, want) got, err = dictionary.Search("foo") want = "" assert.NotEqual(t, err, nil) assert.Equal(t, got, want) }) }
explode_data.jsonl/54104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 4289, 1155, 353, 8840, 836, 8, 341, 2698, 3916, 1669, 10466, 4913, 1944, 788, 330, 574, 374, 1101, 264, 1273, 63159, 3244, 16708, 445, 36895, 3409, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 9859, 1669, 10997, 16689, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEventCallbacks(t *testing.T) { var ( order = &Order{} orderStateMachine = getStateMachine() prevState, afterState string ) orderStateMachine.Event("checkout").To("checkout").From("draft").Before(func(order interface{}, tx *gorm.DB) error { prevState = order.(*Order).State return nil }).After(func(order interface{}, tx *gorm.DB) error { afterState = order.(*Order).State return nil }) order.State = "draft" if err := orderStateMachine.Trigger("checkout", order, nil); err != nil { t.Errorf("should not raise any error when trigger event checkout") } if prevState != "draft" { t.Errorf("Before callback triggered after state change") } if afterState != "checkout" { t.Errorf("After callback triggered after state change") } }
explode_data.jsonl/2615
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 1556, 44461, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 42245, 338, 284, 609, 4431, 16094, 197, 42245, 94666, 257, 284, 39070, 21605, 741, 197, 50728, 1397, 11, 1283, 1397, 914, 198, 197, 692, 42245, 94666, 6904, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLookupByKeyMin(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) ctx := context.Background() st := cluster.MakeTestingClusterSettings() stopper := stop.NewStopper() defer stopper.Stop(context.Background()) cache := NewRangeDescriptorCache(st, nil, staticSize(2<<10), stopper) startToMeta2Desc := roachpb.RangeDescriptor{ StartKey: roachpb.RKeyMin, EndKey: keys.RangeMetaKey(roachpb.RKey("a")), } cache.Insert(ctx, roachpb.RangeInfo{Desc: startToMeta2Desc}) entMin := cache.GetCached(ctx, roachpb.RKeyMin, false /* inverted */) require.NotNil(t, entMin) require.NotNil(t, entMin.Desc()) require.Equal(t, startToMeta2Desc, *entMin.Desc()) entNext := cache.GetCached(ctx, roachpb.RKeyMin.Next(), false /* inverted */) require.True(t, entMin == entNext) entNext = cache.GetCached(ctx, roachpb.RKeyMin.Next().Next(), false /* inverted */) require.True(t, entMin == entNext) }
explode_data.jsonl/28184
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 34247, 67749, 6217, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 20985, 1669, 2266, 19047, 2822, 18388, 1669, 10652, 50133, 16451, 28678, 6086, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_emptyDiff(t *testing.T) { m := testModule(t, "plan-empty") p := testProvider("aws") p.DiffFn = func( info *InstanceInfo, s *InstanceState, c *ResourceConfig) (*InstanceDiff, error) { return nil, nil } ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), }) plan, diags := ctx.Plan() if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } if !ctx.State().Empty() { t.Fatal("expected empty state, got:", ctx.State()) } if len(plan.Changes.Resources) != 2 { t.Error("expected 2 resource in plan, got", len(plan.Changes.Resources)) } actions := map[string]plans.Action{} for _, res := range plan.Changes.Resources { actions[res.Addr.String()] = res.Action } expected := map[string]plans.Action{ "aws_instance.foo": plans.Create, "aws_instance.bar": plans.Create, } if !cmp.Equal(expected, actions) { t.Fatal(cmp.Diff(expected, actions)) } }
explode_data.jsonl/28639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 414 }
[ 2830, 3393, 1972, 17, 20485, 15124, 21751, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 39433, 1138, 3223, 1669, 1273, 5179, 445, 8635, 1138, 3223, 98063, 24911, 284, 2915, 1006, 197, 27043, 353, 2523, 173...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpload(t *testing.T) { distroStruct := test_distro.New() arch, err := distroStruct.GetArch(test_distro.TestArchName) if err != nil { t.Fatalf("error getting arch from distro: %v", err) } imageType, err := arch.GetImageType(test_distro.TestImageTypeName) if err != nil { t.Fatalf("error getting image type from arch: %v", err) } manifest, err := imageType.Manifest(nil, distro.ImageOptions{Size: imageType.Size(0)}, nil, nil, 0) if err != nil { t.Fatalf("error creating osbuild manifest: %v", err) } server := newTestServer(t, t.TempDir(), time.Duration(0), "/api/worker/v1") handler := server.Handler() jobID, err := server.EnqueueOSBuild(arch.Name(), &worker.OSBuildJob{Manifest: manifest}, "") require.NoError(t, err) j, token, typ, args, dynamicArgs, err := server.RequestJob(context.Background(), arch.Name(), []string{"osbuild"}, []string{""}) require.NoError(t, err) require.Equal(t, jobID, j) require.Equal(t, "osbuild", typ) require.NotNil(t, args) require.Nil(t, dynamicArgs) test.TestRoute(t, handler, false, "PUT", fmt.Sprintf("/api/worker/v1/jobs/%s/artifacts/foobar", token), `this is my artifact`, http.StatusOK, `?`) }
explode_data.jsonl/1105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 436 }
[ 2830, 3393, 13844, 1155, 353, 8840, 836, 8, 341, 2698, 15561, 9422, 1669, 1273, 814, 15561, 7121, 741, 197, 1113, 11, 1848, 1669, 1582, 299, 9422, 2234, 18727, 8623, 814, 15561, 8787, 18727, 675, 340, 743, 1848, 961, 2092, 341, 197, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHandleLogin(t *testing.T) { ac := Client{} loginDetails := creds.LoginDetails{ Username: "fdsa", Password: "secret", URL: "https://example.com/foo", } ctx := context.WithValue(context.Background(), ctxKey("login"), &loginDetails) data, err := ioutil.ReadFile("example/login.html") require.Nil(t, err) doc, err := goquery.NewDocumentFromReader(bytes.NewReader(data)) require.Nil(t, err) _, req, err := ac.handleLogin(ctx, doc) require.Nil(t, err) b, err := ioutil.ReadAll(req.Body) require.Nil(t, err) s := string(b[:]) require.Contains(t, s, "pf.username=fdsa") require.Contains(t, s, "pf.pass=secret") }
explode_data.jsonl/4321
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 6999, 6231, 1155, 353, 8840, 836, 8, 341, 81200, 1669, 8423, 16094, 75755, 7799, 1669, 73177, 32499, 7799, 515, 197, 197, 11115, 25, 330, 64834, 64, 756, 197, 197, 4876, 25, 330, 20474, 756, 197, 79055, 25, 414, 330, 2428,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTiFlashFallback(t *testing.T) { store, clean := testkit.CreateMockStore(t, mockstore.WithClusterInspector(func(c testutils.Cluster) { mockCluster := c.(*unistore.Cluster) _, _, region1 := mockstore.BootstrapWithSingleStore(c) store := c.AllocID() peer := c.AllocID() mockCluster.AddStore(store, "tiflash0", &metapb.StoreLabel{Key: "engine", Value: "tiflash"}) mockCluster.AddPeer(region1, store, peer) }), mockstore.WithStoreType(mockstore.EmbedUnistore), ) defer clean() cc := &clientConn{ alloc: arena.NewAllocator(1024), chunkAlloc: chunk.NewAllocator(), pkt: &packetIO{ bufWriter: bufio.NewWriter(bytes.NewBuffer(nil)), }, } tk := testkit.NewTestKit(t, store) tk.MustExec("use test") cc.setCtx(&TiDBContext{Session: tk.Session(), stmts: make(map[int]*TiDBStatement)}) tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int not null primary key, b int not null)") tk.MustExec("alter table t set tiflash replica 1") tb := external.GetTableByName(t, tk, "test", "t") err := domain.GetDomain(tk.Session()).DDL().UpdateTableReplicaInfo(tk.Session(), tb.Meta().ID, true) require.NoError(t, err) dml := "insert into t values" for i := 0; i < 50; i++ { dml += fmt.Sprintf("(%v, 0)", i) if i != 49 { dml += "," } } tk.MustExec(dml) tk.MustQuery("select count(*) from t").Check(testkit.Rows("50")) require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/copr/ReduceCopNextMaxBackoff", `return(true)`)) defer func() { require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/copr/ReduceCopNextMaxBackoff")) }() require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/mockstore/unistore/BatchCopRpcErrtiflash0", "return(\"tiflash0\")")) // test COM_STMT_EXECUTE ctx := context.Background() tk.MustExec("set @@tidb_allow_fallback_to_tikv='tiflash'") tk.MustExec("set @@tidb_allow_mpp=OFF") require.NoError(t, cc.handleStmtPrepare(ctx, "select sum(a) from t")) require.NoError(t, cc.handleStmtExecute(ctx, []byte{0x1, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 
0x0, 0x0})) tk.MustQuery("show warnings").Check(testkit.Rows("Error 9012 TiFlash server timeout")) // test COM_STMT_FETCH (cursor mode) require.NoError(t, cc.handleStmtExecute(ctx, []byte{0x1, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0})) require.Error(t, cc.handleStmtFetch(ctx, []byte{0x1, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0})) tk.MustExec("set @@tidb_allow_fallback_to_tikv=''") require.Error(t, cc.handleStmtExecute(ctx, []byte{0x1, 0x0, 0x0, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0})) require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/mockstore/unistore/BatchCopRpcErrtiflash0")) require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/server/fetchNextErr", "return(\"firstNext\")")) // test COM_STMT_EXECUTE (cursor mode) tk.MustExec("set @@tidb_allow_fallback_to_tikv='tiflash'") require.NoError(t, cc.handleStmtExecute(ctx, []byte{0x1, 0x0, 0x0, 0x0, 0x1, 0x1, 0x0, 0x0, 0x0})) require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/server/fetchNextErr")) // test that TiDB would not retry if the first execution already sends data to client require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/server/fetchNextErr", "return(\"secondNext\")")) tk.MustExec("set @@tidb_allow_fallback_to_tikv='tiflash'") require.Error(t, cc.handleQuery(ctx, "select * from t t1 join t t2 on t1.a = t2.a")) require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/server/fetchNextErr")) // simple TiFlash query (unary + non-streaming) tk.MustExec("set @@tidb_allow_batch_cop=0; set @@tidb_allow_mpp=0;") require.NoError(t, failpoint.Enable("tikvclient/tikvStoreSendReqResult", "return(\"requestTiFlashError\")")) testFallbackWork(t, tk, cc, "select sum(a) from t") require.NoError(t, failpoint.Disable("tikvclient/tikvStoreSendReqResult")) // TiFlash query based on batch cop (batch + streaming) tk.MustExec("set @@tidb_allow_batch_cop=1; set @@tidb_allow_mpp=0;") require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/mockstore/unistore/BatchCopRpcErrtiflash0", 
"return(\"tiflash0\")")) testFallbackWork(t, tk, cc, "select count(*) from t") require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/mockstore/unistore/BatchCopRpcErrtiflash0")) require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/mockstore/unistore/batchCopRecvTimeout", "return(true)")) testFallbackWork(t, tk, cc, "select count(*) from t") require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/mockstore/unistore/batchCopRecvTimeout")) // TiFlash MPP query (MPP + streaming) tk.MustExec("set @@tidb_allow_batch_cop=0; set @@tidb_allow_mpp=1;") require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/mockstore/unistore/mppDispatchTimeout", "return(true)")) testFallbackWork(t, tk, cc, "select * from t t1 join t t2 on t1.a = t2.a") require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/mockstore/unistore/mppDispatchTimeout")) require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/mockstore/unistore/mppRecvTimeout", "return(-1)")) testFallbackWork(t, tk, cc, "select * from t t1 join t t2 on t1.a = t2.a") require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/mockstore/unistore/mppRecvTimeout")) require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/mockstore/unistore/establishMppConnectionErr", "return(true)")) testFallbackWork(t, tk, cc, "select * from t t1 join t t2 on t1.a = t2.a") require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/mockstore/unistore/establishMppConnectionErr")) // When fallback is not set, TiFlash mpp will return the original error message require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/mockstore/unistore/mppDispatchTimeout", "return(true)")) tk.MustExec("set @@tidb_allow_fallback_to_tikv=''") tk.MustExec("set @@tidb_allow_mpp=ON") tk.MustExec("set @@tidb_enforce_mpp=ON") tk.MustExec("set @@tidb_isolation_read_engines='tiflash,tidb'") err = cc.handleQuery(ctx, "select count(*) from t") require.Error(t, err) 
require.NotEqual(t, err.Error(), tikverr.ErrTiFlashServerTimeout.Error()) }
explode_data.jsonl/73156
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2579 }
[ 2830, 3393, 45351, 23876, 87206, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 345, 197, 77333, 4314, 26124, 28678, 46230, 18552, 1337, 1273, 6031, 72883, 8, 341, 298, 77333, 28678, 1669, 272, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTransformBlackboxesToUptos(t *testing.T) { // given bbs := map[string]*Upto{} m := [][]string{ {"keyA", "value A"}, } // when TransformBlackboxesToUptos(bbs, m, BBApplication) // then assert.Equal(t, m[0][1], bbs[m[0][0]].Comment) }
explode_data.jsonl/58727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 8963, 14417, 22204, 1249, 52, 417, 436, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 2233, 1279, 1669, 2415, 14032, 8465, 52, 57991, 16094, 2109, 1669, 52931, 917, 515, 197, 197, 4913, 792, 32, 497, 330, 957, 362, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRenegotiateOnce(t *testing.T) { config := testConfig.Clone() config.Renegotiation = RenegotiateOnceAsClient test := &clientTest{ name: "RenegotiateOnce", args: []string{"-state"}, config: config, numRenegotiations: 1, } runClientTestTLS12(t, test) }
explode_data.jsonl/27722
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 34625, 65978, 6493, 12522, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 1273, 2648, 64463, 741, 25873, 2013, 268, 65978, 7101, 284, 13775, 65978, 6493, 12522, 2121, 2959, 271, 18185, 1669, 609, 2972, 2271, 515, 197, 11609, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLambdaWrongApplicationId(t *testing.T) { skill := Skill{ ApplicationID: "amzn1.echo-sdk-ams.app.000000-d0ed-0000-ad00-000000d00ebe", SkipValidation: false, Verbose: false, } skillHandler := skill.GetLambdaSkillHandler() launchRequestReader, err := os.Open("../resources/lambda_launch_request.json") if err != nil { t.Error("Error reading input file", err) } var event map[string]interface{} json.NewDecoder(launchRequestReader).Decode(&event) event["context"].(map[string]interface{})["System"].(map[string]interface{})["application"].(map[string]interface{})["applicationId"] = "wrong-app-id" _, err = skillHandler(context.TODO(), event) assert.Error(t, err) assert.Equal(t, "Request too old to continue (>150s)", err.Error()) }
explode_data.jsonl/29505
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 58266, 29185, 4988, 764, 1155, 353, 8840, 836, 8, 341, 1903, 10851, 1669, 27482, 515, 197, 78329, 915, 25, 220, 330, 309, 20308, 16, 68645, 35478, 12, 4122, 1601, 13, 15, 15, 15, 15, 15, 15, 1737, 15, 291, 12, 15, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReversiAnz27(t *testing.T) { r := NewReversiAnz() r.SetOwnPointCnt(1) if r.GetOwnPointCnt() != 1 { t.Errorf("NG") } }
explode_data.jsonl/23050
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 693, 3004, 72, 2082, 89, 17, 22, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 693, 3004, 72, 2082, 89, 741, 7000, 4202, 14182, 2609, 33747, 7, 16, 340, 743, 435, 2234, 14182, 2609, 33747, 368, 961, 220, 16, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2