text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestSortedArray_FilterEmpty(t *testing.T) { gtest.C(t, func(t *gtest.T) { array := garray.NewSortedArrayFrom(g.Slice{0, 1, 2, 3, 4, "", g.Slice{}}, gutil.ComparatorInt) t.Assert(array.FilterEmpty(), g.Slice{1, 2, 3, 4}) }) gtest.C(t, func(t *gtest.T) { array := garray.NewSortedArrayFrom(g.Slice{1, 2, 3, 4}, gutil.ComparatorInt) t.Assert(array.FilterEmpty(), g.Slice{1, 2, 3, 4}) }) }
explode_data.jsonl/67038
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 51051, 1857, 68935, 3522, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 11923, 1669, 342, 1653, 7121, 51051, 1857, 3830, 3268, 95495, 90, 15, 11, 220, 16, 11, 220, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMvFromDir(t *testing.T) { first := "sample.md" second := "second.md" attachment := "images/sample.png" src := "test" dest := "folder" testFs := newTmpFS(t) copyFile(t, testFs, filepath.Join("testdata", first), filepath.Join(src, first)) copyFile(t, testFs, filepath.Join("testdata", second), filepath.Join(src, second)) copyFile(t, testFs, filepath.Join("testdata", attachment), filepath.Join(src, attachment)) // At first, test if it fails with non-existing directory err := mv(testFs, src, dest) require.Error(t, err) err = testFs.Mkdir(dest, os.ModePerm) failOn(t, err, "create directory") err = mv(testFs, src, dest) require.NoError(t, err) require.False(t, fileExists(testFs, filepath.Join(src, first)), "First source file exists") require.False(t, fileExists(testFs, filepath.Join(src, second)), "Second source file exists") require.False(t, dirExists(testFs, src), "Source directory exists") require.True(t, fileExists(testFs, filepath.Join(dest, first)), "First file moved") require.True(t, fileExists(testFs, filepath.Join(dest, second)), "Second file moved") require.False(t, fileExists(testFs, attachment), "Source attachment path exists") require.True(t, fileExists(testFs, filepath.Join(dest, attachment)), "Attachment moved") require.False(t, dirExists(testFs, filepath.Dir(attachment)), "Source attachment dir exists") }
explode_data.jsonl/36831
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 44, 85, 3830, 6184, 1155, 353, 8840, 836, 8, 341, 42190, 1669, 330, 13611, 21324, 698, 197, 5569, 1669, 330, 5569, 21324, 698, 197, 21981, 1669, 330, 3642, 69851, 3508, 698, 41144, 1669, 330, 1944, 698, 49616, 1669, 330, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_PostgresProvider(t *testing.T) { Convey("Test postgres session provider", t, func() { opt := session.Options{ Provider: "postgres", ProviderConfig: "user=jiahuachen dbname=macaron port=5432 sslmode=disable", } Convey("Basic operation", func() { m := macaron.New() m.Use(session.Sessioner(opt)) m.Get("/", func(ctx *macaron.Context, sess session.Store) { So(sess.Set("uname", "unknwon"), ShouldBeNil) }) m.Get("/reg", func(ctx *macaron.Context, sess session.Store) { raw, err := sess.RegenerateId(ctx) So(err, ShouldBeNil) So(raw, ShouldNotBeNil) uname := raw.Get("uname") So(uname, ShouldNotBeNil) So(uname, ShouldEqual, "unknwon") }) m.Get("/get", func(ctx *macaron.Context, sess session.Store) { sid := sess.ID() So(sid, ShouldNotBeEmpty) raw, err := sess.Read(sid) So(err, ShouldBeNil) So(raw, ShouldNotBeNil) So(raw.Release(), ShouldBeNil) uname := sess.Get("uname") So(uname, ShouldNotBeNil) So(uname, ShouldEqual, "unknwon") So(sess.Delete("uname"), ShouldBeNil) So(sess.Get("uname"), ShouldBeNil) So(sess.Destory(ctx), ShouldBeNil) }) resp := httptest.NewRecorder() req, err := http.NewRequest("GET", "/", nil) So(err, ShouldBeNil) m.ServeHTTP(resp, req) cookie := resp.Header().Get("Set-Cookie") resp = httptest.NewRecorder() req, err = http.NewRequest("GET", "/reg", nil) So(err, ShouldBeNil) req.Header.Set("Cookie", cookie) m.ServeHTTP(resp, req) cookie = resp.Header().Get("Set-Cookie") resp = httptest.NewRecorder() req, err = http.NewRequest("GET", "/get", nil) So(err, ShouldBeNil) req.Header.Set("Cookie", cookie) m.ServeHTTP(resp, req) }) Convey("Regenrate empty session", func() { m := macaron.New() m.Use(session.Sessioner(opt)) m.Get("/", func(ctx *macaron.Context, sess session.Store) { raw, err := sess.RegenerateId(ctx) So(err, ShouldBeNil) So(raw, ShouldNotBeNil) So(sess.Destory(ctx), ShouldBeNil) }) resp := httptest.NewRecorder() req, err := http.NewRequest("GET", "/", nil) So(err, ShouldBeNil) req.Header.Set("Cookie", "MacaronSession=ad2c7e3cbecfcf48; 
Path=/;") m.ServeHTTP(resp, req) }) Convey("GC session", func() { m := macaron.New() opt2 := opt opt2.Gclifetime = 1 m.Use(session.Sessioner(opt2)) m.Get("/", func(sess session.Store) { So(sess.Set("uname", "unknwon"), ShouldBeNil) So(sess.ID(), ShouldNotBeEmpty) uname := sess.Get("uname") So(uname, ShouldNotBeNil) So(uname, ShouldEqual, "unknwon") So(sess.Flush(), ShouldBeNil) So(sess.Get("uname"), ShouldBeNil) time.Sleep(2 * time.Second) sess.GC() So(sess.Count(), ShouldEqual, 0) }) resp := httptest.NewRecorder() req, err := http.NewRequest("GET", "/", nil) So(err, ShouldBeNil) m.ServeHTTP(resp, req) }) }) }
explode_data.jsonl/69500
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1358 }
[ 2830, 3393, 66726, 17818, 5179, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 2271, 59826, 3797, 9109, 497, 259, 11, 2915, 368, 341, 197, 64838, 1669, 3797, 22179, 515, 298, 197, 5179, 25, 981, 330, 43070, 756, 298, 197, 5179, 2648,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnsupportedOptions(t *testing.T) { db := newTestDB(t, "people") defer closeDB(t, db) _, err := db.BeginTx(context.Background(), &TxOptions{ Isolation: LevelSerializable, ReadOnly: true, }) if err == nil { t.Fatal("expected error when using unsupported options, got nil") } }
explode_data.jsonl/15955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 41884, 3798, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 330, 16069, 1138, 16867, 3265, 3506, 1155, 11, 2927, 340, 197, 6878, 1848, 1669, 2927, 28467, 31584, 5378, 19047, 1507, 609, 31584, 3798, 515...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCopyURLToWriter(t *testing.T) { ctx := context.Background() contents := "file contents\n" // check when reading from regular HTTP server status := 0 handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if status != 0 { http.Error(w, "an error ocurred", status) return } _, err := w.Write([]byte(contents)) assert.NoError(t, err) }) ts := httptest.NewServer(handler) defer ts.Close() // test normal fetch var buf bytes.Buffer err := operations.CopyURLToWriter(ctx, ts.URL, &buf) require.NoError(t, err) assert.Equal(t, contents, buf.String()) // test fetch with error status = http.StatusNotFound buf.Reset() err = operations.CopyURLToWriter(ctx, ts.URL, &buf) require.Error(t, err) assert.Contains(t, err.Error(), "Not Found") assert.Equal(t, 0, len(buf.String())) }
explode_data.jsonl/51938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 12106, 3144, 1249, 6492, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 197, 17610, 1669, 330, 1192, 8794, 1699, 1837, 197, 322, 1779, 979, 5290, 504, 5792, 10130, 3538, 198, 23847, 1669, 220, 15, 198, 53326, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMakePortMappings(t *testing.T) { tests := []struct { container *api.Container expectedPortMappings []kubecontainer.PortMapping }{ { &api.Container{ Name: "fooContainer", Ports: []api.ContainerPort{ { Protocol: api.ProtocolTCP, ContainerPort: 80, HostPort: 8080, HostIP: "127.0.0.1", }, { Protocol: api.ProtocolTCP, ContainerPort: 443, HostPort: 4343, HostIP: "192.168.0.1", }, { Name: "foo", Protocol: api.ProtocolUDP, ContainerPort: 555, HostPort: 5555, }, { Name: "foo", // Duplicated, should be ignored. Protocol: api.ProtocolUDP, ContainerPort: 888, HostPort: 8888, }, { Protocol: api.ProtocolTCP, // Duplicated, should be ignored. ContainerPort: 80, HostPort: 8888, }, }, }, []kubecontainer.PortMapping{ { Name: "fooContainer-TCP:80", Protocol: api.ProtocolTCP, ContainerPort: 80, HostPort: 8080, HostIP: "127.0.0.1", }, { Name: "fooContainer-TCP:443", Protocol: api.ProtocolTCP, ContainerPort: 443, HostPort: 4343, HostIP: "192.168.0.1", }, { Name: "fooContainer-foo", Protocol: api.ProtocolUDP, ContainerPort: 555, HostPort: 5555, HostIP: "", }, }, }, } for i, tt := range tests { actual := makePortMappings(tt.container) if !reflect.DeepEqual(tt.expectedPortMappings, actual) { t.Errorf("%d: Expected: %#v, saw: %#v", i, tt.expectedPortMappings, actual) } } }
explode_data.jsonl/43347
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 995 }
[ 2830, 3393, 8078, 7084, 83421, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 53290, 310, 353, 2068, 33672, 198, 197, 42400, 7084, 83421, 3056, 97717, 3586, 43013, 6807, 198, 197, 59403, 197, 197, 515, 298, 197, 5, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRoute_SetPort(t *testing.T) { testCases := []struct { param string errWanted bool }{ {"8080", false}, {"", true}, } for _, tc := range testCases { route := Route{} errGot := route.SetPort(tc.param) if tc.errWanted != (errGot != nil) { t.Errorf("SetPort(%s) = %v; errWanted = %t", route.port, errGot, tc.errWanted) } if errGot == nil && route.port != tc.param { t.Errorf("SetPort(%s) != want %s", route.port, tc.param) } } }
explode_data.jsonl/67790
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 4899, 14812, 7084, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 36037, 257, 914, 198, 197, 9859, 54, 7566, 1807, 198, 197, 59403, 197, 197, 4913, 23, 15, 23, 15, 497, 895, 1583, 197, 197, 4913,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCollectionReplica_hasCollection(t *testing.T) { node := newQueryNodeMock() collectionID := UniqueID(0) initTestMeta(t, node, collectionID, 0) hasCollection := node.historical.replica.hasCollection(collectionID) assert.Equal(t, hasCollection, true) hasCollection = node.historical.replica.hasCollection(UniqueID(1)) assert.Equal(t, hasCollection, false) err := node.Stop() assert.NoError(t, err) }
explode_data.jsonl/11481
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 6482, 18327, 15317, 21778, 6482, 1155, 353, 8840, 836, 8, 341, 20831, 1669, 501, 2859, 1955, 11571, 741, 1444, 1908, 915, 1669, 28650, 915, 7, 15, 340, 28248, 2271, 12175, 1155, 11, 2436, 11, 4426, 915, 11, 220, 15, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBindPFlagStringToString(t *testing.T) { tests := []struct { Expected map[string]string Value string }{ {map[string]string{}, ""}, {map[string]string{"yo": "hi"}, "yo=hi"}, {map[string]string{"yo": "hi", "oh": "hi=there"}, "yo=hi,oh=hi=there"}, {map[string]string{"yo": ""}, "yo="}, {map[string]string{"yo": "", "oh": "hi=there"}, "yo=,oh=hi=there"}, } v := New() // create independent Viper object defaultVal := map[string]string{} v.SetDefault("stringtostring", defaultVal) for _, testValue := range tests { flagSet := pflag.NewFlagSet("test", pflag.ContinueOnError) flagSet.StringToString("stringtostring", testValue.Expected, "test") for _, changed := range []bool{true, false} { flagSet.VisitAll(func(f *pflag.Flag) { f.Value.Set(testValue.Value) f.Changed = changed }) err := v.BindPFlags(flagSet) if err != nil { t.Fatalf("error binding flag set, %v", err) } type TestMap struct { StringToString map[string]string } val := &TestMap{} if err := v.Unmarshal(val); err != nil { t.Fatalf("%+#v cannot unmarshal: %s", testValue.Value, err) } if changed { assert.Equal(t, testValue.Expected, val.StringToString) } else { assert.Equal(t, defaultVal, val.StringToString) } } } }
explode_data.jsonl/9897
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 541 }
[ 2830, 3393, 9950, 47, 12135, 703, 5870, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 197, 18896, 2415, 14032, 30953, 198, 197, 47399, 262, 914, 198, 197, 59403, 197, 197, 90, 2186, 14032, 30953, 22655, 77496, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCollectNetworkStats(t *testing.T) { dummyProcDir, err := newTempFolder("test-find-docker-networks") assert.Nil(t, err) defer dummyProcDir.removeAll() // clean up config.Datadog.SetDefault("container_proc_root", dummyProcDir.RootPath) for _, tc := range []struct { pid int name string dev string networks map[string]string stat ContainerNetStats summedStat *InterfaceNetStats }{ { pid: 1245, name: "one-container-interface", dev: detab(` Inter-| Receive | Transmit face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed eth0: 1345 10 0 0 0 0 0 0 0 0 0 0 0 0 0 0 lo: 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 `), networks: map[string]string{ "eth0": "bridge", }, stat: ContainerNetStats{ &InterfaceNetStats{ NetworkName: "bridge", BytesRcvd: 1345, PacketsRcvd: 10, BytesSent: 0, PacketsSent: 0, }, }, summedStat: &InterfaceNetStats{ BytesRcvd: 1345, PacketsRcvd: 10, BytesSent: 0, PacketsSent: 0, }, }, // Multiple docker networks { pid: 5153, name: "multiple-networks", dev: detab(` Inter-| Receive | Transmit face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed lo: 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 eth0: 648 8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 eth1: 1478 19 0 0 0 0 0 0 182 3 0 0 0 0 0 0`), networks: map[string]string{ "eth0": "bridge", "eth1": "test", }, stat: ContainerNetStats{ &InterfaceNetStats{ NetworkName: "bridge", BytesRcvd: 648, PacketsRcvd: 8, BytesSent: 0, PacketsSent: 0, }, &InterfaceNetStats{ NetworkName: "test", BytesRcvd: 1478, PacketsRcvd: 19, BytesSent: 182, PacketsSent: 3, }, }, summedStat: &InterfaceNetStats{ BytesRcvd: 2126, PacketsRcvd: 27, BytesSent: 182, PacketsSent: 3, }, }, // Fallback to interface name if network not in map { pid: 5155, name: "multiple-ifaces-missing-network", dev: detab(` Inter-| Receive | Transmit face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed lo: 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 eth0: 648 8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 eth1: 1478 19 0 0 0 0 0 0 182 3 0 0 0 0 0 0`), networks: map[string]string{ "eth1": "test", }, stat: ContainerNetStats{ &InterfaceNetStats{ NetworkName: "eth0", BytesRcvd: 648, PacketsRcvd: 8, BytesSent: 0, PacketsSent: 0, }, &InterfaceNetStats{ NetworkName: "test", BytesRcvd: 1478, PacketsRcvd: 19, BytesSent: 182, PacketsSent: 3, }, }, summedStat: &InterfaceNetStats{ BytesRcvd: 2126, PacketsRcvd: 27, BytesSent: 182, PacketsSent: 3, }, }, // Dumb error case to make sure we don't panic, fallback to interface name { pid: 5157, name: "nil-network-map", dev: detab(` Inter-| Receive | Transmit face |bytes packets errs drop fifo frame compressed multicast|bytes packets errs drop fifo colls carrier compressed eth0: 1111 2 0 0 0 0 0 0 1024 80 0 0 0 0 0 0 lo: 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 `), networks: nil, stat: ContainerNetStats{ &InterfaceNetStats{ NetworkName: "eth0", BytesRcvd: 1111, PacketsRcvd: 2, BytesSent: 1024, PacketsSent: 80, }, }, summedStat: &InterfaceNetStats{ BytesRcvd: 1111, PacketsRcvd: 2, BytesSent: 1024, PacketsSent: 80, }, }, } { t.Run("", func(t *testing.T) { err = dummyProcDir.add(filepath.Join(strconv.Itoa(int(tc.pid)), "net", "dev"), tc.dev) assert.NoError(t, err) stat, err := CollectNetworkStats(tc.pid, tc.networks) assert.NoError(t, err) assert.Equal(t, tc.stat, stat) assert.Equal(t, tc.summedStat, stat.SumInterfaces()) }) } }
explode_data.jsonl/19611
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3181 }
[ 2830, 3393, 47504, 12320, 16635, 1155, 353, 8840, 836, 8, 341, 2698, 8574, 24508, 6184, 11, 1848, 1669, 501, 12151, 13682, 445, 1944, 75784, 1737, 13659, 56732, 82, 1138, 6948, 59678, 1155, 11, 1848, 340, 16867, 17292, 24508, 6184, 41159,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_grpc(t *testing.T) { process := helperProcess("test-grpc") c := NewClient(&ClientConfig{ Cmd: process, HandshakeConfig: testHandshake, Plugins: testGRPCPluginMap, AllowedProtocols: []Protocol{ProtocolGRPC}, }) defer c.Kill() if _, err := c.Start(); err != nil { t.Fatalf("err: %s", err) } if v := c.Protocol(); v != ProtocolGRPC { t.Fatalf("bad: %s", v) } // Grab the RPC client client, err := c.Client() if err != nil { t.Fatalf("err should be nil, got %s", err) } // Grab the impl raw, err := client.Dispense("test") if err != nil { t.Fatalf("err should be nil, got %s", err) } impl, ok := raw.(testInterface) if !ok { t.Fatalf("bad: %#v", raw) } result := impl.Double(21) if result != 42 { t.Fatalf("bad: %#v", result) } // Kill it c.Kill() // Test that it knows it is exited if !c.Exited() { t.Fatal("should say client has exited") } if c.killed() { t.Fatal("process failed to exit gracefully") } }
explode_data.jsonl/57837
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 428 }
[ 2830, 3393, 2959, 15682, 3992, 1155, 353, 8840, 836, 8, 341, 53314, 1669, 13137, 7423, 445, 1944, 24321, 3992, 1138, 1444, 1669, 1532, 2959, 2099, 2959, 2648, 515, 197, 6258, 2277, 25, 1060, 1882, 345, 197, 197, 2314, 29661, 2648, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestKeeperDB_BatchDeleteUpkeepsForJob(t *testing.T) { t.Parallel() store, orm, cleanup := setupKeeperDB(t) defer cleanup() db := store.DB ethKeyStore := cltest.NewKeyStore(t, store.DB).Eth() registry, job := cltest.MustInsertKeeperRegistry(t, store, ethKeyStore) for i := int64(0); i < 3; i++ { cltest.MustInsertUpkeepForRegistry(t, store, registry) } cltest.AssertCount(t, db, &keeper.UpkeepRegistration{}, 3) _, err := orm.BatchDeleteUpkeepsForJob(context.Background(), job.ID, []int64{0, 2}) require.NoError(t, err) cltest.AssertCount(t, db, &keeper.UpkeepRegistration{}, 1) var remainingUpkeep keeper.UpkeepRegistration err = store.DB.First(&remainingUpkeep).Error require.NoError(t, err) require.Equal(t, int64(1), remainingUpkeep.UpkeepID) }
explode_data.jsonl/27008
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 77233, 3506, 1668, 754, 6435, 2324, 440, 7124, 2461, 12245, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 57279, 11, 67602, 11, 21290, 1669, 6505, 77233, 3506, 1155, 340, 16867, 21290, 741, 20939, 1669, 3553, 22537, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNoMatchingJailedValidators(t *testing.T) { codec := newStakingTestCodec() opAddr1, err := sdk.AccAddressFromBech32("cosmosaccaddr1chchjxgackcqkn9fqgpsc4n9xamx4flgndapzg") require.NoError(t, err) opAddr2, err := sdk.AccAddressFromBech32("cosmosaccaddr1y2z20pwqu5qpclque3pqkguruvheum2djtzjw3") require.NoError(t, err) validators := []stake.Validator{ stake.Validator{Owner: opAddr1, Revoked: true}, } raw, err := codec.MarshalJSON(validators) require.NoError(t, err) ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) w.Write(raw) })) defer ts.Close() clients := []string{ts.URL} cfg := config.Config{ Filters: config.Filters{ Validators: []config.ValidatorFilter{ config.ValidatorFilter{Operator: opAddr2.String()}, }, }, Network: config.NetworkConfig{Clients: clients}, } jvm := newTestJailedValidatorMonitor(t, cfg) resp, id, err := jvm.Exec() require.Error(t, err) require.Nil(t, resp) require.Nil(t, id) }
explode_data.jsonl/78972
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 441 }
[ 2830, 3393, 2753, 64430, 41, 5687, 31748, 1155, 353, 8840, 836, 8, 341, 43343, 66, 1669, 501, 623, 1765, 2271, 36913, 2822, 39703, 13986, 16, 11, 1848, 1669, 45402, 77538, 4286, 3830, 3430, 331, 18, 17, 445, 9407, 8631, 4475, 6214, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPlainGenomeWriter_WriteNetworkNode(t *testing.T) { nodeId, traitId, nodeType, neuronType := 1, 10, network.SensorNode, network.InputNeuron nodeStr := fmt.Sprintf("%d %d %d %d SigmoidSteepenedActivation", nodeId, traitId, nodeType, neuronType) trait := neat.NewTrait() trait.Id = 10 node := network.NewNNode(nodeId, neuronType) node.Trait = trait outBuffer := bytes.NewBufferString("") wr := plainGenomeWriter{w: bufio.NewWriter(outBuffer)} err := wr.writeNetworkNode(node) require.NoError(t, err, "failed to write network node") err = wr.w.Flush() require.NoError(t, err) outStr := outBuffer.String() assert.Equal(t, nodeStr, outStr, "Node serialization failed") }
explode_data.jsonl/18932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 26982, 9967, 635, 6492, 31825, 12320, 1955, 1155, 353, 8840, 836, 8, 341, 20831, 764, 11, 17567, 764, 11, 2436, 929, 11, 48284, 929, 1669, 220, 16, 11, 220, 16, 15, 11, 3922, 808, 3805, 1955, 11, 3922, 16130, 8813, 36090...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAllowAll(t *testing.T) { t.Parallel() cases := []string{ "", "# comment", "User-Agent: * \nAllow: /", "User-Agent: * \nDisallow: ", } for i, input := range cases { t.Run(strconv.Itoa(i), func(t *testing.T) { r, err := FromString(input) require.NoError(t, err) expectAll(t, r, true) }) } }
explode_data.jsonl/51678
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 18605, 2403, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1444, 2264, 1669, 3056, 917, 515, 197, 197, 39680, 197, 197, 57676, 3980, 756, 197, 197, 1, 1474, 45118, 25, 353, 1124, 77, 18605, 25, 608, 756, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFailureBadParentSpanID(t *testing.T) { badParentSpanIDESSpan, err := loadESSpanFixture(1) require.NoError(t, err) badParentSpanIDESSpan.ParentSpanID = "zz" failingSpanTransformAnyMsg(t, &badParentSpanIDESSpan) }
explode_data.jsonl/5154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 17507, 17082, 8387, 12485, 915, 1155, 353, 8840, 836, 8, 341, 2233, 329, 8387, 12485, 915, 9996, 848, 11, 1848, 1669, 2795, 9996, 848, 18930, 7, 16, 340, 17957, 35699, 1155, 11, 1848, 340, 2233, 329, 8387, 12485, 915, 9996...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExportValue(t *testing.T) { t.Parallel() test := func(tt exportTest) { t.Run(tt.label, func(t *testing.T) { t.Parallel() actual := exportValueWithInterpreter(tt.value, nil) assert.Equal(t, tt.expected, actual) if !tt.skipReverse { original := importValue(actual) assert.Equal(t, tt.value, original) } }) } for _, tt := range exportTests { test(tt) } }
explode_data.jsonl/4620
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 16894, 1130, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 18185, 1669, 2915, 47152, 7485, 2271, 8, 1476, 197, 3244, 16708, 47152, 2909, 11, 2915, 1155, 353, 8840, 836, 8, 1476, 298, 3244, 41288, 7957, 2822, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReceiveOnUnixDomainSocket(t *testing.T) { if runtime.GOOS == "windows" { t.Skip("skipping test on windows") } tt, err := obsreporttest.SetupTelemetry() require.NoError(t, err) t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) }) socketName := tempSocketName(t) gss := &GRPCServerSettings{ NetAddr: confignet.NetAddr{ Endpoint: socketName, Transport: "unix", }, } ln, err := gss.ToListener() assert.NoError(t, err) opts, err := gss.ToServerOption(componenttest.NewNopHost(), componenttest.NewNopTelemetrySettings()) assert.NoError(t, err) s := grpc.NewServer(opts...) otlpgrpc.RegisterTracesServer(s, &grpcTraceServer{}) go func() { _ = s.Serve(ln) }() gcs := &GRPCClientSettings{ Endpoint: "unix://" + ln.Addr().String(), TLSSetting: configtls.TLSClientSetting{ Insecure: true, }, } clientOpts, errClient := gcs.ToDialOptions(componenttest.NewNopHost(), tt.TelemetrySettings) assert.NoError(t, errClient) grpcClientConn, errDial := grpc.Dial(gcs.Endpoint, clientOpts...) assert.NoError(t, errDial) client := otlpgrpc.NewTracesClient(grpcClientConn) ctx, cancelFunc := context.WithTimeout(context.Background(), 2*time.Second) resp, errResp := client.Export(ctx, otlpgrpc.NewTracesRequest(), grpc.WaitForReady(true)) assert.NoError(t, errResp) assert.NotNil(t, resp) cancelFunc() s.Stop() }
explode_data.jsonl/80334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 545 }
[ 2830, 3393, 14742, 1925, 55832, 13636, 10286, 1155, 353, 8840, 836, 8, 341, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 389, 11030, 1138, 197, 532, 3244, 83, 11, 1848, 1669, 7448, 11736, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_destroyJobs(t *testing.T) { ctx := context.Background() job := &batchv1.Job{ ObjectMeta: metav1.ObjectMeta{ Name: "test", Namespace: "ns", Labels: map[string]string{model.StackNameLabel: "stack-test"}, }, } client := fake.NewSimpleClientset(job) var tests = []struct { name string stack *model.Stack expectedDeployments int }{ { name: "not destroy anything", stack: &model.Stack{ Namespace: "ns", Name: "stack-test", Services: map[string]*model.Service{ "test": { Image: "test_image", RestartPolicy: corev1.RestartPolicyNever, }, }, }, expectedDeployments: 1, }, { name: "destroy dep not in stack", stack: &model.Stack{ Namespace: "ns", Name: "stack-test", Services: map[string]*model.Service{ "test-2": { Image: "test_image", RestartPolicy: corev1.RestartPolicyNever, }, }, }, expectedDeployments: 0, }, { name: "destroy dep which is not deployment anymore", stack: &model.Stack{ Namespace: "ns", Name: "stack-test", Services: map[string]*model.Service{ "test": { Image: "test_image", RestartPolicy: corev1.RestartPolicyAlways, }, }, }, expectedDeployments: 0, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { spinner := utils.NewSpinner("testing") err := destroyJobs(ctx, spinner, tt.stack, client) if err != nil { t.Fatal("Not destroyed correctly") } jobsList, err := jobs.List(ctx, "ns", tt.stack.GetLabelSelector(), client) if err != nil { t.Fatal("could not retrieve list correctly") } if len(jobsList) != tt.expectedDeployments { t.Fatal("Not destroyed correctly") } }) } }
explode_data.jsonl/22838
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 854 }
[ 2830, 3393, 18066, 40667, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 68577, 1669, 609, 14049, 85, 16, 45293, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 414, 330, 1944, 756, 298, 90823, 25, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReadPidFileWithErrors(t *testing.T) { ctx := newTestContext("TestReadPidFileWithErrors", t) var err error if _, err = util.ReadPidFile(ctx); err == nil { t.Fatal("error expected in reading pid file") } gotil.WriteStringToFile("hello", util.PidFilePath(ctx)) if _, err = util.ReadPidFile(ctx); err == nil { t.Fatal("error expected in reading pid file") } }
explode_data.jsonl/15040
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 4418, 32339, 1703, 2354, 13877, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 501, 2271, 1972, 445, 2271, 4418, 32339, 1703, 2354, 13877, 497, 259, 692, 2405, 1848, 1465, 198, 743, 8358, 1848, 284, 4094, 6503, 32339, 1703, 7502...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRegistry_Ping(t *testing.T) { v2Implemented := true ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet || r.URL.Path != "/v2/" { t.Errorf("unexpected access: %s %s", r.Method, r.URL) w.WriteHeader(http.StatusNotFound) return } if v2Implemented { w.WriteHeader(http.StatusOK) } else { w.WriteHeader(http.StatusNotFound) } })) defer ts.Close() uri, err := url.Parse(ts.URL) if err != nil { t.Fatalf("invalid test http server: %v", err) } reg, err := NewRegistry(uri.Host) if err != nil { t.Fatalf("NewRegistry() error = %v", err) } reg.PlainHTTP = true ctx := context.Background() if err := reg.Ping(ctx); err != nil { t.Errorf("Registry.Ping() error = %v", err) } v2Implemented = false if err := reg.Ping(ctx); err == nil { t.Errorf("Registry.Ping() error = %v, wantErr %v", err, errdef.ErrNotFound) } }
explode_data.jsonl/82487
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 15603, 1088, 287, 1155, 353, 8840, 836, 8, 341, 5195, 17, 18300, 1669, 830, 198, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 435, 20798, 961, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUnique(t *testing.T) { store, clean := realtikvtest.CreateMockStoreAndSetup(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk1 := testkit.NewTestKit(t, store) tk1.MustExec("use test") tk2 := testkit.NewTestKit(t, store) tk2.MustExec("use test") tk.MustExec("set @@tidb_disable_txn_auto_retry = 0") tk1.MustExec("set @@tidb_disable_txn_auto_retry = 0") tk.MustExec(`CREATE TABLE test ( id int(11) UNSIGNED NOT NULL AUTO_INCREMENT, val int UNIQUE, PRIMARY KEY (id)); `) tk.MustExec("begin;") tk.MustExec("insert into test(id, val) values(1, 1);") tk1.MustExec("begin;") tk1.MustExec("insert into test(id, val) values(2, 2);") tk2.MustExec("begin;") tk2.MustExec("insert into test(id, val) values(1, 2);") tk2.MustExec("commit;") _, err := tk.Exec("commit") require.Error(t, err) // Check error type and error message require.True(t, terror.ErrorEqual(err, kv.ErrKeyExists), fmt.Sprintf("err %v", err)) require.Equal(t, "previous statement: insert into test(id, val) values(1, 1);: [kv:1062]Duplicate entry '1' for key 'PRIMARY'", err.Error()) _, err = tk1.Exec("commit") require.Error(t, err) require.True(t, terror.ErrorEqual(err, kv.ErrKeyExists), fmt.Sprintf("err %v", err)) require.Equal(t, "previous statement: insert into test(id, val) values(2, 2);: [kv:1062]Duplicate entry '2' for key 'val'", err.Error()) // Test for https://github.com/pingcap/tidb/issues/463 tk.MustExec("drop table test;") tk.MustExec(`CREATE TABLE test ( id int(11) UNSIGNED NOT NULL AUTO_INCREMENT, val int UNIQUE, PRIMARY KEY (id) );`) tk.MustExec("insert into test(id, val) values(1, 1);") _, err = tk.Exec("insert into test(id, val) values(2, 1);") require.Error(t, err) tk.MustExec("insert into test(id, val) values(2, 2);") tk.MustExec("begin;") tk.MustExec("insert into test(id, val) values(3, 3);") _, err = tk.Exec("insert into test(id, val) values(4, 3);") require.Error(t, err) tk.MustExec("insert into test(id, val) values(4, 4);") tk.MustExec("commit;") 
tk1.MustExec("begin;") tk1.MustExec("insert into test(id, val) values(5, 6);") tk.MustExec("begin;") tk.MustExec("insert into test(id, val) values(20, 6);") tk.MustExec("commit;") _, _ = tk1.Exec("commit") tk1.MustExec("insert into test(id, val) values(5, 5);") tk.MustExec("drop table test;") tk.MustExec(`CREATE TABLE test ( id int(11) UNSIGNED NOT NULL AUTO_INCREMENT, val1 int UNIQUE, val2 int UNIQUE, PRIMARY KEY (id) );`) tk.MustExec("insert into test(id, val1, val2) values(1, 1, 1);") tk.MustExec("insert into test(id, val1, val2) values(2, 2, 2);") _, _ = tk.Exec("update test set val1 = 3, val2 = 2 where id = 1;") tk.MustExec("insert into test(id, val1, val2) values(3, 3, 3);") }
explode_data.jsonl/5791
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1147 }
[ 2830, 3393, 22811, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1931, 83, 1579, 85, 1944, 7251, 11571, 6093, 3036, 21821, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEntry_PageUpDown(t *testing.T) { t.Run("single line", func(*testing.T) { e, window := setupImageTest(t, false) defer teardownImageTest(window) c := window.Canvas() c.Focus(e) e.SetText("Testing") test.AssertRendersToMarkup(t, "entry/select_initial.xml", c) // move right, press & hold shift and pagedown typeKeys(e, fyne.KeyRight, keyShiftLeftDown, fyne.KeyPageDown) assert.Equal(t, "esting", e.SelectedText()) assert.Equal(t, 0, e.CursorRow) assert.Equal(t, 7, e.CursorColumn) test.AssertRendersToMarkup(t, "entry/select_single_line_shift_pagedown.xml", c) // while shift is held press pageup typeKeys(e, fyne.KeyPageUp) assert.Equal(t, "T", e.SelectedText()) assert.Equal(t, 0, e.CursorRow) assert.Equal(t, 0, e.CursorColumn) test.AssertRendersToMarkup(t, "entry/select_single_line_shift_pageup.xml", c) // release shift and press pagedown typeKeys(e, keyShiftLeftUp, fyne.KeyPageDown) assert.Equal(t, "", e.SelectedText()) assert.Equal(t, 0, e.CursorRow) assert.Equal(t, 7, e.CursorColumn) test.AssertRendersToMarkup(t, "entry/select_single_line_pagedown.xml", c) }) t.Run("page down single line", func(*testing.T) { e, window := setupImageTest(t, true) defer teardownImageTest(window) c := window.Canvas() c.Focus(e) e.SetText("Testing\nTesting\nTesting") test.AssertRendersToMarkup(t, "entry/select_multi_line_initial.xml", c) // move right, press & hold shift and pagedown typeKeys(e, fyne.KeyRight, keyShiftLeftDown, fyne.KeyPageDown) assert.Equal(t, "esting\nTesting\nTesting", e.SelectedText()) assert.Equal(t, 2, e.CursorRow) assert.Equal(t, 7, e.CursorColumn) test.AssertRendersToMarkup(t, "entry/select_multi_line_shift_pagedown.xml", c) // while shift is held press pageup typeKeys(e, fyne.KeyPageUp) assert.Equal(t, "T", e.SelectedText()) assert.Equal(t, 0, e.CursorRow) assert.Equal(t, 0, e.CursorColumn) test.AssertRendersToMarkup(t, "entry/select_multi_line_shift_pageup.xml", c) // release shift and press pagedown typeKeys(e, keyShiftLeftUp, fyne.KeyPageDown) 
assert.Equal(t, "", e.SelectedText()) assert.Equal(t, 2, e.CursorRow) assert.Equal(t, 7, e.CursorColumn) test.AssertRendersToMarkup(t, "entry/select_multi_line_pagedown.xml", c) }) }
explode_data.jsonl/12348
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 961 }
[ 2830, 3393, 5874, 51540, 33801, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 15338, 1555, 497, 2915, 4071, 8840, 836, 8, 341, 197, 7727, 11, 3241, 1669, 6505, 1906, 2271, 1155, 11, 895, 340, 197, 16867, 49304, 1906, 2271, 15906, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_isStraight(t *testing.T) { type args struct { hand deck.Hand } tests := []struct { name string args args want bool }{ { name: "Should be a straight flush", args: args{deck.Hand{ deck.Card{IsRoyal: true, RoyalType: deck.Royal("king")}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("queen")}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("jack")}, deck.Card{Value: 10}, deck.Card{Value: 9}, }}, want: true, }, { name: "Should be a straight flush 2", args: args{deck.Hand{ deck.Card{Value: 8}, deck.Card{Value: 9}, deck.Card{Value: 10}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("jack")}, deck.Card{IsRoyal: true, RoyalType: deck.Royal("queen")}, }}, want: true, }, { name: "Should be a straight flush 3", args: args{deck.Hand{ deck.Card{Value: 6}, deck.Card{Value: 7}, deck.Card{Value: 8}, deck.Card{Value: 9}, deck.Card{Value: 10}, }}, want: true, }, { name: "Should not be a straight flush", args: args{deck.Hand{ deck.Card{Value: 5}, deck.Card{Value: 7}, deck.Card{Value: 8}, deck.Card{Value: 9}, deck.Card{Value: 10}, }}, want: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := isStraight(tt.args.hand); got != tt.want { t.Errorf("isFlush() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/7187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 664 }
[ 2830, 3393, 6892, 88854, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 9598, 437, 9530, 35308, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 1807, 198, 197, 59403, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDismissDeviceChangeNotifications(t *testing.T) { tc := setupTest(t, "ddcn") mctx := libkb.NewMetaContextForTest(*tc) dismisser := &libkb.FakeGregorState{} exceptedDeviceID := "active-device-id" state := &FakeGregorState{ items: []gregor.Item{ buildGregorItem("anything.else", "a-device-id", "not-dismissable-1"), buildGregorItem("device.new", exceptedDeviceID, "not-dismissable-2"), buildGregorItem("device.new", "a-device-id", "dismissable-1"), buildGregorItem("device.revoked", "another-device-id", "dismissable-2"), }, } expectedDismissedIDs := []gregor.MsgID{ gregor1.MsgID("dismissable-1"), gregor1.MsgID("dismissable-2"), } require.Equal(t, []gregor.MsgID(nil), dismisser.PeekDismissedIDs()) err := service.LoopAndDismissForDeviceChangeNotifications(mctx, dismisser, state, exceptedDeviceID) require.NoError(t, err) require.Equal(t, expectedDismissedIDs, dismisser.PeekDismissedIDs()) }
explode_data.jsonl/42657
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 51702, 6985, 4072, 34736, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 6505, 2271, 1155, 11, 330, 631, 14271, 1138, 2109, 3773, 1669, 3051, 21310, 7121, 12175, 1972, 2461, 2271, 4071, 10413, 692, 2698, 2142, 73233, 1669, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoleStoreCache(t *testing.T) { fakeRole := model.Role{Id: "123", Name: "role-name"} t.Run("first call not cached, second cached and returning same data", func(t *testing.T) { mockStore := getMockStore() mockCacheProvider := getMockCacheProvider() cachedStore := NewLocalCacheLayer(mockStore, nil, nil, mockCacheProvider) role, err := cachedStore.Role().GetByName("role-name") require.Nil(t, err) assert.Equal(t, role, &fakeRole) mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 1) require.Nil(t, err) assert.Equal(t, role, &fakeRole) cachedStore.Role().GetByName("role-name") mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 1) }) t.Run("first call not cached, save, and then not cached again", func(t *testing.T) { mockStore := getMockStore() mockCacheProvider := getMockCacheProvider() cachedStore := NewLocalCacheLayer(mockStore, nil, nil, mockCacheProvider) cachedStore.Role().GetByName("role-name") mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 1) cachedStore.Role().Save(&fakeRole) cachedStore.Role().GetByName("role-name") mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 2) }) t.Run("first call not cached, delete, and then not cached again", func(t *testing.T) { mockStore := getMockStore() mockCacheProvider := getMockCacheProvider() cachedStore := NewLocalCacheLayer(mockStore, nil, nil, mockCacheProvider) cachedStore.Role().GetByName("role-name") mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 1) cachedStore.Role().Delete("123") cachedStore.Role().GetByName("role-name") mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 2) }) t.Run("first call not cached, permanent delete all, and then not cached again", func(t *testing.T) { mockStore := getMockStore() mockCacheProvider := getMockCacheProvider() cachedStore := NewLocalCacheLayer(mockStore, nil, nil, mockCacheProvider) cachedStore.Role().GetByName("role-name") 
mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 1) cachedStore.Role().PermanentDeleteAll() cachedStore.Role().GetByName("role-name") mockStore.Role().(*mocks.RoleStore).AssertNumberOfCalls(t, "GetByName", 2) }) }
explode_data.jsonl/36703
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 853 }
[ 2830, 3393, 9030, 6093, 8233, 1155, 353, 8840, 836, 8, 341, 1166, 726, 9030, 1669, 1614, 35955, 90, 764, 25, 330, 16, 17, 18, 497, 3988, 25, 330, 5778, 11494, 63159, 3244, 16708, 445, 3896, 1618, 537, 20579, 11, 2086, 20579, 323, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMuxerError(t *testing.T) { _, err := MuxerConstructor(func() (mux.Transport, error) { return nil, nil }) if err != nil { t.Fatal(err) } }
explode_data.jsonl/23665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 65 }
[ 2830, 3393, 44, 2200, 261, 1454, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 386, 2200, 261, 13288, 18552, 368, 320, 75066, 87669, 11, 1465, 8, 314, 470, 2092, 11, 2092, 2751, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGDPRConsentRequired(t *testing.T) { rr := doPost(`{"gdpr":1,"bidders":["appnexus", "pubmatic"]}`, nil, false, nil) assert.Equal(t, rr.Header().Get("Content-Type"), "text/plain; charset=utf-8") assert.Equal(t, http.StatusBadRequest, rr.Code) assert.Equal(t, "gdpr_consent is required if gdpr=1\n", rr.Body.String()) }
explode_data.jsonl/21686
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 40849, 6480, 15220, 306, 8164, 1155, 353, 8840, 836, 8, 341, 197, 634, 1669, 65156, 5809, 4913, 28584, 649, 788, 16, 1335, 65, 99129, 36799, 676, 77, 23666, 497, 330, 9585, 37244, 1341, 28350, 2092, 11, 895, 11, 2092, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCaptureAuthorization(t *testing.T) { c, _ := NewClient(testClientID, testSecret, APIBaseSandBox) c.GetAccessToken() _, err := c.CaptureAuthorization(testAuthID, &Amount{Total: "200", Currency: "USD"}, true) if err == nil { t.Errorf("Auth is expired, 400 error must be returned") } }
explode_data.jsonl/18443
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 27429, 18124, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 1532, 2959, 8623, 2959, 915, 11, 1273, 19773, 11, 5333, 3978, 47044, 1611, 340, 1444, 2234, 37649, 2822, 197, 6878, 1848, 1669, 272, 727, 11850, 18124, 8623, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreateDisableValidation(t *testing.T) { Given(t). Path("baddir"). When(). Create("--validate=false"). Then(). And(func(app *Application) { _, err := RunCli("app", "create", app.Name, "--upsert", "--validate=false", "--repo", RepoURL(RepoURLTypeFile), "--path", "baddir2", "--project", app.Spec.Project, "--dest-server", common.KubernetesInternalAPIServerAddr, "--dest-namespace", DeploymentNamespace()) assert.NoError(t, err) }). When(). AppSet("--path", "baddir3", "--validate=false") }
explode_data.jsonl/66707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 4021, 25479, 13799, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 69640, 445, 65, 44525, 38609, 197, 197, 4498, 25829, 197, 75569, 21549, 7067, 12219, 38609, 197, 197, 12209, 25829, 197, 197, 3036, 18552, 11462, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFragmentation(t *testing.T) { testutils.WithTestServer(t, nil, func(t testing.TB, ts *testutils.TestServer) { ts.Register(raw.Wrap(newTestHandler(t)), "echo") arg2 := make([]byte, MaxFramePayloadSize*2) for i := 0; i < len(arg2); i++ { arg2[i] = byte('a' + (i % 10)) } arg3 := make([]byte, MaxFramePayloadSize*3) for i := 0; i < len(arg3); i++ { arg3[i] = byte('A' + (i % 10)) } ctx, cancel := NewContext(time.Second) defer cancel() respArg2, respArg3, _, err := raw.Call(ctx, ts.Server(), ts.HostPort(), ts.ServiceName(), "echo", arg2, arg3) require.NoError(t, err) assert.Equal(t, arg2, respArg2) assert.Equal(t, arg3, respArg3) calls := relaytest.NewMockStats() calls.Add(ts.ServiceName(), ts.ServiceName(), "echo").Succeeded().End() ts.AssertRelayStats(calls) }) }
explode_data.jsonl/78189
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 9488, 367, 1155, 353, 8840, 836, 8, 341, 18185, 6031, 26124, 2271, 5475, 1155, 11, 2092, 11, 2915, 1155, 7497, 836, 33, 11, 10591, 353, 1944, 6031, 8787, 5475, 8, 341, 197, 57441, 19983, 22460, 38968, 1755, 2271, 3050, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCmdWithFeeHappyPath(t *testing.T) { sendMsg := &cash.SendMsg{ Metadata: &weave.Metadata{Schema: 1}, Source: fromHex(t, "b1ca7e78f74423ae01da3b51e676934d9105f282"), Destination: fromHex(t, "E28AE9A6EB94FC88B73EB7CBD6B87BF93EB9BEF0"), Amount: coin.NewCoinp(5, 0, "DOGE"), Memo: "a memo", } sendTx := &blogapp.Tx{ Sum: &blogapp.Tx_CashSendMsg{ CashSendMsg: sendMsg, }, } var input bytes.Buffer if _, err := writeTx(&input, sendTx); err != nil { t.Fatalf("cannot serialize transaction: %s", err) } var output bytes.Buffer args := []string{ "-payer", "b1ca7e78f74423ae01da3b51e676934d9105f282", "-amount", "5 DOGE", } if err := cmdWithFee(&input, &output, args); err != nil { t.Fatalf("cannot attach a fee to transaction: %s", err) } tx, _, err := readTx(&output) if err != nil { t.Fatalf("cannot unmarshal created transaction: %s", err) } assert.Equal(t, fromHex(t, "b1ca7e78f74423ae01da3b51e676934d9105f282"), []byte(tx.Fees.Payer)) assert.Equal(t, coin.NewCoinp(5, 0, "DOGE"), tx.Fees.Fees) txmsg, err := tx.GetMsg() if err != nil { t.Fatalf("cannot get transaction message: %s", err) } // Message must be unmodified. assert.Equal(t, sendMsg, txmsg) }
explode_data.jsonl/19676
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 571 }
[ 2830, 3393, 15613, 2354, 41941, 32847, 1820, 1155, 353, 8840, 836, 8, 341, 32817, 6611, 1669, 609, 41271, 20176, 6611, 515, 197, 9209, 7603, 25, 262, 609, 896, 523, 46475, 90, 8632, 25, 220, 16, 1583, 197, 197, 3608, 25, 414, 504, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestEnvironmentVariableNotPresent(t *testing.T) { var args struct { NotPresent string `arg:"env"` } os.Args = []string{"example"} MustParse(&args) assert.Equal(t, "", args.NotPresent) }
explode_data.jsonl/13035
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 12723, 7827, 2623, 21195, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 2036, 341, 197, 197, 2623, 21195, 914, 1565, 858, 2974, 3160, 8805, 197, 532, 25078, 51015, 284, 3056, 917, 4913, 8687, 16707, 9209, 590, 14463, 2099, 2116,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDBAddSSTable(t *testing.T) { defer leaktest.AfterTest(t)() t.Run("store=in-memory", func(t *testing.T) { s, _, db := serverutils.StartServer(t, base.TestServerArgs{Insecure: true}) ctx := context.Background() defer s.Stopper().Stop(ctx) runTestDBAddSSTable(ctx, t, db, nil) }) t.Run("store=on-disk", func(t *testing.T) { dir, dirCleanupFn := testutils.TempDir(t) defer dirCleanupFn() storeSpec := base.DefaultTestStoreSpec storeSpec.InMemory = false storeSpec.Path = dir s, _, db := serverutils.StartServer(t, base.TestServerArgs{ Insecure: true, StoreSpecs: []base.StoreSpec{storeSpec}, }) ctx := context.Background() defer s.Stopper().Stop(ctx) store, err := s.GetStores().(*storage.Stores).GetStore(s.GetFirstStoreID()) if err != nil { t.Fatal(err) } runTestDBAddSSTable(ctx, t, db, store) }) }
explode_data.jsonl/65681
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 3506, 2212, 50, 784, 480, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 3244, 16708, 445, 4314, 67384, 64096, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1903, 11, 8358, 2927, 1669, 3538, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScrubDNS(t *testing.T) { tcs := []struct { nameserversIn []string searchesIn []string nameserversOut []string searchesOut []string }{ { nameserversIn: []string{"1.2.3.4", "5.6.7.8"}, nameserversOut: []string{"1.2.3.4", "5.6.7.8"}, }, { searchesIn: []string{"c.prj.internal.", "12345678910.google.internal.", "google.internal."}, searchesOut: []string{"c.prj.internal.", "google.internal."}, }, { searchesIn: []string{"c.prj.internal.", "12345678910.google.internal.", "zone.c.prj.internal.", "google.internal."}, searchesOut: []string{"c.prj.internal.", "zone.c.prj.internal.", "google.internal."}, }, { searchesIn: []string{"c.prj.internal.", "12345678910.google.internal.", "zone.c.prj.internal.", "google.internal.", "unexpected"}, searchesOut: []string{"c.prj.internal.", "zone.c.prj.internal.", "google.internal.", "unexpected"}, }, } gce := &GCECloud{} for i := range tcs { n, s := gce.ScrubDNS(tcs[i].nameserversIn, tcs[i].searchesIn) if !reflect.DeepEqual(n, tcs[i].nameserversOut) { t.Errorf("Expected %v, got %v", tcs[i].nameserversOut, n) } if !reflect.DeepEqual(s, tcs[i].searchesOut) { t.Errorf("Expected %v, got %v", tcs[i].searchesOut, s) } } }
explode_data.jsonl/28873
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 571 }
[ 2830, 3393, 3326, 59430, 61088, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 93940, 18729, 641, 220, 3056, 917, 198, 197, 45573, 288, 641, 257, 3056, 917, 198, 197, 93940, 18729, 2662, 3056, 917, 198, 197, 455...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCleanupTaskENIs(t *testing.T) { cfg := getTestConfig() ctrl := gomock.NewController(t) mockTime := mock_ttime.NewMockTime(ctrl) mockState := mock_dockerstate.NewMockTaskEngineState(ctrl) mockClient := mock_dockerapi.NewMockDockerClient(ctrl) mockImageManager := mock_engine.NewMockImageManager(ctrl) defer ctrl.Finish() ctx, cancel := context.WithCancel(context.TODO()) defer cancel() taskEngine := &DockerTaskEngine{ ctx: ctx, cfg: &cfg, saver: statemanager.NewNoopStateManager(), state: mockState, client: mockClient, imageManager: mockImageManager, } mTask := &managedTask{ ctx: ctx, cancel: cancel, Task: testdata.LoadTask("sleep5"), _time: mockTime, engine: taskEngine, acsMessages: make(chan acsTransition), dockerMessages: make(chan dockerContainerChange), resourceStateChangeEvent: make(chan resourceStateChange), cfg: taskEngine.cfg, saver: taskEngine.saver, } mTask.SetTaskENI(&apieni.ENI{ ID: "TestCleanupTaskENIs", IPV4Addresses: []*apieni.ENIIPV4Address{ { Primary: true, Address: ipv4, }, }, MacAddress: mac, IPV6Addresses: []*apieni.ENIIPV6Address{ { Address: ipv6, }, }, }) mTask.SetKnownStatus(apitaskstatus.TaskStopped) mTask.SetSentStatus(apitaskstatus.TaskStopped) container := mTask.Containers[0] dockerContainer := &apicontainer.DockerContainer{ DockerName: "dockerContainer", } // Expectations for triggering cleanup now := mTask.GetKnownStatusTime() taskStoppedDuration := 1 * time.Minute mockTime.EXPECT().Now().Return(now).AnyTimes() cleanupTimeTrigger := make(chan time.Time) mockTime.EXPECT().After(gomock.Any()).Return(cleanupTimeTrigger) go func() { cleanupTimeTrigger <- now }() // Expectations to verify that the task gets removed mockState.EXPECT().ContainerMapByArn(mTask.Arn).Return(map[string]*apicontainer.DockerContainer{container.Name: dockerContainer}, true) mockClient.EXPECT().RemoveContainer(gomock.Any(), dockerContainer.DockerName, gomock.Any()).Return(nil) mockImageManager.EXPECT().RemoveContainerReferenceFromImageState(container).Return(nil) 
mockState.EXPECT().RemoveTask(mTask.Task) mockState.EXPECT().RemoveENIAttachment(mac) mTask.cleanupTask(taskStoppedDuration) }
explode_data.jsonl/24583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1035 }
[ 2830, 3393, 67335, 6262, 953, 3872, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 633, 2271, 2648, 741, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 77333, 1462, 1669, 7860, 528, 1678, 7121, 11571, 1462, 62100, 340, 77333, 1397, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_fileName(t *testing.T) { type args struct { p string r int c int } tests := []struct { name string args args want string }{ { name: "test10x20", args: args{ p: "test", r: 10, c: 20, }, want: "test10x20.csv", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := fileName(tt.args.p, tt.args.r, tt.args.c); got != tt.want { t.Errorf("fileName() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/20935
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 2458, 675, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 3223, 914, 198, 197, 7000, 526, 198, 197, 1444, 526, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSaveTableIndex(t *testing.T) { s := objmock.NewStore() enc := objects.NewStrListEncoder(true) idx := testutils.BuildRawCSV(2, 10)[1:] buf := bytes.NewBuffer(nil) _, err := objects.WriteBlockTo(enc, buf, idx) require.NoError(t, err) sum := testutils.SecureRandomBytes(16) require.NoError(t, objects.SaveTableIndex(s, sum, buf.Bytes())) assert.True(t, objects.TableIndexExist(s, sum)) obj, err := objects.GetTableIndex(s, sum) require.NoError(t, err) assert.Equal(t, idx, obj) require.NoError(t, objects.DeleteTableIndex(s, sum)) assert.False(t, objects.TableIndexExist(s, sum)) bb := make([]byte, 1024) _, _, err = objects.GetBlock(s, bb, sum) assert.Equal(t, objects.ErrKeyNotFound, err) }
explode_data.jsonl/57829
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 8784, 2556, 1552, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 2839, 16712, 7121, 6093, 741, 197, 954, 1669, 6171, 7121, 2580, 852, 19921, 3715, 692, 62077, 1669, 1273, 6031, 25212, 20015, 44209, 7, 17, 11, 220, 16, 15, 6620,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecodeJsonWithNoTime(t *testing.T) { dec := json.NewDecoder(strings.NewReader("{\"event\":\"hello\"}")) dec.More() var msg Event err := dec.Decode(&msg) assert.NoError(t, err) assert.Nil(t, msg.Time) }
explode_data.jsonl/5173
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 32564, 5014, 2354, 2753, 1462, 1155, 353, 8840, 836, 8, 341, 197, 8169, 1669, 2951, 7121, 20732, 51442, 68587, 99141, 3087, 23488, 14990, 2105, 92, 28075, 197, 8169, 92768, 741, 2405, 3750, 3665, 198, 9859, 1669, 1622, 56372, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestResourceSQLGlobalConfigCreateDefault(t *testing.T) { d, err := qa.ResourceFixture{ Fixtures: []qa.HTTPFixture{ { Method: "PUT", Resource: "/api/2.0/sql/config/endpoints", ExpectedRequest: map[string]interface{}{ "data_access_config": []interface{}{}, "enable_serverless_compute": false, "security_policy": "DATA_ACCESS_CONTROL", }, }, { Method: "GET", Resource: "/api/2.0/sql/config/endpoints", ReuseRequest: true, Response: GlobalConfigForRead{ SecurityPolicy: "DATA_ACCESS_CONTROL", }, }, }, Resource: ResourceSQLGlobalConfig(), Create: true, HCL: ` `, }.Apply(t) require.NoError(t, err, err) assert.Equal(t, "global", d.Id(), "Id should not be empty") assert.Equal(t, "DATA_ACCESS_CONTROL", d.Get("security_policy")) }
explode_data.jsonl/48782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 4783, 6688, 11646, 2648, 4021, 3675, 1155, 353, 8840, 836, 8, 341, 2698, 11, 1848, 1669, 88496, 20766, 18930, 515, 197, 12727, 941, 18513, 25, 3056, 15445, 27358, 18930, 515, 298, 197, 515, 571, 84589, 25, 256, 330, 6221, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestConvertWithMisMatchListTypes feeds convertTypes two thrift structs
// whose field "two" differs in kind (Foo.two is a string, Bar.two is a
// list<Inner>) and asserts that conversion fails with no output and the
// specific "type is not list" error message.
func TestConvertWithMisMatchListTypes(t *testing.T) { lines, err := convertTypes( "Foo", "Bar", ` struct Inner { 1: optional string field } struct Foo { 1: optional list<Inner> one 2: required string two } struct Bar { 1: optional list<Inner> one 2: required list<Inner> two }`, nil, nil, ) assert.Error(t, err) assert.Equal(t, "", lines) assert.Equal(t, "Could not convert field (two): type is not list", err.Error(), ) }
explode_data.jsonl/62049
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 12012, 2354, 83159, 8331, 852, 4173, 1155, 353, 8840, 836, 8, 341, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, 3989, 197, 6472, 36356, 341, 298, 197, 16, 25, 10101, 914, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRecordingMetrics verifies that running each scheduler framework
// extension point (PreFilter, PreScore, Score, Reserve, Unreserve, PreBind,
// Bind, PostBind, Permit) records the expected extension-point and plugin
// metrics, for Success as well as injected Error/Wait statuses. Each case
// builds a fresh framework around a single TestPlugin whose result is
// injected via tt.inject, triggers the extension point, stops the metrics
// recorder goroutine, flushes any buffered samples, and then compares the
// collected framework- and plugin-level metrics against the expectation.
func TestRecordingMetrics(t *testing.T) { state := &framework.CycleState{} state.SetRecordPluginMetrics(true) tests := []struct { name string action func(f framework.Framework) inject injectedResult wantExtensionPoint string wantStatus framework.Code }{ { name: "PreFilter - Success", action: func(f framework.Framework) { f.RunPreFilterPlugins(context.Background(), state, pod) }, wantExtensionPoint: "PreFilter", wantStatus: framework.Success, }, { name: "PreScore - Success", action: func(f framework.Framework) { f.RunPreScorePlugins(context.Background(), state, pod, nil) }, wantExtensionPoint: "PreScore", wantStatus: framework.Success, }, { name: "Score - Success", action: func(f framework.Framework) { f.RunScorePlugins(context.Background(), state, pod, nodes) }, wantExtensionPoint: "Score", wantStatus: framework.Success, }, { name: "Reserve - Success", action: func(f framework.Framework) { f.RunReservePluginsReserve(context.Background(), state, pod, "") }, wantExtensionPoint: "Reserve", wantStatus: framework.Success, }, { name: "Unreserve - Success", action: func(f framework.Framework) { f.RunReservePluginsUnreserve(context.Background(), state, pod, "") }, wantExtensionPoint: "Unreserve", wantStatus: framework.Success, }, { name: "PreBind - Success", action: func(f framework.Framework) { f.RunPreBindPlugins(context.Background(), state, pod, "") }, wantExtensionPoint: "PreBind", wantStatus: framework.Success, }, { name: "Bind - Success", action: func(f framework.Framework) { f.RunBindPlugins(context.Background(), state, pod, "") }, wantExtensionPoint: "Bind", wantStatus: framework.Success, }, { name: "PostBind - Success", action: func(f framework.Framework) { f.RunPostBindPlugins(context.Background(), state, pod, "") }, wantExtensionPoint: "PostBind", wantStatus: framework.Success, }, { name: "Permit - Success", action: func(f framework.Framework) { f.RunPermitPlugins(context.Background(), state, pod, "") }, wantExtensionPoint: "Permit", wantStatus: 
framework.Success, }, { name: "PreFilter - Error", action: func(f framework.Framework) { f.RunPreFilterPlugins(context.Background(), state, pod) }, inject: injectedResult{PreFilterStatus: int(framework.Error)}, wantExtensionPoint: "PreFilter", wantStatus: framework.Error, }, { name: "PreScore - Error", action: func(f framework.Framework) { f.RunPreScorePlugins(context.Background(), state, pod, nil) }, inject: injectedResult{PreScoreStatus: int(framework.Error)}, wantExtensionPoint: "PreScore", wantStatus: framework.Error, }, { name: "Score - Error", action: func(f framework.Framework) { f.RunScorePlugins(context.Background(), state, pod, nodes) }, inject: injectedResult{ScoreStatus: int(framework.Error)}, wantExtensionPoint: "Score", wantStatus: framework.Error, }, { name: "Reserve - Error", action: func(f framework.Framework) { f.RunReservePluginsReserve(context.Background(), state, pod, "") }, inject: injectedResult{ReserveStatus: int(framework.Error)}, wantExtensionPoint: "Reserve", wantStatus: framework.Error, }, { name: "PreBind - Error", action: func(f framework.Framework) { f.RunPreBindPlugins(context.Background(), state, pod, "") }, inject: injectedResult{PreBindStatus: int(framework.Error)}, wantExtensionPoint: "PreBind", wantStatus: framework.Error, }, { name: "Bind - Error", action: func(f framework.Framework) { f.RunBindPlugins(context.Background(), state, pod, "") }, inject: injectedResult{BindStatus: int(framework.Error)}, wantExtensionPoint: "Bind", wantStatus: framework.Error, }, { name: "Permit - Error", action: func(f framework.Framework) { f.RunPermitPlugins(context.Background(), state, pod, "") }, inject: injectedResult{PermitStatus: int(framework.Error)}, wantExtensionPoint: "Permit", wantStatus: framework.Error, }, { name: "Permit - Wait", action: func(f framework.Framework) { f.RunPermitPlugins(context.Background(), state, pod, "") }, inject: injectedResult{PermitStatus: int(framework.Wait)}, wantExtensionPoint: "Permit", wantStatus: 
framework.Wait, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { metrics.Register() metrics.FrameworkExtensionPointDuration.Reset() metrics.PluginExecutionDuration.Reset() plugin := &TestPlugin{name: testPlugin, inj: tt.inject} r := make(Registry) r.Register(testPlugin, func(_ runtime.Object, fh framework.Handle) (framework.Plugin, error) { return plugin, nil }) pluginSet := config.PluginSet{Enabled: []config.Plugin{{Name: testPlugin, Weight: 1}}} plugins := &config.Plugins{ Score: pluginSet, PreFilter: pluginSet, Filter: pluginSet, PreScore: pluginSet, Reserve: pluginSet, Permit: pluginSet, PreBind: pluginSet, Bind: pluginSet, PostBind: pluginSet, } recorder := newMetricsRecorder(100, time.Nanosecond) f, err := newFrameworkWithQueueSortAndBind(r, plugins, emptyArgs, withMetricsRecorder(recorder), WithProfileName(testProfileName)) if err != nil { t.Fatalf("Failed to create framework for testing: %v", err) } tt.action(f) // Stop the goroutine which records metrics and ensure it's stopped. close(recorder.stopCh) <-recorder.isStoppedCh // Try to clean up the metrics buffer again in case it's not empty. recorder.flushMetrics() collectAndCompareFrameworkMetrics(t, tt.wantExtensionPoint, tt.wantStatus) collectAndComparePluginMetrics(t, tt.wantExtensionPoint, testPlugin, tt.wantStatus) }) } }
explode_data.jsonl/35748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2867 }
[ 2830, 3393, 52856, 27328, 1155, 353, 8840, 836, 8, 341, 24291, 1669, 609, 3794, 727, 5449, 1397, 16094, 24291, 4202, 6471, 11546, 27328, 3715, 692, 78216, 1669, 3056, 1235, 341, 197, 11609, 2290, 914, 198, 197, 38933, 1797, 2915, 955, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMem(t *testing.T) { bms := NewMemoryBitmapSet() b, _ := NewBloomFilter(1000, 4, bms) if ok, _ := b.TestString("abc"); ok { t.Errorf("error") } b.AddString("abc") b.AddString("def") b.AddString("test") if ok, _ := b.TestString("abc"); !ok { t.Errorf("error") } if ok, _ := b.TestString("def"); !ok { t.Errorf("error") } if ok, _ := b.TestString("test"); !ok { t.Errorf("error") } }
explode_data.jsonl/53515
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 18816, 1155, 353, 8840, 836, 8, 341, 2233, 1011, 1669, 1532, 10642, 16773, 1649, 741, 2233, 11, 716, 1669, 1532, 33, 18474, 5632, 7, 16, 15, 15, 15, 11, 220, 19, 11, 293, 1011, 692, 743, 5394, 11, 716, 1669, 293, 8787,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetBucketRegion(t *testing.T) { expectRegion := aws.StringValue(integSess.Config.Region) ctx := aws.BackgroundContext() region, err := s3manager.GetBucketRegion(ctx, integSess, aws.StringValue(bucketName), expectRegion) if err != nil { t.Fatalf("expect no error, got %v", err) } if e, a := expectRegion, region; e != a { t.Errorf("expect %s bucket region, got %s", e, a) } }
explode_data.jsonl/9082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 1949, 36018, 14091, 1155, 353, 8840, 836, 8, 341, 24952, 14091, 1669, 31521, 6431, 1130, 1548, 791, 50, 433, 10753, 65642, 692, 20985, 1669, 31521, 19047, 1972, 741, 197, 3943, 11, 1848, 1669, 274, 18, 13297, 2234, 36018, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestCallerSubscriberClient_GetEthBalance checks that GetEthBalance
// correctly converts hex balance strings returned by the "eth_getBalance"
// RPC into decimal ether amounts, including values larger than a signed
// 64-bit integer. The mocked Call writes each case's hex input into the
// result pointer it receives.
func TestCallerSubscriberClient_GetEthBalance(t *testing.T) { t.Parallel() tests := []struct { name string input string expected string }{ {"basic", "0x0100", "0.000000000000000256"}, {"larger than signed 64 bit integer", "0x4b3b4ca85a86c47a098a224000000000", "100000000000000000000.000000000000000000"}, } for _, test := range tests { test := test t.Run(test.name, func(t *testing.T) { ethClientMock := new(mocks.CallerSubscriber) ethClient := &eth.CallerSubscriberClient{CallerSubscriber: ethClientMock} ethClientMock.On("Call", mock.Anything, "eth_getBalance", mock.Anything, "latest"). Return(nil). Run(func(args mock.Arguments) { res := args.Get(0).(*string) *res = test.input }) result, err := ethClient.GetEthBalance(cltest.NewAddress()) assert.NoError(t, err) assert.Equal(t, test.expected, result.String()) }) } }
explode_data.jsonl/63846
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 58735, 40236, 2959, 13614, 65390, 21190, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 22427, 262, 914, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParse(t *testing.T) { b, err := ioutil.ReadFile(name) if err != nil { t.Fatal(err) } _, err = Parse(name, string(b)) if err != nil { t.Fatal(err) } }
explode_data.jsonl/49580
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 14463, 1155, 353, 8840, 836, 8, 341, 2233, 11, 1848, 1669, 43144, 78976, 3153, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 197, 6878, 1848, 284, 14775, 3153, 11, 914, 1883, 1171, 743, 1848, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
// Test_parseCommandWrongArgsCount verifies that parseCmd terminates the
// process with exit status 1 when invoked with too few arguments. Because
// the code under test exits the process, the test re-executes its own
// binary (the standard fork/exec pattern): when DIE_PROCESS_DIE=1 is set,
// the child runs the real parseCmd call; the parent then asserts the
// child exited unsuccessfully.
func Test_parseCommandWrongArgsCount(t *testing.T) { if os.Getenv("DIE_PROCESS_DIE") == "1" { ctx := log.NewSyncLogger(log.NewLogfmtLogger(os.Stdout)) mm := createMockVMExtensionEnvironmentManager() ii, _ := GetInitializationInfo("yaba", "5.0", true, testEnableCallback) ext, _ := getVMExtensionInternal(ctx, ii, mm) args := make([]string, 1) args[0] = "install" ext.parseCmd(args) return } // Verify that the process exits cmd := exec.Command(os.Args[0], "-test.run=Test_parseCommandWrongArgsCount") cmd.Env = append(os.Environ(), "DIE_PROCESS_DIE=1") err := cmd.Run() if e, ok := err.(*exec.ExitError); ok && !e.Success() { return } t.Fatalf("process ran with err %v, want exit status 1", err) }
explode_data.jsonl/18583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 290 }
[ 2830, 3393, 21039, 4062, 29185, 4117, 2507, 1155, 353, 8840, 836, 8, 341, 743, 2643, 64883, 445, 35, 5371, 36199, 1557, 5371, 899, 621, 330, 16, 1, 341, 197, 20985, 1669, 1487, 7121, 12154, 7395, 12531, 7121, 2201, 12501, 7395, 9638, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestUserEntityVerifySUCCESS posts a valid user payload to the local
// /Login endpoint and compares the response body against the canned
// "success-login" response. NOTE(review): requires a server listening on
// localhost:8080 — this is an integration-style test, not a unit test.
func TestUserEntityVerifySUCCESS(t *testing.T) { auth = sendPost("http://localhost:8080/Login", APPJASON_UTF_8, UserEntityVerifySUCCESS) response := responseToString(auth) compareResults(t, response, HyperText.CustomResponses["success-login"]) }
explode_data.jsonl/59336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 1474, 3030, 32627, 39308, 1155, 353, 8840, 836, 8, 341, 78011, 284, 3624, 4133, 445, 1254, 1110, 8301, 25, 23, 15, 23, 15, 54803, 497, 17912, 41, 35304, 55061, 62, 23, 11, 2657, 3030, 32627, 39308, 340, 21735, 1669, 2033, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestMemChunk_IteratorBounds verifies that a MemChunk iterator honours
// half-open [mint, maxt) time bounds in both FORWARD and BACKWARD
// directions. Each chunk holds two entries at Unix nanoseconds 1 and 2;
// `expect` lists the expected sequence of Next() results. Every case is
// run twice: once against the in-memory head chunk, and once after cut()
// flushes the head into a compressed block.
func TestMemChunk_IteratorBounds(t *testing.T) { var createChunk = func() *MemChunk { t.Helper() c := NewMemChunk(EncNone, 1e6, 1e6) if err := c.Append(&logproto.Entry{ Timestamp: time.Unix(0, 1), Line: "1", }); err != nil { t.Fatal(err) } if err := c.Append(&logproto.Entry{ Timestamp: time.Unix(0, 2), Line: "2", }); err != nil { t.Fatal(err) } return c } for _, tt := range []struct { mint, maxt time.Time direction logproto.Direction expect []bool // array of expected values for next call in sequence }{ {time.Unix(0, 0), time.Unix(0, 1), logproto.FORWARD, []bool{false}}, {time.Unix(0, 1), time.Unix(0, 1), logproto.FORWARD, []bool{true, false}}, {time.Unix(0, 1), time.Unix(0, 2), logproto.FORWARD, []bool{true, false}}, {time.Unix(0, 2), time.Unix(0, 2), logproto.FORWARD, []bool{true, false}}, {time.Unix(0, 1), time.Unix(0, 3), logproto.FORWARD, []bool{true, true, false}}, {time.Unix(0, 2), time.Unix(0, 3), logproto.FORWARD, []bool{true, false}}, {time.Unix(0, 3), time.Unix(0, 3), logproto.FORWARD, []bool{false}}, {time.Unix(0, 0), time.Unix(0, 1), logproto.BACKWARD, []bool{false}}, {time.Unix(0, 1), time.Unix(0, 1), logproto.BACKWARD, []bool{true, false}}, {time.Unix(0, 1), time.Unix(0, 2), logproto.BACKWARD, []bool{true, false}}, {time.Unix(0, 2), time.Unix(0, 2), logproto.BACKWARD, []bool{true, false}}, {time.Unix(0, 1), time.Unix(0, 3), logproto.BACKWARD, []bool{true, true, false}}, {time.Unix(0, 2), time.Unix(0, 3), logproto.BACKWARD, []bool{true, false}}, {time.Unix(0, 3), time.Unix(0, 3), logproto.BACKWARD, []bool{false}}, } { t.Run( fmt.Sprintf("mint:%d,maxt:%d,direction:%s", tt.mint.UnixNano(), tt.maxt.UnixNano(), tt.direction), func(t *testing.T) { tt := tt c := createChunk() // testing headchunk it, err := c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, logql.NoopPipeline) require.NoError(t, err) for i := range tt.expect { require.Equal(t, tt.expect[i], it.Next()) } require.NoError(t, it.Close()) // testing chunk blocks 
require.NoError(t, c.cut()) it, err = c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, logql.NoopPipeline) require.NoError(t, err) for i := range tt.expect { require.Equal(t, tt.expect[i], it.Next()) } require.NoError(t, it.Close()) }) } }
explode_data.jsonl/15707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1128 }
[ 2830, 3393, 18816, 28304, 7959, 465, 850, 11394, 1155, 353, 8840, 836, 8, 1476, 2405, 1855, 28304, 284, 2915, 368, 353, 18816, 28304, 341, 197, 3244, 69282, 741, 197, 1444, 1669, 1532, 18816, 28304, 7, 7408, 4064, 11, 220, 16, 68, 21,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestWorkshop_AddJob feeds 40 randomly-timed mock jobs (drawn from five
// job names) into a 3-worker workshop, then sleeps to let the workers
// drain. NOTE(review): the final 2-minute sleep makes this test very slow
// and the random timing makes it non-deterministic — presumably it is a
// smoke/soak test meant for manual runs; confirm before including it in CI.
// NOTE(review): rand.Seed is deprecated in modern Go (1.20+); harmless here.
func TestWorkshop_AddJob(t *testing.T) { rand.Seed(time.Now().Unix()) seqMaker := sequence.New(1000000) shop := New(3) for i := 0; i < 40; i++ { time.Sleep(time.Duration(rand.Intn(5)) * time.Millisecond) n := rand.Intn(5) job := mockjob.New("job"+strconv.Itoa(n), seqMaker.Next()) shop.AddJob(job) } time.Sleep(2 * time.Minute) }
explode_data.jsonl/29108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 6776, 8675, 21346, 12245, 1155, 353, 8840, 836, 8, 341, 7000, 437, 5732, 291, 9730, 13244, 1005, 55832, 12367, 78561, 33259, 1669, 8500, 7121, 7, 16, 15, 15, 15, 15, 15, 15, 340, 197, 8675, 1669, 1532, 7, 18, 340, 2023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNegativeReplicas(t *testing.T) { size := int32(-1) jaeger := v1.NewJaeger(types.NamespacedName{Name: "TestNegativeReplicas"}) jaeger.Spec.Collector.Replicas = &size collector := NewCollector(jaeger) dep := collector.Get() assert.Equal(t, int32(1), *dep.Spec.Replicas) }
explode_data.jsonl/69111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 38489, 18327, 52210, 1155, 353, 8840, 836, 8, 341, 13832, 1669, 526, 18, 17, 4080, 16, 340, 197, 5580, 1878, 1669, 348, 16, 7121, 52445, 1878, 52613, 98932, 68552, 675, 63121, 25, 330, 2271, 38489, 18327, 52210, 23625, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestManifestGenErrorCacheFileContentsChange verifies how the repo-server
// caches manifest-generation failures as the underlying chart alternates
// between invalid and valid. With PauseGenerationAfterFailedGenerationAttempts
// set to 2 and a 4-request pause window, the three steps check:
//  1. an invalid chart returns a fresh (uncached) error,
//  2. a valid chart succeeds and the success is cached,
//  3. an invalid chart still returns the cached success from step 2.
// The target directory is wiped and repopulated from testdata before each step.
func TestManifestGenErrorCacheFileContentsChange(t *testing.T) { tmpDir := t.TempDir() service := newService(tmpDir) service.initConstants = RepoServerInitConstants{ ParallelismLimit: 1, PauseGenerationAfterFailedGenerationAttempts: 2, PauseGenerationOnFailureForMinutes: 0, PauseGenerationOnFailureForRequests: 4, } for step := 0; step < 3; step++ { // step 1) Attempt to generate manifests against invalid helm chart (should return uncached error) // step 2) Attempt to generate manifest against valid helm chart (should succeed and return valid response) // step 3) Attempt to generate manifest against invalid helm chart (should return cached value from step 2) errorExpected := step%2 == 0 // Ensure that the target directory will succeed or fail, so we can verify the cache correctly handles it err := os.RemoveAll(tmpDir) assert.NoError(t, err) err = os.MkdirAll(tmpDir, 0777) assert.NoError(t, err) if errorExpected { // Copy invalid helm chart into temporary directory, ensuring manifest generation will fail err = fileutil.CopyDir("./testdata/invalid-helm", tmpDir) assert.NoError(t, err) } else { // Copy valid helm chart into temporary directory, ensuring generation will succeed err = fileutil.CopyDir("./testdata/my-chart", tmpDir) assert.NoError(t, err) } res, err := service.GenerateManifest(context.Background(), &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, AppName: "test", ApplicationSource: &argoappv1.ApplicationSource{ Path: ".", }, }) fmt.Println("-", step, "-", res != nil, err != nil, errorExpected) fmt.Println(" err: ", err) fmt.Println(" res: ", res) if step < 2 { assert.True(t, (err != nil) == errorExpected, "error return value and error expected did not match") assert.True(t, (res != nil) == !errorExpected, "GenerateManifest return value and expected value did not match") } if step == 2 { assert.NoError(t, err, "error ret val was non-nil on step 3") assert.NotNil(t, res, "GenerateManifest ret val was nil on step 3") } } }
explode_data.jsonl/5671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 729 }
[ 2830, 3393, 38495, 9967, 1454, 8233, 1703, 14803, 4072, 1155, 353, 8840, 836, 8, 1476, 20082, 6184, 1669, 259, 65009, 6184, 2822, 52934, 1669, 501, 1860, 10368, 6184, 692, 52934, 8271, 9386, 284, 71509, 5475, 3803, 9386, 515, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGoodCrossSignature(t *testing.T) { // This public key has a signing subkey, and the subkey has an // embedded cross-signature which correctly validates over the // primary and subkey. keys, err := ReadArmoredKeyRing(bytes.NewBufferString(goodCrossSignatureKey)) if err != nil { t.Fatal(err) } if len(keys) != 1 { t.Errorf("Failed to accept key with good cross signature, %d", len(keys)) } if len(keys[0].Subkeys) != 1 { t.Errorf("Failed to accept good subkey, %d", len(keys[0].Subkeys)) } }
explode_data.jsonl/14736
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 15216, 28501, 25088, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 584, 1376, 702, 264, 15971, 1186, 792, 11, 323, 279, 1186, 792, 702, 458, 198, 197, 322, 22864, 5312, 27953, 1568, 892, 12440, 26257, 916, 279, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEntry_SetReadOnly_OnFocus(t *testing.T) { entry, window := setupImageTest(false) defer teardownImageTest(window) c := window.Canvas() entry.SetReadOnly(true) entry.FocusGained() test.AssertImageMatches(t, "entry/set_readonly_on_focus_readonly.png", c.Capture()) entry.SetReadOnly(false) entry.FocusGained() test.AssertImageMatches(t, "entry/set_readonly_on_focus_writable.png", c.Capture()) }
explode_data.jsonl/57326
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 5874, 14812, 20914, 35482, 13819, 1155, 353, 8840, 836, 8, 341, 48344, 11, 3241, 1669, 6505, 1906, 2271, 3576, 340, 16867, 49304, 1906, 2271, 15906, 340, 1444, 1669, 3241, 54121, 2822, 48344, 4202, 20914, 3715, 340, 48344, 401...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResponse(t *testing.T) { t.Run("MarshalJSON", func(t *testing.T) { for _, test := range responseTests { if test.Res == nil { continue } t.Run(test.Name, func(t *testing.T) { assert := assert.New(t) data, err := test.Res.MarshalJSON() assert.NoError(err) assert.Equal(test.Data, string(data)) }) } }) }
explode_data.jsonl/25941
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 2582, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 55438, 5370, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 2023, 8358, 1273, 1669, 2088, 2033, 18200, 341, 298, 743, 1273, 8377, 621, 2092, 341, 571, 11664, 198, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestHasDedicatedImageFs checks that the stats provider reports a
// dedicated image filesystem exactly when the root filesystem and image
// filesystem live on different devices. The cadvisor mock supplies the
// root device; the fake container stats provider supplies the image device.
func TestHasDedicatedImageFs(t *testing.T) { for desc, test := range map[string]struct { rootfsDevice string imagefsDevice string dedicated bool }{ "dedicated device for image filesystem": { rootfsDevice: "root/device", imagefsDevice: "image/device", dedicated: true, }, "shared device for image filesystem": { rootfsDevice: "share/device", imagefsDevice: "share/device", dedicated: false, }, } { t.Logf("TestCase %q", desc) var ( mockCadvisor = new(cadvisortest.Mock) mockPodManager = new(kubepodtest.MockManager) mockRuntimeCache = new(kubecontainertest.MockRuntimeCache) ) mockCadvisor.On("RootFsInfo").Return(cadvisorapiv2.FsInfo{Device: test.rootfsDevice}, nil) provider := newStatsProvider(mockCadvisor, mockPodManager, mockRuntimeCache, fakeContainerStatsProvider{ device: test.imagefsDevice, }) dedicated, err := provider.HasDedicatedImageFs() assert.NoError(t, err) assert.Equal(t, test.dedicated, dedicated) mockCadvisor.AssertExpectations(t) } }
explode_data.jsonl/3073
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 419 }
[ 2830, 3393, 10281, 35, 291, 10089, 1906, 48300, 1155, 353, 8840, 836, 8, 341, 2023, 6560, 11, 1273, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 33698, 3848, 6985, 220, 914, 198, 197, 31426, 3848, 6985, 914, 198, 197, 2698, 291, 10089...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_reportStatusSaved verifies that reportStatus writes a status file
// named after the requested sequence number (here "45.status") into the
// extension's status folder when status reporting is enabled for the
// "Install" command.
func Test_reportStatusSaved(t *testing.T) { ctx := log.NewSyncLogger(log.NewLogfmtLogger(os.Stdout)) ext := createTestVMExtension() c := cmd{nil, "Install", true, 99} ext.HandlerEnv.StatusFolder = statusTestDirectory ext.RequestedSequenceNumber = 45 createDirsForVMExtension(ext) defer cleanupDirsForVMExtension(ext) err := reportStatus(ctx, ext, status.StatusSuccess, c, "msg") require.NoError(t, err, "reportStatus failed") _, err = os.Stat(path.Join(statusTestDirectory, "45.status")) require.NoError(t, err, "File doesn't exist") }
explode_data.jsonl/18574
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 14813, 2522, 41133, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1487, 7121, 12154, 7395, 12531, 7121, 2201, 12501, 7395, 9638, 83225, 1171, 95450, 1669, 1855, 2271, 11187, 12049, 2822, 1444, 1669, 5439, 90, 8385, 11, 330, 24690...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReverse(t *testing.T) { toReverse := "to_reverse" reversed := "esrever_ot" if reverse(toReverse) != reversed { t.Errorf("Reversal was incorrect, got: %s", reverse(toReverse)) } }
explode_data.jsonl/49055
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 45695, 1155, 353, 8840, 836, 8, 341, 31709, 45695, 1669, 330, 983, 43277, 698, 17200, 43776, 1669, 330, 288, 265, 423, 65614, 1837, 743, 9931, 12186, 45695, 8, 961, 27437, 341, 197, 3244, 13080, 445, 693, 3004, 278, 572, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestInitializeMultichannelRegistrar bootstraps the orderer's
// multichannel registrar in two modes. With a system channel, the genesis
// block produced from the sample profile seeds the bootstrap channel and
// the registrar must report "testchannelid" as the system channel ID.
// Without a system channel (BootstrapMethod "none", empty GenesisFile),
// the registrar must come up with an empty system channel ID. Both paths
// share a file-backed ledger factory and a default software BCCSP.
func TestInitializeMultichannelRegistrar(t *testing.T) { cleanup := configtest.SetDevFabricConfigPath(t) defer cleanup() genesisFile := produceGenesisFile(t, genesisconfig.SampleDevModeSoloProfile, "testchannelid") defer os.Remove(genesisFile) conf := genesisConfig(t, genesisFile) cryptoProvider, err := sw.NewDefaultSecurityLevelWithKeystore(sw.NewDummyKeyStore()) assert.NoError(t, err) signer := &server_mocks.SignerSerializer{} mockIdentity := &server_mocks.Identity{} t.Run("registrar with a system channel", func(t *testing.T) { lf, _, err := createLedgerFactory(conf, &disabled.Provider{}) assert.NoError(t, err) bootBlock := file.New(genesisFile).GenesisBlock() initializeBootstrapChannel(bootBlock, lf) registrar := initializeMultichannelRegistrar( bootBlock, onboarding.NewReplicationInitiator(lf, bootBlock, conf, comm.SecureOptions{}, signer, cryptoProvider), &cluster.PredicateDialer{}, comm.ServerConfig{}, nil, conf, signer, mockIdentity, &disabled.Provider{}, &server_mocks.HealthChecker{}, lf, cryptoProvider, ) assert.NotNil(t, registrar) assert.Equal(t, "testchannelid", registrar.SystemChannelID()) }) t.Run("registrar without a system channel", func(t *testing.T) { conf.General.BootstrapMethod = "none" conf.General.GenesisFile = "" srv, err := comm.NewGRPCServer("127.0.0.1:0", comm.ServerConfig{}) assert.NoError(t, err) lf, _, err := createLedgerFactory(conf, &disabled.Provider{}) assert.NoError(t, err) registrar := initializeMultichannelRegistrar( nil, nil, &cluster.PredicateDialer{}, comm.ServerConfig{}, srv, conf, signer, mockIdentity, &disabled.Provider{}, &server_mocks.HealthChecker{}, lf, cryptoProvider, ) assert.NotNil(t, registrar) assert.Empty(t, registrar.SystemChannelID()) }) }
explode_data.jsonl/32655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 727 }
[ 2830, 3393, 9928, 40404, 713, 2594, 70252, 1155, 353, 8840, 836, 8, 341, 1444, 60639, 1669, 2193, 1944, 4202, 14592, 81731, 2648, 1820, 1155, 340, 16867, 21290, 741, 82281, 13774, 1703, 1669, 8193, 84652, 1703, 1155, 11, 59366, 1676, 7626...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_s3mngr_NewUploaderWithClient is a generated table-test scaffold for
// s3mngr.NewUploaderWithClient. The default check compares the returned
// UploadClient against the expectation with reflect.DeepEqual; per-case
// before/after hooks and custom checks are supported. All concrete cases
// are still TODO (commented out), so the loop body currently never runs.
func Test_s3mngr_NewUploaderWithClient(t *testing.T) { t.Parallel() type args struct { svc s3iface.S3API options []func(*Uploader) } type want struct { want UploadClient } type test struct { name string args args s *s3mngr want want checkFunc func(want, UploadClient) error beforeFunc func(args) afterFunc func(args) } defaultCheckFunc := func(w want, got UploadClient) error { if !reflect.DeepEqual(got, w.want) { return errors.Errorf("got: \"%#v\",\n\t\t\t\twant: \"%#v\"", got, w.want) } return nil } tests := []test{ // TODO test cases /* { name: "test_case_1", args: args { svc: nil, options: nil, }, want: want{}, checkFunc: defaultCheckFunc, }, */ // TODO test cases /* func() test { return test { name: "test_case_2", args: args { svc: nil, options: nil, }, want: want{}, checkFunc: defaultCheckFunc, } }(), */ } for _, test := range tests { t.Run(test.name, func(tt *testing.T) { tt.Parallel() defer goleak.VerifyNone(tt) if test.beforeFunc != nil { test.beforeFunc(test.args) } if test.afterFunc != nil { defer test.afterFunc(test.args) } if test.checkFunc == nil { test.checkFunc = defaultCheckFunc } s := &s3mngr{} got := s.NewUploaderWithClient(test.args.svc, test.args.options...) if err := test.checkFunc(test.want, got); err != nil { tt.Errorf("error = %v", err) } }) } }
explode_data.jsonl/20550
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 845 }
[ 2830, 3393, 643, 18, 21775, 901, 39582, 67574, 2354, 2959, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 13158, 2827, 2036, 341, 197, 1903, 7362, 257, 274, 18, 52674, 808, 18, 7082, 198, 197, 35500, 3056, 2830, 4071, 67574, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInMeetsPrepareAndExecute(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("prepare pr1 from 'select ? in (1,?,?)'") tk.MustExec("set @a=1, @b=2, @c=3") tk.MustQuery("execute pr1 using @a,@b,@c").Check(testkit.Rows("1")) tk.MustExec("prepare pr2 from 'select 3 in (1,?,?)'") tk.MustExec("set @a=2, @b=3") tk.MustQuery("execute pr2 using @a,@b").Check(testkit.Rows("1")) tk.MustExec("prepare pr3 from 'select ? in (1,2,3)'") tk.MustExec("set @a=4") tk.MustQuery("execute pr3 using @a").Check(testkit.Rows("0")) tk.MustExec("prepare pr4 from 'select ? in (?,?,?)'") tk.MustExec("set @a=1, @b=2, @c=3, @d=4") tk.MustQuery("execute pr4 using @a,@b,@c,@d").Check(testkit.Rows("0")) }
explode_data.jsonl/65505
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 641, 7823, 1415, 50590, 3036, 17174, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShareLowOrderPubkey(t *testing.T) { var fooConn, barConn = makeKVStoreConnPair() defer fooConn.Close() defer barConn.Close() locEphPub, _ := genEphKeys() // all blacklisted low order points: for _, remLowOrderPubKey := range blacklist { _, _ = cmn.Parallel( func(_ int) (val interface{}, err error, abort bool) { _, err = shareEphPubKey(fooConn, locEphPub) require.Error(t, err) require.Equal(t, err, ErrSmallOrderRemotePubKey) return nil, nil, false }, func(_ int) (val interface{}, err error, abort bool) { readRemKey, err := shareEphPubKey(barConn, &remLowOrderPubKey) require.NoError(t, err) require.Equal(t, locEphPub, readRemKey) return nil, nil, false }) } }
explode_data.jsonl/39908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 12115, 24187, 4431, 29162, 792, 1155, 353, 8840, 836, 8, 341, 2405, 15229, 9701, 11, 3619, 9701, 284, 1281, 82707, 6093, 9701, 12443, 741, 16867, 15229, 9701, 10421, 741, 16867, 3619, 9701, 10421, 741, 71128, 36, 759, 29162, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInstanceCertificates(t *testing.T) { certs := new(corev1.Secret) certs.Name = "some-name" projections := instanceCertificates(certs) assert.Assert(t, marshalEquals(projections, strings.TrimSpace(` - secret: items: - key: patroni.ca-roots path: ~postgres-operator/patroni.ca-roots - key: patroni.crt-combined path: ~postgres-operator/patroni.crt+key name: some-name `)+"\n")) }
explode_data.jsonl/63108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 2523, 97140, 1155, 353, 8840, 836, 8, 341, 1444, 15546, 1669, 501, 47867, 85, 16, 74779, 340, 1444, 15546, 2967, 284, 330, 14689, 11494, 1837, 197, 776, 24575, 1669, 2867, 97140, 1337, 15546, 692, 6948, 11711, 1155, 11, 6077...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListDevicesChildListDevicesChild(t *testing.T) { executor := &exectest.MockExecutor{ MockExecuteCommandWithOutput: func(command string, arg ...string) (string, error) { logger.Infof("command %s", command) return lsblkChildOutput, nil }, } device := "/dev/vdb" child, err := ListDevicesChild(executor, device) assert.NoError(t, err) assert.Equal(t, 3, len(child)) }
explode_data.jsonl/65043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 852, 40835, 3652, 852, 40835, 3652, 1155, 353, 8840, 836, 8, 341, 67328, 4831, 1669, 609, 327, 439, 477, 24664, 25255, 515, 197, 9209, 1176, 17174, 4062, 2354, 5097, 25, 2915, 15143, 914, 11, 1392, 2503, 917, 8, 320, 917, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouterMultiLevelParamUnmatch(t *testing.T) { // Create empty handler h := new(Handler) // Create empty context c := new(Context) c.Params = Params{} // Create route r := Route("/a/b/:param", h) // Non-matching routes rs := []string{"/", "", "/a/b", "a/b", "/a/b/c/d", "/a/b/"} // Check for _, s := range rs { if r.Match(s, c) { t.Errorf("'%s' shouldn't match against '/a/b/:param'", s) } } }
explode_data.jsonl/35805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 185 }
[ 2830, 3393, 9523, 20358, 4449, 2001, 1806, 6347, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 4287, 7013, 198, 9598, 1669, 501, 7, 3050, 692, 197, 322, 4230, 4287, 2266, 198, 1444, 1669, 501, 14001, 340, 1444, 58268, 284, 34352, 3148...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestProvisioning_ClusterParameters(t *testing.T) { for tn, tc := range map[string]struct { planID string platformRegion string platformProvider internal.CloudProvider zonesCount *int region string expectedProfile gqlschema.KymaProfile expectedProvider string expectedMinimalNumberOfNodes int expectedMaximumNumberOfNodes int expectedMachineType string expectedSharedSubscription bool expectedSubsciptionHyperscalerType hyperscaler.Type }{ "Regular trial": { planID: broker.TrialPlanID, expectedMinimalNumberOfNodes: 1, expectedMaximumNumberOfNodes: 1, expectedMachineType: "Standard_D4_v3", expectedProfile: gqlschema.KymaProfileEvaluation, expectedProvider: "azure", expectedSharedSubscription: true, expectedSubsciptionHyperscalerType: hyperscaler.Azure, }, "Freemium aws": { planID: broker.FreemiumPlanID, platformProvider: internal.AWS, expectedMinimalNumberOfNodes: 1, expectedMaximumNumberOfNodes: 1, expectedProfile: gqlschema.KymaProfileEvaluation, expectedProvider: "aws", expectedSharedSubscription: false, expectedMachineType: "m5.xlarge", expectedSubsciptionHyperscalerType: hyperscaler.AWS, }, "Freemium azure": { planID: broker.FreemiumPlanID, platformProvider: internal.Azure, expectedMinimalNumberOfNodes: 1, expectedMaximumNumberOfNodes: 1, expectedProfile: gqlschema.KymaProfileEvaluation, expectedProvider: "azure", expectedSharedSubscription: false, expectedMachineType: "Standard_D4_v3", expectedSubsciptionHyperscalerType: hyperscaler.Azure, }, "Production Azure": { planID: broker.AzurePlanID, expectedMinimalNumberOfNodes: 2, expectedMaximumNumberOfNodes: 10, expectedMachineType: "Standard_D8_v3", expectedProfile: gqlschema.KymaProfileProduction, expectedProvider: "azure", expectedSharedSubscription: false, expectedSubsciptionHyperscalerType: hyperscaler.Azure, }, "HA Azure - provided zonesCount": { planID: broker.AzureHAPlanID, zonesCount: ptr.Integer(3), expectedMinimalNumberOfNodes: 1, expectedMaximumNumberOfNodes: 10, expectedMachineType: "Standard_D8_v3", 
expectedProfile: gqlschema.KymaProfileProduction, expectedProvider: "azure", expectedSharedSubscription: false, expectedSubsciptionHyperscalerType: hyperscaler.Azure, }, "HA Azure - default zonesCount": { planID: broker.AzureHAPlanID, expectedMinimalNumberOfNodes: 1, expectedMaximumNumberOfNodes: 10, expectedMachineType: "Standard_D8_v3", expectedProfile: gqlschema.KymaProfileProduction, expectedProvider: "azure", expectedSharedSubscription: false, expectedSubsciptionHyperscalerType: hyperscaler.Azure, }, "Production AWS": { planID: broker.AWSPlanID, expectedMinimalNumberOfNodes: 2, expectedMaximumNumberOfNodes: 10, expectedMachineType: "m5.2xlarge", expectedProfile: gqlschema.KymaProfileProduction, expectedProvider: "aws", expectedSharedSubscription: false, expectedSubsciptionHyperscalerType: hyperscaler.AWS, }, "HA AWS - provided zonesCount": { planID: broker.AWSHAPlanID, zonesCount: ptr.Integer(3), region: "us-east-1", expectedMinimalNumberOfNodes: 1, expectedMaximumNumberOfNodes: 10, expectedMachineType: "m5.2xlarge", expectedProfile: gqlschema.KymaProfileProduction, expectedProvider: "aws", expectedSharedSubscription: false, expectedSubsciptionHyperscalerType: hyperscaler.AWS, }, "HA AWS - default zonesCount": { planID: broker.AWSHAPlanID, region: "eu-central-1", expectedMinimalNumberOfNodes: 1, expectedMaximumNumberOfNodes: 10, expectedMachineType: "m5.2xlarge", expectedProfile: gqlschema.KymaProfileProduction, expectedProvider: "aws", expectedSharedSubscription: false, expectedSubsciptionHyperscalerType: hyperscaler.AWS, }, } { t.Run(tn, func(t *testing.T) { // given suite := NewProvisioningSuite(t) // when provisioningOperationID := suite.CreateProvisioning(RuntimeOptions{ PlanID: tc.planID, ZonesCount: tc.zonesCount, PlatformRegion: tc.platformRegion, PlatformProvider: tc.platformProvider, Region: tc.region, }) // then suite.WaitForProvisioningState(provisioningOperationID, domain.InProgress) 
suite.AssertProvisionerStartedProvisioning(provisioningOperationID) // when suite.FinishProvisioningOperationByProvisioner(provisioningOperationID) // simulate the installed fresh Kyma sets the proper label in the Director suite.MarkDirectorWithConsoleURL(provisioningOperationID) // then suite.WaitForProvisioningState(provisioningOperationID, domain.Succeeded) suite.AssertAllStagesFinished(provisioningOperationID) suite.AssertKymaProfile(tc.expectedProfile) suite.AssertProvider(tc.expectedProvider) suite.AssertMinimalNumberOfNodes(tc.expectedMinimalNumberOfNodes) suite.AssertMaximumNumberOfNodes(tc.expectedMaximumNumberOfNodes) suite.AssertMachineType(tc.expectedMachineType) suite.AssertZonesCount(tc.zonesCount, tc.planID) suite.AssertSubscription(tc.expectedSharedSubscription, tc.expectedSubsciptionHyperscalerType) }) } }
explode_data.jsonl/25517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2716 }
[ 2830, 3393, 1336, 13013, 287, 85110, 4993, 9706, 1155, 353, 8840, 836, 8, 341, 2023, 43308, 11, 17130, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 197, 10393, 915, 1843, 914, 198, 197, 197, 15734, 14091, 256, 914, 198, 197, 197, 1573...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsValidRekeyRequestBasicV2(t *testing.T) { tlfID := tlf.FakeID(1, tlf.Private) uid := keybase1.MakeTestUID(1) bh, err := tlf.MakeHandle( []keybase1.UserOrTeamID{uid.AsUserOrTeam()}, nil, nil, nil, nil) require.NoError(t, err) brmd, err := MakeInitialRootMetadataV2(tlfID, bh) require.NoError(t, err) ctx := context.Background() codec := kbfscodec.NewMsgpack() signer := kbfscrypto.SigningKeySigner{ Key: kbfscrypto.MakeFakeSigningKeyOrBust("key1"), } err = brmd.SignWriterMetadataInternally(ctx, codec, signer) require.NoError(t, err) newBrmd, err := MakeInitialRootMetadataV2(tlfID, bh) require.NoError(t, err) ok, err := newBrmd.IsValidRekeyRequest( codec, brmd, newBrmd.LastModifyingWriter(), nil, nil) require.NoError(t, err) // Should fail because the copy bit is unset. require.False(t, ok) // Set the copy bit; note the writer metadata is the same. newBrmd.SetWriterMetadataCopiedBit() signer2 := kbfscrypto.SigningKeySigner{ Key: kbfscrypto.MakeFakeSigningKeyOrBust("key2"), } err = newBrmd.SignWriterMetadataInternally(ctx, codec, signer2) require.NoError(t, err) ok, err = newBrmd.IsValidRekeyRequest( codec, brmd, newBrmd.LastModifyingWriter(), nil, nil) require.NoError(t, err) // Should fail because of mismatched writer metadata siginfo. require.False(t, ok) // Re-sign to get the same signature. err = newBrmd.SignWriterMetadataInternally(ctx, codec, signer) require.NoError(t, err) ok, err = newBrmd.IsValidRekeyRequest( codec, brmd, newBrmd.LastModifyingWriter(), nil, nil) require.NoError(t, err) require.True(t, ok) }
explode_data.jsonl/8463
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 627 }
[ 2830, 3393, 55470, 693, 792, 1900, 15944, 53, 17, 1155, 353, 8840, 836, 8, 341, 3244, 11008, 915, 1669, 259, 11008, 991, 726, 915, 7, 16, 11, 259, 11008, 87738, 692, 197, 2423, 1669, 1376, 3152, 16, 50133, 2271, 6463, 7, 16, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMessageExportSettingsIsValidGlobalRelaySettingsMissing(t *testing.T) { fs := &FileSettings{ DriverName: NewString("foo"), // bypass file location check } mes := &MessageExportSettings{ EnableExport: NewBool(true), ExportFormat: NewString(COMPLIANCE_EXPORT_TYPE_GLOBALRELAY), ExportFromTimestamp: NewInt64(0), DailyRunTime: NewString("15:04"), BatchSize: NewInt(100), } // should fail because globalrelay settings are missing require.Error(t, mes.isValid(*fs)) }
explode_data.jsonl/50687
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 2052, 16894, 6086, 55470, 11646, 6740, 352, 6086, 25080, 1155, 353, 8840, 836, 8, 341, 53584, 1669, 609, 1703, 6086, 515, 197, 10957, 5469, 675, 25, 1532, 703, 445, 7975, 3975, 442, 30718, 1034, 3728, 1779, 198, 197, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoundSkipOnNilPolkaFromHigherRound(t *testing.T) { cs1, vss := randState(4) vs2, vs3, vs4 := vss[1], vss[2], vss[3] height, round := cs1.Height, cs1.Round timeoutWaitCh := subscribe(cs1.eventBus, types.EventQueryTimeoutWait) newRoundCh := subscribe(cs1.eventBus, types.EventQueryNewRound) pv1, err := cs1.privValidator.GetPubKey() require.NoError(t, err) addr := pv1.Address() voteCh := subscribeToVoter(cs1, addr) // start round startTestRound(cs1, height, round) ensureNewRound(newRoundCh, height, round) ensurePrevote(voteCh, height, round) incrementRound(vss[1:]...) signAddVotes(cs1, types.PrecommitType, nil, types.PartSetHeader{}, vs2, vs3, vs4) round++ // moving to the next round ensureNewRound(newRoundCh, height, round) ensurePrecommit(voteCh, height, round) validatePrecommit(t, cs1, round, -1, vss[0], nil, nil) ensureNewTimeout(timeoutWaitCh, height, round, cs1.config.Precommit(round).Nanoseconds()) round++ // moving to the next round ensureNewRound(newRoundCh, height, round) }
explode_data.jsonl/81658
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 387 }
[ 2830, 3393, 27497, 35134, 1925, 19064, 14658, 4554, 3830, 87445, 27497, 1155, 353, 8840, 836, 8, 341, 71899, 16, 11, 348, 778, 1669, 10382, 1397, 7, 19, 340, 5195, 82, 17, 11, 6165, 18, 11, 6165, 19, 1669, 348, 778, 58, 16, 1125, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestXRef_AddCrossReferences(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) // Issue #1 to test against itarget := testCreateIssue(t, 1, 2, "title1", "content1", false) // PR to close issue #1 content := fmt.Sprintf("content2, closes #%d", itarget.Index) pr := testCreateIssue(t, 1, 2, "title2", content, true) ref := AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: 0}).(*Comment) assert.Equal(t, CommentTypePullRef, ref.Type) assert.Equal(t, pr.RepoID, ref.RefRepoID) assert.True(t, ref.RefIsPull) assert.Equal(t, references.XRefActionCloses, ref.RefAction) // Comment on PR to reopen issue #1 content = fmt.Sprintf("content2, reopens #%d", itarget.Index) c := testCreateComment(t, 1, 2, pr.ID, content) ref = AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: c.ID}).(*Comment) assert.Equal(t, CommentTypeCommentRef, ref.Type) assert.Equal(t, pr.RepoID, ref.RefRepoID) assert.True(t, ref.RefIsPull) assert.Equal(t, references.XRefActionReopens, ref.RefAction) // Issue mentioning issue #1 content = fmt.Sprintf("content3, mentions #%d", itarget.Index) i := testCreateIssue(t, 1, 2, "title3", content, false) ref = AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment) assert.Equal(t, CommentTypeIssueRef, ref.Type) assert.Equal(t, pr.RepoID, ref.RefRepoID) assert.False(t, ref.RefIsPull) assert.Equal(t, references.XRefActionNone, ref.RefAction) // Issue #4 to test against itarget = testCreateIssue(t, 3, 3, "title4", "content4", false) // Cross-reference to issue #4 by admin content = fmt.Sprintf("content5, mentions user3/repo3#%d", itarget.Index) i = testCreateIssue(t, 2, 1, "title5", content, false) ref = AssertExistsAndLoadBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}).(*Comment) assert.Equal(t, CommentTypeIssueRef, ref.Type) assert.Equal(t, i.RepoID, ref.RefRepoID) assert.False(t, ref.RefIsPull) assert.Equal(t, 
references.XRefActionNone, ref.RefAction) // Cross-reference to issue #4 with no permission content = fmt.Sprintf("content6, mentions user3/repo3#%d", itarget.Index) i = testCreateIssue(t, 4, 5, "title6", content, false) AssertNotExistsBean(t, &Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0}) }
explode_data.jsonl/23001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 900 }
[ 2830, 3393, 55, 3945, 21346, 28501, 31712, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 12367, 197, 322, 25226, 671, 16, 311, 1273, 2348, 198, 23374, 1284, 1669, 1273, 4021, 42006, 1155, 11, 220, 16, 11, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteLoop(t *testing.T) { // go c.deleteLoop(time.Second * 1) c, _ := newTestClient(t) pod := &api_v1.Pod{} pod.Status.PodIP = "1.1.1.1" c.handlePodAdd(pod) assert.Equal(t, len(c.Pods), 1) assert.Equal(t, len(c.deleteQueue), 0) c.handlePodDelete(pod) assert.Equal(t, len(c.Pods), 1) assert.Equal(t, len(c.deleteQueue), 1) gracePeriod := time.Millisecond * 500 go c.deleteLoop(time.Millisecond, gracePeriod) go func() { time.Sleep(time.Millisecond * 50) c.m.Lock() assert.Equal(t, len(c.Pods), 1) c.m.Unlock() c.deleteMut.Lock() assert.Equal(t, len(c.deleteQueue), 1) c.deleteMut.Unlock() time.Sleep(gracePeriod + (time.Millisecond * 50)) c.m.Lock() assert.Equal(t, len(c.Pods), 0) c.m.Unlock() c.deleteMut.Lock() assert.Equal(t, len(c.deleteQueue), 0) c.deleteMut.Unlock() close(c.stopCh) }() <-c.stopCh }
explode_data.jsonl/56850
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 6435, 14620, 1155, 353, 8840, 836, 8, 341, 197, 322, 728, 272, 7440, 14620, 9730, 32435, 353, 220, 16, 340, 1444, 11, 716, 1669, 501, 2271, 2959, 1155, 692, 3223, 347, 1669, 609, 2068, 2273, 16, 88823, 16094, 3223, 347, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInMemoryChannelStatus_SetAddressable(t *testing.T) { testCases := map[string]struct { url *apis.URL want *InMemoryChannelStatus }{ "empty string": { want: &InMemoryChannelStatus{ Status: duckv1beta1.Status{ Conditions: []apis.Condition{ { Type: InMemoryChannelConditionAddressable, Status: corev1.ConditionFalse, }, // Note that Ready is here because when the condition is marked False, duck // automatically sets Ready to false. { Type: InMemoryChannelConditionReady, Status: corev1.ConditionFalse, }, }, }, AddressStatus: duckv1alpha1.AddressStatus{Address: &duckv1alpha1.Addressable{}}, }, }, "has domain": { url: &apis.URL{Scheme: "http", Host: "test-domain"}, want: &InMemoryChannelStatus{ AddressStatus: duckv1alpha1.AddressStatus{ Address: &duckv1alpha1.Addressable{ duckv1beta1.Addressable{ URL: &apis.URL{ Scheme: "http", Host: "test-domain", }, }, "test-domain", }, }, Status: duckv1beta1.Status{ Conditions: []apis.Condition{{ Type: InMemoryChannelConditionAddressable, Status: corev1.ConditionTrue, }}, }, }, }, } for n, tc := range testCases { t.Run(n, func(t *testing.T) { cs := &InMemoryChannelStatus{} cs.SetAddress(tc.url) if diff := cmp.Diff(tc.want, cs, ignoreAllButTypeAndStatus); diff != "" { t.Errorf("unexpected conditions (-want, +got) = %v", diff) } }) } }
explode_data.jsonl/22198
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 698 }
[ 2830, 3393, 641, 10642, 9629, 2522, 14812, 4286, 480, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 19320, 220, 353, 13725, 20893, 198, 197, 50780, 353, 641, 10642, 9629, 2522, 198, 197, 59403, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLoadLocalGitHubContents(t *testing.T) { tests := []struct { name string githubContent []string fs map[string]string expectContents []GithubContent }{ { name: "none to set", githubContent: nil, expectContents: nil, }, { name: "set one file", fs: map[string]string{ "/foo/bar.txt": "some-contents", }, githubContent: []string{"replicatedhq/test-stuff:/bar.txt:master:/foo"}, expectContents: []GithubContent{ { Repo: "replicatedhq/test-stuff", Path: "/bar.txt", Ref: "master", Files: []GithubFile{ { Path: "/bar.txt", Name: "bar.txt", Size: 13, Sha: "6e32ea34db1b3755d7dec972eb72c705338f0dd8e0be881d966963438fb2e800", Data: "c29tZS1jb250ZW50", }, }, }, }, }, { name: "set many files from two repos", fs: map[string]string{ "/foo/bar.txt": "some-contents", "/foo/baz.txt": "some-contents", "/foo/bar/baz.txt": "some-contents", "/spam/eggs.txt": "some-other-contents", }, githubContent: []string{ "replicatedhq/test-stuff:/:master:/foo", "replicatedhq/other-tests:/eggs.txt:release:/spam", }, expectContents: []GithubContent{ { Repo: "replicatedhq/test-stuff", Path: "/", Ref: "master", Files: []GithubFile{ { Path: "/bar/baz.txt", Name: "baz.txt", Size: 13, Sha: "6e32ea34db1b3755d7dec972eb72c705338f0dd8e0be881d966963438fb2e800", Data: "c29tZS1jb250ZW50", }, { Path: "/bar.txt", Name: "bar.txt", Size: 13, Sha: "6e32ea34db1b3755d7dec972eb72c705338f0dd8e0be881d966963438fb2e800", Data: "c29tZS1jb250ZW50", }, { Path: "/baz.txt", Name: "baz.txt", Size: 13, Sha: "6e32ea34db1b3755d7dec972eb72c705338f0dd8e0be881d966963438fb2e800", Data: "c29tZS1jb250ZW50", }, }, }, { Repo: "replicatedhq/other-tests", Path: "/eggs.txt", Ref: "release", Files: []GithubFile{ { Path: "/eggs.txt", Name: "eggs.txt", Size: 19, Sha: "a2c0a8c54d71e14e9533749c32716c12f92f61294dfdce4f3b4c07303c0119b0", Data: "c29tZS1vdGhlci1jb250ZW50", }, }, }, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { req := require.New(t) mockFs := afero.Afero{Fs: afero.NewMemMapFs()} for key, value := range 
test.fs { err := mockFs.WriteFile(key, []byte(value), 0777) req.NoError(err) } resolver := &resolver{ Logger: &logger.TestLogger{T: t}, FS: mockFs, SetGitHubContents: test.githubContent, } result, err := resolver.loadLocalGitHubContents() req.NoError(err) req.Equal(test.expectContents, result) }) } }
explode_data.jsonl/73835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1628 }
[ 2830, 3393, 5879, 7319, 75615, 14803, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 3174, 3827, 2762, 220, 3056, 917, 198, 197, 53584, 1797, 2415, 14032, 30953, 198, 197, 24952, 14803, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOverflowNumericCell(t *testing.T) { f, err := OpenFile(filepath.Join("test", "OverflowNumericCell.xlsx")) if !assert.NoError(t, err) { t.FailNow() } val, err := f.GetCellValue("Sheet1", "A1") assert.NoError(t, err) // GOARCH=amd64 - all ok; GOARCH=386 - actual: "-2147483648" assert.Equal(t, "8595602512225", val, "A1 should be 8595602512225") }
explode_data.jsonl/953
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 42124, 36296, 3599, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 5264, 1703, 34793, 22363, 445, 1944, 497, 330, 42124, 36296, 3599, 46838, 5455, 743, 753, 2207, 35699, 1155, 11, 1848, 8, 341, 197, 3244, 57243, 7039, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDiscoverTelemetryEndpoint(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() client, mc, _ := NewMockClient(mockCtrl, ec2.NewBlackholeEC2MetadataClient(), nil) expectedEndpoint := "http://127.0.0.1" mc.EXPECT().DiscoverPollEndpoint(gomock.Any()).Return(&ecs.DiscoverPollEndpointOutput{TelemetryEndpoint: &expectedEndpoint}, nil) endpoint, err := client.DiscoverTelemetryEndpoint("containerInstance") if err != nil { t.Error("Error getting telemetry endpoint: ", err) } if expectedEndpoint != endpoint { t.Errorf("Expected telemetry endpoint(%s) != endpoint(%s)", expectedEndpoint, endpoint) } }
explode_data.jsonl/61449
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 50002, 6639, 35958, 27380, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 741, 25291, 11, 19223, 11, 716, 1669, 1532, 11571, 2959, 30389, 15001, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestQueryValsetConfirm(t *testing.T) { var ( nonce = uint64(1) myValidatorCosmosAddr, _ = sdk.AccAddressFromBech32("cosmos1ees2tqhhhm9ahlhceh2zdguww9lqn2ckukn86l") myValidatorEthereumAddr gethcommon.Address = gethcommon.BytesToAddress(bytes.Repeat([]byte{byte(50)}, 20)) ) input := CreateTestEnv(t) ctx := input.Context input.GravityKeeper.SetValsetConfirm(ctx, types.MsgValsetConfirm{ Nonce: nonce, Orchestrator: myValidatorCosmosAddr.String(), EthAddress: myValidatorEthereumAddr.String(), Signature: "alksdjhflkasjdfoiasjdfiasjdfoiasdj", }) specs := map[string]struct { srcNonce string srcAddr string expErr bool expResp []byte }{ "all good": { srcNonce: "1", srcAddr: myValidatorCosmosAddr.String(), expResp: []byte(`{"type":"nab/MsgValsetConfirm", "value":{"eth_address":"0x3232323232323232323232323232323232323232", "nonce": "1", "orchestrator": "cosmos1ees2tqhhhm9ahlhceh2zdguww9lqn2ckukn86l", "signature": "alksdjhflkasjdfoiasjdfiasjdfoiasdj"}}`), }, "unknown nonce": { srcNonce: "999999", srcAddr: myValidatorCosmosAddr.String(), }, "invalid address": { srcNonce: "1", srcAddr: "not a valid addr", expErr: true, }, "invalid nonce": { srcNonce: "not a valid nonce", srcAddr: myValidatorCosmosAddr.String(), expErr: true, }, } for msg, spec := range specs { t.Run(msg, func(t *testing.T) { got, err := queryValsetConfirm(ctx, []string{spec.srcNonce, spec.srcAddr}, input.GravityKeeper) if spec.expErr { require.Error(t, err) return } require.NoError(t, err) if spec.expResp == nil { assert.Nil(t, got) return } assert.JSONEq(t, string(spec.expResp), string(got)) }) } }
explode_data.jsonl/8793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 866 }
[ 2830, 3393, 2859, 2208, 746, 16728, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 197, 39593, 11070, 284, 2622, 21, 19, 7, 16, 340, 197, 13624, 14256, 54224, 8631, 13986, 11, 716, 503, 284, 45402, 77538, 4286, 3830, 3430, 331, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMuxWith(t *testing.T) { var cmwInit1, cmwHandler1 uint64 var cmwInit2, cmwHandler2 uint64 mw1 := func(next Handler) Handler { cmwInit1++ return HandlerFunc(func(ctx context.Context, rc *fasthttp.RequestCtx) { cmwHandler1++ ctx = context.WithValue(ctx, ctxKey{"inline1"}, "yes") next.ServeHTTP(ctx, rc) }) } mw2 := func(next Handler) Handler { cmwInit2++ return HandlerFunc(func(ctx context.Context, rc *fasthttp.RequestCtx) { cmwHandler2++ ctx = context.WithValue(ctx, ctxKey{"inline2"}, "yes") next.ServeHTTP(ctx, rc) }) } r := NewRouter() r.Get("/hi", HandlerFunc(func(ctx context.Context, rc *fasthttp.RequestCtx) { rc.Write([]byte("bye")) })) r.With(mw1).With(mw2).Get("/inline", HandlerFunc(func(ctx context.Context, rc *fasthttp.RequestCtx) { v1 := ctx.Value(ctxKey{"inline1"}).(string) v2 := ctx.Value(ctxKey{"inline2"}).(string) rc.Write([]byte(fmt.Sprintf("inline %s %s", v1, v2))) })) ts := NewTestServer(r) defer ts.Close() if _, body := testRequest(t, ts, "GET", "/hi", nil); body != "bye" { t.Fatalf(body) } if _, body := testRequest(t, ts, "GET", "/inline", nil); body != "inline yes yes" { t.Fatalf(body) } if cmwInit1 != 1 { t.Fatalf("expecting cmwInit1 to be 1, got %d", cmwInit1) } if cmwHandler1 != 1 { t.Fatalf("expecting cmwHandler1 to be 1, got %d", cmwHandler1) } if cmwInit2 != 1 { t.Fatalf("expecting cmwInit2 to be 1, got %d", cmwInit2) } if cmwHandler2 != 1 { t.Fatalf("expecting cmwHandler2 to be 1, got %d", cmwHandler2) } }
explode_data.jsonl/47947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 681 }
[ 2830, 3393, 44, 2200, 2354, 1155, 353, 8840, 836, 8, 341, 2405, 9961, 86, 3803, 16, 11, 9961, 86, 3050, 16, 2622, 21, 19, 198, 2405, 9961, 86, 3803, 17, 11, 9961, 86, 3050, 17, 2622, 21, 19, 198, 2109, 86, 16, 1669, 2915, 16913,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestColorMConcatSelf(t *testing.T) { expected := [4][5]float64{ {30, 40, 30, 25, 30}, {40, 54, 43, 37, 37}, {30, 43, 51, 39, 34}, {25, 37, 39, 46, 36}, } m := ebiten.ColorM{} for i := 0; i < 4; i++ { for j := 0; j < 5; j++ { m.SetElement(i, j, float64((i+j)%5+1)) } } m.Concat(m) for i := 0; i < 4; i++ { for j := 0; j < 5; j++ { got := m.Element(i, j) want := expected[i][j] if want != got { t.Errorf("m.Element(%d, %d) = %f, want %f", i, j, got, want) } } } }
explode_data.jsonl/48453
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 1636, 44, 78440, 12092, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 508, 19, 1457, 20, 60, 3649, 21, 19, 515, 197, 197, 90, 18, 15, 11, 220, 19, 15, 11, 220, 18, 15, 11, 220, 17, 20, 11, 220, 18, 15, 1583, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestRoleList_filterForbidden(t *testing.T) { h := newHelper(t) // @todo this can be a problematic test because it leaves // behind roles that are not denied this context // db purge might be needed h.repoMakeRole("role") f := h.repoMakeRole() helpers.AllowMe(h, types.ComponentRbacResource(), "roles.search") helpers.DenyMe(h, f.RbacResource(), "read") h.apiInit(). Get("/roles/"). Query("handle", f.Handle). Expect(t). Status(http.StatusOK). Assert(helpers.AssertNoErrors). Assert(jsonpath.NotPresent(fmt.Sprintf(`$.response.set[? @.handle=="%s"]`, f.Handle))). End() }
explode_data.jsonl/8335
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 9030, 852, 8727, 69115, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 501, 5511, 1155, 692, 197, 322, 569, 17370, 419, 646, 387, 264, 34933, 1273, 1576, 432, 10901, 198, 197, 322, 981, 4815, 12783, 429, 525, 537, 14820, 419, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChangeFeedInfoClone(t *testing.T) { t.Parallel() info := &ChangeFeedInfo{ SinkURI: "blackhole://", Opts: map[string]string{}, StartTs: 417257993615179777, Config: &config.ReplicaConfig{ CaseSensitive: true, EnableOldValue: true, CheckGCSafePoint: true, }, } cloned, err := info.Clone() require.Nil(t, err) sinkURI := "mysql://unix:/var/run/tidb.sock" cloned.SinkURI = sinkURI cloned.Config.EnableOldValue = false require.Equal(t, sinkURI, cloned.SinkURI) require.False(t, cloned.Config.EnableOldValue) require.Equal(t, "blackhole://", info.SinkURI) require.True(t, info.Config.EnableOldValue) }
explode_data.jsonl/10735
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 273 }
[ 2830, 3393, 4072, 28916, 1731, 37677, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 27043, 1669, 609, 4072, 28916, 1731, 515, 197, 7568, 766, 10301, 25, 330, 11453, 30420, 1110, 756, 197, 197, 43451, 25, 262, 2415, 14032, 30953...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBlockAccept_PostForkOption_SetsLastAcceptedBlock(t *testing.T) { // setup coreVM, _, _, _, proVM, coreGenBlk, _ := initTestProposerVM(t, time.Time{}, 0) proVM.Set(coreGenBlk.Timestamp()) // create post fork oracle block ... oracleCoreBlk := &TestOptionsBlock{ TestBlock: snowman.TestBlock{ TestDecidable: choices.TestDecidable{ IDV: ids.Empty.Prefix(1111), StatusV: choices.Processing, }, BytesV: []byte{1}, ParentV: coreGenBlk.ID(), TimestampV: coreGenBlk.Timestamp(), }, } oracleCoreBlk.opts = [2]snowman.Block{ &snowman.TestBlock{ TestDecidable: choices.TestDecidable{ IDV: ids.Empty.Prefix(2222), StatusV: choices.Processing, }, BytesV: []byte{2}, ParentV: oracleCoreBlk.ID(), TimestampV: oracleCoreBlk.Timestamp(), }, &snowman.TestBlock{ TestDecidable: choices.TestDecidable{ IDV: ids.Empty.Prefix(3333), StatusV: choices.Processing, }, BytesV: []byte{3}, ParentV: oracleCoreBlk.ID(), TimestampV: oracleCoreBlk.Timestamp(), }, } coreVM.BuildBlockF = func() (snowman.Block, error) { return oracleCoreBlk, nil } coreVM.GetBlockF = func(blkID ids.ID) (snowman.Block, error) { switch blkID { case coreGenBlk.ID(): return coreGenBlk, nil case oracleCoreBlk.ID(): return oracleCoreBlk, nil case oracleCoreBlk.opts[0].ID(): return oracleCoreBlk.opts[0], nil case oracleCoreBlk.opts[1].ID(): return oracleCoreBlk.opts[1], nil default: return nil, database.ErrNotFound } } coreVM.ParseBlockF = func(b []byte) (snowman.Block, error) { switch { case bytes.Equal(b, coreGenBlk.Bytes()): return coreGenBlk, nil case bytes.Equal(b, oracleCoreBlk.Bytes()): return oracleCoreBlk, nil case bytes.Equal(b, oracleCoreBlk.opts[0].Bytes()): return oracleCoreBlk.opts[0], nil case bytes.Equal(b, oracleCoreBlk.opts[1].Bytes()): return oracleCoreBlk.opts[1], nil default: return nil, errUnknownBlock } } parentBlk, err := proVM.BuildBlock() if err != nil { t.Fatal("could not build post fork oracle block") } // accept oracle block if err := parentBlk.Accept(); err != nil { t.Fatal("could not accept block") 
} coreVM.LastAcceptedF = func() (ids.ID, error) { if oracleCoreBlk.Status() == choices.Accepted { return oracleCoreBlk.ID(), nil } return coreGenBlk.ID(), nil } if acceptedID, err := proVM.LastAccepted(); err != nil { t.Fatal("could not retrieve last accepted block") } else if acceptedID != parentBlk.ID() { t.Fatal("unexpected last accepted ID") } // accept one of the options postForkOracleBlk, ok := parentBlk.(*postForkBlock) if !ok { t.Fatal("expected post fork block") } opts, err := postForkOracleBlk.Options() if err != nil { t.Fatal("could not retrieve options from post fork oracle block") } if err := opts[0].Accept(); err != nil { t.Fatal("could not accept option") } coreVM.LastAcceptedF = func() (ids.ID, error) { if oracleCoreBlk.opts[0].Status() == choices.Accepted { return oracleCoreBlk.opts[0].ID(), nil } return oracleCoreBlk.ID(), nil } if acceptedID, err := proVM.LastAccepted(); err != nil { t.Fatal("could not retrieve last accepted block") } else if acceptedID != opts[0].ID() { t.Fatal("unexpected last accepted ID") } }
explode_data.jsonl/19403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1388 }
[ 2830, 3393, 4713, 16646, 66726, 37, 669, 5341, 1098, 1415, 5842, 65906, 4713, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 198, 71882, 11187, 11, 8358, 8358, 8358, 462, 11187, 11, 6200, 9967, 4923, 74, 11, 716, 1669, 2930, 2271, 2008...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_WriteJSON(t *testing.T) { out := bytes.NewBuffer([]byte{}) val := struct{ Key, Value string }{Key: "n", Value: "v"} err := ctl.WriteJSON(out, val) require.NoError(t, err) assert.Equal(t, `{ "Key": "n", "Value": "v" }`, string(out.Bytes())) }
explode_data.jsonl/6331
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 31825, 5370, 1155, 353, 8840, 836, 8, 341, 13967, 1669, 5820, 7121, 4095, 10556, 3782, 37790, 19302, 1669, 2036, 90, 5309, 11, 5162, 914, 335, 90, 1592, 25, 330, 77, 497, 5162, 25, 330, 85, 63159, 9859, 1669, 64935, 4073, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestModMap(t *testing.T) { m := map[string]string{ "bill": "bill smith", "mary": "mary smith", } _, err := starlight.Eval([]byte(`contacts["bill"] = "john smith"`), map[string]interface{}{"contacts": m}, nil) if err != nil { t.Fatal(err) } expected := "john smith" if m["bill"] != expected { t.Fatalf("expected %q, but was %q", expected, m["bill"]) } }
explode_data.jsonl/47076
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 4459, 2227, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 2415, 14032, 30953, 515, 197, 197, 1, 29642, 788, 330, 29642, 76721, 756, 197, 197, 1, 1534, 788, 330, 1534, 76721, 756, 197, 630, 197, 6878, 1848, 1669, 6774, 4145, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMapProxy_PutAll(t *testing.T) { testMap := make(map[interface{}]interface{}) for i := 0; i < 10; i++ { testMap["testingKey"+strconv.Itoa(i)] = "testingValue" + strconv.Itoa(i) } err := mp.PutAll(testMap) if err != nil { t.Fatal(err) } else { entryList, err := mp.EntrySet() if err != nil { t.Fatal(err) } for _, pair := range entryList { key := pair.Key() value := pair.Value() expectedValue, found := testMap[key] if !found || expectedValue != value { t.Fatalf("Map PutAll failed") } } } mp.Clear() }
explode_data.jsonl/57013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 2227, 16219, 1088, 332, 2403, 1155, 353, 8840, 836, 8, 341, 18185, 2227, 1669, 1281, 9147, 58, 4970, 78134, 4970, 37790, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 26, 600, 1027, 341, 197, 18185, 2227, 1183, 88...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRunner_Processor_Run_Error(t *testing.T) { t.Parallel() repo := &RepositoryMock{} publisher := &PublisherMock{} r := New(repo, WithRetryDuration(20*time.Millisecond), WithPublisher(100, publisher), ) ctx := context.Background() ctx, cancel := context.WithCancel(ctx) repo.GetLastEventsFunc = func(ctx context.Context, limit uint64) ([]Event, error) { return nil, nil } repo.GetLastSequenceFunc = func(ctx context.Context, id PublisherID) (uint64, error) { return 0, nil } repo.GetUnprocessedEventsFunc = func(ctx context.Context, limit uint64) ([]Event, error) { return []Event{ {ID: 100, Sequence: 10}, {ID: 99, Sequence: 18}, }, nil } repo.UpdateSequencesFunc = func(ctx context.Context, events []Event) error { return errors.New("update-seq-error") } var wg sync.WaitGroup wg.Add(1) go func() { defer wg.Done() r.Run(ctx) }() time.Sleep(30 * time.Millisecond) cancel() wg.Wait() assert.Equal(t, 2, len(repo.GetLastEventsCalls())) }
explode_data.jsonl/21368
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 19486, 70241, 269, 84158, 28651, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 17200, 5368, 1669, 609, 4624, 11571, 16094, 3223, 15182, 1669, 609, 34550, 11571, 31483, 7000, 1669, 1532, 50608, 11, 3085, 51560, 12945, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJetStreamSubscribe_AckDupInProgress(t *testing.T) { s := RunBasicJetStreamServer() defer s.Shutdown() if config := s.JetStreamConfig(); config != nil { defer os.RemoveAll(config.StoreDir) } nc, err := nats.Connect(s.ClientURL()) if err != nil { t.Fatalf("Unexpected error: %v", err) } defer nc.Close() js, err := nc.JetStream() if err != nil { t.Fatalf("Unexpected error: %v", err) } // Create the stream using our client API. _, err = js.AddStream(&nats.StreamConfig{ Name: "TEST", Subjects: []string{"foo"}, }) if err != nil { t.Fatalf("Unexpected error: %v", err) } js.Publish("foo", []byte("hello")) ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond) defer cancel() pings := make(chan struct{}, 3) nc.Subscribe("$JS.ACK.TEST.>", func(msg *nats.Msg) { pings <- struct{}{} }) nc.Flush() ch := make(chan error, 3) _, err = js.Subscribe("foo", func(m *nats.Msg) { // InProgress ACK can be sent any number of times. ch <- m.InProgress() ch <- m.InProgress() ch <- m.Ack() }, nats.Durable("WQ"), nats.ManualAck()) if err != nil { t.Fatalf("Unexpected error: %v", err) } <-ctx.Done() ackErr1 := <-ch ackErr2 := <-ch ackErr3 := <-ch if ackErr1 != nil { t.Errorf("Unexpected error: %v", ackErr1) } if ackErr2 != nil { t.Errorf("Unexpected error: %v", ackErr2) } if ackErr3 != nil { t.Errorf("Unexpected error: %v", ackErr3) } if len(pings) != 3 { t.Logf("Expected to receive multiple acks, got: %v", len(pings)) } }
explode_data.jsonl/29174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 655 }
[ 2830, 3393, 35641, 3027, 28573, 1566, 377, 85713, 88711, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 6452, 15944, 35641, 3027, 5475, 741, 16867, 274, 10849, 18452, 2822, 743, 2193, 1669, 274, 3503, 295, 3027, 2648, 2129, 2193, 961, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnsureTLSAndReturnAddr(t *testing.T) { for _, testCase := range []struct { endpoint string expectedOut string errExpected bool }{ { endpoint: "scanner.stackrox", errExpected: true, }, { endpoint: "http://scanner.stackrox", errExpected: true, }, { endpoint: "https://scanner.stackrox", expectedOut: "scanner.stackrox:443", }, { endpoint: "https://scanner.stackrox:8080", expectedOut: "scanner.stackrox:8080", }, { endpoint: "https://scanner.stackrox/", expectedOut: "scanner.stackrox:443", }, { endpoint: "https://scanner.stackrox/ping", expectedOut: "scanner.stackrox:443", }, { endpoint: "https://scanner.stackrox:8080/", expectedOut: "scanner.stackrox:8080", }, { endpoint: "https://scanner.stackrox:8080/ping", expectedOut: "scanner.stackrox:8080", }, } { c := testCase t.Run(c.endpoint, func(t *testing.T) { got, err := ensureTLSAndReturnAddr(c.endpoint) if c.errExpected { assert.Error(t, err) return } assert.NoError(t, err) assert.Equal(t, c.expectedOut, got) }) } }
explode_data.jsonl/64356
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 475 }
[ 2830, 3393, 64439, 45439, 3036, 5598, 13986, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 54452, 1669, 2088, 3056, 1235, 341, 197, 6246, 2768, 262, 914, 198, 197, 42400, 2662, 914, 198, 197, 9859, 18896, 1807, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseTracestateHeader(t *testing.T) { assertParseError := func(h, expect string) { _, err := apmhttp.ParseTracestateHeader(h) if assert.Error(t, err) { assert.Regexp(t, expect, err.Error()) } } assertParseError("a", `missing '=' in tracestate entry`) assertParseError("a=b, c ", `missing '=' in tracestate entry`) assertParse := func(h ...string) (apm.TraceState, bool) { out, err := apmhttp.ParseTracestateHeader(h...) return out, assert.NoError(t, err) } tracestate, _ := assertParse("vendorname1=opaqueValue1,vendorname2=opaqueValue2") assert.Equal(t, "vendorname1=opaqueValue1,vendorname2=opaqueValue2", tracestate.String()) tracestate, _ = assertParse("vendorname1=opaqueValue1", "vendorname2=opaqueValue2") assert.Equal(t, "vendorname1=opaqueValue1,vendorname2=opaqueValue2", tracestate.String()) }
explode_data.jsonl/34189
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 341 }
[ 2830, 3393, 14463, 6550, 2454, 4047, 1155, 353, 8840, 836, 8, 341, 6948, 14463, 1454, 1669, 2915, 3203, 11, 1720, 914, 8, 341, 197, 197, 6878, 1848, 1669, 1443, 76, 1254, 8937, 6550, 2454, 4047, 3203, 340, 197, 743, 2060, 6141, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestListProfileTypesOps(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() HandleListOpsSuccessfully(t) allPages, err := profiletypes.ListOps(fake.ServiceClient(), ProfileTypeName).AllPages() th.AssertNoErr(t, err) allPolicyTypes, err := profiletypes.ExtractOps(allPages) th.AssertNoErr(t, err) for k, v := range allPolicyTypes { tools.PrintResource(t, k) tools.PrintResource(t, v) } }
explode_data.jsonl/49317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 852, 8526, 4173, 38904, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 2822, 197, 6999, 852, 38904, 35959, 1155, 692, 50960, 17713, 11, 1848, 1669, 5526, 9242, 5814, 38904, 74138, 13860,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSerializerCreatesSerializedVersionOfAssertionResult(t *testing.T) { thing1 := Thing1{"Hi"} thing2 := Thing2{"Bye"} message := "Super-hip failure message." serializer := newSerializer() actualResult := serializer.serialize(thing1, thing2, message) expectedResult, _ := json.Marshal(reporting.FailureView{ Message: message, Expected: fmt.Sprintf("%+v", thing1), Actual: fmt.Sprintf("%+v", thing2), }) if actualResult != string(expectedResult) { t.Errorf("\nExpected: %s\nActual: %s", string(expectedResult), actualResult) } actualResult = serializer.serializeDetailed(thing1, thing2, message) expectedResult, _ = json.Marshal(reporting.FailureView{ Message: message, Expected: fmt.Sprintf("%#v", thing1), Actual: fmt.Sprintf("%#v", thing2), }) if actualResult != string(expectedResult) { t.Errorf("\nExpected: %s\nActual: %s", string(expectedResult), actualResult) } }
explode_data.jsonl/55043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 332 }
[ 2830, 3393, 13909, 54868, 77521, 5637, 2124, 68639, 2077, 1155, 353, 8840, 836, 8, 341, 197, 1596, 16, 1669, 37200, 16, 4913, 13048, 16707, 197, 1596, 17, 1669, 37200, 17, 4913, 1359, 68, 16707, 24753, 1669, 330, 19284, 12, 2151, 7901, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestForPhraseUDT(t *testing.T) { gopClTest(t, ` type foo struct { } func (p *foo) Gop_Enum(c func(val string)) { } for v <- new(foo) { println(v) } `, `package main import fmt "fmt" type foo struct { } func (p *foo) Gop_Enum(c func(val string)) { } func main() { new(foo).Gop_Enum(func(v string) { fmt.Println(v) }) } `) }
explode_data.jsonl/73641
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 2461, 46806, 4656, 51, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 1313, 15229, 2036, 341, 630, 2830, 320, 79, 353, 7975, 8, 479, 453, 62, 10766, 1337, 2915, 9098, 914, 593, 341, 630, 1958, 348,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogger_leveledWriter(t *testing.T) { t.Run("writes errors to stderr", func(t *testing.T) { var stderr bytes.Buffer var stdout bytes.Buffer logger := New(&LoggerOptions{ Name: "test", Output: NewLeveledWriter(&stdout, map[Level]io.Writer{Error: &stderr}), }) logger.Error("this is an error", "who", "programmer", "why", "testing") errStr := stderr.String() errDataIdx := strings.IndexByte(errStr, ' ') errRest := errStr[errDataIdx+1:] assert.Equal(t, "[ERROR] test: this is an error: who=programmer why=testing\n", errRest) }) t.Run("writes non-errors to stdout", func(t *testing.T) { var stderr bytes.Buffer var stdout bytes.Buffer logger := New(&LoggerOptions{ Name: "test", Output: NewLeveledWriter(&stdout, map[Level]io.Writer{Error: &stderr}), }) logger.Info("this is test", "who", "programmer", "why", "testing") outStr := stdout.String() outDataIdx := strings.IndexByte(outStr, ' ') outRest := outStr[outDataIdx+1:] assert.Equal(t, "[INFO] test: this is test: who=programmer why=testing\n", outRest) }) t.Run("writes errors and non-errors correctly", func(t *testing.T) { var stderr bytes.Buffer var stdout bytes.Buffer logger := New(&LoggerOptions{ Name: "test", Output: NewLeveledWriter(&stdout, map[Level]io.Writer{Error: &stderr}), }) logger.Info("this is test", "who", "programmer", "why", "testing") logger.Error("this is an error", "who", "programmer", "why", "testing") errStr := stderr.String() errDataIdx := strings.IndexByte(errStr, ' ') errRest := errStr[errDataIdx+1:] outStr := stdout.String() outDataIdx := strings.IndexByte(outStr, ' ') outRest := outStr[outDataIdx+1:] assert.Equal(t, "[ERROR] test: this is an error: who=programmer why=testing\n", errRest) assert.Equal(t, "[INFO] test: this is test: who=programmer why=testing\n", outRest) }) }
explode_data.jsonl/22706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 746 }
[ 2830, 3393, 7395, 11751, 93964, 6492, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 59079, 5975, 311, 26436, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 2405, 26436, 5820, 22622, 198, 197, 2405, 20075, 5820, 22622, 271, 197, 17060, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequiresASMSecret(t *testing.T) { secret := apicontainer.Secret{ Provider: "asm", Name: "secret", Region: "us-west-2", ValueFrom: "/test/secretName", } container := &apicontainer.Container{ Name: "myName", Image: "image:tag", Secrets: []apicontainer.Secret{secret}, TransitionDependenciesMap: make(map[apicontainerstatus.ContainerStatus]apicontainer.TransitionDependencySet), } container1 := &apicontainer.Container{ Name: "myName", Image: "image:tag", Secrets: nil, TransitionDependenciesMap: make(map[apicontainerstatus.ContainerStatus]apicontainer.TransitionDependencySet), } task := &Task{ Arn: "test", ResourcesMapUnsafe: make(map[string][]taskresource.TaskResource), Containers: []*apicontainer.Container{container, container1}, } assert.True(t, task.requiresASMSecret()) }
explode_data.jsonl/37244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 46961, 1911, 4826, 50856, 1155, 353, 8840, 836, 8, 341, 197, 20474, 1669, 1443, 51160, 1743, 74779, 515, 197, 197, 5179, 25, 220, 330, 10530, 756, 197, 21297, 25, 414, 330, 20474, 756, 197, 197, 14091, 25, 262, 330, 355, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsIncomplete(t *testing.T) { t.Parallel() tests := []struct { in string want bool }{ {"foo\n", false}, {"foo;", false}, {"\n", false}, {"'incomp", true}, {"foo; 'incomp", true}, {" (incomp", true}, {"badsyntax)", false}, } p := NewParser() for i, tc := range tests { t.Run(fmt.Sprintf("Parse%02d", i), func(t *testing.T) { r := strings.NewReader(tc.in) _, err := p.Parse(r, "") if got := IsIncomplete(err); got != tc.want { t.Fatalf("%q got %t, wanted %t", tc.in, got, tc.want) } }) t.Run(fmt.Sprintf("Interactive%02d", i), func(t *testing.T) { r := strings.NewReader(tc.in) err := p.Interactive(r, func([]*Stmt) bool { return false }) if got := IsIncomplete(err); got != tc.want { t.Fatalf("%q got %t, wanted %t", tc.in, got, tc.want) } }) } }
explode_data.jsonl/55139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 394 }
[ 2830, 3393, 3872, 96698, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 17430, 256, 914, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 4913, 7975, 1699, 497, 895, 1583, 197, 197, 4913, 79...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMatchAtEnd(t *testing.T) { m := NewStringMatcher([]string{"teel", "eel", "el"}) hits := m.Match([]byte("The Man Of Steel")) assert(t, len(hits) == 3) assert(t, hits[0].Index == 0) assert(t, hits[1].Index == 1) assert(t, hits[2].Index == 2) hits = m.MatchThreadSafe([]byte("The Man Of Steel")) assert(t, len(hits) == 3) assert(t, hits[0].Index == 0) assert(t, hits[1].Index == 1) assert(t, hits[2].Index == 2) }
explode_data.jsonl/22647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 8331, 1655, 3727, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 703, 37554, 10556, 917, 4913, 665, 301, 497, 330, 95451, 497, 330, 301, 23625, 9598, 1199, 1669, 296, 36062, 10556, 3782, 445, 785, 2363, 4940, 12500, 5455, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Finalizer_PassThru(t *testing.T) { // Arrange handler, _ := newErrorsFakeHandler(mockErrorsArangoClientBackup{}) obj, _ := newObjectSet(backupApi.ArangoBackupStateCreate) time := meta.Time{ Time: time.Now(), } obj.DeletionTimestamp = &time // Act //createArangoDeployment(t, handler, deployment) createArangoBackup(t, handler, obj) require.NoError(t, handler.Handle(newItemFromBackup(operation.Delete, obj))) // Assert newObj := refreshArangoBackup(t, handler, obj) require.Equal(t, newObj.Status, obj.Status) require.Equal(t, newObj.Spec, obj.Spec) }
explode_data.jsonl/5626
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 77072, 3135, 1088, 395, 1001, 2672, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 53326, 11, 716, 1669, 501, 13877, 52317, 3050, 30389, 13877, 6953, 5170, 2959, 56245, 6257, 692, 22671, 11, 716, 1669, 501, 1190, 1649, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRabbitMQParseMetadata(t *testing.T) { for _, testData := range testRabbitMQMetadata { _, err := parseRabbitMQMetadata(&ScalerConfig{ResolvedEnv: sampleRabbitMqResolvedEnv, TriggerMetadata: testData.metadata, AuthParams: testData.authParams}) if err != nil && !testData.isError { t.Error("Expected success but got error", err) } if testData.isError && err == nil { t.Error("Expected error but got success") } } }
explode_data.jsonl/61596
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 49, 19949, 35169, 14463, 14610, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 67348, 1669, 2088, 1273, 49, 19949, 35169, 14610, 341, 197, 197, 6878, 1848, 1669, 4715, 49, 19949, 35169, 14610, 2099, 59553, 2648, 90, 65394, 14359, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTriggerPluginDispatcher(t *testing.T) { t.Parallel() type record struct { keyPath string } tests := []struct { name string trigger *FakeTrigger on string records []record wantCalledWith string wantCalledWithLen int }{ { name: "only records that match the keypath should be triggered", trigger: NewFakeTrigger(false), on: "AAPL/1Min/OHLCV", records: []record{ {keyPath: "AAPL/1Min/OHLCV/2017.bin"}, {keyPath: "TSLA/1Min/OHLCV/2017.bin"}, }, wantCalledWith: "AAPL/1Min/OHLCV/2017.bin", wantCalledWithLen: 1, }, { name: "recovered when panic is triggered", trigger: NewFakeTrigger(true), on: "AAPL/1Min/OHLCV", records: []record{ {keyPath: "AAPL/1Min/OHLCV/2017.bin"}, }, wantCalledWith: "AAPL/1Min/OHLCV/2017.bin", wantCalledWithLen: 0, }, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() // --- given --- matchers := []*trigger.TriggerMatcher{trigger.NewMatcher(tt.trigger, tt.on)} tpd := executor.NewTriggerPluginDispatcher(matchers) fakeBuffer := io.SwapSliceData([]int64{0, 5}, byte(0)).([]byte) // --- when for _, r := range tt.records { tpd.AppendRecord(r.keyPath, wal.OffsetIndexBuffer(fakeBuffer).IndexAndPayload()) } tpd.DispatchRecords() <-tt.trigger.fireC // wait until fired // --- then --- assert.Equal(t, len(tt.trigger.calledWith), tt.wantCalledWithLen) if tt.wantCalledWithLen > 0 { assert.Equal(t, tt.trigger.calledWith[0][0].(string), tt.wantCalledWith) } }) } }
explode_data.jsonl/77156
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 774 }
[ 2830, 3393, 17939, 11546, 21839, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 13158, 3255, 2036, 341, 197, 23634, 1820, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 1060, 914, 198, 197, 83228, 1843, 353, 52317...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIsConnTimeoutForWrappedConnTimeouts(t *testing.T) { dialer := DialTCPFn(testFreeTCPAddr(t), time.Millisecond, ed25519.GenPrivKey()) _, err := dialer() assert.Error(t, err) err = cmn.ErrorWrap(ErrConnTimeout, err.Error()) assert.True(t, IsConnTimeout(err)) }
explode_data.jsonl/11246
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 3872, 9701, 7636, 2461, 67795, 9701, 7636, 82, 1155, 353, 8840, 836, 8, 341, 2698, 530, 261, 1669, 66155, 49896, 24911, 8623, 10940, 49896, 13986, 1155, 701, 882, 71482, 11, 1578, 17, 20, 20, 16, 24, 65384, 32124, 1592, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetFills(t *testing.T) { b.SetDefaults() TestSetup(t) _, err := b.GetFills("", "BTC-USD", "", "", "") if areTestAPIKeysSet() && err != nil { t.Errorf("Could not get fills: %s", err) } else if !areTestAPIKeysSet() && err == nil { t.Error("Expecting an error when no keys are set") } }
explode_data.jsonl/14538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 1949, 37, 3305, 1155, 353, 8840, 836, 8, 341, 2233, 4202, 16273, 741, 73866, 21821, 1155, 692, 197, 6878, 1848, 1669, 293, 2234, 37, 3305, 19814, 330, 59118, 12, 26749, 497, 7342, 7342, 14676, 743, 525, 2271, 7082, 8850, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5