text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestListCommandRunEClosureWithTable(t *testing.T) { assert := assert.New(t) cli := test.NewCLI() config := cli.Config.(*client.MockConfig) config.On("Format").Return("none") client := cli.Client.(*client.MockClient) client.On("ListAssets", mock.Anything).Return([]types.Asset{ *types.FixtureAsset("one"), *types.FixtureAsset("two"), }, nil) cmd := ListCommand(cli) out, err := test.RunCmd(cmd, []string{}) assert.NotEmpty(out) assert.Nil(err) }
explode_data.jsonl/78031
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 852, 4062, 6727, 7498, 11653, 2354, 2556, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 86448, 1669, 1273, 7121, 63959, 2822, 25873, 1669, 21348, 10753, 41399, 2972, 24664, 2648, 340, 25873, 8071, 445, 4061,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetsTheCorrectCurencies(t *testing.T) { //arrange //act result := GetCurrencies() //assert resultJSON, _ := json.Marshal(result) expected, _ := json.Marshal([]*Currency{ {ID: 1, Name: "USD"}, {ID: 2, Name: "EUR"}, }) if string(resultJSON) != string(expected) { t.Error("Did not get expected currencies") } }
explode_data.jsonl/43533
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 49358, 785, 33092, 34, 552, 1016, 550, 1155, 353, 8840, 836, 8, 341, 197, 322, 1118, 844, 271, 197, 322, 531, 198, 9559, 1669, 2126, 34, 19607, 2822, 197, 322, 2207, 198, 9559, 5370, 11, 716, 1669, 2951, 37271, 4456, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEntityFromSecring(t *testing.T) { ent, err := EntityFromSecring("26F5ABDA", "testdata/test-secring.gpg") if err != nil { t.Fatalf("EntityFromSecring: %v", err) } if ent == nil { t.Fatalf("nil entity") } if _, ok := ent.Identities["Camli Tester <camli-test@example.com>"]; !ok { t.Errorf("missing expected identity") } }
explode_data.jsonl/44738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 3030, 3830, 8430, 12640, 1155, 353, 8840, 836, 8, 341, 77655, 11, 1848, 1669, 10390, 3830, 8430, 12640, 445, 17, 21, 37, 20, 1867, 6352, 497, 330, 92425, 12697, 12, 5024, 12640, 1302, 3517, 1138, 743, 1848, 961, 2092, 341,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNumInput(t *testing.T) { var testCases = []struct { name string query string expected int }{ { name: "basic positional", query: "select * from table where a = ?", expected: 1, }, { name: "basic named", query: "select * from table where a = @name", expected: 1, }, { name: "reused named", query: "select * from table where a = @name and b = @name and c = @id", expected: 2, }, { name: "positional character in comment", query: `select * --test? from table where a = 1`, expected: 0, }, { name: "positional character in string", query: "select * from test where a = 'test?'", expected: 0, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { stmt, _ := newStmt(nil, tc.query) result := stmt.NumInput() if result != tc.expected { t.Errorf("Expected %d query inputs, got %d", tc.expected, result) } }) } }
explode_data.jsonl/25532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 431 }
[ 2830, 3393, 4651, 2505, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 37302, 284, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 27274, 262, 914, 198, 197, 42400, 526, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 257, 330, 22342, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEndpointsByNetworkFilter_RegistryServiceName(t *testing.T) { // - 1 gateway for network1 // - 1 gateway for network2 // - 1 gateway for network3 // - 0 gateways for network4 env := environment() env.MeshNetworks.Networks["network2"] = &meshconfig.Network{ Endpoints: []*meshconfig.Network_NetworkEndpoints{ { Ne: &meshconfig.Network_NetworkEndpoints_FromRegistry{ FromRegistry: "cluster2", }, }, }, Gateways: []*meshconfig.Network_IstioNetworkGateway{ { Gw: &meshconfig.Network_IstioNetworkGateway_RegistryServiceName{ RegistryServiceName: "istio-ingressgateway.istio-system.svc.cluster.local", }, Port: 80, }, }, } gwSvcName := config.Hostname("istio-ingressgateway.istio-system.svc.cluster.local") serviceDiscovery := NewMemServiceDiscovery(map[config.Hostname]*model.Service{ gwSvcName: { Hostname: gwSvcName, Attributes: model.ServiceAttributes{ ClusterExternalAddresses: map[string][]string{ "cluster2": {"2.2.2.2"}, }, }, }, }, 0) env.ServiceDiscovery = serviceDiscovery // Test endpoints creates: // - 2 endpoints in network1 // - 1 endpoints in network2 // - 0 endpoints in network3 // - 1 endpoints in network4 testEndpoints := testEndpoints() // The tests below are calling the endpoints filter from each one of the // networks and examines the returned filtered endpoints tests := []struct { name string endpoints []*endpoint.LocalityLbEndpoints conn *XdsConnection env *model.Environment want []LocLbEpInfo }{ { name: "from_network1", conn: xdsConnection("network1"), env: env, endpoints: testEndpoints, want: []LocLbEpInfo{ { lbEps: []LbEpInfo{ // 2 local endpoints {address: "10.0.0.1", weight: 1}, {address: "10.0.0.2", weight: 1}, // 1 endpoint to gateway of network2 with weight 1 because it has 1 endpoint {address: "2.2.2.2", weight: 1}, }, weight: 3, }, }, }, { name: "from_network2", conn: xdsConnection("network2"), env: env, endpoints: testEndpoints, want: []LocLbEpInfo{ { lbEps: []LbEpInfo{ // 1 local endpoint {address: "20.0.0.1", weight: 1}, // 1 endpoint to 
gateway of network1 with weight 2 because it has 2 endpoints {address: "1.1.1.1", weight: 2}, }, weight: 3, }, }, }, { name: "from_network3", conn: xdsConnection("network3"), env: env, endpoints: testEndpoints, want: []LocLbEpInfo{ { lbEps: []LbEpInfo{ // 1 endpoint to gateway of network1 with weight 2 because it has 2 endpoints {address: "1.1.1.1", weight: 2}, // 1 endpoint to gateway of network2 with weight 1 because it has 1 endpoint {address: "2.2.2.2", weight: 1}, }, weight: 3, }, }, }, { name: "from_network4", conn: xdsConnection("network4"), env: env, endpoints: testEndpoints, want: []LocLbEpInfo{ { lbEps: []LbEpInfo{ // 1 local endpoint {address: "40.0.0.1", weight: 1}, // 1 endpoint to gateway of network1 with weight 2 because it has 2 endpoints {address: "1.1.1.1", weight: 2}, // 1 endpoint to gateway of network2 with weight 1 because it has 1 endpoint {address: "2.2.2.2", weight: 1}, }, weight: 4, }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { filtered := EndpointsByNetworkFilter(tt.endpoints, tt.conn, tt.env) if len(filtered) != len(tt.want) { t.Errorf("Unexpected number of filtered endpoints: got %v, want %v", len(filtered), len(tt.want)) return } sort.Slice(filtered, func(i, j int) bool { addrI := filtered[i].LbEndpoints[0].GetEndpoint().Address.GetSocketAddress().Address addrJ := filtered[j].LbEndpoints[0].GetEndpoint().Address.GetSocketAddress().Address return addrI < addrJ }) for i, ep := range filtered { if len(ep.LbEndpoints) != len(tt.want[i].lbEps) { t.Errorf("Unexpected number of LB endpoints within endpoint %d: %v, want %v", i, len(ep.LbEndpoints), len(tt.want[i].lbEps)) } if ep.LoadBalancingWeight.GetValue() != tt.want[i].weight { t.Errorf("Unexpected weight for endpoint %d: got %v, want %v", i, ep.LoadBalancingWeight.GetValue(), tt.want[i].weight) } for _, lbEp := range ep.LbEndpoints { addr := lbEp.GetEndpoint().Address.GetSocketAddress().Address found := false for _, wantLbEp := range tt.want[i].lbEps { if addr 
== wantLbEp.address { found = true break } } if !found { t.Errorf("Unexpected address for endpoint %d: %v", i, addr) } } } }) } }
explode_data.jsonl/59674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2132 }
[ 2830, 3393, 80786, 1359, 12320, 5632, 62, 15603, 1860, 675, 1155, 353, 8840, 836, 8, 1476, 197, 322, 220, 481, 220, 16, 28795, 369, 3922, 16, 198, 197, 322, 220, 481, 220, 16, 28795, 369, 3922, 17, 198, 197, 322, 220, 481, 220, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEventingRoundTripTypesToJSON(t *testing.T) { scheme := runtime.NewScheme() utilruntime.Must(AddToScheme(scheme)) fuzzerFuncs := fuzzer.MergeFuzzerFuncs( pkgfuzzer.Funcs, FuzzerFuncs, ) roundtrip.ExternalTypesViaJSON(t, scheme, fuzzerFuncs) }
explode_data.jsonl/5882
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 1556, 287, 27497, 56352, 4173, 1249, 5370, 1155, 353, 8840, 836, 8, 341, 1903, 8058, 1669, 15592, 7121, 28906, 741, 79138, 22255, 50463, 7, 2212, 1249, 28906, 1141, 8058, 4390, 1166, 91447, 9626, 82, 1669, 282, 91447, 93855, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsKVPathElement(t *testing.T) { tests := []struct { desc string in string expect bool }{ { desc: "valid", in: "[1:2]", expect: true, }, { desc: "invalid", in: "[:2]", expect: false, }, { desc: "invalid-2", in: "[1:]", expect: false, }, { desc: "empty", in: "", expect: false, }, { desc: "no-brackets", in: "1:2", expect: false, }, { desc: "one-bracket", in: "[1:2", expect: false, }, } for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { if got := IsKVPathElement(tt.in); got != tt.expect { t.Errorf("%s: expect %v got %v", tt.desc, tt.expect, got) } }) } }
explode_data.jsonl/70571
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 3872, 82707, 1820, 1691, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 41653, 256, 914, 198, 197, 17430, 257, 914, 198, 197, 24952, 1807, 198, 197, 59403, 197, 197, 515, 298, 41653, 25, 256, 330, 1891, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPushToRemoteSuccessRemoteMain(t *testing.T) { testRepo := newTestRepo(t) defer testRepo.cleanup(t) err := testRepo.sut.PushToRemote(git.DefaultRemote, git.Remotify(git.DefaultBranch)) require.Nil(t, err) }
explode_data.jsonl/14031
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 16644, 1249, 24703, 7188, 24703, 6202, 1155, 353, 8840, 836, 8, 341, 18185, 25243, 1669, 501, 2271, 25243, 1155, 340, 16867, 1273, 25243, 87689, 1155, 692, 9859, 1669, 1273, 25243, 514, 332, 34981, 1249, 24703, 3268, 275, 1327...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDryRunDeleteDoesntDelete(t *testing.T) { s, destroy := NewDryRunnableTestStorage(t) defer destroy() obj := UnstructuredOrDie(`{"kind": "Pod"}`) out := UnstructuredOrDie(`{}`) err := s.Create(context.Background(), "key", obj, out, 0, false) if err != nil { t.Fatalf("Failed to create new object: %v", err) } err = s.Delete(context.Background(), "key", out, nil, rest.ValidateAllObjectFunc, true, nil) if err != nil { t.Fatalf("Failed to dry-run delete the object: %v", err) } err = s.Get(context.Background(), "key", storage.GetOptions{}, out) if err != nil { t.Fatalf("Failed to retrieve dry-run deleted object: %v", err) } }
explode_data.jsonl/5465
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 85215, 6727, 6435, 21468, 406, 6435, 1155, 353, 8840, 836, 8, 341, 1903, 11, 6921, 1669, 1532, 85215, 68836, 2271, 5793, 1155, 340, 16867, 6921, 2822, 22671, 1669, 1230, 51143, 2195, 18175, 5809, 4913, 15314, 788, 330, 23527, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestVoctreeModify(t *testing.T) { const testX = 128 const testY = 128 const testZ = 128 v := NewVoctree(testX, testY) // V starts as all black. Insert a horizontal bar of y pixels on the middle plane z := testZ / 2 y := testY / 2 for x := 0; x < v.Dx(); x++ { v.Set(Point{X: uint16(x), Y: uint16(y), Z: uint16(z)}, color.Gray{0xff}) } // Collect the image plane := v.GetPlane(z) // Check the image for py := 0; py < testY; py++ { for px := 0; px < testX; px++ { var expected uint8 if py == y { expected = 0xff } c := plane.At(px, py).(color.Gray).Y if c != expected { t.Errorf("%v, %v: expected %+#v, got %+#v", px, py, expected, c) } } } }
explode_data.jsonl/18351
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 27515, 302, 765, 44427, 1155, 353, 8840, 836, 8, 341, 4777, 1273, 55, 284, 220, 16, 17, 23, 198, 4777, 1273, 56, 284, 220, 16, 17, 23, 198, 4777, 1273, 57, 284, 220, 16, 17, 23, 271, 5195, 1669, 1532, 27515, 302, 765...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestFormatArgsWithoutAttrs(t *testing.T) { // given m := &sysl.Module{ Apps: map[string]*sysl.Application{ "test": { Types: map[string]*sysl.Type{ "User": { Attrs: make(map[string]*sysl.Attribute), }, }, }, }, } // when actual := formatArgs(m, "test", "User") assert.Equal(t, "<color blue>test.User</color> <<color green>?, ?</color>>", actual) }
explode_data.jsonl/58738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 4061, 4117, 26040, 53671, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 2109, 1669, 609, 7791, 75, 26958, 515, 197, 197, 53602, 25, 2415, 14032, 8465, 7791, 75, 17521, 515, 298, 197, 1, 1944, 788, 341, 571, 197, 417...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestColor_CMYKAWhole(t *testing.T) { t.Run("red", func(t *testing.T) { color, _ := ParseString("#ff0000") c, m, y, k, a := color.CMYKAWhole() fmt.Println(c, m, y, k) assert.Equal(t, uint8(0), c) assert.Equal(t, uint8(100), m) assert.Equal(t, uint8(100), y) assert.Equal(t, uint8(0), k) assert.Equal(t, uint8(100), a) }) t.Run("black", func(t *testing.T) { color, _ := ParseString("#000000") c, m, y, k, a := color.CMYKAWhole() assert.Equal(t, uint8(0), c) assert.Equal(t, uint8(0), m) assert.Equal(t, uint8(0), y) assert.Equal(t, uint8(100), k) assert.Equal(t, uint8(100), a) }) }
explode_data.jsonl/68043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 1636, 920, 19159, 26444, 90582, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1151, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 21481, 11, 716, 1669, 14775, 703, 3584, 542, 15, 15, 15, 15, 1138, 197, 1444, 11, 296, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildHttpRoutesForCompositeDependencyWithInstanceBasedRules(t *testing.T) { dependencyInst := "mydep" instName := "myinst" svcTemplate := v1alpha2.Component{ ObjectMeta: metav1.ObjectMeta{ Name: "mycomponent1", }, } expected := []*v1alpha3.HTTPRoute{ { Match: []*v1alpha3.HTTPMatchRequest{ { Authority: &v1alpha3.StringMatch{ Regex: fmt.Sprintf("^(%s)(--%s)(\\S*)$", dependencyInst, svcTemplate.Name), }, SourceLabels: map[string]string{ meta.CellLabelKeySource: instName, meta.ComponentLabelKeySource: "true", }, Headers: map[string]*v1alpha3.StringMatch{ InstanceId: { Exact: "1", }, }, }, }, Route: []*v1alpha3.DestinationWeight{ { Destination: &v1alpha3.Destination{ Host: CompositeK8sServiceNameFromInstance(dependencyInst, svcTemplate), }, }, }, }, { Match: []*v1alpha3.HTTPMatchRequest{ { Authority: &v1alpha3.StringMatch{ Regex: fmt.Sprintf("^(%s)(--%s)(\\S*)$", dependencyInst, svcTemplate.Name), }, SourceLabels: map[string]string{ meta.CellLabelKeySource: instName, meta.ComponentLabelKeySource: "true", }, Headers: map[string]*v1alpha3.StringMatch{ InstanceId: { Exact: "2", }, }, }, }, Route: []*v1alpha3.DestinationWeight{ { Destination: &v1alpha3.Destination{ Host: CompositeK8sServiceNameFromInstance(dependencyInst, svcTemplate), }, }, }, }, { Match: []*v1alpha3.HTTPMatchRequest{ { Authority: &v1alpha3.StringMatch{ Regex: fmt.Sprintf("^(%s)(--%s)(\\S*)$", dependencyInst, svcTemplate.Name), }, SourceLabels: map[string]string{ meta.CellLabelKeySource: instName, meta.ComponentLabelKeySource: "true", }, }, }, Route: []*v1alpha3.DestinationWeight{ { Destination: &v1alpha3.Destination{ Host: CompositeK8sServiceNameFromInstance(dependencyInst, svcTemplate), }, }, }, }, } actual := BuildHttpRoutesForCompositeDependency(instName, dependencyInst, []v1alpha2.Component{svcTemplate}, true) if diff := cmp.Diff(expected, actual); diff != "" { t.Errorf("BuildHttpRoutesForCompositeDependency (-expected, +actual)\n%v", diff) } }
explode_data.jsonl/54870
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1086 }
[ 2830, 3393, 11066, 2905, 26653, 2461, 41685, 36387, 2354, 2523, 28715, 26008, 1155, 353, 8840, 836, 8, 341, 197, 53690, 8724, 1669, 330, 2408, 14891, 698, 88656, 675, 1669, 330, 2408, 6308, 698, 1903, 7362, 7275, 1669, 348, 16, 7141, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStatusHandlePost(t *testing.T) { h := StatusHandler{StatusService: mockStatusService()} status, _ := handlerTest(h.Post, "POST", testStatusURL, nil) if status != http.StatusMethodNotAllowed { t.Error("Got:", status, "Expected:", http.StatusMethodNotAllowed) } }
explode_data.jsonl/39131
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 2522, 6999, 4133, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 8104, 3050, 90, 2522, 1860, 25, 7860, 2522, 1860, 23509, 23847, 11, 716, 1669, 7013, 2271, 3203, 23442, 11, 330, 2946, 497, 1273, 2522, 3144, 11, 2092, 692, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFormDNSNameserversFitsLimits(t *testing.T) { recorder := record.NewFakeRecorder(20) nodeRef := &v1.ObjectReference{ Kind: "Node", Name: string("testNode"), UID: types.UID("testNode"), Namespace: "", } testClusterDNSDomain := "TEST" configurer := NewConfigurer(recorder, nodeRef, nil, nil, testClusterDNSDomain, "") pod := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ UID: "", Name: "test_pod", Namespace: "testNS", Annotations: map[string]string{}, }, } testCases := []struct { desc string nameservers []string expectedNameserver []string expectedEvent bool }{ { desc: "valid: 1 nameserver", nameservers: []string{"127.0.0.1"}, expectedNameserver: []string{"127.0.0.1"}, expectedEvent: false, }, { desc: "valid: 3 nameservers", nameservers: []string{"127.0.0.1", "10.0.0.10", "8.8.8.8"}, expectedNameserver: []string{"127.0.0.1", "10.0.0.10", "8.8.8.8"}, expectedEvent: false, }, { desc: "invalid: 4 nameservers, trimmed to 3", nameservers: []string{"127.0.0.1", "10.0.0.10", "8.8.8.8", "1.2.3.4"}, expectedNameserver: []string{"127.0.0.1", "10.0.0.10", "8.8.8.8"}, expectedEvent: true, }, } for _, tc := range testCases { appliedNameservers := configurer.formDNSNameserversFitsLimits(tc.nameservers, pod) assert.EqualValues(t, tc.expectedNameserver, appliedNameservers, tc.desc) event := fetchEvent(recorder) if tc.expectedEvent && len(event) == 0 { t.Errorf("%s: formDNSNameserversFitsLimits(%v) expected event, got no event.", tc.desc, tc.nameservers) } else if !tc.expectedEvent && len(event) > 0 { t.Errorf("%s: formDNSNameserversFitsLimits(%v) expected no event, got event: %v", tc.desc, tc.nameservers, event) } } }
explode_data.jsonl/59246
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 888 }
[ 2830, 3393, 1838, 61088, 7980, 18729, 37, 1199, 94588, 1155, 353, 8840, 836, 8, 341, 67904, 1358, 1669, 3255, 7121, 52317, 47023, 7, 17, 15, 340, 20831, 3945, 1669, 609, 85, 16, 8348, 8856, 515, 197, 197, 10629, 25, 414, 330, 1955, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTextAttachmentToStep(t *testing.T) { allure.Test(t, allure.Description("Testing a text attachment"), allure.Action(func() { allure.Step(allure.Description("adding a text attachment"), allure.Action(func() { _ = allure.AddAttachment("text!", allure.TextPlain, []byte("Some text!")) })) })) }
explode_data.jsonl/58994
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 1178, 33569, 1249, 8304, 1155, 353, 8840, 836, 8, 341, 50960, 552, 8787, 1155, 11, 87403, 28773, 445, 16451, 264, 1467, 19984, 3975, 87403, 11360, 18552, 368, 341, 197, 50960, 552, 68402, 20388, 552, 28773, 445, 2998, 264, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRuntimeConfiguration(t *testing.T) { testCases := []struct { desc string serviceConfig map[string]*dynamic.Service routerConfig map[string]*dynamic.Router middlewareConfig map[string]*dynamic.Middleware expectedError int }{ { desc: "No error", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: &dynamic.ServersLoadBalancer{ Servers: []dynamic.Server{ { URL: "http://127.0.0.1:8085", }, { URL: "http://127.0.0.1:8086", }, }, HealthCheck: &dynamic.HealthCheck{ Interval: "500ms", Path: "/health", }, }, }, }, routerConfig: map[string]*dynamic.Router{ "foo": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`bar.foo`)", }, "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar`)", }, }, expectedError: 0, }, { desc: "One router with wrong rule", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: &dynamic.ServersLoadBalancer{ Servers: []dynamic.Server{ { URL: "http://127.0.0.1", }, }, }, }, }, routerConfig: map[string]*dynamic.Router{ "foo": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "WrongRule(`bar.foo`)", }, "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar`)", }, }, expectedError: 1, }, { desc: "All router with wrong rule", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: &dynamic.ServersLoadBalancer{ Servers: []dynamic.Server{ { URL: "http://127.0.0.1", }, }, }, }, }, routerConfig: map[string]*dynamic.Router{ "foo": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "WrongRule(`bar.foo`)", }, "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "WrongRule(`foo.bar`)", }, }, expectedError: 2, }, { desc: "Router with unknown service", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: &dynamic.ServersLoadBalancer{ Servers: []dynamic.Server{ { URL: "http://127.0.0.1", }, }, }, }, }, routerConfig: map[string]*dynamic.Router{ "foo": { 
EntryPoints: []string{"web"}, Service: "wrong-service", Rule: "Host(`bar.foo`)", }, "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar`)", }, }, expectedError: 1, }, { desc: "Router with broken service", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: nil, }, }, routerConfig: map[string]*dynamic.Router{ "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar`)", }, }, expectedError: 2, }, { desc: "Router with middleware", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: &dynamic.ServersLoadBalancer{ Servers: []dynamic.Server{ { URL: "http://127.0.0.1", }, }, }, }, }, middlewareConfig: map[string]*dynamic.Middleware{ "auth": { BasicAuth: &dynamic.BasicAuth{ Users: []string{"admin:admin"}, }, }, "addPrefixTest": { AddPrefix: &dynamic.AddPrefix{ Prefix: "/toto", }, }, }, routerConfig: map[string]*dynamic.Router{ "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar`)", Middlewares: []string{"auth", "addPrefixTest"}, }, "test": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar.other`)", Middlewares: []string{"addPrefixTest", "auth"}, }, }, }, { desc: "Router with unknown middleware", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: &dynamic.ServersLoadBalancer{ Servers: []dynamic.Server{ { URL: "http://127.0.0.1", }, }, }, }, }, middlewareConfig: map[string]*dynamic.Middleware{ "auth": { BasicAuth: &dynamic.BasicAuth{ Users: []string{"admin:admin"}, }, }, }, routerConfig: map[string]*dynamic.Router{ "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar`)", Middlewares: []string{"unknown"}, }, }, expectedError: 1, }, { desc: "Router with broken middleware", serviceConfig: map[string]*dynamic.Service{ "foo-service": { LoadBalancer: &dynamic.ServersLoadBalancer{ Servers: []dynamic.Server{ { URL: "http://127.0.0.1", }, }, }, }, }, middlewareConfig: 
map[string]*dynamic.Middleware{ "auth": { BasicAuth: &dynamic.BasicAuth{ Users: []string{"foo"}, }, }, }, routerConfig: map[string]*dynamic.Router{ "bar": { EntryPoints: []string{"web"}, Service: "foo-service", Rule: "Host(`foo.bar`)", Middlewares: []string{"auth"}, }, }, expectedError: 2, }, } for _, test := range testCases { test := test t.Run(test.desc, func(t *testing.T) { t.Parallel() entryPoints := []string{"web"} rtConf := runtime.NewConfig(dynamic.Configuration{ HTTP: &dynamic.HTTPConfiguration{ Services: test.serviceConfig, Routers: test.routerConfig, Middlewares: test.middlewareConfig, }, }) serviceManager := service.NewManager(rtConf.Services, http.DefaultTransport, nil, nil) middlewaresBuilder := middleware.NewBuilder(rtConf.Middlewares, serviceManager) responseModifierFactory := responsemodifiers.NewBuilder(map[string]*runtime.MiddlewareInfo{}) chainBuilder := middleware.NewChainBuilder(static.Configuration{}, nil, nil) routerManager := NewManager(rtConf, serviceManager, middlewaresBuilder, responseModifierFactory, chainBuilder) _ = routerManager.BuildHandlers(context.Background(), entryPoints, false) // even though rtConf was passed by argument to the manager builders above, // it's ok to use it as the result we check, because everything worth checking // can be accessed by pointers in it. var allErrors int for _, v := range rtConf.Services { if v.Err != nil { allErrors++ } } for _, v := range rtConf.Routers { if len(v.Err) > 0 { allErrors++ } } for _, v := range rtConf.Middlewares { if v.Err != nil { allErrors++ } } assert.Equal(t, test.expectedError, allErrors) }) } }
explode_data.jsonl/25182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3428 }
[ 2830, 3393, 15123, 7688, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 41653, 1797, 914, 198, 197, 52934, 2648, 262, 2415, 14032, 8465, 21544, 13860, 198, 197, 67009, 2648, 257, 2415, 14032, 8465, 21544, 31413, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestLookupWillFailOnClosedStateMachine(t *testing.T) { sm := NewNativeSM(config.Config{}, &dummySM{}, nil) sm.Loaded() sm.Offloaded() sm.Close() if _, err := sm.Lookup(nil); err != ErrClusterClosed { t.Errorf("failed to return ErrClusterClosed") } if _, err := sm.NALookup(nil); err != ErrClusterClosed { t.Errorf("failed to return ErrClusterClosed") } }
explode_data.jsonl/62405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 34247, 9945, 19524, 1925, 26884, 94666, 1155, 353, 8840, 836, 8, 341, 72023, 1669, 1532, 20800, 9501, 8754, 10753, 22655, 609, 31390, 9501, 22655, 2092, 340, 72023, 13969, 291, 741, 72023, 13, 4596, 15589, 741, 72023, 10421, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIsTar(t *testing.T) { tests := map[string]bool{ "foo.tgz": true, "foo/bar/baz.tgz": true, "foo-1.2.3.4.5.tgz": true, "foo.tar.gz": false, // for our purposes "foo.tgz.1": false, "footgz": false, } for src, expect := range tests { if isTar(src) != expect { t.Errorf("%q should be %t", src, expect) } } }
explode_data.jsonl/1370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 3872, 62733, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 96436, 515, 197, 197, 1, 7975, 734, 46589, 788, 1843, 830, 345, 197, 197, 1, 7975, 49513, 3470, 1370, 734, 46589, 788, 256, 830, 345, 197, 197, 1, 7975...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMissingCrossSignature(t *testing.T) { // This public key has a signing subkey, but the subkey does not // contain a cross-signature. keys, err := ReadArmoredKeyRing(bytes.NewBufferString(missingCrossSignatureKey)) if len(keys) != 0 { t.Errorf("Accepted key with missing cross signature") } if err == nil { t.Fatal("Failed to detect error in keyring with missing cross signature") } structural, ok := err.(errors.StructuralError) if !ok { t.Fatalf("Unexpected class of error: %T. Wanted StructuralError", err) } const expectedMsg = "signing subkey is missing cross-signature" if !strings.Contains(string(structural), expectedMsg) { t.Fatalf("Unexpected error: %q. Expected it to contain %q", err, expectedMsg) } }
explode_data.jsonl/14734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 25080, 28501, 25088, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 584, 1376, 702, 264, 15971, 1186, 792, 11, 714, 279, 1186, 792, 1558, 537, 198, 197, 322, 6644, 264, 5312, 27953, 1568, 624, 80112, 11, 1848, 1669, 4457, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAddApiToExistingApi(t *testing.T) { router := NewRouter() router.AddChild([]string{"p1", "{p2}", "p3"}, &ApiGroup{ Post: &Api{ Description: "Hello World", }, }) assert.Equal( t, "Hello World", router.Children["p1"].Children["{VAR}"].Children["p3"].Api.Post.Description, ) router.AddChild([]string{"p1", "{p2}", "p3"}, &ApiGroup{ Post: &Api{ Description: "Hello World 2", }, }) assert.Equal( t, "Hello World 2", router.Children["p1"].Children["{VAR}"].Children["p3"].Api.Post.Description, ) }
explode_data.jsonl/71590
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 2212, 6563, 1249, 53067, 6563, 1155, 353, 8840, 836, 8, 220, 341, 67009, 1669, 1532, 9523, 741, 67009, 1904, 3652, 10556, 917, 4913, 79, 16, 497, 13868, 79, 17, 9545, 330, 79, 18, 14345, 609, 6563, 2808, 515, 197, 197, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBlockedRetrieval(t *testing.T) { tev, fl := initialize(t) defer tev.tearDown() it, num := fl.Iterator(&ab.SeekPosition{Type: &ab.SeekPosition_Specified{Specified: &ab.SeekSpecified{Number: 1}}}) defer it.Close() assert.Equal(t, uint64(1), num, "Expected block iterator at 1, but got %d", num) fl.Append(blockledger.CreateNextBlock(fl, []*cb.Envelope{{Payload: []byte("My Data")}})) block, status := it.Next() assert.Equal(t, cb.Status_SUCCESS, status, "Expected to successfully read the second block") assert.Equal(t, uint64(1), block.Header.Number, "Expected to successfully retrieve the second block") go func() { // Add explicit sleep here to make sure `it.Next` is actually blocked waiting // for new block. According to Golang sched, `it.Next()` is run before this // goroutine, however it's not guaranteed to run till the channel operation // we desire, due to I/O operation in the middle. Consider making the // implementation more testable so we don't need to sleep here. time.Sleep(100 * time.Millisecond) fl.Append(blockledger.CreateNextBlock(fl, []*cb.Envelope{{Payload: []byte("Another Data")}})) }() block, status = it.Next() assert.Equal(t, cb.Status_SUCCESS, status, "Expected to successfully read the third block") assert.Equal(t, uint64(2), block.Header.Number, "Expected to successfully retrieve the third block") }
explode_data.jsonl/53183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 459 }
[ 2830, 3393, 95847, 12020, 7231, 831, 1155, 353, 8840, 836, 8, 341, 197, 665, 85, 11, 1320, 1669, 9468, 1155, 340, 16867, 1013, 85, 31853, 59342, 741, 23374, 11, 1629, 1669, 1320, 40846, 2099, 370, 76465, 3812, 90, 929, 25, 609, 370, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNilcheckBug(t *testing.T) { c := testConfig(t) ptrType := c.config.Types.BytePtr fun := c.Fun("entry", Bloc("entry", Valu("mem", OpInitMem, types.TypeMem, 0, nil), Valu("sb", OpSB, c.config.Types.Uintptr, 0, nil), Goto("checkPtr")), Bloc("checkPtr", Valu("ptr1", OpLoad, ptrType, 0, nil, "sb", "mem"), Valu("nilptr", OpConstNil, ptrType, 0, nil), Valu("bool1", OpNeqPtr, c.config.Types.Bool, 0, nil, "ptr1", "nilptr"), If("bool1", "secondCheck", "couldBeNil")), Bloc("couldBeNil", Goto("secondCheck")), Bloc("secondCheck", Valu("bool2", OpIsNonNil, c.config.Types.Bool, 0, nil, "ptr1"), If("bool2", "extra", "exit")), Bloc("extra", // prevent fuse from eliminating this block Valu("store", OpStore, types.TypeMem, 0, ptrType, "ptr1", "nilptr", "mem"), Goto("exit")), Bloc("exit", Valu("phi", OpPhi, types.TypeMem, 0, nil, "mem", "store"), Exit("phi"))) CheckFunc(fun.f) // we need the opt here to rewrite the user nilcheck opt(fun.f) nilcheckelim(fun.f) // clean up the removed nil check fusePlain(fun.f) deadcode(fun.f) CheckFunc(fun.f) foundSecondCheck := false for _, b := range fun.f.Blocks { if b == fun.blocks["secondCheck"] && isNilCheck(b) { foundSecondCheck = true } } if !foundSecondCheck { t.Errorf("secondCheck was eliminated, but shouldn't have") } }
explode_data.jsonl/6053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 586 }
[ 2830, 3393, 19064, 2028, 46773, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1273, 2648, 1155, 340, 43811, 929, 1669, 272, 5423, 29147, 32119, 5348, 198, 90126, 1669, 272, 991, 359, 445, 4085, 756, 197, 12791, 1074, 445, 4085, 756, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPathTransforms(t *testing.T) { tests := []struct { desc string path string pathTransform string addQueries []config.AddNameValue removeQueries []string targetPath string }{ { desc: "test0", path: "/v1/petstore?query=value", pathTransform: "", addQueries: []config.AddNameValue{}, removeQueries: []string{}, targetPath: "/petstore?query=value", }, { desc: "test1", path: "/v1/petstore?remove1=test", pathTransform: "/v2/{request.path}", addQueries: []config.AddNameValue{ {Name: "append", Value: "append1", Append: true}, {Name: "set", Value: "set1", Append: false}, }, removeQueries: []string{"remove*"}, targetPath: "/v2/petstore?append=append1&set=set1", }, { desc: "test2", path: "/v1/petstore?remove1=test", pathTransform: "/v2/{request.path}", addQueries: []config.AddNameValue{ {Name: "append", Value: "append1", Append: true}, {Name: "append", Value: "append2", Append: true}, {Name: "query", Value: "{query.remove1}", Append: false}, }, removeQueries: []string{"Remove1", "missing"}, targetPath: "/v2/petstore?append=append1&append=append2&query=test", }, { desc: "test3", path: "/v1/petstore?remove1=test&remove2=test", pathTransform: "/v2/{request.path}", addQueries: []config.AddNameValue{}, removeQueries: []string{"Remove*"}, targetPath: "/v2/petstore", }, } for _, test := range tests { t.Run(test.desc, func(t *testing.T) { envSpec := createAuthEnvSpec() envSpec.APIs[0].HTTPRequestTransforms = config.HTTPRequestTransforms{ PathTransform: test.pathTransform, QueryTransforms: config.NameValueTransforms{ Add: test.addQueries, Remove: test.removeQueries, }, } specExt, err := config.NewEnvironmentSpecExt(&envSpec) if err != nil { t.Fatalf("%v", err) } envoyReq := testutil.NewEnvoyRequest("GET", test.path, nil, nil) specReq := config.NewEnvironmentSpecRequest(nil, specExt, envoyReq) okResponse := &authv3.OkHttpResponse{} addRequestHeaderTransforms(envoyReq, specReq, okResponse) // path pathSet := getHeaderValueOption(okResponse.Headers, envoyPathHeader) if pathSet == 
nil { t.Errorf("expected :path header mod") } else if pathSet.Append.Value { t.Errorf("expected :path set, got append") } else if pathSet.Header.Value != test.targetPath { want, err := url.Parse(test.targetPath) if err != nil { t.Fatalf("%v", err) } got, err := url.Parse(pathSet.Header.Value) if err != nil { t.Fatalf("%v", err) } if want.Path != got.Path { t.Errorf("expected path: %q, got: %q", want.Path, got.Path) } if diff := cmp.Diff(want.Query(), got.Query()); diff != "" { t.Errorf("query diff (-want +got):\n%s", diff) } } }) } }
explode_data.jsonl/73188
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1319 }
[ 2830, 3393, 1820, 8963, 82, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 41653, 688, 914, 198, 197, 26781, 688, 914, 198, 197, 26781, 8963, 914, 198, 197, 12718, 55261, 262, 3056, 1676, 1904, 675, 1130, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestParseBadScriptTemplate(t *testing.T) { const ( // these are only used in text; no access to qf101 organization or user is needed qfTestOrg = "qf101" image = "quickfeed:go" githubUserName = "user" accessToken = "open sesame" ) randomSecret := rand.String() const scriptTemplate = `#image/quickfeed:go` runData := testRunData(qfTestOrg, githubUserName, "access_token", scriptTemplate) _, err := runData.parseScriptTemplate(randomSecret) const wantMsg = "no script template for assignment lab1 in https://github.com/qf101/tests" if err.Error() != wantMsg { t.Errorf("err = '%s', want '%s'", err, wantMsg) } const scriptTemplate2 = ` start=$SECONDS printf "*** Preparing for Test Execution ***\n" ` runData = testRunData(qfTestOrg, githubUserName, "access_token", scriptTemplate2) _, err = runData.parseScriptTemplate(randomSecret) const wantMsg2 = "no docker image specified in script template for assignment lab1 in https://github.com/qf101/tests" if err.Error() != wantMsg2 { t.Errorf("err = '%s', want '%s'", err, wantMsg2) } }
explode_data.jsonl/23306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 386 }
[ 2830, 3393, 14463, 17082, 5910, 7275, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 197, 322, 1493, 525, 1172, 1483, 304, 1467, 26, 902, 2615, 311, 2804, 69, 16, 15, 16, 7321, 476, 1196, 374, 4362, 198, 197, 18534, 69, 2271, 42437,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRatCmp(t *testing.T) { for i, test := range ratCmpTests { x, _ := new(Rat).SetString(test.rat1) y, _ := new(Rat).SetString(test.rat2) out := x.Cmp(y) if out != test.out { t.Errorf("#%d got out = %v; want %v", i, out, test.out) } } }
explode_data.jsonl/35070
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 49, 266, 34, 1307, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 1273, 1669, 2088, 11244, 34, 1307, 18200, 341, 197, 10225, 11, 716, 1669, 501, 2785, 266, 568, 1649, 703, 8623, 1746, 266, 16, 340, 197, 14522, 11, 716, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddBasicEntity(t *testing.T) { for _, service := range services { t.Run(fmt.Sprintf("%v_%v", t.Name(), service), func(t *testing.T) { client, delete := initClientTest(t, service, true) defer delete() basicEntity := basicTestEntity{ Entity: Entity{ PartitionKey: "pk001", RowKey: "rk001", }, Integer: 10, String: "abcdef", Bool: true, } marshalled, err := json.Marshal(basicEntity) require.Nil(t, err) _, err = client.AddEntity(ctx, marshalled, nil) require.Nil(t, err) resp, err := client.GetEntity(ctx, "pk001", "rk001", nil) require.Nil(t, err) receivedEntity := basicTestEntity{} err = json.Unmarshal(resp.Value, &receivedEntity) require.Nil(t, err) require.Equal(t, receivedEntity.PartitionKey, "pk001") require.Equal(t, receivedEntity.RowKey, "rk001") queryString := "PartitionKey eq 'pk001'" listOptions := ListEntitiesOptions{Filter: &queryString} pager := client.ListEntities(&listOptions) count := 0 for pager.More() { resp, err := pager.NextPage(ctx) require.NoError(t, err) for _, e := range resp.Entities { err = json.Unmarshal(e, &receivedEntity) require.NoError(t, err) require.Equal(t, receivedEntity.PartitionKey, "pk001") require.Equal(t, receivedEntity.RowKey, "rk001") count += 1 } } require.Equal(t, count, 1) }) } }
explode_data.jsonl/59178
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 2212, 15944, 3030, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2473, 1669, 2088, 3516, 341, 197, 3244, 16708, 28197, 17305, 4430, 85, 18695, 85, 497, 259, 2967, 1507, 2473, 701, 2915, 1155, 353, 8840, 836, 8, 341, 298, 25291...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestQueryEncode(t *testing.T) { testCases := []struct { queryKey string valueToEncode []string // Expected result. result string }{ {"prefix", []string{"test@123", "test@456"}, "prefix=test%40123&prefix=test%40456"}, {"@prefix", []string{"test@123"}, "%40prefix=test%40123"}, {"@prefix", []string{"a/b/c/"}, "%40prefix=a%2Fb%2Fc%2F"}, {"prefix", []string{"test#123"}, "prefix=test%23123"}, {"prefix#", []string{"test#123"}, "prefix%23=test%23123"}, {"prefix", []string{"test123"}, "prefix=test123"}, {"prefix", []string{"test本語123", "test123"}, "prefix=test%E6%9C%AC%E8%AA%9E123&prefix=test123"}, } for i, testCase := range testCases { urlValues := make(url.Values) for _, valueToEncode := range testCase.valueToEncode { urlValues.Add(testCase.queryKey, valueToEncode) } result := QueryEncode(urlValues) if testCase.result != result { t.Errorf("Test %d: Expected queryEncode result to be \"%s\", but found it to be \"%s\" instead", i+1, testCase.result, result) } } }
explode_data.jsonl/20473
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 2859, 32535, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 27274, 1592, 414, 914, 198, 197, 16309, 1249, 32535, 3056, 917, 198, 197, 197, 322, 31021, 1102, 624, 197, 9559, 914, 198, 197, 59403, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestClientSetUserAgent(t *testing.T) { t.Parallel() ln := fasthttputil.NewInmemoryListener() userAgentSeen := "" s := &Server{ Handler: func(ctx *RequestCtx) { userAgentSeen = string(ctx.UserAgent()) }, } go s.Serve(ln) //nolint:errcheck userAgent := "I'm not fasthttp" c := &Client{ Name: userAgent, Dial: func(addr string) (net.Conn, error) { return ln.Dial() }, } req := AcquireRequest() res := AcquireResponse() req.SetRequestURI("http://example.com") err := c.Do(req, res) if err != nil { t.Fatal(err) } if userAgentSeen != userAgent { t.Fatalf("User-Agent defers %q != %q", userAgentSeen, userAgent) } }
explode_data.jsonl/79345
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 2959, 1649, 1474, 16810, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 2261, 1669, 4937, 96336, 628, 321, 7121, 641, 17269, 2743, 2822, 19060, 16810, 85675, 1669, 8389, 1903, 1669, 609, 5475, 515, 197, 197, 305...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetPreviewValuesConfig(t *testing.T) { t.Parallel() tests := []struct { opts preview.PreviewOptions env map[string]string domain string expectedYAMLConfig string }{ { opts: preview.PreviewOptions{ HelmValuesConfig: config.HelmValuesConfig{ ExposeController: &config.ExposeController{}, }, }, env: map[string]string{ preview.DOCKER_REGISTRY: "my.registry", preview.ORG: "my-org", preview.APP_NAME: "my-app", preview.PREVIEW_VERSION: "1.0.0", }, expectedYAMLConfig: `expose: config: {} preview: image: repository: my.registry/my-org/my-app tag: 1.0.0 `, }, { opts: preview.PreviewOptions{ HelmValuesConfig: config.HelmValuesConfig{ ExposeController: &config.ExposeController{ Config: config.ExposeControllerConfig{ HTTP: "false", TLSAcme: "true", }, }, }, }, env: map[string]string{ preview.DOCKER_REGISTRY: "my.registry", preview.ORG: "my-org", preview.APP_NAME: "my-app", preview.PREVIEW_VERSION: "1.0.0", }, domain: "jenkinsx.io", expectedYAMLConfig: `expose: config: domain: jenkinsx.io http: "false" tlsacme: "true" preview: image: repository: my.registry/my-org/my-app tag: 1.0.0 `, }, } co := &opts.CommonOptions{} testhelpers.ConfigureTestOptions(co, gits_test.NewMockGitter(), helm_test.NewMockHelmer()) for i, test := range tests { for k, v := range test.env { os.Setenv(k, v) } test.opts.CommonOptions = co config, err := test.opts.GetPreviewValuesConfig(nil, test.domain) if err != nil { t.Errorf("[%d] got unexpected err: %v", i, err) continue } configYAML, err := config.String() if err != nil { t.Errorf("[%d] %v", i, err) continue } if test.expectedYAMLConfig != configYAML { t.Errorf("[%d] expected %#v but got %#v", i, test.expectedYAMLConfig, configYAML) } } }
explode_data.jsonl/2233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 961 }
[ 2830, 3393, 1949, 24625, 6227, 2648, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 1235, 341, 197, 64734, 2290, 17124, 1069, 19417, 3798, 198, 197, 57538, 394, 2415, 14032, 30953, 198, 197, 2698, 3121, 1797, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCustomDNS(t *testing.T) { n, err := e2e.RandInt(255) if err != nil { t.Fatalf("unexpected error: %v", err) } cliPath, err := e2e.Bin() if err != nil { t.Fatalf("unexpected error: %v", err) } projectName := fmt.Sprintf("e2e-integration-custom-dns-%v", n) projectID, err := createProject(projectName) if err != nil { t.Fatalf("unexpected error: %v", err) } defer func() { if e := deleteProject(projectID); e != nil { t.Errorf("error deleting project: %v", e) } }() t.Run("Enable", func(t *testing.T) { cmd := exec.Command(cliPath, atlasEntity, customDNSEntity, awsEntity, "enable", "--projectId", projectID, "-o=json") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() a := assert.New(t) a.NoError(err, string(resp)) var dns mongodbatlas.AWSCustomDNSSetting if err := json.Unmarshal(resp, &dns); a.NoError(err) { a.True(dns.Enabled) } }) t.Run("Describe", func(t *testing.T) { cmd := exec.Command(cliPath, atlasEntity, customDNSEntity, awsEntity, "describe", "--projectId", projectID, "-o=json") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() a := assert.New(t) a.NoError(err, string(resp)) var dns mongodbatlas.AWSCustomDNSSetting if err := json.Unmarshal(resp, &dns); a.NoError(err) { a.True(dns.Enabled) } }) t.Run("Disable", func(t *testing.T) { cmd := exec.Command(cliPath, atlasEntity, customDNSEntity, awsEntity, "disable", "--projectId", projectID, "-o=json") cmd.Env = os.Environ() resp, err := cmd.CombinedOutput() a := assert.New(t) a.NoError(err, string(resp)) var dns mongodbatlas.AWSCustomDNSSetting if err := json.Unmarshal(resp, &dns); a.NoError(err) { a.False(dns.Enabled) } }) }
explode_data.jsonl/34726
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 822 }
[ 2830, 3393, 10268, 61088, 1155, 353, 8840, 836, 8, 341, 9038, 11, 1848, 1669, 384, 17, 68, 2013, 437, 1072, 7, 17, 20, 20, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 53859, 1465, 25, 1018, 85, 497, 1848, 340, 197, 630,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFindResources(t *testing.T) { testCases := []struct { name string include string exclude string expectedNames []string }{{ name: "Include One Match", include: "subdir/deploymentSub.yaml", expectedNames: []string{"nginx-deployment-sub"}, }, { name: "Include Everything", include: "*.yaml", expectedNames: []string{"nginx-deployment", "nginx-deployment-sub"}, }, { name: "Include Subdirectory", include: "**/*.yaml", expectedNames: []string{"nginx-deployment-sub"}, }, { name: "Include No Matches", include: "nothing.yaml", expectedNames: []string{}, }, { name: "Exclude - One Match", exclude: "subdir/deploymentSub.yaml", expectedNames: []string{"nginx-deployment"}, }, { name: "Exclude - Everything", exclude: "*.yaml", expectedNames: []string{}, }} for i := range testCases { tc := testCases[i] t.Run(tc.name, func(t *testing.T) { objs, err := findManifests("testdata/app-include-exclude", ".", nil, argoappv1.ApplicationSourceDirectory{ Recurse: true, Include: tc.include, Exclude: tc.exclude, }, map[string]bool{}) if !assert.NoError(t, err) { return } var names []string for i := range objs { names = append(names, objs[i].GetName()) } assert.ElementsMatch(t, tc.expectedNames, names) }) } }
explode_data.jsonl/5700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 650 }
[ 2830, 3393, 9885, 11277, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 24699, 981, 914, 198, 197, 8122, 857, 981, 914, 198, 197, 42400, 7980, 3056, 917, 198, 197, 15170, 515, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_Workspace_WhenPropertiesConverted_RoundTripsWithoutLoss(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip from Workspace to Workspace via AssignPropertiesToWorkspace & AssignPropertiesFromWorkspace returns original", prop.ForAll(RunPropertyAssignmentTestForWorkspace, WorkspaceGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(false, 240, os.Stdout)) }
explode_data.jsonl/43358
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 87471, 8746, 62, 4498, 7903, 61941, 2568, 795, 21884, 1690, 26040, 39838, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535, 1695, 284, 220, 16, 15, 198, 8692...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResponseCompare(t *testing.T) { //t.SkipNow() for i, tt := range responseCompareTests { got := tt.r.Compare(tt.res) if !reflect.DeepEqual(got, tt.want) { t.Errorf("#%d: got: \"%s\"\nwant: \"%s\"", i, got, tt.want) } } }
explode_data.jsonl/774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 2582, 27374, 1155, 353, 8840, 836, 8, 341, 197, 322, 83, 57776, 7039, 741, 2023, 600, 11, 17853, 1669, 2088, 2033, 27374, 18200, 341, 197, 3174, 354, 1669, 17853, 1746, 32377, 47152, 4705, 340, 197, 743, 753, 34913, 94750, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateRule(t *testing.T) { args := &CreateRuleArgs{ RuleName: "goSdkRule", Limit: 2, Enabled: 1, TagStr: "msinstancekey:msinstancevalue", Extra: "extra", } res, err := BBC_CLIENT.CreateRule(args) ExpectEqual(t.Errorf, err, nil) fmt.Println(res) }
explode_data.jsonl/4086
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 4021, 11337, 1155, 353, 8840, 836, 8, 341, 31215, 1669, 609, 4021, 11337, 4117, 515, 197, 11143, 1111, 675, 25, 330, 3346, 57175, 11337, 756, 197, 15070, 2353, 25, 262, 220, 17, 345, 197, 197, 5462, 25, 220, 220, 16, 345...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSubStr(t *testing.T) { validSQL := []struct { input string output string }{{ input: `select substr('foobar', 1) from t`, }, { input: "select substr(a, 1, 6) from t", }, { input: "select substring(a, 1) from t", output: "select substr(a, 1) from t", }, { input: "select substring(a, 1, 6) from t", output: "select substr(a, 1, 6) from t", }, { input: "select substr(a from 1 for 6) from t", output: "select substr(a, 1, 6) from t", }, { input: "select substring(a from 1 for 6) from t", output: "select substr(a, 1, 6) from t", }, { input: `select substr("foo" from 1 for 2) from t`, output: `select substr('foo', 1, 2) from t`, }, { input: `select substring("foo", 1, 2) from t`, output: `select substr('foo', 1, 2) from t`, }, { input: `select substr(substr("foo" from 1 for 2), 1, 2) from t`, output: `select substr(substr('foo', 1, 2), 1, 2) from t`, }, { input: `select substr(substring("foo", 1, 2), 3, 4) from t`, output: `select substr(substr('foo', 1, 2), 3, 4) from t`, }, { input: `select substring(substr("foo", 1), 2) from t`, output: `select substr(substr('foo', 1), 2) from t`, }} for _, tcase := range validSQL { if tcase.output == "" { tcase.output = tcase.input } tree, err := Parse(tcase.input) if err != nil { t.Errorf("input: %s, err: %v", tcase.input, err) continue } out := String(tree) if out != tcase.output { t.Errorf("out: %s, want %s", out, tcase.output) } } }
explode_data.jsonl/27186
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 635 }
[ 2830, 3393, 3136, 2580, 1155, 353, 8840, 836, 8, 1476, 56322, 6688, 1669, 3056, 1235, 341, 197, 22427, 220, 914, 198, 197, 21170, 914, 198, 197, 15170, 515, 197, 22427, 25, 1565, 1742, 15769, 492, 50267, 516, 220, 16, 8, 504, 259, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestApiDevAuthPostTenants(t *testing.T) { testCases := map[string]struct { req *http.Request devAuthErr error respCode int respBody string }{ "ok": { req: test.MakeSimpleRequest("POST", "http://1.2.3.4/api/internal/v1/devauth/tenants", model.NewTenant{TenantId: "foo"}), respCode: 201, respBody: "", }, "error: empty request": { req: test.MakeSimpleRequest("POST", "http://1.2.3.4/api/internal/v1/devauth/tenants", nil), respCode: 400, respBody: RestError("EOF"), }, "error: no tenant_id": { req: test.MakeSimpleRequest("POST", "http://1.2.3.4/api/internal/v1/devauth/tenants", model.NewTenant{TenantId: ""}, ), respCode: 400, respBody: RestError("tenant_id must be provided"), }, "error: generic": { req: test.MakeSimpleRequest("POST", "http://1.2.3.4/api/internal/v1/devauth/tenants", model.NewTenant{TenantId: "foo"}, ), devAuthErr: errors.New("can't provision tenant"), respCode: 500, respBody: RestError("internal error"), }, } for name, tc := range testCases { t.Logf("test case: %s", name) da := &mocks.App{} da.On("ProvisionTenant", mock.MatchedBy(func(c context.Context) bool { return true }), mock.AnythingOfType("string")).Return(tc.devAuthErr) apih := makeMockApiHandler(t, da, nil) rest.ErrorFieldName = "error" runTestRequest(t, apih, tc.req, tc.respCode, tc.respBody) } }
explode_data.jsonl/641
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 643 }
[ 2830, 3393, 6563, 14592, 5087, 4133, 32687, 1783, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 24395, 286, 353, 1254, 9659, 198, 197, 27302, 5087, 7747, 1465, 198, 197, 34653, 2078, 256, 526, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetEnv(t *testing.T) { t.Parallel() testVarKey := "TEST_ONLY_FOR_UNIT_TEST_STRING" res := util.GetEnv(testVarKey, "noVal") assert.Equal(t, "noVal", res) os.Setenv(testVarKey, "string") defer os.Unsetenv(testVarKey) res = util.GetEnv(testVarKey, "noVal") assert.Equal(t, "string", res) }
explode_data.jsonl/6082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 1949, 14359, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18185, 3962, 1592, 1669, 330, 10033, 31263, 14516, 31718, 11641, 12283, 698, 10202, 1669, 4094, 2234, 14359, 8623, 3962, 1592, 11, 330, 2152, 2208, 1138, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCreateRun_ThroughPipelineVersion creates a run that references a
// pipeline version (rather than a raw pipeline spec) and verifies the stored
// RunDetail: resource references to both the experiment and the version, the
// parameter substitution into the workflow, and that exactly one workflow CRD
// was submitted.
func TestCreateRun_ThroughPipelineVersion(t *testing.T) {
	// Create experiment, pipeline, and pipeline version.
	store, manager, experiment, pipeline := initWithExperimentAndPipeline(t)
	defer store.Close()
	pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore)
	assert.True(t, ok)
	// Make version UUIDs deterministic so the expectations below are stable.
	pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil))
	version, err := manager.CreatePipelineVersion(&api.PipelineVersion{
		Name: "version_for_run",
		ResourceReferences: []*api.ResourceReference{
			&api.ResourceReference{
				Key: &api.ResourceKey{
					Id:   pipeline.UUID,
					Type: api.ResourceType_PIPELINE,
				},
				Relationship: api.Relationship_OWNER,
			},
		},
	}, []byte(testWorkflow.ToStringForStore()), true)
	assert.Nil(t, err)

	// The run references the experiment as owner and the version as creator.
	apiRun := &api.Run{
		Name: "run1",
		PipelineSpec: &api.PipelineSpec{
			Parameters: []*api.Parameter{
				{Name: "param1", Value: "world"},
			},
		},
		ResourceReferences: []*api.ResourceReference{
			{
				Key:          &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID},
				Relationship: api.Relationship_OWNER,
			},
			{
				Key:          &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: version.UUID},
				Relationship: api.Relationship_CREATOR,
			},
		},
		ServiceAccount: "sa1",
	}
	runDetail, err := manager.CreateRun(apiRun)
	assert.Nil(t, err)

	// Build the workflow we expect the manager to have submitted: the test
	// workflow plus injected parameters, run-id label, run-name annotation,
	// and service account.
	expectedRuntimeWorkflow := testWorkflow.DeepCopy()
	expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{
		{Name: "param1", Value: util.StringPointer("world")}}
	expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"}
	expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"}
	expectedRuntimeWorkflow.Spec.ServiceAccountName = "sa1"

	expectedRunDetail := &model.RunDetail{
		Run: model.Run{
			UUID:           "123e4567-e89b-12d3-a456-426655440000",
			ExperimentUUID: experiment.UUID,
			DisplayName:    "run1",
			Name:           "workflow-name",
			Namespace:      "ns1",
			ServiceAccount: "sa1",
			StorageState:   api.Run_STORAGESTATE_AVAILABLE.String(),
			CreatedAtInSec: 4,
			Conditions:     "Running",
			PipelineSpec: model.PipelineSpec{
				WorkflowSpecManifest: testWorkflow.ToStringForStore(),
				Parameters:           "[{\"name\":\"param1\",\"value\":\"world\"}]",
			},
			ResourceReferences: []*model.ResourceReference{
				{
					ResourceUUID:  "123e4567-e89b-12d3-a456-426655440000",
					ResourceType:  common.Run,
					ReferenceUUID: experiment.UUID,
					ReferenceName: "e1",
					ReferenceType: common.Experiment,
					Relationship:  common.Owner,
				},
				{
					ResourceUUID:  "123e4567-e89b-12d3-a456-426655440000",
					ResourceType:  common.Run,
					ReferenceUUID: version.UUID,
					ReferenceName: "version_for_run",
					ReferenceType: common.PipelineVersion,
					Relationship:  common.Creator,
				},
			},
		},
		PipelineRuntime: model.PipelineRuntime{
			WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(),
		},
	}
	assert.Equal(t, expectedRunDetail, runDetail, "The CreateRun return has unexpected value.")
	assert.Equal(t, 1, store.ArgoClientFake.GetWorkflowCount(), "Workflow CRD is not created.")
	// Round-trip through storage: GetRun must return the same detail.
	runDetail, err = manager.GetRun(runDetail.UUID)
	assert.Nil(t, err)
	assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database")
}
explode_data.jsonl/77009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1398 }
[ 2830, 3393, 4021, 6727, 62, 23857, 34656, 5637, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 9342, 11, 15301, 11, 323, 15301, 2319, 624, 57279, 11, 6645, 11, 9342, 11, 15301, 1669, 13864, 77780, 3036, 34656, 1155, 340, 16867, 3553, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertVLabsTelemetryProfile(t *testing.T) { vlabscs := &vlabs.ContainerService{ Properties: &vlabs.Properties{ TelemetryProfile: &vlabs.TelemetryProfile{ ApplicationInsightsKey: "app_insights_key", }, }, } cs, err := ConvertVLabsContainerService(vlabscs, false) if err != nil { t.Errorf("Error converting ContainerService: %s", err) } if cs.Properties.TelemetryProfile == nil { t.Error("Expected TelemetryProfile to be populated") } if cs.Properties.TelemetryProfile.ApplicationInsightsKey != "app_insights_key" { t.Error("Expected TelemetryProfile.ApplicationInsightsKey to be set") } }
explode_data.jsonl/34639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 12012, 30698, 3435, 6639, 35958, 8526, 1155, 353, 8840, 836, 8, 341, 5195, 14380, 2388, 82, 1669, 609, 14536, 3435, 33672, 1860, 515, 197, 197, 7903, 25, 609, 14536, 3435, 15945, 515, 298, 197, 6639, 35958, 8526, 25, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStrOpsTextJustify_String_01(t *testing.T) { testStr := "Center" txtJustify := TextJustify(0).Center() actualStr := txtJustify.String() if actualStr != testStr { t.Errorf("Error: Expected return of object string value= \"Center\".\n"+ "Instead, object string value = '%v'\n", testStr) } }
explode_data.jsonl/29336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 2580, 38904, 1178, 9952, 1437, 31777, 62, 15, 16, 1155, 353, 8840, 836, 8, 1476, 18185, 2580, 1669, 330, 9392, 1837, 68272, 9952, 1437, 1669, 2918, 9952, 1437, 7, 15, 568, 9392, 2822, 88814, 2580, 1669, 7932, 9952, 1437, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTerragruntInitConfirmation runs `terragrunt run-all init` against a
// fixture whose remote-state S3 bucket does not exist, and asserts that the
// bucket-creation confirmation prompt is shown exactly once (not once per
// module). The run itself fails because no answer is supplied on stdin.
func TestTerragruntInitConfirmation(t *testing.T) {
	t.Parallel()

	// Unique, lowercase bucket name so parallel test runs do not collide.
	s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
	tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_OUTPUT_ALL)
	rootTerragruntConfigPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_OUTPUT_ALL, config.DefaultTerragruntConfigPath)
	copyTerragruntConfigAndFillPlaceholders(t, rootTerragruntConfigPath, rootTerragruntConfigPath, s3BucketName, "not-used", "not-used")

	stdout := bytes.Buffer{}
	stderr := bytes.Buffer{}
	err := runTerragruntCommand(t, fmt.Sprintf("terragrunt run-all init --terragrunt-working-dir %s", tmpEnvPath), &stdout, &stderr)
	// No confirmation is ever typed, so the command must fail.
	require.Error(t, err)
	errout := string(stderr.Bytes())
	// The prompt must appear exactly once even though run-all spans modules.
	assert.Equal(t, 1, strings.Count(errout, "does not exist or you don't have permissions to access it. Would you like Terragrunt to create it? (y/n)"))
}
explode_data.jsonl/10185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 51402, 68305, 3850, 3803, 53540, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1903, 18, 36018, 675, 1669, 8879, 17305, 445, 465, 4101, 81, 3850, 16839, 1455, 11152, 11069, 82, 497, 9069, 29983, 53440, 764, 49962, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncodeDecodeBase64Url(t *testing.T) { // creates a set of shares s := "nghiatcxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" arr, err := Create(3, 6, s, true) if err != nil { t.Errorf(err.Error()) } if len(arr) != 6 { t.Errorf("Create shares: FAIL") } // combines shares into secret s1, err := Combine(arr[:3], true) if s1 != s { t.Errorf("combines shares 1 length = 3: FAIL") } s2, err := Combine(arr[3:], true) if s2 != s { t.Errorf("combines shares 2 length = 3: FAIL") } s3, err := Combine(arr[1:5], true) if s3 != s { t.Errorf("combines shares 3 length = 4: FAIL") } if err != nil { t.Errorf(err.Error()) } }
explode_data.jsonl/38123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 32535, 32564, 3978, 21, 19, 2864, 1155, 353, 8840, 836, 8, 341, 197, 322, 11450, 264, 738, 315, 13248, 198, 1903, 1669, 330, 77, 866, 10358, 66, 44102, 44102, 44102, 44102, 44102, 44102, 44102, 44102, 44102, 44102, 44102, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestDialWithBlockingBalancer(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) dialDone := make(chan struct{}) go func() { DialContext(ctx, "Non-Existent.Server:80", WithBlock(), WithInsecure(), WithBalancer(newBlockingBalancer())) close(dialDone) }() cancel() <-dialDone }
explode_data.jsonl/6663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 35, 530, 2354, 48266, 93825, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 2698, 530, 17453, 1669, 1281, 35190, 2036, 37790, 30680, 2915, 368, 341, 197, 10957, 530, 1972, 7502, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDeadline covers TrackSoftDeadline end to end with a mock clock and mock
// signal delivery: no deadline at all, a context deadline (with and without a
// cleanup reservation), and an externally-enforced LUCI_CONTEXT deadline,
// checking soft-deadline events (timeout / interrupt / closure) and the final
// hard-deadline cancellation in each case.
func TestDeadline(t *testing.T) {
	// not Parallel because this uses the global mock signalNotify.
	// t.Parallel()

	Convey(`TrackSoftDeadline`, t, func() {
		t0 := testclock.TestTimeUTC
		ctx, tc := testclock.UseTime(context.Background(), t0)
		ctx, cancel := context.WithCancel(ctx)
		defer cancel()
		defer assertEmptySignals()

		// we explicitly remove the section to make these tests work correctly when
		// run in a context using LUCI_CONTEXT.
		ctx = Set(ctx, "deadline", nil)

		Convey(`Empty context`, func() {
			ac, shutdown := TrackSoftDeadline(ctx, 5*time.Second)
			defer shutdown()
			deadline, ok := ac.Deadline()
			So(ok, ShouldBeFalse)
			So(deadline.IsZero(), ShouldBeTrue)

			// however, Interrupt/SIGTERM handler is still installed
			mockGenerateInterrupt()

			// soft deadline will happen, but context.Done won't.
			So(<-SoftDeadlineDone(ac), ShouldEqual, InterruptEvent)
			So(ac, shouldWaitForNotDone)

			// Advance the clock by 25s, and presto
			tc.Add(25 * time.Second)
			<-ac.Done()
		})

		Convey(`deadline context`, func() {
			ctx, cancel := clock.WithDeadline(ctx, t0.Add(100*time.Second))
			defer cancel()

			ac, shutdown := TrackSoftDeadline(ctx, 5*time.Second)
			defer shutdown()
			hardDeadline, ok := ac.Deadline()
			So(ok, ShouldBeTrue)
			// hard deadline is still 95s because we the presumed grace period for the
			// context was 30s, but we reserved 5s for cleanup. Thus, this should end
			// 5s before the overall deadline,
			So(hardDeadline, ShouldEqual, t0.Add(95*time.Second))

			got := GetDeadline(ac)
			expect := &Deadline{GracePeriod: 25}
			// SoftDeadline is always GracePeriod earlier than the hard (context)
			// deadline.
			expect.SetSoftDeadline(t0.Add(70 * time.Second))
			So(got, ShouldResembleProto, expect)

			shutdown()
			<-SoftDeadlineDone(ac)
			// force monitor to make timer before we increment the clock
			tc.Add(25 * time.Second)
			<-ac.Done()
		})

		Convey(`deadline context reserve`, func() {
			ctx, cancel := clock.WithDeadline(ctx, t0.Add(95*time.Second))
			defer cancel()

			// Reservation of 0: the full presumed 30s grace period remains.
			ac, shutdown := TrackSoftDeadline(ctx, 0)
			defer shutdown()
			deadline, ok := ac.Deadline()
			So(ok, ShouldBeTrue)
			// hard deadline is 95s because we reserved 5s.
			So(deadline, ShouldEqual, t0.Add(95*time.Second))

			got := GetDeadline(ac)
			expect := &Deadline{GracePeriod: 30}
			// SoftDeadline is always GracePeriod earlier than the hard (context)
			// deadline.
			expect.SetSoftDeadline(t0.Add(65 * time.Second))
			So(got, ShouldResembleProto, expect)

			shutdown()
			<-SoftDeadlineDone(ac)
			// force monitor to make timer before we increment the clock
			tc.Add(30 * time.Second)
			<-ac.Done()
		})

		Convey(`Deadline in LUCI_CONTEXT`, func() {
			externalSoftDeadline := t0.Add(100 * time.Second)
			// Note, LUCI_CONTEXT asserts that non-zero SoftDeadlines must be enforced
			// by 'an external process', so we mock that with the goroutine here.
			//
			// Must do clock.After outside goroutine to force this time calculation to
			// happen before we start manipulating `tc`.
			externalTimeout := clock.After(ctx, 100*time.Second)
			go func() {
				if (<-externalTimeout).Err == nil {
					mockGenerateInterrupt()
				}
			}()
			dl := &Deadline{GracePeriod: 40}
			dl.SetSoftDeadline(externalSoftDeadline) // 100s into the future
			ctx := SetDeadline(ctx, dl)

			Convey(`no deadline in context`, func() {
				ac, shutdown := TrackSoftDeadline(ctx, 5*time.Second)
				defer shutdown()
				softDeadline := GetDeadline(ac).SoftDeadlineTime()
				So(softDeadline, ShouldHappenWithin, time.Millisecond, externalSoftDeadline)
				hardDeadline, ok := ac.Deadline()
				So(ok, ShouldBeTrue)
				// hard deadline is soft deadline + adjusted grace period.
				// Cleanup reservation of 5s means that the adjusted grace period is
				// 35s.
				So(hardDeadline, ShouldHappenWithin, time.Millisecond, externalSoftDeadline.Add(35*time.Second))

				Convey(`natural expiration`, func() {
					tc.Add(100 * time.Second)
					So(<-SoftDeadlineDone(ac), ShouldEqual, TimeoutEvent)
					So(ac, shouldWaitForNotDone)

					tc.Add(35 * time.Second)
					<-ac.Done()
					// We should have ended right around the deadline; there's some slop
					// in the clock package though, and this doesn't seem to be zero.
					So(tc.Now(), ShouldHappenWithin, time.Millisecond, hardDeadline)
				})

				Convey(`signal`, func() {
					mockGenerateInterrupt()
					So(<-SoftDeadlineDone(ac), ShouldEqual, InterruptEvent)
					So(ac, shouldWaitForNotDone)

					tc.Add(35 * time.Second)
					<-ac.Done()
					// should still have 65s before the soft deadline
					So(tc.Now(), ShouldHappenWithin, time.Millisecond, softDeadline.Add(-65*time.Second))
				})

				Convey(`cancel context`, func() {
					cancel()
					So(<-SoftDeadlineDone(ac), ShouldEqual, ClosureEvent)
					<-ac.Done()
				})
			})

			Convey(`earlier deadline in context`, func() {
				// The context deadline (50s before the external soft deadline)
				// wins over the LUCI_CONTEXT one.
				ctx, cancel := clock.WithDeadline(ctx, externalSoftDeadline.Add(-50*time.Second))
				defer cancel()

				ac, shutdown := TrackSoftDeadline(ctx, 5*time.Second)
				defer shutdown()
				hardDeadline, ok := ac.Deadline()
				So(ok, ShouldBeTrue)
				So(hardDeadline, ShouldEqual, externalSoftDeadline.Add(-55*time.Second))

				Convey(`natural expiration`, func() {
					tc.Add(10 * time.Second)
					So(<-SoftDeadlineDone(ac), ShouldEqual, TimeoutEvent)
					So(ac, shouldWaitForNotDone)

					tc.Add(35 * time.Second)
					<-ac.Done()
					// We should have ended right around the deadline; there's some slop
					// in the clock package though, and this doesn't seem to be zero.
					So(tc.Now(), ShouldHappenWithin, time.Millisecond, hardDeadline)
				})

				Convey(`signal`, func() {
					mockGenerateInterrupt()
					So(<-SoftDeadlineDone(ac), ShouldEqual, InterruptEvent)
					So(ac, shouldWaitForNotDone)

					tc.Add(35 * time.Second)
					<-ac.Done()
					// Should have about 10s of time left before the deadline.
					So(tc.Now(), ShouldHappenWithin, time.Millisecond, hardDeadline.Add(-10*time.Second))
				})
			})
		})
	})
}
explode_data.jsonl/30989
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2355 }
[ 2830, 3393, 83593, 1155, 353, 8840, 836, 8, 341, 197, 322, 537, 49272, 1576, 419, 5711, 279, 3644, 7860, 8286, 28962, 624, 197, 322, 259, 41288, 7957, 2822, 93070, 5617, 5809, 15667, 30531, 83593, 7808, 259, 11, 2915, 368, 341, 197, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewTelegramClientCheckTimeout(t *testing.T) { tbl := []struct { timeout, expected time.Duration }{ {0, time.Second * 60}, {300, 300}, {100500, 100500}, } for i, tt := range tbl { i := i tt := tt t.Run(strconv.Itoa(i), func(t *testing.T) { client, err := NewTelegramClient("", "", tt.timeout, &duration.Service{}, &TelegramSenderImpl{}) assert.NoError(t, err) assert.Equal(t, tt.expected, client.Timeout) }) } }
explode_data.jsonl/50721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 3564, 72244, 2959, 3973, 7636, 1155, 353, 8840, 836, 8, 341, 3244, 2024, 1669, 3056, 1235, 341, 197, 78395, 11, 3601, 882, 33795, 198, 197, 59403, 197, 197, 90, 15, 11, 882, 32435, 353, 220, 21, 15, 1583, 197, 197, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPerformRequestWithCompressionEnabled runs the shared compression test
// against a client whose transport has Go's transparent gzip handling
// disabled.
func TestPerformRequestWithCompressionEnabled(t *testing.T) {
	transport := &http.Transport{DisableCompression: true}
	testPerformRequestWithCompression(t, &http.Client{Transport: transport})
}
explode_data.jsonl/38028
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 65 }
[ 2830, 3393, 46951, 1900, 2354, 81411, 5462, 1155, 353, 8840, 836, 8, 341, 18185, 46951, 1900, 2354, 81411, 1155, 11, 609, 1254, 11716, 515, 197, 197, 27560, 25, 609, 1254, 87669, 515, 298, 197, 25479, 81411, 25, 830, 345, 197, 197, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestLegalMoves checks LegalMoves against SFEN positions with known legal
// move counts, including check, stalemate, and pawn-drop (uchifuzume) edge
// cases.
func TestLegalMoves(t *testing.T) {
	tests := []struct {
		msg          string
		sfen         string
		legalMovesNr int // expected number of legal moves
	}{
		{"initial", "lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1", 30},
		{"nifu", "8k/PP7/2P6/3P5/9/5P3/6P2/7P1/8P b P 1", 18},
		{"test", "9/4B1SGL/PN2R4/1N1P5/6N2/5+R3/3+B5/9/8L b - 1", 112},
		{"oute(1)", "9/9/3rR2B1/9/8b/4s4/4K4/3N5/9 b 2P 1", 8},
		{"oute(2)", "4r4/9/3R5/7B1/9/9/9/9/4K4 b G 1", 16},
		{"floodgate", "l+S3ks1R/3g2g1+L/4pp1p1/p5p2/1KPS1P1P1/P2p1BP2/+bg2P4/1P5R1/1N7 b 3N2L5Pgs 1", 153},
		// The cases below are taken from python-shogi.
		{"stalemate", "+R+N+SGKG+S+N+R/+B+N+SG+LG+S+N+B/P+LPP+LPP+LP/1P2P2P1/9/9/9/9/6k2 b - 200", 0},
		{"checkmate by dropping FU(1)", "kn7/9/1G7/9/9/9/9/9/9 b P 1", 76},
		{"checkmate by dropping FU(2)", "kn7/9/9/1NN6/9/9/9/9/9 b P 1", 73},
		{"check by dropping FU(1)", "k8/9/9/9/9/9/9/9/9 b P 1", 72},
		{"check by dropping FU(2)", "kn7/1n7/9/9/9/9/9/9/9 b P 1", 71},
		// A pawn drop on 82 stalemates the opponent, but since it is not
		// check it does not count as uchifuzume (illegal pawn-drop mate) (?).
		{"check by dropping FU(3)", "kn7/9/9/1N7/9/9/9/9/9 b P 1", 73},
		{"check by dropping FU(4)", "k8/9/1S7/9/9/9/9/9/9 b P 1", 81},
		{"check by dropping FU(5)", "kg7/9/1G7/9/9/9/9/9/9 b P 1", 77},
	}
	for _, test := range tests {
		p, err := NewPositionFromSFEN(test.sfen)
		if err != nil {
			t.Fatal(err)
		}
		moves := p.LegalMoves()
		if len(moves) != test.legalMovesNr {
			t.Errorf("LegalMoves[%v]: want %v, got %v\nposition %v\n moves %v", test.msg, test.legalMovesNr, len(moves), p, moves)
		}
	}
}
explode_data.jsonl/40639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 936 }
[ 2830, 3393, 52786, 45789, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 21169, 688, 914, 198, 197, 1903, 30353, 260, 914, 198, 197, 197, 6428, 45789, 60952, 526, 198, 197, 59403, 197, 197, 4913, 9426, 497, 330, 2261...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTargetScrapeScrapeNotFound(t *testing.T) { server := httptest.NewServer( http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusNotFound) }), ) defer server.Close() serverURL, err := url.Parse(server.URL) if err != nil { panic(err) } ts := &targetScraper{ Target: &Target{ labels: labels.FromStrings( model.SchemeLabel, serverURL.Scheme, model.AddressLabel, serverURL.Host, ), }, client: http.DefaultClient, } if _, err := ts.scrape(context.Background(), ioutil.Discard); !strings.Contains(err.Error(), "404") { t.Fatalf("Expected \"404 NotFound\" error but got: %s", err) } }
explode_data.jsonl/56138
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 6397, 3326, 19842, 3326, 19842, 10372, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 54320, 70334, 7121, 5475, 1006, 197, 28080, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 298, 6692, 69794, 19886, 10538, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFuzzString(t *testing.T) { data := []byte{0x3, 0x41, 0x42, 0x43} f := &F{Data: data, T: t} fuzzString(f) }
explode_data.jsonl/69274
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 61 }
[ 2830, 3393, 37, 8889, 703, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 90, 15, 87, 18, 11, 220, 15, 87, 19, 16, 11, 220, 15, 87, 19, 17, 11, 220, 15, 87, 19, 18, 532, 1166, 1669, 609, 37, 90, 1043, 25, 821, 11, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestWebHookAuthor_String(t *testing.T) { v := WebHookAuthor{ Email: String(""), Name: String(""), Username: String(""), } want := `github.WebHookAuthor{Email:"", Name:"", Username:""}` if got := v.String(); got != want { t.Errorf("WebHookAuthor.String = %v, want %v", got, want) } }
explode_data.jsonl/33301
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 5981, 31679, 7133, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 4895, 31679, 7133, 515, 197, 197, 4781, 25, 262, 923, 445, 4461, 197, 21297, 25, 257, 923, 445, 4461, 197, 197, 11115, 25, 923, 445, 4461, 197, 532, 507...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestFactory_ReleasePage acquires a page from a factory and exercises
// ReleasePage on both paths: when file removal fails the page's file (and the
// factory size) must be kept; when removal succeeds the file is gone and the
// size drops to zero. The package-level removeFileFunc hook is patched to
// simulate the failure and restored in the deferred cleanup.
func TestFactory_ReleasePage(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer func() {
		// Clean fixtures on disk and restore the patched removal hook.
		_ = fileutil.RemoveDir(testPath)
		removeFileFunc = fileutil.RemoveFile
		ctrl.Finish()
	}()

	fct, err := NewFactory(testPath, 128)
	assert.NoError(t, err)
	p, err := fct.AcquirePage(10)
	assert.NoError(t, err)
	assert.NotNil(t, p)
	files, err := fileutil.ListDir(testPath)
	assert.NoError(t, err)
	assert.Len(t, files, 1)
	assert.Equal(t, int64(128), fct.Size())

	// remove file err
	removeFileFunc = func(file string) error {
		return fmt.Errorf("err")
	}
	err = fct.ReleasePage(10)
	assert.Error(t, err)
	files, err = fileutil.ListDir(testPath)
	assert.NoError(t, err)
	assert.Len(t, files, 1)

	// remove file success
	removeFileFunc = fileutil.RemoveFile
	err = fct.ReleasePage(10)
	assert.NoError(t, err)
	files, err = fileutil.ListDir(testPath)
	assert.NoError(t, err)
	assert.Len(t, files, 0)
	assert.Equal(t, int64(0), fct.Size())

	err = fct.Close()
	assert.NoError(t, err)
}
explode_data.jsonl/53074
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 4153, 85573, 2665, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 692, 16867, 2915, 368, 341, 197, 197, 62, 284, 1034, 1314, 13270, 6184, 8623, 1820, 340, 197, 47233, 1703, 9626, 284, 1034, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetState saves a value for an activated actor via SaveState and
// verifies that GetState returns the same JSON-quoted payload.
func TestGetState(t *testing.T) {
	testActorRuntime := newTestActorsRuntime()
	actorType, actorID := getTestActorTypeAndID()
	// The stored value round-trips through JSON, so quote it up front.
	fakeData := strconv.Quote("fakeData")
	var val interface{}
	jsoniter.ConfigFastest.Unmarshal([]byte(fakeData), &val)
	actorKey := testActorRuntime.constructCompositeKey(actorType, actorID)
	// Activate the actor so state operations are accepted.
	fakeCallAndActivateActor(testActorRuntime, actorKey)

	testActorRuntime.SaveState(&SaveStateRequest{
		ActorID:   actorID,
		ActorType: actorType,
		Key:       TestKeyName,
		Value:     val,
	})

	// act
	response, err := testActorRuntime.GetState(&GetStateRequest{
		ActorID:   actorID,
		ActorType: actorType,
		Key:       TestKeyName,
	})

	// assert
	assert.NoError(t, err)
	assert.Equal(t, fakeData, string(response.Data))
}
explode_data.jsonl/12888
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 1949, 1397, 1155, 353, 8840, 836, 8, 341, 18185, 18870, 15123, 1669, 501, 2271, 2414, 1087, 15123, 741, 93410, 929, 11, 12089, 915, 1669, 633, 2271, 18870, 929, 3036, 915, 741, 1166, 726, 1043, 1669, 33317, 13, 19466, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestJsonListValue_ConvertToNative_Json converts a JSON list value both to
// its underlying structpb.ListValue (must be the identical message) and to a
// wrapping structpb.Value (must wrap the same list).
func TestJsonListValue_ConvertToNative_Json(t *testing.T) {
	list := NewJSONList(NewRegistry(), &structpb.ListValue{Values: []*structpb.Value{
		{Kind: &structpb.Value_StringValue{StringValue: "hello"}},
		{Kind: &structpb.Value_NumberValue{NumberValue: 1}}}})
	listVal, err := list.ConvertToNative(jsonListValueType)
	if err != nil {
		t.Error(err)
	}
	// Identity check (pointer equality), not just structural equality.
	if listVal != list.Value().(proto.Message) {
		t.Error("List did not convert to its underlying representation.")
	}

	val, err := list.ConvertToNative(jsonValueType)
	if err != nil {
		t.Error(err)
	}
	// Converting to the generic Value type must produce a Value wrapping the
	// very same ListValue obtained above.
	if !proto.Equal(val.(proto.Message), &structpb.Value{Kind: &structpb.Value_ListValue{
		ListValue: listVal.(*structpb.ListValue)}}) {
		t.Errorf("Messages were not equal, got '%v'", val)
	}
}
explode_data.jsonl/55355
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 5014, 852, 1130, 15100, 1621, 1249, 20800, 62, 5014, 1155, 353, 8840, 836, 8, 341, 14440, 1669, 1532, 5370, 852, 35063, 15603, 1507, 609, 1235, 16650, 5814, 1130, 90, 6227, 25, 29838, 1235, 16650, 6167, 515, 197, 197, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestQueryRowClosingStmt ensures QueryRow's implicitly prepared statement is
// closed after Scan, leaving no leaked statements on the connection that is
// returned to the pool.
func TestQueryRowClosingStmt(t *testing.T) {
	db := newTestDB(t, "people")
	defer closeDB(t, db)
	var name string
	var age int
	err := db.QueryRow("SELECT|people|age,name|age=?", 3).Scan(&age, &name)
	if err != nil {
		t.Fatal(err)
	}
	// After Scan the connection must have gone back to the free pool.
	if len(db.freeConn) != 1 {
		t.Fatalf("expected 1 free conn")
	}
	fakeConn := db.freeConn[0].ci.(*fakeConn)
	// Every statement prepared on the fake connection must also be closed.
	if made, closed := fakeConn.stmtsMade, fakeConn.stmtsClosed; made != closed {
		t.Errorf("statement close mismatch: made %d, closed %d", made, closed)
	}
}
explode_data.jsonl/15989
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 2859, 3102, 36294, 31063, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 330, 16069, 1138, 16867, 3265, 3506, 1155, 11, 2927, 340, 2405, 829, 914, 198, 2405, 4231, 526, 198, 9859, 1669, 2927, 15685, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_newNode(t *testing.T) { tests := []struct { name string text string want Tree }{ { name: "must create node", text: "node-foo", want: &node{ text: "node-foo", nodes: []Tree{}, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := newNode(tt.text); !reflect.DeepEqual(got, tt.want) { t.Errorf("newNode() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/46890
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 5921, 1955, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 15425, 914, 198, 197, 50780, 8942, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 24812, 1855, 2436, 756, 298, 15425,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestKustomize(t *testing.T) { testCase := &tests.KustomizeTestCase{ Package: "../../../../../cert-manager/cert-manager/overlays/self-signed", Expected: "test_data/expected", } tests.RunTestCase(t, testCase) }
explode_data.jsonl/23311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 42, 1450, 551, 1155, 353, 8840, 836, 8, 341, 18185, 4207, 1669, 609, 23841, 11352, 1450, 551, 16458, 515, 197, 10025, 1434, 25, 220, 10208, 26744, 12246, 44896, 2899, 529, 44896, 14, 1975, 63359, 68239, 92553, 756, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_NewValidator_unknownAlg(t *testing.T) { _, err := NewValidator(&SignatureConfig{ Alg: "random", }, nopExtractor) if err == nil || err.Error() != "JOSE: unknown algorithm random" { t.Errorf("unexpected error: %v", err) } }
explode_data.jsonl/67486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 39582, 14256, 57507, 86895, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 1532, 14256, 2099, 25088, 2648, 515, 197, 197, 86895, 25, 330, 11463, 756, 197, 2137, 65026, 56118, 340, 743, 1848, 621, 2092, 1369, 1848, 6141...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestUpdateRuntimeConfiguration checks that updateRuntimeConfig copies a
// non-default agent selection from the TOML configuration into the runtime
// config's AgentType and AgentConfig fields.
func TestUpdateRuntimeConfiguration(t *testing.T) {
	assert := assert.New(t)

	// Sanity: the default agent differs from the one we are about to force.
	assert.NotEqual(defaultAgent, vc.KataContainersAgent)

	config := oci.RuntimeConfig{}

	tomlConf := tomlConfig{
		Agent: map[string]agent{
			// force a non-default value
			kataAgentTableType: {},
		},
	}

	// Before the update the config must not yet reflect the forced agent.
	assert.NotEqual(config.AgentType, vc.AgentType(kataAgentTableType))
	assert.NotEqual(config.AgentConfig, vc.KataAgentConfig{})

	err := updateRuntimeConfig("", tomlConf, &config)
	assert.NoError(err)

	// After the update both fields match the forced agent selection.
	assert.Equal(config.AgentType, vc.AgentType(kataAgentTableType))
	assert.Equal(config.AgentConfig, vc.KataAgentConfig{})
}
explode_data.jsonl/5137
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 4289, 15123, 7688, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 6948, 15000, 2993, 18978, 16810, 11, 24553, 11352, 459, 74632, 16810, 692, 25873, 1669, 93975, 16706, 2648, 31483, 3244, 316, 75, 15578, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInfoStream_expired(t *testing.T) { assert := assert.New(t) _, log, cleanup := setup(t) defer cleanup() ctx, cancel := context.WithCancel(context.Background()) cancel() infoCh := make(chan *blog.Info, 4) err := log.InfoStream(ctx, infoCh) assert.Equal(ctx.Err(), err) }
explode_data.jsonl/63
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 1731, 3027, 80221, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 197, 6878, 1487, 11, 21290, 1669, 6505, 1155, 340, 16867, 21290, 741, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 84441, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetQuerySnippetGzipped(t *testing.T) { var buf bytes.Buffer zw := gzip.NewWriter(&buf) q := makeQuery(1000) _, err := zw.Write([]byte(q)) if err != nil { t.Fatal(err) } zw.Close() req, err := http.NewRequest("POST", "http://127.0.0.1:9090", &buf) req.Header.Set("Content-Encoding", "gzip") if err != nil { t.Fatal(err) } query := getQuerySnippet(req) if query[:100] != string(q[:100]) { t.Fatalf("got: %q; expected: %q", query[:100], q[:100]) } }
explode_data.jsonl/31703
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 1949, 2859, 87852, 38, 89, 6450, 1155, 353, 8840, 836, 8, 341, 2405, 6607, 5820, 22622, 198, 20832, 86, 1669, 57795, 7121, 6492, 2099, 5909, 340, 18534, 1669, 1281, 2859, 7, 16, 15, 15, 15, 340, 197, 6878, 1848, 1669, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestIssue1281_JsonPatchAndImageTag reproduces kustomize issue #1281: a JSON
// 6902 patch that replaces a container must compose correctly with the images
// transformer. Deployment "ben" has its container replaced by the patch
// (image "costello"), which is then retagged to v8; the untouched "alice"
// Deployment keeps image "abbott", retagged to v2.
//
// NOTE(review): the heredoc indentation below was reconstructed from a
// whitespace-mangled source — verify against the original fixture.
func TestIssue1281_JsonPatchAndImageTag(t *testing.T) {
	th := kusttest_test.MakeHarness(t)
	// Kustomization: two image retag rules plus a JSON 6902 patch on "ben".
	th.WriteK("/app", `
resources:
- deployment.yaml
images:
- name: abbott
  newTag: v2
- name: costello
  newTag: v8
patchesJson6902:
- target:
    group: apps
    version: v1
    kind: Deployment
    name: ben
  path: patch.json
`)
	// Two Deployments sharing the "abbott" image.
	th.WriteF("/app/deployment.yaml", `
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ben
spec:
  template:
    spec:
      dnsPolicy: "None"
      containers:
      - name: awesome
        image: abbott
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: alice
spec:
  template:
    spec:
      containers:
      - name: tomato
        image: abbott
`)
	// The patch adds a replica field and wholesale-replaces container 0.
	th.WriteF("/app/patch.json", `
[
  {"op": "add", "path": "/spec/replica", "value": "3"},
  {"op": "replace", "path": "/spec/template/spec/containers/0", "value": {
    "image": "costello"
  }}
]
`)
	m := th.Run("/app", th.MakeDefaultOptions())
	// "ben" gets the replaced container (costello:v8); "alice" keeps
	// abbott, retagged to v2.
	th.AssertActualEqualsExpected(m, `
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ben
spec:
  replica: "3"
  template:
    spec:
      containers:
      - image: costello:v8
      dnsPolicy: None
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: alice
spec:
  template:
    spec:
      containers:
      - image: abbott:v2
        name: tomato
`)
}
explode_data.jsonl/49820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 573 }
[ 2830, 3393, 42006, 16, 17, 23, 16, 62, 5014, 43622, 3036, 1906, 5668, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 595, 590, 1944, 4452, 50133, 74248, 1155, 340, 70479, 4073, 42, 4283, 676, 497, 22074, 12745, 510, 12, 23172, 33406, 271,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestKafkaProducer pushes an OpenConfig SubscribeResponse through the
// producer pipeline and verifies the resulting Kafka message: its topic, its
// key (the system ID) and the JSON-encoded notification document.
func TestKafkaProducer(t *testing.T) {
	mock := newMockAsyncProducer()
	toDB := make(chan proto.Message)
	topic := "occlient"
	systemID := "Foobar"
	toDBProducer := &producer{
		notifsChan:    toDB,
		kafkaProducer: mock,
		encoder:       openconfig.NewEncoder(topic, sarama.StringEncoder(systemID), ""),
		done:          make(chan struct{}),
		wg:            sync.WaitGroup{},
	}
	toDBProducer.Start()

	// A minimal notification: prefix path "/foo/bar" and no updates.
	response := &pb.SubscribeResponse{
		Response: &pb.SubscribeResponse_Update{
			Update: &pb.Notification{
				Timestamp: 0,
				Prefix:    newPath("/foo/bar"),
				Update:    []*pb.Update{},
			},
		},
	}
	// The JSON document the encoder is expected to produce for the response.
	document := map[string]interface{}{
		"timestamp": int64(0),
		"update": map[string]interface{}{
			"": map[string]interface{}{
				"foo": map[string]interface{}{
					"bar": map[string]interface{}{},
				},
			},
		},
	}
	toDB <- response

	// The mock producer exposes the message the producer tried to send.
	kafkaMessage := <-mock.input
	if kafkaMessage.Topic != topic {
		t.Errorf("Unexpected Topic: %s, expecting %s", kafkaMessage.Topic, topic)
	}
	key, err := kafkaMessage.Key.Encode()
	if err != nil {
		t.Fatalf("Error encoding key: %s", err)
	}
	if string(key) != systemID {
		t.Errorf("Kafka message didn't have expected key: %s, expecting %s", string(key), systemID)
	}
	valueBytes, err := kafkaMessage.Value.Encode()
	if err != nil {
		t.Fatalf("Error encoding value: %s", err)
	}
	var result interface{}
	err = json.Unmarshal(valueBytes, &result)
	if err != nil {
		t.Errorf("Error decoding into JSON: %s", err)
	}
	// NOTE(review): only the "update" subtree is compared — presumably the
	// timestamp is encoded differently on the wire; confirm before relying
	// on it.
	if !test.DeepEqual(document["update"], result.(map[string]interface{})["update"]) {
		t.Errorf("Protobuf sent from Kafka Producer does not match original.\nOriginal: %v\nNew:%v", document, result)
	}
	toDBProducer.Stop()
}
explode_data.jsonl/4802
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 703 }
[ 2830, 3393, 42, 21883, 45008, 1155, 353, 8840, 836, 8, 341, 77333, 1669, 501, 11571, 6525, 45008, 741, 31709, 3506, 1669, 1281, 35190, 18433, 8472, 340, 3244, 24810, 1669, 330, 509, 2972, 698, 40293, 915, 1669, 330, 91879, 31393, 698, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// Test_MemTable exercises the basic memtable flow: insert one key/value pair,
// read it back, and print the approximate memory usage.
//
// NOTE(review): this test asserts nothing — it only prints, so a broken Get
// would pass silently. Consider asserting the returned value and the Get
// error.
func Test_MemTable(t *testing.T) {
	memTable := New()
	// 1234567 is an arbitrary numeric first argument (presumably a sequence
	// number — confirm against memtable.Add's signature).
	memTable.Add(1234567, internal.TypeValue, []byte("aadsa34a"), []byte("bb23b3423"))
	value, _ := memTable.Get([]byte("aadsa34a")) // error deliberately ignored
	fmt.Println(string(value))
	fmt.Println(memTable.ApproximateMemoryUsage())
}
explode_data.jsonl/12763
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 1245, 336, 2556, 1155, 353, 8840, 836, 8, 341, 14145, 2556, 1669, 1532, 741, 14145, 2556, 1904, 7, 16, 17, 18, 19, 20, 21, 22, 11, 5306, 10184, 1130, 11, 3056, 3782, 445, 64, 7664, 64, 18, 19, 64, 3975, 3056, 3782, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_ErrMigrationsNotImplementedForDriver(t *testing.T) { err := ErrMigrationsNotImplementedForDriver{ Driver: "fake-driver", } assert.Equal(t, "migrations are not implemented for driver: fake-driver", err.Error()) }
explode_data.jsonl/55408
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 93623, 44, 17824, 2623, 18300, 2461, 11349, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 15495, 44, 17824, 2623, 18300, 2461, 11349, 515, 197, 10957, 5469, 25, 330, 30570, 76490, 756, 197, 630, 6948, 12808, 1155, 11, 330, 76, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestSocketPVProposal checks, for every socket test case, that a proposal
// signed through the validator endpoint carries the same signature as one
// signed directly by the service's private validator.
func TestSocketPVProposal(t *testing.T) {
	for _, tc := range socketTestCases(t) {
		// Each case runs in its own closure so the deferred Stop calls fire
		// at the end of the iteration rather than at function return.
		func() {
			var (
				chainID = cmn.RandStr(12)

				validatorEndpoint, serviceEndpoint = testSetupSocketPair(
					t, chainID, types.NewMockPV(), tc.addr, tc.dialer)

				ts             = time.Now()
				privProposal   = &types.Proposal{Timestamp: ts}
				clientProposal = &types.Proposal{Timestamp: ts}
			)
			defer validatorEndpoint.Stop()
			defer serviceEndpoint.Stop()

			require.NoError(t, serviceEndpoint.privVal.SignProposal(chainID, privProposal))
			require.NoError(t, validatorEndpoint.SignProposal(chainID, clientProposal))

			// Both signing paths must yield byte-identical signatures.
			assert.Equal(t, privProposal.Signature, clientProposal.Signature)
		}()
	}
}
explode_data.jsonl/77980
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 10286, 48469, 98637, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 7575, 2271, 37302, 1155, 8, 341, 197, 29244, 368, 341, 298, 2405, 2399, 571, 197, 8819, 915, 999, 284, 9961, 77, 2013, 437, 2580, 7, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuperHashGetWithNoKeys(t *testing.T) { hashmap := New() k1, k2, k3, value := 1, true, 3, 4 hashmap.Set(k1, k2, k3, value) hashmap.Get() }
explode_data.jsonl/82210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 19284, 6370, 1949, 2354, 2753, 8850, 1155, 353, 8840, 836, 8, 341, 50333, 2186, 1669, 1532, 741, 16463, 16, 11, 595, 17, 11, 595, 18, 11, 897, 1669, 220, 16, 11, 830, 11, 220, 18, 11, 220, 19, 198, 50333, 2186, 4202, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestSampleWrite round-trips the sample history entries through
// history.WriteHistoryFile and verifies the written file is byte-identical
// (compared by hash) to the reference sample file.
func TestSampleWrite(t *testing.T) {
	ee, err := getSampleEntries()
	if err != nil {
		t.Errorf("Failed to get sample entries:%v", err)
	}

	// Write Entries to a file
	fname := filepath.Join(os.TempDir(), ".zsh_history")
	err = history.WriteHistoryFile(ee, fname)
	if err != nil {
		t.Errorf("Failed to write entries to files:%v", err)
	}

	// Compare files: hash of the freshly written file vs the checked-in
	// reference sample.
	hashOriginal, err := getFileHash(filepath.Join("samples", "sample1.test_history"))
	if err != nil {
		t.Errorf("Failed to Compute Hash:%v", err)
	}
	hashNew, err := getFileHash(fname)
	if err != nil {
		t.Errorf("Failed to Compute Hash:%v", err)
	}
	if hashOriginal != hashNew {
		t.Errorf("Hashes do not match. Want %s, have %s", hashOriginal, hashNew)
	}
	// Best-effort cleanup; error intentionally ignored.
	os.Remove(fname)
}
explode_data.jsonl/68866
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 17571, 7985, 1155, 353, 8840, 836, 8, 341, 197, 2127, 11, 1848, 1669, 633, 17571, 24533, 741, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 9408, 311, 633, 6077, 10695, 7533, 85, 497, 1848, 340, 197, 630, 197, 322, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestServiceByCommand(t *testing.T) { cases := []struct { in []string want ServiceName }{ { in: []string{"/usr/bin/memcached", "-m", "64", "-p", "11211", "-u", "memcache", "-l", "127.0.0.1", "-P", "/var/run/memcached/memcached.pid"}, want: MemcachedService, }, } for i, c := range cases { got, ok := serviceByCommand(c.in) if c.want != "" && got != c.want { t.Errorf("serviceByCommand(<case #%d>) == %#v, want %#v", i, got, c.want) } else if c.want == "" && ok { t.Errorf("serviceByCommand(<case #%d>) == %#v, want nothing", i, got) } } }
explode_data.jsonl/27780
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 1860, 1359, 4062, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 17430, 256, 3056, 917, 198, 197, 50780, 5362, 675, 198, 197, 59403, 197, 197, 515, 298, 17430, 25, 256, 3056, 917, 90, 3115, 7063, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestRpcSignatureComposer signs an RPC-style request twice with fixed
// (mocked) date and nonce hooks and checks that the Timestamp,
// SignatureNonce and HMAC Signature query parameters are deterministic and
// identical on both passes.
func TestRpcSignatureComposer(t *testing.T) {
	request := requests.NewCommonRequest()
	request.TransToAcsRequest()
	c := credentials.NewAccessKeyCredential("accessKeyId", "accessKeySecret")
	signer := signers.NewAccessKeySigner(c)

	// Swap in deterministic date/nonce hooks; restore on exit so other
	// tests are unaffected by the global hook variables.
	origTestHookGetDate := hookGetDate
	defer func() { hookGetDate = origTestHookGetDate }()
	hookGetDate = mockRpcDate

	origTestHookGetNonce := hookGetNonce
	defer func() { hookGetNonce = origTestHookGetNonce }()
	hookGetNonce = mockRpcGetNonce

	signRpcRequest(request, signer, "regionId")
	assert.Equal(t, "mock date", request.GetQueryParams()["Timestamp"])
	assert.Equal(t, "MOCK_UUID", request.GetQueryParams()["SignatureNonce"])
	assert.Equal(t, "7loPmFjvDnzOVnQeQNj85S6nFGY=", request.GetQueryParams()["Signature"])

	// Signing a second time with the same inputs must produce identical
	// parameters (idempotence).
	signRpcRequest(request, signer, "regionId")
	assert.Equal(t, "mock date", request.GetQueryParams()["Timestamp"])
	assert.Equal(t, "MOCK_UUID", request.GetQueryParams()["SignatureNonce"])
	assert.Equal(t, "7loPmFjvDnzOVnQeQNj85S6nFGY=", request.GetQueryParams()["Signature"])
}
explode_data.jsonl/42655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 402 }
[ 2830, 3393, 60248, 25088, 90067, 1155, 353, 8840, 836, 8, 341, 23555, 1669, 7388, 7121, 10839, 1900, 741, 23555, 11815, 1249, 32, 4837, 1900, 741, 1444, 1669, 16387, 7121, 6054, 1592, 48265, 445, 5211, 81343, 497, 330, 5211, 1592, 19773, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTerragruntBeforeAndAfterMergeHook runs `terragrunt apply` against a
// fixture whose parent and child configs both declare before/after hooks,
// then inspects which hook output files exist: the child's hooks and the
// parent's non-overridden hooks must have run, while the parent's overridden
// "before" hook must not have produced output.
func TestTerragruntBeforeAndAfterMergeHook(t *testing.T) {
	t.Parallel()

	childPath := util.JoinPath(TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_MERGE_PATH, qaMyAppRelPath)
	cleanupTerraformFolder(t, childPath)

	// Unique bucket name per run so parallel tests don't collide in S3.
	s3BucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId()))
	t.Logf("bucketName: %s", s3BucketName)
	defer deleteS3Bucket(t, TERRAFORM_REMOTE_STATE_S3_REGION, s3BucketName)

	tmpTerragruntConfigPath := createTmpTerragruntConfigWithParentAndChild(t, TEST_FIXTURE_HOOKS_BEFORE_AND_AFTER_MERGE_PATH, qaMyAppRelPath, s3BucketName, config.DefaultTerragruntConfigPath, config.DefaultTerragruntConfigPath)

	runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntConfigPath, childPath))

	// Each hook writes a marker file; ReadFile's error tells us whether the
	// hook actually ran.
	_, beforeException := ioutil.ReadFile(childPath + "/before.out")
	_, beforeChildException := ioutil.ReadFile(childPath + "/before-child.out")
	_, beforeOverriddenParentException := ioutil.ReadFile(childPath + "/before-parent.out")
	_, afterException := ioutil.ReadFile(childPath + "/after.out")
	_, afterParentException := ioutil.ReadFile(childPath + "/after-parent.out")

	assert.NoError(t, beforeException)
	assert.NoError(t, beforeChildException)
	assert.NoError(t, afterException)
	assert.NoError(t, afterParentException)

	// PathError because no file found
	assert.Error(t, beforeOverriddenParentException)
}
explode_data.jsonl/10068
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 522 }
[ 2830, 3393, 51402, 68305, 3850, 10227, 3036, 6025, 52096, 31679, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 58391, 1820, 1669, 4094, 22363, 1820, 50320, 42635, 41486, 82251, 50, 82218, 21767, 72339, 1245, 92497, 7944, 11, 88496,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteWhenSuccess(t *testing.T) { // Given store := &mocksStore.StoreInterface{} store.On("Delete", "my-key").Return(nil) codec := New(store) // When err := codec.Delete("my-key") // Then assert.Nil(t, err) assert.Equal(t, 0, codec.GetStats().Hits) assert.Equal(t, 0, codec.GetStats().Miss) assert.Equal(t, 0, codec.GetStats().SetSuccess) assert.Equal(t, 0, codec.GetStats().SetError) assert.Equal(t, 1, codec.GetStats().DeleteSuccess) assert.Equal(t, 0, codec.GetStats().DeleteError) assert.Equal(t, 0, codec.GetStats().InvalidateSuccess) assert.Equal(t, 0, codec.GetStats().InvalidateError) }
explode_data.jsonl/29042
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 6435, 4498, 7188, 1155, 353, 8840, 836, 8, 341, 197, 322, 16246, 198, 57279, 1669, 609, 16712, 82, 6093, 38047, 5051, 16094, 57279, 8071, 445, 6435, 497, 330, 2408, 16173, 1827, 5598, 27907, 692, 43343, 66, 1669, 1532, 31200...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLockAndTimeout verifies the tablet manager's table-lock behaviour on a
// replica: while locked, writes on the primary are not applied on the
// replica; after the server-side lock timeout expires, replication catches
// up on its own.
func TestLockAndTimeout(t *testing.T) {
	defer cluster.PanicHandler(t)
	ctx := context.Background()

	primaryConn, err := mysql.Connect(ctx, &primaryTabletParams)
	require.Nil(t, err)
	defer primaryConn.Close()

	replicaConn, err := mysql.Connect(ctx, &replicaTabletParams)
	require.Nil(t, err)
	defer replicaConn.Close()

	// first make sure that our writes to the primary make it to the replica
	utils.Exec(t, primaryConn, "insert into t1(id, value) values(1,'a')")
	checkDataOnReplica(t, replicaConn, `[[VARCHAR("a")]]`)

	// now lock the replica
	err = tmcLockTables(ctx, replicaTablet.GrpcPort)
	require.Nil(t, err)

	// make sure that writing to the primary does not show up on the replica while locked
	utils.Exec(t, primaryConn, "insert into t1(id, value) values(2,'b')")
	checkDataOnReplica(t, replicaConn, `[[VARCHAR("a")]]`)

	// the tests sets the lock timeout to 5 seconds, so sleeping 8 should be safe
	time.Sleep(8 * time.Second)
	checkDataOnReplica(t, replicaConn, `[[VARCHAR("a")] [VARCHAR("b")]]`)

	// Clean the table for further testing
	utils.Exec(t, primaryConn, "delete from t1")
}
explode_data.jsonl/42995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 384 }
[ 2830, 3393, 11989, 3036, 7636, 1155, 353, 8840, 836, 8, 341, 16867, 10652, 1069, 31270, 3050, 1155, 340, 20985, 1669, 2266, 19047, 2822, 197, 6545, 9701, 11, 1848, 1669, 10564, 43851, 7502, 11, 609, 6545, 2556, 83, 4870, 340, 17957, 596...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLookupDeclByNameNonNullable(t *testing.T) { decl, ok := testSchema(t).lookupDeclByName("ExampleStruct", false) if !ok { t.Fatalf("lookupDeclByName failed") } checkStruct(t, decl.(*StructDecl), "ExampleStruct", false) }
explode_data.jsonl/21384
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 34247, 21629, 16898, 8121, 15703, 1155, 353, 8840, 836, 8, 341, 197, 10005, 11, 5394, 1669, 1273, 8632, 1155, 568, 21020, 21629, 16898, 445, 13314, 9422, 497, 895, 340, 743, 753, 562, 341, 197, 3244, 30762, 445, 21020, 21629...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestDefaultByteBuf_WriteFloat32(t *testing.T) { buf := EmptyByteBuf() buf.WriteFloat32(math.MaxFloat32) if math.MaxFloat32 != buf.ReadFloat32() { t.Fail() } buf.WriteFloat32LE(math.MaxFloat32) if math.MaxFloat32 != buf.ReadFloat32LE() { t.Fail() } }
explode_data.jsonl/1988
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 3675, 7153, 15064, 31825, 5442, 18, 17, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 22228, 7153, 15064, 741, 26398, 4073, 5442, 18, 17, 37270, 14535, 5442, 18, 17, 340, 743, 6888, 14535, 5442, 18, 17, 961, 6607, 6503, 5442,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_byMigrationRecordVersion(t *testing.T) { unordered := []MigrationRecord{ { Version: 1.1, Description: "Description", Checksum: "7ebca1c6f05333a728a8db4629e8d543", AppliedAt: time.Now(), ExecutionTime: time.Millisecond * 1, }, { Version: 1.0, Description: "Description", Checksum: "7ebca1c6f05333a728a8db4629e8d543", AppliedAt: time.Now(), ExecutionTime: time.Millisecond * 1, }, } sort.Sort(byMigrationRecordVersion(unordered)) if unordered[0].Version != 1.0 { t.Errorf("Must order by version number") } }
explode_data.jsonl/78096
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 283 }
[ 2830, 3393, 3710, 20168, 6471, 5637, 1155, 353, 8840, 836, 8, 341, 20479, 10544, 1669, 3056, 20168, 6471, 515, 197, 197, 515, 298, 77847, 25, 981, 220, 16, 13, 16, 345, 298, 47414, 25, 256, 330, 5009, 756, 298, 69472, 1242, 25, 414,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCreatePipeline_StorePipelineMetadataError forces a storage failure by
// closing the underlying DB before CreatePipeline runs, and verifies the
// error surfaces as an Internal user error that mentions the pipeline table.
func TestCreatePipeline_StorePipelineMetadataError(t *testing.T) {
	store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
	defer store.Close()
	// Close the DB up front so the pipeline-metadata insert must fail.
	store.DB().Close()
	manager := NewResourceManager(store)
	_, err := manager.CreatePipeline("pipeline1", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow"))
	assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
	assert.Contains(t, err.Error(), "Failed to add pipeline to pipeline table")
}
explode_data.jsonl/28350
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 4021, 34656, 92684, 34656, 14610, 1454, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 52317, 2959, 2043, 2195, 62396, 67811, 7121, 52317, 1462, 2461, 44338, 2398, 16867, 3553, 10421, 741, 57279, 22537, 1005, 7925, 741, 92272,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTidbClusterControllerEnqueueTidbClusterFailed(t *testing.T) { g := NewGomegaWithT(t) tcc, _, _ := newFakeTidbClusterController() tcc.enqueueTidbCluster(struct{}{}) g.Expect(tcc.queue.Len()).To(Equal(0)) }
explode_data.jsonl/68173
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 51, 307, 65, 28678, 2051, 1702, 4584, 51, 307, 65, 28678, 9408, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 340, 3244, 638, 11, 8358, 716, 1669, 501, 52317, 51, 307, 65, 28678, 2051, 2822, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSTMPutNewKey runs a software-transactional-memory transaction that
// puts a brand-new key under RepeatableReads isolation and verifies the
// value is visible afterwards via a plain Get.
func TestSTMPutNewKey(t *testing.T) {
	clus := NewClusterV3(t, &ClusterConfig{Size: 1})
	defer clus.Terminate(t)

	etcdc := clus.RandClient()
	// The transaction body: write a single key.
	applyf := func(stm concurrency.STM) error {
		stm.Put("foo", "bar")
		return nil
	}

	iso := concurrency.WithIsolation(concurrency.RepeatableReads)
	if _, err := concurrency.NewSTM(etcdc, applyf, iso); err != nil {
		t.Fatalf("error on stm txn (%v)", err)
	}

	resp, err := etcdc.Get(context.TODO(), "foo")
	if err != nil {
		t.Fatalf("error fetching key (%v)", err)
	}
	if string(resp.Kvs[0].Value) != "bar" {
		t.Fatalf("bad value. got %+v, expected 'bar' value", resp)
	}
}
explode_data.jsonl/51050
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 784, 5781, 332, 3564, 1592, 1155, 353, 8840, 836, 8, 341, 197, 4163, 1669, 1532, 28678, 53, 18, 1155, 11, 609, 28678, 2648, 90, 1695, 25, 220, 16, 3518, 16867, 1185, 355, 836, 261, 34016, 1155, 692, 197, 295, 72026, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestScpCommand exercises scpCommand end-to-end against a test SSH remote:
// copying a single file local->remote and remote->local, and a recursive
// directory copy using the -r option. Currently skipped because the local
// SSH test configuration is not implemented.
func TestScpCommand(t *testing.T) {
	t.Skip("skipping because local testing ssh configuration is not implemented")

	ctx := context.TODO()

	Convey("With files to scp", t, func() {

		// the local files and target directory for scping
		evgHome := evergreen.FindEvergreenHome()
		tmpBase := filepath.Join(evgHome, "command/testdata/tmp")
		fileToScp := filepath.Join(tmpBase, "copy_me_please.txt")
		directoryToScp := filepath.Join(tmpBase, "copy_my_children_please")
		nestedFileToScp := filepath.Join(directoryToScp, "copy_me_too_please.txt")
		targetDirectory := filepath.Join(tmpBase, "feed_me_files")

		// remove the files and directories, if they exist (start clean)
		exists, err := util.FileExists(tmpBase)
		So(err, ShouldBeNil)
		if exists {
			So(os.RemoveAll(tmpBase), ShouldBeNil)
		}
		So(os.MkdirAll(tmpBase, 0777), ShouldBeNil)

		// prevent permissions issues
		syscall.Umask(0000)

		// create the files / directories to be used
		So(ioutil.WriteFile(fileToScp, []byte("hello"), 0777), ShouldBeNil)
		So(os.Mkdir(directoryToScp, 0777), ShouldBeNil)
		So(ioutil.WriteFile(nestedFileToScp, []byte("hi"), 0777), ShouldBeNil)
		So(os.Mkdir(targetDirectory, 0777), ShouldBeNil)

		Convey("when running scp commands", func() {

			Convey("copying files should work in both directions (local to"+
				" remote and remote to local)", func() {

				// scp the file from local to remote
				scpCmd := &scpCommand{
					Source:         fileToScp,
					Dest:           targetDirectory,
					Stdout:         ioutil.Discard,
					Stderr:         ioutil.Discard,
					RemoteHostName: TestRemote,
					User:           TestRemoteUser,
					Options:        []string{"-i", TestRemoteKey},
					SourceIsRemote: false,
				}
				So(scpCmd.Run(ctx), ShouldBeNil)

				// make sure the file was scp-ed over
				newFileContents, err := ioutil.ReadFile(
					filepath.Join(targetDirectory, "copy_me_please.txt"))
				So(err, ShouldBeNil)
				So(newFileContents, ShouldResemble, []byte("hello"))

				// remove the file
				So(os.Remove(filepath.Join(targetDirectory,
					"copy_me_please.txt")), ShouldBeNil)

				// scp the file from remote to local
				scpCmd = &scpCommand{
					Source:         fileToScp,
					Dest:           targetDirectory,
					Stdout:         ioutil.Discard,
					Stderr:         ioutil.Discard,
					RemoteHostName: TestRemote,
					User:           TestRemoteUser,
					Options:        []string{"-i", TestRemoteKey},
					SourceIsRemote: true,
				}
				So(scpCmd.Run(ctx), ShouldBeNil)

				// make sure the file was scp-ed over
				newFileContents, err = ioutil.ReadFile(
					filepath.Join(targetDirectory, "copy_me_please.txt"))
				So(err, ShouldBeNil)
				So(newFileContents, ShouldResemble, []byte("hello"))

			})

			Convey("additional scp options should be passed correctly to the"+
				" command", func() {

				// scp recursively, using the -r flag
				scpCmd := &scpCommand{
					Source:         directoryToScp,
					Dest:           targetDirectory,
					Stdout:         ioutil.Discard,
					Stderr:         ioutil.Discard,
					RemoteHostName: TestRemote,
					User:           TestRemoteUser,
					Options:        []string{"-i", TestRemoteKey, "-r"},
					SourceIsRemote: false,
				}
				So(scpCmd.Run(ctx), ShouldBeNil)

				// make sure the entire directory was scp-ed over
				nestedFileContents, err := ioutil.ReadFile(
					filepath.Join(targetDirectory, "copy_my_children_please",
						"copy_me_too_please.txt"))
				So(err, ShouldBeNil)
				So(nestedFileContents, ShouldResemble, []byte("hi"))
			})

		})

	})
}
explode_data.jsonl/6952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1555 }
[ 2830, 3393, 3326, 79, 4062, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 4886, 5654, 1576, 2205, 7497, 29230, 6546, 374, 537, 11537, 5130, 20985, 1669, 2266, 90988, 741, 93070, 5617, 445, 2354, 3542, 311, 69657, 497, 259, 11, 2915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMultilingualDisableLanguage builds a multilingual site with the "nn"
// language disabled and verifies that no nn pages are created or reachable
// in any of the page collections.
func TestMultilingualDisableLanguage(t *testing.T) {
	t.Parallel()
	assert := require.New(t)

	fs, cfg := newTestBundleSourcesMultilingual(t)
	cfg.Set("disableLanguages", []string{"nn"})
	assert.NoError(loadDefaultSettingsFor(cfg))
	assert.NoError(loadLanguageSettings(cfg, nil))

	sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
	assert.NoError(err)
	// Only the enabled language's site remains.
	assert.Equal(1, len(sites.Sites))

	assert.NoError(sites.Build(BuildCfg{}))

	s := sites.Sites[0]

	assert.Equal(8, len(s.RegularPages()))
	assert.Equal(16, len(s.Pages()))
	// No nn pages
	assert.Equal(16, len(s.AllPages()))

	// Neither the raw page store nor the public collection may contain any
	// page in the disabled language.
	for _, p := range s.rawAllPages {
		assert.True(p.Language().Lang != "nn")
	}
	for _, p := range s.AllPages() {
		assert.True(p.Language().Lang != "nn")
	}
}
explode_data.jsonl/68128
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 40404, 49823, 25479, 13806, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 6948, 1669, 1373, 7121, 1155, 340, 53584, 11, 13286, 1669, 501, 2271, 8409, 32200, 40404, 49823, 1155, 340, 50286, 4202, 445, 18015, 59286, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestMaxStackSize checks that compression at every level still works when
// the maximum goroutine stack size is clamped to 64 KiB, forcing stack
// regrowth during Write.
func TestMaxStackSize(t *testing.T) {
	// This test must not run in parallel with other tests as debug.SetMaxStack
	// affects all goroutines.
	n := debug.SetMaxStack(1 << 16)
	defer debug.SetMaxStack(n)

	var wg sync.WaitGroup
	defer wg.Wait()

	b := make([]byte, 1<<20)
	for level := HuffmanOnly; level <= BestCompression; level++ {
		// Run in separate goroutine to increase probability of stack regrowth.
		wg.Add(1)
		go func(level int) {
			defer wg.Done()
			zw, err := NewWriter(ioutil.Discard, level)
			if err != nil {
				t.Errorf("level %d, NewWriter() = %v, want nil", level, err)
			}
			if n, err := zw.Write(b); n != len(b) || err != nil {
				t.Errorf("level %d, Write() = (%d, %v), want (%d, nil)", level, n, err, len(b))
			}
			if err := zw.Close(); err != nil {
				t.Errorf("level %d, Close() = %v, want nil", level, err)
			}
			zw.Reset(ioutil.Discard)
		}(level)
	}
}
explode_data.jsonl/81417
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 5974, 74026, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 1273, 1969, 537, 1598, 304, 15279, 448, 1008, 7032, 438, 7390, 4202, 5974, 4336, 198, 197, 322, 21501, 678, 45198, 28628, 624, 9038, 1669, 7390, 4202, 5974, 4336, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestTxNotifierMultipleHistoricalSpendRescans ensures the TxNotifier
// requests at most one historical spend rescan per outpoint: the first
// registration triggers one, later registrations reuse the pending rescan,
// and once spend details are cached no rescan is requested at all.
func TestTxNotifierMultipleHistoricalSpendRescans(t *testing.T) {
	t.Parallel()

	const startingHeight = 10
	hintCache := newMockHintCache()
	n := chainntnfs.NewTxNotifier(
		startingHeight, chainntnfs.ReorgSafetyLimit, hintCache, hintCache,
	)

	// The first registration for an outpoint in the notifier should request
	// a historical spend rescan as it does not have a historical view of
	// the chain.
	op := wire.OutPoint{Index: 1}
	ntfn1, err := n.RegisterSpend(&op, testRawScript, 1)
	if err != nil {
		t.Fatalf("unable to register spend ntfn: %v", err)
	}
	if ntfn1.HistoricalDispatch == nil {
		t.Fatal("expected to receive historical dispatch request")
	}

	// We'll register another spend notification for the same outpoint. This
	// should not request a historical spend rescan since the first one is
	// still pending.
	ntfn2, err := n.RegisterSpend(&op, testRawScript, 1)
	if err != nil {
		t.Fatalf("unable to register spend ntfn: %v", err)
	}
	if ntfn2.HistoricalDispatch != nil {
		t.Fatal("received unexpected historical rescan request")
	}

	// Finally, we'll mark the ongoing historical rescan as complete and
	// register another notification. We should also expect not to see a
	// historical rescan request since the confirmation details should be
	// cached.
	spendDetails := &chainntnfs.SpendDetail{
		SpentOutPoint:     &op,
		SpenderTxHash:     &chainntnfs.ZeroHash,
		SpendingTx:        wire.NewMsgTx(2),
		SpenderInputIndex: 0,
		SpendingHeight:    startingHeight - 1,
	}
	err = n.UpdateSpendDetails(
		ntfn1.HistoricalDispatch.SpendRequest, spendDetails,
	)
	if err != nil {
		t.Fatalf("unable to update spend details: %v", err)
	}

	ntfn3, err := n.RegisterSpend(&op, testRawScript, 1)
	if err != nil {
		t.Fatalf("unable to register spend ntfn: %v", err)
	}
	if ntfn3.HistoricalDispatch != nil {
		t.Fatal("received unexpected historical rescan request")
	}
}
explode_data.jsonl/67717
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 655 }
[ 2830, 3393, 31584, 64729, 32089, 48983, 938, 50, 3740, 1061, 66, 596, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 4777, 5916, 3640, 284, 220, 16, 15, 198, 9598, 396, 8233, 1669, 501, 11571, 26987, 8233, 741, 9038, 1669, 878...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestCrUnmergedSetMtimeAndRemoveModifiedDir(t *testing.T) { origMtime := time.Now().Add(1 * time.Minute) targetMtime := time.Now().Add(2 * time.Minute) test(t, users("alice", "bob"), as(alice, mkdir("a/b/c"), mkfile("a/b/c/d", "hello"), setmtime("a/b/c", origMtime), setmtime("a/b", origMtime), ), as(bob, disableUpdates(), ), as(alice, mkfile("a/b/c/e", "hello2"), mkfile("a/b/f", "hello3"), setmtime("a/b/c", origMtime), setmtime("a/b", origMtime), ), as(bob, noSync(), setmtime("a/b/c", targetMtime), setmtime("a/b", targetMtime), rm("a/b/c/d"), rmdir("a/b/c"), rmdir("a/b"), reenableUpdates(), lsdir("", m{"a$": "DIR"}), lsdir("a", m{"b$": "DIR"}), lsdir("a/b", m{"c$": "DIR", "f$": "FILE"}), mtime("a/b", origMtime), lsdir("a/b/c", m{"e$": "FILE"}), mtime("a/b/c", origMtime), read("a/b/c/e", "hello2"), read("a/b/f", "hello3"), ), as(alice, lsdir("", m{"a$": "DIR"}), lsdir("a", m{"b$": "DIR"}), lsdir("a/b", m{"c$": "DIR", "f$": "FILE"}), mtime("a/b", origMtime), lsdir("a/b/c", m{"e$": "FILE"}), mtime("a/b/c", origMtime), read("a/b/c/e", "hello2"), read("a/b/f", "hello3"), ), ) }
explode_data.jsonl/31379
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 672 }
[ 2830, 3393, 16001, 1806, 40354, 1649, 44, 1678, 3036, 13021, 19148, 6184, 1155, 353, 8840, 836, 8, 341, 197, 4670, 44, 1678, 1669, 882, 13244, 1005, 2212, 7, 16, 353, 882, 75770, 340, 28861, 44, 1678, 1669, 882, 13244, 1005, 2212, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBufferScanSize(t *testing.T) { data := []byte("5 test 4 wtf 3 lol 2 ") buffer := NewSyslogBuffer() buffer.Append(data) assert.Equal(t, 5, buffer.scanSize()) assert.Equal(t, "test 4 wtf 3 lol 2 ", string(buffer.Buffer)) }
explode_data.jsonl/18081
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 4095, 26570, 1695, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 445, 20, 1273, 220, 19, 289, 8935, 220, 18, 27409, 220, 17, 256, 59928, 31122, 1669, 1532, 32792, 839, 4095, 741, 31122, 8982, 2592, 692, 6948, 12808...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRest_CreateWithPictures(t *testing.T) { ts, svc, teardown := startupT(t) defer func() { teardown() os.RemoveAll("/tmp/remark42") }() lgr.Setup(lgr.Debug, lgr.CallerFile, lgr.CallerFunc) imageService := image.NewService(&image.FileSystem{ Staging: "/tmp/remark42/images.staging", Location: "/tmp/remark42/images", }, image.ServiceParams{ EditDuration: 100 * time.Millisecond, MaxSize: 2000, }) defer imageService.Close(context.Background()) svc.privRest.imageService = imageService svc.ImageService = imageService dataService := svc.DataService dataService.ImageService = svc.ImageService svc.privRest.dataService = dataService uploadPicture := func(file string) (id string) { bodyBuf := &bytes.Buffer{} bodyWriter := multipart.NewWriter(bodyBuf) fileWriter, err := bodyWriter.CreateFormFile("file", file) require.NoError(t, err) _, err = io.Copy(fileWriter, gopherPNG()) require.NoError(t, err) contentType := bodyWriter.FormDataContentType() require.NoError(t, bodyWriter.Close()) client := http.Client{} req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/api/v1/picture", ts.URL), bodyBuf) require.NoError(t, err) req.Header.Add("Content-Type", contentType) req.Header.Add("X-JWT", devToken) resp, err := client.Do(req) assert.NoError(t, err) assert.Equal(t, 200, resp.StatusCode) body, err := ioutil.ReadAll(resp.Body) require.NoError(t, err) m := map[string]string{} err = json.Unmarshal(body, &m) assert.NoError(t, err) return m["id"] } var ids [3]string for i := range ids { ids[i] = uploadPicture(fmt.Sprintf("pic%d.png", i)) } text := fmt.Sprintf(`text 123 ![](/api/v1/picture/%s) *xxx* ![](/api/v1/picture/%s) ![](/api/v1/picture/%s)`, ids[0], ids[1], ids[2]) body := fmt.Sprintf(`{"text": "%s", "locator":{"url": "https://radio-t.com/blah1", "site": "remark42"}}`, text) resp, err := post(t, ts.URL+"/api/v1/comment", body) assert.NoError(t, err) b, err := ioutil.ReadAll(resp.Body) assert.NoError(t, err) require.Equal(t, http.StatusCreated, resp.StatusCode, string(b)) 
for i := range ids { _, err = os.Stat("/tmp/remark42/images/" + ids[i]) assert.Error(t, err, "picture %d not moved from staging yet", i) } time.Sleep(1500 * time.Millisecond) for i := range ids { _, err = os.Stat("/tmp/remark42/images/" + ids[i]) assert.NoError(t, err, "picture %d moved from staging and available in permanent location", i) } }
explode_data.jsonl/37406
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1001 }
[ 2830, 3393, 12416, 34325, 2354, 76646, 1155, 353, 8840, 836, 8, 341, 57441, 11, 46154, 11, 49304, 1669, 20567, 51, 1155, 340, 16867, 2915, 368, 341, 197, 197, 665, 37496, 741, 197, 25078, 84427, 4283, 5173, 14, 37448, 19, 17, 1138, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPvmBlocks(t *testing.T) { p := NewPersist() ctx := context.Background() tm := time.Now().UTC().Truncate(1 * time.Second) v := &PvmBlocks{} v.ID = "id1" v.ChainID = "cid1" v.Type = models.BlockTypeAbort v.ParentID = "pid1" v.Serialization = []byte("ser1") v.CreatedAt = tm stream := health.NewStream() rawDBConn, err := dbr.Open(TestDB, TestDSN, stream) if err != nil { t.Fatal("db fail", err) } _, _ = rawDBConn.NewSession(stream).DeleteFrom(TablePvmBlocks).Exec() err = p.InsertPvmBlocks(ctx, rawDBConn.NewSession(stream), v, true) if err != nil { t.Fatal("insert fail", err) } fv, err := p.QueryPvmBlocks(ctx, rawDBConn.NewSession(stream), v) if err != nil { t.Fatal("query fail", err) } if !reflect.DeepEqual(*v, *fv) { t.Fatal("compare fail") } v.ChainID = "cid2" v.Type = models.BlockTypeCommit v.ParentID = "pid2" v.Serialization = []byte("ser2") v.CreatedAt = tm err = p.InsertPvmBlocks(ctx, rawDBConn.NewSession(stream), v, true) if err != nil { t.Fatal("insert fail", err) } fv, err = p.QueryPvmBlocks(ctx, rawDBConn.NewSession(stream), v) if err != nil { t.Fatal("query fail", err) } if string(fv.Serialization) != "ser2" { t.Fatal("compare fail") } if !reflect.DeepEqual(*v, *fv) { t.Fatal("compare fail") } }
explode_data.jsonl/12934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 570 }
[ 2830, 3393, 47, 7338, 29804, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 61267, 741, 20985, 1669, 2266, 19047, 741, 3244, 76, 1669, 882, 13244, 1005, 21183, 1005, 1282, 26900, 7, 16, 353, 882, 32435, 692, 5195, 1669, 609, 47, 7338...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestMempoolConfigValidateBasic(t *testing.T) { cfg := TestMempoolConfig() assert.NoError(t, cfg.ValidateBasic()) fieldsToTest := []string{ "Size", "MaxTxsBytes", "CacheSize", "MaxTxBytes", } for _, fieldName := range fieldsToTest { reflect.ValueOf(cfg).Elem().FieldByName(fieldName).SetInt(-1) assert.Error(t, cfg.ValidateBasic()) reflect.ValueOf(cfg).Elem().FieldByName(fieldName).SetInt(0) } }
explode_data.jsonl/66918
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 44, 3262, 1749, 2648, 17926, 15944, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 3393, 44, 3262, 1749, 2648, 741, 6948, 35699, 1155, 11, 13286, 47667, 15944, 12367, 55276, 1249, 2271, 1669, 3056, 917, 515, 197, 197, 1, 1695, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewLogHandlerOutputStdout(t *testing.T) { testCases := []struct { desc string config *types.AccessLog expectedLog string }{ { desc: "default config", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, }, expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testFrontend" "http://127.0.0.1/testBackend" 1ms`, }, { desc: "default config with empty filters", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Filters: &types.AccessLogFilters{}, }, expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testFrontend" "http://127.0.0.1/testBackend" 1ms`, }, { desc: "Status code filter not matching", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Filters: &types.AccessLogFilters{ StatusCodes: []string{"200"}, }, }, expectedLog: ``, }, { desc: "Status code filter matching", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Filters: &types.AccessLogFilters{ StatusCodes: []string{"123"}, }, }, expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testFrontend" "http://127.0.0.1/testBackend" 1ms`, }, { desc: "Duration filter not matching", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Filters: &types.AccessLogFilters{ MinDuration: parse.Duration(1 * time.Hour), }, }, expectedLog: ``, }, { desc: "Duration filter matching", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Filters: &types.AccessLogFilters{ MinDuration: parse.Duration(1 * time.Millisecond), }, }, expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testFrontend" "http://127.0.0.1/testBackend" 1ms`, }, { desc: "Retry attempts filter matching", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Filters: 
&types.AccessLogFilters{ RetryAttempts: true, }, }, expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testFrontend" "http://127.0.0.1/testBackend" 1ms`, }, { desc: "Default mode keep", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "keep", }, }, expectedLog: `TestHost - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testFrontend" "http://127.0.0.1/testBackend" 1ms`, }, { desc: "Default mode keep with override", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "keep", Names: types.FieldNames{ ClientHost: "drop", }, }, }, expectedLog: `- - TestUser [13/Apr/2016:07:14:19 -0700] "POST testpath HTTP/0.0" 123 12 "testReferer" "testUserAgent" 23 "testFrontend" "http://127.0.0.1/testBackend" 1ms`, }, { desc: "Default mode drop", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", }, }, expectedLog: `- - - [-] "- - -" - - "testReferer" "testUserAgent" - - - 0ms`, }, { desc: "Default mode drop with override", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", Names: types.FieldNames{ ClientHost: "drop", ClientUsername: "keep", }, }, }, expectedLog: `- - TestUser [-] "- - -" - - "testReferer" "testUserAgent" - - - 0ms`, }, { desc: "Default mode drop with header dropped", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", Names: types.FieldNames{ ClientHost: "drop", ClientUsername: "keep", }, Headers: &types.FieldHeaders{ DefaultMode: "drop", }, }, }, expectedLog: `- - TestUser [-] "- - -" - - "-" "-" - - - 0ms`, }, { desc: "Default mode drop with header redacted", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Fields: 
&types.AccessLogFields{ DefaultMode: "drop", Names: types.FieldNames{ ClientHost: "drop", ClientUsername: "keep", }, Headers: &types.FieldHeaders{ DefaultMode: "redact", }, }, }, expectedLog: `- - TestUser [-] "- - -" - - "REDACTED" "REDACTED" - - - 0ms`, }, { desc: "Default mode drop with header redacted", config: &types.AccessLog{ FilePath: "", Format: CommonFormat, Fields: &types.AccessLogFields{ DefaultMode: "drop", Names: types.FieldNames{ ClientHost: "drop", ClientUsername: "keep", }, Headers: &types.FieldHeaders{ DefaultMode: "keep", Names: types.FieldHeaderNames{ "Referer": "redact", }, }, }, }, expectedLog: `- - TestUser [-] "- - -" - - "REDACTED" "testUserAgent" - - - 0ms`, }, } for _, test := range testCases { test := test t.Run(test.desc, func(t *testing.T) { // NOTE: It is not possible to run these cases in parallel because we capture Stdout file, restoreStdout := captureStdout(t) defer restoreStdout() doLogging(t, test.config) written, err := ioutil.ReadFile(file.Name()) require.NoError(t, err, "unable to read captured stdout from file") assertValidLogData(t, test.expectedLog, written) }) } }
explode_data.jsonl/20081
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2612 }
[ 2830, 3393, 3564, 2201, 3050, 5097, 22748, 411, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 41653, 286, 914, 198, 197, 25873, 414, 353, 9242, 35645, 2201, 198, 197, 42400, 2201, 914, 198, 197, 59403, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBadgerGCRunInfo(t *testing.T) { ctx := inslogger.TestContext(t) t.Run("call every time if frequency equal 1", func(t *testing.T) { t.Parallel() runner := &TestBadgerGCRunner{} info := executor.NewBadgerGCRunInfo(runner, 1) for i := 1; i < 5; i++ { done := info.RunGCIfNeeded(ctx) <-done require.Equal(t, uint(i), runner.getCount()) } }) t.Run("no call if frequency equal 0", func(t *testing.T) { t.Parallel() runner := &TestBadgerGCRunner{} info := executor.NewBadgerGCRunInfo(runner, 0) for i := 1; i < 5; i++ { done := info.RunGCIfNeeded(ctx) <-done require.Equal(t, uint(0), runner.getCount()) } }) t.Run("even calls if frequency equal 2", func(t *testing.T) { t.Parallel() runner := &TestBadgerGCRunner{} info := executor.NewBadgerGCRunInfo(runner, 2) for i := 1; i < 5; i++ { done := info.RunGCIfNeeded(ctx) <-done require.Equal(t, uint(i/2), runner.getCount()) } }) }
explode_data.jsonl/68983
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 17082, 1389, 22863, 6727, 1731, 1155, 353, 8840, 836, 8, 1476, 20985, 1669, 1640, 9786, 8787, 1972, 1155, 692, 3244, 16708, 445, 6659, 1449, 882, 421, 11639, 6144, 220, 16, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 324...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPeerAddress(t *testing.T) { localIP, err := comm.GetLocalIP() require.NoError(t, err) tests := []struct { name string settings map[string]interface{} expectedPeerAddress string }{ { name: "test1", settings: map[string]interface{}{ "peer.addressAutoDetect": false, "peer.address": "testing.com:7051", }, expectedPeerAddress: "testing.com:7051", }, { name: "test2", settings: map[string]interface{}{ "peer.addressAutoDetect": true, "peer.address": "testing.com:7051", }, expectedPeerAddress: net.JoinHostPort(localIP, "7051"), }, { name: "test3", settings: map[string]interface{}{ "peer.addressAutoDetect": false, "peer.address": "0.0.0.0:7051", }, expectedPeerAddress: net.JoinHostPort(localIP, "7051"), }, { name: "test4", settings: map[string]interface{}{ "peer.addressAutoDetect": true, "peer.address": "127.0.0.1:7051", }, expectedPeerAddress: net.JoinHostPort(localIP, "7051"), }, } for _, test := range tests { test := test t.Run(test.name, func(t *testing.T) { for k, v := range test.settings { viper.Set(k, v) } c, err := GlobalConfig() require.NoError(t, err, "GlobalConfig returned unexpected error") require.Equal(t, test.expectedPeerAddress, c.PeerAddress) }) } }
explode_data.jsonl/71571
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 636 }
[ 2830, 3393, 30888, 4286, 1155, 353, 8840, 836, 8, 341, 8854, 3298, 11, 1848, 1669, 1063, 2234, 7319, 3298, 741, 17957, 35699, 1155, 11, 1848, 692, 78216, 1669, 3056, 1235, 341, 197, 11609, 394, 914, 198, 197, 62930, 310, 2415, 14032, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetSingleCommit(t *testing.T) { ts := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet { t.Errorf("Bad method: %s", r.Method) } if r.URL.Path != "/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e" { t.Errorf("Bad request path: %s", r.URL.Path) } fmt.Fprint(w, `{ "commit": { "tree": { "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e" } } }`) })) defer ts.Close() c := getClient(ts.URL) commit, err := c.GetSingleCommit("octocat", "Hello-World", "6dcb09b5b57875f334f61aebed695e2e4193db5e") if err != nil { t.Errorf("Didn't expect error: %v", err) } else if commit.Commit.Tree.SHA != "6dcb09b5b57875f334f61aebed695e2e4193db5e" { t.Errorf("Wrong tree-hash: %s", commit.Commit.Tree.SHA) } }
explode_data.jsonl/6257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 422 }
[ 2830, 3393, 1949, 10888, 33441, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 13470, 1220, 2836, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 435, 20798, 961, 1758, 20798, 1949, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInitNodeValidatorFiles(t *testing.T) { home, err := ioutil.TempDir("", "mock-sdk-cmd") require.Nil(t, err) defer func() { os.RemoveAll(home) }() viper.Set(cli.HomeFlag, home) viper.Set(client.FlagName, "moniker") cfg, err := tcmd.ParseConfig() require.Nil(t, err) nodeID, valPubKey, err := InitializeNodeValidatorFiles(cfg) require.Nil(t, err) require.NotEqual(t, "", nodeID) require.NotEqual(t, 0, len(valPubKey.Bytes())) }
explode_data.jsonl/17822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 3803, 1955, 14256, 10809, 1155, 353, 8840, 836, 8, 341, 197, 5117, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 16712, 35478, 1786, 2277, 1138, 17957, 59678, 1155, 11, 1848, 340, 16867, 2915, 368, 341, 197, 25078, 84427, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateTemplate(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() HandleValidateSuccessfully(t, ValidateOutput) opts := stacktemplates.ValidateOpts{ Template: `{ "heat_template_version": "2013-05-23", "description": "Simple template to test heat commands", "parameters": { "flavor": { "default": "m1.tiny", "type": "string" } }, "resources": { "hello_world": { "type": "OS::Nova::Server", "properties": { "key_name": "heat_key", "flavor": { "get_param": "flavor" }, "image": "ad091b52-742f-469e-8f3c-fd81cadf0743", "user_data": "#!/bin/bash -xv\necho \"hello world\" &gt; /root/hello-world.txt\n" } } } }`, } actual, err := stacktemplates.Validate(fake.ServiceClient(), opts).Extract() th.AssertNoErr(t, err) expected := ValidateExpected th.AssertDeepEquals(t, expected, actual) }
explode_data.jsonl/42760
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 457 }
[ 2830, 3393, 17926, 7275, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 741, 197, 6999, 17926, 35959, 1155, 11, 23282, 5097, 692, 64734, 1669, 5611, 15463, 47667, 43451, 515, 197, 197, 7275, 25, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHandleToken(t *testing.T) { mfaAttempt = 0 pr := &mocks.Prompter{} prompter.SetPrompter(pr) pr.Mock.On("Password", "Enter Token Code (PIN + Token / Passcode for RSA)").Return("5309") data, err := ioutil.ReadFile("example/token.html") require.Nil(t, err) doc, err := goquery.NewDocumentFromReader(bytes.NewReader(data)) require.Nil(t, err) ac := Client{} loginDetails := creds.LoginDetails{ Username: "fdsa", Password: "secret", URL: "https://example.com/foo", } ctx := context.WithValue(context.Background(), ctxKey("login"), &loginDetails) _, req, err := ac.handleToken(ctx, doc) require.Nil(t, err) b, err := ioutil.ReadAll(req.Body) require.Nil(t, err) s := string(b[:]) require.Contains(t, s, "pf.pass=5309") }
explode_data.jsonl/4324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 305 }
[ 2830, 3393, 6999, 3323, 1155, 353, 8840, 836, 8, 341, 2109, 3632, 47052, 284, 220, 15, 198, 25653, 1669, 609, 16712, 82, 1069, 14749, 261, 16094, 3223, 14749, 261, 4202, 54615, 261, 24974, 340, 25653, 24664, 8071, 445, 4876, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNotContainsProjectId(t *testing.T) { endpointContains := []string{"https://as.eu-de.otc.t-systems.com/autoscaling-api/v1", "https://as.eu-de.otc.t-systems.com/autoscaling-api/v1/", "https://as.eu-de.otc.t-systems.com/autoscaling-api/v1/abc", "https://as.eu-de.otc.t-systems.com/autoscaling-api/V1"} for _, enpoint := range endpointContains { th.AssertEquals(t, false, openstack.ContainsProjectId(enpoint)) } }
explode_data.jsonl/82341
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 2623, 23805, 7849, 764, 1155, 353, 8840, 836, 8, 341, 6246, 2768, 23805, 1669, 3056, 917, 4913, 2428, 1110, 300, 35003, 6810, 79361, 66, 734, 36648, 82, 905, 14, 79301, 81552, 23904, 5457, 16, 756, 197, 197, 57557, 1110, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGASameRNGs(t *testing.T) { ga1, err := NewDefaultGAConfig().NewGA() if err != nil { t.Errorf("Expected nil, got %v", err) } ga2, err := NewDefaultGAConfig().NewGA() if err != nil { t.Errorf("Expected nil, got %v", err) } // Use the same random number generators ga1.RNG = rand.New(rand.NewSource(42)) ga2.RNG = rand.New(rand.NewSource(42)) // Run the first GA if err = ga1.init(NewVector); err != nil { t.Errorf("Expected nil, got %v", err) } for i := 0; i < 20; i++ { ga1.evolve() } // Run the second GA if err = ga2.init(NewVector); err != nil { t.Errorf("Expected nil, got %v", err) } for i := 0; i < 20; i++ { ga2.evolve() } // Compare best individuals if ga1.HallOfFame[0].Fitness != ga2.HallOfFame[0].Fitness { t.Errorf("Mismatch: %f != %f", ga1.HallOfFame[0].Fitness, ga2.HallOfFame[0].Fitness) } }
explode_data.jsonl/82088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 38, 1911, 373, 49, 6140, 82, 1155, 353, 8840, 836, 8, 341, 3174, 64, 16, 11, 1848, 1669, 1532, 3675, 16128, 2648, 1005, 3564, 16128, 741, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 18896, 2092, 11, 2684, 1018, 85,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestNewImageStore(t *testing.T) { db := &tests.DBMock{} s := NewImageStore(db) assert.IsType(t, &ImageStore{}, s) assert.Equal(t, db, s.db) }
explode_data.jsonl/53775
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 3564, 1906, 6093, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 609, 23841, 22537, 11571, 16094, 1903, 1669, 1532, 1906, 6093, 9791, 692, 6948, 4506, 929, 1155, 11, 609, 1906, 6093, 22655, 274, 340, 6948, 12808, 1155, 11, 2927,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestUintUint32(t *testing.T) { var expected uint32 = 123456789 ti := Uint{ ValidFlag: true, uint: uint64(expected), } if ti.Uint32() != expected { t.Errorf("actual:%d, expected:%d", ti.Uint32(), expected) } }
explode_data.jsonl/13397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 21570, 21570, 18, 17, 1155, 353, 8840, 836, 8, 341, 2405, 3601, 2622, 18, 17, 284, 220, 16, 17, 18, 19, 20, 21, 22, 23, 24, 198, 72859, 1669, 27883, 515, 197, 197, 4088, 12135, 25, 830, 345, 197, 8254, 25, 414, 2622,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRegisterWorker(t *testing.T) { tmp, err := ioutil.TempDir("", "TestRegisterWorker") if err != nil { t.Fatal(err) } gaia.Cfg = &gaia.Config{ Logger: hclog.NewNullLogger(), DataPath: tmp, HomePath: tmp, PipelinePath: tmp, DevMode: true, } // Initialize store m := &mockStorageService{} services.MockStorageService(m) dataStore, _ := services.StorageService() defer func() { services.MockStorageService(nil) }() // Initialize certificate store _, err = services.CertificateService() if err != nil { t.Fatalf("cannot initialize certificate service: %v", err) } // Initialize vault v, err := services.VaultService(nil) if err != nil { t.Fatalf("cannot initialize vault service: %v", err) } // Initialize memdb service db, err := services.MemDBService(dataStore) if err != nil { t.Fatal(err) } // Generate global worker secret secret := []byte(security.GenerateRandomUUIDV5()) v.Add(gaia.WorkerRegisterKey, secret) if err := v.SaveSecrets(); err != nil { t.Fatal(err) } // Initialize echo e := echo.New() if err := InitHandlers(e); err != nil { t.Fatal(err) } // Test with wrong global secret t.Run("wrong global secret", func(t *testing.T) { body := registerWorker{ Secret: "random-wrong-secret", } bodyBytes, _ := json.Marshal(body) req := httptest.NewRequest(echo.POST, "/api/"+gaia.APIVersion+"/worker/register", bytes.NewBuffer(bodyBytes)) req.Header.Set("Content-Type", "application/json") rec := httptest.NewRecorder() c := e.NewContext(req, rec) if err := RegisterWorker(c); err != nil { t.Fatal(err) } if rec.Code != http.StatusForbidden { t.Fatalf("expected response code %v got %v", http.StatusForbidden, rec.Code) } bodyBytes, err := ioutil.ReadAll(rec.Body) if err != nil { t.Fatalf("cannot read response body: %s", err.Error()) } if string(bodyBytes[:]) != "wrong global worker secret provided" { t.Fatal("return message is not correct") } }) workerName := "my-worker" t.Run("register worker success", func(t *testing.T) { body := registerWorker{ Name: workerName, Secret: 
string(secret[:]), Tags: []string{"first-tag", "second-tag", "third-tag"}, } bodyBytes, _ := json.Marshal(body) req := httptest.NewRequest(echo.POST, "/api/"+gaia.APIVersion+"/worker/register", bytes.NewBuffer(bodyBytes)) req.Header.Set("Content-Type", "application/json") rec := httptest.NewRecorder() c := e.NewContext(req, rec) if err := RegisterWorker(c); err != nil { t.Fatal(err) } bodyBytes, err := ioutil.ReadAll(rec.Body) if err != nil { t.Fatalf("cannot read response body: %s", err.Error()) } if rec.Code != http.StatusOK { t.Fatalf("expected response code %v got %v; body: %s", http.StatusOK, rec.Code, string(bodyBytes[:])) } resp := &registerResponse{} if err := json.Unmarshal(bodyBytes, resp); err != nil { t.Fatalf("failed to unmarshal response: %#v", bodyBytes) } if resp.UniqueID == "" { t.Fatal("unique id should be set but got empty string") } if resp.CACert == "" { t.Fatal("ca cert should be set but got empty string") } if resp.Key == "" { t.Fatal("key cert should be set but got empty string") } if resp.Cert == "" { t.Fatal("cert should be set but got empty string") } // Check if store holds the new registered worker worker, err := dataStore.WorkerGet(resp.UniqueID) if err != nil { t.Fatal(err) } if worker == nil { t.Fatal("failed to get worker from store. It was nil.") } // Check if memdb service holds the data worker, err = db.GetWorker(resp.UniqueID) if err != nil { t.Fatal(err) } if worker == nil { t.Fatal("failed to get worker from memdb cache. It was nil.") } }) }
explode_data.jsonl/47210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1477 }
[ 2830, 3393, 8690, 21936, 1155, 353, 8840, 836, 8, 341, 20082, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 2271, 8690, 21936, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 3174, 64, 685, 727, 4817, 284, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1