text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestFromMT(t *testing.T) { Convey("invalid fromMT types should panic", t, func() { So(func() { fromMT(sfxmodel.MetricType(1001)) }, ShouldPanic) }) }
explode_data.jsonl/81472
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 3830, 8505, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 11808, 504, 8505, 4494, 1265, 21975, 497, 259, 11, 2915, 368, 341, 197, 76912, 18552, 368, 341, 298, 42727, 8505, 1141, 8298, 2528, 1321, 16340, 929, 7, 16, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_SingleCommandWithoutSubcommand(t *testing.T) { out, err := test.ExecuteCommand(buildTestCmd(), "completion") if err != nil { assert.Error(t, err) } assert.Contains(t, out, `Error: accepts 1 arg(s), received 0`) assert.Contains(t, out, `completion [bash|zsh|fish|powershell]`) }
explode_data.jsonl/59258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 1098, 2173, 4062, 26040, 3136, 5631, 1155, 353, 8840, 836, 8, 341, 13967, 11, 1848, 1669, 1273, 13827, 4062, 43333, 2271, 15613, 1507, 330, 43312, 1138, 743, 1848, 961, 2092, 341, 197, 6948, 6141, 1155, 11, 1848, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewRxpk(t *testing.T) { data := []byte{2, 165, 210, 1} gw := &model.Gateway{ NsAddress: "127.0.0.1", MacAddress: RandomEUI(), RxpkDate: 1488931200, } rxpk := NewRxpk(data, gw) seconds := time.Time(rxpk.Time).UTC().Unix() if seconds != 1488931200 { t.Fatal("The time assigned to the rxpk should be the one present in the gateway object when set") } if int(rxpk.Size) != len(data) { t.Fatalf("Size parameter should represent the length of the data sent, found %d expected %d", rxpk.Size, len(data)) } strData := base64.StdEncoding.EncodeToString(data) if rxpk.Data != strData { t.Fatal("Data parameter should represent the base64 encoding of data given") } }
explode_data.jsonl/39071
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 3564, 50639, 20819, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 90, 17, 11, 220, 16, 21, 20, 11, 220, 17, 16, 15, 11, 220, 16, 532, 3174, 86, 1669, 609, 2528, 1224, 12043, 515, 197, 18317, 82, 4286, 25, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTypeSystem_ObjectsMustHaveFields_RejectsAnObjectTypeWithMissingFields(t *testing.T) { badObject := graphql.NewObject(graphql.ObjectConfig{ Name: "SomeObject", }) _, err := schemaWithFieldType(badObject) expectedError := `SomeObject fields must be an object with field names as keys or a function which return such an object.` if err == nil || err.Error() != expectedError { t.Fatalf("Expected error: %v, got %v", expectedError, err) } }
explode_data.jsonl/79144
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 929, 2320, 62, 11543, 31776, 12116, 8941, 50693, 583, 82, 2082, 49530, 2354, 25080, 8941, 1155, 353, 8840, 836, 8, 341, 2233, 329, 1190, 1669, 48865, 7121, 1190, 24312, 1470, 8348, 2648, 515, 197, 21297, 25, 330, 8373, 1190,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTimePoint_Before(t *testing.T) { type fields struct { Hour int Minute int } type args struct { t2 models.TimePoint } tests := []struct { name string fields fields args args want bool }{ {"tc1", fields{12, 30}, args{models.TimePoint{13, 30}}, true}, {"tc2", fields{13, 30}, args{models.TimePoint{13, 30}}, false}, {"tc3", fields{12, 29}, args{models.TimePoint{13, 30}}, true}, {"tc4", fields{23, 1}, args{models.TimePoint{1, 23}}, false}, {"tc5", fields{0, 0}, args{models.TimePoint{0, 0}}, false}, {"tc6", fields{0, 0}, args{models.TimePoint{0, 1}}, true}, {"tc7", fields{0, 59}, args{models.TimePoint{0, 1}}, false}, {"tc8", fields{23, 0}, args{models.TimePoint{23, 0}}, false}, {"tc9", fields{23, 0}, args{models.TimePoint{23, 1}}, true}, {"1c0", fields{10, 10}, args{models.TimePoint{9, 11}}, false}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t1 := &models.TimePoint{ Hour: tt.fields.Hour, Minute: tt.fields.Minute, } if got := t1.Before(tt.args.t2); got != tt.want { t.Errorf("Before() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/74981
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 512 }
[ 2830, 3393, 1462, 2609, 1668, 4642, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 13292, 413, 256, 526, 198, 197, 197, 55193, 526, 198, 197, 532, 13158, 2827, 2036, 341, 197, 3244, 17, 4119, 16299, 2609, 198, 197, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPeerConnection_GetConfiguration(t *testing.T) { pc, err := NewPeerConnection(Configuration{}) assert.NoError(t, err) expected := Configuration{ ICEServers: []ICEServer{}, ICETransportPolicy: ICETransportPolicyAll, BundlePolicy: BundlePolicyBalanced, RTCPMuxPolicy: RTCPMuxPolicyRequire, ICECandidatePoolSize: 0, } actual := pc.GetConfiguration() assert.True(t, &expected != &actual) assert.Equal(t, expected.ICEServers, actual.ICEServers) assert.Equal(t, expected.ICETransportPolicy, actual.ICETransportPolicy) assert.Equal(t, expected.BundlePolicy, actual.BundlePolicy) assert.Equal(t, expected.RTCPMuxPolicy, actual.RTCPMuxPolicy) // nolint:godox // TODO(albrow): Uncomment this after #513 is fixed. // See: https://github.com/pion/webrtc/issues/513. // assert.Equal(t, len(expected.Certificates), len(actual.Certificates)) assert.Equal(t, expected.ICECandidatePoolSize, actual.ICECandidatePoolSize) assert.NoError(t, pc.Close()) }
explode_data.jsonl/8645
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 3393, 30888, 4526, 13614, 7688, 1155, 353, 8840, 836, 8, 341, 82013, 11, 1848, 1669, 1532, 30888, 4526, 45443, 37790, 6948, 35699, 1155, 11, 1848, 692, 42400, 1669, 12221, 515, 197, 197, 15835, 18729, 25, 1843, 3056, 15835, 2836, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDockerImageReferenceForImage(t *testing.T) { reference, ok := DockerImageReferenceForImage(mockImageStream(SourceTagReferencePolicy), "sha256:c3d8a3642ebfa6bd1fd50c2b8b90e99d3e29af1eac88637678f982cde90993fb") if !ok { t.Fatalf("expected success for source tag policy") } if reference != "test/foo@sha256:bar" { t.Errorf("expected source reference to be 'test/foo@sha256:bar', got %q", reference) } reference, ok = DockerImageReferenceForImage(mockImageStream(SourceTagReferencePolicy), "c3d8a3642ebfa6bd1fd50c2b8b90e99d3e29af1eac88637678f982cde90993fb") if !ok { t.Fatalf("expected success for source tag policy") } if reference != "test/foo@sha256:bar" { t.Errorf("expected source reference to be 'test/foo@sha256:bar', got %q", reference) } reference, ok = DockerImageReferenceForImage(mockImageStream(LocalTagReferencePolicy), "sha256:c3d8a3642ebfa6bd1fd50c2b8b90e99d3e29af1eac88637678f982cde90993fb") if !ok { t.Fatalf("expected success for local reference policy") } if reference != "registry:5000/test/foo@sha256:c3d8a3642ebfa6bd1fd50c2b8b90e99d3e29af1eac88637678f982cde90993fb" { t.Errorf("expected local reference to be 'registry:5000/test/foo@sha256:c3d8a3642ebfa6bd1fd50c2b8b90e99d3e29af1eac88637678f982cde90993fb', got %q", reference) } reference, ok = DockerImageReferenceForImage(mockImageStream(LocalTagReferencePolicy), "sha256:unknown") if ok { t.Errorf("expected failure for unknown image") } }
explode_data.jsonl/40840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 575 }
[ 2830, 3393, 35, 13659, 1906, 8856, 2461, 1906, 1155, 353, 8840, 836, 8, 341, 197, 16291, 11, 5394, 1669, 40549, 1906, 8856, 2461, 1906, 30389, 1906, 3027, 54296, 5668, 8856, 13825, 701, 330, 15247, 17, 20, 21, 48031, 18, 67, 23, 64, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestTwoUsersHavingSameActiveAlarmInReverseOrder(t *testing.T) { result := runLocalShardTest(t, change(user2, alarm1, wire.StatusWarning, time3), change(user1, alarm1, wire.StatusCritical, time1), send(user1), send(user2), ) require.Len(t, result, 2) assert.Equal(t, wire.AlarmDigest{ UserID: user1, ActiveAlarms: []wire.Alarm{ { AlarmID: alarm1, Status: wire.StatusCritical, LatestChangedAt: time1, }, }, }, result[0]) assert.Equal(t, wire.AlarmDigest{ UserID: user2, ActiveAlarms: []wire.Alarm{ { AlarmID: alarm1, Status: wire.StatusWarning, LatestChangedAt: time3, }, }, }, result[1]) }
explode_data.jsonl/3265
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 329 }
[ 2830, 3393, 11613, 7137, 28032, 19198, 5728, 43444, 641, 45695, 4431, 1155, 353, 8840, 836, 8, 341, 9559, 1669, 1598, 7319, 2016, 567, 2271, 1155, 345, 197, 68380, 4277, 17, 11, 16624, 16, 11, 9067, 10538, 12087, 11, 882, 18, 1326, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGrammar(t *testing.T) { assert := assert.New(t) parser := sitter.NewParser() parser.SetLanguage(rust.GetLanguage()) sourceCode := []byte("mod one;") tree := parser.ParseString(nil, sourceCode) assert.Equal( "(source_file (mod_item name: (identifier)))", tree.RootNode().String(), ) }
explode_data.jsonl/70748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 97178, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 55804, 1669, 2444, 465, 7121, 6570, 741, 55804, 4202, 13806, 2601, 590, 2234, 13806, 12367, 47418, 2078, 1669, 3056, 3782, 445, 2593, 825, 91812, 51968, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLocalShell(t *testing.T) { is := is.New(t) s := NewLocalShell() is.NotNil(s) mhash, err := s.Add(bytes.NewBufferString("Hello IPFS Shell tests")) is.Nil(err) is.Equal(mhash, "QmUfZ9rAdhV5ioBzXKdUTh2ZNsz9bzbkaLVyQ8uc8pj21F") }
explode_data.jsonl/61077
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 7319, 25287, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 374, 7121, 1155, 340, 1903, 1669, 1532, 7319, 25287, 741, 19907, 93882, 1141, 692, 2109, 8296, 11, 1848, 1669, 274, 1904, 23158, 7121, 4095, 703, 445, 9707, 6790, 8485,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPlainGenomeWriter_WriteGenome(t *testing.T) { gnome := buildTestGenome(1) outBuf := bytes.NewBufferString("") wr, err := NewGenomeWriter(bufio.NewWriter(outBuf), PlainGenomeEncoding) require.NoError(t, err, "failed to create genome writer") err = wr.WriteGenome(gnome) require.NoError(t, err, "failed to write genome") inputScanner := bufio.NewScanner(strings.NewReader(gnomeStr)) inputScanner.Split(bufio.ScanLines) outScanner := bufio.NewScanner(outBuf) outScanner.Split(bufio.ScanLines) for inputScanner.Scan() { if !outScanner.Scan() { t.Error("Unexpected end of genome data") } inText := inputScanner.Text() outText := outScanner.Text() require.Equal(t, inText, outText, "lines mismatch at") } }
explode_data.jsonl/18936
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 26982, 9967, 635, 6492, 31825, 9967, 635, 1155, 353, 8840, 836, 8, 341, 3174, 17081, 1669, 1936, 2271, 9967, 635, 7, 16, 340, 13967, 15064, 1669, 5820, 7121, 4095, 703, 31764, 6692, 81, 11, 1848, 1669, 1532, 9967, 635, 649...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSSLRedirectOverlay(t *testing.T) { rh, cc, done := setup(t) defer done() // i1 is a stock ingress with force-ssl-redirect on the / route i1 := &v1beta1.Ingress{ ObjectMeta: metav1.ObjectMeta{ Name: "app", Namespace: "default", Annotations: map[string]string{ "ingress.kubernetes.io/force-ssl-redirect": "true", }, }, Spec: v1beta1.IngressSpec{ TLS: []v1beta1.IngressTLS{{ Hosts: []string{"example.com"}, SecretName: "example-tls", }}, Rules: []v1beta1.IngressRule{{ Host: "example.com", IngressRuleValue: v1beta1.IngressRuleValue{ HTTP: &v1beta1.HTTPIngressRuleValue{ Paths: []v1beta1.HTTPIngressPath{{ Path: "/", Backend: v1beta1.IngressBackend{ ServiceName: "app-service", ServicePort: intstr.FromInt(8080), }, }}, }, }, }}, }, } rh.OnAdd(i1) rh.OnAdd(&v1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "example-tls", Namespace: "default", }, Type: "kubernetes.io/tls", Data: secretdata(CERTIFICATE, RSA_PRIVATE_KEY), }) rh.OnAdd(&v1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "app-service", Namespace: "default", }, Spec: v1.ServiceSpec{ Ports: []v1.ServicePort{{ Name: "http", Protocol: "TCP", Port: 8080, TargetPort: intstr.FromInt(8080), }}, }, }) // i2 is an overlay to add the let's encrypt handler. 
i2 := &v1beta1.Ingress{ ObjectMeta: metav1.ObjectMeta{Name: "challenge", Namespace: "nginx-ingress"}, Spec: v1beta1.IngressSpec{ Rules: []v1beta1.IngressRule{{ Host: "example.com", IngressRuleValue: v1beta1.IngressRuleValue{ HTTP: &v1beta1.HTTPIngressRuleValue{ Paths: []v1beta1.HTTPIngressPath{{ Path: "/.well-known/acme-challenge/gVJl5NWL2owUqZekjHkt_bo3OHYC2XNDURRRgLI5JTk", Backend: v1beta1.IngressBackend{ ServiceName: "challenge-service", ServicePort: intstr.FromInt(8009), }, }}, }, }, }}, }, } rh.OnAdd(i2) rh.OnAdd(&v1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "challenge-service", Namespace: "nginx-ingress", }, Spec: v1.ServiceSpec{ Ports: []v1.ServicePort{{ Name: "http", Protocol: "TCP", Port: 8009, TargetPort: intstr.FromInt(8080), }}, }, }) assertRDS(t, cc, "5", virtualhosts( envoy.VirtualHost("example.com", &envoy_api_v2_route.Route{ Match: routePrefix("/.well-known/acme-challenge/gVJl5NWL2owUqZekjHkt_bo3OHYC2XNDURRRgLI5JTk"), Action: routecluster("nginx-ingress/challenge-service/8009/da39a3ee5e"), }, &envoy_api_v2_route.Route{ Match: routePrefix("/"), // match all Action: envoy.UpgradeHTTPS(), }, ), ), virtualhosts( envoy.VirtualHost("example.com", &envoy_api_v2_route.Route{ Match: routePrefix("/.well-known/acme-challenge/gVJl5NWL2owUqZekjHkt_bo3OHYC2XNDURRRgLI5JTk"), Action: routecluster("nginx-ingress/challenge-service/8009/da39a3ee5e"), }, &envoy_api_v2_route.Route{ Match: routePrefix("/"), // match all Action: routecluster("default/app-service/8080/da39a3ee5e"), }, ), )) }
explode_data.jsonl/70753
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1587 }
[ 2830, 3393, 22594, 17725, 32755, 1155, 353, 8840, 836, 8, 341, 7000, 71, 11, 12527, 11, 2814, 1669, 6505, 1155, 340, 16867, 2814, 2822, 197, 322, 600, 16, 374, 264, 5591, 78559, 448, 5344, 12, 24635, 12, 8117, 389, 279, 608, 6021, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoleUpdate(t *testing.T) { _, roleServ := initRoleTest() samples := []struct { in basmodel.Role err error }{ { in: basmodel.Role{ FixedCol: types.FixedCol{ ID: 5, CompanyID: 1001, NodeID: 101, }, Name: "num 1 update", Resources: string(base.SuperAccess), Description: "num 1 update", }, err: nil, }, { in: basmodel.Role{ FixedCol: types.FixedCol{ ID: 6, CompanyID: 1001, NodeID: 101, }, Name: "num 2 update", Description: "num 2 update", }, err: errors.New("resources are required"), }, } for _, v := range samples { _, err := roleServ.Save(v.in) if (v.err == nil && err != nil) || (v.err != nil && err == nil) { t.Errorf("ERROR FOR ::::%+v::: \nRETURNS :::%+v:::, \nIT SHOULD BE :::%+v:::", v.in, err, v.err) } } }
explode_data.jsonl/15122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 441 }
[ 2830, 3393, 9030, 4289, 1155, 353, 8840, 836, 8, 341, 197, 6878, 3476, 39159, 1669, 2930, 9030, 2271, 2822, 1903, 4023, 1669, 3056, 1235, 341, 197, 17430, 220, 3046, 2528, 35955, 198, 197, 9859, 1465, 198, 197, 59403, 197, 197, 515, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestStructOfGenericAlg(t *testing.T) { st1 := StructOf([]StructField{ {Name: "X", Tag: "x", Type: TypeOf(int64(0))}, {Name: "Y", Type: TypeOf(string(""))}, }) st := StructOf([]StructField{ {Name: "S0", Type: st1}, {Name: "S1", Type: st1}, }) for _, table := range []struct { rt Type idx []int }{ { rt: st, idx: []int{0, 1}, }, { rt: st1, idx: []int{1}, }, { rt: StructOf( []StructField{ {Name: "XX", Type: TypeOf([0]int{})}, {Name: "YY", Type: TypeOf("")}, }, ), idx: []int{1}, }, { rt: StructOf( []StructField{ {Name: "XX", Type: TypeOf([0]int{})}, {Name: "YY", Type: TypeOf("")}, {Name: "ZZ", Type: TypeOf([2]int{})}, }, ), idx: []int{1}, }, { rt: StructOf( []StructField{ {Name: "XX", Type: TypeOf([1]int{})}, {Name: "YY", Type: TypeOf("")}, }, ), idx: []int{1}, }, { rt: StructOf( []StructField{ {Name: "XX", Type: TypeOf([1]int{})}, {Name: "YY", Type: TypeOf("")}, {Name: "ZZ", Type: TypeOf([1]int{})}, }, ), idx: []int{1}, }, { rt: StructOf( []StructField{ {Name: "XX", Type: TypeOf([2]int{})}, {Name: "YY", Type: TypeOf("")}, {Name: "ZZ", Type: TypeOf([2]int{})}, }, ), idx: []int{1}, }, { rt: StructOf( []StructField{ {Name: "XX", Type: TypeOf(int64(0))}, {Name: "YY", Type: TypeOf(byte(0))}, {Name: "ZZ", Type: TypeOf("")}, }, ), idx: []int{2}, }, { rt: StructOf( []StructField{ {Name: "XX", Type: TypeOf(int64(0))}, {Name: "YY", Type: TypeOf(int64(0))}, {Name: "ZZ", Type: TypeOf("")}, {Name: "AA", Type: TypeOf([1]int64{})}, }, ), idx: []int{2}, }, } { v1 := New(table.rt).Elem() v2 := New(table.rt).Elem() if !DeepEqual(v1.Interface(), v1.Interface()) { t.Errorf("constructed struct %v not equal to itself", v1.Interface()) } v1.FieldByIndex(table.idx).Set(ValueOf("abc")) v2.FieldByIndex(table.idx).Set(ValueOf("def")) if i1, i2 := v1.Interface(), v2.Interface(); DeepEqual(i1, i2) { t.Errorf("constructed structs %v and %v should not be equal", i1, i2) } abc := "abc" v1.FieldByIndex(table.idx).Set(ValueOf(abc)) val := "+" + abc + "-" 
v2.FieldByIndex(table.idx).Set(ValueOf(val[1:4])) if i1, i2 := v1.Interface(), v2.Interface(); !DeepEqual(i1, i2) { t.Errorf("constructed structs %v and %v should be equal", i1, i2) } // Test hash m := MakeMap(MapOf(table.rt, TypeOf(int(0)))) m.SetMapIndex(v1, ValueOf(1)) if i1, i2 := v1.Interface(), v2.Interface(); !m.MapIndex(v2).IsValid() { t.Errorf("constructed structs %#v and %#v have different hashes", i1, i2) } v2.FieldByIndex(table.idx).Set(ValueOf("abc")) if i1, i2 := v1.Interface(), v2.Interface(); !DeepEqual(i1, i2) { t.Errorf("constructed structs %v and %v should be equal", i1, i2) } if i1, i2 := v1.Interface(), v2.Interface(); !m.MapIndex(v2).IsValid() { t.Errorf("constructed structs %v and %v have different hashes", i1, i2) } } }
explode_data.jsonl/29605
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1572 }
[ 2830, 3393, 9422, 2124, 19964, 86895, 1155, 353, 8840, 836, 8, 341, 18388, 16, 1669, 16139, 2124, 10556, 9422, 1877, 515, 197, 197, 63121, 25, 330, 55, 497, 12353, 25, 330, 87, 497, 3990, 25, 3990, 2124, 1548, 21, 19, 7, 15, 593, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestTags_UntagResource(t *testing.T) { setup() defer teardown() untagResourcesRequest := &UntagResourcesRequest{ Resources: []Resource{{ID: "1", Type: DropletResourceType}}, } mux.HandleFunc("/v2/tags/testing-1/resources", func(w http.ResponseWriter, r *http.Request) { v := new(UntagResourcesRequest) err := json.NewDecoder(r.Body).Decode(v) if err != nil { t.Fatalf("decode json: %v", err) } testMethod(t, r, "DELETE") if !reflect.DeepEqual(v, untagResourcesRequest) { t.Errorf("Request body = %+v, expected %+v", v, untagResourcesRequest) } }) _, err := client.Tags.UntagResources("testing-1", untagResourcesRequest) if err != nil { t.Errorf("Tags.UntagResources returned error: %v", err) } }
explode_data.jsonl/19731
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 15930, 6665, 94905, 4783, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 197, 3850, 351, 11277, 1900, 1669, 609, 20250, 351, 11277, 1900, 515, 197, 197, 11277, 25, 3056, 4783, 2979, 915, 25, 330, 16, 497, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSubnetList(t *testing.T) { client, err := clients.NewNetworkV1Client() if err != nil { t.Fatalf("Unable to create a subnet : %v", err) } allPages, err := subnets.List(client, subnets.ListOpts{}) tools.PrintResource(t, allPages) }
explode_data.jsonl/43975
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 3136, 4711, 852, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 8239, 7121, 12320, 53, 16, 2959, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 17075, 311, 1855, 264, 51457, 549, 1018, 85, 497, 1848, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSimpleApplicationEventGetArgs(t *testing.T) { tests := []struct { name string appID string event events.ApplicationEventType want int }{ {TestArgsName, "testAppId001", events.SubmitApplication, 0}, } for _, tt := range tests { instance := NewSimpleApplicationEvent(tt.appID, tt.event) args := instance.GetArgs() t.Run(tt.name, func(t *testing.T) { if len(args) != tt.want { t.Errorf("want %d, got %d", len(args), tt.want) } }) } }
explode_data.jsonl/9771
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 16374, 4988, 1556, 1949, 4117, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 28236, 915, 914, 198, 197, 28302, 4357, 17521, 47906, 198, 197, 50780, 220, 526, 198, 197, 59403, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRuleHavingClause(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := []string{ `SELECT s.c_id,count(s.c_id) FROM s where c = test GROUP BY s.c_id HAVING s.c_id <> '1660' AND s.c_id <> '2' order by s.c_id;`, } for _, sql := range sqls { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleHavingClause() if rule.Item != "CLA.013" { t.Error("Rule not match:", rule.Item, "Expect : CLA.013") } } else { t.Error("sqlparser.Parse Error:", err) } } common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 11337, 28032, 28482, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 3056, 917, 515, 197, 197, 63, 4858, 274, 520, 842, 56907, 1141, 520, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTimeoutFailure(t *testing.T) { var defaultTimeout = client.http.Timeout setupServer() defer teardownServer() defer func() { client.http.Timeout = defaultTimeout }() client.http.Timeout = 350 * time.Millisecond mux.HandleFunc("/url", func(w http.ResponseWriter, r *http.Request) { time.Sleep(time.Second) }) req := URL("http://example.com/url") switch err := req.Get(); err.(type) { case *url.Error: if !strings.Contains(err.Error(), "Client.Timeout") { t.Errorf("Expected error due to client timeout, got %v instead", err) } default: t.Errorf("Expected error to be due to timeout, got %v instead", err) } }
explode_data.jsonl/24777
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 7636, 17507, 1155, 353, 8840, 836, 8, 341, 2405, 1638, 7636, 284, 2943, 6964, 63977, 198, 84571, 5475, 741, 16867, 49304, 5475, 741, 16867, 2915, 368, 341, 197, 25291, 6964, 63977, 284, 1638, 7636, 198, 197, 66816, 25291, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoneAddRecord(t *testing.T) { noneReset() err := cache.AddRecord(&shaman.Resource{}) if err != nil { t.Errorf("Failed to add record to none cacher - %v", err) } }
explode_data.jsonl/60812
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 4064, 2212, 6471, 1155, 353, 8840, 836, 8, 341, 197, 6697, 14828, 741, 9859, 1669, 6500, 1904, 6471, 2099, 927, 12715, 20766, 37790, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 9408, 311, 912, 3255, 311, 6857, 272, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestAnnotationsRWGTGFails(t *testing.T) { server := mockGtgServer(t, false) defer server.Close() client, err := NewAnnotationsClient(server.URL+"/%s", testingClient) require.NoError(t, err) err = client.GTG() assert.EqualError(t, err, fmt.Sprintf("GTG %v returned a %v status code for generic-rw-aurora", server.URL+"/__gtg", http.StatusServiceUnavailable)) }
explode_data.jsonl/19416
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 21418, 56368, 25388, 36332, 6209, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 7860, 38, 41428, 5475, 1155, 11, 895, 340, 16867, 3538, 10421, 2822, 25291, 11, 1848, 1669, 1532, 21418, 2959, 21421, 20893, 27569, 4, 82, 497, 749...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEmbeddedAsciiQ(t *testing.T) { var testTable = []struct { input, expect string }{ // Abutting a MIME header comment is legal {"(=?US-ASCII?Q?Keith_Moore?=)", "(Keith Moore)"}, // The entire header does not need to be encoded {"(Keith =?US-ASCII?Q?Moore?=)", "(Keith Moore)"}, } for _, tt := range testTable { result := DecodeHeader(tt.input) assert.Equal(t, tt.expect, result, "Expected %q, got %q for input %q", tt.expect, result, tt.input) } }
explode_data.jsonl/31260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 83466, 84135, 48, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 2556, 284, 3056, 1235, 341, 197, 22427, 11, 1720, 914, 198, 197, 59403, 197, 197, 322, 3680, 73981, 264, 57477, 4247, 3980, 374, 5777, 198, 197, 197, 4913, 7, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDevPortForwardDefaultNamespace(t *testing.T) { MarkIntegrationTest(t, CanRunWithoutGcp) // Run skaffold build first to fail quickly on a build failure skaffold.Build().InDir("examples/microservices").RunOrFail(t) rpcAddr := randomPort() skaffold.Dev("--status-check=false", "--port-forward", "--rpc-port", rpcAddr).InDir("examples/microservices").RunBackground(t) defer skaffold.Delete().InDir("examples/microservices").RunBackground(t) _, entries := apiEvents(t, rpcAddr) // No namespace was provided to `skaffold dev`, so we assume "default" waitForPortForwardEvent(t, entries, "leeroy-app", "service", "default", "leeroooooy app!!\n") original, perms, fErr := replaceInFile("leeroooooy app!!", "test string", "examples/microservices/leeroy-app/app.go") failNowIfError(t, fErr) defer func() { if original != nil { ioutil.WriteFile("examples/microservices/leeroy-app/app.go", original, perms) } }() waitForPortForwardEvent(t, entries, "leeroy-app", "service", "default", "test string\n") }
explode_data.jsonl/35843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 14592, 7084, 25925, 3675, 22699, 1155, 353, 8840, 836, 8, 341, 197, 8949, 52464, 2271, 1155, 11, 2980, 6727, 26040, 38, 4672, 692, 197, 322, 6452, 1901, 2649, 813, 1936, 1156, 311, 3690, 6157, 389, 264, 1936, 7901, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeviceCodeReturnsErrorIfBadRequest(t *testing.T) { sender := mocks.NewSender() body := mocks.NewBody("doesn't matter") sender.AppendResponse(mocks.NewResponseWithBodyAndStatus(body, http.StatusBadRequest, "Bad Request")) _, err := InitiateDeviceAuth(sender, TestOAuthConfig, TestClientID, TestResource) if err == nil || !strings.Contains(err.Error(), errCodeHandlingFails) { t.Fatalf("adal: failed to get correct error expected(%s) actual(%s)", errCodeHandlingFails, err.Error()) } if body.IsOpen() { t.Fatalf("response body was left open!") } }
explode_data.jsonl/27498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 6985, 2078, 16446, 1454, 2679, 46015, 1155, 353, 8840, 836, 8, 341, 1903, 1659, 1669, 68909, 7121, 20381, 741, 35402, 1669, 68909, 7121, 5444, 445, 71914, 944, 4925, 1138, 1903, 1659, 8982, 2582, 1255, 25183, 7121, 2582, 2354,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHook_String(t *testing.T) { v := Hook{ URL: String(""), ID: Int64(0), Config: nil, Active: Bool(false), } want := `github.Hook{URL:"", ID:0, Config:map[], Active:false}` if got := v.String(); got != want { t.Errorf("Hook.String = %v, want %v", got, want) } }
explode_data.jsonl/33245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 31679, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 28171, 515, 197, 79055, 25, 262, 923, 445, 4461, 197, 29580, 25, 257, 1333, 21, 19, 7, 15, 1326, 197, 66156, 25, 2092, 345, 197, 197, 5728, 25, 12608, 3576, 1326, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRoaringPostingsListMax(t *testing.T) { d := NewPostingsList() d.Insert(42) d.Insert(78) d.Insert(103) max, err := d.Max() require.NoError(t, err) require.Equal(t, postings.ID(103), max) d = NewPostingsList() _, err = d.Max() require.Error(t, err) }
explode_data.jsonl/64010
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 38872, 3249, 4133, 819, 852, 5974, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 1532, 4133, 819, 852, 741, 2698, 23142, 7, 19, 17, 340, 2698, 23142, 7, 22, 23, 340, 2698, 23142, 7, 16, 15, 18, 692, 22543, 11, 1848, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnvVarsJSON(t *testing.T) { s := newScaffold(t) defer s.reset() err := s.executeCommand("env", "vars", "dev", "-o", "json", "--k8s:kubeconfig=kubeconfig.yaml") require.NoError(t, err) var data interface{} err = s.jsonOutput(&data) require.NoError(t, err) }
explode_data.jsonl/246
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 14359, 28305, 5370, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 50, 27864, 1155, 340, 16867, 274, 13857, 741, 9859, 1669, 274, 7769, 4062, 445, 3160, 497, 330, 15380, 497, 330, 3583, 497, 6523, 78, 497, 330, 2236, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDepToPath(t *testing.T) { tests := []struct { description string dep string expected string }{ { description: "top level file", dep: "//:dispatcher.go", expected: "dispatcher.go", }, { description: "vendored file", dep: "//vendor/github.com/gorilla/mux:mux.go", expected: "vendor/github.com/gorilla/mux/mux.go", }, } for _, test := range tests { testutil.Run(t, test.description, func(t *testutil.T) { path := depToPath(test.dep) t.CheckDeepEqual(test.expected, path) }) } }
explode_data.jsonl/61218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 7839, 1249, 1820, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 197, 14891, 260, 914, 198, 197, 42400, 262, 914, 198, 197, 59403, 197, 197, 515, 298, 42407, 25, 330, 3481, 2188, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiplePatchesWithConflict(t *testing.T) { th := makeTestHarness(t) makeCommonFileForMultiplePatchTest(th) th.WriteF("/app/overlay/staging/deployment-patch1.yaml", ` apiVersion: apps/v1beta2 kind: Deployment metadata: name: nginx spec: template: spec: containers: - name: nginx env: - name: ENABLE_FEATURE_FOO value: TRUE volumes: - name: nginx-persistent-storage emptyDir: null gcePersistentDisk: pdName: nginx-persistent-storage - configMap: name: configmap-in-overlay name: configmap-in-overlay `) th.WriteF("/app/overlay/staging/deployment-patch2.yaml", ` apiVersion: apps/v1beta2 kind: Deployment metadata: name: nginx spec: template: spec: containers: - name: nginx env: - name: ENABLE_FEATURE_FOO value: FALSE `) err := th.RunWithErr("/app/overlay/staging", th.MakeDefaultOptions()) if err == nil { t.Fatalf("expected conflict") } if !strings.Contains( err.Error(), "conflict between ") { t.Fatalf("Unexpected err: %v", err) } }
explode_data.jsonl/51162
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 506 }
[ 2830, 3393, 32089, 47, 9118, 2354, 57974, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 1281, 2271, 74248, 1155, 340, 77438, 10839, 1703, 2461, 32089, 43622, 2271, 24365, 340, 70479, 4073, 37, 4283, 676, 14, 21118, 14272, 4118, 22549, 52799,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWatchBatchUnsynced(t *testing.T) { b, tmpPath := backend.NewDefaultTmpBackend() s := newWatchableStore(b, &lease.FakeLessor{}, nil) oldMaxRevs := watchBatchMaxRevs defer func() { watchBatchMaxRevs = oldMaxRevs s.store.Close() os.Remove(tmpPath) }() batches := 3 watchBatchMaxRevs = 4 v := []byte("foo") for i := 0; i < watchBatchMaxRevs*batches; i++ { s.Put(v, v, lease.NoLease) } w := s.NewWatchStream() w.Watch(v, nil, 1) for i := 0; i < batches; i++ { if resp := <-w.Chan(); len(resp.Events) != watchBatchMaxRevs { t.Fatalf("len(events) = %d, want %d", len(resp.Events), watchBatchMaxRevs) } } s.store.mu.Lock() defer s.store.mu.Unlock() if size := s.synced.size(); size != 1 { t.Errorf("synced size = %d, want 1", size) } }
explode_data.jsonl/12645
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 343 }
[ 2830, 3393, 14247, 21074, 1806, 12996, 291, 1155, 353, 8840, 836, 8, 341, 2233, 11, 4174, 1820, 1669, 19163, 7121, 3675, 35986, 29699, 741, 1903, 1669, 501, 14247, 480, 6093, 1883, 11, 609, 1623, 991, 726, 43, 8309, 22655, 2092, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetLatestTx(t *testing.T) { q, mem := initEnv(0) defer q.Close() defer mem.Close() // add 10 txs err := add10Tx(mem.client) if err != nil { t.Error("add tx error", err.Error()) return } msg11 := mem.client.NewMessage("mempool", types.EventTx, tx11) mem.client.Send(msg11, true) mem.client.Wait(msg11) msg := mem.client.NewMessage("mempool", types.EventGetLastMempool, nil) mem.client.Send(msg, true) reply, err := mem.client.Wait(msg) if err != nil { t.Error(err) return } if len(reply.GetData().(*types.ReplyTxList).GetTxs()) != 10 || mem.Size() != 11 { t.Error("TestGetLatestTx failed", len(reply.GetData().(*types.ReplyTxList).GetTxs()), mem.Size()) } }
explode_data.jsonl/16826
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 1949, 31992, 31584, 1155, 353, 8840, 836, 8, 341, 18534, 11, 1833, 1669, 2930, 14359, 7, 15, 340, 16867, 2804, 10421, 741, 16867, 1833, 10421, 2822, 197, 322, 912, 220, 16, 15, 9854, 82, 198, 9859, 1669, 912, 16, 15, 315...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetDataDisks(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() var testCases = []struct { desc string nodeName types.NodeName isDataDiskNull bool expectedDataDisks []compute.DataDisk expectedError bool crt azcache.AzureCacheReadType }{ { desc: "an error shall be returned if there's no corresponding vm", nodeName: "vm2", expectedDataDisks: nil, expectedError: true, crt: azcache.CacheReadTypeDefault, }, { desc: "correct list of data disks shall be returned if everything is good", nodeName: "vm1", expectedDataDisks: []compute.DataDisk{ { Lun: to.Int32Ptr(0), Name: to.StringPtr("disk1"), }, }, expectedError: false, crt: azcache.CacheReadTypeDefault, }, { desc: "correct list of data disks shall be returned if everything is good", nodeName: "vm1", expectedDataDisks: []compute.DataDisk{ { Lun: to.Int32Ptr(0), Name: to.StringPtr("disk1"), }, }, expectedError: false, crt: azcache.CacheReadTypeUnsafe, }, { desc: "nil shall be returned if DataDisk is null", nodeName: "vm1", isDataDiskNull: true, expectedDataDisks: nil, expectedError: false, crt: azcache.CacheReadTypeDefault, }, } for i, test := range testCases { testCloud := GetTestCloud(ctrl) vmSet := testCloud.vmSet expectedVMs := setTestVirtualMachines(testCloud, map[string]string{"vm1": "PowerState/Running"}, false) mockVMsClient := testCloud.VirtualMachinesClient.(*mockvmclient.MockInterface) for _, vm := range expectedVMs { if test.isDataDiskNull { vm.StorageProfile = &compute.StorageProfile{} } mockVMsClient.EXPECT().Get(gomock.Any(), testCloud.ResourceGroup, *vm.Name, gomock.Any()).Return(vm, nil).AnyTimes() } mockVMsClient.EXPECT().Get(gomock.Any(), testCloud.ResourceGroup, gomock.Not("vm1"), gomock.Any()).Return(compute.VirtualMachine{}, &retry.Error{HTTPStatusCode: http.StatusNotFound, RawError: cloudprovider.InstanceNotFound}).AnyTimes() mockVMsClient.EXPECT().Update(gomock.Any(), testCloud.ResourceGroup, gomock.Any(), gomock.Any(), gomock.Any()).Return(nil).AnyTimes() dataDisks, err := 
vmSet.GetDataDisks(test.nodeName, test.crt) assert.Equal(t, test.expectedDataDisks, dataDisks, "TestCase[%d]: %s", i, test.desc) assert.Equal(t, test.expectedError, err != nil, "TestCase[%d]: %s", i, test.desc) if test.crt == azcache.CacheReadTypeUnsafe { time.Sleep(fakeCacheTTL) dataDisks, err := vmSet.GetDataDisks(test.nodeName, test.crt) assert.Equal(t, test.expectedDataDisks, dataDisks, "TestCase[%d]: %s", i, test.desc) assert.Equal(t, test.expectedError, err != nil, "TestCase[%d]: %s", i, test.desc) } } }
explode_data.jsonl/72623
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1243 }
[ 2830, 3393, 68957, 4839, 2787, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 2405, 1273, 37302, 284, 3056, 1235, 341, 197, 41653, 1060, 914, 198, 197, 20831, 675, 688, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestOldPQL(t *testing.T) { _, err := ParseString(`SetBit(f=11, col=1)`) if err != nil { t.Fatalf("should have parsed: %v", err) } }
explode_data.jsonl/21700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 18284, 47, 3588, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 14775, 703, 5809, 1649, 8344, 955, 28, 16, 16, 11, 1375, 28, 16, 8, 24183, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 5445, 614, 15676, 25, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestDeleteRule(t *testing.T) { args := &DeleteRuleArgs{ RuleId: BBC_TestRuleId, } err := BBC_CLIENT.DeleteRule(args) ExpectEqual(t.Errorf, err, nil) }
explode_data.jsonl/4087
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 6435, 11337, 1155, 353, 8840, 836, 8, 341, 31215, 1669, 609, 6435, 11337, 4117, 515, 197, 11143, 1111, 764, 25, 18096, 32541, 11337, 764, 345, 197, 532, 9859, 1669, 18096, 22521, 18872, 11337, 7356, 340, 35911, 2993, 1155, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFuzzyUserCreate(t *testing.T) { th := Setup().InitBasic() Client := th.BasicClient team := model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "test@nowhere.com", Type: model.TEAM_OPEN} rteam, _ := Client.CreateTeam(&team) Client.Logout() for i := 0; i < len(utils.FUZZY_STRINGS_NAMES) || i < len(utils.FUZZY_STRINGS_EMAILS); i++ { testName := "Name" testEmail := "test@nowhere.com" if i < len(utils.FUZZY_STRINGS_NAMES) { testName = utils.FUZZY_STRINGS_NAMES[i] } if i < len(utils.FUZZY_STRINGS_EMAILS) { testEmail = utils.FUZZY_STRINGS_EMAILS[i] } user := model.User{Email: strings.ToLower(model.NewId()) + testEmail, Nickname: testName, Password: "hello1"} ruser, err := Client.CreateUser(&user, "") if err != nil { t.Fatal(err) } LinkUserToTeam(ruser.Data.(*model.User), rteam.Data.(*model.Team)) } }
explode_data.jsonl/13820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 374 }
[ 2830, 3393, 37, 34758, 1474, 4021, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 741, 71724, 1669, 270, 48868, 2959, 271, 197, 9196, 1669, 1614, 65842, 90, 26456, 25, 330, 675, 497, 3988, 25, 330, 89, 9141, 2765...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCloudHypervisorResizeMemory(t *testing.T) { assert := assert.New(t) clhConfig, err := newClhConfig() type args struct { reqMemMB uint32 memoryBlockSizeMB uint32 } tests := []struct { name string args args expectedMemDev MemoryDevice wantErr bool }{ {"Resize to zero", args{0, 128}, MemoryDevice{Probe: false, SizeMB: 0}, FAIL}, {"Resize to aligned size", args{clhConfig.MemorySize + 128, 128}, MemoryDevice{Probe: false, SizeMB: 128}, PASS}, {"Resize to aligned size", args{clhConfig.MemorySize + 129, 128}, MemoryDevice{Probe: false, SizeMB: 256}, PASS}, {"Resize to NOT aligned size", args{clhConfig.MemorySize + 125, 128}, MemoryDevice{Probe: false, SizeMB: 128}, PASS}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { assert.NoError(err) clh := cloudHypervisor{} mockClient := &clhClientMock{} mockClient.vmInfo.Config = *chclient.NewVmConfig(*chclient.NewKernelConfig("")) mockClient.vmInfo.Config.Memory = chclient.NewMemoryConfig(int64(utils.MemUnit(clhConfig.MemorySize) * utils.MiB)) mockClient.vmInfo.Config.Memory.HotplugSize = func(i int64) *int64 { return &i }(int64(40 * utils.GiB.ToBytes())) clh.APIClient = mockClient clh.config = clhConfig newMem, memDev, err := clh.ResizeMemory(context.Background(), tt.args.reqMemMB, tt.args.memoryBlockSizeMB, false) if (err != nil) != tt.wantErr { t.Errorf("cloudHypervisor.ResizeMemory() error = %v, expected to fail = %v", err, tt.wantErr) return } if err != nil { return } expectedMem := clhConfig.MemorySize + uint32(tt.expectedMemDev.SizeMB) if newMem != expectedMem { t.Errorf("cloudHypervisor.ResizeMemory() got = %+v, want %+v", newMem, expectedMem) } if !reflect.DeepEqual(memDev, tt.expectedMemDev) { t.Errorf("cloudHypervisor.ResizeMemory() got = %+v, want %+v", memDev, tt.expectedMemDev) } }) } }
explode_data.jsonl/68503
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 790 }
[ 2830, 3393, 16055, 39, 1082, 31396, 30561, 10642, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 39407, 71, 2648, 11, 1848, 1669, 501, 5066, 71, 2648, 741, 13158, 2827, 2036, 341, 197, 24395, 18816, 8412, 688, 2622, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEngineBatchStaleCachedIterator(t *testing.T) { defer leaktest.AfterTest(t)() // Prevent regression of a bug which caused spurious MVCC errors due to an // invalid optimization which let an iterator return key-value pairs which // had since been deleted from the underlying engine. // Discovered in #6878. runWithAllEngines(func(eng Engine, t *testing.T) { // Focused failure mode: highlights the actual bug. { batch := eng.NewBatch() defer batch.Close() iter := batch.NewIterator(IterOptions{UpperBound: roachpb.KeyMax}) key := MVCCKey{Key: roachpb.Key("b")} if err := batch.Put(key, []byte("foo")); err != nil { t.Fatal(err) } iter.Seek(key) if err := batch.Clear(key); err != nil { t.Fatal(err) } // Iterator should not reuse its cached result. iter.Seek(key) if ok, err := iter.Valid(); err != nil { t.Fatal(err) } else if ok { t.Fatalf("iterator unexpectedly valid: %v -> %v", iter.UnsafeKey(), iter.UnsafeValue()) } iter.Close() } // Higher-level failure mode. Mostly for documentation. { batch := eng.NewBatch() defer batch.Close() key := roachpb.Key("z") // Put a value so that the deletion below finds a value to seek // to. if err := MVCCPut(context.Background(), batch, nil, key, hlc.Timestamp{}, roachpb.MakeValueFromString("x"), nil); err != nil { t.Fatal(err) } // Seek the iterator to `key` and clear the value (but without // telling the iterator about that). if err := MVCCDelete(context.Background(), batch, nil, key, hlc.Timestamp{}, nil); err != nil { t.Fatal(err) } // Trigger a seek on the cached iterator by seeking to the (now // absent) key. // The underlying iterator will already be in the right position // due to a seek in MVCCDelete (followed by a Clear, which does not // invalidate the iterator's cache), and if it reports its cached // result back, we'll see the (newly deleted) value (due to the // failure mode above). 
if v, _, err := MVCCGet(context.Background(), batch, key, hlc.Timestamp{}, MVCCGetOptions{}); err != nil { t.Fatal(err) } else if v != nil { t.Fatalf("expected no value, got %+v", v) } } }, t) }
explode_data.jsonl/38107
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 844 }
[ 2830, 3393, 4571, 21074, 623, 1574, 70293, 11951, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 197, 322, 38068, 30549, 315, 264, 9876, 892, 8881, 978, 27526, 42271, 3706, 5975, 4152, 311, 458, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestNewRowsCanBeCreated(t *testing.T) { req := require.New(t) board := sdk.NewBoard("") panel := New(board, "Some row") req.Equal("Some row", panel.builder.Title) req.True(panel.builder.ShowTitle) }
explode_data.jsonl/39817
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 3564, 9024, 69585, 11694, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 1373, 7121, 1155, 340, 59868, 1669, 45402, 7121, 11932, 445, 5130, 44952, 1669, 1532, 20770, 11, 330, 8373, 2802, 5130, 24395, 12808, 445, 8373, 2802, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_encodeComponentProperties(t *testing.T) { epoch := 2 tests := []struct { name string input pkg.Package expected *[]cyclonedx.Property }{ { name: "no metadata", input: pkg.Package{}, expected: nil, }, { name: "from apk", input: pkg.Package{ FoundBy: "cataloger", Locations: source.NewLocationSet( source.Location{Coordinates: source.Coordinates{RealPath: "test"}}, ), Metadata: pkg.ApkMetadata{ Package: "libc-utils", OriginPackage: "libc-dev", Maintainer: "Natanael Copa <ncopa@alpinelinux.org>", Version: "0.7.2-r0", License: "BSD", Architecture: "x86_64", URL: "http://alpinelinux.org", Description: "Meta package to pull in correct libc", Size: 0, InstalledSize: 4096, PullDependencies: "musl-utils", PullChecksum: "Q1p78yvTLG094tHE1+dToJGbmYzQE=", GitCommitOfAport: "97b1c2842faa3bfa30f5811ffbf16d5ff9f1a479", Files: []pkg.ApkFileRecord{}, }, }, expected: &[]cyclonedx.Property{ {Name: "syft:package:foundBy", Value: "cataloger"}, {Name: "syft:location:0:path", Value: "test"}, {Name: "syft:metadata:gitCommitOfApkPort", Value: "97b1c2842faa3bfa30f5811ffbf16d5ff9f1a479"}, {Name: "syft:metadata:installedSize", Value: "4096"}, {Name: "syft:metadata:originPackage", Value: "libc-dev"}, {Name: "syft:metadata:pullChecksum", Value: "Q1p78yvTLG094tHE1+dToJGbmYzQE="}, {Name: "syft:metadata:pullDependencies", Value: "musl-utils"}, {Name: "syft:metadata:size", Value: "0"}, }, }, { name: "from dpkg", input: pkg.Package{ MetadataType: pkg.DpkgMetadataType, Metadata: pkg.DpkgMetadata{ Package: "tzdata", Version: "2020a-0+deb10u1", Source: "tzdata-dev", SourceVersion: "1.0", Architecture: "all", InstalledSize: 3036, Maintainer: "GNU Libc Maintainers <debian-glibc@lists.debian.org>", Files: []pkg.DpkgFileRecord{}, }, }, expected: &[]cyclonedx.Property{ {Name: "syft:package:metadataType", Value: "DpkgMetadata"}, {Name: "syft:metadata:installedSize", Value: "3036"}, {Name: "syft:metadata:source", Value: "tzdata-dev"}, {Name: "syft:metadata:sourceVersion", Value: "1.0"}, }, }, { name: 
"from go bin", input: pkg.Package{ Name: "golang.org/x/net", Version: "v0.0.0-20211006190231-62292e806868", Language: pkg.Go, Type: pkg.GoModulePkg, MetadataType: pkg.GolangBinMetadataType, Metadata: pkg.GolangBinMetadata{ GoCompiledVersion: "1.17", Architecture: "amd64", H1Digest: "h1:KlOXYy8wQWTUJYFgkUI40Lzr06ofg5IRXUK5C7qZt1k=", }, }, expected: &[]cyclonedx.Property{ {Name: "syft:package:language", Value: pkg.Go.String()}, {Name: "syft:package:metadataType", Value: "GolangBinMetadata"}, {Name: "syft:package:type", Value: "go-module"}, {Name: "syft:metadata:architecture", Value: "amd64"}, {Name: "syft:metadata:goCompiledVersion", Value: "1.17"}, {Name: "syft:metadata:h1Digest", Value: "h1:KlOXYy8wQWTUJYFgkUI40Lzr06ofg5IRXUK5C7qZt1k="}, }, }, { name: "from rpm", input: pkg.Package{ Name: "dive", Version: "0.9.2-1", Type: pkg.RpmPkg, MetadataType: pkg.RpmdbMetadataType, Metadata: pkg.RpmdbMetadata{ Name: "dive", Epoch: &epoch, Arch: "x86_64", Release: "1", Version: "0.9.2", SourceRpm: "dive-0.9.2-1.src.rpm", Size: 12406784, License: "MIT", Vendor: "", Files: []pkg.RpmdbFileRecord{}, }, }, expected: &[]cyclonedx.Property{ {Name: "syft:package:metadataType", Value: "RpmdbMetadata"}, {Name: "syft:package:type", Value: "rpm"}, {Name: "syft:metadata:epoch", Value: "2"}, {Name: "syft:metadata:release", Value: "1"}, {Name: "syft:metadata:size", Value: "12406784"}, {Name: "syft:metadata:sourceRpm", Value: "dive-0.9.2-1.src.rpm"}, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { c := encodeComponent(test.input) assert.Equal(t, test.expected, c.Properties) }) } }
explode_data.jsonl/7817
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2245 }
[ 2830, 3393, 11224, 2189, 7903, 1155, 353, 8840, 836, 8, 341, 197, 20197, 1669, 220, 17, 198, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 22427, 262, 24793, 49834, 198, 197, 42400, 353, 1294, 97484, 19684, 87, 15727, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOptBaseContextFunc(t *testing.T) { t.Parallel() it := assert.New(t) ctx := context.TODO() bc := func(_ net.Listener) context.Context { return ctx } opt := OptBaseContextFunc(bc) a := App{} err := opt(&a) it.Nil(err) root := a.BaseContext(nil) it.ReferenceEqual(ctx, root) }
explode_data.jsonl/7709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 21367, 93824, 9626, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 23374, 1669, 2060, 7121, 1155, 692, 20985, 1669, 2266, 90988, 741, 2233, 66, 1669, 2915, 2490, 4179, 64091, 8, 2266, 9328, 341, 197, 853, 5635, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_toPortMapParameter(t *testing.T) { require.Equal(t, toPortMapParameter(map[int]int{ }), "", "port map parameter incorrect") require.Equal(t, toPortMapParameter(map[int]int{ 80:8080 }), "80:8080", "port map parameter incorrect") res := toPortMapParameter(map[int]int{ 80:8080, 70:7000 }) require.True(t, res == "80:8080,70:7000" || res == "70:7000,80:8080", "port map parameter incorrect") }
explode_data.jsonl/62513
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 2346, 7084, 2227, 4971, 1155, 353, 8840, 836, 8, 341, 17957, 12808, 1155, 11, 311, 7084, 2227, 4971, 9147, 18640, 63025, 90, 31706, 7342, 330, 403, 2415, 5733, 15114, 1138, 17957, 12808, 1155, 11, 311, 7084, 2227, 4971, 9147...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRouter_NewRoute_RegistersComplexRoutes(t *testing.T) { mainRouter := Router{} rb := mainRouter.NewRoute() rb.Method("GET").Path("/users").Handler(testHandlerFunc) rb.Name("users") rb.Host("domain.com") rb.Schemas("https") rb.Header("X-dummy", "dummy") rb.QueryParam("offset", "2") rb.Matcher(func(r *http.Request) bool { return r.ContentLength > 0 }) _ = rb.Register() req, _ := http.NewRequest("GET", "/users?offset=2", strings.NewReader("hello")) req.Host = "domain.com" req.URL.Scheme = "https" req.Header.Set("X-dummy", "dummy") res := httptest.NewRecorder() mainRouter.ServeHTTP(res, req) assertEqual(t, 200, res.Code) req, _ = http.NewRequest("GET", "/users?offset=1", strings.NewReader("hello")) req.Host = "domain.com" req.URL.Scheme = "https" req.Header.Set("X-dummy", "dummy") res = httptest.NewRecorder() mainRouter.ServeHTTP(res, req) assertEqual(t, 404, res.Code) req, _ = http.NewRequest("GET", "/users?offset=2", strings.NewReader("hello")) req.Host = "domain2.com" req.URL.Scheme = "https" req.Header.Set("X-dummy", "dummy") res = httptest.NewRecorder() mainRouter.ServeHTTP(res, req) assertEqual(t, 404, res.Code) req, _ = http.NewRequest("GET", "/users?offset=2", strings.NewReader("hello")) req.Host = "domain.com" req.URL.Scheme = "http" req.Header.Set("X-dummy", "dummy") res = httptest.NewRecorder() mainRouter.ServeHTTP(res, req) assertEqual(t, 404, res.Code) req, _ = http.NewRequest("GET", "/users?offset=2", strings.NewReader("hello")) req.Host = "domain.com" req.URL.Scheme = "https" req.Header.Set("X-dummy", "dummy2") res = httptest.NewRecorder() mainRouter.ServeHTTP(res, req) assertEqual(t, 404, res.Code) req, _ = http.NewRequest("GET", "/users?offset=2", nil) req.Host = "domain.com" req.URL.Scheme = "https" req.Header.Set("X-dummy", "dummy") res = httptest.NewRecorder() mainRouter.ServeHTTP(res, req) assertEqual(t, 404, res.Code) url, _ := mainRouter.GenerateURL("users", newURLParameterBag(0)) assertStringEqual(t, "/users", url) }
explode_data.jsonl/31757
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 836 }
[ 2830, 3393, 9523, 39582, 4899, 49384, 9303, 31137, 26653, 1155, 353, 8840, 836, 8, 341, 36641, 9523, 1669, 10554, 31483, 85589, 1669, 1887, 9523, 7121, 4899, 741, 85589, 20798, 445, 3806, 1827, 1820, 4283, 4218, 1827, 3050, 8623, 3050, 96...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewTransferDetails(t *testing.T) { // Case 1: cache with http transfers := NewTransferDetails("cache.edu", false) assert.Equal(t, 2, len(transfers)) assert.Equal(t, "cache.edu:8000", transfers[0].Url.Host) assert.Equal(t, "http", transfers[0].Url.Scheme) assert.Equal(t, true, transfers[0].Proxy) assert.Equal(t, "cache.edu:8000", transfers[1].Url.Host) assert.Equal(t, "http", transfers[1].Url.Scheme) assert.Equal(t, false, transfers[1].Proxy) // Case 2: cache with https transfers = NewTransferDetails("cache.edu", true) assert.Equal(t, 2, len(transfers)) assert.Equal(t, "cache.edu:8444", transfers[0].Url.Host) assert.Equal(t, "https", transfers[0].Url.Scheme) assert.Equal(t, false, transfers[0].Proxy) assert.Equal(t, "cache.edu:8443", transfers[1].Url.Host) assert.Equal(t, "https", transfers[1].Url.Scheme) assert.Equal(t, false, transfers[1].Proxy) // Case 3: cache with port with http transfers = NewTransferDetails("cache.edu:1234", false) assert.Equal(t, 2, len(transfers)) assert.Equal(t, "cache.edu:1234", transfers[0].Url.Host) assert.Equal(t, "http", transfers[0].Url.Scheme) assert.Equal(t, true, transfers[0].Proxy) assert.Equal(t, "cache.edu:1234", transfers[1].Url.Host) assert.Equal(t, "http", transfers[1].Url.Scheme) assert.Equal(t, false, transfers[1].Proxy) // Case 4. cache with port with https transfers = NewTransferDetails("cache.edu:5678", true) assert.Equal(t, 1, len(transfers)) assert.Equal(t, "cache.edu:5678", transfers[0].Url.Host) assert.Equal(t, "https", transfers[0].Url.Scheme) assert.Equal(t, false, transfers[0].Proxy) }
explode_data.jsonl/22510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 632 }
[ 2830, 3393, 3564, 21970, 7799, 1155, 353, 8840, 836, 8, 341, 197, 322, 11538, 220, 16, 25, 6500, 448, 1758, 198, 72453, 49793, 1669, 1532, 21970, 7799, 445, 9360, 12586, 497, 895, 340, 6948, 12808, 1155, 11, 220, 17, 11, 2422, 33089, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuccessRevParseTagShort(t *testing.T) { testRepo := newTestRepo(t) defer testRepo.cleanup(t) mainRev, err := testRepo.sut.RevParseTagShort(git.DefaultBranch) require.Nil(t, err) require.Equal(t, testRepo.firstCommit[:10], mainRev) branchRev, err := testRepo.sut.RevParseTagShort(testRepo.branchName) require.Nil(t, err) require.Equal(t, testRepo.thirdBranchCommit[:10], branchRev) tagRev, err := testRepo.sut.RevParseTagShort(testRepo.firstTagName) require.Nil(t, err) require.Equal(t, testRepo.firstCommit[:10], tagRev) }
explode_data.jsonl/13990
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 7188, 36184, 14463, 5668, 12472, 1155, 353, 8840, 836, 8, 341, 18185, 25243, 1669, 501, 2271, 25243, 1155, 340, 16867, 1273, 25243, 87689, 1155, 692, 36641, 36184, 11, 1848, 1669, 1273, 25243, 514, 332, 2817, 85, 14463, 5668, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_shareSweets(t *testing.T) { sweetsSize := []int{3, 5, 7, 10, 13} childrenNeed := []int{2, 5, 6, 8, 11, 12, 13} shareSweets(sweetsSize, childrenNeed) fmt.Println("------------------------") sweetsSize = []int{3, 5, 7, 10, 13} childrenNeed = []int{2, 5, 6, 8, 14, 15, 20} shareSweets(sweetsSize, childrenNeed) }
explode_data.jsonl/13701
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 38272, 50, 25415, 1155, 353, 8840, 836, 8, 341, 1903, 25415, 1695, 1669, 3056, 396, 90, 18, 11, 220, 20, 11, 220, 22, 11, 220, 16, 15, 11, 220, 16, 18, 532, 82470, 23657, 1669, 3056, 396, 90, 17, 11, 220, 20, 11, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_VolumeExists_Positive_VolumeExistsNodeExists(t *testing.T) { // Arrange volumePluginMgr, _ := controllervolumetesting.GetTestVolumePluginMgr((t)) dsw := NewDesiredStateOfWorld(volumePluginMgr) nodeName := "node-name" dsw.AddNode(nodeName) podName := "pod-name" volumeName := api.UniqueDeviceName("volume-name") volumeSpec := controllervolumetesting.GetTestVolumeSpec(string(volumeName), volumeName) generatedVolumeName, _ := dsw.AddPod(podName, volumeSpec, nodeName) // Act volumeExists := dsw.VolumeExists(generatedVolumeName, nodeName) // Assert if !volumeExists { t.Fatalf("Volume %q does not exist, it should.", generatedVolumeName) } volumesToAttach := dsw.GetVolumesToAttach() if len(volumesToAttach) != 1 { t.Fatalf("len(volumesToAttach) Expected: <1> Actual: <%v>", len(volumesToAttach)) } verifyVolumeToAttach(t, volumesToAttach, nodeName, generatedVolumeName, string(volumeName)) }
explode_data.jsonl/40750
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 324 }
[ 2830, 3393, 2334, 4661, 15575, 44246, 3404, 2334, 4661, 15575, 1955, 15575, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 5195, 4661, 11546, 25567, 11, 716, 1669, 683, 1100, 648, 1132, 57824, 287, 2234, 2271, 18902, 11546, 25567, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPurge(t *testing.T) { conf, cleanup := testutil.InitConfig(t) defer cleanup(t) // Store it err := conf.PersistConfig(true) assert.NoErrorf(t, err, "Unable to persist configuration expected at %v", conf.LoadedConfigPath()) // Verify that the file is there _, err = os.Stat(conf.LoadedConfigPath()) assert.Falsef(t, os.IsNotExist(err), "Test config was not persisted at %v, cannot validate Purge", conf.LoadedConfigPath()) // Delete it err = conf.Purge() assert.NoErrorf(t, err, "Unable to Purge file at %v", conf.LoadedConfigPath()) // Verify its gone _, err = os.Stat(conf.LoadedConfigPath()) assert.Falsef(t, os.IsExist(err), "Purge failed to remove file at %v", conf.LoadedConfigPath()) }
explode_data.jsonl/57898
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 47, 39823, 1155, 353, 8840, 836, 8, 341, 67850, 11, 21290, 1669, 1273, 1314, 26849, 2648, 1155, 340, 16867, 21290, 1155, 692, 197, 322, 9129, 432, 198, 9859, 1669, 2335, 1069, 4975, 2648, 3715, 340, 6948, 35699, 69, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouterMatchSlash(t *testing.T) { e := New() r := e.router handler := func(c Context) error { c.Set("path", c.Path()) return nil } // A minimum of routes r.Add(http.MethodGet, "/video", handler) r.Add(http.MethodGet, "/video/*", handler) c := e.NewContext(nil, nil).(*context) // "/video/" > "/video/*" testVideoSlash := func() { r.Find(http.MethodGet, "/video/", c) c.handler(c) assert.Equal(t, "/video/*", c.Get("path")) assert.Equal(t, "", c.Param("*")) } testVideoSlash() // Adding more routes (comment out any, it doesn't matter) r.Add(http.MethodGet, "/art", handler) r.Add(http.MethodGet, "/art/", handler) r.Add(http.MethodGet, "/art/*", handler) c = e.NewContext(nil, nil).(*context) // Same exact testVideoSlash() not working anymore testVideoSlash() }
explode_data.jsonl/47130
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 318 }
[ 2830, 3393, 9523, 8331, 88004, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1532, 741, 7000, 1669, 384, 22125, 198, 53326, 1669, 2915, 1337, 9608, 8, 1465, 341, 197, 1444, 4202, 445, 2343, 497, 272, 17474, 2398, 197, 853, 2092, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContextGetStringMapString(t *testing.T) { c, _ := CreateTestContext(httptest.NewRecorder()) var m = make(map[string]string) m["foo"] = "bar" c.Set("map", m) assert.Equal(t, m, c.GetStringMapString("map")) assert.Equal(t, "bar", c.GetStringMapString("map")["foo"]) }
explode_data.jsonl/26755
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 1972, 48905, 2227, 703, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 4230, 2271, 1972, 73392, 83, 70334, 7121, 47023, 2398, 2405, 296, 284, 1281, 9147, 14032, 30953, 340, 2109, 1183, 7975, 1341, 284, 330, 2257, 698, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFloat32NotZero(t *testing.T) { assert := assert.New(t) var verr error var val float32 = 5.0 verr = Float32(&val).NotZero()() assert.Nil(verr) verr = Float32(nil).NotZero()() assert.NotNil(verr) assert.Nil(ErrValue(verr)) assert.Equal(ErrFloat32NotZero, ErrCause(verr)) val = 0.0 verr = Float32(&val).NotZero()() assert.NotNil(verr) assert.NotNil(ErrValue(verr)) assert.Equal(ErrFloat32NotZero, ErrCause(verr)) }
explode_data.jsonl/11549
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 5442, 18, 17, 2623, 17999, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 2405, 71467, 1465, 198, 2405, 1044, 2224, 18, 17, 284, 220, 20, 13, 15, 198, 197, 423, 81, 284, 13001, 18, 17, 2099, 831, 568,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFeeRateInfoBlock(t *testing.T) { block, _ := LoadTestBlockAndSSTX(t) fib := FeeRateInfoBlock(block) t.Log(*fib) fibExpected := dcrjson.FeeInfoBlock{ Height: 138883, Number: 20, Min: 0.5786178114478114, Max: 0.70106, Mean: 0.5969256371196103, Median: 0.595365723905724, StdDev: 0.02656563242880357, } if !reflect.DeepEqual(fibExpected, *fib) { t.Errorf("Fee Info Block mismatch. Expected %v, got %v.", fibExpected, *fib) } }
explode_data.jsonl/38434
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 41941, 11564, 1731, 4713, 1155, 353, 8840, 836, 8, 341, 47996, 11, 716, 1669, 8893, 2271, 4713, 3036, 50, 784, 55, 1155, 692, 1166, 579, 1669, 40458, 11564, 1731, 4713, 18682, 340, 3244, 5247, 4071, 75326, 692, 1166, 579, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestViewBufferPosition(t *testing.T) { view := NewView("test", 0, 0, 10, 10, true, nil) numberOfLinesToWrite := 20 for i := 0; i < numberOfLinesToWrite; i++ { fmt.Fprintf(view, "Line %d\n", i) } firstLine, _ := view.Line(0) if firstLine != "Line 0" { t.Errorf("Buffer content is not right, expected: %s got: %s", "Line 0", firstLine) } //view.prepareViewForRender() view.PageDown() testCursor(t, view, 0, 0) testViewPosition(t, view, 0, 9) view.PageDown() testCursor(t, view, 0, 0) testViewPosition(t, view, 0, 18) view.PageDown() testCursor(t, view, 0, 10) //The buffer testViewBufferSize(t, view, numberOfLinesToWrite+1) }
explode_data.jsonl/37944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 851, 4095, 3812, 1155, 353, 8840, 836, 8, 341, 36867, 1669, 1532, 851, 445, 1944, 497, 220, 15, 11, 220, 15, 11, 220, 16, 15, 11, 220, 16, 15, 11, 830, 11, 2092, 340, 197, 37823, 16794, 1249, 7985, 1669, 220, 17, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParams(t *testing.T) { parser := sqltpl.NewSQLParser() query := ` -- sqltpl: Test1 select a@@int, b@@string, c@@sql.NullString from foo where id1 = ?id1@@int and id2 = ?id2@@string and id3 = ?id3@@sql.NullString -- end ` bundle, err := parser.Parse(strings.NewReader(query)) if err != nil { t.Error(err) } if len(bundle.Queries) != 1 { t.Fatal("must be 1 query") } q := bundle.Queries[0] if len(q.Ins) != 3 { t.Fatalf("must be 3 input parameter but got %d", len(q.Ins)) } assertParam(t, q.Ins[0], "Id1", "int", "id1") assertParam(t, q.Ins[1], "Id2", "string", "id2") assertParam(t, q.Ins[2], "Id3", "sql.NullString", "id3") if len(q.Outs) != 3 { t.Fatalf("must be 3 output parameter but got %d", len(q.Ins)) } assertParam(t, q.Outs[0], "A", "int", "a") assertParam(t, q.Outs[1], "B", "string", "b") assertParam(t, q.Outs[2], "C", "sql.NullString", "c") }
explode_data.jsonl/41280
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 4870, 1155, 353, 8840, 836, 8, 1476, 55804, 1669, 5704, 12620, 7121, 6688, 6570, 2822, 27274, 1669, 22074, 313, 5704, 12620, 25, 3393, 16, 4710, 1742, 264, 19191, 396, 345, 8689, 293, 19191, 917, 345, 8689, 272, 19191, 3544,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIntegerExpressionNOT_EQ(t *testing.T) { assertClauseSerialize(t, table1ColInt.NOT_EQ(table2ColInt), "(table1.col_int != table2.col_int)") assertClauseSerialize(t, table1ColInt.NOT_EQ(Int(11)), "(table1.col_int != $1)", int64(11)) }
explode_data.jsonl/41450
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 3486, 9595, 14065, 9168, 1155, 353, 8840, 836, 8, 341, 6948, 28482, 15680, 1155, 11, 1965, 16, 6127, 1072, 40420, 9168, 15761, 17, 6127, 1072, 701, 11993, 2005, 16, 13414, 4042, 961, 1965, 17, 13414, 4042, 19107, 6948, 28482...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCompositeLit2(t *testing.T) { gopClTest(t, ` type foo struct { A int } x := []*struct{a int}{ {1}, {3}, {5}, } y := map[foo]struct{a string}{ {1}: {"Hi"}, } z := [...]foo{ {1}, {3}, {5}, } `, `package main type foo struct { A int } func main() { x := []*struct { a int }{&struct { a int }{1}, &struct { a int }{3}, &struct { a int }{5}} y := map[foo]struct { a string }{foo{1}: struct { a string }{"Hi"}} z := [...]foo{foo{1}, foo{3}, foo{5}} } `) }
explode_data.jsonl/73629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 41685, 68954, 17, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 1313, 15229, 2036, 341, 22985, 526, 198, 630, 87, 1669, 29838, 1235, 90, 64, 526, 59403, 197, 90, 16, 2137, 314, 18, 2137, 314, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidate_NoUnusedFragments_AllFragmentNamesAreUsedByMultipleOperations(t *testing.T) { testutil.ExpectPassesRule(t, graphql.NoUnusedFragmentsRule, ` query Foo { human(id: 4) { ...HumanFields1 } } query Bar { human(id: 4) { ...HumanFields2 } } fragment HumanFields1 on Human { name ...HumanFields3 } fragment HumanFields2 on Human { name } fragment HumanFields3 on Human { name } `) }
explode_data.jsonl/58911
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 17926, 36989, 94033, 37, 41956, 53629, 9488, 7980, 11526, 22743, 1359, 32089, 35120, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 81893, 12187, 288, 11337, 1155, 11, 48865, 16766, 94033, 37, 41956, 11337, 11, 22074, 414, 3239, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExpandVolumeMountsWithSubpath(t *testing.T) { cases := []struct { name string container *v1.Container envs []EnvVar expectedSubPath string expectedMountPath string expectedOk bool }{ { name: "subpath with no expansion", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "foo"}}, }, expectedSubPath: "foo", expectedMountPath: "", expectedOk: true, }, { name: "volumes with expanded subpath", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "foo/$(POD_NAME)"}}, }, envs: []EnvVar{ { Name: "POD_NAME", Value: "bar", }, }, expectedSubPath: "foo/bar", expectedMountPath: "", expectedOk: true, }, { name: "volumes expanded with empty subpath", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: ""}}, }, envs: []EnvVar{ { Name: "POD_NAME", Value: "bar", }, }, expectedSubPath: "", expectedMountPath: "", expectedOk: true, }, { name: "volumes expanded with no envs subpath", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "/foo/$(POD_NAME)"}}, }, expectedSubPath: "/foo/$(POD_NAME)", expectedMountPath: "", expectedOk: false, }, { name: "volumes expanded with leading environment variable", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "$(POD_NAME)/bar"}}, }, envs: []EnvVar{ { Name: "POD_NAME", Value: "foo", }, }, expectedSubPath: "foo/bar", expectedMountPath: "", expectedOk: true, }, { name: "volumes with volume and subpath", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{MountPath: "/foo", SubPathExpr: "$(POD_NAME)/bar"}}, }, envs: []EnvVar{ { Name: "POD_NAME", Value: "foo", }, }, expectedSubPath: "foo/bar", expectedMountPath: "/foo", expectedOk: true, }, { name: "volumes with volume and no subpath", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{MountPath: "/foo"}}, }, envs: []EnvVar{ { Name: "POD_NAME", Value: "foo", }, }, expectedSubPath: "", expectedMountPath: "/foo", expectedOk: true, }, { name: "subpaths with empty environment variable", 
container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "foo/$(POD_NAME)/$(ANNOTATION)"}}, }, envs: []EnvVar{ { Name: "ANNOTATION", Value: "", }, }, expectedSubPath: "foo/$(POD_NAME)/$(ANNOTATION)", expectedMountPath: "", expectedOk: false, }, { name: "subpaths with missing env variables", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "foo/$(ODD_NAME)/$(POD_NAME)"}}, }, envs: []EnvVar{ { Name: "ODD_NAME", Value: "bar", }, }, expectedSubPath: "foo/$(ODD_NAME)/$(POD_NAME)", expectedMountPath: "", expectedOk: false, }, { name: "subpaths with empty expansion", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "$()"}}, }, expectedSubPath: "$()", expectedMountPath: "", expectedOk: false, }, { name: "subpaths with nested expandable envs", container: &v1.Container{ VolumeMounts: []v1.VolumeMount{{SubPathExpr: "$(POD_NAME$(ANNOTATION))"}}, }, envs: []EnvVar{ { Name: "POD_NAME", Value: "foo", }, { Name: "ANNOTATION", Value: "bar", }, }, expectedSubPath: "$(POD_NAME$(ANNOTATION))", expectedMountPath: "", expectedOk: false, }, } for _, tc := range cases { actualSubPath, err := ExpandContainerVolumeMounts(tc.container.VolumeMounts[0], tc.envs) ok := err == nil if e, a := tc.expectedOk, ok; !reflect.DeepEqual(e, a) { t.Errorf("%v: unexpected validation failure of subpath; expected %v, got %v", tc.name, e, a) } if !ok { // if ExpandContainerVolumeMounts returns an error, we don't care what the actualSubPath value is continue } if e, a := tc.expectedSubPath, actualSubPath; !reflect.DeepEqual(e, a) { t.Errorf("%v: unexpected subpath; expected %v, got %v", tc.name, e, a) } if e, a := tc.expectedMountPath, tc.container.VolumeMounts[0].MountPath; !reflect.DeepEqual(e, a) { t.Errorf("%v: unexpected mountpath; expected %v, got %v", tc.name, e, a) } } }
explode_data.jsonl/18700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2153 }
[ 2830, 3393, 38946, 18902, 16284, 16056, 3136, 2343, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 1060, 914, 198, 197, 53290, 260, 353, 85, 16, 33672, 198, 197, 57538, 82, 1060, 3056, 14359, 3962, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestString_ToUint8(t *testing.T) { tests := []struct { name string e String want Uint8 }{ {name: "", e: String{"123"}, want: Uint8{123}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := tt.e.ToUint8(); !got.Equal(tt.want) { t.Errorf("String.ToUint8() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/34781
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 703, 38346, 21570, 23, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 7727, 262, 923, 198, 197, 50780, 27883, 23, 198, 197, 59403, 197, 197, 47006, 25, 7342, 384, 25, 923, 4913, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMapProxy_ContainsKeyWithNilKey(t *testing.T) { _, err := mp.ContainsKey(nil) AssertErrorNotNil(t, err, "containsKey did not return an error for nil key") mp.Clear() }
explode_data.jsonl/56977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 2227, 16219, 62, 23805, 1592, 2354, 19064, 1592, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 28267, 27907, 340, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, 13372, 1592, 1521, 537, 470, 458, 1465, 369, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestExtents(t *testing.T) { if err := server.OpenTest(); err != nil { t.Fatalf("can't open test server: %v\n", err) } defer server.CloseTest() uuid, _ := initTestRepo() makeGrayscale(uuid, t, "grayscale") extents := `{ "MinPoint": [68, 127, 210], "MaxPoint": [1023, 4811, 12187] }` apiStr := fmt.Sprintf("%snode/%s/grayscale/extents", server.WebAPIPath, uuid) server.TestHTTP(t, "POST", apiStr, bytes.NewBufferString(extents)) apiStr = fmt.Sprintf("%snode/%s/grayscale/info", server.WebAPIPath, uuid) result := server.TestHTTP(t, "GET", apiStr, nil) var parsed = struct { Base struct { TypeName, Name string } Extended struct { BlockSize dvid.Point3d VoxelSize dvid.NdFloat32 VoxelUnits dvid.NdString MinPoint dvid.Point3d MaxPoint dvid.Point3d MinIndex dvid.Point3d MaxIndex dvid.Point3d } }{} if err := json.Unmarshal(result, &parsed); err != nil { t.Fatalf("Error parsing JSON response of new instance metadata: %v\n", err) } fmt.Printf("got: %s\n", string(result)) if !parsed.Extended.MinPoint.Equals(dvid.Point3d{68, 127, 210}) { t.Errorf("Bad MinPoint in new uint8blk instance: %s\n", parsed.Extended.MinPoint) } if !parsed.Extended.MaxPoint.Equals(dvid.Point3d{1023, 4811, 12187}) { t.Errorf("Bad MaxPoint in new uint8blk instance: %s\n", parsed.Extended.MaxPoint) } if !parsed.Extended.MinIndex.Equals(dvid.Point3d{2, 3, 6}) { t.Errorf("Bad MinIndex in new uint8blk instance: %s\n", parsed.Extended.MinIndex) } if !parsed.Extended.MaxIndex.Equals(dvid.Point3d{31, 150, 380}) { t.Errorf("Bad MaxIndex in new uint8blk instance: %s\n", parsed.Extended.MaxIndex) } }
explode_data.jsonl/31324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 719 }
[ 2830, 3393, 6756, 805, 1155, 353, 8840, 836, 8, 341, 743, 1848, 1669, 3538, 12953, 2271, 2129, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 4814, 944, 1787, 1273, 3538, 25, 1018, 85, 1699, 497, 1848, 340, 197, 532, 16867, 3538, 10421,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestOtherVersion(t *testing.T) { const want = header.IPv4Version + header.IPv6Version b := make([]byte, 1) b[0] = want << 4 if v := header.IPVersion(b); v != want { t.Fatalf("Bad version, want %v, got %v", want, v) } }
explode_data.jsonl/67079
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 11409, 5637, 1155, 353, 8840, 836, 8, 972, 4777, 1366, 284, 4247, 46917, 85, 19, 5637, 488, 4247, 46917, 85, 21, 5637, 319, 2233, 1669, 1281, 10556, 3782, 11, 220, 16, 1218, 2233, 58, 15, 60, 284, 1366, 1115, 220, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUserUpdateRoles(t *testing.T) { th := Setup().InitBasic() Client := th.BasicClient team := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "test@nowhere.com", Type: model.TEAM_OPEN} team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team) Client.Logout() user := &model.User{Email: "success+" + model.NewId() + "@simulator.amazonses.com", Nickname: "Corey Hulen", Password: "passwd1"} user = Client.Must(Client.CreateUser(user, "")).Data.(*model.User) LinkUserToTeam(user, team) store.Must(app.Srv.Store.User().VerifyEmail(user.Id)) user2 := &model.User{Email: "success+" + model.NewId() + "@simulator.amazonses.com", Nickname: "Corey Hulen", Password: "passwd1"} user2 = Client.Must(Client.CreateUser(user2, "")).Data.(*model.User) LinkUserToTeam(user2, team) store.Must(app.Srv.Store.User().VerifyEmail(user2.Id)) if _, err := Client.UpdateUserRoles(user.Id, ""); err == nil { t.Fatal("Should have errored, not logged in") } Client.Login(user2.Email, "passwd1") Client.SetTeamId(team.Id) if _, err := Client.UpdateUserRoles(user.Id, ""); err == nil { t.Fatal("Should have errored, not admin") } team2 := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "test@nowhere.com", Type: model.TEAM_OPEN} team2 = Client.Must(Client.CreateTeam(team2)).Data.(*model.Team) user3 := &model.User{Email: "success+" + model.NewId() + "@simulator.amazonses.com", Nickname: "Corey Hulen", Password: "passwd1"} user3 = Client.Must(Client.CreateUser(user3, "")).Data.(*model.User) LinkUserToTeam(user3, team2) store.Must(app.Srv.Store.User().VerifyEmail(user3.Id)) Client.Login(user3.Email, "passwd1") Client.SetTeamId(team2.Id) if _, err := Client.UpdateUserRoles(user2.Id, ""); err == nil { t.Fatal("Should have errored, wrong team") } Client.Login(user.Email, "passwd1") if _, err := Client.UpdateUserRoles("junk", ""); err == nil { t.Fatal("Should have errored, bad id") } if _, err := Client.UpdateUserRoles("system_admin", ""); err == nil { 
t.Fatal("Should have errored, we want to avoid this mistake") } if _, err := Client.UpdateUserRoles("12345678901234567890123456", ""); err == nil { t.Fatal("Should have errored, bad id") } if _, err := Client.UpdateUserRoles(user2.Id, "junk"); err == nil { t.Fatal("Should have errored, bad role") } }
explode_data.jsonl/13811
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 919 }
[ 2830, 3393, 1474, 4289, 25116, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 741, 71724, 1669, 270, 48868, 2959, 271, 197, 9196, 1669, 609, 2528, 65842, 90, 26456, 25, 330, 675, 497, 3988, 25, 330, 89, 9141, 276...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestCDecl(t *testing.T) { var buf [50]byte fmtp, _ := syscall.BytePtrFromString("%d %d %d") a, _, _ := GetDLL(t, "user32.dll").Proc("wsprintfA").Call( uintptr(unsafe.Pointer(&buf[0])), uintptr(unsafe.Pointer(fmtp)), 1000, 2000, 3000) if string(buf[:a]) != "1000 2000 3000" { t.Error("cdecl USER32.wsprintfA returns", a, "buf=", buf[:a]) } }
explode_data.jsonl/54656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 34, 21629, 1155, 353, 8840, 836, 8, 341, 2405, 6607, 508, 20, 15, 90184, 198, 1166, 76, 790, 11, 716, 1669, 49345, 32119, 5348, 44491, 4430, 67, 1018, 67, 1018, 67, 1138, 11323, 11, 8358, 716, 1669, 2126, 64187, 1155, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCompile(t *testing.T) { InitAddress("127.0.0.1") hm := host.New(1 << 30) gm := guest.New(1<<30, hm) proc := process.New(mheap.New(gm)) e := memEngine.NewTestEngine() for _, query := range querys { processQuery(query, e, proc) } }
explode_data.jsonl/22370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 46126, 1155, 353, 8840, 836, 8, 341, 98762, 4286, 445, 16, 17, 22, 13, 15, 13, 15, 13, 16, 1138, 9598, 76, 1669, 3468, 7121, 7, 16, 1115, 220, 18, 15, 340, 3174, 76, 1669, 8640, 7121, 7, 16, 2442, 18, 15, 11, 49362...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRelationshipToStringNoProperties(t *testing.T) { rel := neo4j.Relationship{ StartId: 10, EndId: 11, Id: 2, Type: `Something`, Props: map[string]interface{}{}, } actual := input.ToString(rel) expected := `[:Something]` if actual != expected { t.Fatalf(`expected '%v' but got '%v'`, expected, actual) } }
explode_data.jsonl/7077
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 50822, 5870, 2753, 7903, 1155, 353, 8840, 836, 8, 341, 197, 3748, 1669, 35082, 19, 73, 38939, 15471, 515, 197, 65999, 764, 25, 220, 16, 15, 345, 197, 38407, 764, 25, 256, 220, 16, 16, 345, 197, 67211, 25, 414, 220, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestControllerRef(t *testing.T) { f := fake.NewSimpleClientset( &api.ReplicationController{ ObjectMeta: metav1.ObjectMeta{ Name: "bar", Namespace: "foo", UID: "123456", }, TypeMeta: metav1.TypeMeta{ Kind: "ReplicationController", }, Spec: api.ReplicationControllerSpec{ Replicas: 1, Selector: map[string]string{"abc": "xyz"}, Template: &api.PodTemplateSpec{ Spec: api.PodSpec{ Containers: []api.Container{ {Image: "mytest-image:latest"}, }, }, }, }, }, &api.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "barpod", Namespace: "foo", Labels: map[string]string{"abc": "xyz"}, OwnerReferences: []metav1.OwnerReference{{Name: "bar", UID: "123456", Controller: boolPtr(true)}}, }, TypeMeta: metav1.TypeMeta{ Kind: "Pod", }, Spec: api.PodSpec{ Containers: []api.Container{ {Image: "mytest-image:latest"}, }, }, Status: api.PodStatus{ Phase: api.PodRunning, }, }, &api.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "orphan", Namespace: "foo", Labels: map[string]string{"abc": "xyz"}, }, TypeMeta: metav1.TypeMeta{ Kind: "Pod", }, Spec: api.PodSpec{ Containers: []api.Container{ {Image: "mytest-image:latest"}, }, }, Status: api.PodStatus{ Phase: api.PodRunning, }, }, &api.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "buzpod", Namespace: "foo", Labels: map[string]string{"abc": "xyz"}, OwnerReferences: []metav1.OwnerReference{{Name: "buz", UID: "654321", Controller: boolPtr(true)}}, }, TypeMeta: metav1.TypeMeta{ Kind: "Pod", }, Spec: api.PodSpec{ Containers: []api.Container{ {Image: "mytest-image:latest"}, }, }, Status: api.PodStatus{ Phase: api.PodRunning, }, }) d := ReplicationControllerDescriber{f} out, err := d.Describe("foo", "bar", printers.DescriberSettings{ShowEvents: false}) if err != nil { t.Errorf("unexpected error: %v", err) } if !strings.Contains(out, "1 Running") { t.Errorf("unexpected out: %s", out) } }
explode_data.jsonl/34952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1045 }
[ 2830, 3393, 2051, 3945, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 12418, 7121, 16374, 2959, 746, 1006, 197, 197, 5, 2068, 2817, 79, 1693, 2051, 515, 298, 23816, 12175, 25, 77520, 16, 80222, 515, 571, 21297, 25, 414, 330, 2257, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWrongJSONFormat(t *testing.T) { content := []byte(`{"DB_HOST": "127.0.0.1""DB_USERNAME": "root","DB_PASSWORD": "","DB_PORT": 3306,"DB_NAME": "test"}`) filename := "tempfile" if err := ioutil.WriteFile(filename, content, 0644); err != nil { log.Fatalf("WriteFile %s: %v", filename, err) } // clean up defer os.Remove(filename) // parse JSON format error _, err := ReadConfig(filename) assert.NotNil(t, err) }
explode_data.jsonl/81638
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 29185, 5370, 4061, 1155, 353, 8840, 836, 8, 341, 27751, 1669, 3056, 3782, 5809, 4913, 3506, 17213, 788, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 3014, 3506, 42696, 788, 330, 2888, 2198, 3506, 23059, 788, 330, 2198, 3506,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSnapshotsDirInitializingErrors(t *testing.T) { initKVLedgerProvider := func(conf *ledger.Config) error { cryptoProvider, err := sw.NewDefaultSecurityLevelWithKeystore(sw.NewDummyKeyStore()) assert.NoError(t, err) _, err = NewProvider( &lgr.Initializer{ DeployedChaincodeInfoProvider: &mock.DeployedChaincodeInfoProvider{}, MetricsProvider: &disabled.Provider{}, Config: conf, HashProvider: cryptoProvider, }, ) return err } t.Run("invalid-path", func(t *testing.T) { conf, cleanup := testConfig(t) defer cleanup() conf.SnapshotsConfig.RootDir = "./a-relative-path" err := initKVLedgerProvider(conf) require.EqualError(t, err, "invalid path: ./a-relative-path. The path for the snapshot dir is expected to be an absolute path") }) t.Run("snapshots final dir creation returns error", func(t *testing.T) { conf, cleanup := testConfig(t) defer cleanup() completedSnapshotsPath := CompletedSnapshotsPath(conf.SnapshotsConfig.RootDir) require.NoError(t, os.MkdirAll(filepath.Dir(completedSnapshotsPath), 0755)) require.NoError(t, ioutil.WriteFile(completedSnapshotsPath, []byte("some data"), 0644)) err := initKVLedgerProvider(conf) require.Error(t, err) require.Contains(t, err.Error(), "while creating the dir: "+completedSnapshotsPath) }) }
explode_data.jsonl/74142
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 527 }
[ 2830, 3393, 61871, 27634, 6184, 76775, 13877, 1155, 353, 8840, 836, 8, 341, 28248, 42, 30698, 291, 1389, 5179, 1669, 2915, 29879, 353, 50704, 10753, 8, 1465, 341, 197, 1444, 9444, 5179, 11, 1848, 1669, 2021, 7121, 3675, 15352, 4449, 235...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestObjectDelete(t *testing.T) { ctx := context.Background() c, rollback := makeConnectionWithObject(t) defer rollback() err := c.ObjectDelete(ctx, CONTAINER, OBJECT) if err != nil { t.Fatal(err) } testExistenceAfterDelete(t, c, CONTAINER, OBJECT) err = c.ObjectDelete(ctx, CONTAINER, OBJECT) if err != swift.ObjectNotFound { t.Fatal("Expecting Object not found", err) } }
explode_data.jsonl/12714
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 1190, 6435, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 11, 60414, 1669, 1281, 4526, 2354, 1190, 1155, 340, 16867, 60414, 741, 9859, 1669, 272, 8348, 6435, 7502, 11, 16120, 34521, 11, 39786, 340, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestIncomingPodsMetrics checks that each scheduling-queue operation
// increments scheduler_queue_incoming_pods_total with the expected
// event/queue label pair, for three test pods per scenario.
func TestIncomingPodsMetrics(t *testing.T) {
	timestamp := time.Now()
	metrics.Register()
	// Three queued pods sharing the same enqueue timestamp.
	var pInfos = make([]*framework.QueuedPodInfo, 0, 3)
	for i := 1; i <= 3; i++ {
		p := &framework.QueuedPodInfo{
			PodInfo: framework.NewPodInfo(&v1.Pod{
				ObjectMeta: metav1.ObjectMeta{
					Name:      fmt.Sprintf("test-pod-%d", i),
					Namespace: fmt.Sprintf("ns%d", i),
					UID:       types.UID(fmt.Sprintf("tp-%d", i)),
				},
			}),
			Timestamp: timestamp,
		}
		pInfos = append(pInfos, p)
	}
	// Each case applies a sequence of queue operations to all three pods and
	// compares the resulting counter values against `want` (prometheus
	// text-exposition format; whitespace must match what the collector emits).
	tests := []struct {
		name       string
		operations []operation
		want       string
	}{
		{
			name: "add pods to activeQ",
			operations: []operation{
				add,
			},
			want: ` scheduler_queue_incoming_pods_total{event="PodAdd",queue="active"} 3 `,
		},
		{
			name: "add pods to unschedulableQ",
			operations: []operation{
				addUnschedulablePodBackToUnschedulableQ,
			},
			want: ` scheduler_queue_incoming_pods_total{event="ScheduleAttemptFailure",queue="unschedulable"} 3 `,
		},
		{
			name: "add pods to unschedulableQ and then move all to backoffQ",
			operations: []operation{
				addUnschedulablePodBackToUnschedulableQ,
				moveAllToActiveOrBackoffQ,
			},
			want: ` scheduler_queue_incoming_pods_total{event="ScheduleAttemptFailure",queue="unschedulable"} 3 scheduler_queue_incoming_pods_total{event="UnschedulableTimeout",queue="backoff"} 3 `,
		},
		{
			name: "add pods to unschedulableQ and then move all to activeQ",
			operations: []operation{
				addUnschedulablePodBackToUnschedulableQ,
				moveClockForward,
				moveAllToActiveOrBackoffQ,
			},
			want: ` scheduler_queue_incoming_pods_total{event="ScheduleAttemptFailure",queue="unschedulable"} 3 scheduler_queue_incoming_pods_total{event="UnschedulableTimeout",queue="active"} 3 `,
		},
		{
			name: "make some pods subject to backoff and add them to backoffQ, then flush backoffQ",
			operations: []operation{
				addUnschedulablePodBackToBackoffQ,
				moveClockForward,
				flushBackoffQ,
			},
			want: ` scheduler_queue_incoming_pods_total{event="BackoffComplete",queue="active"} 3 scheduler_queue_incoming_pods_total{event="ScheduleAttemptFailure",queue="backoff"} 3 `,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			// Reset the counter and use a fake clock pinned to the pods'
			// timestamp so backoff/timeout transitions are deterministic.
			metrics.SchedulerQueueIncomingPods.Reset()
			queue := NewTestQueue(context.Background(), newDefaultQueueSort(), WithClock(clock.NewFakeClock(timestamp)))
			for _, op := range test.operations {
				for _, pInfo := range pInfos {
					op(queue, pInfo)
				}
			}
			metricName := metrics.SchedulerSubsystem + "_" + metrics.SchedulerQueueIncomingPods.Name
			if err := testutil.CollectAndCompare(metrics.SchedulerQueueIncomingPods, strings.NewReader(queueMetricMetadata+test.want), metricName); err != nil {
				t.Errorf("unexpected collecting result:\n%s", err)
			}
		})
	}
}
explode_data.jsonl/68205
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1210 }
[ 2830, 3393, 97564, 23527, 82, 27328, 1155, 353, 8840, 836, 8, 341, 3244, 4702, 1669, 882, 13244, 741, 2109, 13468, 19983, 741, 2405, 281, 38059, 284, 1281, 85288, 3794, 10003, 361, 3260, 23527, 1731, 11, 220, 15, 11, 220, 18, 340, 202...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNasTypeLocalTimeZone(t *testing.T) { for i, table := range LocalTimeZoneTestTable { t.Logf("Test Cnt:%d", i) a := nasType.NewLocalTimeZone(nasMessage.ConfigurationUpdateCommandLocalTimeZoneType) a.SetIei(table.in.GetIei()) a.SetTimeZone(table.in.Octet) assert.Equalf(t, table.out.Iei, a.Iei, "in(%v): out %v, actual %x", table.in.Iei, table.out.Iei, a.Iei) assert.Equalf(t, table.out.Octet, a.Octet, "in(%v): out %v, actual %x", table.in.Octet, table.out.Octet, a.Octet) } }
explode_data.jsonl/63596
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 45, 300, 929, 7319, 77786, 1155, 353, 8840, 836, 8, 1476, 2023, 600, 11, 1965, 1669, 2088, 8774, 77786, 2271, 2556, 341, 197, 3244, 98954, 445, 2271, 356, 406, 7533, 67, 497, 600, 340, 197, 11323, 1669, 17141, 929, 7121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDelete(t *testing.T) { dictionary := Dictionary{"test": "this is just a test"} t.Run("delete existing word", func(t *testing.T) { err := dictionary.Delete("test") assert.Equal(t, err, nil) _, err = dictionary.Search("test") assert.Equal(t, err, ErrNotFound) }) t.Run("delete unexisting word", func(t *testing.T) { err := dictionary.Delete("test") assert.Equal(t, err, ErrNotFound) }) }
explode_data.jsonl/54105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 6435, 1155, 353, 8840, 836, 8, 341, 2698, 3916, 1669, 10466, 4913, 1944, 788, 330, 574, 374, 1101, 264, 1273, 63159, 3244, 16708, 445, 4542, 6350, 3409, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 9859, 1669, 10997, 1887...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProcessorsForMetricSet_ProcessorsRead(t *testing.T) { r := NewRegister() source := NewLightModulesSource("testdata/lightmodules") procs, err := source.ProcessorsForMetricSet(r, "unpack", "withprocessors") require.NoError(t, err) require.NotNil(t, procs) require.Len(t, procs.List, 1) }
explode_data.jsonl/9721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 7423, 1087, 2461, 54310, 1649, 70241, 1087, 4418, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 8690, 741, 47418, 1669, 1532, 13911, 28201, 3608, 445, 92425, 76844, 11525, 1138, 197, 90087, 11, 1848, 1669, 2530, 29012, 1087,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEntGQL_buildTypes generates the GraphQL type definitions for the
// internal "todo" ent graph (with the Relay connection spec disabled) and
// compares the printed schema against a golden string.
func TestEntGQL_buildTypes(t *testing.T) {
	s, err := gen.NewStorage("sql")
	require.NoError(t, err)
	graph, err := entc.LoadGraph("./internal/todo/ent/schema", &gen.Config{
		Storage: s,
	})
	require.NoError(t, err)
	// Relay-style connections are off for this case; relaySpec below too.
	disableRelayConnection(graph)
	plugin := newSchemaGenerator()
	plugin.genSchema = true
	plugin.relaySpec = false
	schema := &ast.Schema{
		Types: make(map[string]*ast.Definition),
	}
	err = plugin.buildTypes(graph, schema)
	require.NoError(t, err)
	// Golden schema: compared verbatim against printSchema's output, so the
	// literal's exact formatting is significant.
	require.Equal(t, `type Category { id: ID! text: String! status: Status! config: CategoryConfig duration: Duration count: Uint64 strings: [String!] todos: [Todo!] } """Ordering options for Category connections""" input CategoryOrder { """The ordering direction.""" direction: OrderDirection! = ASC """The field by which to order Categories.""" field: CategoryOrderField! } """Properties by which Category connections can be ordered.""" enum CategoryOrderField { TEXT DURATION } """ CreateTodoInput is used for create Todo object. Input was generated by ent. """ input CreateTodoInput { status: Status! priority: Int text: String! parentID: ID childIDs: [ID!] categoryID: ID secretID: ID } type Group { id: ID! name: String! users: [User!] } type Query { groups: [Group!]! todos: [Todo!]! users: [User!]! } """Status is enum for the field status""" enum Status @goModel(model: "entgo.io/contrib/entgql/internal/todo/ent/todo.Status") { IN_PROGRESS COMPLETED } type Todo { id: ID! createdAt: Time! status: Status! priority: Int! text: String! categoryID: ID parent: Todo children: [Todo!] category: Category } """Ordering options for Todo connections""" input TodoOrder { """The ordering direction.""" direction: OrderDirection! = ASC """The field by which to order Todos.""" field: TodoOrderField! } """Properties by which Todo connections can be ordered.""" enum TodoOrderField { CREATED_AT STATUS PRIORITY TEXT } type User { id: ID! name: String! groups: [Group!] } `, printSchema(schema))
}
explode_data.jsonl/12083
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 721 }
[ 2830, 3393, 2250, 38, 3588, 20801, 4173, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 4081, 7121, 5793, 445, 3544, 1138, 17957, 35699, 1155, 11, 1848, 692, 66616, 11, 1848, 1669, 1197, 66, 13969, 11212, 13988, 10481, 93214, 14, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiLineParagraph(t *testing.T) { src := `a b c` expected := ` Doc Paragraph Text[a b c]` assertParse(t, expected, src) }
explode_data.jsonl/21249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 20358, 2460, 42165, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 1565, 64, 198, 197, 2233, 198, 220, 272, 3989, 42400, 1669, 22074, 9550, 198, 197, 42165, 198, 197, 49635, 15481, 293, 272, 60, 3989, 6948, 14463, 1155, 11, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPAC(t *testing.T) { reset, err := GetPAC() if err != nil { t.Fatal(err) } t.Cleanup(func() { if reset == "" { OffPAC() } else { OnPAC(reset) } }) pac := "http://127.0.0.1:1080" err = OnPAC(pac) if err != nil { t.Fatal(err) } got, err := GetPAC() if err != nil { t.Fatal(err) } if !reflect.DeepEqual(pac, got) { t.Fatalf("want %q, got %q", pac, got) } err = OffPAC() if err != nil { t.Fatal(err) } ori, err := GetPAC() if err != nil { t.Fatal(err) } if ori != "" { t.Fatalf("want empty, got %q", ori) } }
explode_data.jsonl/45935
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 47, 1706, 1155, 353, 8840, 836, 8, 341, 70343, 11, 1848, 1669, 2126, 47, 1706, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 3244, 727, 60639, 18552, 368, 341, 197, 743, 7585, 621, 1591, 341, 298...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestModeFromCurve(t *testing.T) { type args struct { curve elliptic.Curve } tests := []struct { name string args args want int }{ {name: "sm2", args: args{curve: gm.GetSm2Curve()}, want: crypto.Sm2p256v1}, {name: "r1", args: args{curve: elliptic.P256()}, want: crypto.Secp256r1}, {name: "k1", args: args{curve: secp256k1.S256()}, want: crypto.Secp256k1}, {name: "384", args: args{curve: elliptic.P384()}, want: crypto.Secp384r1}, {name: "521", args: args{curve: elliptic.P521()}, want: crypto.Secp521r1}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := ModeFromCurve(tt.args.curve); got != tt.want { t.Errorf("ModeFromCurve() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/45154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 3636, 3830, 31325, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 33209, 586, 77783, 292, 727, 73047, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 526, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestUpdate exercises the query builder's UPDATE path against a mock
// database: happy-path SQL generation (SET/WHERE/ORDER BY/LIMIT, conditional
// SetIf) followed by the error cases (no effective SET clause, LIMIT without
// SET, and expression builders that fail in ORDER BY, SET and WHERE).
func TestUpdate(t *testing.T) {
	db := newMockDatabase()
	_, _ = db.Update(Table1).Set(field1, field2).Where(trueExpression()).Execute()
	assertLastSql(t, "UPDATE `table1` SET `field1` = `field2` WHERE 1")
	_, _ = db.Update(Table1).
		Set(field1, 10).
		Where(field2.Equals(2)).
		OrderBy(field1.Desc()).
		Limit(2).
		Execute()
	assertLastSql(t, "UPDATE `table1` SET `field1` = 10 WHERE `field2` = 2 ORDER BY `field1` DESC LIMIT 2")
	// SetIf(false, ...) must drop that assignment from the generated SQL.
	_, _ = db.Update(Table1).
		SetIf(true, field1, 10).
		SetIf(false, field2, 10).
		Where(trueExpression()).
		Execute()
	assertLastSql(t, "UPDATE `table1` SET `field1` = 10 WHERE 1")
	// No effective SET clause at all is an error.
	if _, err := db.Update(Table1).
		SetIf(false, field1, 10).
		Where(trueExpression()).
		Execute(); err == nil {
		t.Error("should get error here")
	}
	// LIMIT without any SET is also an error.
	if _, err := db.Update(Table1).Limit(3).Execute(); err == nil {
		t.Error("should get error here")
	}
	// An expression whose builder fails must propagate its error regardless
	// of which clause it appears in.
	errExp := &expression{
		builder: func(scope scope) (string, error) {
			return "", errors.New("error")
		},
	}
	if _, err := db.Update(Table1).
		Set(field1, 10).
		OrderBy(orderBy{by: errExp}).
		Execute(); err == nil {
		t.Error("should get error here")
	}
	if _, err := db.Update(Table1).
		Set(field1, errExp).
		Where(trueExpression()).
		Execute(); err == nil {
		t.Error("should get error here")
	}
	if _, err := db.Update(Table1).
		Set(field1, 10).
		Where(errExp).
		Execute(); err == nil {
		t.Error("should get error here")
	}
}
explode_data.jsonl/81764
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 591 }
[ 2830, 3393, 4289, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 11571, 5988, 2822, 197, 6878, 716, 284, 2927, 16689, 67848, 16, 568, 1649, 15573, 16, 11, 2070, 17, 568, 9064, 3715, 9595, 6011, 17174, 741, 6948, 5842, 8269, 1155, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIsEligibleForHTTP3Upgrade table-drives IsEligibleForHTTP3Upgrade: only
// an HTTPS server whose TLS mode is ISTIO_MUTUAL, and with QUIC listeners
// enabled, is eligible for the HTTP/3 upgrade; every other combination below
// must be rejected.
func TestIsEligibleForHTTP3Upgrade(t *testing.T) {
	cases := []struct {
		name                string
		server              *v1alpha3.Server
		enableQUICListeners bool
		expected            bool
	}{
		{
			name: "EnableQUICListeners set to false",
			server: &v1alpha3.Server{
				Port: &v1alpha3.Port{
					Number:   80,
					Protocol: string(protocol.HTTP),
					Name:     "http",
				},
			},
			expected:            false,
			enableQUICListeners: false,
		},
		{
			name: "HTTP as transport protocol and EnableQUICListeners set to true",
			server: &v1alpha3.Server{
				Port: &v1alpha3.Port{
					Number:   80,
					Protocol: string(protocol.HTTP),
					Name:     "http",
				},
			},
			expected:            false,
			enableQUICListeners: true,
		},
		{
			name: "HTTPS traffic with passthrough ServerTLS mode and EnableQUICListeners set to true",
			server: &v1alpha3.Server{
				Port: &v1alpha3.Port{
					Number:   80,
					Protocol: string(protocol.HTTPS),
					Name:     "https",
				},
				Tls: &v1alpha3.ServerTLSSettings{Mode: v1alpha3.ServerTLSSettings_PASSTHROUGH},
			},
			enableQUICListeners: true,
			expected:            false,
		},
		{
			name: "HTTPS traffic with istio mutual ServerTLS mode and EnableQUICListeners set to true",
			server: &v1alpha3.Server{
				Port: &v1alpha3.Port{
					Number:   80,
					Protocol: string(protocol.HTTPS),
					Name:     "https",
				},
				Tls: &v1alpha3.ServerTLSSettings{Mode: v1alpha3.ServerTLSSettings_ISTIO_MUTUAL},
			},
			enableQUICListeners: true,
			expected:            true,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			// NOTE(review): mutates a global feature flag without restoring
			// it; acceptable here because every case sets it explicitly.
			features.EnableQUICListeners = tc.enableQUICListeners
			actual := IsEligibleForHTTP3Upgrade(tc.server)
			if actual != tc.expected {
				t.Errorf("IsEligibleForHTTP3Upgrade(%s) => %t, want %t", tc.server, actual, tc.expected)
			}
		})
	}
}
explode_data.jsonl/61931
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 898 }
[ 2830, 3393, 3872, 6582, 343, 1238, 2461, 9230, 18, 43861, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 394, 914, 198, 197, 41057, 1060, 353, 85, 16, 7141, 18, 22997, 198, 197, 197, 12552, 5757, 1317, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetHTTPStatus(t *testing.T) { mockHTTP := NewTestClient(func(req *http.Request) *http.Response { reqStatusCode := strings.Split(req.URL.String(), "status/") resStatusCode, _ := strconv.Atoi(reqStatusCode[1]) return &http.Response{ StatusCode: resStatusCode, Body: ioutil.NopCloser(bytes.NewBufferString("Some Payload")), Header: make(http.Header), } }) c := Client{hc: *mockHTTP, maxRetries: 1} statusCodes := []int{200, 201, 202, 203, 204, 205, 206, 304, 307, 308, 400, 401, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 421, 426, 428, 429, 430, 431, 451, 500, 501, 502, 503, 504, 505} // 100, 101, 301, 302, 303 not tested var wg sync.WaitGroup for _, statusCode := range statusCodes { wg.Add(1) func(actualStatusCode int) { url := fmt.Sprintf("https://mock/status/%v", actualStatusCode) statusCode := c.GetHTTPStatus(url) if statusCode != actualStatusCode { t.Errorf("Expected status code %v, got %v\n", actualStatusCode, statusCode) } wg.Done() }(statusCode) } wg.Wait() }
explode_data.jsonl/11460
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 1949, 9230, 2522, 1155, 353, 8840, 836, 8, 341, 77333, 9230, 1669, 1532, 2271, 2959, 18552, 6881, 353, 1254, 9659, 8, 353, 1254, 12574, 341, 197, 24395, 15872, 1669, 9069, 19823, 6881, 20893, 6431, 1507, 330, 2829, 53006, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOptions covers parseOptions: both the "key value" and "key=value"
// forms, rejection of unknown, incomplete and invalid options, and every
// accepted ForwardAgent value (yes/no/local).
func TestOptions(t *testing.T) {
	t.Parallel()
	tests := []struct {
		desc        string
		inOptions   []string
		assertError require.ErrorAssertionFunc
		outOptions  Options
	}{
		// Generic option-parsing tests
		{
			desc:        "Space Delimited",
			inOptions:   []string{"AddKeysToAgent yes"},
			assertError: require.NoError,
			outOptions: Options{
				AddKeysToAgent:        true,
				ForwardAgent:          client.ForwardAgentNo,
				RequestTTY:            false,
				StrictHostKeyChecking: true,
			},
		},
		{
			desc:        "Equals Sign Delimited",
			inOptions:   []string{"AddKeysToAgent=yes"},
			assertError: require.NoError,
			outOptions: Options{
				AddKeysToAgent:        true,
				ForwardAgent:          client.ForwardAgentNo,
				RequestTTY:            false,
				StrictHostKeyChecking: true,
			},
		},
		{
			desc:        "Invalid key",
			inOptions:   []string{"foo foo"},
			assertError: require.Error,
			outOptions:  Options{},
		},
		{
			desc:        "Incomplete option",
			inOptions:   []string{"AddKeysToAgent"},
			assertError: require.Error,
			outOptions:  Options{},
		},
		// AddKeysToAgent Tests
		{
			desc:        "AddKeysToAgent Invalid Value",
			inOptions:   []string{"AddKeysToAgent foo"},
			assertError: require.Error,
			outOptions:  Options{},
		},
		// ForwardAgent Tests
		{
			desc:        "Forward Agent Yes",
			inOptions:   []string{"ForwardAgent yes"},
			assertError: require.NoError,
			outOptions: Options{
				AddKeysToAgent:        true,
				ForwardAgent:          client.ForwardAgentYes,
				RequestTTY:            false,
				StrictHostKeyChecking: true,
			},
		},
		{
			desc:        "Forward Agent No",
			inOptions:   []string{"ForwardAgent no"},
			assertError: require.NoError,
			outOptions: Options{
				AddKeysToAgent:        true,
				ForwardAgent:          client.ForwardAgentNo,
				RequestTTY:            false,
				StrictHostKeyChecking: true,
			},
		},
		{
			desc:        "Forward Agent Local",
			inOptions:   []string{"ForwardAgent local"},
			assertError: require.NoError,
			outOptions: Options{
				AddKeysToAgent:        true,
				ForwardAgent:          client.ForwardAgentLocal,
				RequestTTY:            false,
				StrictHostKeyChecking: true,
			},
		},
		{
			desc:        "Forward Agent InvalidValue",
			inOptions:   []string{"ForwardAgent potato"},
			assertError: require.Error,
			outOptions:  Options{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.desc, func(t *testing.T) {
			options, err := parseOptions(tt.inOptions)
			tt.assertError(t, err)
			// Compare field by field for a precise failure message per field.
			require.Equal(t, tt.outOptions.AddKeysToAgent, options.AddKeysToAgent)
			require.Equal(t, tt.outOptions.ForwardAgent, options.ForwardAgent)
			require.Equal(t, tt.outOptions.RequestTTY, options.RequestTTY)
			require.Equal(t, tt.outOptions.StrictHostKeyChecking, options.StrictHostKeyChecking)
		})
	}
}
explode_data.jsonl/21930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1282 }
[ 2830, 3393, 3798, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 1235, 341, 197, 41653, 286, 914, 198, 197, 17430, 3798, 256, 3056, 917, 198, 197, 6948, 1454, 1373, 6141, 68639, 9626, 198, 197, 13967, 3798, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_instance_convertSecurityGroups checks that convertSecurityGroups
// extracts only the group IDs (as []*string, dropping the names) from an
// instance's security groups, for zero, one, and multiple groups.
func Test_instance_convertSecurityGroups(t *testing.T) {
	tests := []struct {
		name string
		inst instance
		want []*string
	}{
		{
			name: "missing SGs",
			inst: instance{
				Instance: &ec2.Instance{
					SecurityGroups: []*ec2.GroupIdentifier{},
				},
			},
			want: []*string{},
		},
		{
			name: "single SG",
			inst: instance{
				Instance: &ec2.Instance{
					SecurityGroups: []*ec2.GroupIdentifier{{
						GroupId:   aws.String("sg-123"),
						GroupName: aws.String("foo"),
					}},
				},
			},
			want: []*string{aws.String("sg-123")},
		},
		{
			name: "multiple SGs",
			inst: instance{
				Instance: &ec2.Instance{
					SecurityGroups: []*ec2.GroupIdentifier{{
						GroupId:   aws.String("sg-123"),
						GroupName: aws.String("foo"),
					}, {
						GroupId:   aws.String("sg-456"),
						GroupName: aws.String("bar"),
					},
					},
				},
			},
			want: []*string{aws.String("sg-123"), aws.String("sg-456")},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// DeepEqual follows the string pointers; spew.Sdump renders the
			// pointed-to values so failures are readable.
			if got := tt.inst.convertSecurityGroups(); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("instance.convertSecurityGroups() = %v, want %v", spew.Sdump(got), spew.Sdump(tt.want))
			}
		})
	}
}
explode_data.jsonl/55206
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 597 }
[ 2830, 3393, 11904, 34910, 15352, 22173, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 88656, 2867, 198, 197, 50780, 29838, 917, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 30616, 29626,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestVerifyChain validates certificate chains against a TRC via
// cppki.VerifyChain: the happy paths (client and issuer chains) plus a
// battery of failure modes (missing/zero/invalid/wrong TRC, truncated chain,
// current time outside the validity window, and a mismatched intermediate).
func TestVerifyChain(t *testing.T) {
	// testdata files generated with scion-pki:
	/*
		scion-pki testcrypto -t topology/default.topo -o gen
		cp gen/ISD1/ISD1-B1-S1.trc go/lib/scrypto/cppki/testdata/verifychain
		cp gen/ISD2/ISD2-B1-S1.trc go/lib/scrypto/cppki/testdata/verifychain
		cp gen/ISD1/ASff00_0_111/certs/ISD1-ASff00_0_111.pem \
			go/lib/scrypto/cppki/testdata/verifychain
		cp gen/ISD1/ASff00_0_110/certs/ISD1-ASff00_0_110.pem \
			go/lib/scrypto/cppki/testdata/verifychain
		cp gen/ISD2/ASff00_0_210/certs/ISD2-ASff00_0_210.pem \
			go/lib/scrypto/cppki/testdata/verifychain
	*/
	trc := loadTRC(t, "testdata/verifychain/ISD1-B1-S1.trc")
	trc2 := loadTRC(t, "testdata/verifychain/ISD2-B1-S1.trc")
	clientChain := xtest.LoadChain(t, "testdata/verifychain/ISD1-ASff00_0_111.pem")
	issuerChain := xtest.LoadChain(t, "testdata/verifychain/ISD1-ASff00_0_110.pem")
	isd2Chain := xtest.LoadChain(t, "testdata/verifychain/ISD2-ASff00_0_210.pem")
	// ISD1 client cert paired with an ISD2 intermediate — must not verify.
	invalidIntermediate := append([]*x509.Certificate{}, clientChain[0], isd2Chain[1])
	// Copy of trc with an extra empty certificate appended, making it invalid.
	invalidTRC := trc
	invalidTRC.TRC.Certificates = append(trc.TRC.Certificates, &x509.Certificate{})
	testCases := map[string]struct {
		chain     []*x509.Certificate
		opts      cppki.VerifyOptions
		assertErr assert.ErrorAssertionFunc
	}{
		"valid client": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				TRC:         &trc.TRC,
				CurrentTime: clientChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.NoError,
		},
		"valid issuer": {
			chain: issuerChain,
			opts: cppki.VerifyOptions{
				TRC:         &trc.TRC,
				CurrentTime: issuerChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.NoError,
		},
		"missing TRC": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				CurrentTime: clientChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
		"zero TRC": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				TRC:         &cppki.TRC{},
				CurrentTime: clientChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
		"empty roots in TRC": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				TRC:         &cppki.TRC{Quorum: 2},
				CurrentTime: clientChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
		"invalid chain": {
			chain: clientChain[:1],
			opts: cppki.VerifyOptions{
				TRC:         &trc.TRC,
				CurrentTime: clientChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
		"invalid TRC": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				TRC:         &invalidTRC.TRC,
				CurrentTime: clientChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
		"invalid time before": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				TRC:         &trc.TRC,
				CurrentTime: clientChain[0].NotBefore.Add(-time.Hour),
			},
			assertErr: assert.Error,
		},
		"invalid time after": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				TRC:         &trc.TRC,
				CurrentTime: clientChain[0].NotAfter.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
		"wrong TRC": {
			chain: clientChain,
			opts: cppki.VerifyOptions{
				TRC:         &trc2.TRC,
				CurrentTime: clientChain[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
		"invalid intermediate": {
			chain: invalidIntermediate,
			opts: cppki.VerifyOptions{
				TRC:         &trc.TRC,
				CurrentTime: invalidIntermediate[0].NotBefore.Add(time.Hour),
			},
			assertErr: assert.Error,
		},
	}
	for name, tc := range testCases {
		// Re-bind the range variables so each parallel subtest captures its
		// own copy (required on Go versions before 1.22 loop-var semantics).
		name, tc := name, tc
		t.Run(name, func(t *testing.T) {
			t.Parallel()
			err := cppki.VerifyChain(tc.chain, tc.opts)
			tc.assertErr(t, err)
		})
	}
}
explode_data.jsonl/9856
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1800 }
[ 2830, 3393, 32627, 18837, 1155, 353, 8840, 836, 8, 341, 197, 322, 1273, 4602, 3542, 7907, 448, 1136, 290, 2268, 6642, 510, 197, 3284, 197, 1903, 22613, 2268, 6642, 1273, 35772, 481, 83, 44882, 28989, 8772, 78, 481, 78, 4081, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapProxy_ExecuteOnKey(t *testing.T) { config := hazelcast.NewHazelcastConfig() expectedValue := "newValue" processor := newSimpleEntryProcessor(expectedValue) config.SerializationConfig().AddDataSerializableFactory(processor.identifiedFactory.factoryId, processor.identifiedFactory) client, _ := hazelcast.NewHazelcastClientWithConfig(config) mp2, _ := client.GetMap("testMap2") testKey := "testingKey1" testValue := "testingValue" mp2.Put(testKey, testValue) value, err := mp2.ExecuteOnKey(testKey, processor) AssertEqualf(t, err, value, expectedValue, "ExecuteOnKey failed.") newValue, err := mp2.Get("testingKey1") AssertEqualf(t, err, newValue, expectedValue, "ExecuteOnKey failed") mp.Clear() client.Shutdown() }
explode_data.jsonl/57037
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 2227, 16219, 83453, 1925, 1592, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 20144, 301, 3829, 7121, 39, 68326, 3829, 2648, 741, 42400, 1130, 1669, 330, 52830, 698, 197, 29474, 1669, 501, 16374, 5874, 22946, 15253, 1130, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReleaseName(t *testing.T) { cases := []struct { name, input string }{ { "short", "some-release-name", }, { "long", "this-is-an-exceedingly-long-release-name-that-would-fail-installation", }, { "short unicode", "⌘日本語-name", }, { "long unicode", "⌘日本語-⌘日本語-⌘日本語-⌘日本語-⌘日本語-⌘日本語-⌘日本語-⌘日本語-⌘日本語-⌘日本語-long-name", }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { out := ReleaseName(c.input) if i := utf8.RuneCountInString(out); i > 53 { t.Fatalf("length exceeds max of 53: %v", i) } if out == "" { t.Fatalf("blank output") } }) } }
explode_data.jsonl/69449
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 16077, 675, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 11, 1946, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 1, 8676, 497, 330, 14689, 44724, 11494, 756, 197, 197, 1583, 197, 197, 515...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTopkGroupByKeyTag(t *testing.T) { // Build the processor var topk TopK topk = *New() topk.Period = oneSecondDuration topk.K = 3 topk.Aggregation = "sum" topk.GroupBy = []string{"tag1", "tag3"} topk.AddGroupByTag = "gbt" // Get the input input := deepCopy(MetricsSet2) // Generate the answer changeSet := map[int]metricChange{ 2: {newTags: tagList(tag{"gbt", "metric1&tag1=TWO&tag3=SIX&"})}, 3: {newTags: tagList(tag{"gbt", "metric2&tag1=ONE&tag3=THREE&"})}, 4: {newTags: tagList(tag{"gbt", "metric2&tag1=TWO&tag3=SEVEN&"})}, 5: {newTags: tagList(tag{"gbt", "metric2&tag1=TWO&tag3=SEVEN&"})}, } answer := generateAns(input, changeSet) // Run the test runAndCompare(&topk, input, answer, "GroupByKeyTag test", t) }
explode_data.jsonl/64782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 5366, 74, 2808, 67749, 5668, 1155, 353, 8840, 836, 8, 341, 197, 322, 7854, 279, 17654, 198, 2405, 1909, 74, 6909, 42, 198, 42118, 74, 284, 353, 3564, 741, 42118, 74, 14834, 3127, 284, 825, 15666, 12945, 198, 42118, 74, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShouldAlwaysHaveUglyURLs(t *testing.T) { t.Parallel() for _, uglyURLs := range []bool{true, false} { doTestShouldAlwaysHaveUglyURLs(t, uglyURLs) } }
explode_data.jsonl/16656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 14996, 37095, 12116, 52, 22945, 3144, 82, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 8358, 27261, 3144, 82, 1669, 2088, 3056, 2641, 90, 1866, 11, 895, 92, 341, 197, 19935, 2271, 14996, 37095, 12116, 52, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestConsumerStore(t *testing.T) { t.Run("Create/Update:NilChannel", func(t *testing.T) { repo := getConsumerRepo() sampleConsumer, err := data.NewConsumer(channel1, failedDeleteTestConsumerID, successfulGetTestToken, callbackURL) assert.Nil(t, err) sampleConsumer.ConsumingFrom = nil sampleConsumer.QuickFix() _, err = repo.Store(sampleConsumer) assert.NotNil(t, err) }) t.Run("Create/Update:NonExistingChannel", func(t *testing.T) { repo := getConsumerRepo() sampleChannel, err := data.NewChannel(nonExistingGetTestChannelID, successfulGetTestToken) assert.Nil(t, err) sampleChannel.QuickFix() sampleConsumer, err := data.NewConsumer(sampleChannel, failedDeleteTestConsumerID, successfulGetTestToken, callbackURL) assert.Nil(t, err) errConsumer, err := repo.Store(sampleConsumer) assert.NotNil(t, err) assert.NotNil(t, errConsumer) }) t.Run("Create:InvalidState", func(t *testing.T) { t.Parallel() consumer, err := data.NewConsumer(channel1, successfulInsertTestConsumerID, successfulGetTestToken, callbackURL) assert.Nil(t, err) assert.True(t, consumer.IsInValidState()) consumer.Token = "" assert.False(t, consumer.IsInValidState()) repo := getConsumerRepo() _, err = repo.Store(consumer) assert.Equal(t, ErrInvalidStateToSave, err) }) t.Run("Create:InsertionFailed", func(t *testing.T) { t.Parallel() db, mock, _ := sqlmock.New() expectedErr := errors.New("Insertion failed") mockChannelRepo := new(MockChannelRepository) mockChannelRepo.On("Get", channel1.ChannelID).Return(channel1, nil) mock.ExpectBegin() mock.ExpectExec("INSERT INTO").WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg()).WillReturnError(expectedErr) mock.ExpectRollback() mock.MatchExpectationsInOrder(true) repo := &ConsumerDBRepository{db: db, channelRepository: mockChannelRepo} consumer, err := data.NewConsumer(channel1, successfulInsertTestConsumerID, successfulGetTestToken, callbackURL) assert.Nil(t, err) 
assert.Equal(t, channel1.ChannelID, consumer.GetChannelIDSafely()) t.Log(consumer.GetChannelIDSafely()) _, err = repo.Store(consumer) mockChannelRepo.AssertExpectations(t) assert.Equal(t, expectedErr, err) assert.Nil(t, mock.ExpectationsWereMet()) }) t.Run("Create:Success", func(t *testing.T) { t.Parallel() consumer, _ := data.NewConsumer(channel1, successfulInsertTestConsumerID, successfulGetTestToken, callbackURL) repo := getConsumerRepo() _, err := repo.Store(consumer) assert.Nil(t, err) newConsumer, err := repo.Get(channel1.ChannelID, successfulInsertTestConsumerID) assert.Nil(t, err) assert.True(t, newConsumer.IsInValidState()) assert.Equal(t, consumer.ID, newConsumer.ID) assert.Equal(t, consumer.Name, newConsumer.Name) assert.Equal(t, consumer.ConsumerID, newConsumer.ConsumerID) assert.Equal(t, consumer.Token, newConsumer.Token) }) t.Run("Update:NothingToChange", func(t *testing.T) { t.Parallel() consumer, _ := data.NewConsumer(channel2, noChangeUpdateTestConsumerID, successfulGetTestToken, callbackURL) repo := getConsumerRepo() _, err := repo.Store(consumer) assert.Nil(t, err) failedUpdate, err := repo.Store(consumer) assert.Nil(t, err) assert.True(t, consumer.CreatedAt.Equal(failedUpdate.CreatedAt)) assert.True(t, consumer.UpdatedAt.Equal(failedUpdate.UpdatedAt)) }) t.Run("Update:InvalidState", func(t *testing.T) { t.Parallel() consumer, _ := data.NewConsumer(channel2, invalidStateUpdateTestConsumerID, successfulGetTestToken, callbackURL) repo := getConsumerRepo() _, err := repo.Store(consumer) assert.Nil(t, err) consumer.Token = "" _, err = repo.Store(consumer) assert.NotNil(t, err) assert.Equal(t, ErrInvalidStateToSave, err) }) t.Run("Update:UpdateFailed", func(t *testing.T) { t.Parallel() db, mock, _ := sqlmock.New() expectedErr := errors.New("Update failed") mockChannelRepo := new(MockChannelRepository) mockChannelRepo.On("Get", channel2.ChannelID).Return(channel2, nil) consumer, _ := data.NewConsumer(channel2, dbErrUpdateTestConsumerID, 
successfulGetTestToken, callbackURL) consumer.QuickFix() rows := sqlmock.NewRows([]string{"id", "consumerId", "channelId", "name", "token", "callbackUrl", "createdAt", "updatedAt"}).AddRow(consumer.ID, consumer.ConsumerID, channel2.ChannelID, consumer.Name, consumer.Token, consumer.CallbackURL, consumer.CreatedAt, consumer.UpdatedAt) mock.ExpectQuery(consumerSelectRowCommonQuery+" channelId like").WithArgs(channel2.ChannelID, dbErrUpdateTestConsumerID).WillReturnRows(rows).WillReturnError(nil) mock.ExpectBegin() mock.ExpectExec("UPDATE consumer").WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), dbErrUpdateTestConsumerID, channel2.ChannelID).WillReturnError(expectedErr) mock.ExpectRollback() mock.MatchExpectationsInOrder(true) repo := &ConsumerDBRepository{db: db, channelRepository: mockChannelRepo} consumer.Token = "c" _, err := repo.Store(consumer) assert.Equal(t, expectedErr, err) assert.Nil(t, mock.ExpectationsWereMet()) }) t.Run("Update:NoRowChanged", func(t *testing.T) { t.Parallel() // uses mock db, mock, _ := sqlmock.New() consumer, _ := data.NewConsumer(channel2, dbErrUpdateTestConsumerID, successfulGetTestToken, callbackURL) consumer.QuickFix() mockChannelRepo := new(MockChannelRepository) mockChannelRepo.On("Get", channel2.ChannelID).Return(channel2, nil) rows := sqlmock.NewRows([]string{"id", "consumerId", "channelId", "name", "token", "callbackUrl", "createdAt", "updatedAt"}).AddRow(consumer.ID, consumer.ConsumerID, channel2.ChannelID, consumer.Name, consumer.Token, consumer.CallbackURL, consumer.CreatedAt, consumer.UpdatedAt) mock.ExpectQuery(consumerSelectRowCommonQuery+" channelId like").WithArgs(channel2.ChannelID, dbErrUpdateTestConsumerID).WillReturnRows(rows).WillReturnError(nil) result := sqlmock.NewResult(1, 0) mock.ExpectBegin() mock.ExpectExec("UPDATE consumer").WithArgs(sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), sqlmock.AnyArg(), dbErrUpdateTestConsumerID, 
channel2.ChannelID).WillReturnResult(result).WillReturnError(nil) mock.ExpectRollback() mock.MatchExpectationsInOrder(true) repo := &ConsumerDBRepository{db: db, channelRepository: mockChannelRepo} consumer.Token = "c" _, err := repo.Store(consumer) assert.Equal(t, ErrNoRowsUpdated, err) assert.Nil(t, mock.ExpectationsWereMet()) }) t.Run("Update:Success", func(t *testing.T) { t.Parallel() consumer, _ := data.NewConsumer(channel1, successfulUpdateTestConsumerID, "oldtoken", callbackURL) repo := getConsumerRepo() repo.Store(consumer) consumer.Token = successfulGetTestToken updatedConsumer, err := repo.Store(consumer) assert.Nil(t, err) assert.Equal(t, successfulGetTestToken, updatedConsumer.Token) updatedConsumer, err = repo.Get(channel1.ChannelID, successfulUpdateTestConsumerID) assert.Nil(t, err) assert.Equal(t, successfulGetTestToken, updatedConsumer.Token) assert.True(t, consumer.UpdatedAt.Before(updatedConsumer.UpdatedAt)) }) }
explode_data.jsonl/64202
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2557 }
[ 2830, 3393, 29968, 6093, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 4021, 14, 4289, 25, 19064, 9629, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 17200, 5368, 1669, 633, 29968, 25243, 741, 197, 1903, 1516, 29968, 11, 1848, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNeedsSetupDev(t *testing.T) { config := &configs.Config{ Mounts: []*configs.Mount{ { Device: "bind", Source: "/dev", Destination: "/dev", }, }, } if needsSetupDev(config) { t.Fatal("expected needsSetupDev to be false, got true") } }
explode_data.jsonl/1882
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 65064, 21821, 14592, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 609, 53978, 10753, 515, 197, 9209, 629, 82, 25, 29838, 53978, 1321, 629, 515, 298, 197, 515, 571, 197, 6985, 25, 414, 330, 7666, 756, 571, 197, 3608, 25, 41...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGrpcByteStreamInvalidReadLimit(t *testing.T) { testBlobSize := int64(maxChunkSize) testBlob, testBlobHash := testutils.RandomDataAndHash(testBlobSize) testBlobDigest := pb.Digest{ Hash: testBlobHash, SizeBytes: int64(len(testBlob)), } // Check that non-zero ReadLimit for compressed-blobs returns // InvalidArgument. bsrReq := bytestream.ReadRequest{ ResourceName: fmt.Sprintf("ignoredinstance/compressed-blobs/zstd/%s/%d", testBlobDigest.Hash, len(testBlob)), ReadLimit: 1024, } bsrc, err := bsClient.Read(ctx, &bsrReq) if err != nil { t.Fatal(err) } _, err = bsrc.Recv() if err == nil || err == io.EOF { t.Fatal("Expected error due to non-zero ReadLimit for compressed-blobs read") } statusErr, ok := status.FromError(err) if !ok { t.Errorf("Expected a grpc status error, got something else: %v", err) return } if statusErr.Code() != codes.InvalidArgument { t.Fatal("Expected InvalidArgument response, got", err) } }
explode_data.jsonl/61984
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 387 }
[ 2830, 3393, 6464, 3992, 7153, 3027, 7928, 4418, 16527, 1155, 353, 8840, 836, 8, 341, 18185, 37985, 1695, 1669, 526, 21, 19, 8739, 28304, 1695, 340, 18185, 37985, 11, 1273, 37985, 6370, 1669, 1273, 6031, 26709, 1043, 3036, 6370, 8623, 37...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetRoomString(t *testing.T) { initTestWorldInstance(true) s := worldManagerInstance.PrintRoom(true) assert.Equal(t, constants.NO_ROOMS, s) initTestWorldInstance(false) s = worldManagerInstance.PrintRoom(true) assert.Contains(t, s, testObjects.TestGame.StartingRoom.Desc) }
explode_data.jsonl/60307
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 1949, 14003, 703, 1155, 353, 8840, 836, 8, 341, 28248, 2271, 10134, 2523, 3715, 340, 1903, 1669, 1879, 2043, 2523, 7918, 14003, 3715, 340, 6948, 12808, 1155, 11, 18021, 42883, 59966, 50, 11, 274, 340, 28248, 2271, 10134, 252...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDates_Validate(t *testing.T) { t.Parallel() tests := []struct { name string input internal.Dates withErr bool }{ { "OK: Start.IsZero", internal.Dates{ Due: time.Now(), }, false, }, { "OK: Due.IsZero", internal.Dates{ Start: time.Now(), }, false, }, { "OK: Start < Due", internal.Dates{ Start: time.Now(), Due: time.Now().Add(2 * time.Hour), }, false, }, { "ERR: Start > Due", internal.Dates{ Start: time.Now().Add(2 * time.Hour), Due: time.Now(), }, true, }, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() actualErr := tt.input.Validate() if (actualErr != nil) != tt.withErr { t.Fatalf("expected error %t, got %s", tt.withErr, actualErr) } var ierr *internal.Error if tt.withErr && !errors.As(actualErr, &ierr) { t.Fatalf("expected %T error, got %T", ierr, actualErr) } }) } }
explode_data.jsonl/74768
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 502 }
[ 2830, 3393, 55238, 62, 17926, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 22427, 256, 5306, 909, 973, 198, 197, 46948, 7747, 1807, 198, 197, 59403, 197, 197, 515...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestJsonSamples(t *testing.T) { bidder, buildErr := Builder(openrtb_ext.BidderPubmatic, config.Adapter{ Endpoint: "https://hbopenbid.pubmatic.com/translator?source=prebid-server"}) if buildErr != nil { t.Fatalf("Builder returned unexpected error %v", buildErr) } adapterstest.RunJSONBidderTest(t, "pubmatictest", bidder) }
explode_data.jsonl/77910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 5014, 39571, 1155, 353, 8840, 836, 8, 341, 2233, 307, 1107, 11, 1936, 7747, 1669, 20626, 30981, 3342, 65, 9927, 1785, 307, 1107, 29162, 37244, 11, 2193, 34190, 515, 197, 197, 27380, 25, 330, 2428, 1110, 49039, 2508, 20648, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMissingExternalBuilderName(t *testing.T) { defer viper.Reset() viper.Set("peer.address", "localhost:8080") viper.Set("chaincode.externalBuilders", &[]ExternalBuilder{ { Path: "relative/plugin_dir", }, }) _, err := GlobalConfig() require.EqualError(t, err, "external builder at path relative/plugin_dir has no name attribute") }
explode_data.jsonl/71578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 25080, 25913, 3297, 675, 1155, 353, 8840, 836, 8, 341, 16867, 95132, 36660, 741, 5195, 12858, 4202, 445, 16537, 13792, 497, 330, 8301, 25, 23, 15, 23, 15, 1138, 5195, 12858, 4202, 445, 8819, 1851, 64227, 62306, 497, 609, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPreparePublicShares(t *testing.T) { curve := btcec.S256() pk, sharesMap, err := NewDealerShares(curve, 2, 3, nil) if err != nil { t.Errorf("NewDealerShares failed: %v", err) } if pk == nil { t.Errorf("NewDealerShares public key is nil") } if len(sharesMap) != 3 { t.Errorf("NewDealerShares didn't produce enough sharesMap") } publicShares, err := PreparePublicShares(sharesMap) if err != nil { t.Errorf("PreparePublicShares failed: %v", err) } if len(publicShares) != len(sharesMap) { t.Errorf("len(publicShares) != len(sharesMap): %d != %d", len(publicShares), len(sharesMap)) } for i := range publicShares { require.Equal(t, publicShares[i].Point.X, sharesMap[i].Point.X) require.Equal(t, publicShares[i].Point.Y, sharesMap[i].Point.Y) } }
explode_data.jsonl/73924
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 312 }
[ 2830, 3393, 50590, 12676, 73015, 1155, 353, 8840, 836, 8, 341, 33209, 586, 1669, 19592, 68955, 808, 17, 20, 21, 741, 3223, 74, 11, 13248, 2227, 11, 1848, 1669, 1532, 93909, 73015, 17591, 586, 11, 220, 17, 11, 220, 18, 11, 2092, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestEmptyTlsCertHash(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() config := mockfab.NewMockEndpointConfig(mockCtrl) emptyCert := tls.Certificate{} config.EXPECT().TLSClientCerts().Return([]tls.Certificate{emptyCert}) tlsCertHash, err := TLSCertHash(config) assert.NotNil(t, tlsCertHash) assert.Nil(t, err) }
explode_data.jsonl/64484
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 3522, 51, 4730, 36934, 6370, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 741, 25873, 1669, 7860, 36855, 7121, 11571, 27380, 2648, 30389, 15001, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormat(t *testing.T) { type tcase struct { value uint64 output string } cases := []tcase{ {0, "0 B"}, {5, "5 B"}, {20, "20 B"}, {100, "100 B"}, {500, "500 B"}, {999, "999 B"}, {1000, "1.00 kB"}, {1005, "1.00 kB"}, {1006, "1.01 kB"}, {2334, "2.33 kB"}, {2335, "2.34 kB"}, {2995, "3.00 kB"}, {9994, "9.99 kB"}, {9995, "10.0 kB"}, {10000, "10.0 kB"}, {10050, "10.0 kB"}, {10061, "10.1 kB"}, {99949, "99.9 kB"}, {99950, "100 kB"}, {999499, "999 kB"}, {999500, "1.00 MB"}, {1000000, "1.00 MB"}, {952500000, "952 MB"}, {952500001, "953 MB"}, {1000000000, "1.00 GB"}, {2300000000000, "2.30 TB"}, {9700000000000000, "9.70 PB"}, {18400000000000000, "18.4 PB"}, } for _, c := range cases { out := Format(c.value) if out != c.output { t.Errorf("Format(%d): got %q, want %q", c.value, out, c.output) } } }
explode_data.jsonl/32530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 475 }
[ 2830, 3393, 4061, 1155, 353, 8840, 836, 8, 341, 13158, 259, 5638, 2036, 341, 197, 16309, 220, 2622, 21, 19, 198, 197, 21170, 914, 198, 197, 532, 1444, 2264, 1669, 3056, 83, 5638, 515, 197, 197, 90, 15, 11, 330, 15, 425, 7115, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestServerGetOnly(t *testing.T) { h := func(ctx *RequestCtx) { if !ctx.IsGet() { t.Fatalf("non-get request: %q", ctx.Method()) } ctx.Success("foo/bar", []byte("success")) } s := &Server{ Handler: h, GetOnly: true, } rw := &readWriter{} rw.r.WriteString("POST /foo HTTP/1.1\r\nHost: google.com\r\nContent-Length: 5\r\nContent-Type: aaa\r\n\r\n12345") ch := make(chan error) go func() { ch <- s.ServeConn(rw) }() select { case err := <-ch: if err == nil { t.Fatalf("expecting error") } if err != errGetOnly { t.Fatalf("Unexpected error from serveConn: %s. Expecting %s", err, errGetOnly) } case <-time.After(100 * time.Millisecond): t.Fatalf("timeout") } br := bufio.NewReader(&rw.w) var resp Response if err := resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } statusCode := resp.StatusCode() if statusCode != StatusBadRequest { t.Fatalf("unexpected status code: %d. Expecting %d", statusCode, StatusBadRequest) } if !resp.ConnectionClose() { t.Fatalf("missing 'Connection: close' response header") } }
explode_data.jsonl/73300
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 451 }
[ 2830, 3393, 5475, 1949, 7308, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 2915, 7502, 353, 1900, 23684, 8, 341, 197, 743, 753, 3773, 4506, 1949, 368, 341, 298, 3244, 30762, 445, 6280, 22491, 1681, 25, 1018, 80, 497, 5635, 20798, 2398, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestClaimsMarshalUnmarshal(t *testing.T) { tm := time.Now().Add(14 * time.Hour) expr := json.Number(strconv.FormatInt(tm.Unix(), 10)) c := Claims{Issuer: "issuer", Expiration: expr} buf, err := json.Marshal(&c) if err != nil { t.Fatalf("expected no error, got: %v", err) } c0 := Claims{} err = json.Unmarshal(buf, &c0) if err != nil { t.Fatalf("expected no error, got: %v", err) } if expr != c0.Expiration { t.Errorf("expr and c0.Expiration should equal -- %v / %v", expr, c0.Expiration) } if "issuer" != c0.Issuer { t.Errorf("c0.Issuer should be 'issuer'") } c1 := Claims{} if err = json.Unmarshal([]byte(`{ "nbf": [] }`), &c1); err == nil { t.Errorf("expected error, got nil") } }
explode_data.jsonl/76412
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 51133, 55438, 1806, 27121, 1155, 353, 8840, 836, 8, 341, 3244, 76, 1669, 882, 13244, 1005, 2212, 7, 16, 19, 353, 882, 73550, 340, 8122, 649, 1669, 2951, 31182, 4199, 12027, 9978, 1072, 64190, 10616, 941, 1507, 220, 16, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestAddShardHostStats(t *testing.T) { expectedHosts := []string{"hostA", "hostB"} hostStatLines := map[string]ShardHostStatLine{} for _, host := range expectedHosts { hostStatLines[host] = ShardHostStatLine{ InUse: 0, Available: 0, Created: 0, Refreshing: 0, } } d := NewMongodbData( &StatLine{ ShardHostStatsLines: hostStatLines, }, map[string]string{}, // Use empty tags, so we don't break existing tests ) var acc testutil.Accumulator d.AddShardHostStats() d.flush(&acc) var hostsFound []string for host := range hostStatLines { for key := range shardHostStats { require.True(t, acc.HasInt64Field("mongodb_shard_stats", key)) } require.True(t, acc.HasTag("mongodb_shard_stats", "hostname")) hostsFound = append(hostsFound, host) } sort.Strings(hostsFound) sort.Strings(expectedHosts) require.Equal(t, hostsFound, expectedHosts) }
explode_data.jsonl/35782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 2212, 2016, 567, 9296, 16635, 1155, 353, 8840, 836, 8, 341, 42400, 9296, 82, 1669, 3056, 917, 4913, 3790, 32, 497, 330, 3790, 33, 16707, 63104, 15878, 16794, 1669, 2415, 14032, 60, 2016, 567, 9296, 15878, 2460, 16094, 2023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4