text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestIDDriverValue(t *testing.T) { id := ID{0x4d, 0x88, 0xe1, 0x5b, 0x60, 0xf4, 0x86, 0xe4, 0x28, 0x41, 0x2d, 0xc9} data, err := id.Value() assert.NoError(t, err) assert.Equal(t, "9m4e2mr0ui3e8a215n4g", data) }
explode_data.jsonl/58926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 915, 11349, 1130, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 3034, 90, 15, 87, 19, 67, 11, 220, 15, 87, 23, 23, 11, 220, 15, 8371, 16, 11, 220, 15, 87, 20, 65, 11, 220, 15, 87, 21, 15, 11, 220, 15, 5848, 19, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLocalTemporaryTableReplace(t *testing.T) { store, clean := realtikvtest.CreateMockStoreAndSetup(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("create temporary table tmp1 (id int primary key auto_increment, u int unique, v int)") tk.MustExec("insert into tmp1 values(1, 11, 101)") tk.MustExec("insert into tmp1 values(2, 12, 102)") tk.MustExec("insert into tmp1 values(3, 13, 103)") // out of transaction tk.MustExec("replace into tmp1 values(1, 12, 1000)") tk.MustQuery("select * from tmp1").Check(testkit.Rows("1 12 1000", "3 13 103")) tk.MustExec("replace into tmp1 values(4, 14, 104)") tk.MustQuery("select * from tmp1 where id=4").Check(testkit.Rows("4 14 104")) // in transaction and rollback tk.MustExec("begin") tk.MustExec("replace into tmp1 values(1, 13, 999)") tk.MustQuery("select * from tmp1").Check(testkit.Rows("1 13 999", "4 14 104")) tk.MustExec("replace into tmp1 values(5, 15, 105)") tk.MustQuery("select * from tmp1 where id=5").Check(testkit.Rows("5 15 105")) tk.MustExec("rollback") tk.MustQuery("select * from tmp1").Check(testkit.Rows("1 12 1000", "3 13 103", "4 14 104")) // out of transaction tk.MustExec("begin") tk.MustExec("replace into tmp1 values(1, 13, 999)") tk.MustExec("replace into tmp1 values(5, 15, 105)") tk.MustExec("commit") tk.MustQuery("select * from tmp1").Check(testkit.Rows("1 13 999", "4 14 104", "5 15 105")) }
explode_data.jsonl/5725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 525 }
[ 2830, 3393, 7319, 59362, 2556, 23107, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1931, 83, 1579, 85, 1944, 7251, 11571, 6093, 3036, 21821, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTopicTrie_unsubscribe(t *testing.T) { a := assert.New(t) trie := newTopicTrie() for cid, v := range testUnsubscribe.subTopics { for _, topic := range v { trie.subscribe(cid, topic) } } for cid, v := range testUnsubscribe.unsubscribe { for _, tt := range v { trie.unsubscribe(cid, tt) } } for cid, v := range testUnsubscribe.afterUnsub { for _, tt := range v { matched := trie.getMatchedTopicFilter(tt.topicName) if tt.exist { a.Equal(matched[cid][0].Qos, tt.wantQos) } else { a.Equal(0, len(matched)) } } } }
explode_data.jsonl/72946
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 26406, 51, 7231, 4907, 9384, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 340, 197, 8927, 1669, 501, 26406, 51, 7231, 741, 2023, 32141, 11, 348, 1669, 2088, 1273, 1806, 9384, 4309, 45003, 341, 197, 2023, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestGetCoins(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() { t.Skip() } _, err := f.GetCoins(context.Background()) if err != nil { t.Error(err) } }
explode_data.jsonl/15169
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 1949, 69602, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 341, 197, 3244, 57776, 741, 197, 532, 197, 6878, 1848, 1669, 282, 2234, 69602, 5378, 19047, 2398, 743, 1848, 96...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestTitleCase(t *testing.T) { input := inputInfo{} output := outputInfo{ Tags: map[string]string{ "All Lowercase Words": "all lowercase words", "All Uppercase Words": "ALL UPPERCASE WORDS", "All Crazy Case Words": "aLl cRaZY cASE WordS", "With Common Preps in a CD Into the Box.": "With common preps in a cd INTO the box.", "Feat and feat. The Machines.": "Feat and Feat. the machines.", "Unicode Apos´trophe": "unicode apos´trophe", "...": "...", ".'?": ".'?", "I'll Be Ill'": "i'll be ill'", "Names Like O'Hara, D’Arcy": "Names like o'hara, d’arcy", "Names Like McDonald and MacNeil": "Names like mcdonald and macneil", "Éléanor": "élÉanor", "XIV LIV Xiv Liv. Liv. Xiv.": "XIV LIV xiv liv. liv. xiv.", "A Start With a Lowercase Constant": "a start with a lowercase constant", `"A Double Quoted Sentence" and 'One Single Quoted'.`: `"a double quoted sentence" and 'one single quoted'.`, `Another "Double Quoted Sentence", and "A Sentence More".`: `another "double quoted sentence", and "a sentence more".`, "Some I.N.I.T.I.A.L.S.": "Some i.n.i.t.i.a.l.s.", "Foo & The Bar": "foo & the bar", }, } buf, err := ioutil.ReadFile(scriptCase) if err != nil { t.Fatal("Script is not readable", err) } // Compile scripts. L := MakeSandbox(nil) SandboxCompileScript(L, "case", string(buf)) if err != nil { t.Fatal("Spurious sandbox", err) } defer L.Close() err = RunScript(L, "case", &input, &output) if err != nil { t.Fatalf("script case: %s", err) } for want, got := range output.Tags { if got != want { t.Errorf(`Got "%v", want "%v"`, got, want) } } }
explode_data.jsonl/68141
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1158 }
[ 2830, 3393, 3851, 4207, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 1946, 1731, 16094, 21170, 1669, 2550, 1731, 515, 197, 10261, 2032, 25, 2415, 14032, 30953, 515, 298, 197, 67049, 27536, 5638, 27630, 788, 3824, 330, 541, 42047, 4244, 75...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestFlattenShoot(t *testing.T) { purpose := v1beta1.ShootPurposeEvaluation nodes := "10.250.0.0/19" pods := "100.96.0.0/11" services := "100.64.0.0/13" volumeType := "Standard_LRS" caBundle := "caBundle" policy := "policy" quota := true pdsLimit := int64(4) domain := "domain" authMode := "auth_mode" location := "Pacific/Auckland" hibernationStart := "00 17 * * 1" hibernationEnd := "00 00 * * 1" hibernationEnabled := true d := ResourceShoot().TestResourceData() shoot := corev1beta1.ShootSpec{ CloudProfileName: "az", SecretBindingName: "test-secret", Purpose: &purpose, Networking: corev1beta1.Networking{ Nodes: &nodes, Pods: &pods, Services: &services, Type: "calico", }, Maintenance: &corev1beta1.Maintenance{ AutoUpdate: &corev1beta1.MaintenanceAutoUpdate{ KubernetesVersion: true, MachineImageVersion: true, }, TimeWindow: &corev1beta1.MaintenanceTimeWindow{ Begin: "030000+0000", End: "040000+0000", }, }, Provider: corev1beta1.Provider{ Workers: []corev1beta1.Worker{ corev1beta1.Worker{ MaxSurge: &intstr.IntOrString{ IntVal: 1, }, MaxUnavailable: &intstr.IntOrString{ IntVal: 0, }, Maximum: 2, Minimum: 1, Volume: &corev1beta1.Volume{ Size: "50Gi", Type: &volumeType, }, Name: "cpu-worker", Machine: corev1beta1.Machine{ Image: &corev1beta1.ShootMachineImage{ Name: "coreos", Version: "2303.3.0", }, Type: "Standard_A4_v2", }, Annotations: map[string]string{ "test-key-annotation": "test-value-annotation", }, Labels: map[string]string{ "test-key-label": "test-value-label", }, CABundle: &caBundle, Zones: []string{"foo", "bar"}, Taints: []corev1.Taint{ corev1.Taint{ Key: "key", Value: "value", Effect: corev1.TaintEffectNoExecute, }, }, Kubernetes: &corev1beta1.WorkerKubernetes{ Kubelet: &corev1beta1.KubeletConfig{ PodPIDsLimit: &pdsLimit, CPUCFSQuota: &quota, CPUManagerPolicy: &policy, }, }, }, }, }, Region: "westeurope", Kubernetes: corev1beta1.Kubernetes{ Version: "1.15.4", }, DNS: &corev1beta1.DNS{ Domain: &domain, }, Addons: &corev1beta1.Addons{ KubernetesDashboard: 
&corev1beta1.KubernetesDashboard{ Addon: corev1beta1.Addon{ Enabled: true, }, AuthenticationMode: &authMode, }, NginxIngress: &corev1beta1.NginxIngress{ Addon: corev1beta1.Addon{ Enabled: true, }, }, }, Hibernation: &corev1beta1.Hibernation{ Enabled: &hibernationEnabled, Schedules: []corev1beta1.HibernationSchedule{ corev1beta1.HibernationSchedule{ Start: &hibernationStart, End: &hibernationEnd, Location: &location, }, }, }, Monitoring: &corev1beta1.Monitoring{ Alerting: &corev1beta1.Alerting{ EmailReceivers: []string{"receiver1", "receiver2"}, }, }, } expected := []interface{}{ map[string]interface{}{ "cloud_profile_name": "az", "secret_binding_name": "test-secret", "purpose": purpose, "region": "westeurope", "networking": []interface{}{ map[string]interface{}{ "nodes": nodes, "pods": pods, "services": services, "type": "calico", }, }, "kubernetes": []interface{}{ map[string]interface{}{ "version": "1.15.4", }, }, "maintenance": []interface{}{ map[string]interface{}{ "auto_update": []interface{}{ map[string]interface{}{ "kubernetes_version": true, "machine_image_version": true, }, }, "time_window": []interface{}{ map[string]interface{}{ "begin": "030000+0000", "end": "040000+0000", }, }, }, }, "provider": []interface{}{ map[string]interface{}{ "worker": []interface{}{ map[string]interface{}{ "name": "cpu-worker", "max_surge": 1, "max_unavailable": 0, "maximum": int32(2), "minimum": int32(1), "volume": []interface{}{ map[string]interface{}{ "size": "50Gi", "type": "Standard_LRS", }, }, "machine": []interface{}{ map[string]interface{}{ "type": "Standard_A4_v2", "image": []interface{}{ map[string]interface{}{ "name": "coreos", "version": "2303.3.0", }, }, }, }, "annotations": map[string]string{ "test-key-annotation": "test-value-annotation", }, "labels": map[string]string{ "test-key-label": "test-value-label", }, "zones": []string{"foo", "bar"}, "cabundle": caBundle, "taints": []interface{}{ map[string]interface{}{ "key": "key", "value": "value", "effect": 
corev1.TaintEffect("NoExecute"), }, }, "kubernetes": []interface{}{ map[string]interface{}{ "kubelet": []interface{}{ map[string]interface{}{ "pod_pids_limit": int64(4), "cpu_cfs_quota": true, "cpu_manager_policy": "policy", }, }, }, }, }, }, }, }, "dns": []interface{}{ map[string]interface{}{ "domain": domain, }, }, "addons": []interface{}{ map[string]interface{}{ "kubernetes_dashboard": []interface{}{ map[string]interface{}{ "enabled": true, "authentication_mode": authMode, }, }, "nginx_ingress": []interface{}{ map[string]interface{}{ "enabled": true, }, }, }, }, "hibernation": []interface{}{ map[string]interface{}{ "enabled": hibernationEnabled, "schedules": []interface{}{ map[string]interface{}{ "start": hibernationStart, "end": hibernationEnd, "location": location, }, }, }, }, "monitoring": []interface{}{ map[string]interface{}{ "alerting": []interface{}{ map[string]interface{}{ "emailreceivers": []string{"receiver1", "receiver2"}, }, }, }, }, }, } d.Set("spec", expected) out, _ := flatten.FlattenShoot(shoot, d, "") if diff := cmp.Diff(expected, out); diff != "" { t.Fatalf("Error matching output and expected: \n%s", diff) } }
explode_data.jsonl/26238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3433 }
[ 2830, 3393, 3882, 14456, 85195, 1155, 353, 8840, 836, 8, 341, 3223, 29249, 1669, 348, 16, 19127, 16, 10849, 1905, 74033, 82363, 198, 79756, 1669, 330, 16, 15, 13, 17, 20, 15, 13, 15, 13, 15, 14, 16, 24, 698, 3223, 29697, 1669, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVarAfterMain(t *testing.T) { gopClTest(t, ` package main func main() { println(i) } var i int `, `package main import fmt "fmt" func main() { fmt.Println(i) } var i int `) gopClTest(t, ` package main func f(v float64) float64 { return v } func main() { sink = f(100) } var sink float64 `, `package main func f(v float64) float64 { return v } func main() { sink = f(100) } var sink float64 `) }
explode_data.jsonl/73597
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 3962, 6025, 6202, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 1722, 1887, 271, 2830, 1887, 368, 341, 81168, 1956, 340, 630, 947, 600, 526, 198, 7808, 1565, 1722, 1887, 271, 474, 8879, 330, 12501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDiffGroups(t *testing.T) { l := func(s ...string) []string { return s } for e, engine := range []func(a, b interface{}) lcs.EditScript{ textdiff.DP, textdiff.Myers, } { for i, tc := range []struct { a, b string summary []string }{ {"S\nA\nB\nC\nD\nE", "S\nC", l("2,3d1", "5,6d2")}, {"S\nA\nB\nC\nD\nE", "C", l("1,3d0", "5,6d1")}, {"A\nB\nC\nD\nE", "C", l("1,2d0", "4,5d1")}, {"S\nC", "S\nA\nB\nC\nD\nE", l("1a2,3", "2a5,6")}, {"C", "S\nA\nB\nC\nD\nE", l("0a1,3", "1a5,6")}, {"C", "A\nB\nC\nD\nE", l("0a1,2", "1a4,5")}, {"S\nA\nB\nC\nD\nE", "S\nS\nC", l("2,3c2", "5,6d3")}, {"S\nA\nB\nC\nS", "S\nAA\nBB\nCC\nS", l("2,4c2,4")}, {"S\nAA\nBB\nCC\nS", "S\nA\nB\nC\nS", l("2,4c2,4")}, } { diffs := textdiff.DiffByLinesUsing([]byte(tc.a), []byte(tc.b), engine) ng := diffs.NumGroups() if got, want := ng, len(tc.summary); got != want { t.Errorf("%v.%v: got %v, want %v\n", e, i, got, want) continue } for g := 0; g < ng; g++ { if got, want := diffs.Group(g).Summary(), tc.summary[g]; got != want { t.Errorf("%v.%v: got %v, want %v\n", e, i, got, want) } } } } }
explode_data.jsonl/9740
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 703 }
[ 2830, 3393, 21751, 22173, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 2915, 1141, 2503, 917, 8, 3056, 917, 314, 470, 274, 456, 2023, 384, 11, 4712, 1669, 2088, 3056, 2830, 2877, 11, 293, 3749, 28875, 326, 4837, 35823, 5910, 515, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewTemplate(t *testing.T) { path := "./test.gold" g := &Generator{} tpl := NewTemplate(path, g) if tpl.Path != path || tpl.Generator != g { t.Errorf("The template is invalid.") } }
explode_data.jsonl/74606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 3564, 7275, 1155, 353, 8840, 836, 8, 341, 26781, 1669, 5924, 1944, 92960, 698, 3174, 1669, 609, 12561, 16094, 3244, 500, 1669, 1532, 7275, 5581, 11, 342, 340, 743, 60979, 17474, 961, 1815, 1369, 60979, 1224, 15312, 961, 342,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestReadTerragruntConfigFull(t *testing.T) { t.Parallel() cleanupTerraformFolder(t, TEST_FIXTURE_READ_CONFIG) rootPath := util.JoinPath(TEST_FIXTURE_READ_CONFIG, "full") runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath)) // check the outputs to make sure they are as expected stdout := bytes.Buffer{} stderr := bytes.Buffer{} require.NoError( t, runTerragruntCommand(t, fmt.Sprintf("terragrunt output -no-color -json --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath), &stdout, &stderr), ) outputs := map[string]TerraformOutput{} require.NoError(t, json.Unmarshal([]byte(stdout.String()), &outputs)) // Primitive config attributes assert.Equal(t, outputs["terraform_binary"].Value, "terragrunt") assert.Equal(t, outputs["terraform_version_constraint"].Value, "= 0.12.20") assert.Equal(t, outputs["terragrunt_version_constraint"].Value, "= 0.23.18") assert.Equal(t, outputs["download_dir"].Value, ".terragrunt-cache") assert.Equal(t, outputs["iam_role"].Value, "TerragruntIAMRole") assert.Equal(t, outputs["skip"].Value, "true") assert.Equal(t, outputs["prevent_destroy"].Value, "true") // Simple maps localstgOut := map[string]interface{}{} require.NoError(t, json.Unmarshal([]byte(outputs["localstg"].Value.(string)), &localstgOut)) assert.Equal(t, localstgOut, map[string]interface{}{"the_answer": float64(42)}) inputsOut := map[string]interface{}{} require.NoError(t, json.Unmarshal([]byte(outputs["inputs"].Value.(string)), &inputsOut)) assert.Equal(t, inputsOut, map[string]interface{}{"doc": "Emmett Brown"}) // Complex blocks depsOut := map[string]interface{}{} require.NoError(t, json.Unmarshal([]byte(outputs["dependencies"].Value.(string)), &depsOut)) assert.Equal( t, depsOut, map[string]interface{}{ "paths": []interface{}{"../../fixture"}, }, ) generateOut := map[string]interface{}{} require.NoError(t, json.Unmarshal([]byte(outputs["generate"].Value.(string)), &generateOut)) 
assert.Equal( t, generateOut, map[string]interface{}{ "provider": map[string]interface{}{ "path": "provider.tf", "if_exists": "overwrite_terragrunt", "comment_prefix": "# ", "disable_signature": false, "contents": `provider "aws" { region = "us-east-1" } `, }, }, ) remoteStateOut := map[string]interface{}{} require.NoError(t, json.Unmarshal([]byte(outputs["remote_state"].Value.(string)), &remoteStateOut)) assert.Equal( t, remoteStateOut, map[string]interface{}{ "backend": "local", "disable_init": false, "disable_dependency_optimization": false, "generate": map[string]interface{}{"path": "backend.tf", "if_exists": "overwrite_terragrunt"}, "config": map[string]interface{}{"path": "foo.tfstate"}, }, ) terraformOut := map[string]interface{}{} require.NoError(t, json.Unmarshal([]byte(outputs["terraformtg"].Value.(string)), &terraformOut)) assert.Equal( t, terraformOut, map[string]interface{}{ "source": "./delorean", "include_in_copy": []interface{}{"time_machine.*"}, "extra_arguments": map[string]interface{}{ "var-files": map[string]interface{}{ "name": "var-files", "commands": []interface{}{"apply", "plan"}, "arguments": nil, "required_var_files": []interface{}{"extra.tfvars"}, "optional_var_files": []interface{}{"optional.tfvars"}, "env_vars": map[string]interface{}{ "TF_VAR_custom_var": "I'm set in extra_arguments env_vars", }, }, }, "before_hook": map[string]interface{}{ "before_hook_1": map[string]interface{}{ "name": "before_hook_1", "commands": []interface{}{"apply", "plan"}, "execute": []interface{}{"touch", "before.out"}, "working_dir": nil, "run_on_error": true, }, }, "after_hook": map[string]interface{}{ "after_hook_1": map[string]interface{}{ "name": "after_hook_1", "commands": []interface{}{"apply", "plan"}, "execute": []interface{}{"touch", "after.out"}, "working_dir": nil, "run_on_error": true, }, }, }, ) }
explode_data.jsonl/10156
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1903 }
[ 2830, 3393, 4418, 51402, 68305, 3850, 2648, 9432, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1444, 60639, 51, 13886, 627, 13682, 1155, 11, 13602, 42635, 41486, 13117, 12568, 340, 33698, 1820, 1669, 4094, 22363, 1820, 50320, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnvvar_Prefixes(t *testing.T) { os.Setenv("APP_DATABASE_HOST", "localhost") os.Setenv("APP_DATABASE_PASSWORD", "password") os.Setenv("VAULT_ADDR", "vault:1337") os.Setenv("MICRO_REGISTRY", "mdns") var prefixtests = []struct { prefixOpts []source.Option expectedKeys []string }{ {[]source.Option{WithPrefix("APP", "MICRO")}, []string{"app", "micro"}}, {[]source.Option{WithPrefix("MICRO"), WithStrippedPrefix("APP")}, []string{"database", "micro"}}, {[]source.Option{WithPrefix("MICRO"), WithStrippedPrefix("APP")}, []string{"database", "micro"}}, } for _, pt := range prefixtests { source := NewSource(pt.prefixOpts...) c, err := source.Read() if err != nil { t.Error(err) } var actual map[string]interface{} if err := json.Unmarshal(c.Data, &actual); err != nil { t.Error(err) } // assert other prefixes ignored if l := len(actual); l != len(pt.expectedKeys) { t.Errorf("expected %v top keys, got %v", len(pt.expectedKeys), l) } for _, k := range pt.expectedKeys { if !containsKey(actual, k) { t.Errorf("expected key %v, not found", k) } } } }
explode_data.jsonl/18089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 463 }
[ 2830, 3393, 14359, 947, 1088, 5060, 288, 1155, 353, 8840, 836, 8, 341, 25078, 4202, 3160, 445, 14707, 45510, 17213, 497, 330, 8301, 1138, 25078, 4202, 3160, 445, 14707, 45510, 23059, 497, 330, 3833, 1138, 25078, 4202, 3160, 445, 12820, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestBootTime(t *testing.T) { metadata, err := utils.GetMetadataAttribute("start-time") if err != nil { t.Fatalf("couldn't get start time from metadata") } startTime, err := strconv.Atoi(metadata) if err != nil { t.Fatalf("failed to convet start time %s", metadata) } t.Logf("image boot time is %d", time.Now().Second()-startTime) }
explode_data.jsonl/81395
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 17919, 1462, 1155, 353, 8840, 836, 8, 341, 2109, 7603, 11, 1848, 1669, 12439, 2234, 14610, 3907, 445, 2468, 7246, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 90962, 944, 633, 1191, 882, 504, 11160, 1138, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRBACModelWithDomains(t *testing.T) { e, _ := NewEnforcer("examples/rbac_with_domains_model.conf", "examples/rbac_with_domains_policy.csv") testDomainEnforce(t, e, "alice", "domain1", "data1", "read", true) testDomainEnforce(t, e, "alice", "domain1", "data1", "write", true) testDomainEnforce(t, e, "alice", "domain1", "data2", "read", false) testDomainEnforce(t, e, "alice", "domain1", "data2", "write", false) testDomainEnforce(t, e, "bob", "domain2", "data1", "read", false) testDomainEnforce(t, e, "bob", "domain2", "data1", "write", false) testDomainEnforce(t, e, "bob", "domain2", "data2", "read", true) testDomainEnforce(t, e, "bob", "domain2", "data2", "write", true) }
explode_data.jsonl/57123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 283 }
[ 2830, 3393, 29259, 1706, 1712, 2354, 74713, 1155, 353, 8840, 836, 8, 341, 7727, 11, 716, 1669, 1532, 1702, 82010, 445, 51668, 7382, 55877, 6615, 70199, 5047, 13937, 497, 330, 51668, 7382, 55877, 6615, 70199, 22773, 11219, 5130, 18185, 136...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPublishInvalidSourceIdInPayload(t *testing.T) { payload := buildTestPayload(testSourceIdInvalid, testEventType, testEventTypeVersion, testEventID, testEventTime, testData) body, statusCode := performPublishRequest(t, publishServer.URL, payload) assertExpectedError(t, body, statusCode, http.StatusBadRequest, api.FieldSourceId, api.ErrorTypeValidationViolation) }
explode_data.jsonl/74420
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 50145, 7928, 3608, 764, 641, 29683, 1155, 353, 8840, 836, 8, 341, 76272, 1669, 1936, 2271, 29683, 8623, 3608, 764, 7928, 11, 1273, 47906, 11, 1273, 47906, 5637, 11, 1273, 1556, 915, 345, 197, 18185, 1556, 1462, 11, 67348, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImgTest_Add_StreamingPassFail_MatchesExpectations_ZeroExitCode(t *testing.T) { unittest.MediumTest(t) workDir := t.TempDir() setupAuthWithGSUtil(t, workDir) td := testutils.TestDataDir(t) keysFile := filepath.Join(workDir, "keys.json") require.NoError(t, ioutil.WriteFile(keysFile, []byte(`{"os": "Android"}`), 0644)) mh := mockRPCResponses("https://my-instance-gold.skia.org").Positive("pixel-tests", blankDigest).Build() mg := &mocks.GCSUploader{} resultsMatcher := mock.MatchedBy(func(results jsonio.GoldResults) bool { assert.Equal(t, jsonio.GoldResults{ GitHash: "1234567890123456789012345678901234567890", Key: map[string]string{ "os": "Android", }, Results: []jsonio.Result{{ Key: map[string]string{"name": "pixel-tests", "device": "angler", "source_type": "my_corpus"}, Options: map[string]string{"some_option": "is optional", "ext": "png"}, Digest: blankDigest, }}, }, results) return true }) mg.On("UploadJSON", testutils.AnyContext, resultsMatcher, mock.Anything, `skia-gold-my-instance/dm-json-v1/2021/01/23/22/1234567890123456789012345678901234567890/waterfall/dm-1611440480000000019.json`).Return(nil) // Now call imgtest add with the following flags. This is simulating a test uploading a single // result for a test called pixel-tests. The digest has already been triaged positive. ctx, output, exit := testContext(mg, mh, nil, &timeOne) env := imgTest{ gitHash: "1234567890123456789012345678901234567890", corpus: "my_corpus", failureFile: filepath.Join(workDir, "failures.txt"), instanceID: "my-instance", keysFile: keysFile, passFailStep: true, pngDigest: blankDigest, pngFile: filepath.Join(td, "00000000000000000000000000000000.png"), testKeysStrings: []string{"device:angler"}, testName: "pixel-tests", testOptionalKeysStrings: []string{"some_option:is optional"}, workDir: workDir, } runUntilExit(t, func() { env.Add(ctx) }) logs := output.String() exit.AssertWasCalledWithCode(t, 0, logs) mg.AssertExpectations(t) }
explode_data.jsonl/69529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 950 }
[ 2830, 3393, 13033, 2271, 21346, 62, 76509, 12187, 19524, 1245, 9118, 17536, 804, 97672, 15339, 2078, 1155, 353, 8840, 836, 8, 341, 20479, 14267, 1321, 23090, 2271, 1155, 692, 97038, 6184, 1669, 259, 65009, 6184, 741, 84571, 5087, 2354, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResolvePlaceholdersInOutputValues(t *testing.T) { ctx := context.Background() wf := unmarshalWF(outputValuePlaceholders) woc := newWoc(*wf) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowRunning, woc.wf.Status.Phase) pods, err := listPods(woc) assert.NoError(t, err) assert.True(t, len(pods.Items) > 0, "pod was not created successfully") templateString := pods.Items[0].ObjectMeta.Annotations["workflows.argoproj.io/template"] var template wfv1.Template err = json.Unmarshal([]byte(templateString), &template) assert.NoError(t, err) parameterValue := template.Outputs.Parameters[0].Value assert.NotNil(t, parameterValue) assert.Equal(t, "output-value-placeholders-wf", parameterValue.String()) }
explode_data.jsonl/70985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 56808, 17371, 16662, 641, 5097, 6227, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 6692, 69, 1669, 650, 27121, 32131, 11057, 1130, 17371, 16662, 340, 6692, 509, 1669, 501, 54, 509, 4071, 43083, 340, 6692, 509...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExtendScalarType(t *testing.T) { t.Run("extend simple scalar type", func(t *testing.T) { run(extendScalarTypeDefinition, testDefinition, ` scalar Coordinates extend scalar Coordinates @deprecated(reason: "some reason") `, ` scalar Coordinates @deprecated(reason: "some reason") extend scalar Coordinates @deprecated(reason: "some reason") `) }) t.Run("extend scalar type by multiple directives", func(t *testing.T) { run(extendScalarTypeDefinition, testDefinition, ` scalar Coordinates extend scalar Coordinates @deprecated(reason: "some reason") @skip(if: false) `, ` scalar Coordinates @deprecated(reason: "some reason") @skip(if: false) extend scalar Coordinates @deprecated(reason: "some reason") @skip(if: false) `) }) t.Run("extend non-existent scalar", func(t *testing.T) { run(extendScalarTypeDefinition, testDefinition, ` extend scalar Mood extend scalar Coordinates @deprecated(reason: "some reason") @skip(if: false) `, ` extend scalar Mood extend scalar Coordinates @deprecated(reason: "some reason") @skip(if: false) scalar Mood scalar Coordinates @deprecated(reason: "some reason") @skip(if: false) `) }) }
explode_data.jsonl/51978
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 489 }
[ 2830, 3393, 72136, 20639, 929, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 25048, 4285, 17274, 943, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 56742, 7, 25048, 20639, 929, 10398, 11, 1273, 10398, 11, 22074, 464, 1903, 59153, 62...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDroppedMessagesOnSendToSub(t *testing.T) { opts := GetDefaultOptions() opts.MaxMsgs = 3 s := runServerWithOpts(t, opts, nil) defer s.Shutdown() sc := NewDefaultConnection(t) defer sc.Close() // Produce 1 message if err := sc.Publish("foo", []byte("hello")); err != nil { t.Fatalf("Unexpected error on publish: %v", err) } // Start a durable, it should receive the message ch := make(chan bool) if _, err := sc.Subscribe("foo", func(_ *stan.Msg) { ch <- true }, stan.DurableName("dur"), stan.DeliverAllAvailable()); err != nil { t.Fatalf("Unexpected error on subscribe: %v", err) } // Wait for message if err := Wait(ch); err != nil { t.Fatal("Did not get our message") } // Close connection sc.Close() // Recreate a connection sc = NewDefaultConnection(t) defer sc.Close() // Send messages 2, 3, 4 and 5. Messages 1 and 2 should be dropped. for i := 2; i <= 5; i++ { if err := sc.Publish("foo", []byte("hello")); err != nil { t.Fatalf("Unexpected error on publish: %v", err) } } // Start the durable, it should receive messages 3, 4 and 5 expectedSeq := uint64(3) good := 0 cb := func(m *stan.Msg) { if m.Sequence == expectedSeq { good++ if good == 3 { ch <- true } } expectedSeq++ } if _, err := sc.Subscribe("foo", cb, stan.DurableName("dur"), stan.DeliverAllAvailable()); err != nil { t.Fatalf("Unexpected error on subscribe: %v", err) } // Wait for messages: if err := Wait(ch); err != nil { t.Fatal("Did not get our messages") } }
explode_data.jsonl/23094
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 589 }
[ 2830, 3393, 35, 41716, 15820, 1925, 11505, 1249, 3136, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 2126, 3675, 3798, 741, 64734, 14535, 6611, 82, 284, 220, 18, 198, 1903, 1669, 1598, 5475, 2354, 43451, 1155, 11, 12185, 11, 2092, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test64BitErrorChecking(t *testing.T) { defer func() { if err := recover(); err != nil { t.Fatal("panic due to 0xFFFFFFFF != -1 " + "when int is 64 bits") } }() db := openTestConn(t) defer db.Close() r, err := db.Query(`SELECT * FROM (VALUES (0::integer, NULL::text), (1, 'test string')) AS t;`) if err != nil { t.Fatal(err) } defer r.Close() for r.Next() { } }
explode_data.jsonl/63443
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 21, 19, 8344, 1454, 40129, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 743, 1848, 1669, 11731, 2129, 1848, 961, 2092, 341, 298, 3244, 26133, 445, 19079, 4152, 311, 220, 15, 22620, 961, 481, 16, 330, 3610, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestClusterAdminListPartitionReassignmentsWithDiffVersion(t *testing.T) { seedBroker := NewMockBroker(t, 1) defer seedBroker.Close() secondBroker := NewMockBroker(t, 2) defer secondBroker.Close() seedBroker.SetHandlerByMap(map[string]MockResponse{ "MetadataRequest": NewMockMetadataResponse(t). SetController(secondBroker.BrokerID()). SetBroker(seedBroker.Addr(), seedBroker.BrokerID()). SetBroker(secondBroker.Addr(), secondBroker.BrokerID()), }) secondBroker.SetHandlerByMap(map[string]MockResponse{ "ListPartitionReassignmentsRequest": NewMockListPartitionReassignmentsResponse(t), }) config := NewTestConfig() config.Version = V2_3_0_0 admin, err := NewClusterAdmin([]string{seedBroker.Addr()}, config) if err != nil { t.Fatal(err) } partitions := make([]int32, 0) _, err = admin.ListPartitionReassignments("my_topic", partitions) if !strings.ContainsAny(err.Error(), ErrUnsupportedVersion.Error()) { t.Fatal(err) } err = admin.Close() if err != nil { t.Fatal(err) } }
explode_data.jsonl/40789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 28678, 7210, 852, 49978, 693, 96310, 2354, 21751, 5637, 1155, 353, 8840, 836, 8, 341, 197, 22602, 65545, 1669, 1532, 11571, 65545, 1155, 11, 220, 16, 340, 16867, 10320, 65545, 10421, 2822, 197, 5569, 65545, 1669, 1532, 11571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTrigger_SkipEvent(t *testing.T) { controller := gomock.NewController(t) defer controller.Finish() mockUsers := mock.NewMockUserStore(controller) mockUsers.EXPECT().Find(noContext, dummyRepo.UserID).Return(dummyUser, nil) mockConfigService := mock.NewMockConfigService(controller) mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYamlSkipEvent, nil) mockConvertService := mock.NewMockConvertService(controller) mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYamlSkipEvent, nil) mockValidateService := mock.NewMockValidateService(controller) mockValidateService.EXPECT().Validate(gomock.Any(), gomock.Any()).Return(nil) triggerer := New( nil, mockConfigService, mockConvertService, nil, nil, nil, nil, nil, mockUsers, mockValidateService, nil, ) _, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) if err != nil { t.Errorf("Expect build silently skipped if event does not match") } }
explode_data.jsonl/26997
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 371 }
[ 2830, 3393, 17939, 1098, 13389, 1556, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 6461, 991, 18176, 2822, 77333, 7137, 1669, 7860, 7121, 11571, 1474, 6093, 40845, 340, 77333, 7137, 22402, 7285,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIPToUInt32(t *testing.T) { type args struct { ip net.IP } tests := []struct { name string args args want uint32 }{ {name: "", args: args{ip: net.IPv4(1, 2, 3, 4)}, want: 67305985}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := IPToUInt32(tt.args.ip); got != tt.want { t.Errorf("IPToUInt32() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/13148
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 3298, 1249, 18777, 18, 17, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 46531, 4179, 46917, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 2622, 18, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMysql56VersionMatch(t *testing.T) { table := map[string]bool{ "10.0.13-MariaDB-1~precise-log": false, "5.1.63-google-log": false, "5.6.24-log": true, } for input, want := range table { if got := (&mysql56{}).VersionMatch(input); got != want { t.Errorf("(&mysql56{}).VersionMatch(%#v) = %v, want %v", input, got, want) } } }
explode_data.jsonl/11238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 44, 14869, 20, 21, 5637, 8331, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 2415, 14032, 96436, 515, 197, 197, 1, 16, 15, 13, 15, 13, 16, 18, 5251, 10432, 3506, 12, 16, 93, 10645, 1064, 46332, 788, 895, 345, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReverse(t *testing.T) { tests := []struct { sep string want string }{ {sep: ""}, {sep: "h"}, {sep: "ab"}, {sep: "abc"}, {sep: "abcdef"}, } for _, test := range tests { var want []string for _, s := range test.sep { want = append(want, string(s)) } reverse(want) test.want = strings.Join(want, "->") l := NewLinkList() for _, str := range test.sep { l.InsertTail(string(str)) } l.Reverse() require.Equal(t, test.want, l.Print()) } }
explode_data.jsonl/6119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 45695, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 197, 28036, 220, 914, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 90, 28036, 25, 77496, 197, 197, 90, 28036, 25, 330, 71, 7115, 197, 197, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestProcessProposalBadDial(t *testing.T) { _, err := testProcessProposal(t, "grpc://"+testAddress) if err == nil { t.Fatal("Process proposal should have failed") } }
explode_data.jsonl/45091
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 7423, 98637, 17082, 35, 530, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 1273, 7423, 98637, 1155, 11, 330, 56585, 1110, 5572, 1944, 4286, 340, 743, 1848, 621, 2092, 341, 197, 3244, 26133, 445, 7423, 13734, 1265, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestCustomBuildDisablePush(t *testing.T) { f := newIBDFixture(t, k8s.EnvKIND6) defer f.TearDown() sha := digest.Digest("sha256:11cd0eb38bc3ceb958ffb2f9bd70be3fb317ce7d255c8a4c3f4af30e298aa1aab") f.docker.Images["gcr.io/some-project-162817/sancho:tilt-build"] = types.ImageInspect{ID: string(sha)} manifest := NewSanchoCustomBuildManifestWithPushDisabled(f) _, err := f.ibd.BuildAndDeploy(f.ctx, f.st, buildTargets(manifest), store.BuildStateSet{}) assert.NoError(t, err) // We didn't try to build or push an image, but we did try to tag it assert.Equal(t, 0, f.docker.BuildCount) assert.Equal(t, 1, f.docker.TagCount) assert.Equal(t, 0, f.kl.loadCount) assert.Equal(t, 0, f.docker.PushCount) }
explode_data.jsonl/38266
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 10268, 11066, 25479, 16644, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 3256, 5262, 12735, 1155, 11, 595, 23, 82, 81214, 42, 5245, 21, 340, 16867, 282, 836, 682, 4454, 741, 197, 15247, 1669, 20882, 909, 15153, 445, 1524...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUserClone(t *testing.T) { user := &User{ Username: "foo", Password: "bar", Permissions: &Permissions{ Publish: &SubjectPermission{ Allow: []string{"foo"}, }, Subscribe: &SubjectPermission{ Allow: []string{"bar"}, }, }, } clone := user.clone() if !reflect.DeepEqual(user, clone) { t.Fatalf("Cloned Users are incorrect.\nexpected: %+v\ngot: %+v", user, clone) } clone.Permissions.Subscribe.Allow = []string{"baz"} if reflect.DeepEqual(user, clone) { t.Fatal("Expected Users to be different") } }
explode_data.jsonl/75599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 1474, 37677, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 609, 1474, 515, 197, 197, 11115, 25, 330, 7975, 756, 197, 197, 4876, 25, 330, 2257, 756, 197, 197, 23851, 25, 609, 23851, 515, 298, 10025, 2538, 25, 609, 13019, 149...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPostageHeaderError(t *testing.T) { var ( mockStorer = mock.NewStorer() mockStatestore = statestore.NewStateStore() logger = logging.New(ioutil.Discard, 5) mp = mockpost.New(mockpost.WithIssuer(postage.NewStampIssuer("", "", batchOk, 11, 10))) client, _, _ = newTestServer(t, testServerOptions{ Storer: mockStorer, Tags: tags.NewTags(mockStatestore, logger), Logger: logger, Post: mp, }) endpoints = []string{ "bytes", "bzz", "chunks", } ) content := []byte{7: 0} // 8 zeros for _, endpoint := range endpoints { t.Run(endpoint+": empty batch", func(t *testing.T) { hexbatch := hex.EncodeToString(batchEmpty) expCode := http.StatusBadRequest jsonhttptest.Request(t, client, http.MethodPost, "/"+endpoint, expCode, jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, hexbatch), jsonhttptest.WithRequestHeader(api.ContentTypeHeader, "application/octet-stream"), jsonhttptest.WithRequestBody(bytes.NewReader(content)), ) }) t.Run(endpoint+": ok batch", func(t *testing.T) { hexbatch := hex.EncodeToString(batchOk) expCode := http.StatusCreated jsonhttptest.Request(t, client, http.MethodPost, "/"+endpoint, expCode, jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, hexbatch), jsonhttptest.WithRequestHeader(api.ContentTypeHeader, "application/octet-stream"), jsonhttptest.WithRequestBody(bytes.NewReader(content)), ) }) t.Run(endpoint+": bad batch", func(t *testing.T) { hexbatch := hex.EncodeToString(batchInvalid) expCode := http.StatusBadRequest jsonhttptest.Request(t, client, http.MethodPost, "/"+endpoint, expCode, jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, hexbatch), jsonhttptest.WithRequestHeader(api.ContentTypeHeader, "application/octet-stream"), jsonhttptest.WithRequestBody(bytes.NewReader(content)), ) }) } }
explode_data.jsonl/28017
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 770 }
[ 2830, 3393, 4133, 424, 4047, 1454, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 77333, 623, 14827, 257, 284, 7860, 7121, 623, 14827, 741, 197, 77333, 1397, 4314, 284, 1584, 4314, 7121, 1397, 6093, 741, 197, 17060, 260, 284, 8392, 71...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiLineBody(t *testing.T) { input1 := ` x = 1 y = 2 z = [ i | [x,y] = arr arr[_] = i] ` body1, err := ParseBody(input1) if err != nil { t.Fatalf("Unexpected parse error on enclosed body: %v", err) } expected1 := MustParseBody(`x = 1; y = 2; z = [i | [x,y] = arr; arr[_] = i]`) if !body1.Equal(expected1) { t.Errorf("Expected enclosed body to equal %v but got: %v", expected1, body1) } // Check that parser can handle multiple expressions w/o enclosing braces. input2 := ` x = 1 ; # comment after semicolon y = 2 # comment without semicolon z = [ i | [x,y] = arr # comment in comprehension arr[_] = i] ` body2, err := ParseBody(input2) if err != nil { t.Fatalf("Unexpected parse error on enclosed body: %v", err) } if !body2.Equal(expected1) { t.Errorf("Expected unenclosed body to equal %v but got: %v", expected1, body1) } assertParseOneBody(t, "whitespace following call", "f(x)\t\n [1]", NewBody( NewExpr( []*Term{ RefTerm(VarTerm("f")), VarTerm("x"), }, ), NewExpr( ArrayTerm(IntNumberTerm(1)), ), )) assertParseOneBody(t, "whitespace following array", "[1]\t\n [2]", NewBody( NewExpr( ArrayTerm(IntNumberTerm(1)), ), NewExpr( ArrayTerm(IntNumberTerm(2)), ), )) assertParseOneBody(t, "whitespace following set", "{1}\t\n {2}", NewBody( NewExpr( SetTerm(IntNumberTerm(1)), ), NewExpr( SetTerm(IntNumberTerm(2)), ), )) }
explode_data.jsonl/50479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 642 }
[ 2830, 3393, 20358, 2460, 5444, 1155, 353, 8840, 836, 8, 1476, 22427, 16, 1669, 22074, 197, 10225, 284, 220, 16, 198, 197, 14522, 284, 220, 17, 198, 197, 20832, 284, 508, 600, 760, 508, 87, 7358, 60, 284, 2890, 198, 24178, 2890, 1349...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFetchAutoAsgsVmss(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() vmssName := "test-vmss" vmssTag := "fake-tag" vmssTagValue := "fake-value" minString := "1" minVal := 1 maxString := "5" maxVal := 5 ngdo := cloudprovider.NodeGroupDiscoveryOptions{ NodeGroupAutoDiscoverySpecs: []string{fmt.Sprintf("label:%s=%s", vmssTag, vmssTagValue)}, } expectedScaleSets := []compute.VirtualMachineScaleSet{fakeVMSSWithTags(vmssName, map[string]*string{vmssTag: &vmssTagValue, "min": &minString, "max": &maxString})} expectedVMSSVMs := newTestVMSSVMList() manager := newTestAzureManager(t) mockVMSSClient := mockvmssclient.NewMockInterface(ctrl) mockVMSSClient.EXPECT().List(gomock.Any(), manager.config.ResourceGroup).Return(expectedScaleSets, nil).AnyTimes() manager.azClient.virtualMachineScaleSetsClient = mockVMSSClient mockVMSSVMClient := mockvmssvmclient.NewMockInterface(ctrl) mockVMSSVMClient.EXPECT().List(gomock.Any(), manager.config.ResourceGroup, vmssName, gomock.Any()).Return(expectedVMSSVMs, nil).AnyTimes() manager.azClient.virtualMachineScaleSetVMsClient = mockVMSSVMClient specs, err := parseLabelAutoDiscoverySpecs(ngdo) assert.NoError(t, err) manager.asgAutoDiscoverySpecs = specs // assert cache is empty before fetching auto asgs asgs := manager.asgCache.get() assert.Equal(t, 0, len(asgs)) manager.fetchAutoAsgs() asgs = manager.asgCache.get() assert.Equal(t, 1, len(asgs)) assert.Equal(t, vmssName, asgs[0].Id()) assert.Equal(t, minVal, asgs[0].MinSize()) assert.Equal(t, maxVal, asgs[0].MaxSize()) // test explicitlyConfigured manager.explicitlyConfigured[vmssName] = true manager.fetchAutoAsgs() asgs = manager.asgCache.get() assert.Equal(t, 1, len(asgs)) }
explode_data.jsonl/12797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 659 }
[ 2830, 3393, 20714, 13253, 32, 1991, 82, 88124, 778, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 54879, 778, 675, 1669, 330, 1944, 12, 7338, 778, 698, 54879, 778, 56...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintln(t *testing.T) { t.Log("Testing Println()") // Println will ALWAYS add a space between the label prefix and the // value printed. This needs correction now. testprintlntable := []struct { setlevel int level int inputtext string outputtext string }{ {setlevel: 2, level: 0, inputtext: "Hello", outputtext: "Hello\n"}, {setlevel: 2, level: 1, inputtext: "Hello", outputtext: "Hello\n"}, {setlevel: 2, level: 2, inputtext: "Hello", outputtext: "Hello\n"}, {setlevel: 2, level: 3, inputtext: "Hello", outputtext: ""}, {setlevel: 2, level: 4, inputtext: "Hello", outputtext: ""}, {setlevel: 2, level: 5, inputtext: "Hello", outputtext: ""}, {setlevel: 2, level: -1, inputtext: "Hello", outputtext: ""}, {setlevel: 0, level: 0, inputtext: "Hello", outputtext: "Hello\n"}, {setlevel: 0, level: 1, inputtext: "Hello", outputtext: ""}, {setlevel: 0, level: 2, inputtext: "Hello", outputtext: ""}, {setlevel: 0, level: 3, inputtext: "Hello", outputtext: ""}, {setlevel: 0, level: 4, inputtext: "Hello", outputtext: ""}, {setlevel: 0, level: 5, inputtext: "Hello", outputtext: ""}, {setlevel: 0, level: -1, inputtext: "Hello", outputtext: ""}, {setlevel: 4, level: 0, inputtext: "Hello", outputtext: "Hello\n"}, {setlevel: 4, level: 1, inputtext: "Hello", outputtext: "Hello\n"}, {setlevel: 4, level: 2, inputtext: "Hello", outputtext: "Hello\n"}, {setlevel: 4, level: 3, inputtext: "Hello", outputtext: "[V] Hello\n"}, {setlevel: 4, level: 4, inputtext: "Hello", outputtext: "[D] Hello\n"}, {setlevel: 4, level: 5, inputtext: "Hello", outputtext: ""}, {setlevel: 4, level: -1, inputtext: "Hello", outputtext: ""}, } tl := &testlogger{} kuttilog.SetLogger(tl) for _, testrow := range testprintlntable { tl.logstring = "" kuttilog.Setloglevel(testrow.setlevel) kuttilog.Println(testrow.level, testrow.inputtext) if tl.logstring != testrow.outputtext { t.Errorf( "\n Set Level: %v\n Level: %v\n Input: %v\n Expected output: %#v\n Received output: %#vEND", testrow.setlevel, testrow.level, 
testrow.inputtext, testrow.outputtext, tl.logstring, ) t.Fail() } } kuttilog.ResetLogger() }
explode_data.jsonl/60139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 885 }
[ 2830, 3393, 8994, 2261, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 445, 16451, 8213, 2261, 368, 5130, 197, 322, 8213, 2261, 686, 67414, 912, 264, 3550, 1948, 279, 2383, 9252, 323, 279, 198, 197, 322, 897, 16709, 13, 1096, 3880, 26262, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInstanceCredentialUserInfo(t *testing.T) { // Enable the OriginatingIdentity feature prevOrigIDEnablement := sctestutil.EnableOriginatingIdentity(t, true) defer utilfeature.DefaultMutableFeatureGate.Set(fmt.Sprintf("%v=%v", scfeatures.OriginatingIdentity, prevOrigIDEnablement)) creatorUserName := "creator" createdInstanceCredential := getTestInstanceCredential() createContext := sctestutil.ContextWithUserName(creatorUserName) bindingRESTStrategies.PrepareForCreate(createContext, createdInstanceCredential) if e, a := creatorUserName, createdInstanceCredential.Spec.UserInfo.Username; e != a { t.Errorf("unexpected user info in created spec: expected %q, got %q", e, a) } // TODO: Un-comment the following portion of this test when there is a field // in the spec to which the reconciler allows a change. // updaterUserName := "updater" // updatedInstanceCredential := getTestInstanceCredential() // updateContext := sctestutil.ContextWithUserName(updaterUserName) // bindingRESTStrategies.PrepareForUpdate(updateContext, updatedInstanceCredential, createdInstanceCredential) // if e, a := updaterUserName, updatedInstanceCredential.Spec.UserInfo.Username; e != a { // t.Errorf("unexpected user info in updated spec: expected %q, got %q", e, a) // } deleterUserName := "deleter" deletedInstanceCredential := getTestInstanceCredential() deleteContext := sctestutil.ContextWithUserName(deleterUserName) bindingRESTStrategies.CheckGracefulDelete(deleteContext, deletedInstanceCredential, nil) if e, a := deleterUserName, deletedInstanceCredential.Spec.UserInfo.Username; e != a { t.Errorf("unexpected user info in deleted spec: expected %q, got %q", e, a) } }
explode_data.jsonl/80701
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 542 }
[ 2830, 3393, 2523, 48265, 36158, 1155, 353, 8840, 836, 8, 341, 197, 322, 18567, 279, 17116, 1095, 18558, 4565, 198, 50728, 62726, 915, 11084, 478, 1669, 274, 67880, 1314, 32287, 13298, 1095, 18558, 1155, 11, 830, 340, 16867, 4094, 12753, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTaskResize(t *testing.T) { t.Parallel() client, err := newClient(t, address) if err != nil { t.Fatal(err) } defer client.Close() var ( image Image ctx, cancel = testContext() id = t.Name() ) defer cancel() image, err = client.GetImage(ctx, testImage) if err != nil { t.Fatal(err) } container, err := client.NewContainer(ctx, id, WithNewSnapshot(id, image), WithNewSpec(oci.WithImageConfig(image), withExitStatus(7))) if err != nil { t.Fatal(err) } defer container.Delete(ctx, WithSnapshotCleanup) task, err := container.NewTask(ctx, empty()) if err != nil { t.Fatal(err) } defer task.Delete(ctx) statusC, err := task.Wait(ctx) if err != nil { t.Fatal(err) } if err := task.Resize(ctx, 32, 32); err != nil { t.Fatal(err) } task.Kill(ctx, syscall.SIGKILL) <-statusC }
explode_data.jsonl/39646
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 6262, 30561, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 25291, 11, 1848, 1669, 501, 2959, 1155, 11, 2621, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 16867, 2943, 10421, 2822, 2405, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestCallFunctionWithExplicitThis(t *testing.T) { t.Parallel() ctx := NewIsolate().NewContext() this, _ := ctx.Eval(`(function(){ this.z = 3; return this; })()`, "") add, _ := ctx.Eval(`((x,y)=>(x+y+this.z))`, "") one, _ := ctx.Eval(`1`, "") two, _ := ctx.Eval(`2`, "") res, err := add.Call(this, one, two) if err != nil { t.Fatal(err) } else if num := res.Int64(); num != 6 { t.Errorf("Expected 6, got %v (%v)", num, res) } }
explode_data.jsonl/81558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 7220, 5152, 2354, 98923, 1986, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 1532, 3872, 33066, 1005, 3564, 1972, 741, 2046, 11, 716, 1669, 5635, 5142, 831, 5809, 7, 1688, 11895, 419, 3938, 284, 220, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDefaultValuesNotAllowedWithSlice(t *testing.T) { var args struct { A []int `default:"123"` // required not allowed with default! } err := parse("", &args) assert.EqualError(t, err, ".A: default values are not supported for slice fields") }
explode_data.jsonl/13083
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 3675, 6227, 97634, 2354, 33236, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 2036, 341, 197, 22985, 3056, 396, 1565, 2258, 2974, 16, 17, 18, 39917, 442, 2567, 537, 5420, 448, 1638, 4894, 197, 630, 9859, 1669, 4715, 19814, 609...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypeSystem_InputObjectFieldsMustHaveInputTypes_RejectsAnEmptyInputFieldType(t *testing.T) { _, err := schemaWithInputFieldOfType(nil) expectedError := `BadInputObject.badField field type must be Input Type but got: <nil>.` if err == nil || err.Error() != expectedError { t.Fatalf("Expected error: %v, got %v", expectedError, err) } }
explode_data.jsonl/79187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 929, 2320, 48653, 1190, 8941, 31776, 12116, 2505, 4173, 50693, 583, 82, 2082, 3522, 2505, 63733, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10802, 2354, 2505, 1877, 34696, 27907, 340, 42400, 1454, 1669, 1565, 17082, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRemoveWhileRunning(t *testing.T) { wg := &sync.WaitGroup{} wg.Add(1) cron := newWithSeconds() cron.Start() defer cron.Stop() id, _ := cron.AddFunc("* * * * * ?", func() { wg.Done() }) cron.Remove(id) select { case <-time.After(OneSecond): case <-wait(wg): t.FailNow() } }
explode_data.jsonl/8300
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 13021, 7983, 18990, 1155, 353, 8840, 836, 8, 341, 72079, 1669, 609, 12996, 28384, 2808, 16094, 72079, 1904, 7, 16, 692, 1444, 2248, 1669, 501, 2354, 15343, 741, 1444, 2248, 12101, 741, 16867, 46582, 30213, 741, 15710, 11, 71...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientMethodWithComplexBody(t *testing.T) { Convey("TestClientMethodWithComplexBody", t, func(c C) { targetDir, err := ioutil.TempDir("", "") c.So(err, ShouldBeNil) apiDef := new(raml.APIDefinition) err = raml.ParseFile("../fixtures/body.raml", apiDef) c.So(err, ShouldBeNil) client := NewClient(apiDef, clientNameRequests, true) err = client.Generate(targetDir) c.So(err, ShouldBeNil) rootFixture := "./fixtures/method/client/complex_body/requests_unmarshall" files := []string{ "arrays_service.py", } for _, f := range files { s, err := utils.TestLoadFile(filepath.Join(targetDir, f)) c.So(err, ShouldBeNil) tmpl, err := utils.TestLoadFile(filepath.Join(rootFixture, f)) c.So(err, ShouldBeNil) c.So(s, ShouldEqual, tmpl) } c.Reset(func() { os.RemoveAll(targetDir) }) }) }
explode_data.jsonl/19337
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 368 }
[ 2830, 3393, 2959, 3523, 2354, 31137, 5444, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 2271, 2959, 3523, 2354, 31137, 5444, 497, 259, 11, 2915, 1337, 356, 8, 341, 197, 28861, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 14676, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChangeOrgUserStatus(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) testSuccess := func(orgID, userID int64, public bool) { assert.NoError(t, ChangeOrgUserStatus(orgID, userID, public)) orgUser := AssertExistsAndLoadBean(t, &OrgUser{OrgID: orgID, UID: userID}).(*OrgUser) assert.Equal(t, public, orgUser.IsPublic) } testSuccess(3, 2, false) testSuccess(3, 2, false) testSuccess(3, 4, true) assert.NoError(t, ChangeOrgUserStatus(NonexistentID, NonexistentID, true)) }
explode_data.jsonl/71069
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 4072, 42437, 1474, 2522, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 12367, 18185, 7188, 1669, 2915, 36246, 915, 11, 35204, 526, 21, 19, 11, 584, 1807, 8, 341, 197, 6948, 35699, 1155, 11, 10388,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIndexOfImageSignature is a table-driven test covering both lookup
// helpers: IndexOfImageSignatureByName (matched by the "type:content" name)
// and IndexOfImageSignature (matched by type and content directly). Both are
// expected to return the index of the first matching signature, or -1.
func TestIndexOfImageSignature(t *testing.T) {
	for _, tc := range []struct {
		name          string
		signatures    []ImageSignature
		matchType     string
		matchContent  []byte
		expectedIndex int
	}{
		{
			name:          "empty",
			matchType:     ImageSignatureTypeAtomicImageV1,
			matchContent:  []byte("blob"),
			expectedIndex: -1,
		},
		{
			name: "not present",
			signatures: []ImageSignature{
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("binary"),
				},
				{
					Type:    "custom",
					Content: []byte("blob"),
				},
			},
			matchType:     ImageSignatureTypeAtomicImageV1,
			matchContent:  []byte("blob"),
			expectedIndex: -1,
		},
		{
			name: "first and only",
			signatures: []ImageSignature{
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("binary"),
				},
			},
			matchType:     ImageSignatureTypeAtomicImageV1,
			matchContent:  []byte("binary"),
			expectedIndex: 0,
		},
		{
			name: "last",
			signatures: []ImageSignature{
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("binary"),
				},
				{
					Type:    "custom",
					Content: []byte("blob"),
				},
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("blob"),
				},
			},
			matchType:     ImageSignatureTypeAtomicImageV1,
			matchContent:  []byte("blob"),
			expectedIndex: 2,
		},
		{
			// Several entries match; the first full match (type AND content)
			// wins, so index 1 is expected rather than 3 or 4.
			name: "many matches",
			signatures: []ImageSignature{
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("blob2"),
				},
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("blob"),
				},
				{
					Type:    "custom",
					Content: []byte("blob"),
				},
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("blob"),
				},
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("blob"),
				},
				{
					Type:    ImageSignatureTypeAtomicImageV1,
					Content: []byte("binary"),
				},
			},
			matchType:     ImageSignatureTypeAtomicImageV1,
			matchContent:  []byte("blob"),
			expectedIndex: 1,
		},
	} {
		// Build the image; each signature's Name is derived as "type:content"
		// so the by-name lookup can be exercised against the same data.
		im := Image{
			Signatures: make([]ImageSignature, len(tc.signatures)),
		}
		for i, signature := range tc.signatures {
			signature.Name = fmt.Sprintf("%s:%s", signature.Type, signature.Content)
			im.Signatures[i] = signature
		}
		matchName := fmt.Sprintf("%s:%s", tc.matchType, tc.matchContent)

		index := IndexOfImageSignatureByName(im.Signatures, matchName)
		if index != tc.expectedIndex {
			t.Errorf("[%s] got unexpected index: %d != %d", tc.name, index, tc.expectedIndex)
		}

		index = IndexOfImageSignature(im.Signatures, tc.matchType, tc.matchContent)
		if index != tc.expectedIndex {
			t.Errorf("[%s] got unexpected index: %d != %d", tc.name, index, tc.expectedIndex)
		}
	}
}
explode_data.jsonl/40838
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1278 }
[ 2830, 3393, 27376, 1906, 25088, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 69054, 2789, 262, 3056, 1906, 25088, 198, 197, 47706, 929, 257, 914, 198, 197, 47706, 2762, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestCollElgNotifier drives the collection-eligibility notifier with a mock
// state update and verifies that the registered listener is notified only for
// the collection the peer just became eligible for.
func TestCollElgNotifier(t *testing.T) {
	mockDeployedChaincodeInfoProvider := &mock.DeployedChaincodeInfoProvider{}
	mockDeployedChaincodeInfoProvider.UpdatedChaincodesReturns([]*ledger.ChaincodeLifecycleInfo{
		{Name: "cc1"},
	}, nil)
	// Returns 3 collections - the bool value indicates the eligibility of peer for corresponding collection
	mockDeployedChaincodeInfoProvider.ChaincodeInfoReturnsOnCall(0, &ledger.DeployedChaincodeInfo{
		ExplicitCollectionConfigPkg: testutilPrepapreMockCollectionConfigPkg(
			map[string]bool{"coll1": true, "coll2": true, "coll3": false})}, nil)
	// post commit - returns 4 collections
	mockDeployedChaincodeInfoProvider.ChaincodeInfoReturnsOnCall(1, &ledger.DeployedChaincodeInfo{
		ExplicitCollectionConfigPkg: testutilPrepapreMockCollectionConfigPkg(
			map[string]bool{"coll1": false, "coll2": true, "coll3": true, "coll4": true})}, nil)

	// Membership is decided purely by the mock policy helper.
	mockMembershipInfoProvider := &mock.MembershipInfoProvider{}
	mockMembershipInfoProvider.AmMemberOfStub = func(channel string, p *peer.CollectionPolicyConfig) (bool, error) {
		return testutilIsEligibleForMockPolicy(p), nil
	}

	mockCollElgListener := &mockCollElgListener{}

	collElgNotifier := &collElgNotifier{
		mockDeployedChaincodeInfoProvider,
		mockMembershipInfoProvider,
		make(map[string]collElgListener),
	}
	collElgNotifier.registerListener("testLedger", mockCollElgListener)

	// The update content itself is irrelevant; only the chaincode lifecycle
	// transition reported by the mocks matters.
	collElgNotifier.HandleStateUpdates(&ledger.StateUpdateTrigger{
		LedgerID:           "testLedger",
		CommittingBlockNum: uint64(500),
		StateUpdates: map[string]*ledger.KVStateUpdates{
			"doesNotMatterNS": {
				PublicUpdates: []*kvrwset.KVWrite{
					{
						Key:   "doesNotMatterKey",
						Value: []byte("doesNotMatterVal"),
					},
				},
			},
		},
	})

	// event triggered should only contain "coll3" as this is the only collection
	// for which peer became from ineligile to eligible by upgrade tx
	assert.Equal(t, uint64(500), mockCollElgListener.receivedCommittingBlk)
	assert.Equal(t,
		map[string][]string{
			"cc1": {"coll3"},
		},
		mockCollElgListener.receivedNsCollMap,
	)
}
explode_data.jsonl/33482
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 747 }
[ 2830, 3393, 15265, 6582, 70, 64729, 1155, 353, 8840, 836, 8, 341, 77333, 69464, 291, 18837, 1851, 1731, 5179, 1669, 609, 16712, 34848, 1989, 291, 18837, 1851, 1731, 5179, 16094, 77333, 69464, 291, 18837, 1851, 1731, 5179, 13, 16196, 18837...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGrid builds a 10x10 commit grid (each commit (i,j) has parents (i-1,j)
// and (i,j-1)) and checks that FindMergeBase returns the expected meet point
// for several pairs of commits.
func TestGrid(t *testing.T) {
	// Construct the following grid, taken from https://github.com/git/git/blob/master/t/t6600-test-reach.sh
	//             (10,10)
	//            /       \
	//         (10,9)    (9,10)
	//        /     \   /      \
	//    (10,8)   (9,9)      (8,10)
	//   /     \   /    \    /     \
	//         ( continued...)
	//   \     /   \    /    \     /
	//    (3,1)    (2,2)      (1,3)
	//        \     /    \    /
	//         (2,1)      (2,1)
	//              \    /
	//              (1,1)
	grid := make([][]*graveler.Commit, 10)
	kv := make(map[graveler.CommitID]*graveler.Commit)
	for i := 0; i < 10; i++ {
		grid[i] = make([]*graveler.Commit, 10)
		for j := 0; j < 10; j++ {
			// Parents: left neighbor (i-1,j) and lower neighbor (i,j-1),
			// when they exist. Commit IDs are formatted "i-j".
			parents := make([]graveler.CommitID, 0, 2)
			if i > 0 {
				parents = append(parents, graveler.CommitID(fmt.Sprintf("%d-%d", i-1, j)))
			}
			if j > 0 {
				parents = append(parents, graveler.CommitID(fmt.Sprintf("%d-%d", i, j-1)))
			}
			grid[i][j] = &graveler.Commit{Message: fmt.Sprintf("%d-%d", i, j), Parents: parents}
			kv[graveler.CommitID(fmt.Sprintf("%d-%d", i, j))] = grid[i][j]
		}
	}
	getter := newReader(kv)

	// For grid commits the merge base of (a,b) and (c,d) is
	// (min(a,c), min(b,d)).
	c, err := ref.FindMergeBase(context.Background(), getter, "", "7-4", "5-6")
	testutil.Must(t, err)
	verifyResult(t, c, []string{"5-4"})

	c, err = ref.FindMergeBase(context.Background(), getter, "", "1-2", "2-1")
	testutil.Must(t, err)
	verifyResult(t, c, []string{"1-1"})

	c, err = ref.FindMergeBase(context.Background(), getter, "", "0-9", "9-0")
	testutil.Must(t, err)
	verifyResult(t, c, []string{"0-0"})

	c, err = ref.FindMergeBase(context.Background(), getter, "", "6-9", "9-6")
	testutil.Must(t, err)
	verifyResult(t, c, []string{"6-6"})
}
explode_data.jsonl/63462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 917 }
[ 2830, 3393, 3543, 1155, 353, 8840, 836, 8, 341, 197, 322, 18678, 279, 2701, 5827, 11, 4429, 504, 3703, 1110, 5204, 905, 60590, 60590, 34827, 23303, 5523, 5523, 21, 21, 15, 15, 16839, 5504, 610, 2395, 198, 197, 322, 1797, 320, 16, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestConcurrentShutdown(t *testing.T) { defer runtime.GOMAXPROCS(runtime.NumCPU()) if unprivileged() { t.Skip("skipping test in unprivileged mode.") } var wg sync.WaitGroup for i := 0; i < 10; i++ { wg.Add(1) go func(i int) { c, err := NewContainer(strconv.Itoa(i)) if err != nil { t.Errorf(err.Error()) } if err := c.Shutdown(30 * time.Second); err != nil { t.Errorf(err.Error()) } c.Wait(STOPPED, 30*time.Second) if c.Running() { t.Errorf("Shutting down the container failed...") } wg.Done() }(i) } wg.Wait() }
explode_data.jsonl/2809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 270 }
[ 2830, 3393, 1109, 3231, 62004, 1155, 353, 8840, 836, 8, 341, 16867, 15592, 1224, 1898, 2954, 9117, 6412, 89467, 39847, 31615, 12367, 743, 650, 97288, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 304, 650, 97288, 3856, 13053, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestOperationADCWithOverflow executes the ADC instruction with operands
// whose sum overflows a signed byte and asserts that both the carry (C) and
// overflow (V) status flags are set afterwards.
func TestOperationADCWithOverflow(t *testing.T) {
	cpu := testCPU
	cpu.Reset()
	// Accumulator holds -10; memory at the absolute address holds -127.
	// Their sum (-137) does not fit in a signed byte.
	v := -10
	z := -127
	cpu.a = byte(v)
	cpu.addressAbs = Word(0x0100)
	cpu.bus.CPUWrite(cpu.addressAbs, byte(z))
	ADC(cpu)
	assertTrue(t, cpu.StatusRegister(C))
	assertTrue(t, cpu.StatusRegister(V))
}
explode_data.jsonl/73053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 8432, 31956, 2354, 42124, 1155, 353, 8840, 836, 8, 341, 80335, 1669, 1273, 31615, 198, 80335, 36660, 741, 5195, 1669, 481, 16, 15, 198, 20832, 1669, 481, 16, 17, 22, 198, 80335, 5849, 284, 4922, 3747, 340, 80335, 13792, 27...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReplicaWinsOverSize(t *testing.T) { size := int32(3) jaeger := v1.NewJaeger(types.NamespacedName{Name: "TestReplicaWinsOverSize"}) jaeger.Spec.Collector.Size = 2 jaeger.Spec.Collector.Replicas = &size collector := NewCollector(jaeger) dep := collector.Get() assert.Equal(t, int32(3), *dep.Spec.Replicas) }
explode_data.jsonl/69115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 18327, 15317, 96186, 1918, 1695, 1155, 353, 8840, 836, 8, 341, 13832, 1669, 526, 18, 17, 7, 18, 340, 197, 5580, 1878, 1669, 348, 16, 7121, 52445, 1878, 52613, 98932, 68552, 675, 63121, 25, 330, 2271, 18327, 15317, 96186, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTxNotifierHistoricalSpendDispatch checks that a spend that happened
// before the notifier's start height is dispatched once its details are
// handed over via UpdateSpendDetails, and that spending the spending
// transaction itself does not produce a second notification.
func TestTxNotifierHistoricalSpendDispatch(t *testing.T) {
	t.Parallel()

	const startingHeight = 10

	hintCache := newMockHintCache()
	n := chainntnfs.NewTxNotifier(startingHeight, 100, hintCache, hintCache)

	// We'll start by constructing the spending details of the outpoint
	// below.
	spentOutpoint := zeroOutPoint
	spendTx := wire.NewMsgTx(2)
	spendTx.AddTxIn(&wire.TxIn{PreviousOutPoint: zeroOutPoint})
	spendTxHash := spendTx.TxHash()

	expectedSpendDetails := &chainntnfs.SpendDetail{
		SpentOutPoint:     &spentOutpoint,
		SpenderTxHash:     &spendTxHash,
		SpendingTx:        spendTx,
		SpenderInputIndex: 0,
		SpendingHeight:    startingHeight - 1,
	}

	// We'll register for a spend notification of the outpoint and ensure
	// that a notification isn't dispatched.
	ntfn := &chainntnfs.SpendNtfn{
		OutPoint: spentOutpoint,
		Event:    chainntnfs.NewSpendEvent(nil),
	}
	if _, err := n.RegisterSpend(ntfn); err != nil {
		t.Fatalf("unable to register spend ntfn: %v", err)
	}

	select {
	case <-ntfn.Event.Spend:
		t.Fatal("received unexpected spend notification")
	default:
	}

	// Because we're interested in testing the case of a historical spend,
	// we'll hand off the spending details of the outpoint to the notifier
	// as it is not possible for it to view historical events in the chain.
	// By doing this, we replicate the functionality of the ChainNotifier.
	err := n.UpdateSpendDetails(ntfn.OutPoint, expectedSpendDetails)
	if err != nil {
		t.Fatalf("unable to update spend details: %v", err)
	}

	// Now that we have the spending details, we should receive a spend
	// notification. We'll ensure that the details match as intended.
	select {
	case spendDetails := <-ntfn.Event.Spend:
		assertSpendDetails(t, spendDetails, expectedSpendDetails)
	default:
		t.Fatalf("expected to receive spend details")
	}

	// Finally, we'll ensure that if the spending transaction has also been
	// spent, then we don't receive another spend notification.
	prevOut := wire.OutPoint{Hash: spendTxHash, Index: 0}
	spendOfSpend := wire.NewMsgTx(2)
	spendOfSpend.AddTxIn(&wire.TxIn{PreviousOutPoint: prevOut})
	block := pfcutil.NewBlock(&wire.MsgBlock{
		Transactions: []*wire.MsgTx{spendOfSpend},
	})
	err = n.ConnectTip(block.Hash(), startingHeight+1, block.Transactions())
	if err != nil {
		t.Fatalf("unable to connect block: %v", err)
	}
	if err := n.NotifyHeight(startingHeight + 1); err != nil {
		t.Fatalf("unable to dispatch notifications: %v", err)
	}

	select {
	case <-ntfn.Event.Spend:
		t.Fatal("received unexpected spend notification")
	default:
	}
}
explode_data.jsonl/69554
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 877 }
[ 2830, 3393, 31584, 64729, 48983, 938, 50, 3740, 11283, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 4777, 5916, 3640, 284, 220, 16, 15, 271, 9598, 396, 8233, 1669, 501, 11571, 26987, 8233, 741, 9038, 1669, 8781, 406, 77, 384...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestFloat32_Remove(t *testing.T) { testcases := []struct { name string s Float32 input []float32 expect []float32 }{ { name: "test Float32 Remove, inputs nothing", s: map[float32]struct{}{1: {}, 1.5: {}, 1.2: {}}, input: []float32{}, expect: []float32{1, 1.5, 1.2}, }, { name: "test Float32 Remove, inputs multiple exit elements", s: map[float32]struct{}{1: {}, 1.5: {}, 1.2: {}}, input: []float32{1, 1.2}, expect: []float32{1.5}, }, { name: "test Float32 Remove, inputs multiple non-exit elements", s: map[float32]struct{}{1: {}, 1.5: {}, 1.2: {}}, input: []float32{0, 1.9, 1.3}, expect: []float32{1, 1.5, 1.2}, }, } for _, tc := range testcases { t.Logf("running scenario: %s", tc.name) tc.s.Remove(tc.input...) validateFloat32(t, tc.s, tc.expect) } }
explode_data.jsonl/60102
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 421 }
[ 2830, 3393, 5442, 18, 17, 66843, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 1903, 414, 13001, 18, 17, 198, 197, 22427, 220, 3056, 3649, 18, 17, 198, 197, 24952, 3056, 3649, 18,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestServerAppendEntriesWithStaleTermsAreRejected verifies that an
// AppendEntries request carrying a term older than the server's current term
// is rejected and leaves the log's commit state untouched.
func TestServerAppendEntriesWithStaleTermsAreRejected(t *testing.T) {
	s := newTestServer("1", &testTransporter{})

	s.Start()
	defer s.Stop()
	// Advance the server past the term used by the request below (term 1).
	s.(*server).currentTerm = 2

	// Append single entry.
	e, _ := newLogEntry(nil, 1, 1, &testCommand1{Val: "foo", I: 10})
	entries := []*LogEntry{e}
	resp := s.AppendEntries(newAppendEntriesRequest(1, 0, 0, 0, "ldr", entries))
	if resp.Term != 2 || resp.Success {
		t.Fatalf("AppendEntries should have failed: %v/%v", resp.Term, resp.Success)
	}
	// Nothing may have been committed by the rejected request.
	if index, term := s.(*server).log.commitInfo(); index != 0 || term != 0 {
		t.Fatalf("Invalid commit info [IDX=%v, TERM=%v]", index, term)
	}
}
explode_data.jsonl/44065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 5475, 23877, 24533, 2354, 623, 1574, 43128, 11526, 77693, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 2271, 5475, 445, 16, 497, 609, 1944, 27560, 261, 6257, 692, 1903, 12101, 2822, 16867, 274, 30213, 741, 1903, 41399, 403...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_PrintInt(t *testing.T) { cases := []int{ 111, 69, 420, 000, 646545465, } for _, c := range cases { output := Print(c) assert.Equal(t, fmt.Sprintf("%+v\n", c), output) } }
explode_data.jsonl/43705
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 45788, 1072, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 396, 515, 197, 197, 16, 16, 16, 11, 220, 21, 24, 11, 220, 19, 17, 15, 11, 220, 15, 15, 15, 11, 220, 21, 19, 21, 20, 19, 20, 19, 21, 20, 345, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestMapProxy_EvictWithNilKey asserts that Evict rejects a nil key with an
// error instead of silently doing nothing.
func TestMapProxy_EvictWithNilKey(t *testing.T) {
	_, err := mp.Evict(nil)
	AssertErrorNotNil(t, err, "evict did not return an error for nil key")
	// Leave the shared map proxy clean for subsequent tests.
	mp.Clear()
}
explode_data.jsonl/56985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 65 }
[ 2830, 3393, 2227, 16219, 2089, 85, 849, 2354, 19064, 1592, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 5142, 85, 849, 27907, 340, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, 5120, 849, 1521, 537, 470, 458, 1465, 369, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestShouldGetRemovedUserGroupsFromBackend checks that the session refresh
// path picks up group removals from the authentication backend: after the
// "admin" group is removed and the refresh TTL expires, access to the
// admin-protected domain is denied and the group disappears from the session.
func TestShouldGetRemovedUserGroupsFromBackend(t *testing.T) {
	mock := mocks.NewMockAutheliaCtx(t)
	defer mock.Close()

	// Setup pointer to john so we can adjust it during the test.
	user := &authentication.UserDetails{
		Username: "john",
		Groups: []string{
			"admin",
			"users",
		},
		Emails: []string{
			"john@example.com",
		},
	}

	verifyGet := VerifyGet(verifyGetCfg)
	// The backend is consulted twice in this test: once for the initial
	// refresh and once after the admin group is removed below.
	mock.UserProviderMock.EXPECT().GetDetails("john").Return(user, nil).Times(2)

	clock := mocks.TestingClock{}
	clock.Set(time.Now())

	// Seed a session whose refresh TTL is already in the past so the next
	// request with a relevant subject triggers a backend refresh.
	userSession := mock.Ctx.GetSession()
	userSession.Username = user.Username
	userSession.AuthenticationLevel = authentication.TwoFactor
	userSession.LastActivity = clock.Now().Unix()
	userSession.RefreshTTL = clock.Now().Add(-1 * time.Minute)
	userSession.Groups = user.Groups
	userSession.Emails = user.Emails
	userSession.KeepMeLoggedIn = true
	err := mock.Ctx.SaveSession(userSession)
	require.NoError(t, err)

	mock.Ctx.Request.Header.Set("X-Original-URL", "https://two-factor.example.com")

	verifyGet(mock.Ctx)
	assert.Equal(t, 200, mock.Ctx.Response.StatusCode())

	// Request should get refresh settings and new user details.
	mock.Ctx.Request.Header.Set("X-Original-URL", "https://admin.example.com")
	verifyGet(mock.Ctx)
	assert.Equal(t, 200, mock.Ctx.Response.StatusCode())

	// Check Refresh TTL has been updated since admin.example.com has a group subject and refresh is enabled.
	userSession = mock.Ctx.GetSession()

	// Check user groups are correct.
	require.Len(t, userSession.Groups, len(user.Groups))
	assert.Equal(t, clock.Now().Add(5*time.Minute).Unix(), userSession.RefreshTTL.Unix())
	assert.Equal(t, "admin", userSession.Groups[0])
	assert.Equal(t, "users", userSession.Groups[1])

	// Remove the admin group, and force the next request to refresh.
	user.Groups = []string{"users"}
	userSession.RefreshTTL = clock.Now().Add(-1 * time.Second)
	err = mock.Ctx.SaveSession(userSession)
	require.NoError(t, err)

	mock.Ctx.Request.Header.Set("X-Original-URL", "https://admin.example.com")
	verifyGet(mock.Ctx)
	assert.Equal(t, 403, mock.Ctx.Response.StatusCode())

	// Check admin group is removed from the session.
	userSession = mock.Ctx.GetSession()
	assert.Equal(t, clock.Now().Add(5*time.Minute).Unix(), userSession.RefreshTTL.Unix())
	require.Len(t, userSession.Groups, 1)
	assert.Equal(t, "users", userSession.Groups[0])
}
explode_data.jsonl/20204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 852 }
[ 2830, 3393, 14996, 1949, 42642, 1474, 22173, 3830, 29699, 1155, 353, 8840, 836, 8, 341, 77333, 1669, 68909, 7121, 11571, 5087, 35929, 23684, 1155, 340, 16867, 7860, 10421, 2822, 197, 322, 18626, 7445, 311, 39642, 773, 582, 646, 7500, 432,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestClient_ReplayBlocks checks that the CLI ReplayFromBlock command runs
// without error when invoked with a block-number flag.
func TestClient_ReplayBlocks(t *testing.T) {
	t.Parallel()

	// Config overrides: enable the EVM, disable nonce auto-sync and the
	// balance monitor, and use the FixedPrice gas estimator.
	app := startNewApplication(t,
		withConfigSet(func(c *configtest.TestGeneralConfig) {
			c.Overrides.EVMEnabled = null.BoolFrom(true)
			c.Overrides.GlobalEvmNonceAutoSync = null.BoolFrom(false)
			c.Overrides.GlobalBalanceMonitorEnabled = null.BoolFrom(false)
			c.Overrides.GlobalGasEstimatorMode = null.StringFrom("FixedPrice")
		}))
	client, _ := app.NewClientAndRenderer()

	// Simulate `--block-number 42` on the CLI context.
	set := flag.NewFlagSet("flagset", 0)
	set.Int64("block-number", 42, "")
	c := cli.NewContext(nil, set, nil)
	assert.NoError(t, client.ReplayFromBlock(c))
}
explode_data.jsonl/5264
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 2959, 50693, 1363, 29804, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 28236, 1669, 1191, 3564, 4988, 1155, 345, 197, 46948, 2648, 1649, 18552, 1337, 353, 1676, 1944, 8787, 15415, 2648, 8, 341, 298, 1444, 13, 80010...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCheckoutSuccess dirties a file in the test repo's worktree, confirms
// `git diff --name-only` reports it, checks the file out from the default
// branch, and confirms the diff is empty afterwards.
func TestCheckoutSuccess(t *testing.T) {
	testRepo := newTestRepo(t)
	defer testRepo.cleanup(t)

	// Overwrite the tracked file so the worktree is dirty.
	require.Nil(t, os.WriteFile(
		testRepo.testFileName,
		[]byte("hello world"),
		os.FileMode(0o644),
	))

	res, err := command.NewWithWorkDir(
		testRepo.sut.Dir(), "git", "diff", "--name-only").Run()
	require.Nil(t, err)
	require.True(t, res.Success())
	require.Contains(t, res.Output(), filepath.Base(testRepo.testFileName))

	// Restore the file from the default branch.
	err = testRepo.sut.Checkout(git.DefaultBranch, testRepo.testFileName)
	require.Nil(t, err)

	res, err = command.NewWithWorkDir(
		testRepo.sut.Dir(), "git", "diff", "--name-only").Run()
	require.Nil(t, err)
	require.True(t, res.Success())
	require.Empty(t, res.Output())
}
explode_data.jsonl/14013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 55145, 7188, 1155, 353, 8840, 836, 8, 341, 18185, 25243, 1669, 501, 2271, 25243, 1155, 340, 16867, 1273, 25243, 87689, 1155, 692, 17957, 59678, 1155, 11, 2643, 4073, 1703, 1006, 197, 18185, 25243, 5958, 10903, 345, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInvalidAdminNodeOU loads an MSP whose configuration enables NodeOUs
// while the administrator certificate carries no valid organizational unit,
// and checks the version-dependent outcome.
func TestInvalidAdminNodeOU(t *testing.T) {
	// Test data /nodeous1: the configuration enables NodeOUs, but the
	// administrator does not carry any valid OU. Therefore MSP
	// initialization must fail under MSP v1.1.
	thisMSP, err := getLocalMSPWithVersionAndError(t, "testdata/nodeous1", MSPv1_1)
	assert.True(t, thisMSP.(*bccspmsp).ouEnforcement)
	assert.Error(t, err)

	// MSP v1.0 should not fail, as OU enforcement is off for that version.
	thisMSP, err = getLocalMSPWithVersionAndError(t, "testdata/nodeous1", MSPv1_0)
	assert.False(t, thisMSP.(*bccspmsp).ouEnforcement)
	assert.NoError(t, err)
}
explode_data.jsonl/2648
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 7928, 7210, 1955, 11922, 1155, 353, 8840, 836, 8, 341, 322, 81705, 20074, 14, 92374, 16, 28311, 322, 85767, 111456, 3509, 782, 3837, 77288, 95457, 16530, 107426, 198, 322, 99885, 104775, 36885, 55502, 1773, 101886, 3837, 44, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGocloak_GetServerInfo fetches the server info with an admin token, then
// forces the next request to fail and expects the error to be surfaced.
func TestGocloak_GetServerInfo(t *testing.T) {
	t.Parallel()
	client := NewClientWithDebug(t)
	token := GetAdminToken(t, client)
	serverInfo, err := client.GetServerInfo(
		token.AccessToken,
	)
	FailIfErr(t, err, "Failed to fetch server info")
	t.Logf("Server Info: %+v", serverInfo)

	// Sabotage the next request and verify GetServerInfo reports an error.
	FailRequest(client, nil, 1, 0)
	_, err = client.GetServerInfo(
		token.AccessToken,
	)
	FailIfNotErr(t, err, "")
}
explode_data.jsonl/79504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 38, 509, 385, 585, 13614, 5475, 1731, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 25291, 1669, 1532, 2959, 2354, 7939, 1155, 340, 43947, 1669, 2126, 7210, 3323, 1155, 11, 2943, 340, 41057, 1731, 11, 1848, 1669, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTLSExtensionsVerifyClient starts a TLS server that requires client
// certificates and uses an extension-based verifier, then checks that a
// client certificate with the expected common name is accepted while one with
// a different common name is rejected.
func TestTLSExtensionsVerifyClient(t *testing.T) {
	// Server
	// extendVerify feeds the custom verify hook: "cn" is the common name the
	// server-side verifier expects on client certificates.
	extendVerify := map[string]interface{}{
		"name": "server",
		"cn":   "client",
	}
	serverInfo := &certInfo{
		CommonName: extendVerify["name"].(string),
		Curve:      "RSA",
	}
	serverConfig, err := serverInfo.CreateCertConfig()
	if err != nil {
		t.Errorf("create server certificate error %v", err)
		return
	}
	serverConfig.VerifyClient = true
	serverConfig.Type = testType
	serverConfig.ExtendVerify = extendVerify
	filterChains := []v2.FilterChain{
		{
			TLS: *serverConfig,
		},
	}
	lc := &v2.Listener{}
	lc.FilterChains = filterChains
	ctxMng, err := NewTLSServerContextManager(lc, nil, log.StartLogger)
	if err != nil {
		t.Errorf("create context manager failed %v", err)
		return
	}
	server := MockServer{
		Mng: ctxMng,
		t:   t,
	}
	server.GoListenAndServe(t)
	defer server.Close()
	time.Sleep(time.Second) //wait server start

	// Two clients: one whose certificate CN matches the verifier's expected
	// "cn" (must pass) and one with a different CN (must fail).
	testCases := []struct {
		Info *certInfo
		Pass func(resp *http.Response, err error) bool
	}{
		{
			Info: &certInfo{
				CommonName: extendVerify["cn"].(string),
				Curve:      serverInfo.Curve,
			},
			Pass: pass,
		},
		{
			Info: &certInfo{
				CommonName: "invalid client",
				Curve:      serverInfo.Curve,
			},
			Pass: fail,
		},
	}
	for i, tc := range testCases {
		cfg, err := tc.Info.CreateCertConfig()
		cfg.ServerName = "127.0.0.1"
		if err != nil {
			t.Errorf("#%d create client certificate error %v", i, err)
			continue
		}
		cltMng, err := NewTLSClientContextManager(cfg, nil)
		if err != nil {
			t.Errorf("#%d create client context manager failed %v", i, err)
			continue
		}
		resp, err := MockClient(t, server.Addr, cltMng)
		if !tc.Pass(resp, err) {
			t.Errorf("#%d verify failed", i)
		}
	}
}
explode_data.jsonl/1523
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 732 }
[ 2830, 3393, 45439, 31282, 32627, 2959, 1155, 353, 8840, 836, 8, 341, 197, 322, 8422, 198, 197, 25048, 32627, 1669, 2415, 14032, 31344, 67066, 197, 197, 31486, 788, 330, 4030, 756, 197, 197, 1, 14271, 788, 256, 330, 2972, 756, 197, 532...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCommitList replays a canned Gitea commits API response via gock and
// checks that ListCommits returns data matching the golden file.
func TestCommitList(t *testing.T) {
	defer gock.Off()

	mockServerVersion()

	// Stub the commits endpoint with the recorded JSON fixture.
	gock.New("https://try.gitea.io").
		Get("/api/v1/repos/go-gitea/gitea/commits").
		Reply(200).
		Type("application/json").
		File("testdata/commits.json")

	client, _ := New("https://try.gitea.io")
	got, _, err := client.Git.ListCommits(context.Background(), "go-gitea/gitea", scm.CommitListOptions{})
	if err != nil {
		t.Error(err)
	}

	// Compare against the golden (expected) parse of the same fixture.
	var want []*scm.Commit
	raw, _ := ioutil.ReadFile("testdata/commits.json.golden")
	err = json.Unmarshal(raw, &want)
	assert.NoError(t, err)

	if diff := cmp.Diff(got, want); diff != "" {
		t.Errorf("Unexpected Results")
		t.Log(diff)
	}
}
explode_data.jsonl/50607
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 33441, 852, 1155, 353, 8840, 836, 8, 341, 16867, 728, 377, 13, 4596, 2822, 77333, 5475, 5637, 2822, 3174, 1176, 7121, 445, 2428, 1110, 1539, 1302, 632, 64, 4245, 38609, 197, 37654, 4283, 2068, 5457, 16, 49505, 25525, 2371, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestMessageExportSettingsIsValidGlobalRelaySettingsInvalidCustomerType
// builds otherwise-valid message export settings whose Global Relay customer
// type is invalid, and expects validation to fail.
func TestMessageExportSettingsIsValidGlobalRelaySettingsInvalidCustomerType(t *testing.T) {
	fs := &FileSettings{
		DriverName: NewString("foo"), // bypass file location check
	}
	mes := &MessageExportSettings{
		EnableExport:        NewBool(true),
		ExportFormat:        NewString(COMPLIANCE_EXPORT_TYPE_GLOBALRELAY),
		ExportFromTimestamp: NewInt64(0),
		DailyRunTime:        NewString("15:04"),
		BatchSize:           NewInt(100),
		GlobalRelaySettings: &GlobalRelayMessageExportSettings{
			// Only this field is invalid; everything else passes validation.
			CustomerType: NewString("Invalid"),
			EmailAddress: NewString("valid@mattermost.com"),
			SmtpUsername: NewString("SomeUsername"),
			SmtpPassword: NewString("SomePassword"),
		},
	}

	// should fail because customer type is invalid
	require.Error(t, mes.isValid(*fs))
}
explode_data.jsonl/50688
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 2052, 16894, 6086, 55470, 11646, 6740, 352, 6086, 7928, 12792, 929, 1155, 353, 8840, 836, 8, 341, 53584, 1669, 609, 1703, 6086, 515, 197, 10957, 5469, 675, 25, 1532, 703, 445, 7975, 3975, 442, 30718, 1034, 3728, 1779, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiple(t *testing.T) { testDuration := time.Second * 10 // number of available mathworker kites to be called kiteNumber := 3 // number of exp kites that will call mathwork kites clientNumber := 3 // ports are starting from 6000 up to 6000 + kiteNumber port := 6000 transport := transportFromEnv() for i := 0; i < kiteNumber; i++ { m := New("mathworker"+strconv.Itoa(i), "0.1."+strconv.Itoa(i)) m.Config.DisableAuthentication = true m.Config.Transport = transport m.Config.Port = port + i m.OnConnect(panicHandler) m.OnRegister(panicRegisterHandler) m.OnDisconnect(panicHandler) m.OnFirstRequest(panicHandler) m.HandleFunc("square", Square) go m.Run() <-m.ServerReadyNotify() defer m.Close() } clients := make([]*Client, clientNumber) for i := 0; i < clientNumber; i++ { cn := New("exp"+strconv.Itoa(i), "0.0.1") cn.Config.Transport = transport c := cn.NewClient("http://127.0.0.1:" + strconv.Itoa(port+i) + "/kite") if err := c.Dial(); err != nil { t.Fatal(err) } clients[i] = c defer c.Close() } timeout := time.After(testDuration) // every one second for { select { case <-time.Tick(time.Second): var wg sync.WaitGroup for i := 0; i < clientNumber; i++ { wg.Add(1) go func(i int, t *testing.T) { defer wg.Done() time.Sleep(time.Millisecond * time.Duration(rand.Intn(500))) _, err := clients[i].TellWithTimeout("square", 4*time.Second, 2) if err != nil { t.Error(err) } }(i, t) } wg.Wait() case <-timeout: return } } }
explode_data.jsonl/68243
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 664 }
[ 2830, 3393, 32089, 1155, 353, 8840, 836, 8, 341, 18185, 12945, 1669, 882, 32435, 353, 220, 16, 15, 271, 197, 322, 1372, 315, 2500, 6888, 21462, 595, 3611, 311, 387, 2598, 198, 16463, 632, 2833, 1669, 220, 18, 271, 197, 322, 1372, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStrExtract(t *testing.T) { pat := []string{"mTOR", "AMPK", "dsf", "RNA-Seq", "RNA-seq"} patStr := strings.Join(pat, "|") res := StrExtract("mTOR, AMPK, DSF, RNA-Seq", patStr, 4) if len(res) != 3 { log.Fatalf("StrExtract faild: length of res should equals 3") } }
explode_data.jsonl/59574
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 2580, 28959, 1155, 353, 8840, 836, 8, 341, 3223, 266, 1669, 3056, 917, 4913, 76, 49658, 497, 330, 16024, 42, 497, 330, 5356, 69, 497, 330, 30720, 12, 20183, 497, 330, 30720, 7806, 80, 16707, 3223, 266, 2580, 1669, 9069, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMysql56ResetReplicationCommands(t *testing.T) { want := []string{ "STOP SLAVE", "RESET SLAVE ALL", "RESET MASTER", } if got := (&mysql56{}).ResetReplicationCommands(); !reflect.DeepEqual(got, want) { t.Errorf("(&mysql56{}).ResetReplicationCommands() = %#v, want %#v", got, want) } }
explode_data.jsonl/11239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 44, 14869, 20, 21, 14828, 18327, 1693, 30479, 1155, 353, 8840, 836, 8, 341, 50780, 1669, 3056, 917, 515, 197, 197, 1, 50669, 16797, 9493, 756, 197, 197, 1, 50000, 16797, 9493, 13097, 756, 197, 197, 1, 50000, 71197, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGraphAttributeSetting(t *testing.T) { g := dot.NewGraph("Test") if g.Set("label", "foo") != nil { t.Error("Error setting value on g", g) } g.Set("Damping", "x") if g.Set("this_does_not_exist", "and_should_error") != dot.AttributeError { t.Error("Did not get godot.AttributeError when setting invalid attribute on g", g) } }
explode_data.jsonl/409
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 11212, 3907, 15400, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 12756, 7121, 11212, 445, 2271, 1138, 743, 342, 4202, 445, 1502, 497, 330, 7975, 899, 961, 2092, 341, 197, 3244, 6141, 445, 1454, 6243, 897, 389, 342, 497, 342, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEncodeOneofNilWrapper(t *testing.T) { m := &testpb.TestAllTypes{OneofField: (*testpb.TestAllTypes_OneofUint32)(nil)} b, err := proto.Marshal(m) if err != nil { t.Fatal(err) } if len(b) > 0 { t.Errorf("Marshal return non-empty, want empty") } }
explode_data.jsonl/1538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 32535, 3966, 1055, 19064, 11542, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 609, 1944, 16650, 8787, 2403, 4173, 90, 3966, 1055, 1877, 25, 4609, 1944, 16650, 8787, 2403, 4173, 68201, 1055, 21570, 18, 17, 2376, 8385, 10569, 223...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestLabelsExecutor runs NewLabelsExecutor.Execute against a table of
// mocked NotificationLoader behaviors and checks the returned
// notifications and the returned error (both presence and exact value)
// for the success, database-error, not-found, and unknown-error paths.
func TestLabelsExecutor(t *testing.T) {
	tests := []struct {
		name            string
		mockDb          NotificationLoader
		expectedResult  []contract.Notification
		expectedError   bool
		expectedErrType error
	}{
		{
			name:            "Successful database call",
			mockDb:          createMockNotificiationLoaderLabelsStringArg("GetNotificationsByLabels", nil, SuccessfulDatabaseResult, Labels, Limit),
			expectedResult:  SuccessfulDatabaseResult,
			expectedError:   false,
			expectedErrType: nil,
		},
		{
			name:            "Unsuccessful database call",
			mockDb:          createMockNotificiationLoaderLabelsStringArg("GetNotificationsByLabels", Error, []contract.Notification{}, Labels, Limit),
			expectedResult:  []contract.Notification{},
			expectedError:   true,
			expectedErrType: Error,
		},
		{
			name:            "Notification not found",
			mockDb:          createMockNotificiationLoaderLabelsStringArg("GetNotificationsByLabels", nil, []contract.Notification{}, Labels, Limit),
			expectedResult:  []contract.Notification{},
			expectedError:   true,
			expectedErrType: ErrorNotFound,
		},
		{
			name:            "Unknown Error",
			mockDb:          createMockNotificiationLoaderLabelsStringArg("GetNotificationsByLabels", Error, SuccessfulDatabaseResult, Labels, Limit),
			expectedResult:  SuccessfulDatabaseResult,
			expectedError:   true,
			expectedErrType: Error,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			op := NewLabelsExecutor(test.mockDb, Labels, Limit)
			actual, err := op.Execute()
			// Error presence must match expectation before comparing values.
			if test.expectedError && err == nil {
				t.Error("Expected an error")
				return
			}
			if !test.expectedError && err != nil {
				t.Errorf("Unexpectedly encountered error: %s", err.Error())
				return
			}
			// DeepEqual on the error pins the exact sentinel, not just non-nil.
			if !reflect.DeepEqual(test.expectedErrType, err) {
				t.Errorf("Expected error result does not match the observed.\nExpected: %v\nObserved: %v\n", test.expectedErrType, err)
				return
			}
			if !reflect.DeepEqual(test.expectedResult, actual) {
				t.Errorf("Expected result does not match the observed.\nExpected: %v\nObserved: %v\n", test.expectedResult, actual)
				return
			}
		})
	}
}
explode_data.jsonl/31060
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 867 }
[ 2830, 3393, 23674, 25255, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 310, 914, 198, 197, 77333, 7994, 688, 16571, 9181, 198, 197, 42400, 2077, 220, 3056, 20257, 49329, 198, 197, 42400, 1454, 256, 1807, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestIntPtr(t *testing.T) { var args struct { Foo *int } err := parse("--foo 123", &args) require.NoError(t, err) require.NotNil(t, args.Foo) assert.Equal(t, 123, *args.Foo) }
explode_data.jsonl/12990
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 74156, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 2036, 341, 197, 12727, 2624, 353, 396, 198, 197, 532, 9859, 1669, 4715, 21549, 7975, 220, 16, 17, 18, 497, 609, 2116, 340, 17957, 35699, 1155, 11, 1848, 340, 17957, 93882, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestNewRequest builds a client with a fixed base URL and verifies two
// behaviors of NewRequest: the resource path is joined onto the base
// URL with Content-Type left unset when there is no body, and a
// supplied body is JSON-encoded with Content-Type set to
// application/json.
func TestNewRequest(t *testing.T) {
	opts := Options{BaseURL: apiURL}
	client, err := New(opts)
	require.NoError(t, err, "create client error")
	t.Run("correctly create request path", func(t *testing.T) {
		req, err := client.NewRequest(http.MethodGet, "my-resource", nil)
		require.NoError(t, err, "new request not errors")
		// NOTE(review): the expected URL implies apiURL is
		// "https://base-url:8080/api/url" — confirm against the fixture.
		require.Exactly(t, "https://base-url:8080/api/url/my-resource", req.URL.String())
		require.Exactly(t, req.Header.Get("Content-Type"), "")
	})
	t.Run("correctly set request body", func(t *testing.T) {
		var data = map[string]interface{}{
			"some": "json format",
			"foo":  "bar",
			"that": float64(3),
		}
		req, err := client.NewRequest(http.MethodPost, "my-resource", data)
		require.NoError(t, err, "request error")
		// Decode the request body back and compare with the input map;
		// float64(3) matches encoding/json's default number decoding.
		var reqBody map[string]interface{}
		err = json.NewDecoder(req.Body).Decode(&reqBody)
		require.NoError(t, err, "json marshal error")
		require.Exactly(t, data, reqBody, "wrong request body")
		require.Exactly(t, req.Header.Get("Content-Type"), "application/json")
	})
}
explode_data.jsonl/66012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 384 }
[ 2830, 3393, 3564, 1900, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 14566, 90, 3978, 3144, 25, 6330, 3144, 532, 25291, 11, 1848, 1669, 1532, 30885, 340, 17957, 35699, 1155, 11, 1848, 11, 330, 3182, 2943, 1465, 5130, 3244, 16708, 445, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLabelShardedMetaFilter_Filter_Hashmod applies a relabel config
// that hashmods BlockIDLabel into 3 shards and keeps only one shard per
// subtest. For each shard index it checks that Filter retains exactly
// the expected subset of the 15 input blocks and that the synced metric
// counts the excluded blocks.
func TestLabelShardedMetaFilter_Filter_Hashmod(t *testing.T) {
	ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
	defer cancel()
	relabelContentYamlFmt := `
    - action: hashmod
      source_labels: ["%s"]
      target_label: shard
      modulus: 3
    - action: keep
      source_labels: ["shard"]
      regex: %d
`
	for i := 0; i < 3; i++ {
		t.Run(fmt.Sprintf("%v", i), func(t *testing.T) {
			var relabelConfig []*relabel.Config
			testutil.Ok(t, yaml.Unmarshal([]byte(fmt.Sprintf(relabelContentYamlFmt, BlockIDLabel, i)), &relabelConfig))
			f := NewLabelShardedMetaFilter(relabelConfig)
			// 15 blocks: some with Thanos labels, some (7-15) empty.
			input := map[ulid.ULID]*metadata.Meta{
				ULID(1): {
					Thanos: metadata.Thanos{
						Labels: map[string]string{"cluster": "B", "message": "keepme"},
					},
				},
				ULID(2): {
					Thanos: metadata.Thanos{
						Labels: map[string]string{"something": "A", "message": "keepme"},
					},
				},
				ULID(3): {
					Thanos: metadata.Thanos{
						Labels: map[string]string{"cluster": "A", "message": "keepme"},
					},
				},
				ULID(4): {
					Thanos: metadata.Thanos{
						Labels: map[string]string{"cluster": "A", "something": "B", "message": "keepme"},
					},
				},
				ULID(5): {
					Thanos: metadata.Thanos{
						Labels: map[string]string{"cluster": "B"},
					},
				},
				ULID(6): {
					Thanos: metadata.Thanos{
						Labels: map[string]string{"cluster": "B", "message": "keepme"},
					},
				},
				ULID(7):  {},
				ULID(8):  {},
				ULID(9):  {},
				ULID(10): {},
				ULID(11): {},
				ULID(12): {},
				ULID(13): {},
				ULID(14): {},
				ULID(15): {},
			}
			// Expected survivors per shard; the partition below reflects
			// the hashmod of each block's ID label.
			expected := map[ulid.ULID]*metadata.Meta{}
			switch i {
			case 0:
				expected = map[ulid.ULID]*metadata.Meta{
					ULID(2):  input[ULID(2)],
					ULID(6):  input[ULID(6)],
					ULID(11): input[ULID(11)],
					ULID(13): input[ULID(13)],
				}
			case 1:
				expected = map[ulid.ULID]*metadata.Meta{
					ULID(5):  input[ULID(5)],
					ULID(7):  input[ULID(7)],
					ULID(10): input[ULID(10)],
					ULID(12): input[ULID(12)],
					ULID(14): input[ULID(14)],
					ULID(15): input[ULID(15)],
				}
			case 2:
				expected = map[ulid.ULID]*metadata.Meta{
					ULID(1): input[ULID(1)],
					ULID(3): input[ULID(3)],
					ULID(4): input[ULID(4)],
					ULID(8): input[ULID(8)],
					ULID(9): input[ULID(9)],
				}
			}
			deleted := len(input) - len(expected)
			m := newTestFetcherMetrics()
			// Filter mutates input in place; compare against expected.
			testutil.Ok(t, f.Filter(ctx, input, m.synced))

			testutil.Equals(t, expected, input)
			testutil.Equals(t, float64(deleted), promtest.ToFloat64(m.synced.WithLabelValues(labelExcludedMeta)))
		})
	}
}
explode_data.jsonl/67637
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1331 }
[ 2830, 3393, 2476, 2016, 20958, 12175, 5632, 68935, 2039, 988, 2593, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 220, 16, 17, 15, 77053, 32435, 340, 16867, 9121, 2822, 17200, 1502, 2762, 56,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDefaultGrpcServerSettings(t *testing.T) { gss := &GRPCServerSettings{} opts, err := gss.ToServerOption(componenttest.NewNopHost(), componenttest.NewNopTelemetrySettings()) _ = grpc.NewServer(opts...) assert.NoError(t, err) assert.Len(t, opts, 2) }
explode_data.jsonl/80325
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 3675, 6464, 3992, 5475, 6086, 1155, 353, 8840, 836, 8, 341, 3174, 778, 1669, 609, 8626, 4872, 5475, 6086, 16094, 64734, 11, 1848, 1669, 342, 778, 3274, 5475, 5341, 21228, 1944, 7121, 45, 453, 9296, 1507, 3692, 1944, 7121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFailGracefullyBadLocalbootType(t *testing.T) { err := add("localboot", []string{"not-valid"}) if err.Error() != "method needs to be grub or path" { t.Errorf(`err.Error() = %q, want "error message: method needs to be grub or path"`, err.Error()) } }
explode_data.jsonl/25079
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 19524, 86543, 3641, 17082, 7319, 4619, 929, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 912, 445, 2438, 4619, 497, 3056, 917, 4913, 1921, 84810, 23625, 743, 1848, 6141, 368, 961, 330, 4393, 3880, 311, 387, 73706, 476, 1815, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIntArray_Clear(t *testing.T) { gtest.C(t, func(t *gtest.T) { a1 := []int{1, 2, 3, 5} array1 := garray.NewIntArrayFrom(a1) array1.Clear() t.Assert(array1.Len(), 0) }) }
explode_data.jsonl/47617
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 95338, 57744, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 11323, 16, 1669, 3056, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 20, 532, 197, 11923, 16, 1669, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestProtocolNegotiation connects to a live Cassandra instance and
// checks that the negotiated native-protocol version (observed on both
// the session and the cluster) matches what the Cassandra release named
// in ENV_CASSANDRA_VERSION supports.
func TestProtocolNegotiation(t *testing.T) {
	cluster, _ := cassandra.NewCluster()
	if !assert.NotNil(t, cluster) {
		t.FailNow()
	}
	// Negotiation starts from the highest version the driver supports.
	assert.Equal(t, cluster.ProtocolVersion, cassandra.MAX_SUPPORTED_VERSION)
	session, _ := cluster.Connect()
	if !assert.NotNil(t, session) {
		t.FailNow()
	}
	defer cluster.Shutdown()
	updatedProtocolVersion := session.ProtocolVersion
	updatedClusterVersion := cluster.ProtocolVersion
	t.Logf("Cassandra version %s", ENV_CASSANDRA_VERSION)
	// Protocol v4 from Cassandra 2.2, v3 from 2.1, v2 from 2.0, else v1.
	// NOTE(review): these are lexicographic string comparisons — fine
	// for 1.x/2.x versions, but "10.0" would sort before "2.0"; confirm
	// the env var never carries a two-digit major version.
	if ENV_CASSANDRA_VERSION >= "2.2" {
		assert.Equal(t, updatedProtocolVersion, 4)
		assert.Equal(t, updatedClusterVersion, 4)
	} else if ENV_CASSANDRA_VERSION >= "2.1" {
		assert.Equal(t, updatedProtocolVersion, 3)
		assert.Equal(t, updatedClusterVersion, 3)
	} else if ENV_CASSANDRA_VERSION >= "2.0" {
		assert.Equal(t, updatedProtocolVersion, 2)
		assert.Equal(t, updatedClusterVersion, 2)
	} else {
		assert.Equal(t, updatedProtocolVersion, 1)
		assert.Equal(t, updatedClusterVersion, 1)
	}
}
explode_data.jsonl/52059
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 20689, 47800, 354, 7101, 1155, 353, 8840, 836, 8, 341, 197, 18855, 11, 716, 1669, 44224, 23274, 7121, 28678, 741, 743, 753, 2207, 93882, 1155, 11, 10652, 8, 341, 197, 3244, 57243, 7039, 741, 197, 532, 6948, 12808, 1155, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestIsEqual(t *testing.T) { var e1, e2 float32 e := createRandomObject(e1) if v, ok := e.(float32); ok { e1 = v } e = createRandomObject(e2) if v, ok := e.(float32); ok { e2 = v } s1 := New() s2 := New() if !s1.IsEqual(s2) { t.Error("expected 2 new sets to be equal") } s1.Add(e1) s1.Add(e2) if s1.IsEqual(s2) { t.Errorf("expected 2 different sets to be equal, %v, %v", s1, s2) } s2.Add(e1) s2.Add(e2) if !s1.IsEqual(s2) { t.Error("expected 2 sets with the same items added to be equal") } }
explode_data.jsonl/35012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 3872, 2993, 1155, 353, 8840, 836, 8, 341, 2405, 384, 16, 11, 384, 17, 2224, 18, 17, 198, 7727, 1669, 1855, 13999, 1190, 2026, 16, 340, 743, 348, 11, 5394, 1669, 384, 12832, 3649, 18, 17, 1215, 5394, 341, 197, 7727, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestSchemaWriteAndReadBack exercises the v1alpha1 schema service
// end-to-end against an in-memory test server: reading an unwritten
// schema returns codes.NotFound, writing succeeds and yields a
// decodable single revision, and reading back returns exactly the
// definition that was written.
func TestSchemaWriteAndReadBack(t *testing.T) {
	conn, cleanup, _ := testserver.NewTestServer(require.New(t), 0, memdb.DisableGC, 0, false, testfixtures.EmptyDatastore)
	t.Cleanup(cleanup)
	client := v1alpha1.NewSchemaServiceClient(conn)
	requestedObjectDefNames := []string{"example/user"}
	// Before any write, the definition must not exist.
	_, err := client.ReadSchema(context.Background(), &v1alpha1.ReadSchemaRequest{
		ObjectDefinitionsNames: requestedObjectDefNames,
	})
	grpcutil.RequireStatus(t, codes.NotFound, err)
	userSchema := `definition example/user {}`
	writeResp, err := client.WriteSchema(context.Background(), &v1alpha1.WriteSchemaRequest{
		Schema: userSchema,
	})
	require.NoError(t, err)
	require.Equal(t, requestedObjectDefNames, writeResp.GetObjectDefinitionsNames())
	// The computed revision string must decode to exactly one revision.
	rev, err := nspkg.DecodeV1Alpha1Revision(writeResp.ComputedDefinitionsRevision)
	require.NoError(t, err)
	require.Len(t, rev, 1)
	readback, err := client.ReadSchema(context.Background(), &v1alpha1.ReadSchemaRequest{
		ObjectDefinitionsNames: writeResp.GetObjectDefinitionsNames(),
	})
	require.NoError(t, err)
	require.Equal(t, []string{userSchema}, readback.GetObjectDefinitions())
}
explode_data.jsonl/54545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 388 }
[ 2830, 3393, 8632, 7985, 3036, 4418, 3707, 1155, 353, 8840, 836, 8, 341, 32917, 11, 21290, 11, 716, 1669, 1273, 4030, 7121, 2271, 5475, 23482, 7121, 1155, 701, 220, 15, 11, 1833, 1999, 10166, 480, 22863, 11, 220, 15, 11, 895, 11, 127...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPullImage pulls the sample image through the pull-client fixture
// and verifies that both the expected layer blob and the image manifest
// end up in the client's store.
func TestPullImage(t *testing.T) {
	require := require.New(t)
	ctx, cleanup := context.BuildContextFixture()
	defer cleanup()
	p, err := PullClientFixtureWithAlpine(ctx)
	require.NoError(err)
	// Pull image.
	_, err = p.Pull(testutil.SampleImageTag)
	require.NoError(err)
	// Presumably the known digest of the fixture image's layer —
	// TODO(review): confirm against the fixture definition.
	_, err = p.store.Layers.GetStoreFileStat("393ccd5c4dd90344c9d725125e13f636ce0087c62f5ca89050faaacbb9e3ed5b")
	require.NoError(err)
	_, err = p.store.Manifests.GetStoreFileStat(testutil.SampleImageRepoName, testutil.SampleImageTag)
	require.NoError(err)
}
explode_data.jsonl/63635
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 36068, 1906, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 20985, 11, 21290, 1669, 2266, 25212, 1972, 18930, 741, 16867, 21290, 2822, 3223, 11, 1848, 1669, 31828, 2959, 18930, 2354, 2101, 38038, 7502, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileHelper_IsPathString_06(t *testing.T) { fh := FileHelper{} pathFile := fh.AdjustPathSlash("../../filesfortest/levelfilesfortest/level_01_dir/" + "level_02_dir/level_03_dir") pathFile = "." + pathFile _, _, _, err := fh.IsPathString(pathFile) if err == nil { t.Errorf("Expected an error return from fh.IsPathString(pathFile) " + "because 'pathFile' includes the text '...' . " + "However, NO ERROR WAS RETURNED!") } }
explode_data.jsonl/14511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 1703, 5511, 31879, 1820, 703, 62, 15, 21, 1155, 353, 8840, 836, 8, 1476, 220, 36075, 1669, 2887, 5511, 31483, 220, 1815, 1703, 1669, 36075, 17865, 4250, 1820, 88004, 36800, 7198, 3969, 477, 14, 3449, 490, 3658, 3969, 477, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestIntegration runs the IntegrationSuite against every Docker image
// listed in the SKEEMA_TEST_IMAGES environment variable. When the
// variable is unset it prints usage hints; RunSuite is still invoked
// with an empty image list.
func TestIntegration(t *testing.T) {
	images := tengo.SplitEnv("SKEEMA_TEST_IMAGES")
	if len(images) == 0 {
		fmt.Println("SKEEMA_TEST_IMAGES env var is not set, so integration tests will be skipped!")
		fmt.Println("To run integration tests, you may set SKEEMA_TEST_IMAGES to a comma-separated")
		fmt.Println("list of Docker images. Example:\n# SKEEMA_TEST_IMAGES=\"mysql:5.6,mysql:5.7\" go test")
	}
	// The sandbox manager drives the per-image Docker containers.
	manager, err := tengo.NewDockerClient(tengo.DockerClientOptions{})
	if err != nil {
		t.Errorf("Unable to create sandbox manager: %s", err)
	}
	suite := &IntegrationSuite{manager: manager}
	tengo.RunSuite(suite, t, images)
}
explode_data.jsonl/34618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 52464, 1155, 353, 8840, 836, 8, 341, 197, 3642, 1669, 66890, 19823, 14359, 445, 50, 3390, 48610, 11641, 79415, 1138, 743, 2422, 40877, 8, 621, 220, 15, 341, 197, 11009, 12419, 445, 50, 3390, 48610, 11641, 79415, 6105, 762, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestRemoveClusterMetricsWhenNumberOfMetricsExceedsDefaultChannelLength_2843
// is a regression test (issue 2843): populate a gauge vector with far
// more labeled series than metricBufferSize, then make sure
// removeMetricsForCluster still completes for that cluster.
func TestRemoveClusterMetricsWhenNumberOfMetricsExceedsDefaultChannelLength_2843(t *testing.T) {
	clusterID := uuid.MustRandom()
	metric := prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: "scylla_manager",
		Subsystem: "healthcheck",
		Name:      "status",
	}, []string{clusterKey, hostKey})
	// 30x the buffer size guarantees the internal buffer overflows.
	for i := 0; i < 3*10*metricBufferSize; i++ {
		hl := prometheus.Labels{
			clusterKey: clusterID.String(),
			hostKey:    uuid.MustRandom().String(),
		}
		metric.With(hl).Set(1)
	}
	// NOTE(review): status, rtt and timeout share one GaugeVec here —
	// presumably only the series count matters for this test; confirm.
	r := runner{metrics: &runnerMetrics{
		status:  metric,
		rtt:     metric,
		timeout: metric,
	}}
	r.removeMetricsForCluster(clusterID)
}
explode_data.jsonl/10928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 13021, 28678, 27328, 4498, 40619, 27328, 840, 4635, 82, 3675, 9629, 4373, 62, 17, 23, 19, 18, 1155, 353, 8840, 836, 8, 341, 197, 18855, 915, 1669, 16040, 50463, 13999, 741, 2109, 16340, 1669, 2706, 39705, 7121, 38, 19392, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCopyFromReturnValue(t *testing.T) { target := hooks{ RawPreStart: "foo", } source := hooks{ RawPostStart: "bar", } assert.False(t, target.CopyFrom(source), "Copying unrelated hooks should not trigger an override") target = hooks{ RawPreStart: "foo", } source = hooks{ RawPreStart: "bar", RawPostStart: "bar", } assert.True(t, target.CopyFrom(source), "Copying related hooks should trigger an override") }
explode_data.jsonl/71540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 12106, 3830, 40426, 1155, 353, 8840, 836, 8, 341, 28861, 1669, 29677, 515, 197, 11143, 672, 4703, 3479, 25, 330, 7975, 756, 197, 532, 47418, 1669, 29677, 515, 197, 11143, 672, 4133, 3479, 25, 330, 2257, 756, 197, 532, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSimpleRouter builds a router with one rule tagging all TCP
// traffic as "test" and verifies PickRoute returns that tag for a TCP
// destination, using gomock stand-ins for DNS and outbound management.
func TestSimpleRouter(t *testing.T) {
	config := &Config{
		Rule: []*RoutingRule{
			{
				TargetTag: &RoutingRule_Tag{
					Tag: "test",
				},
				NetworkList: &net.NetworkList{
					Network: []net.Network{net.Network_TCP},
				},
			},
		},
	}

	mockCtl := gomock.NewController(t)
	defer mockCtl.Finish()

	mockDns := mocks.NewDNSClient(mockCtl)
	mockOhm := mocks.NewOutboundManager(mockCtl)
	mockHs := mocks.NewOutboundHandlerSelector(mockCtl)

	r := new(Router)
	common.Must(r.Init(config, mockDns, &mockOutboundManager{
		Manager:         mockOhm,
		HandlerSelector: mockHs,
	}))

	// A TCP destination should match the rule's network list.
	ctx := withOutbound(&session.Outbound{Target: net.TCPDestination(net.DomainAddress("v2ray.com"), 80)})
	tag, err := r.PickRoute(ctx)
	common.Must(err)
	if tag != "test" {
		t.Error("expect tag 'test', bug actually ", tag)
	}
}
explode_data.jsonl/81244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 361 }
[ 2830, 3393, 16374, 9523, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 609, 2648, 515, 197, 11143, 1111, 25, 29838, 24701, 11337, 515, 298, 197, 515, 571, 197, 6397, 5668, 25, 609, 24701, 11337, 78040, 515, 464, 197, 5668, 25, 330, 1944,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRetry(t *testing.T) { testCases := []struct { desc string maxRequestAttempts int wantRetryAttempts int wantResponseStatus int amountFaultyEndpoints int }{ { desc: "no retry on success", maxRequestAttempts: 1, wantRetryAttempts: 0, wantResponseStatus: http.StatusOK, amountFaultyEndpoints: 0, }, { desc: "no retry when max request attempts is one", maxRequestAttempts: 1, wantRetryAttempts: 0, wantResponseStatus: http.StatusInternalServerError, amountFaultyEndpoints: 1, }, { desc: "one retry when one server is faulty", maxRequestAttempts: 2, wantRetryAttempts: 1, wantResponseStatus: http.StatusOK, amountFaultyEndpoints: 1, }, { desc: "two retries when two servers are faulty", maxRequestAttempts: 3, wantRetryAttempts: 2, wantResponseStatus: http.StatusOK, amountFaultyEndpoints: 2, }, { desc: "max attempts exhausted delivers the 5xx response", maxRequestAttempts: 3, wantRetryAttempts: 2, wantResponseStatus: http.StatusInternalServerError, amountFaultyEndpoints: 3, }, } backendServer := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { rw.WriteHeader(http.StatusOK) rw.Write([]byte("OK")) })) forwarder, err := forward.New() if err != nil { t.Fatalf("Error creating forwarder: %s", err) } for _, test := range testCases { test := test t.Run(test.desc, func(t *testing.T) { t.Parallel() loadBalancer, err := roundrobin.New(forwarder) if err != nil { t.Fatalf("Error creating load balancer: %s", err) } basePort := 33444 for i := 0; i < test.amountFaultyEndpoints; i++ { // 192.0.2.0 is a non-routable IP for testing purposes. // See: https://stackoverflow.com/questions/528538/non-routable-ip-address/18436928#18436928 // We only use the port specification here because the URL is used as identifier // in the load balancer and using the exact same URL would not add a new server. 
err = loadBalancer.UpsertServer(testhelpers.MustParseURL("http://192.0.2.0:" + string(basePort+i))) assert.NoError(t, err) } // add the functioning server to the end of the load balancer list err = loadBalancer.UpsertServer(testhelpers.MustParseURL(backendServer.URL)) assert.NoError(t, err) retryListener := &countingRetryListener{} retry := NewRetry(test.maxRequestAttempts, loadBalancer, retryListener) recorder := httptest.NewRecorder() req := httptest.NewRequest(http.MethodGet, "http://localhost:3000/ok", nil) retry.ServeHTTP(recorder, req) assert.Equal(t, test.wantResponseStatus, recorder.Code) assert.Equal(t, test.wantRetryAttempts, retryListener.timesCalled) }) } }
explode_data.jsonl/44112
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1207 }
[ 2830, 3393, 51560, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 41653, 1698, 914, 198, 197, 22543, 1900, 81517, 262, 526, 198, 197, 50780, 51560, 81517, 257, 526, 198, 197, 50780, 92663, 262, 526, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVerbs(t *testing.T) { c := testRESTClient(t, nil) if r := c.Post(); r.verb != "POST" { t.Errorf("Post verb is wrong") } if r := c.Put(); r.verb != "PUT" { t.Errorf("Put verb is wrong") } if r := c.Get(); r.verb != "GET" { t.Errorf("Get verb is wrong") } if r := c.Delete(); r.verb != "DELETE" { t.Errorf("Delete verb is wrong") } }
explode_data.jsonl/13279
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 10141, 1279, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1273, 38307, 2959, 1155, 11, 2092, 340, 743, 435, 1669, 272, 23442, 2129, 435, 13, 22328, 961, 330, 2946, 1, 341, 197, 3244, 13080, 445, 4133, 18607, 374, 4969, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIndexMethods(t *testing.T) { router := MakeRouter() router.GET("/", printMethod) router.POST("/", printMethod) router.PUT("/", printMethod) router.PATCH("/", printMethod) router.DELETE("/", printMethod) RunRequest(router, "GET", "/", 200, "GET", t) RunRequest(router, "POST", "/", 200, "POST", t) RunRequest(router, "PUT", "/", 200, "PUT", t) RunRequest(router, "PATCH", "/", 200, "PATCH", t) RunRequest(router, "DELETE", "/", 200, "DELETE", t) }
explode_data.jsonl/15102
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 1552, 17856, 1155, 353, 8840, 836, 8, 341, 67009, 1669, 7405, 9523, 741, 67009, 17410, 35460, 1173, 3523, 340, 67009, 14721, 35460, 1173, 3523, 340, 67009, 98371, 35460, 1173, 3523, 340, 67009, 1069, 8884, 35460, 1173, 3523, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestQueryUserGroup checks QueryUserGroup filtering by group name, by
// group type, and by group type plus LDAP DN, asserting only on the
// number of rows returned. Assumes fixture rows (one "test_group_01"
// group, three LDAP groups) are pre-seeded — confirm against the
// package's test setup.
func TestQueryUserGroup(t *testing.T) {
	type args struct {
		query models.UserGroup
	}
	tests := []struct {
		name    string
		args    args
		want    int
		wantErr bool
	}{
		{"Query all user group", args{query: models.UserGroup{GroupName: "test_group_01"}}, 1, false},
		{"Query all ldap group", args{query: models.UserGroup{GroupType: common.LDAPGroupType}}, 3, false},
		{"Query ldap group with group property", args{query: models.UserGroup{GroupType: common.LDAPGroupType, LdapGroupDN: "CN=harbor_users,OU=sample,OU=vmware,DC=harbor,DC=com"}}, 1, false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := QueryUserGroup(tt.args.query)
			if (err != nil) != tt.wantErr {
				t.Errorf("QueryUserGroup() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if len(got) != tt.want {
				t.Errorf("QueryUserGroup() = %v, want %v", len(got), tt.want)
			}
		})
	}
}
explode_data.jsonl/73815
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 2859, 1474, 2808, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 27274, 4119, 7344, 2808, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 2827, 198, 197, 50780, 262, 526...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestServerOriginServerUnreachable(t *testing.T) { // create a server, then promptly shut it down originServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) })) originURL, _ := url.Parse(originServer.URL) originServer.Close() handler := httptransport.NewServer( originURL, ) proxyServer := httptest.NewServer(handler) defer proxyServer.Close() resp, _ := http.Get(proxyServer.URL) switch resp.StatusCode { case http.StatusBadGateway: // go1.7 and beyond break case http.StatusInternalServerError: // to go1.7 break default: t.Errorf("want %d or %d, have %d", http.StatusBadGateway, http.StatusInternalServerError, resp.StatusCode) } }
explode_data.jsonl/15689
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 5475, 13298, 5475, 1806, 46550, 1155, 353, 8840, 836, 8, 341, 197, 322, 1855, 264, 3538, 11, 1221, 39422, 9311, 432, 1495, 198, 197, 8611, 5475, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildPolicyName(t *testing.T) { assert := tassert.New(t) svc := service.MeshService{ Namespace: "default", Name: "foo", } testCases := []struct { name string svc service.MeshService sameNamespace bool expectedName string }{ { name: "same namespace", svc: svc, sameNamespace: true, expectedName: "foo", }, { name: "different namespace", svc: svc, sameNamespace: false, expectedName: "foo-default", }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { actual := buildPolicyName(tc.svc, tc.sameNamespace) assert.Equal(tc.expectedName, actual) }) } }
explode_data.jsonl/69771
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 11066, 13825, 675, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 259, 2207, 7121, 1155, 692, 1903, 7362, 1669, 2473, 50155, 1860, 515, 197, 90823, 25, 330, 2258, 756, 197, 21297, 25, 414, 330, 7975, 756, 197, 630, 18185, 37302...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestManifestGenerateComponentHubTag(t *testing.T) { g := NewGomegaWithT(t) objs, err := runManifestCommands("component_hub_tag", "", liveCharts) if err != nil { t.Fatal(err) } tests := []struct { deploymentName string containerName string want string }{ { deploymentName: "prometheus", want: "docker.io/prometheus:1.1.1", }, { deploymentName: "grafana", want: "grafana/grafana:1.2.3", }, { deploymentName: "istio-ingressgateway", containerName: "istio-proxy", want: "istio-spec.hub/proxyv2:istio-spec.tag", }, { deploymentName: "istiod", containerName: "discovery", want: "component.pilot.hub/pilot:2", }, { deploymentName: "kiali", want: "docker.io/testing/kiali:v1.22", }, } for _, tt := range tests { for _, os := range objs { containerName := tt.deploymentName if tt.containerName != "" { containerName = tt.containerName } container := mustGetContainer(g, os, tt.deploymentName, containerName) g.Expect(container).Should(HavePathValueEqual(PathValue{"image", tt.want})) } } }
explode_data.jsonl/48755
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 545 }
[ 2830, 3393, 38495, 31115, 2189, 19316, 5668, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 692, 22671, 82, 11, 1848, 1669, 1598, 38495, 30479, 445, 8571, 93996, 9372, 497, 7342, 3887, 64878, 340, 743, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGenerateModel_WithAllOfAndDiscriminatorAndArrayOfPolymorphs(t *testing.T) { specDoc, err := loads.Spec("../fixtures/codegen/todolist.models.yml") require.NoError(t, err) definitions := specDoc.Spec().Definitions schema := definitions["PetWithPets"] opts := opts() genModel, err := makeGenDefinition("PetWithPets", "models", schema, specDoc, opts) require.NoError(t, err) require.Len(t, genModel.AllOf, 2) assert.True(t, genModel.IsComplexObject) assert.Equal(t, "PetWithPets", genModel.Name) assert.Equal(t, "PetWithPets", genModel.GoType) buf := bytes.NewBuffer(nil) require.NoError(t, opts.templates.MustGet("model").Execute(buf, genModel)) ct, err := opts.LanguageOpts.FormatContent("PetWithPets.go", buf.Bytes()) require.NoError(t, err) res := string(ct) assertInCode(t, "type PetWithPets struct {", res) assertInCode(t, "UnmarshalPetSlice", res) }
explode_data.jsonl/2534
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 31115, 1712, 62, 2354, 2403, 2124, 3036, 23477, 94124, 3036, 62656, 14658, 1600, 16347, 82, 1155, 353, 8840, 836, 8, 341, 98100, 9550, 11, 1848, 1669, 20907, 36473, 17409, 45247, 46928, 4370, 5523, 347, 34675, 8235, 33936, 113...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBot_DeleteWebhook(t *testing.T) { type fields struct { apiClient apiClient } tests := []struct { name string fields fields wantResult bool wantErr bool }{ { name: "test1", fields: fields{ apiClient: &mockAPIClient{ method: "deleteWebhook", interfaceMethod: func() interface{} { return true }, bytesMethod: func() []byte { return []byte("true") }, }, }, wantResult: true, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b := &Bot{ apiClient: tt.fields.apiClient, } gotResult, err := b.DeleteWebhook(nil) if (err != nil) != tt.wantErr { t.Errorf("Bot.DeleteWebhook() error = %v, wantErr %v", err, tt.wantErr) return } if gotResult != tt.wantResult { t.Errorf("Bot.DeleteWebhook() = %v, want %v", gotResult, tt.wantResult) } }) } }
explode_data.jsonl/46104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 437 }
[ 2830, 3393, 23502, 57418, 5981, 20873, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 54299, 2959, 6330, 2959, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 55276, 257, 5043, 198, 197, 50780, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBestChainStateSerialization(t *testing.T) { t.Parallel() workSum := new(big.Int) tests := []struct { name string state bestChainState serialized []byte }{ { name: "genesis", state: bestChainState{ hash: *newHashFromStr("000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"), height: 0, totalTxns: 1, workSum: func() *big.Int { workSum.Add(workSum, CalcWork(486604799)) return new(big.Int).Set(workSum) }(), // 0x0100010001 }, serialized: hexToBytes("6fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000000000000100000000000000050000000100010001"), }, { name: "block 1", state: bestChainState{ hash: *newHashFromStr("00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048"), height: 1, totalTxns: 2, workSum: func() *big.Int { workSum.Add(workSum, CalcWork(486604799)) return new(big.Int).Set(workSum) }(), // 0x0200020002 }, serialized: hexToBytes("4860eb18bf1b1620e37e9490fc8a427514416fd75159ab86688e9a8300000000010000000200000000000000050000000200020002"), }, } for i, test := range tests { // Ensure the state serializes to the expected value. gotBytes := serializeBestChainState(test.state) if !bytes.Equal(gotBytes, test.serialized) { t.Errorf("serializeBestChainState #%d (%s): mismatched "+ "bytes - got %x, want %x", i, test.name, gotBytes, test.serialized) continue } // Ensure the serialized bytes are decoded back to the expected state. state, err := deserializeBestChainState(test.serialized) if err != nil { t.Errorf("deserializeBestChainState #%d (%s) "+ "unexpected error: %v", i, test.name, err) continue } if !reflect.DeepEqual(state, test.state) { t.Errorf("deserializeBestChainState #%d (%s) "+ "mismatched state - got %v, want %v", i, test.name, state, test.state) continue } } }
explode_data.jsonl/59833
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 858 }
[ 2830, 3393, 14470, 18837, 1397, 35865, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 97038, 9190, 1669, 501, 75616, 7371, 340, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 24291, 414, 1850, 18837, 1397, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInvokeNoPolicy(t *testing.T) { t.Run("1.2Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV12Capabilities(t) defer cleanup() testInvokeNoPolicy(t, l, v) }) t.Run("1.3Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV13Capabilities(t) defer cleanup() testInvokeNoPolicy(t, l, v) }) }
explode_data.jsonl/47796
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 17604, 2753, 13825, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16, 13, 17, 63746, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 8810, 11, 348, 11, 21290, 1669, 6505, 60850, 1389, 3036, 14256, 2354, 53, 16, 17, 55315...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequirementsMergeFile(t *testing.T) { // setup the disk rootTmpDir, err := ioutil.TempDir("", "") require.NoError(t, err, "could not create temp dir") fileNames, err := ioutil.ReadDir("test_data") assert.NoError(t, err) for _, f := range fileNames { if f.IsDir() { name := f.Name() srcDir := filepath.Join("test_data", name) require.DirExists(t, srcDir) tmpDir := filepath.Join(rootTmpDir, name) err = files.CopyDirOverwrite(srcDir, tmpDir) require.NoError(t, err, "failed to copy %s to %s", srcDir, tmpDir) // now lets run the command _, o := merge.NewCmdRequirementsMerge() o.Dir = tmpDir o.File = filepath.Join(tmpDir, "changes.yml") t.Logf("merging requirements in dir %s\n", tmpDir) err = o.Run() require.NoError(t, err, "failed to run merge") expectedPath := filepath.Join(srcDir, "expected.yml") generatedFile := filepath.Join(tmpDir, jxcore.RequirementsConfigFileName) if generateTestOutput { data, err := ioutil.ReadFile(generatedFile) require.NoError(t, err, "failed to load %s", generatedFile) err = ioutil.WriteFile(expectedPath, data, 0666) require.NoError(t, err, "failed to save file %s", expectedPath) continue } testhelpers.AssertTextFilesEqual(t, expectedPath, generatedFile, "merged file for test "+name) } } }
explode_data.jsonl/22998
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 522 }
[ 2830, 3393, 59202, 52096, 1703, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 279, 13364, 198, 33698, 35986, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 14676, 17957, 35699, 1155, 11, 1848, 11, 330, 28077, 537, 1855, 2730, 5419, 51...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_getRepositoryURL(t *testing.T) { type remote struct { name string url string } var tests = []struct { name string expectError bool remotes []remote expect string }{ { name: "single origin", expectError: false, remotes: []remote{ {name: "origin", url: "https://github.com/okteto/go-getting-started"}, }, expect: "https://github.com/okteto/go-getting-started", }, { name: "single remote", expectError: false, remotes: []remote{ {name: "mine", url: "https://github.com/okteto/go-getting-started"}, }, expect: "https://github.com/okteto/go-getting-started", }, { name: "multiple remotes", expectError: false, remotes: []remote{ {name: "fork", url: "https://github.com/oktetotest/go-getting-started"}, {name: "origin", url: "https://github.com/cindy/go-getting-started"}, {name: "upstream", url: "https://github.com/okteto/go-getting-started"}, }, expect: "https://github.com/cindy/go-getting-started", }, { name: "no remotes", expectError: true, remotes: nil, expect: "", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { dir := t.TempDir() if _, err := model.GetRepositoryURL(dir); err == nil { t.Fatal("expected error when there's no github repo") } r, err := git.PlainInit(dir, true) if err != nil { t.Fatal(err) } for _, rm := range tt.remotes { if _, err := r.CreateRemote(&config.RemoteConfig{Name: rm.name, URLs: []string{rm.url}}); err != nil { t.Fatal(err) } } url, err := model.GetRepositoryURL(dir) if tt.expectError { if err == nil { t.Error("expected error when calling getRepositoryURL") } return } if err != nil { t.Fatal(err) } if url != tt.expect { t.Errorf("expected '%s', got '%s", tt.expect, url) } }) } }
explode_data.jsonl/28031
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 897 }
[ 2830, 3393, 3062, 4624, 3144, 1155, 353, 8840, 836, 8, 1476, 13158, 8699, 2036, 341, 197, 11609, 914, 198, 197, 19320, 220, 914, 198, 197, 532, 2405, 7032, 284, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 24952, 1454, 1807, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestAdd(t *testing.T) { for _, spec := range []struct { tokens [][]string want utilities.DoubleArray }{ { want: utilities.DoubleArray{ Encoding: make(map[string]int), }, }, { tokens: [][]string{{"foo"}}, want: utilities.DoubleArray{ Encoding: map[string]int{"foo": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}, // 0: ^ // 1: ^foo // 2: ^foo$ }, }, { tokens: [][]string{{"foo"}, {"bar"}}, want: utilities.DoubleArray{ Encoding: map[string]int{ "foo": 0, "bar": 1, }, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}, // 0: ^ // 1: ^foo // 2: ^bar // 3: ^foo$ // 4: ^bar$ }, }, { tokens: [][]string{{"foo", "bar"}, {"foo", "baz"}}, want: utilities.DoubleArray{ Encoding: map[string]int{ "foo": 0, "bar": 1, "baz": 2, }, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}, // 0: ^ // 1: ^foo // 2: ^foo.bar // 3: ^foo.baz // 4: ^foo.bar$ // 5: ^foo.baz$ }, }, { tokens: [][]string{{"foo", "bar"}, {"foo", "baz"}, {"qux"}}, want: utilities.DoubleArray{ Encoding: map[string]int{ "foo": 0, "bar": 1, "baz": 2, "qux": 3, }, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 1, 3, 4, 5}, // 0: ^ // 1: ^foo // 2: ^foo.bar // 3: ^foo.baz // 4: ^qux // 5: ^foo.bar$ // 6: ^foo.baz$ // 7: ^qux$ }, }, { tokens: [][]string{ {"foo", "bar"}, {"foo", "baz", "bar"}, {"qux", "foo"}, }, want: utilities.DoubleArray{ Encoding: map[string]int{ "foo": 0, "bar": 1, "baz": 2, "qux": 3, }, Base: []int{1, 1, 1, 5, 8, 0, 3, 0, 5, 0}, Check: []int{0, 1, 2, 2, 1, 3, 4, 7, 5, 9}, // 0: ^ // 1: ^foo // 2: ^foo.bar // 3: ^foo.baz // 4: ^qux // 5: ^foo.bar$ // 6: ^foo.baz.bar // 7: ^foo.baz.bar$ // 8: ^qux.foo // 9: ^qux.foo$ }, }, } { da := utilities.NewDoubleArray(spec.tokens) if got, want := da.Encoding, spec.want.Encoding; !reflect.DeepEqual(got, want) { t.Errorf("da.Encoding = %v; want %v; tokens = %#v", got, want, spec.tokens) } if got, want := da.Base, spec.want.Base; !compareArray(got, want) { t.Errorf("da.Base = %v; want %v; tokens = %#v", got, want, spec.tokens) } if 
got, want := da.Check, spec.want.Check; !compareArray(got, want) { t.Errorf("da.Check = %v; want %v; tokens = %#v", got, want, spec.tokens) } } }
explode_data.jsonl/43871
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1448 }
[ 2830, 3393, 2212, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1398, 1669, 2088, 3056, 1235, 341, 197, 3244, 9713, 52931, 917, 198, 197, 50780, 256, 29419, 36113, 1857, 198, 197, 59403, 197, 197, 515, 298, 50780, 25, 29419, 36113, 1857, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCheckForClientWithLogger(t *testing.T) { defer func() { client = nil logger = nil }() logger = logging.NewBasicLogger() Convey("Given a host", t, func() { host := "testhost" Convey("When checkForClient is called with no client", func() { err := checkForClient(host) Convey("Then an error should be returned", func() { So(err, ShouldNotBeNil) }) }) }) }
explode_data.jsonl/9670
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 3973, 2461, 2959, 2354, 7395, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 25291, 284, 2092, 198, 197, 17060, 284, 2092, 198, 197, 69826, 17060, 284, 8392, 7121, 15944, 7395, 741, 93070, 5617, 445, 22043, 264, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidRequestForProduct(t *testing.T) { // We setup a fake http server that mocks a registration server. ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { file, err := os.Open("testdata/products-sle12.json") if err != nil { fmt.Fprintln(w, "FAIL!") return } io.Copy(w, file) file.Close() })) defer ts.Close() var cr Credentials var ip InstalledProduct data := SUSEConnectData{SccURL: ts.URL, Insecure: true} products, err := RequestProducts(data, cr, ip) if err != nil { t.Fatal("It should've run just fine...") } if len(products) != 1 { t.Fatalf("Unexpected number of products found. Got %d, expected %d", len(products), 1) } productHelper(t, products[0]) }
explode_data.jsonl/43079
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 4088, 1900, 2461, 4816, 1155, 353, 8840, 836, 8, 341, 197, 322, 1205, 6505, 264, 12418, 1758, 3538, 429, 68909, 264, 12227, 3538, 624, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHasEdge(t *testing.T) { t.Parallel() var colOne = Col{Name: "Id", Type: "int", Key: "primary_key", MaxLen: sql.NullInt64{Int64: 0, Valid: false}} var colTwo = Col{Name: "Name", Type: "string", Key: "", MaxLen: sql.NullInt64{Int64: 45, Valid: true}} assert.Equal(t, "", colOne.GetMeta(), "should be equal") assert.Equal(t, "`gorm:size:45`", colTwo.GetMeta(), "should be equal") }
explode_data.jsonl/71802
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 10281, 11656, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2405, 1375, 3966, 284, 4254, 63121, 25, 330, 764, 497, 3990, 25, 330, 396, 497, 5309, 25, 330, 6545, 3097, 497, 7487, 11271, 25, 5704, 23979, 1072, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWeekdayAfterToday(t *testing.T) { now := time.Now() // Create new scheduler to have clean test env s := NewScheduler() // Schedule job at next week day var weekJob *Job switch now.Weekday() { case time.Monday: weekJob = s.Every(1).Tuesday() case time.Tuesday: weekJob = s.Every(1).Wednesday() case time.Wednesday: weekJob = s.Every(1).Thursday() case time.Thursday: weekJob = s.Every(1).Friday() case time.Friday: weekJob = s.Every(1).Saturday() case time.Saturday: weekJob = s.Every(1).Sunday() case time.Sunday: weekJob = s.Every(1).Monday() } // First run weekJob.scheduleNextRun() exp := time.Date(now.Year(), now.Month(), now.Day()+1, 0, 0, 0, 0, loc) assert.Equal(t, exp, weekJob.nextRun) // Simulate job run 7 days before weekJob.lastRun = weekJob.nextRun.AddDate(0, 0, -7) // Next run weekJob.scheduleNextRun() exp = time.Date(now.Year(), now.Month(), now.Day()+1, 0, 0, 0, 0, loc) assert.Equal(t, exp, weekJob.nextRun) }
explode_data.jsonl/63504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 384 }
[ 2830, 3393, 17053, 1292, 6025, 15364, 1155, 353, 8840, 836, 8, 341, 80922, 1669, 882, 13244, 2822, 197, 322, 4230, 501, 28809, 311, 614, 4240, 1273, 6105, 198, 1903, 1669, 1532, 38878, 2822, 197, 322, 23512, 2618, 518, 1790, 2003, 1899,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func Test_compute(t *testing.T) { value := compute(1, 2, add) fmt.Printf("add:%v\n", value) value = compute(8, 9, mul) fmt.Printf("mul:%v\n", value) }
explode_data.jsonl/50589
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 57028, 1155, 353, 8840, 836, 8, 341, 16309, 1669, 12564, 7, 16, 11, 220, 17, 11, 912, 340, 11009, 19367, 445, 718, 7533, 85, 1699, 497, 897, 340, 16309, 284, 12564, 7, 23, 11, 220, 24, 11, 15602, 340, 11009, 19367, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDingConfig(t *testing.T) { var ( err error config string ) c.Convey("Define configuration", t, func() { config = configTpl c.Convey("Parse configuration", func() { err = beemod.Register(DefaultBuild).SetCfg([]byte(config), "toml").Run() c.So(err, c.ShouldBeNil) }) }) }
explode_data.jsonl/29793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 35, 287, 2648, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 9859, 262, 1465, 198, 197, 25873, 914, 198, 197, 340, 1444, 4801, 5617, 445, 35338, 6546, 497, 259, 11, 2915, 368, 341, 197, 25873, 284, 2193, 87137, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestState_SubscribeRuntimeVersion(t *testing.T) { if testing.Short() { t.Skip("skipping end-to-end test in short mode.") } api, err := gsrpc.NewSubstrateAPI(config.Default().RPCURL) assert.NoError(t, err) sub, err := api.RPC.State.SubscribeRuntimeVersion() assert.NoError(t, err) defer sub.Unsubscribe() timeout := time.After(10 * time.Second) for { select { case rv := <-sub.Chan(): fmt.Printf("%#v\n", rv) return case <-timeout: assert.FailNow(t, "timeout reached without getting a notification from subscription") return } } }
explode_data.jsonl/52304
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 1397, 36359, 6273, 15123, 5637, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 835, 4686, 13068, 1273, 304, 2805, 3856, 13053, 197, 630, 54299, 11, 1848, 1669, 342, 15094, 3992, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5