text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestInvalidPubKey(t *testing.T) { _, err := New(&GinJWTMiddleware{ Realm: "zone", SigningAlgorithm: "RS256", PrivKeyFile: "testdata/jwtRS256.key", PubKeyFile: "testdata/invalidpubkey.key", }) assert.Error(t, err) assert.Equal(t, ErrInvalidPubKey, err) }
explode_data.jsonl/64431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 7928, 29162, 1592, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 1532, 2099, 38, 258, 55172, 24684, 515, 197, 197, 64290, 25, 310, 330, 8684, 756, 197, 197, 93358, 27847, 25, 330, 11451, 17, 20, 21, 756, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetSSGenVoteBits(t *testing.T) { var ssgen = dcrutil.NewTx(ssgenMsgTx) ssgen.SetTree(wire.TxTreeStake) ssgen.SetIndex(0) correctvbs := uint16(0x8c94) votebits := stake.SSGenVoteBits(ssgen.MsgTx()) if correctvbs != votebits { t.Errorf("Error thrown on TestGetSSGenVoteBits: Looking for "+ "vbs % x, got vbs % x", correctvbs, votebits) } }
explode_data.jsonl/70514
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 1949, 1220, 9967, 41412, 19920, 1155, 353, 8840, 836, 8, 341, 2405, 274, 1991, 268, 284, 294, 5082, 1314, 7121, 31584, 30678, 4370, 6611, 31584, 340, 34472, 4370, 4202, 6533, 3622, 554, 81362, 6533, 623, 726, 340, 34472, 437...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEncodeConfigMap(t *testing.T) { cases := []struct { desc string cmYaml string expect string err string }{ // empty map {"empty data", ` apiVersion: v1 kind: ConfigMap`, `{"data":"","kind":"ConfigMap","name":""}`, ""}, // one key {"one key", ` apiVersion: v1 kind: ConfigMap data: one: ""`, `{"data":{"one":""},"kind":"ConfigMap","name":""}`, ""}, // three keys (tests sorting order) {"three keys", ` apiVersion: v1 kind: ConfigMap data: two: 2 one: "" three: 3`, `{"data":{"one":"","three":3,"two":2},"kind":"ConfigMap","name":""}`, ""}, // empty binary map {"empty data", ` apiVersion: v1 kind: ConfigMap`, `{"data":"","kind":"ConfigMap","name":""}`, ""}, // one key with binary data {"one key", ` apiVersion: v1 kind: ConfigMap binaryData: one: ""`, `{"binaryData":{"one":""},"data":"","kind":"ConfigMap","name":""}`, ""}, // three keys with binary data (tests sorting order) {"three keys", ` apiVersion: v1 kind: ConfigMap binaryData: two: 2 one: "" three: 3`, `{"binaryData":{"one":"","three":3,"two":2},"data":"","kind":"ConfigMap","name":""}`, ""}, // two keys, one string and one binary values {"two keys with one each", ` apiVersion: v1 kind: ConfigMap data: one: "" binaryData: two: ""`, `{"binaryData":{"two":""},"data":{"one":""},"kind":"ConfigMap","name":""}`, ""}, } for _, c := range cases { node, err := yaml.Parse(c.cmYaml) if err != nil { t.Fatal(err) } s, err := encodeConfigMap(node) if SkipRest(t, c.desc, err, c.err) { continue } if s != c.expect { t.Errorf("case %q, expect %q but got %q from encode %#v", c.desc, c.expect, s, c.cmYaml) } } }
explode_data.jsonl/32279
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 639 }
[ 2830, 3393, 32535, 2648, 2227, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 41653, 256, 914, 198, 197, 98316, 56, 9467, 914, 198, 197, 24952, 914, 198, 197, 9859, 262, 914, 198, 197, 59403, 197, 197, 322, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDelims(t *testing.T) { const hello = "Hello, world" var value = struct{ Str string }{hello} for i := 0; i < len(delimPairs); i += 2 { text := ".Str" left := delimPairs[i+0] trueLeft := left right := delimPairs[i+1] trueRight := right if left == "" { // default case trueLeft = "{{" } if right == "" { // default case trueRight = "}}" } text = trueLeft + text + trueRight // Now add a comment text += trueLeft + "/*comment*/" + trueRight // Now add an action containing a string. text += trueLeft + `"` + trueLeft + `"` + trueRight // At this point text looks like `{{.Str}}{{/*comment*/}}{{"{{"}}`. tmpl, err := New("delims").Delims(left, right).Parse(text) if err != nil { t.Fatalf("delim %q text %q parse err %s", left, text, err) } var b = new(bytes.Buffer) err = tmpl.Execute(b, value) if err != nil { t.Fatalf("delim %q exec err %s", left, err) } if b.String() != hello+trueLeft { t.Errorf("expected %q got %q", hello+trueLeft, b.String()) } } }
explode_data.jsonl/41517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 422 }
[ 2830, 3393, 16532, 5742, 1155, 353, 8840, 836, 8, 341, 4777, 23811, 284, 330, 9707, 11, 1879, 698, 2405, 897, 284, 2036, 90, 4509, 914, 335, 90, 14990, 532, 2023, 600, 1669, 220, 15, 26, 600, 366, 2422, 1500, 39795, 54228, 1215, 600...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestPop(t *testing.T) { var e1, e2 float32 e := createRandomObject(e1) if v, ok := e.(float32); ok { e1 = v } e = createRandomObject(e2) if v, ok := e.(float32); ok { e2 = v } s := New() popped := s.Pop() if popped != nonExistent { t.Errorf("default non existent sentinel not returned, instead got %v", popped) } s.Add(e1) s.Add(e2) s.Pop() if len(s.m) != 1 { t.Errorf("expected 1 entries, got %d", len(s.m)) } }
explode_data.jsonl/35005
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 11598, 1155, 353, 8840, 836, 8, 341, 2405, 384, 16, 11, 384, 17, 2224, 18, 17, 198, 7727, 1669, 1855, 13999, 1190, 2026, 16, 340, 743, 348, 11, 5394, 1669, 384, 12832, 3649, 18, 17, 1215, 5394, 341, 197, 7727, 16, 284,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStructLevelValidationsPointerPassing(t *testing.T) { v1 := New() v1.RegisterStructValidation(StructValidationTestStruct, &TestStruct{}) tst := &TestStruct{ String: "good value", } errs := v1.Struct(tst) NotEqual(t, errs, nil) AssertError(t, errs, "TestStruct.StringVal", "TestStruct.String", "StringVal", "String", "badvalueteststruct") }
explode_data.jsonl/77363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 9422, 4449, 4088, 804, 9084, 12187, 287, 1155, 353, 8840, 836, 8, 341, 5195, 16, 1669, 1532, 741, 5195, 16, 19983, 9422, 13799, 7, 9422, 13799, 2271, 9422, 11, 609, 2271, 9422, 6257, 692, 3244, 267, 1669, 609, 2271, 9422, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnprotectSheet(t *testing.T) { f, err := OpenFile(filepath.Join("test", "Book1.xlsx")) if !assert.NoError(t, err) { t.FailNow() } // Test unprotect not exists worksheet. assert.EqualError(t, f.UnprotectSheet("SheetN"), "sheet SheetN is not exist") f.UnprotectSheet("Sheet1") assert.NoError(t, f.SaveAs(filepath.Join("test", "TestUnprotectSheet.xlsx"))) }
explode_data.jsonl/36995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 1806, 45979, 10541, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 5264, 1703, 34793, 22363, 445, 1944, 497, 330, 7134, 16, 46838, 5455, 743, 753, 2207, 35699, 1155, 11, 1848, 8, 341, 197, 3244, 57243, 7039, 741, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStrikeThrough(t *testing.T) { var tests = []string{ "nothing inline\n", "<p>nothing inline</p>\n", "simple ~~inline~~ test\n", "<p>simple <del>inline</del> test</p>\n", "~~at the~~ beginning\n", "<p><del>at the</del> beginning</p>\n", "at the ~~end~~\n", "<p>at the <del>end</del></p>\n", "~~try two~~ in ~~one line~~\n", "<p><del>try two</del> in <del>one line</del></p>\n", "over ~~two\nlines~~ test\n", "<p>over <del>two\nlines</del> test</p>\n", "odd ~~number of~~ markers~~ here\n", "<p>odd <del>number of</del> markers~~ here</p>\n", "odd ~~number\nof~~ markers~~ here\n", "<p>odd <del>number\nof</del> markers~~ here</p>\n", } doTestsInline(t, tests) }
explode_data.jsonl/57350
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 335 }
[ 2830, 3393, 72319, 23857, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 917, 515, 197, 197, 1, 41212, 7381, 1699, 756, 197, 197, 22476, 79, 29, 41212, 7381, 522, 79, 8449, 77, 25897, 197, 197, 1, 22944, 77777, 5057, 5817, 127...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWorkspace_ReadAddonsDir(t *testing.T) { testCases := map[string]struct { svcName string copilotDirPath string fs func() afero.Fs wantedFileNames []string wantedErr error }{ "dir not exist": { svcName: "webhook", copilotDirPath: "/copilot", fs: func() afero.Fs { fs := afero.NewMemMapFs() fs.MkdirAll("/copilot/webhook", 0755) return fs }, wantedErr: &os.PathError{ Op: "open", Path: "/copilot/webhook/addons", Err: os.ErrNotExist, }, }, "retrieves file names": { svcName: "webhook", copilotDirPath: "/copilot", fs: func() afero.Fs { fs := afero.NewMemMapFs() fs.MkdirAll("/copilot/webhook/addons", 0755) params, _ := fs.Create("/copilot/webhook/addons/params.yml") outputs, _ := fs.Create("/copilot/webhook/addons/outputs.yml") defer params.Close() defer outputs.Close() return fs }, wantedFileNames: []string{"outputs.yml", "params.yml"}, }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN ws := &Workspace{ copilotDir: tc.copilotDirPath, fsUtils: &afero.Afero{ Fs: tc.fs(), }, } // WHEN actualFileNames, actualErr := ws.ReadAddonsDir(tc.svcName) // THEN require.Equal(t, tc.wantedErr, actualErr) require.Equal(t, tc.wantedFileNames, actualFileNames) }) } }
explode_data.jsonl/30119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 687 }
[ 2830, 3393, 45981, 38381, 2212, 2382, 6184, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 1903, 7362, 675, 286, 914, 198, 197, 1444, 453, 23958, 6184, 1820, 914, 198, 197, 53584, 1797, 2915, 368, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestForBool(t *testing.T) { var out bytes.Buffer var in bytes.Buffer cases := []struct { in string out bool }{ {"true", true}, {"1", true}, {"y", true}, {"Y", true}, {"yes", true}, {"Yes", true}, {"YES", true}, {"false", false}, {"0", false}, {"n", false}, {"N", false}, {"no", false}, {"No", false}, {"NO", false}, } for _, c := range cases { in.Reset() out.Reset() fmt.Fprintf(&in, "%v\n", c.in) // Note the default value is always the oposite of the input // to ensure it is flipped. b := prompt.ForBool("", !c.out, prompt.WithInput(&in), prompt.WithOutput(&out)) if b != c.out { t.Fatalf("expected '%v' to be an acceptable %v.", c.in, c.out) } } }
explode_data.jsonl/6618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 319 }
[ 2830, 3393, 2461, 11233, 1155, 353, 8840, 836, 8, 341, 2405, 700, 5820, 22622, 198, 2405, 304, 5820, 22622, 271, 1444, 2264, 1669, 3056, 1235, 341, 197, 17430, 220, 914, 198, 197, 13967, 1807, 198, 197, 59403, 197, 197, 4913, 1866, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_PanicRecover(t *testing.T) { type testCase struct { title string nextHandler http.Handler status int output string } cases := []testCase{ { title: "ignore \"nil\" panic and send a success code", nextHandler: http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { panic(nil) }, ), status: http.StatusOK, }, { title: "catch \"unknown\" panic and report to the client with code 500", nextHandler: http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { panic("unexpected panic") }, ), status: http.StatusInternalServerError, output: "unexpected panic\n", }, { title: "catch a \"standard error\", report to the client with code 500 and hide error message", nextHandler: http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { panic(fmt.Errorf("standard error")) }, ), status: http.StatusInternalServerError, output: fmt.Sprintf("%s\n", http.StatusText(http.StatusInternalServerError)), }, { title: "catch an \"error with status code\" and report to the client", nextHandler: http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { panic(errors.NewBadRequest(fmt.Errorf("bad request"))) }, ), status: http.StatusBadRequest, output: "bad request\n", }, } for _, tc := range cases { t.Run(tc.title, func(t *testing.T) { defer func() { if r := recover(); r != nil { t.Error("the code should never panic because it is wrapped with PanicRecover middleware") } }() w := httptest.NewRecorder() PanicRecover(errors.Send)(tc.nextHandler).ServeHTTP(w, nil) if w.Code != tc.status { t.Errorf("status code %d was expected to be %d", w.Code, tc.status) } if w.Body.String() != tc.output { t.Errorf("net output %q was expected to be %q", w.Body.String(), tc.output) } }) } }
explode_data.jsonl/33085
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 772 }
[ 2830, 3393, 1088, 31270, 693, 3688, 1155, 353, 8840, 836, 8, 341, 13158, 54452, 2036, 341, 197, 24751, 981, 914, 198, 197, 28144, 3050, 1758, 31010, 198, 197, 23847, 414, 526, 198, 197, 21170, 414, 914, 198, 197, 630, 1444, 2264, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLiveUpdateMultipleContainersUpdatesAllForUserRunFailuresAndDoesntFallBack(t *testing.T) { f := newBDFixture(t, k8s.EnvDockerDesktop, container.RuntimeDocker) defer f.TearDown() // Same UserRunFailure on all three exec calls f.docker.ExecErrorsToThrow = []error{userFailureErrDocker, userFailureErrDocker, userFailureErrDocker} m := NewSanchoLiveUpdateManifest(f) cIDs := []container.ID{"c1", "c2", "c3"} tCase := testCase{ manifest: m, runningContainersByTarget: map[model.TargetID][]container.ID{m.ImageTargetAt(0).ID(): cIDs}, changedFiles: []string{"a.txt"}, // BuildAndDeploy call will ultimately fail with this error, // b/c we DON'T fall back to an image build expectErrorContains: "failed with exit code: 123", // attempted update for each container; // for each, called copy and exec before hitting error // (so did not call restart) expectDockerCopyCount: 3, expectDockerExecCount: 3, expectDockerRestartCount: 0, // DO NOT fall back to image build expectDockerBuildCount: 0, expectK8sDeploy: false, } runTestCase(t, f, tCase) }
explode_data.jsonl/35164
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 429 }
[ 2830, 3393, 20324, 4289, 32089, 74632, 37091, 2403, 2461, 1474, 6727, 19524, 1413, 3036, 21468, 406, 49772, 3707, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 33, 5262, 12735, 1155, 11, 595, 23, 82, 81214, 35, 13659, 23597, 11, 5476, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTriggerIsReady(t *testing.T) { tests := []struct { name string brokerStatus *BrokerStatus markKubernetesServiceExists bool markVirtualServiceExists bool subscriptionOwned bool subscriptionStatus *messagingv1alpha1.SubscriptionStatus wantReady bool }{{ name: "all happy", brokerStatus: TestHelper.ReadyBrokerStatus(), markKubernetesServiceExists: true, markVirtualServiceExists: true, subscriptionOwned: true, subscriptionStatus: TestHelper.ReadySubscriptionStatus(), wantReady: true, }, { name: "broker sad", brokerStatus: TestHelper.NotReadyBrokerStatus(), markKubernetesServiceExists: true, markVirtualServiceExists: true, subscriptionOwned: true, subscriptionStatus: TestHelper.ReadySubscriptionStatus(), wantReady: false, }, { name: "subscribed sad", brokerStatus: TestHelper.ReadyBrokerStatus(), markKubernetesServiceExists: true, markVirtualServiceExists: true, subscriptionOwned: true, subscriptionStatus: TestHelper.NotReadySubscriptionStatus(), wantReady: false, }, { name: "subscription not owned", brokerStatus: TestHelper.ReadyBrokerStatus(), markKubernetesServiceExists: true, markVirtualServiceExists: true, subscriptionOwned: false, subscriptionStatus: TestHelper.ReadySubscriptionStatus(), wantReady: false, }, { name: "all sad", brokerStatus: TestHelper.NotReadyBrokerStatus(), markKubernetesServiceExists: false, markVirtualServiceExists: false, subscriptionOwned: false, subscriptionStatus: TestHelper.NotReadySubscriptionStatus(), wantReady: false, }} for _, test := range tests { t.Run(test.name, func(t *testing.T) { ts := &TriggerStatus{} if test.brokerStatus != nil { ts.PropagateBrokerStatus(test.brokerStatus) } if !test.subscriptionOwned { ts.MarkSubscriptionNotOwned(&messagingv1alpha1.Subscription{}) } else if test.subscriptionStatus != nil { ts.PropagateSubscriptionStatus(test.subscriptionStatus) } got := ts.IsReady() if test.wantReady != got { t.Errorf("unexpected readiness: want %v, got %v", test.wantReady, got) } }) } }
explode_data.jsonl/54995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1217 }
[ 2830, 3393, 17939, 3872, 19202, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 664, 914, 198, 197, 2233, 45985, 2522, 394, 353, 65545, 2522, 198, 197, 2109, 838, 42, 29827, 1860, 15575, 1807, 198, 197, 2109, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFindServiceCredentials(t *testing.T) { spec.Run(t, "FindServiceCredentials", func(t *testing.T, _ spec.G, it spec.S) { g := NewGomegaWithT(t) it("matches single service by BindingName", func() { defer test.ReplaceEnv(t, "CNB_SERVICES", `{ "": [ { "binding_name": "test-service", "credentials": { "test-key": "test-value" }, "instance_name": null, "label": null, "tags": [ ] } ] }`)() c, ok, err := helper.FindServiceCredentials("test-service") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) g.Expect(c).To(Equal(helper.Credentials{"test-key": "test-value"})) }) it("matches single service by InstanceName", func() { defer test.ReplaceEnv(t, "CNB_SERVICES", `{ "": [ { "binding_name": null, "credentials": { "test-key": "test-value" }, "instance_name": "test-service", "label": null, "tags": [ ] } ] }`)() c, ok, err := helper.FindServiceCredentials("test-service") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) g.Expect(c).To(Equal(helper.Credentials{"test-key": "test-value"})) }) it("matches single service by Label", func() { defer test.ReplaceEnv(t, "CNB_SERVICES", `{ "": [ { "binding_name": null, "credentials": { "test-key": "test-value" }, "instance_name": null, "label": "test-service", "tags": [ ] } ] }`)() c, ok, err := helper.FindServiceCredentials("test-service") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) g.Expect(c).To(Equal(helper.Credentials{"test-key": "test-value"})) }) it("matches single service by Tags", func() { defer test.ReplaceEnv(t, "CNB_SERVICES", `{ "": [ { "binding_name": null, "credentials": { "test-key": "test-value" }, "instance_name": null, "label": null, "tags": [ "test-service" ] } ] }`)() c, ok, err := helper.FindServiceCredentials("test-service") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) g.Expect(c).To(Equal(helper.Credentials{"test-key": "test-value"})) }) it("matches single service with Credentials", func() { defer test.ReplaceEnv(t, "CNB_SERVICES", `{ "": [ { 
"binding_name": null, "credentials": { "test-key": "test-value" }, "instance_name": null, "label": null, "tags": [ "test-service" ] } ] }`)() c, ok, err := helper.FindServiceCredentials("test-service", "test-key") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) g.Expect(c).To(Equal(helper.Credentials{"test-key": "test-value"})) }) it("does not match no service", func() { _, ok, err := helper.FindServiceCredentials("test-service") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeFalse()) }) it("does not match multiple services", func() { defer test.ReplaceEnv(t, "CNB_SERVICES", `{ "": [ { "binding_name": "test-service-1", "credentials": { "test-key": "test-value" }, "instance_name": null, "label": null, "tags": [ ] }, { "binding_name": "test-service-2", "credentials": { "test-key": "test-value" }, "instance_name": null, "label": null, "tags": [ ] } ] }`)() _, ok, err := helper.FindServiceCredentials("test-service") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeFalse()) }) it("does not match without Credentials", func() { defer test.ReplaceEnv(t, "CNB_SERVICES", `{ "": [ { "binding_name": null, "credentials": { }, "instance_name": null, "label": null, "tags": [ "test-service" ] } ] }`)() _, ok, err := helper.FindServiceCredentials("test-service", "test-key") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeFalse()) }) }, spec.Report(report.Terminal{})) }
explode_data.jsonl/31425
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1972 }
[ 2830, 3393, 9885, 1860, 27025, 1155, 353, 8840, 836, 8, 341, 98100, 16708, 1155, 11, 330, 9885, 1860, 27025, 497, 2915, 1155, 353, 8840, 836, 11, 716, 1398, 1224, 11, 432, 1398, 808, 8, 1476, 197, 3174, 1669, 1532, 38, 32696, 2354, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCollection_UpdateAll(t *testing.T) { ast := require.New(t) cli := initClient("test") defer cli.Close(context.Background()) defer cli.DropCollection(context.Background()) cli.EnsureIndexes(context.Background(), nil, []string{"name"}) id1 := primitive.NewObjectID() id2 := primitive.NewObjectID() id3 := primitive.NewObjectID() docs := []interface{}{ bson.D{{Key: "_id", Value: id1}, {Key: "name", Value: "Alice"}, {Key: "age", Value: 18}}, bson.D{{Key: "_id", Value: id2}, {Key: "name", Value: "Alice"}, {Key: "age", Value: 19}}, bson.D{{Key: "_id", Value: id3}, {Key: "name", Value: "Lucas"}, {Key: "age", Value: 20}}, } _, _ = cli.InsertMany(context.Background(), docs) var err error // update already exist record filter1 := bson.M{ "name": "Alice", } update1 := bson.M{ operator.Set: bson.M{ "age": 33, }, } opts := options.UpdateOptions{} opts.UpdateOptions = officialOpts.Update().SetBypassDocumentValidation(false) res, err := cli.UpdateAll(context.Background(), filter1, update1, opts) ast.NoError(err) ast.NotEmpty(res) ast.Equal(int64(2), res.MatchedCount) ast.Equal(int64(2), res.ModifiedCount) ast.Equal(int64(0), res.UpsertedCount) ast.Equal(nil, res.UpsertedID) // if record is not exist,err is nil, MatchedCount in res is 0 filter2 := bson.M{ "name": "Lily", } update2 := bson.M{ operator.Set: bson.M{ "age": 22, }, } res, err = cli.UpdateAll(context.Background(), filter2, update2) ast.Nil(err) ast.NotNil(res) ast.Equal(int64(0), res.MatchedCount) // filter is nil or wrong BSON Document format update3 := bson.M{ "name": "Geek", "age": 21, } res, err = cli.UpdateAll(context.Background(), nil, update3) ast.Error(err) ast.Nil(res) res, err = cli.UpdateAll(context.Background(), 1, update3) ast.Error(err) ast.Nil(res) // update is nil or wrong BSON Document format filter4 := bson.M{ "name": "Geek", } res, err = cli.UpdateAll(context.Background(), filter4, nil) ast.Error(err) ast.Nil(res) res, err = cli.UpdateAll(context.Background(), filter4, 1) ast.Error(err) 
ast.Nil(res) }
explode_data.jsonl/18378
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 863 }
[ 2830, 3393, 6482, 47393, 2403, 1155, 353, 8840, 836, 8, 341, 88836, 1669, 1373, 7121, 1155, 340, 86448, 1669, 2930, 2959, 445, 1944, 1138, 16867, 21348, 10421, 5378, 19047, 2398, 16867, 21348, 21688, 6482, 5378, 19047, 2398, 86448, 22834, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStorageMachineSet(t *testing.T) { key := types.NamespacedName{Name: "foo", Namespace: "default"} created := &MachineSet{ObjectMeta: metav1.ObjectMeta{Name: "foo", Namespace: "default"}} // Test Create fetched := &MachineSet{} if err := c.Create(context.TODO(), created); err != nil { t.Errorf("error creating machineset: %v", err) } if err := c.Get(context.TODO(), key, fetched); err != nil { t.Errorf("error getting machineset: %v", err) } if !reflect.DeepEqual(*fetched, *created) { t.Error("fetched value not what was created") } // Test Updating the Labels updated := fetched.DeepCopy() updated.Labels = map[string]string{"hello": "world"} if err := c.Update(context.TODO(), updated); err != nil { t.Errorf("error updating machineset: %v", err) } if err := c.Get(context.TODO(), key, fetched); err != nil { t.Errorf("error getting machineset: %v", err) } if !reflect.DeepEqual(*fetched, *updated) { t.Error("fetched value not what was updated") } // Test Delete if err := c.Delete(context.TODO(), fetched); err != nil { t.Errorf("error deleting machineset: %v", err) } if err := c.Get(context.TODO(), key, fetched); err == nil { t.Error("expected error getting machineset") } }
explode_data.jsonl/69316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 445 }
[ 2830, 3393, 5793, 21605, 1649, 1155, 353, 8840, 836, 8, 341, 23634, 1669, 4494, 98932, 68552, 675, 63121, 25, 330, 7975, 497, 41962, 25, 330, 2258, 16707, 197, 7120, 1669, 609, 21605, 1649, 90, 1190, 12175, 25, 77520, 16, 80222, 63121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestScheduler_Weekdays(t *testing.T) { scheduler := NewScheduler() job1 := scheduler.Every(1).Monday().At("23:59") job2 := scheduler.Every(1).Wednesday().At("23:59") job1.Do(task) job2.Do(task) t.Logf("job1 scheduled for %s", job1.NextScheduledTime()) t.Logf("job2 scheduled for %s", job2.NextScheduledTime()) assert.NotEqual(t, job1.NextScheduledTime(), job2.NextScheduledTime(), "Two jobs scheduled at the same time on two different weekdays should never run at the same time") }
explode_data.jsonl/63492
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 38878, 62, 17053, 13778, 1155, 353, 8840, 836, 8, 341, 1903, 15222, 1669, 1532, 38878, 2822, 68577, 16, 1669, 28809, 5142, 1204, 7, 16, 568, 35239, 1005, 1655, 445, 17, 18, 25, 20, 24, 1138, 68577, 17, 1669, 28809, 5142, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTarjanSCC(t *testing.T) { for i, test := range tarjanTests { g := simple.NewDirectedGraph() for u, e := range test.g { // Add nodes that are not defined by an edge. if g.Node(int64(u)) == nil { g.AddNode(simple.Node(u)) } for v := range e { g.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) } } gotSCCs := TarjanSCC(g) // tarjan.strongconnect does range iteration over maps, // so sort SCC members to ensure consistent ordering. gotIDs := make([][]int64, len(gotSCCs)) for i, scc := range gotSCCs { gotIDs[i] = make([]int64, len(scc)) for j, id := range scc { gotIDs[i][j] = id.ID() } sort.Sort(ordered.Int64s(gotIDs[i])) } for _, iv := range test.ambiguousOrder { sort.Sort(ordered.BySliceValues(test.want[iv.start:iv.end])) sort.Sort(ordered.BySliceValues(gotIDs[iv.start:iv.end])) } if !reflect.DeepEqual(gotIDs, test.want) { t.Errorf("unexpected Tarjan scc result for %d:\n\tgot:%v\n\twant:%v", i, gotIDs, test.want) } } }
explode_data.jsonl/22122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 461 }
[ 2830, 3393, 62733, 22838, 3540, 34, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 1273, 1669, 2088, 12183, 22838, 18200, 341, 197, 3174, 1669, 4285, 7121, 92669, 11212, 741, 197, 2023, 575, 11, 384, 1669, 2088, 1273, 1302, 341, 298, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestRollbackTransitionOnEnterCallbackError(t *testing.T) { OrderStateMachine.State(OrderStatePaying).Enter(func(order interface{}, tx *gorm.DB) (err error) { order.(*Order).Address = "an address" return errors.New("intentional error") }) order := &Order{} order.State = OrderStateDraft CreateOrderAndExecuteTransition(order, OrderEventCheckout, t, false) testdb.First(&order, order.Id) if order.State != OrderStateDraft { t.Errorf("state transitioned on Enter callback error") } if order.Address != "" { t.Errorf("attribute changed on Enter callback error") } }
explode_data.jsonl/44777
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 32355, 1419, 21768, 1925, 6269, 7494, 1454, 1155, 353, 8840, 836, 8, 341, 197, 4431, 94666, 18942, 39692, 1397, 47, 17270, 568, 6269, 18552, 19385, 3749, 22655, 9854, 353, 73281, 22537, 8, 320, 615, 1465, 8, 341, 197, 42245,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScalarBls12377G1New(t *testing.T) { bls12377G1 := BLS12377G1() three := bls12377G1.Scalar.New(3) require.True(t, three.IsOdd()) four := bls12377G1.Scalar.New(4) require.True(t, four.IsEven()) neg1 := bls12377G1.Scalar.New(-1) require.True(t, neg1.IsEven()) neg2 := bls12377G1.Scalar.New(-2) require.True(t, neg2.IsOdd()) }
explode_data.jsonl/15748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 172 }
[ 2830, 3393, 20639, 33, 4730, 16, 17, 18, 22, 22, 38, 16, 3564, 1155, 353, 8840, 836, 8, 341, 96421, 82, 16, 17, 18, 22, 22, 38, 16, 1669, 425, 7268, 16, 17, 18, 22, 22, 38, 16, 741, 197, 27856, 1669, 1501, 82, 16, 17, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainerAndSandboxPrivileged(t *testing.T) { testID := "test-id" testSandboxID := "sandbox-id" testPid := uint32(1234) containerConfig, sandboxConfig, imageConfig, _ := getCreateContainerTestData() ociRuntime := config.Runtime{} c := newTestCRIService() for desc, test := range map[string]struct { containerPrivileged bool sandboxPrivileged bool expectError bool }{ "privileged container in non-privileged sandbox should fail": { containerPrivileged: true, sandboxPrivileged: false, expectError: true, }, "privileged container in privileged sandbox should be fine": { containerPrivileged: true, sandboxPrivileged: true, expectError: false, }, "non-privileged container in privileged sandbox should be fine": { containerPrivileged: false, sandboxPrivileged: true, expectError: false, }, "non-privileged container in non-privileged sandbox should be fine": { containerPrivileged: false, sandboxPrivileged: false, expectError: false, }, } { t.Logf("TestCase %q", desc) containerConfig.Linux.SecurityContext.Privileged = test.containerPrivileged sandboxConfig.Linux.SecurityContext = &runtime.LinuxSandboxSecurityContext{ Privileged: test.sandboxPrivileged, } _, err := c.containerSpec(testID, testSandboxID, testPid, "", containerConfig, sandboxConfig, imageConfig, nil, ociRuntime) if test.expectError { assert.Error(t, err) } else { assert.NoError(t, err) } } }
explode_data.jsonl/6409
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 4502, 3036, 50, 31536, 32124, 68431, 1155, 353, 8840, 836, 8, 341, 18185, 915, 1669, 330, 1944, 12897, 698, 18185, 50, 31536, 915, 1669, 330, 76756, 12897, 698, 18185, 32339, 1669, 2622, 18, 17, 7, 16, 17, 18, 19, 340, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// Test_EPD_001 boots the App through tool.Test and checks that its EPD
// unit was wired up (non-nil), logging the unit when present.
func Test_EPD_001(t *testing.T) {
	tool.Test(t, nil, new(App), func(app *App) {
		switch {
		case app.EPD == nil:
			t.Error("nil EPD unit")
		default:
			t.Log(app.EPD)
		}
	})
}
explode_data.jsonl/65639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 2089, 23025, 62, 15, 15, 16, 1155, 353, 8840, 836, 8, 341, 197, 14172, 8787, 1155, 11, 2092, 11, 501, 23231, 701, 2915, 11462, 353, 2164, 8, 341, 197, 743, 906, 5142, 23025, 621, 2092, 341, 298, 3244, 6141, 445, 8385, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAttachmentOnly(t *testing.T) { m := NewMessage() m.SetHeader("From", "from@example.com") m.SetHeader("To", "to@example.com") m.Attach(mockCopyFile("/tmp/test.pdf")) want := &message{ from: "from@example.com", to: []string{"to@example.com"}, content: "From: from@example.com\r\n" + "To: to@example.com\r\n" + "Content-Type: application/pdf; name=\"test.pdf\"\r\n" + "Content-Disposition: attachment; filename=\"test.pdf\"\r\n" + "Content-Transfer-Encoding: base64\r\n" + "\r\n" + base64.StdEncoding.EncodeToString([]byte("Content of test.pdf")), } testMessage(t, m, 0, want) }
explode_data.jsonl/31578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 33569, 7308, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 2052, 741, 2109, 4202, 4047, 445, 3830, 497, 330, 1499, 35487, 905, 1138, 2109, 4202, 4047, 445, 1249, 497, 330, 983, 35487, 905, 1138, 2109, 88284, 30389, 12106, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEntryAddTimedWithStagedMetadatasDropPolicyMetadata(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() e, _, _ := testEntry(ctrl, testEntryOptions{ options: testOptions(ctrl).SetVerboseErrors(true), }) require.Equal( t, errOnlyDropPolicyStagedMetadata, e.AddTimedWithStagedMetadatas(testTimedMetric, metadata.DropStagedMetadatas), ) }
explode_data.jsonl/24240
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 5874, 2212, 20217, 291, 2354, 623, 3279, 34673, 329, 19346, 19871, 13825, 14610, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 7727, 11, 8358, 716, 1669, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAlwaysOnSamplerShouldSample(t *testing.T) { gotD := AlwaysOnSampler().ShouldSample( SpanContext{}, false, ID{}, SpanID{}, "span", SpanKindClient, []kv.KeyValue{}, []Link{}) wantD := Decision{Sampled: true} if diff := cmp.Diff(wantD, gotD); diff != "" { t.Errorf("Decision: +got, -want%v", diff) } }
explode_data.jsonl/79606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 37095, 1925, 66048, 14996, 17571, 1155, 353, 8840, 836, 8, 341, 3174, 354, 35, 1669, 23240, 1925, 66048, 1005, 14996, 17571, 1006, 197, 7568, 848, 1972, 22655, 895, 11, 3034, 22655, 11903, 915, 22655, 330, 1480, 497, 11903, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_RegistrySynchronizer_CalcPositioningConstant(t *testing.T) { t.Parallel() for _, upkeepID := range []int64{0, 1, 100, 10_000} { _, err := keeper.CalcPositioningConstant(upkeepID, cltest.NewEIP55Address()) require.NoError(t, err) } }
explode_data.jsonl/35257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 62, 15603, 50, 14113, 3135, 920, 16927, 3812, 287, 15472, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 8358, 96096, 915, 1669, 2088, 3056, 396, 21, 19, 90, 15, 11, 220, 16, 11, 220, 16, 15, 15, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestConvertMapTypeDefToStruct feeds convertTypes a Thrift IDL snippet
// where a map value is a struct and the key is a typedef'd string, then
// asserts the generated conversion code: the output map is pre-sized,
// nil source values map to nil, and non-nil values are converted field
// by field (required One via string cast, optional Two via pointer cast).
func TestConvertMapTypeDefToStruct(t *testing.T) { lines, err := convertTypes( "Foo", "Bar", ` typedef string UUID struct MapValue { 1: required string one 2: optional string two } struct Foo { 1: required map<UUID, MapValue> uuidMap } struct Bar { 1: required map<UUID, MapValue> uuidMap }`, nil, nil, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` out.UUIDMap = make(map[structs.UUID]*structs.MapValue, len(in.UUIDMap)) for key1, value2 := range in.UUIDMap { if value2 != nil { out.UUIDMap[key1] = &structs.MapValue{} out.UUIDMap[key1].One = string(in.UUIDMap[key1].One) out.UUIDMap[key1].Two = (*string)(in.UUIDMap[key1].Two) } else { out.UUIDMap[key1] = nil } } `), lines) }
explode_data.jsonl/62053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 12012, 2227, 45102, 1249, 9422, 1155, 353, 8840, 836, 8, 341, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, 3989, 197, 31199, 914, 23698, 271, 197, 6472, 5027, 1130, 341, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSidecarWithVolume creates a workflow from the sidecarWithVol
// fixture, operates on it until it is running, then inspects the created
// pod: the "sidevol" sidecar container must mount both the "claim-vol"
// and "existing-vol" volumes. The test fails if the pod was not created
// or either volume mount is missing from the sidecar.
func TestSidecarWithVolume(t *testing.T) { controller := newController() wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("") wf := unmarshalWF(sidecarWithVol) wf, err := wfcset.Create(wf) assert.NoError(t, err) wf, err = wfcset.Get(wf.ObjectMeta.Name, metav1.GetOptions{}) assert.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate() assert.Equal(t, wfv1.NodeRunning, woc.wf.Status.Phase) pods, err := controller.kubeclientset.CoreV1().Pods(wf.ObjectMeta.Namespace).List(metav1.ListOptions{}) assert.NoError(t, err) assert.True(t, len(pods.Items) > 0, "pod was not created successfully") pod := pods.Items[0] claimVolFound := false existingVolFound := false for _, ctr := range pod.Spec.Containers { if ctr.Name == "sidevol" { for _, vol := range ctr.VolumeMounts { if vol.Name == "claim-vol" { claimVolFound = true } if vol.Name == "existing-vol" { existingVolFound = true } } } } assert.True(t, claimVolFound, "claim vol was not referenced by sidecar") assert.True(t, existingVolFound, "existing vol was not referenced by sidecar") }
explode_data.jsonl/54354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 454 }
[ 2830, 3393, 16384, 6918, 2354, 18902, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 501, 2051, 741, 6692, 8316, 746, 1669, 6461, 1418, 69, 2972, 746, 18979, 45926, 73, 53, 16, 7141, 16, 1005, 6776, 38140, 31764, 6692, 69, 1669, 650, 2712...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetMissing(t *testing.T) { storage := map[string]RESTStorage{} simpleStorage := SimpleRESTStorage{ errors: map[string]error{"get": apierrs.NewNotFound("simple", "id")}, } storage["simple"] = &simpleStorage handler := Handle(storage, codec, "/prefix/version", selfLinker) server := httptest.NewServer(handler) resp, err := http.Get(server.URL + "/prefix/version/simple/id") if err != nil { t.Errorf("unexpected error: %v", err) } if resp.StatusCode != http.StatusNotFound { t.Errorf("Unexpected response %#v", resp) } }
explode_data.jsonl/71494
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 1949, 25080, 1155, 353, 8840, 836, 8, 341, 197, 16172, 1669, 2415, 14032, 60, 38307, 5793, 16094, 1903, 6456, 5793, 1669, 8993, 38307, 5793, 515, 197, 73424, 25, 2415, 14032, 60, 841, 4913, 455, 788, 6330, 84735, 7121, 10372...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGocloak_RequestPermission(t *testing.T) { t.Parallel() cfg := GetConfig(t) client := NewClientWithDebug(t) SetUpTestUser(t, client) token, err := client.RequestPermission( cfg.GoCloak.ClientID, cfg.GoCloak.ClientSecret, cfg.GoCloak.Realm, cfg.GoCloak.UserName, cfg.GoCloak.Password, "Permission foo # 3") FailIfErr(t, err, "login failed") rptResult, err := client.RetrospectToken( token.AccessToken, cfg.GoCloak.ClientID, cfg.GoCloak.ClientSecret, cfg.GoCloak.Realm) t.Log(rptResult) FailIfErr(t, err, "inspection failed") FailIf(t, !rptResult.Active, "Inactive Token oO") }
explode_data.jsonl/79506
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 38, 509, 385, 585, 44024, 14966, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 50286, 1669, 2126, 2648, 1155, 340, 25291, 1669, 1532, 2959, 2354, 7939, 1155, 340, 22212, 2324, 2271, 1474, 1155, 11, 2943, 340, 43947, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_sumByteArray(t *testing.T) { bytez := sumByteArray([]byte{0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1}) assert.Equal(t, bytez, uint(20)) bytez2 := sumByteArray([]byte{0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5, 0x5}) assert.Equal(t, bytez2, uint(100)) }
explode_data.jsonl/45578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 10160, 18394, 1155, 353, 8840, 836, 8, 341, 31422, 89, 1669, 2629, 18394, 10556, 3782, 90, 15, 87, 16, 11, 220, 15, 87, 16, 11, 220, 15, 87, 16, 11, 220, 15, 87, 16, 11, 220, 15, 87, 16, 11, 220, 15, 87, 16, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainerSecretReferenceDestTarget(t *testing.T) { ref := &swarmtypes.SecretReference{ File: &swarmtypes.SecretReferenceFileTarget{ Name: "app", }, } d := getSecretTargetPath(ref) expected := filepath.Join(containerSecretMountPath, "app") if d != expected { t.Fatalf("expected secret dest %q; received %q", expected, d) } }
explode_data.jsonl/72826
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 4502, 19773, 8856, 34830, 6397, 1155, 353, 8840, 836, 8, 341, 59504, 1669, 609, 2280, 2178, 9242, 74779, 8856, 515, 197, 24848, 25, 609, 2280, 2178, 9242, 74779, 8856, 1703, 6397, 515, 298, 21297, 25, 330, 676, 756, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestValidateChartWithoutRevision validates an application whose source
// is a Helm chart with an empty TargetRevision against a permissive
// project (all repos/destinations allowed). Exactly one condition must
// be produced: an InvalidSpecError stating that targetRevision is
// required for helm chart sources.
func TestValidateChartWithoutRevision(t *testing.T) { conditions, err := ValidatePermissions(context.Background(), &argoappv1.ApplicationSpec{ Source: argoappv1.ApplicationSource{RepoURL: "https://charts.helm.sh/incubator/", Chart: "myChart", TargetRevision: ""}, Destination: argoappv1.ApplicationDestination{ Server: "https://kubernetes.default.svc", Namespace: "default", }, }, &argoappv1.AppProject{ Spec: argoappv1.AppProjectSpec{ SourceRepos: []string{"*"}, Destinations: []argoappv1.ApplicationDestination{{Server: "*", Namespace: "*"}}, }, }, nil) assert.NoError(t, err) assert.Equal(t, 1, len(conditions)) assert.Equal(t, argoappv1.ApplicationConditionInvalidSpecError, conditions[0].Type) assert.Equal(t, "spec.source.targetRevision is required if the manifest source is a helm chart", conditions[0].Message) }
explode_data.jsonl/49989
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 17926, 14488, 26040, 33602, 1155, 353, 8840, 836, 8, 341, 197, 16495, 11, 1848, 1669, 23282, 23851, 5378, 19047, 1507, 609, 12088, 676, 85, 16, 17521, 8327, 515, 197, 197, 3608, 25, 1392, 78, 676, 85, 16, 17521, 3608, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCertificateRequestOperationValidateInvalidDomainName(t *testing.T) { operation := certificateRequestOperation{ domainName: "z", // Invalid } errs := operation.validate() if len(errs) != 1 { t.Errorf("Invalid number of errors; want 1, got: %d", len(errs)) } if strings.Index(errs[0].Error(), "The domain name requires at least 2 octets") == -1 { t.Errorf("Unexpected error; want: 'The domain name requires at leasr 2 octets', got: %s", errs[0].Error()) } }
explode_data.jsonl/29939
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 164 }
[ 2830, 3393, 33202, 1900, 8432, 17926, 7928, 13636, 675, 1155, 353, 8840, 836, 8, 341, 197, 9262, 1669, 15748, 1900, 8432, 515, 197, 2698, 3121, 675, 25, 330, 89, 497, 442, 13882, 198, 197, 630, 9859, 82, 1669, 5666, 19520, 2822, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestKeeperPGConfDirBad spawns keeper processes with three invalid
// --pg-conf-dir values and expects each to report the matching startup
// error: a relative path ("must be an absolute path"), a nonexistent
// directory ("cannot stat pg-conf-dir:"), and a regular file ("is not a
// directory"). Shared fixtures: a temp dir, a cluster name and a test
// store whose endpoint is passed to each keeper.
func TestKeeperPGConfDirBad(t *testing.T) { t.Parallel() dir, err := ioutil.TempDir("", "") if err != nil { t.Fatalf("unexpected err: %v", err) } defer os.RemoveAll(dir) clusterName := uuid.NewV4().String() tstore, err := NewTestStore(t, dir) if err != nil { t.Fatalf("unexpected err: %v", err) } storeEndpoints := fmt.Sprintf("%s:%s", tstore.listenAddress, tstore.port) // Test pgConfDir not absolute path tk, err := NewTestKeeper(t, dir, clusterName, pgSUUsername, pgSUPassword, pgReplUsername, pgReplPassword, tstore.storeBackend, storeEndpoints, "--pg-conf-dir=not/absolute/path") if err != nil { t.Fatalf("unexpected err: %v", err) } if err := tk.StartExpect(); err != nil { t.Fatalf("unexpected err: %v", err) } defer tk.Stop() if err := tk.cmd.Expect("keeper: pg-conf-dir must be an absolute path"); err != nil { t.Fatalf("expecting keeper reporting error due to pg-conf-dir provided as a non absolute path") } // Test unexistent pgConfDir tk2, err := NewTestKeeper(t, dir, clusterName, pgSUUsername, pgSUPassword, pgReplUsername, pgReplPassword, tstore.storeBackend, storeEndpoints, "--pg-conf-dir=/unexistent-configuration-directory") if err != nil { t.Fatalf("unexpected err: %v", err) } if err := tk2.StartExpect(); err != nil { t.Fatalf("unexpected err: %v", err) } defer tk2.Stop() if err := tk2.cmd.Expect("keeper: cannot stat pg-conf-dir:"); err != nil { t.Fatalf("expecting keeper reporting error due to unexistent pg-conf-dir") } // Test pgConfDir is a file tmpFile, err := ioutil.TempFile(dir, "") if err != nil { t.Fatalf("unexpected err: %v", err) } defer func() { tmpFile.Close() os.Remove(tmpFile.Name()) }() tk3, err := NewTestKeeper(t, dir, clusterName, pgSUUsername, pgSUPassword, pgReplUsername, pgReplPassword, tstore.storeBackend, storeEndpoints, fmt.Sprintf("--pg-conf-dir=%s", tmpFile.Name())) if err != nil { t.Fatalf("unexpected err: %v", err) } if err := tk3.StartExpect(); err != nil { t.Fatalf("unexpected err: %v", err) } defer tk3.Stop() if err := 
tk3.cmd.Expect("keeper: pg-conf-dir is not a directory"); err != nil { t.Fatalf("expecting keeper reporting error due to pg-conf-dir being a file") } }
explode_data.jsonl/16430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 859 }
[ 2830, 3393, 77233, 11383, 15578, 6184, 17082, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 14676, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 53859, 1848, 25, 1018, 85, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_syncURLInvalidity(t *testing.T) { tests := []struct { name string repoURL string }{ {"invalid URL", "not-a-url"}, {"invalid URL", "https//google.com"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { _, _, err := getRepo("namespace", "test", tt.repoURL, "") assert.ExistsErr(t, err, tt.name) }) } }
explode_data.jsonl/67798
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 23008, 3144, 7928, 487, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 17200, 5368, 3144, 914, 198, 197, 59403, 197, 197, 4913, 11808, 5548, 497, 330, 1921, 7409, 25443, 7115, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAllKustomizationFileNames writes the same kustomization content
// under every recognized kustomization file name into an in-memory
// filesystem and checks that NewKustomizationFile resolves each one at
// the file name it was written under.
func TestAllKustomizationFileNames(t *testing.T) { kcontent := `
configMapGenerator:
- literals:
  - foo=bar
  - baz=qux
  name: my-configmap
` for _, n := range konfig.RecognizedKustomizationFileNames() { fSys := filesys.MakeFsInMemory() err := fSys.WriteFile(n, []byte(kcontent)) require.NoError(t, err) k, err := NewKustomizationFile(fSys) if err != nil { t.Fatalf("Unexpected Error: %v", err) } if k.path != n { t.Fatalf("Load incorrect file path %s", k.path) } } }
explode_data.jsonl/77185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 2403, 42, 1450, 2022, 1703, 7980, 1155, 353, 8840, 836, 8, 341, 16463, 1796, 1669, 22074, 1676, 2227, 12561, 510, 12, 75275, 510, 220, 481, 15229, 28, 2257, 198, 220, 481, 50247, 28, 446, 87, 198, 220, 829, 25, 847, 2513...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestByteSlice(t *testing.T) { full := []byte("abcde") bs := WrapByteSlice(full, 1) assert.EqualValues(t, full[1:], bs.Bytes()) assert.EqualValues(t, full, bs.BytesWithHeader()) assert.EqualValues(t, full, bs.Full()) bs = bs.ResliceTo(2) assert.EqualValues(t, full[1:3], bs.Bytes()) assert.EqualValues(t, full[:3], bs.BytesWithHeader()) assert.EqualValues(t, full, bs.Full()) bs = bs.ResliceTo(1) assert.EqualValues(t, full[1:2], bs.Bytes()) assert.EqualValues(t, full[:2], bs.BytesWithHeader()) assert.EqualValues(t, full, bs.Full()) }
explode_data.jsonl/36305
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 7153, 33236, 1155, 353, 8840, 836, 8, 341, 94042, 1669, 3056, 3782, 445, 13683, 450, 1138, 93801, 1669, 42187, 7153, 33236, 28907, 11, 220, 16, 340, 6948, 12808, 6227, 1155, 11, 2480, 58, 16, 45499, 17065, 36868, 2398, 6948,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTabletServerStartCommit drives StartCommit through the fake DB
// twice: first the PREPARE->COMMIT state-transition UPDATE reports one
// affected row and StartCommit succeeds; then the same UPDATE reports
// zero rows (transaction not in PREPARE) and StartCommit must fail with
// "could not transition to COMMIT: aa".
func TestTabletServerStartCommit(t *testing.T) { _, tsv, db := newTestTxExecutor(t) defer tsv.StopService() defer db.Close() target := querypb.Target{TabletType: topodatapb.TabletType_PRIMARY} commitTransition := fmt.Sprintf("update _vt.dt_state set state = %d where dtid = 'aa' and state = %d", int(querypb.TransactionState_COMMIT), int(querypb.TransactionState_PREPARE)) db.AddQuery(commitTransition, &sqltypes.Result{RowsAffected: 1}) txid := newTxForPrep(tsv) err := tsv.StartCommit(ctx, &target, txid, "aa") require.NoError(t, err) db.AddQuery(commitTransition, &sqltypes.Result{}) txid = newTxForPrep(tsv) err = tsv.StartCommit(ctx, &target, txid, "aa") assert.EqualError(t, err, "could not transition to COMMIT: aa", "Prepare err") }
explode_data.jsonl/79984
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 2556, 83, 5475, 3479, 33441, 1155, 353, 8840, 836, 8, 341, 197, 6878, 259, 3492, 11, 2927, 1669, 501, 2271, 31584, 25255, 1155, 340, 16867, 259, 3492, 30213, 1860, 741, 16867, 2927, 10421, 741, 28861, 1669, 3239, 16650, 3501...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFunction starts a Function service backed by an in-memory
// registry, waits for its AfterStart hook via a WaitGroup, calls its
// Debug.Health endpoint and expects status "ok", then checks Run's
// returned error from the channel. The Run goroutine's result is
// buffered so the test never blocks on send.
func TestFunction(t *testing.T) { var wg sync.WaitGroup wg.Add(1) r := memory.NewRegistry(memory.Services(test.Data)) // create service fn := NewFunction( Registry(r), Name("test.function"), AfterStart(func() error { wg.Done() return nil }), ) // we can't test fn.Init as it parses the command line // fn.Init() ch := make(chan error, 2) go func() { // run service ch <- fn.Run() }() // wait for start wg.Wait() // test call debug req := fn.Client().NewRequest( "test.function", "Debug.Health", new(proto.HealthRequest), ) rsp := new(proto.HealthResponse) err := fn.Client().Call(context.TODO(), req, rsp) if err != nil { t.Fatal(err) } if rsp.Status != "ok" { t.Fatalf("function response: %s", rsp.Status) } if err := <-ch; err != nil { t.Fatal(err) } }
explode_data.jsonl/77388
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 5152, 1155, 353, 8840, 836, 8, 341, 2405, 63581, 12811, 28384, 2808, 198, 72079, 1904, 7, 16, 692, 7000, 1669, 4938, 7121, 15603, 63230, 20089, 8623, 3336, 4390, 197, 322, 1855, 2473, 198, 40095, 1669, 1532, 5152, 1006, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew_unknownTransferEncoding(t *testing.T) { var h Header h.Set("Content-Transfer-Encoding", "i-dont-exist") expected := "hey there" r := strings.NewReader(expected) e, err := New(h, r) if err == nil { t.Fatal("New(unknown transfer encoding): expected an error") } if !IsUnknownEncoding(err) { t.Fatal("New(unknown transfer encoding): expected an error that verifies IsUnknownEncoding") } if !errors.As(err, &UnknownEncodingError{}) { t.Fatal("New(unknown transfer encoding): expected an error that verifies errors.As(err, &EncodingError{})") } if b, err := ioutil.ReadAll(e.Body); err != nil { t.Error("Expected no error while reading entity body, got", err) } else if s := string(b); s != expected { t.Errorf("Expected %q as entity body but got %q", expected, s) } }
explode_data.jsonl/30555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 3564, 57507, 21970, 14690, 1155, 353, 8840, 836, 8, 341, 2405, 305, 12104, 198, 9598, 4202, 445, 2762, 12, 21970, 67358, 497, 330, 72, 1737, 544, 10187, 380, 5130, 42400, 1669, 330, 35561, 1052, 698, 7000, 1669, 9069, 68587,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestHasParity table-tests hasParity across the fields it compares
// between a DeleteObjectsInput and a BatchDeleteObject (Bucket, MFA,
// RequestPayer): parity holds only when both sides are empty; any
// mismatch — differing values, or a value set on just one side — must
// yield false.
func TestHasParity(t *testing.T) { cases := []struct { o1 *s3.DeleteObjectsInput o2 BatchDeleteObject expected bool }{ { &s3.DeleteObjectsInput{}, BatchDeleteObject{ Object: &s3.DeleteObjectInput{}, }, true, }, { &s3.DeleteObjectsInput{ Bucket: aws.String("foo"), }, BatchDeleteObject{ Object: &s3.DeleteObjectInput{ Bucket: aws.String("bar"), }, }, false, }, { &s3.DeleteObjectsInput{}, BatchDeleteObject{ Object: &s3.DeleteObjectInput{ Bucket: aws.String("foo"), }, }, false, }, { &s3.DeleteObjectsInput{ Bucket: aws.String("foo"), }, BatchDeleteObject{ Object: &s3.DeleteObjectInput{}, }, false, }, { &s3.DeleteObjectsInput{ MFA: aws.String("foo"), }, BatchDeleteObject{ Object: &s3.DeleteObjectInput{ MFA: aws.String("bar"), }, }, false, }, { &s3.DeleteObjectsInput{}, BatchDeleteObject{ Object: &s3.DeleteObjectInput{ MFA: aws.String("foo"), }, }, false, }, { &s3.DeleteObjectsInput{ MFA: aws.String("foo"), }, BatchDeleteObject{ Object: &s3.DeleteObjectInput{}, }, false, }, { &s3.DeleteObjectsInput{ RequestPayer: s3.RequestPayer("foo"), }, BatchDeleteObject{ Object: &s3.DeleteObjectInput{ RequestPayer: s3.RequestPayerRequester, }, }, false, }, { &s3.DeleteObjectsInput{}, BatchDeleteObject{ Object: &s3.DeleteObjectInput{ RequestPayer: s3.RequestPayerRequester, }, }, false, }, { &s3.DeleteObjectsInput{ RequestPayer: s3.RequestPayerRequester, }, BatchDeleteObject{ Object: &s3.DeleteObjectInput{}, }, false, }, } for i, c := range cases { if result := hasParity(c.o1, c.o2); result != c.expected { t.Errorf("Case %d: expected %t, but received %t\n", i, c.expected, result) } } }
explode_data.jsonl/62306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 974 }
[ 2830, 3393, 10281, 4272, 487, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 22229, 16, 981, 353, 82, 18, 18872, 11543, 2505, 198, 197, 22229, 17, 981, 33904, 6435, 1190, 198, 197, 42400, 1807, 198, 197, 59403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestMatchQueryParams builds a query-parameter matcher from two header
// configs — an exact match on "key" and a regex match on "regex" — and
// checks Matches over several parameter sets: all configured params
// present (extras ignored) matches; a missing configured param or a
// regex mismatch does not.
func TestMatchQueryParams(t *testing.T) { qpm := queryParameterMatcherImpl{} configs := []v2.HeaderMatcher{ { Name: "key", Value: "value", }, { Name: "regex", Value: "[0-9]+", Regex: true, }, } for _, c := range configs { if kv, err := NewKeyValueData(c); err == nil { qpm = append(qpm, kv) } } for idx, querys := range []struct { params types.QueryParams expected bool }{ { params: types.QueryParams(map[string]string{ "key": "value", "regex": "12345", "empty": "any", }), expected: true, }, { params: types.QueryParams(map[string]string{ "key": "value", "regex": "12345", "empty": "", "ignore": "key", }), expected: true, }, { params: types.QueryParams(map[string]string{ "key": "value", }), expected: false, }, { params: types.QueryParams(map[string]string{ "key": "value", "regex": "abc", }), expected: false, }, } { if qpm.Matches(context.Background(), querys.params) != querys.expected { t.Fatalf("%d matched failed", idx) } } }
explode_data.jsonl/46019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 532 }
[ 2830, 3393, 8331, 2859, 4870, 1155, 353, 8840, 836, 8, 341, 18534, 5187, 1669, 3239, 4971, 37554, 9673, 16094, 25873, 82, 1669, 3056, 85, 17, 15753, 37554, 515, 197, 197, 515, 298, 21297, 25, 220, 330, 792, 756, 298, 47399, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTranslateUpstreamConfig(t *testing.T) { tr := &translator{} au := &configv1.ApisixUpstreamConfig{ LoadBalancer: nil, Scheme: apisixv1.SchemeGRPC, } ups, err := tr.TranslateUpstreamConfig(au) assert.Nil(t, err, "checking upstream config translating") assert.Equal(t, ups.Type, apisixv1.LbRoundRobin) assert.Equal(t, ups.Scheme, apisixv1.SchemeGRPC) au = &configv1.ApisixUpstreamConfig{ LoadBalancer: &configv1.LoadBalancer{ Type: apisixv1.LbConsistentHash, HashOn: apisixv1.HashOnHeader, Key: "user-agent", }, Scheme: apisixv1.SchemeHTTP, } ups, err = tr.TranslateUpstreamConfig(au) assert.Nil(t, err, "checking upstream config translating") assert.Equal(t, ups.Type, apisixv1.LbConsistentHash) assert.Equal(t, ups.Key, "user-agent") assert.Equal(t, ups.HashOn, apisixv1.HashOnHeader) assert.Equal(t, ups.Scheme, apisixv1.SchemeHTTP) au = &configv1.ApisixUpstreamConfig{ LoadBalancer: &configv1.LoadBalancer{ Type: apisixv1.LbConsistentHash, HashOn: apisixv1.HashOnHeader, Key: "user-agent", }, Scheme: "dns", } _, err = tr.TranslateUpstreamConfig(au) assert.Error(t, err, &translateError{ field: "scheme", reason: "invalid value", }) au = &configv1.ApisixUpstreamConfig{ LoadBalancer: &configv1.LoadBalancer{ Type: "hash", }, } _, err = tr.TranslateUpstreamConfig(au) assert.Error(t, err, &translateError{ field: "loadbalancer.type", reason: "invalid value", }) au = &configv1.ApisixUpstreamConfig{ LoadBalancer: &configv1.LoadBalancer{ Type: apisixv1.LbConsistentHash, HashOn: "arg", }, } _, err = tr.TranslateUpstreamConfig(au) assert.Error(t, err, &translateError{ field: "loadbalancer.hashOn", reason: "invalid value", }) }
explode_data.jsonl/9413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 775 }
[ 2830, 3393, 27473, 2324, 4027, 2648, 1155, 353, 8840, 836, 8, 341, 25583, 1669, 609, 44357, 31483, 197, 2863, 1669, 609, 1676, 85, 16, 96252, 941, 2324, 4027, 2648, 515, 197, 197, 5879, 93825, 25, 2092, 345, 197, 7568, 8058, 25, 981, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMaxWithArrayInputWithStrings(t *testing.T) { //Test Data d1 := []string{"abc", "abd", "cbd"} d2 := []string{"abc", "abd", "abe"} d3 := []string{"abc", "foo", " "} d4 := []string{"abc", "abc", "aaa"} n1 := []string{} //Calls r1 := MaxString(d1) r2 := MaxString(d2) r3 := MaxString(d3) r4 := MaxString(d4) c1 := MaxString(n1) // Assertions assert.Equal(t, "cbd", r1, "It should print cbd because its first char is max in the list") assert.Equal(t, "abe", r2, "It should print abe because its first different char is max in the list") assert.Equal(t, "foo", r3, "It should print foo because its first different char is max in the list") assert.Equal(t, "abc", r4, "It should print abc because its first different char is max in the list") assert.Equal(t, nil, c1, "It should return nil") }
explode_data.jsonl/5183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 5974, 2354, 1857, 2505, 2354, 20859, 1155, 353, 8840, 836, 8, 341, 197, 322, 2271, 2885, 198, 2698, 16, 1669, 3056, 917, 4913, 13683, 497, 330, 85920, 497, 330, 90749, 16707, 2698, 17, 1669, 3056, 917, 4913, 13683, 497, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKazaamMultipleTransforms(t *testing.T) { jsonOut1 := `{"Rating":3,"example":{"old":{"value":3}}}` jsonOut2 := `{"rating":{"example":{"value":3},"primary":{"value":3}},"Range":5}` spec1 := `[{"operation": "shift", "spec": {"Rating": "rating.primary.value", "example.old": "rating.example"}}]` spec2 := `[{"operation": "default", "spec": {"Range": 5}}]` transform1, _ := kazaam.NewKazaam(spec1) kazaamOut1, _ := transform1.TransformJSONStringToString(testJSONInput) areEqual1, _ := checkJSONStringsEqual(kazaamOut1, jsonOut1) transform2, _ := kazaam.NewKazaam(spec2) kazaamOut2, _ := transform2.TransformJSONStringToString(testJSONInput) areEqual2, _ := checkJSONStringsEqual(kazaamOut2, jsonOut2) if !areEqual1 { t.Error("Transformed data does not match expectation.") t.Log("Expected: ", jsonOut1) t.Log("Actual: ", kazaamOut1) t.FailNow() } if !areEqual2 { t.Error("Transformed data does not match expectation.") t.Log("Expected: ", jsonOut2) t.Log("Actual: ", kazaamOut2) t.FailNow() } }
explode_data.jsonl/11852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 403 }
[ 2830, 3393, 42, 12707, 309, 32089, 8963, 82, 1155, 353, 8840, 836, 8, 341, 30847, 2662, 16, 1669, 1565, 4913, 22152, 788, 18, 1335, 8687, 22317, 813, 22317, 957, 788, 18, 3417, 31257, 30847, 2662, 17, 1669, 1565, 4913, 21931, 22317, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRedisPool(t *testing.T) { conf := &gredigo.RedisConf{ Host: "127.0.0.1", Port: 6379, MaxIdle: 100, MaxActive: 200, IdleTimeout: 240, } // 建立连接 conf.SetRedisPool("default") var wg sync.WaitGroup for i := 0; i < 20000; i++ { wg.Add(1) go func() { defer wg.Done() client := gredigo.GetRedisClient("default") defer client.Close() ok, err := client.Do("set", "myname", "daheige") fmt.Println(ok, err) value, _ := redis.String(client.Do("get", "myname")) fmt.Println("myname:", value) // 切换到database 1上面操作 v, err := client.Do("Select", 1) fmt.Println(v, err) _, _ = client.Do("lpush", "myList", 123) }() } wg.Wait() log.Println("exec success...") }
explode_data.jsonl/52258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 48137, 10551, 1155, 353, 8840, 836, 8, 341, 67850, 1669, 609, 70, 1151, 7836, 82398, 15578, 515, 197, 197, 9296, 25, 286, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 756, 197, 98459, 25, 286, 220, 21, 18, 22, 24, 345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMarshaling(t *testing.T) { for _, tt := range marshalingTests { json, err := tt.marshaler.MarshalToString(tt.pb) if err != nil { t.Errorf("%s: marshaling error: %v", tt.desc, err) } else if tt.json != json { t.Errorf("%s: got [%v] want [%v]", tt.desc, json, tt.json) } } }
explode_data.jsonl/63113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 79712, 6132, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 31996, 6132, 18200, 341, 197, 30847, 11, 1848, 1669, 17853, 744, 28423, 261, 37271, 5870, 47152, 37916, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDownloadImages(t *testing.T) { const k8sVersion = "99.YY.ZZ" images := []string{"foo.io/sonobuoy/test:1.0"} tests := map[string]struct { client docker.Docker wantFileName string wantError bool }{ "simple": { client: FakeDockerClient{ saveFails: false, }, wantFileName: getTarFileName(k8sVersion), wantError: false, }, "fail": { client: FakeDockerClient{ saveFails: true, }, wantFileName: "", wantError: true, }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { imgClient := ImageClient{ dockerClient: tc.client, } gotFilename, gotErr := imgClient.DownloadImages(images, k8sVersion) if gotErr != nil && tc.wantError != true { t.Fatalf("Got unexpected error: %v", gotErr) } if gotFilename != tc.wantFileName { t.Fatalf("Expected filename: %s but got: %s", tc.wantFileName, gotFilename) } }) } }
explode_data.jsonl/61548
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 405 }
[ 2830, 3393, 11377, 14228, 1155, 353, 8840, 836, 8, 341, 4777, 595, 23, 82, 5637, 284, 330, 24, 24, 7507, 56, 13476, 57, 698, 197, 3642, 1669, 3056, 917, 4913, 7975, 4245, 14, 930, 674, 84, 2253, 12697, 25, 16, 13, 15, 63159, 78216...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStdCopyReturnsWriteErrors(t *testing.T) { stdOutBytes := []byte(strings.Repeat("o", startingBufLen)) stdErrBytes := []byte(strings.Repeat("e", startingBufLen)) buffer, err := getSrcBuffer(stdOutBytes, stdErrBytes) if err != nil { t.Fatal(err) } expectedError := errors.New("expected") dstOut := &errWriter{err: expectedError} written, err := StdCopy(dstOut, ioutil.Discard, buffer) if written != 0 { t.Fatalf("StdCopy should have written 0, but has written %d", written) } if err != expectedError { t.Fatalf("Didn't get expected error, got %v", err) } }
explode_data.jsonl/52164
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 22748, 12106, 16446, 7985, 13877, 1155, 353, 8840, 836, 8, 341, 6736, 2662, 7078, 1669, 3056, 3782, 51442, 2817, 10979, 445, 78, 497, 5916, 15064, 11271, 1171, 6736, 7747, 7078, 1669, 3056, 3782, 51442, 2817, 10979, 445, 68, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSortedISOCtrlSlice(t *testing.T) { // given attrs := map[string]*sysl.Attribute{ "iso_ctrl_11_txt": {}, "iso_ctrl_12_txt": {}, "iso_ctrl_5_txt": {}, } // when actual := getSortedISOCtrlSlice(attrs) // then assert.Equal(t, []string{"11", "12", "5"}, actual) }
explode_data.jsonl/58731
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 51051, 1637, 7612, 9599, 33236, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 197, 20468, 1669, 2415, 14032, 8465, 7791, 75, 33775, 515, 197, 197, 1, 15420, 22667, 62, 16, 16, 22436, 788, 14573, 197, 197, 1, 15420, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Chain(t *testing.T) { var ( chainID = *utils.NewBigI(1) nodeID = int32(200) query = ` query GetChain { chain(id: "1") { id enabled createdAt nodes { id } } } ` ) testCases := []GQLTestCase{ unauthorizedTestCase(GQLTestCase{query: query}, "chain"), { name: "success", authenticated: true, before: func(f *gqlTestFramework) { f.App.On("EVMORM").Return(f.Mocks.evmORM) f.Mocks.evmORM.On("Chain", chainID).Return(types.Chain{ ID: chainID, Enabled: true, CreatedAt: f.Timestamp(), }, nil) f.Mocks.evmORM.On("GetNodesByChainIDs", []utils.Big{chainID}). Return([]types.Node{ { ID: nodeID, EVMChainID: chainID, }, }, nil) }, query: query, result: ` { "chain": { "id": "1", "enabled": true, "createdAt": "2021-01-01T00:00:00Z", "nodes": [{ "id": "200" }] } }`, }, } RunGQLTests(t, testCases) }
explode_data.jsonl/48069
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 593 }
[ 2830, 3393, 27588, 466, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 197, 8819, 915, 284, 353, 6031, 7121, 15636, 40, 7, 16, 340, 197, 20831, 915, 220, 284, 526, 18, 17, 7, 17, 15, 15, 340, 197, 27274, 256, 284, 22074, 298, 27...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckSenderHostname(t *testing.T) { defaultHostname := "default-host" for nb, tc := range []struct { defaultHostnameDisabled bool submittedHostname string expectedHostname string }{ { defaultHostnameDisabled: false, submittedHostname: "", expectedHostname: defaultHostname, }, { defaultHostnameDisabled: false, submittedHostname: "custom", expectedHostname: "custom", }, { defaultHostnameDisabled: true, submittedHostname: "", expectedHostname: "", }, { defaultHostnameDisabled: true, submittedHostname: "custom", expectedHostname: "custom", }, } { t.Run(fmt.Sprintf("case %d: %q -> %q", nb, tc.submittedHostname, tc.expectedHostname), func(t *testing.T) { senderMetricSampleChan := make(chan senderMetricSample, 10) serviceCheckChan := make(chan metrics.ServiceCheck, 10) eventChan := make(chan metrics.Event, 10) bucketChan := make(chan senderHistogramBucket, 10) orchestratorChan := make(chan senderOrchestratorMetadata, 10) checkSender := newCheckSender(checkID1, defaultHostname, senderMetricSampleChan, serviceCheckChan, eventChan, bucketChan, orchestratorChan) checkSender.DisableDefaultHostname(tc.defaultHostnameDisabled) checkSender.Gauge("my.metric", 1.0, tc.submittedHostname, []string{"foo", "bar"}) checkSender.Commit() checkSender.ServiceCheck("my_service.can_connect", metrics.ServiceCheckOK, tc.submittedHostname, []string{"foo", "bar"}, "message") submittedEvent := metrics.Event{ Title: "Something happened", Text: "Description of the event", Ts: 12, Priority: metrics.EventPriorityLow, Host: tc.submittedHostname, Tags: []string{"foo", "bar"}, AlertType: metrics.EventAlertTypeInfo, AggregationKey: "event_agg_key", SourceTypeName: "docker", } checkSender.Event(submittedEvent) gaugeSenderSample := <-senderMetricSampleChan assert.EqualValues(t, checkID1, gaugeSenderSample.id) assert.Equal(t, metrics.GaugeType, gaugeSenderSample.metricSample.Mtype) assert.Equal(t, tc.expectedHostname, gaugeSenderSample.metricSample.Host) assert.Equal(t, false, 
gaugeSenderSample.commit) serviceCheck := <-serviceCheckChan assert.Equal(t, "my_service.can_connect", serviceCheck.CheckName) assert.Equal(t, metrics.ServiceCheckOK, serviceCheck.Status) assert.Equal(t, tc.expectedHostname, serviceCheck.Host) assert.Equal(t, []string{"foo", "bar"}, serviceCheck.Tags) assert.Equal(t, "message", serviceCheck.Message) event := <-eventChan assert.Equal(t, "Something happened", event.Title) assert.Equal(t, int64(12), event.Ts) assert.Equal(t, tc.expectedHostname, event.Host) assert.Equal(t, []string{"foo", "bar"}, event.Tags) }) } }
explode_data.jsonl/78307
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1159 }
[ 2830, 3393, 3973, 20381, 88839, 1155, 353, 8840, 836, 8, 341, 11940, 88839, 1669, 330, 2258, 38589, 1837, 2023, 16649, 11, 17130, 1669, 2088, 3056, 1235, 341, 197, 11940, 88839, 25907, 1807, 198, 197, 28624, 5483, 88839, 981, 914, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcilableSSH(t *testing.T) { // Check that updating SSH Key of user core supported oldIgnCfg := ctrlcommon.NewIgnConfig() oldMcfg := helpers.CreateMachineConfigFromIgnition(oldIgnCfg) tempUser1 := ign3types.PasswdUser{Name: "core", SSHAuthorizedKeys: []ign3types.SSHAuthorizedKey{"5678", "abc"}} newIgnCfg := ctrlcommon.NewIgnConfig() newIgnCfg.Passwd.Users = []ign3types.PasswdUser{tempUser1} newMcfg := helpers.CreateMachineConfigFromIgnition(newIgnCfg) _, errMsg := reconcilable(oldMcfg, newMcfg) checkReconcilableResults(t, "SSH", errMsg) // Check that updating User with User that is not core is not supported tempUser2 := ign3types.PasswdUser{Name: "core", SSHAuthorizedKeys: []ign3types.SSHAuthorizedKey{"1234"}} oldIgnCfg.Passwd.Users = append(oldIgnCfg.Passwd.Users, tempUser2) oldMcfg = helpers.CreateMachineConfigFromIgnition(oldIgnCfg) tempUser3 := ign3types.PasswdUser{Name: "another user", SSHAuthorizedKeys: []ign3types.SSHAuthorizedKey{"5678"}} newIgnCfg.Passwd.Users[0] = tempUser3 newMcfg = helpers.CreateMachineConfigFromIgnition(newIgnCfg) _, errMsg = reconcilable(oldMcfg, newMcfg) checkIrreconcilableResults(t, "SSH", errMsg) // check that we cannot make updates if any other Passwd.User field is changed. 
tempUser4 := ign3types.PasswdUser{Name: "core", SSHAuthorizedKeys: []ign3types.SSHAuthorizedKey{"5678"}, HomeDir: helpers.StrToPtr("somedir")} newIgnCfg.Passwd.Users[0] = tempUser4 newMcfg = helpers.CreateMachineConfigFromIgnition(newIgnCfg) _, errMsg = reconcilable(oldMcfg, newMcfg) checkIrreconcilableResults(t, "SSH", errMsg) // check that we cannot add a user or have len(Passwd.Users)> 1 tempUser5 := ign3types.PasswdUser{Name: "some user", SSHAuthorizedKeys: []ign3types.SSHAuthorizedKey{"5678"}} newIgnCfg.Passwd.Users = append(newIgnCfg.Passwd.Users, tempUser5) newMcfg = helpers.CreateMachineConfigFromIgnition(newIgnCfg) _, errMsg = reconcilable(oldMcfg, newMcfg) checkIrreconcilableResults(t, "SSH", errMsg) // check that user is not attempting to remove the only sshkey from core user tempUser6 := ign3types.PasswdUser{Name: "core", SSHAuthorizedKeys: []ign3types.SSHAuthorizedKey{}} newIgnCfg.Passwd.Users[0] = tempUser6 newIgnCfg.Passwd.Users = newIgnCfg.Passwd.Users[:len(newIgnCfg.Passwd.Users)-1] newMcfg = helpers.CreateMachineConfigFromIgnition(newIgnCfg) _, errMsg = reconcilable(oldMcfg, newMcfg) checkIrreconcilableResults(t, "SSH", errMsg) //check that empty Users does not cause panic newIgnCfg.Passwd.Users = nil newMcfg = helpers.CreateMachineConfigFromIgnition(newIgnCfg) _, errMsg = reconcilable(oldMcfg, newMcfg) checkIrreconcilableResults(t, "SSH", errMsg) }
explode_data.jsonl/28210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 969 }
[ 2830, 3393, 693, 40446, 321, 480, 62419, 1155, 353, 8840, 836, 8, 341, 197, 322, 4248, 429, 21010, 40463, 5309, 315, 1196, 6200, 7248, 198, 61828, 43337, 42467, 1669, 23743, 5464, 7121, 43337, 2648, 741, 61828, 44, 14072, 1669, 30187, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStartInformersSuccess(t *testing.T) { errCh := make(chan error) defer close(errCh) fi := &fixedInformer{sunk: true} stopCh := make(chan struct{}) defer close(stopCh) go func() { errCh <- StartInformers(stopCh, fi) }() select { case err := <-errCh: if err != nil { t.Error("Unexpected error:", err) } case <-time.After(time.Second): t.Error("Timed out waiting for informers to sync.") } }
explode_data.jsonl/45295
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 3479, 37891, 388, 7188, 1155, 353, 8840, 836, 8, 341, 9859, 1143, 1669, 1281, 35190, 1465, 340, 16867, 3265, 3964, 1143, 692, 30839, 1669, 609, 22021, 641, 34527, 84386, 3122, 25, 830, 630, 62644, 1143, 1669, 1281, 35190, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequestOnMetadata(t *testing.T) { ts := &testServer{ t: t, tokens: []string{"firstToken", "secondToken"}, data: "profile_name", } server := newTestServer(t, SecureTestType, ts) defer server.Close() c := ec2metadata.New(unit.Session, &aws.Config{ Endpoint: aws.String(server.URL), }) req := c.NewRequest(&request.Operation{ Name: "Ec2Metadata request", HTTPMethod: "GET", HTTPPath: "/latest/foo", Paginator: nil, BeforePresignFn: nil, }, nil, nil) op := &operationListProvider{} c.Handlers.Complete.PushBack(op.addToOperationPerformedList) err := req.Send() if err != nil { t.Fatalf("expect no error, got %v", err) } if len(op.operationsPerformed) < 1 { t.Fatalf("Expected atleast one operation GetToken to be called on EC2Metadata client") return } if op.operationsPerformed[0] != "GetToken" { t.Fatalf("Expected GetToken operation to be called") } }
explode_data.jsonl/17847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 389 }
[ 2830, 3393, 1900, 1925, 14610, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 609, 1944, 5475, 515, 197, 3244, 25, 414, 259, 345, 197, 3244, 9713, 25, 3056, 917, 4913, 3896, 3323, 497, 330, 5569, 3323, 7115, 197, 8924, 25, 256, 330, 536...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_Mock_On_WithVariadicFuncWithEmptyInterfaceArray(t *testing.T) { // make a test impl object var mockedService = new(TestExampleImplementation) var expected []interface{} c := mockedService. On("TheExampleMethodVariadicInterface", expected). Return(nil) assert.Equal(t, []*Call{c}, mockedService.ExpectedCalls) assert.Equal(t, 1, len(c.Arguments)) assert.Equal(t, expected, c.Arguments[0]) assert.NotPanics(t, func() { mockedService.TheExampleMethodVariadicInterface() }) assert.Panics(t, func() { mockedService.TheExampleMethodVariadicInterface(1, 2) }) }
explode_data.jsonl/8576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 1245, 1176, 35482, 62, 2354, 56135, 36214, 9626, 2354, 3522, 5051, 1857, 1155, 353, 8840, 836, 8, 8022, 197, 322, 1281, 264, 1273, 11605, 1633, 319, 2405, 46149, 1860, 284, 501, 31159, 13314, 36850, 7229, 2405, 3601, 3056, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestComments(t *testing.T) { data := strings.NewReplacer("\t", "", "\n", "").Replace(` <comments xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main"> <authors> <author>Microsoft Office User</author> </authors> <commentList> <comment ref="C6" authorId="0" shapeId="0"> <text> <r> <rPr> <rFont val="Tahoma"></rFont> <family val="2"></family> <color rgb="FF000000"></color> <sz val="10"></sz> </rPr> <t>My Comment1</t> </r> </text> </comment> <comment ref="C7" authorId="0"> <text> <t>My Comment2</t> </text> </comment> </commentList> </comments> `) shapeID := 0 decoder := xml.NewDecoder(bytes.NewReader([]byte(data))) comments := &ml.Comments{} err := decoder.DecodeElement(comments, nil) require.Nil(t, err) require.Equal(t, &ml.Comments{ XMLName: xml.Name{ Space: "http://schemas.openxmlformats.org/spreadsheetml/2006/main", Local: "comments", }, Authors: []primitives.Text{ "Microsoft Office User", }, CommentList: []*ml.Comment{ { Ref: primitives.Ref("C6").ToBounds(), AuthorID: 0, ShapeID: &shapeID, Text: &ml.StringItem{ Text: "", RichText: []*ml.RichText{ { Text: "My Comment1", Font: &ml.RichFont{ Size: 10, Color: &ml.Color{RGB: "FF000000"}, Name: "Tahoma", Family: 2, }, }, }, }, }, { Ref: primitives.Ref("C7").ToBounds(), AuthorID: 0, Text: &ml.StringItem{ Text: "My Comment2", }, }, }, }, comments) //encode data should be same as original encode, err := xml.Marshal(comments) require.Nil(t, err) require.Equal(t, data, string(encode)) }
explode_data.jsonl/40921
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 900 }
[ 2830, 3393, 17373, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 9069, 7121, 693, 90212, 4921, 83, 497, 7342, 2917, 77, 497, 35229, 23107, 61528, 197, 197, 27, 14727, 24967, 428, 1254, 1110, 56543, 5826, 6455, 63482, 2659, 26734, 878, 15119...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNCName(t *testing.T) { testdata := []struct { input token output bool }{ {token{tokNumber, 1}, false}, } for _, td := range testdata { got := td.input.isNCName() if expected := td.output; got != expected { t.Errorf("%v isNCName() = %t, want %t", td.input, got, expected) } } }
explode_data.jsonl/36792
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 9949, 675, 1155, 353, 8840, 836, 8, 341, 18185, 691, 1669, 3056, 1235, 341, 197, 22427, 220, 3950, 198, 197, 21170, 1807, 198, 197, 59403, 197, 197, 90, 5839, 90, 29594, 2833, 11, 220, 16, 2137, 895, 1583, 197, 532, 2023...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCoverIsTransform(t *testing.T) { defer func() { r := recover() assert.NotNil(t, r) }() f := &codegen.FieldAssignment{ FromIdentifier: "gg.field", ToIdentifier: "lol.field", } f.IsTransform() }
explode_data.jsonl/62091
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 30896, 3872, 8963, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 7000, 1669, 11731, 741, 197, 6948, 93882, 1155, 11, 435, 340, 197, 69826, 1166, 1669, 609, 95859, 17087, 41613, 515, 197, 197, 3830, 8714, 25, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestHostQueueWriteBatchesPartialBatchErrs(t *testing.T) { for _, opts := range []Options{ newHostQueueTestOptions().SetUseV2BatchAPIs(false), newHostQueueTestOptions().SetUseV2BatchAPIs(true), } { t.Run(fmt.Sprintf("useV2: %v", opts.UseV2BatchAPIs()), func(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockConnPool := NewMockconnectionPool(ctrl) opts = opts.SetHostQueueOpsFlushSize(2) queue := newTestHostQueue(opts) queue.connPool = mockConnPool // Open mockConnPool.EXPECT().Open() queue.Open() assert.Equal(t, statusOpen, queue.status) // Prepare writes var wg sync.WaitGroup writeErr := "a write error" writes := []*writeOperation{ testWriteOp("testNs", "foo", 1.0, 1000, rpc.TimeType_UNIX_SECONDS, func(r interface{}, err error) { assert.Error(t, err) rpcErr, ok := err.(*rpc.Error) assert.True(t, ok) assert.Equal(t, rpc.ErrorType_INTERNAL_ERROR, rpcErr.Type) assert.Equal(t, writeErr, rpcErr.Message) wg.Done() }), testWriteOp("testNs", "bar", 2.0, 2000, rpc.TimeType_UNIX_SECONDS, func(r interface{}, err error) { assert.NoError(t, err) wg.Done() }), } wg.Add(len(writes)) // Prepare mocks for flush mockClient := rpc.NewMockTChanNode(ctrl) batchErrs := &rpc.WriteBatchRawErrors{Errors: []*rpc.WriteBatchRawError{ {Index: 0, Err: &rpc.Error{ Type: rpc.ErrorType_INTERNAL_ERROR, Message: writeErr, }}, }} if opts.UseV2BatchAPIs() { writeBatch := func(ctx thrift.Context, req *rpc.WriteBatchRawV2Request) { for i, write := range writes { assert.Equal(t, req.Elements[i].NameSpace, int64(0)) assert.Equal(t, req.Elements[i].ID, write.request.ID) assert.Equal(t, req.Elements[i].Datapoint, write.request.Datapoint) } } mockClient.EXPECT().WriteBatchRawV2(gomock.Any(), gomock.Any()).Do(writeBatch).Return(batchErrs) } else { writeBatch := func(ctx thrift.Context, req *rpc.WriteBatchRawRequest) { for i, write := range writes { assert.Equal(t, req.Elements[i].ID, write.request.ID) assert.Equal(t, req.Elements[i].Datapoint, write.request.Datapoint) } } 
mockClient.EXPECT().WriteBatchRaw(gomock.Any(), gomock.Any()).Do(writeBatch).Return(batchErrs) } mockConnPool.EXPECT().NextClient().Return(mockClient, &noopPooledChannel{}, nil) // Perform writes for _, write := range writes { assert.NoError(t, queue.Enqueue(write)) } // Wait for flush wg.Wait() // Close var closeWg sync.WaitGroup closeWg.Add(1) mockConnPool.EXPECT().Close().Do(func() { closeWg.Done() }) queue.Close() closeWg.Wait() }) } }
explode_data.jsonl/54466
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1250 }
[ 2830, 3393, 9296, 7554, 7985, 33, 9118, 37314, 21074, 7747, 82, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 12185, 1669, 2088, 3056, 3798, 515, 197, 8638, 9296, 7554, 2271, 3798, 1005, 1649, 10253, 53, 17, 21074, 7082, 82, 3576, 1326, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrefetchPointKeys(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() cc := &clientConn{ alloc: arena.NewAllocator(1024), chunkAlloc: chunk.NewAllocator(), pkt: &packetIO{ bufWriter: bufio.NewWriter(bytes.NewBuffer(nil)), }, } tk := testkit.NewTestKit(t, store) cc.setCtx(&TiDBContext{Session: tk.Session()}) ctx := context.Background() tk.Session().GetSessionVars().EnableClusteredIndex = variable.ClusteredIndexDefModeIntOnly tk.MustExec("use test") tk.MustExec("create table prefetch (a int, b int, c int, primary key (a, b))") tk.MustExec("insert prefetch values (1, 1, 1), (2, 2, 2), (3, 3, 3)") tk.MustExec("begin optimistic") tk.MustExec("update prefetch set c = c + 1 where a = 2 and b = 2") // enable multi-statement capabilities := cc.ctx.GetSessionVars().ClientCapability capabilities ^= mysql.ClientMultiStatements cc.ctx.SetClientCapability(capabilities) query := "update prefetch set c = c + 1 where a = 1 and b = 1;" + "update prefetch set c = c + 1 where a = 2 and b = 2;" + "update prefetch set c = c + 1 where a = 3 and b = 3;" err := cc.handleQuery(ctx, query) require.NoError(t, err) txn, err := tk.Session().Txn(false) require.NoError(t, err) require.True(t, txn.Valid()) snap := txn.GetSnapshot() require.Equal(t, 4, snap.(snapshotCache).SnapCacheHitCount()) tk.MustExec("commit") tk.MustQuery("select * from prefetch").Check(testkit.Rows("1 1 2", "2 2 4", "3 3 4")) tk.MustExec("begin pessimistic") tk.MustExec("update prefetch set c = c + 1 where a = 2 and b = 2") require.Equal(t, 1, tk.Session().GetSessionVars().TxnCtx.PessimisticCacheHit) err = cc.handleQuery(ctx, query) require.NoError(t, err) txn, err = tk.Session().Txn(false) require.NoError(t, err) require.True(t, txn.Valid()) require.Equal(t, 5, tk.Session().GetSessionVars().TxnCtx.PessimisticCacheHit) tk.MustExec("commit") tk.MustQuery("select * from prefetch").Check(testkit.Rows("1 1 3", "2 2 6", "3 3 5")) }
explode_data.jsonl/73155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 766 }
[ 2830, 3393, 29978, 2995, 2609, 8850, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 63517, 1669, 609, 2972, 9701, 515, 197, 197, 4742, 25, 414, 24902, 7121, 42730, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDoguRequeueHandler_Handle covers the handler built by
// NewDoguRequeueHandler for four error shapes: a nil error, a plain
// non-requeueable error, a failure while reporting the error, and a
// requeueable error that must produce a delayed-requeue result.
func TestDoguRequeueHandler_Handle(t *testing.T) {
	t.Run("handle nil error", func(t *testing.T) {
		// given
		reporter := &mocks.StatusReporter{}
		fakeClient := fake.NewClientBuilder().WithScheme(&runtime.Scheme{}).Build()
		handler := NewDoguRequeueHandler(fakeClient, reporter)
		doguResource := &k8sv1.Dogu{
			ObjectMeta: metav1.ObjectMeta{Name: "myName"},
			Status:     k8sv1.DoguStatus{},
		}
		// when
		result, err := handler.Handle(context.Background(), "my context", doguResource, nil)
		// then: no error means no requeue and untouched status messages.
		require.NoError(t, err)
		assert.False(t, result.Requeue)
		assert.Equal(t, result.RequeueAfter, time.Duration(0))
		assert.Nil(t, doguResource.Status.StatusMessages)
	})
	t.Run("handle non reportable error", func(t *testing.T) {
		// given
		reporter := &mocks.StatusReporter{}
		reporter.On("ReportError", mock.Anything, mock.Anything, mock.Anything).Return(nil)
		fakeClient := fake.NewClientBuilder().WithScheme(&runtime.Scheme{}).Build()
		handler := NewDoguRequeueHandler(fakeClient, reporter)
		doguResource := &k8sv1.Dogu{
			ObjectMeta: metav1.ObjectMeta{Name: "myName"},
			Status:     k8sv1.DoguStatus{},
		}
		myError := fmt.Errorf("this is my error")
		// when
		result, err := handler.Handle(context.Background(), "my context", doguResource, myError)
		// then: the original error is passed through and nothing is requeued.
		require.Error(t, err)
		assert.Contains(t, err.Error(), "this is my error")
		assert.False(t, result.Requeue)
		assert.Equal(t, result.RequeueAfter, time.Duration(0))
		assert.Nil(t, doguResource.Status.StatusMessages)
		mock.AssertExpectationsForObjects(t, reporter)
	})
	t.Run("error on reporting error", func(t *testing.T) {
		// given: the reporter itself fails when asked to report.
		myReportError := fmt.Errorf("this is my report error")
		reporter := &mocks.StatusReporter{}
		reporter.On("ReportError", mock.Anything, mock.Anything, mock.Anything).Return(myReportError)
		fakeClient := fake.NewClientBuilder().WithScheme(&runtime.Scheme{}).Build()
		handler := NewDoguRequeueHandler(fakeClient, reporter)
		doguResource := &k8sv1.Dogu{
			ObjectMeta: metav1.ObjectMeta{Name: "myName"},
			Status:     k8sv1.DoguStatus{},
		}
		myError := fmt.Errorf("this is my error")
		// when
		result, err := handler.Handle(context.Background(), "my context", doguResource, myError)
		// then: the reporting failure is wrapped into the returned error.
		require.Error(t, err)
		assert.Contains(t, err.Error(), "failed to report error: this is my report error")
		assert.False(t, result.Requeue)
		assert.Equal(t, result.RequeueAfter, time.Duration(0))
		assert.Nil(t, doguResource.Status.StatusMessages)
		mock.AssertExpectationsForObjects(t, reporter)
	})
	t.Run("handle with requeueable error", func(t *testing.T) {
		// given: a real scheme/object so the fake client can update the resource.
		reporter := &mocks.StatusReporter{}
		reporter.On("ReportError", mock.Anything, mock.Anything, mock.Anything).Return(nil)
		scheme := runtime.NewScheme()
		scheme.AddKnownTypeWithName(schema.GroupVersionKind{
			Group:   "k8s.cloudogu.com",
			Version: "v1",
			Kind:    "Dogu",
		}, &k8sv1.Dogu{})
		doguResource := &k8sv1.Dogu{
			ObjectMeta: metav1.ObjectMeta{Name: "myName"},
			Status:     k8sv1.DoguStatus{},
		}
		fakeClient := fake.NewClientBuilder().WithScheme(scheme).WithObjects(doguResource).Build()
		handler := NewDoguRequeueHandler(fakeClient, reporter)
		myError := myRequeueableError{}
		// when
		result, err := handler.Handle(context.Background(), "my context", doguResource, myError)
		// then: a requeueable error is swallowed and converted into a
		// delayed requeue (10s) instead of being returned.
		require.NoError(t, err)
		assert.False(t, result.Requeue)
		assert.Equal(t, result.RequeueAfter, time.Second*10)
		mock.AssertExpectationsForObjects(t, reporter)
	})
}
explode_data.jsonl/65070
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1335 }
[ 2830, 3393, 48940, 84, 693, 4584, 3050, 42714, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 8192, 2092, 1465, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 2661, 198, 197, 69931, 261, 1669, 609, 16712, 82, 10538, 52766, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBreakpointCounts sets a breakpoint in the bpcountstest fixture,
// continues the target until the process exits, and then validates the
// recorded hit statistics: 200 total hits, spread over exactly two
// goroutines with 100 hits each.
func TestBreakpointCounts(t *testing.T) {
	skipOn(t, "broken", "freebsd")
	protest.AllowRecording(t)
	withTestProcess("bpcountstest", t, func(p *proc.Target, fixture protest.Fixture) {
		// Breakpoint at line 12 of the fixture source file.
		bp := setFileBreakpoint(p, t, fixture.Source, 12)
		for {
			if err := p.Continue(); err != nil {
				// Process exit is the expected loop terminator; any other
				// Continue failure is a test failure.
				if _, exited := err.(proc.ErrProcessExited); exited {
					break
				}
				assertNoError(err, t, "Continue()")
			}
		}
		t.Logf("TotalHitCount: %d", bp.UserBreaklet().TotalHitCount)
		if bp.UserBreaklet().TotalHitCount != 200 {
			t.Fatalf("Wrong TotalHitCount for the breakpoint (%d)", bp.UserBreaklet().TotalHitCount)
		}
		if len(bp.UserBreaklet().HitCount) != 2 {
			t.Fatalf("Wrong number of goroutines for breakpoint (%d)", len(bp.UserBreaklet().HitCount))
		}
		for _, v := range bp.UserBreaklet().HitCount {
			if v != 100 {
				t.Fatalf("Wrong HitCount for breakpoint (%v)", bp.UserBreaklet().HitCount)
			}
		}
	})
}
explode_data.jsonl/56233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 22524, 2768, 63731, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 1925, 1155, 11, 330, 48909, 497, 330, 10593, 51835, 1138, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 17808, 1830, 267, 477, 497, 259, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestOpenTelemetryJavaMetrics sends well-known JVM runtime metrics through
// the OTLP intake and asserts that each indexed metric document carries both
// the original OpenTelemetry field names (runtime.jvm.*) and their Elastic
// APM equivalents (jvm.gc.*, jvm.memory.heap.used), with labels preserved.
func TestOpenTelemetryJavaMetrics(t *testing.T) {
	systemtest.CleanupElasticsearch(t)
	srv := apmservertest.NewUnstartedServer(t)
	err := srv.Start()
	require.NoError(t, err)
	aggregator := simple.NewWithExactDistribution()
	err = sendOTLPMetrics(context.Background(), srv, aggregator, func(meter metric.MeterMust) {
		// Record well-known JVM runtime metrics, to test that they are
		// copied to their Elastic APM equivalents during ingest.
		jvmGCTime := meter.NewInt64Counter("runtime.jvm.gc.time")
		jvmGCCount := meter.NewInt64Counter("runtime.jvm.gc.count")
		jvmGCTime.Bind(label.String("gc", "G1 Young Generation")).Add(context.Background(), 123)
		jvmGCCount.Bind(label.String("gc", "G1 Young Generation")).Add(context.Background(), 1)
		jvmMemoryArea := meter.NewInt64UpDownCounter("runtime.jvm.memory.area")
		jvmMemoryArea.Bind(
			label.String("area", "heap"),
			label.String("type", "used"),
		).Add(context.Background(), 42)
	})
	require.NoError(t, err)
	result := systemtest.Elasticsearch.ExpectMinDocs(t, 2, "apm-*", estest.BoolQuery{Filter: []interface{}{
		estest.TermQuery{Field: "processor.event", Value: "metric"},
	}})
	require.Len(t, result.Hits.Hits, 2) // one for each set of labels
	// Classify the two hits by which label they carry.
	var gcHit, memoryAreaHit estest.SearchHit
	for _, hit := range result.Hits.Hits {
		require.Contains(t, hit.Source, "jvm")
		switch {
		case gjson.GetBytes(hit.RawSource, "labels.gc").Exists():
			gcHit = hit
		case gjson.GetBytes(hit.RawSource, "labels.area").Exists():
			memoryAreaHit = hit
		}
	}
	assert.Equal(t, 123.0, gjson.GetBytes(gcHit.RawSource, "runtime.jvm.gc.time").Value())
	assert.Equal(t, 1.0, gjson.GetBytes(gcHit.RawSource, "runtime.jvm.gc.count").Value())
	assert.Equal(t, map[string]interface{}{
		"gc":   "G1 Young Generation",
		"name": "G1 Young Generation",
	}, gcHit.Source["labels"])
	assert.Equal(t, 123.0, gjson.GetBytes(gcHit.RawSource, "jvm.gc.time").Value())
	assert.Equal(t, 1.0, gjson.GetBytes(gcHit.RawSource, "jvm.gc.count").Value())
	assert.Equal(t, 42.0, gjson.GetBytes(memoryAreaHit.RawSource, "runtime.jvm.memory.area").Value())
	assert.Equal(t, map[string]interface{}{
		"area": "heap",
		"type": "used",
	}, memoryAreaHit.Source["labels"])
	assert.Equal(t, 42.0, gjson.GetBytes(memoryAreaHit.RawSource, "jvm.memory.heap.used").Value())
}
explode_data.jsonl/41683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 891 }
[ 2830, 3393, 5002, 6639, 35958, 15041, 27328, 1155, 353, 8840, 836, 8, 341, 40293, 1944, 727, 60639, 36, 51179, 1836, 1155, 340, 1903, 10553, 1669, 1443, 76, 799, 1621, 477, 7121, 1806, 46723, 5475, 1155, 340, 9859, 1669, 43578, 12101, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLabels_UnmarshalGQL table-tests Labels.UnmarshalGQL: valid
// map[string]interface{} inputs (string, int, and []string values) must be
// copied into the receiver, while nil input and a map with non-string keys
// must yield the exact documented errors and leave the receiver empty.
func TestLabels_UnmarshalGQL(t *testing.T) {
	for name, tc := range map[string]struct {
		input    interface{} // value handed to UnmarshalGQL
		err      bool        // whether an error is expected
		errMsg   string      // exact expected error text (when err is true)
		expected Labels      // expected receiver content (when err is false)
	}{
		//given
		"correct input map[string]string": {
			input:    map[string]interface{}{"annotation": "val1"},
			err:      false,
			expected: Labels{"annotation": "val1"},
		},
		"correct input map[string]int": {
			input:    map[string]interface{}{"annotation": 123},
			err:      false,
			expected: Labels{"annotation": 123},
		},
		"correct input map[string][]string": {
			input:    map[string]interface{}{"annotation": []string{"val1", "val2"}},
			err:      false,
			expected: Labels{"annotation": []string{"val1", "val2"}},
		},
		"error: input is nil": {
			input:  nil,
			err:    true,
			errMsg: "input should not be nil"},
		"error: invalid input type": {
			input:  map[int]interface{}{123: "invalid map"},
			err:    true,
			errMsg: "unexpected Labels type: map[int]interface {}, should be map[string]interface{}"},
	} {
		t.Run(name, func(t *testing.T) {
			//when
			a := Labels{}
			err := a.UnmarshalGQL(tc.input)
			//then
			if tc.err {
				assert.Error(t, err)
				assert.EqualError(t, err, tc.errMsg)
				assert.Empty(t, a)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tc.expected, a)
			}
		})
	}
}
explode_data.jsonl/47659
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 586 }
[ 2830, 3393, 23674, 40687, 27121, 38, 3588, 1155, 353, 8840, 836, 8, 341, 2023, 829, 11, 17130, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 22427, 262, 3749, 16094, 197, 9859, 414, 1807, 198, 197, 9859, 6611, 256, 914, 198, 197, 42400...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAddReplica(t *testing.T) { ttr := make(topicThrottledReplicas) // Try to add an invalid type. err := ttr.addReplica("test", "0", "invalid", "1001") if err != errInvalidReplicaType { t.Errorf("Expected 'errInvalidReplicaType' error") } types := []replicaType{"leaders", "followers"} // Add valid types; error unexpected. for _, typ := range types { err := ttr.addReplica("test", "0", replicaType(typ), "1001") if err != nil { t.Errorf("Unexpected error: %s", err) } } // For each type {leaders, followers}, ensure that we have one follower entry. for _, typ := range types { gotLen := len(ttr["test"][typ]) if gotLen != 1 { t.Errorf("Expected len 1 for ttr[test][%s], got %d", typ, gotLen) } } // Spot check the content. if ttr["test"]["leaders"][0] != "0:1001" { t.Errorf("Expected output '0:1001', got '%s'", ttr["test"]["leaders"][0]) } }
explode_data.jsonl/39255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 348 }
[ 2830, 3393, 2212, 18327, 15317, 1155, 353, 8840, 836, 8, 341, 3244, 376, 1669, 1281, 43839, 1001, 46689, 832, 18327, 52210, 692, 197, 322, 9735, 311, 912, 458, 8318, 943, 624, 9859, 1669, 259, 376, 1364, 18327, 15317, 445, 1944, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestValueEquals builds constructors for a list of pairwise-distinct noms
// values (bools, floats, strings, blobs, lists, maps, sets, and type values)
// and checks that Equals is reflexive and discriminating: values[i] equals
// values[j] exactly when i == j. It also verifies that a Ref to a value
// never compares equal to the value itself (and vice versa).
func TestValueEquals(t *testing.T) {
	assert := assert.New(t)
	vrw := newTestValueStore()
	// Each entry constructs a fresh value; entries at different indices are
	// intended to construct values that must not compare equal.
	values := []func() (Value, error){
		func() (Value, error) { return Bool(false), nil },
		func() (Value, error) { return Bool(true), nil },
		func() (Value, error) { return Float(0), nil },
		func() (Value, error) { return Float(-1), nil },
		func() (Value, error) { return Float(1), nil },
		func() (Value, error) { return String(""), nil },
		func() (Value, error) { return String("hi"), nil },
		func() (Value, error) { return String("bye"), nil },
		func() (Value, error) { return NewBlob(context.Background(), vrw, &bytes.Buffer{}) },
		func() (Value, error) { return NewBlob(context.Background(), vrw, bytes.NewBufferString("hi")) },
		func() (Value, error) { return NewBlob(context.Background(), vrw, bytes.NewBufferString("bye")) },
		// A chunked blob built from a meta sequence over two leaf blobs, so
		// equality is also exercised on non-leaf blob representations.
		func() (Value, error) {
			b1, err := NewBlob(context.Background(), vrw, bytes.NewBufferString("hi"))
			if err != nil {
				return nil, err
			}
			b2, err := NewBlob(context.Background(), vrw, bytes.NewBufferString("bye"))
			if err != nil {
				return nil, err
			}
			return newBlob(mustSeq(newBlobMetaSequence(1, []metaTuple{
				mustMetaTuple(newMetaTuple(mustRef(NewRef(b1, Format_7_18)), mustOrdKey(orderedKeyFromInt(2, Format_7_18)), 2)),
				mustMetaTuple(newMetaTuple(mustRef(NewRef(b2, Format_7_18)), mustOrdKey(orderedKeyFromInt(5, Format_7_18)), 5)),
			}, vrw))), nil
		},
		func() (Value, error) { return NewList(context.Background(), vrw) },
		func() (Value, error) { return NewList(context.Background(), vrw, String("foo")) },
		func() (Value, error) { return NewList(context.Background(), vrw, String("bar")) },
		func() (Value, error) { return NewMap(context.Background(), vrw) },
		func() (Value, error) { return NewMap(context.Background(), vrw, String("a"), String("a")) },
		func() (Value, error) { return NewSet(context.Background(), vrw) },
		func() (Value, error) { return NewSet(context.Background(), vrw, String("hi")) },
		func() (Value, error) { return PrimitiveTypeMap[BoolKind], nil },
		func() (Value, error) { return PrimitiveTypeMap[StringKind], nil },
		func() (Value, error) { return MakeStructType("a") },
		func() (Value, error) { return MakeStructType("b") },
		func() (Value, error) { return MakeListType(PrimitiveTypeMap[BoolKind]) },
		func() (Value, error) { return MakeListType(PrimitiveTypeMap[FloatKind]) },
		func() (Value, error) { return MakeSetType(PrimitiveTypeMap[BoolKind]) },
		func() (Value, error) { return MakeSetType(PrimitiveTypeMap[FloatKind]) },
		func() (Value, error) { return MakeRefType(PrimitiveTypeMap[BoolKind]) },
		func() (Value, error) { return MakeRefType(PrimitiveTypeMap[FloatKind]) },
		func() (Value, error) { return MakeMapType(PrimitiveTypeMap[BoolKind], PrimitiveTypeMap[ValueKind]) },
		func() (Value, error) { return MakeMapType(PrimitiveTypeMap[FloatKind], PrimitiveTypeMap[ValueKind]) },
	}
	for i, f1 := range values {
		for j, f2 := range values {
			v1, err := f1()
			assert.NoError(err)
			v2, err := f2()
			assert.NoError(err)
			// Equality holds only on the diagonal of the cross product.
			assert.Equal(v1.Equals(v2), i == j)
		}
		v, err := f1()
		assert.NoError(err)
		if v != nil {
			r, err := NewRef(v, Format_7_18)
			assert.NoError(err)
			// A reference to a value is not the value itself.
			assert.False(r.Equals(v))
			assert.False(v.Equals(r))
		}
	}
}
explode_data.jsonl/80709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1250 }
[ 2830, 3393, 1130, 4315, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 5195, 31768, 1669, 501, 2271, 1130, 6093, 2822, 45939, 1669, 3056, 2830, 368, 320, 1130, 11, 1465, 1264, 197, 29244, 368, 320, 1130, 11, 1465, 8,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetKeys(t *testing.T) { rwc := openTPMOrSkip(t) defer rwc.Close() handles, err := GetKeys(rwc) if err != nil { t.Fatal("Couldn't enumerate keys in the TPM:", err) } t.Logf("Got %d keys: % d\n", len(handles), handles) }
explode_data.jsonl/75340
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 1949, 8850, 1155, 353, 8840, 836, 8, 341, 7000, 24028, 1669, 1787, 4239, 44, 2195, 35134, 1155, 340, 16867, 435, 24028, 10421, 2822, 9598, 20125, 11, 1848, 1669, 2126, 8850, 2601, 24028, 340, 743, 1848, 961, 2092, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestQueryGetAll stubs the Firestore RunQuery RPC with two documents and
// verifies that Query.Documents(...).GetAll returns snapshots matching the
// stubbed protos.
func TestQueryGetAll(t *testing.T) {
	// This implicitly tests DocumentIterator as well.
	const dbPath = "projects/projectID/databases/(default)"
	ctx := context.Background()
	c, srv := newMock(t)
	docNames := []string{"C/a", "C/b"}
	wantPBDocs := []*pb.Document{
		{
			Name:       dbPath + "/documents/" + docNames[0],
			CreateTime: aTimestamp,
			UpdateTime: aTimestamp,
			Fields:     map[string]*pb.Value{"f": intval(2)},
		},
		{
			Name:       dbPath + "/documents/" + docNames[1],
			CreateTime: aTimestamp2,
			UpdateTime: aTimestamp3,
			Fields:     map[string]*pb.Value{"f": intval(1)},
		},
	}
	// Queue a RunQuery response streaming both documents.
	srv.addRPC(nil, []interface{}{
		&pb.RunQueryResponse{Document: wantPBDocs[0]},
		&pb.RunQueryResponse{Document: wantPBDocs[1]},
	})
	gotDocs, err := c.Collection("C").Documents(ctx).GetAll()
	if err != nil {
		t.Fatal(err)
	}
	if got, want := len(gotDocs), len(wantPBDocs); got != want {
		t.Errorf("got %d docs, wanted %d", got, want)
	}
	for i, got := range gotDocs {
		want, err := newDocumentSnapshot(c.Doc(docNames[i]), wantPBDocs[i], c)
		if err != nil {
			t.Fatal(err)
		}
		if !testEqual(got, want) {
			// avoid writing a cycle
			got.c = nil
			want.c = nil
			t.Errorf("#%d: got %+v, want %+v", i, pretty.Value(got), pretty.Value(want))
		}
	}
}
explode_data.jsonl/17717
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 567 }
[ 2830, 3393, 2859, 1949, 2403, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 51773, 7032, 11789, 11951, 438, 1632, 624, 4777, 2927, 1820, 284, 330, 17161, 40118, 915, 3446, 23822, 11884, 2258, 12954, 20985, 1669, 2266, 19047, 741, 1444, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestFakeMapVerboseEqual(t *testing.T) { popr := math_rand.New(math_rand.NewSource(time.Now().UnixNano())) p := NewPopulatedFakeMap(popr, false) dAtA, err := github_com_gogo_protobuf_proto.Marshal(p) if err != nil { panic(err) } msg := &FakeMap{} if err := github_com_gogo_protobuf_proto.Unmarshal(dAtA, msg); err != nil { panic(err) } if err := p.VerboseEqual(msg); err != nil { t.Fatalf("%#v !VerboseEqual %#v, since %v", msg, p, err) } }
explode_data.jsonl/14898
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 52317, 2227, 63404, 2993, 1155, 353, 8840, 836, 8, 341, 3223, 46288, 1669, 6888, 33864, 7121, 37270, 33864, 7121, 3608, 9730, 13244, 1005, 55832, 83819, 12145, 3223, 1669, 1532, 11598, 7757, 52317, 2227, 40148, 81, 11, 895, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTSInterface(t *testing.T) { expectPrintedTS(t, "interface A { a } x", "x;\n") expectPrintedTS(t, "interface A { a; b } x", "x;\n") expectPrintedTS(t, "interface A { a() } x", "x;\n") expectPrintedTS(t, "interface A { a(); b } x", "x;\n") expectPrintedTS(t, "interface Foo { foo(): Foo \n is: Bar } x", "x;\n") expectPrintedTS(t, "interface A<T extends number> extends B.C<D, E>, F.G<H, I> {} x", "x;\n") expectPrintedTS(t, "export interface A<T extends number> extends B.C<D, E>, F.G<H, I> {} x", "x;\n") expectPrintedTS(t, "export default interface Foo {} x", "x;\n") }
explode_data.jsonl/82316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 9951, 5051, 1155, 353, 8840, 836, 8, 341, 24952, 8994, 291, 9951, 1155, 11, 330, 4970, 362, 314, 264, 335, 856, 497, 330, 87, 17882, 77, 1138, 24952, 8994, 291, 9951, 1155, 11, 330, 4970, 362, 314, 264, 26, 293, 335, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMatchJWSURLs covers wfe.matchJWSURLs, which requires both the outer
// and inner JWS of a key-rollover request to carry a 'url' protected header
// and the two values to match. Each failure case checks the returned
// problem details and the jose error counter bucket that is incremented.
func TestMatchJWSURLs(t *testing.T) {
	wfe, _ := setupWFE(t)
	// Three signed requests: no url header, url=example.com, url=example.org.
	noURLJWS, _, _ := signRequestEmbed(t, nil, "", "", wfe.nonceService)
	urlAJWS, _, _ := signRequestEmbed(t, nil, "example.com", "", wfe.nonceService)
	urlBJWS, _, _ := signRequestEmbed(t, nil, "example.org", "", wfe.nonceService)
	testCases := []struct {
		Name            string
		Outer           *jose.JSONWebSignature
		Inner           *jose.JSONWebSignature
		ExpectedProblem *probs.ProblemDetails
		ErrorStatType   string
	}{
		{
			Name:  "Outer JWS without URL",
			Outer: noURLJWS,
			Inner: urlAJWS,
			ExpectedProblem: &probs.ProblemDetails{
				Type:       probs.MalformedProblem,
				Detail:     "Outer JWS header parameter 'url' required",
				HTTPStatus: http.StatusBadRequest,
			},
			ErrorStatType: "KeyRolloverOuterJWSNoURL",
		},
		{
			Name:  "Inner JWS without URL",
			Outer: urlAJWS,
			Inner: noURLJWS,
			ExpectedProblem: &probs.ProblemDetails{
				Type:       probs.MalformedProblem,
				Detail:     "Inner JWS header parameter 'url' required",
				HTTPStatus: http.StatusBadRequest,
			},
			ErrorStatType: "KeyRolloverInnerJWSNoURL",
		},
		{
			Name:  "Inner and outer JWS without URL",
			Outer: noURLJWS,
			Inner: noURLJWS,
			ExpectedProblem: &probs.ProblemDetails{
				Type: probs.MalformedProblem,
				// The Outer JWS is validated first
				Detail:     "Outer JWS header parameter 'url' required",
				HTTPStatus: http.StatusBadRequest,
			},
			ErrorStatType: "KeyRolloverOuterJWSNoURL",
		},
		{
			Name:  "Mismatched inner and outer JWS URLs",
			Outer: urlAJWS,
			Inner: urlBJWS,
			ExpectedProblem: &probs.ProblemDetails{
				Type:       probs.MalformedProblem,
				Detail:     "Outer JWS 'url' value \"example.com\" does not match inner JWS 'url' value \"example.org\"",
				HTTPStatus: http.StatusBadRequest,
			},
			ErrorStatType: "KeyRolloverMismatchedURLs",
		},
		{
			// The happy path: matching URLs produce no problem.
			Name:  "Matching inner and outer JWS URLs",
			Outer: urlAJWS,
			Inner: urlAJWS,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.Name, func(t *testing.T) {
			wfe.stats.joseErrorCount.Reset()
			prob := wfe.matchJWSURLs(tc.Outer, tc.Inner)
			if prob != nil && tc.ExpectedProblem == nil {
				t.Errorf("matchJWSURLs failed. Expected no problem, got %#v", prob)
			} else {
				test.AssertMarshaledEquals(t, prob, tc.ExpectedProblem)
			}
			if tc.ErrorStatType != "" {
				test.AssertEquals(t, test.CountCounterVec(
					"type", tc.ErrorStatType, wfe.stats.joseErrorCount), 1)
			}
		})
	}
}
explode_data.jsonl/15360
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1111 }
[ 2830, 3393, 8331, 41, 7433, 3144, 82, 1155, 353, 8840, 836, 8, 341, 6692, 1859, 11, 716, 1669, 6505, 54, 11419, 1155, 692, 72104, 3144, 41, 7433, 11, 8358, 716, 1669, 1841, 1900, 25486, 1155, 11, 2092, 11, 7342, 7342, 289, 1859, 125...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBuildCreateError(t *testing.T) { testServer(t, func(c *stdsdk.Client, p *structs.MockProvider) { var b1 *structs.Build p.On("BuildCreate", "app1", "", structs.BuildCreateOptions{}).Return(nil, fmt.Errorf("err1")) err := c.Post("/apps/app1/builds", stdsdk.RequestOptions{}, b1) require.Nil(t, b1) require.EqualError(t, err, "err1") }) }
explode_data.jsonl/71416
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 11066, 4021, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 1155, 11, 2915, 1337, 353, 1834, 51295, 11716, 11, 281, 353, 1235, 82, 24664, 5179, 8, 341, 197, 2405, 293, 16, 353, 1235, 82, 25212, 198, 197, 3223, 8071, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestQueryConvertion builds an OR-of-two-ANDs query AST by hand and checks
// that ToMongoQuery renders it to the expected MongoDB-style JSON string,
// including $gt/$lte comparisons and $not/$ne negations.
func TestQueryConvertion(t *testing.T) {
	ast := assert.New(t)
	// Expected rendering of the query constructed below.
	str := `#{"$or": [{"$and": [{"field1": "Willie"}, {"field2": {"$gt": 100}}, {"field3": {"$not": {"$eq": "murks"}}}]}, {"$and": [{"field1": "Max"}, {"field2": {"$lte": 100}}, {"field3": {"$ne": "murks"}}]}]}`
	//#{"$or": [ {"$and": [ {"field1":"Willie"},{field2:>100}) OR (field1:"Max" AND field2:<=100))`
	q := query.Query{
		Condition: query.Node{
			Operator: query.OROP,
			Conditions: []interface{}{
				// First branch: field1 == "Willie" AND field2 > 100 AND
				// NOT(field3 == "murks").
				query.Node{
					Operator: query.ANDOP,
					Conditions: []interface{}{
						query.Condition{
							Field:    "field1",
							Operator: query.NO,
							Value:    "Willie",
						},
						query.Condition{
							Field:    "field2",
							Operator: query.GT,
							Value:    100,
						},
						query.Condition{
							Field:    "field3",
							Operator: query.EQ,
							Invert:   true,
							Value:    "murks",
						},
					},
				},
				// Second branch: field1 == "Max" AND field2 <= 100 AND
				// field3 != "murks".
				query.Node{
					Operator: query.ANDOP,
					Conditions: []interface{}{
						query.Condition{
							Field:    "field1",
							Operator: query.NO,
							Value:    "Max",
						},
						query.Condition{
							Field:    "field2",
							Operator: query.LE,
							Value:    100,
						},
						query.Condition{
							Field:    "field3",
							Operator: query.NE,
							Value:    "murks",
						},
					},
				},
			},
		},
	}
	ast.NotNil(q)
	s := ToMongoQuery(q)
	fmt.Println(str)
	fmt.Println(s)
	ast.Equal(str, s)
}
explode_data.jsonl/74255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 850 }
[ 2830, 3393, 2859, 12012, 290, 1155, 353, 8840, 836, 8, 972, 88836, 1669, 2060, 7121, 1155, 7229, 11355, 1669, 1565, 2, 4913, 3, 269, 788, 61753, 3, 437, 788, 61753, 2566, 16, 788, 330, 9945, 645, 14345, 5212, 2566, 17, 788, 5212, 3,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestParseNullBool drives ParseBool through every interesting raw input:
// NULL (nil bytes), empty or garbage strings (errors), and the accepted
// boolean spellings ("0"/"1", "t"/"f", "true"/"false" in either case).
// UseStdLib is disabled first — presumably selecting the package's own
// parser over strconv; confirm against ParseBool's implementation.
func TestParseNullBool(t *testing.T) {
	UseStdLib = false
	// runner builds a subtest that feeds `have` to ParseBool and compares
	// the resulting (value, ok, err) against want/wantErr. The literal
	// string "NULL" is translated to nil raw bytes.
	runner := func(have string, want sql.NullBool, wantErr bool) func(*testing.T) {
		return func(t *testing.T) {
			b := sql.RawBytes(have)
			if have == "NULL" {
				b = nil
			}
			bv, ok, err := ParseBool(b)
			if wantErr {
				assert.Error(t, err, "%q", have)
				return
			}
			assert.NoError(t, err, "%s %q", t.Name(), have)
			assert.Exactly(t, want.Valid, ok)
			assert.Exactly(t, want.Bool, bv, t.Name())
		}
	}
	t.Run("NULL is false and invalid", runner("NULL", sql.NullBool{}, false))
	t.Run("empty is false and invalid", runner("", sql.NullBool{}, true))
	t.Run(" is false and invalid", runner("", sql.NullBool{}, true))
	t.Run("£ is false and invalid", runner("£", sql.NullBool{}, true))
	t.Run("0 is false and valid", runner("0", sql.NullBool{Valid: true}, false))
	t.Run("1 is true and valid", runner("1", sql.NullBool{Valid: true, Bool: true}, false))
	t.Run("10 is false and invalid", runner("10", sql.NullBool{}, true))
	t.Run("01 is false and invalid", runner("01", sql.NullBool{}, true))
	t.Run("t is true and valid", runner("t", sql.NullBool{Valid: true, Bool: true}, false))
	t.Run("true is true and valid", runner("true", sql.NullBool{Valid: true, Bool: true}, false))
	t.Run("TRUE is true and valid", runner("TRUE", sql.NullBool{Valid: true, Bool: true}, false))
	t.Run("f is false and valid", runner("f", sql.NullBool{Valid: true, Bool: false}, false))
	t.Run("false is false and valid", runner("false", sql.NullBool{Valid: true, Bool: false}, false))
	t.Run("FALSE is false and valid", runner("FALSE", sql.NullBool{Valid: true, Bool: false}, false))
}
explode_data.jsonl/13564
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 644 }
[ 2830, 3393, 14463, 3280, 11233, 1155, 353, 8840, 836, 8, 341, 95023, 22748, 9194, 284, 895, 198, 197, 41736, 1669, 2915, 3203, 523, 914, 11, 1366, 5704, 23979, 11233, 11, 1366, 7747, 1807, 8, 2915, 4071, 8840, 836, 8, 341, 197, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestContextIsAborted(t *testing.T) { c, _ := CreateTestContext(httptest.NewRecorder()) assert.False(t, c.IsAborted()) c.Abort() assert.True(t, c.IsAborted()) c.Next() assert.True(t, c.IsAborted()) c.index++ assert.True(t, c.IsAborted()) }
explode_data.jsonl/26810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 1972, 3872, 5830, 13595, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 4230, 2271, 1972, 73392, 83, 70334, 7121, 47023, 2398, 6948, 50757, 1155, 11, 272, 4506, 5830, 13595, 12367, 1444, 25206, 371, 741, 6948, 32443, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInterpreterSorting(t *testing.T) { s, err := parseFile("src/parse/asp/test_data/interpreter/sorted.build") require.NoError(t, err) assert.Equal(t, pyList{pyInt(1), pyInt(2), pyInt(3)}, s.Lookup("y")) // N.B. sorted() sorts in-place, unlike Python's one. We may change that later. }
explode_data.jsonl/81066
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 58426, 71681, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 4715, 1703, 445, 3548, 14, 6400, 14, 13367, 12697, 1769, 14, 90554, 2687, 13595, 13239, 1138, 17957, 35699, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, 4510, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReloadConfig persists a schedule configuration, temporarily appends
// "shuffle-leader" to the global default scheduler list, and verifies that
// a fresh option set reloaded from storage picks up the persisted values
// (max replicas, snapshot count, region storage) plus the new default
// scheduler, with no scheduler disabled.
func TestReloadConfig(t *testing.T) {
	re := require.New(t)
	registerDefaultSchedulers()
	RegisterScheduler("shuffle-leader")
	opt, err := newTestScheduleOption()
	re.NoError(err)
	storage := storage.NewStorageWithMemoryBackend()
	scheduleCfg := opt.GetScheduleConfig()
	scheduleCfg.MaxSnapshotCount = 10
	opt.SetMaxReplicas(5)
	opt.GetPDServerConfig().UseRegionStorage = true
	re.NoError(opt.Persist(storage))
	// Add a new default enable scheduler "shuffle-leader"
	DefaultSchedulers = append(DefaultSchedulers, SchedulerConfig{Type: "shuffle-leader"})
	// Restore the global list so other tests are unaffected.
	defer func() {
		DefaultSchedulers = DefaultSchedulers[:len(DefaultSchedulers)-1]
	}()
	newOpt, err := newTestScheduleOption()
	re.NoError(err)
	re.NoError(newOpt.Reload(storage))
	schedulers := newOpt.GetSchedulers()
	re.Len(schedulers, len(DefaultSchedulers))
	re.True(newOpt.IsUseRegionStorage())
	for i, s := range schedulers {
		re.Equal(DefaultSchedulers[i].Type, s.Type)
		re.False(s.Disable)
	}
	re.Equal(5, newOpt.GetMaxReplicas())
	re.Equal(uint64(10), newOpt.GetMaxSnapshotCount())
	re.Equal(int64(512), newOpt.GetMaxMovableHotPeerSize())
}
explode_data.jsonl/78161
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 425 }
[ 2830, 3393, 50035, 2648, 1155, 353, 8840, 836, 8, 341, 17200, 1669, 1373, 7121, 1155, 340, 29422, 3675, 74674, 741, 79096, 38878, 445, 65355, 30207, 998, 1138, 64838, 11, 1848, 1669, 501, 2271, 32210, 5341, 741, 17200, 35699, 3964, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetIntervals table-tests the restGetIntervals HTTP handler: a DB
// client that loads intervals successfully must yield 200 OK, and one that
// errors must yield 500 Internal Server Error.
func TestGetIntervals(t *testing.T) {
	tests := []struct {
		name           string
		request        *http.Request
		dbMock         interfaces.DBClient
		expectedStatus int
	}{
		{
			"OK",
			createRequestIntervalAll(),
			createMockIntervalLoaderAllSuccess(),
			http.StatusOK,
		},
		{
			name:           "Unexpected Error",
			request:        createRequestIntervalAll(),
			dbMock:         createMockIntervalLoaderAllErr(),
			expectedStatus: http.StatusInternalServerError,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rr := httptest.NewRecorder()
			restGetIntervals(rr, tt.request, logger.NewMockClient(), tt.dbMock, &schedConfig.ConfigurationStruct{})
			response := rr.Result()
			if response.StatusCode != tt.expectedStatus {
				t.Errorf("status code mismatch -- expected %v got %v", tt.expectedStatus, response.StatusCode)
				return
			}
		})
	}
}
explode_data.jsonl/51569
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 3393, 85097, 42198, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 23555, 286, 353, 1254, 9659, 198, 197, 20939, 11571, 260, 24099, 22537, 2959, 198, 197, 42400, 2522, 526, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRequestWithdaw(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() || !canManipulateRealOrders { t.Skip("skipping test, either api keys or manipulaterealorders isnt set correctly") } _, err := b.RequestWithdraw("BTC", 1, "sdjflajdslfjld", "", "", "", "") if err == nil { t.Error("expected an error due to invalid toAddress") } }
explode_data.jsonl/33169
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 1900, 2354, 67, 672, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 1369, 753, 4814, 92876, 6334, 12768, 24898, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 11, 2987, 633...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAuthBasicRequestParam(t *testing.T) { r := URL("http://localhost/") r.Auth("admin", "safe") err := r.setupAction("GET") if err != nil { t.Error(err) } var username, password, ok = r.Request.BasicAuth() if username != "admin" || password != "safe" || ok != true { t.Errorf("Wrong user credentials") } }
explode_data.jsonl/24732
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 5087, 15944, 25729, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 5548, 445, 1254, 1110, 8301, 53006, 7000, 25233, 445, 2882, 497, 330, 18675, 5130, 9859, 1669, 435, 25338, 2512, 445, 3806, 5130, 743, 1848, 961, 2092, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestBuildGetError(t *testing.T) { testServer(t, func(c *stdsdk.Client, p *structs.MockProvider) { var b1 *structs.Build p.On("BuildGet", "app1", "build1").Return(nil, fmt.Errorf("err1")) err := c.Get("/apps/app1/builds/build1", stdsdk.RequestOptions{}, b1) require.Nil(t, b1) require.EqualError(t, err, "err1") }) }
explode_data.jsonl/71420
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 11066, 1949, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 1155, 11, 2915, 1337, 353, 1834, 51295, 11716, 11, 281, 353, 1235, 82, 24664, 5179, 8, 341, 197, 2405, 293, 16, 353, 1235, 82, 25212, 198, 197, 3223, 8071, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExitCodeWindows(t *testing.T) { t.Parallel() for i := 0; i <= 255; i++ { cmd := exec.Command(`..\testdata\test_exit_code.bat`, strconv.Itoa(i)) err := cmd.Run() if i == 0 { assert.Nil(t, err) } else { assert.Error(t, err) } retCode, err := GetExitCode(err) assert.Nil(t, err) assert.Equal(t, i, retCode) } // assert a non exec.ExitError returns an error err := errors.New("This is an explicit error") retCode, retErr := GetExitCode(err) assert.Error(t, retErr, "An error was expected") assert.Equal(t, err, retErr) assert.Equal(t, 0, retCode) }
explode_data.jsonl/5179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 15339, 2078, 13164, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2023, 600, 1669, 220, 15, 26, 600, 2651, 220, 17, 20, 20, 26, 600, 1027, 341, 197, 25920, 1669, 3883, 12714, 5809, 60402, 92425, 59, 1944, 16880, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetCurrencyPairDisplayConfig(t *testing.T) { cfg := GetConfig() err := cfg.LoadConfig(ConfigTestFile) if err != nil { t.Errorf( "Test failed. GetCurrencyPairDisplayConfig. LoadConfig Error: %s", err.Error(), ) } settings := cfg.GetCurrencyPairDisplayConfig() if settings.Delimiter != "-" || !settings.Uppercase { t.Errorf( "Test failed. GetCurrencyPairDisplayConfi. Invalid values", ) } }
explode_data.jsonl/21896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 1949, 26321, 12443, 7020, 2648, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 2126, 2648, 741, 9859, 1669, 13286, 13969, 2648, 33687, 2271, 1703, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 1006, 298, 197, 1, 2271, 4641, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestImageReader_Read(t *testing.T) { cases := []struct { name string wantRef string fetcher *fakeFetcher options []ImageReaderOption wantErr bool }{ { name: "in general", fetcher: &fakeFetcher{ image: nil, err: nil, }, options: []ImageReaderOption{}, wantErr: false, }, { name: "insecure registry", fetcher: &fakeFetcher{ image: nil, err: nil, }, options: []ImageReaderOption{ WithInsecure(true), }, wantErr: false, }, { name: "fetcher failed", fetcher: &fakeFetcher{ image: nil, err: fmt.Errorf("error"), }, wantErr: true, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { options := append(tc.options, func(ir *ImageReader) { ir.fetcher = tc.fetcher }) ir := NewImageReader(options...) _, err := ir.Read("ref") if tc.wantErr { require.Error(t, err) return } require.NoError(t, err) require.Equal(t, "ref", tc.fetcher.requested.String()) }) } }
explode_data.jsonl/9035
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 486 }
[ 2830, 3393, 1906, 5062, 38381, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 50780, 3945, 914, 198, 197, 1166, 2995, 261, 353, 30570, 97492, 198, 197, 35500, 3056, 1906, 5062, 5341, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSecretContentReferenceSuccess(t *testing.T) { f := newFixture(t) secret := &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "web-metric-secret", Namespace: metav1.NamespaceDefault, }, Data: map[string][]byte{ "apikey": []byte("12345"), }, } defer f.Close() c, _, _ := f.newController(noResyncPeriodFunc) f.kubeclient.CoreV1().Secrets(metav1.NamespaceDefault).Create(context.TODO(), secret, metav1.CreateOptions{}) argName := "apikey" run := &v1alpha1.AnalysisRun{ ObjectMeta: metav1.ObjectMeta{ Namespace: metav1.NamespaceDefault, }, Spec: v1alpha1.AnalysisRunSpec{ Args: []v1alpha1.Argument{{ Name: argName, ValueFrom: &v1alpha1.ValueFrom{ SecretKeyRef: &v1alpha1.SecretKeyRef{ Name: "web-metric-secret", Key: "apikey", }, }, }}, Metrics: []v1alpha1.Metric{{ Name: "rate", Provider: v1alpha1.MetricProvider{ Web: &v1alpha1.WebMetric{ Headers: []v1alpha1.WebMetricHeader{{ Key: "apikey", Value: "{{args.apikey}}", }}, }, }, }}, }, } f.provider.On("Run", mock.Anything, mock.Anything, mock.Anything).Return(newMeasurement(v1alpha1.AnalysisPhaseSuccessful), nil) newRun := c.reconcileAnalysisRun(run) assert.Equal(t, v1alpha1.AnalysisPhaseSuccessful, newRun.Status.Phase) }
explode_data.jsonl/75835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 609 }
[ 2830, 3393, 19773, 2762, 8856, 7188, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 340, 197, 20474, 1669, 609, 98645, 16, 74779, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 414, 330, 2911, 1448, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetWorkerRegisterSecret(t *testing.T) { tmp, err := ioutil.TempDir("", "TestGetWorkerRegisterSecret") if err != nil { t.Fatal(err) } gaia.Cfg = &gaia.Config{ Logger: hclog.NewNullLogger(), DataPath: tmp, HomePath: tmp, PipelinePath: tmp, DevMode: true, } // Initialize certificate store _, err = services.CertificateService() if err != nil { t.Fatalf("cannot initialize certificate service: %v", err) } // Initialize vault v, err := services.VaultService(nil) if err != nil { t.Fatalf("cannot initialize vault service: %v", err) } // Generate global worker secret secret := []byte(security.GenerateRandomUUIDV5()) v.Add(gaia.WorkerRegisterKey, secret) if err := v.SaveSecrets(); err != nil { t.Fatal(err) } // Initialize echo e := echo.New() if err := InitHandlers(e); err != nil { t.Fatal(err) } // Test get global worker secret t.Run("global secret success", func(t *testing.T) { req := httptest.NewRequest(echo.GET, "/api/"+gaia.APIVersion+"/worker/secret", nil) req.Header.Set("Content-Type", "application/json") rec := httptest.NewRecorder() c := e.NewContext(req, rec) if err := GetWorkerRegisterSecret(c); err != nil { t.Fatal(err) } if rec.Code != http.StatusOK { t.Fatalf("expected response code %v got %v", http.StatusOK, rec.Code) } bodyBytes, err := ioutil.ReadAll(rec.Body) if err != nil { t.Fatalf("cannot read response body: %s", err.Error()) } if !bytes.Equal(bodyBytes, secret) { t.Fatalf("returned global worker secret is incorrect. Got %s want %s", string(bodyBytes[:]), string(secret[:])) } }) }
explode_data.jsonl/47212
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 649 }
[ 2830, 3393, 1949, 21936, 8690, 19773, 1155, 353, 8840, 836, 8, 341, 20082, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 2271, 1949, 21936, 8690, 19773, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 3174, 64,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCanCheckEntropy(t *testing.T) { config := getConfig() match := MatchFile{ Path: "/xyz", Filename: "jkl", Extension: ".mno", } assert.True(t, match.CanCheckEntropy(config)) match.Extension = ".abc" assert.False(t, match.CanCheckEntropy(config)) }
explode_data.jsonl/18010
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 6713, 3973, 97582, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 66763, 2822, 47706, 1669, 14152, 1703, 515, 197, 69640, 25, 414, 3521, 28854, 756, 197, 12727, 4033, 25, 220, 330, 73, 10561, 756, 197, 197, 12049, 25, 5933, 76...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOptions(t *testing.T) { require := require.New(t) options := &options{} require.False(options.isValid()) require.Nil(options.cpusageCollector) require.Equal(CounterTypeZero, options.highLoadLevel) require.Equal(0.0, options.loadStatusJudgeRatio) require.Equal(uint(0), options.initialIntensity) require.Equal(uint(0), options.stepIntensity) require.Equal(uint(0), options.checkPeriodInSeconds) linuxCPUsageCollector, _ := NewLinuxCPUsageCollector() require.NotNil(linuxCPUsageCollector) const defaultHighLoadLevel = CounterTypeEighty const defaultSafeLoadLevel = CounterTypeSeventy const defaultLoadStatusJudgeRatio = 0.6 const defaultInitialIntensity = 50 const defaultStepIntensity = 10 const defaultCheckPeriodInSeconds = 10 WithCPUSageCollector(linuxCPUsageCollector)(options) WithHighLoadLevel(defaultHighLoadLevel)(options) WithLoadStatusJudgeRatio(defaultLoadStatusJudgeRatio)(options) WithInitialIntensity(defaultInitialIntensity)(options) WithStepIntensity(defaultStepIntensity)(options) WithCheckPeriodInseconds(defaultCheckPeriodInSeconds)(options) require.True(options.isValid()) require.Equal(linuxCPUsageCollector, options.cpusageCollector) require.Equal(defaultHighLoadLevel, options.highLoadLevel) require.Equal(defaultLoadStatusJudgeRatio, options.loadStatusJudgeRatio) require.Equal(uint(defaultInitialIntensity), options.initialIntensity) require.Equal(uint(defaultStepIntensity), options.stepIntensity) require.Equal(uint(defaultCheckPeriodInSeconds), options.checkPeriodInSeconds) }
explode_data.jsonl/76852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 483 }
[ 2830, 3393, 3798, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 35500, 1669, 609, 2875, 16094, 17957, 50757, 12078, 32161, 2398, 17957, 59678, 12078, 66260, 17698, 53694, 340, 17957, 12808, 7, 14099, 929, 17999, 11, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFiles__Delete404(t *testing.T) { achClient, _, server := MockClientServer("fileDelete404", func(r *mux.Router) { r.Methods("DELETE").Path("/files/{fileId}").HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json; charset=utf-8") w.WriteHeader(http.StatusNotFound) w.Write([]byte("{}")) }) }) defer server.Close() // Delete File (expect no error though) if err := achClient.DeleteFile("delete"); err != nil { t.Fatal(err) } }
explode_data.jsonl/73110
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 10809, 563, 6435, 19, 15, 19, 1155, 353, 8840, 836, 8, 341, 197, 610, 2959, 11, 8358, 3538, 1669, 14563, 2959, 5475, 445, 1192, 6435, 19, 15, 19, 497, 2915, 2601, 353, 75066, 31413, 8, 341, 197, 7000, 20798, 82, 445, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHandleAddServiceInstanceSuccess(t *testing.T) { ctx := context.Background() updater, updated := newFakeUpdateServiceInstanceFunc(nil) getServiceClassFn := refs.NewFakeServiceClassGetterFunc(&data.ServiceClass{}, nil) getServiceBrokerFn := refs.NewFakeServiceBrokerGetterFunc(&data.ServiceBroker{}, nil) provisioner := fake.NewProvisioner() lifecycler := &fake.Lifecycler{ Provisioner: provisioner, } inst := new(data.ServiceInstance) inst.Kind = data.ServiceInstanceKind evt := watch.Event{ Type: watch.Added, Object: inst, } err := handleAddServiceInstance(ctx, lifecycler, updater, getServiceClassFn, getServiceBrokerFn, evt) assert.NoErr(t, err) assert.Equal(t, len(provisioner.Reqs), 1, "number of provision requests") assert.Equal(t, len(*updated), 2, "number of updated service instances") }
explode_data.jsonl/72429
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 6999, 2212, 1860, 2523, 7188, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 59810, 27463, 11, 6049, 1669, 501, 52317, 4289, 1860, 2523, 9626, 27907, 340, 10366, 1860, 1957, 24911, 1669, 43143, 7121, 52317, 1860,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEndpointMultipleJoins(t *testing.T) { if !testutils.IsRunningInContainer() { defer testutils.SetupTestOSContext(t)() } n, err := createTestNetwork(bridgeNetType, "testmultiple", options.Generic{ netlabel.GenericData: options.Generic{ "BridgeName": "testmultiple", }, }, nil, nil) if err != nil { t.Fatal(err) } defer func() { if err := n.Delete(); err != nil { t.Fatal(err) } }() ep, err := n.CreateEndpoint("ep1") if err != nil { t.Fatal(err) } defer func() { if err := ep.Delete(); err != nil { t.Fatal(err) } }() sbx1, err := controller.NewSandbox(containerID, libnetwork.OptionHostname("test"), libnetwork.OptionDomainname("docker.io"), libnetwork.OptionExtraHost("web", "192.168.0.1")) defer func() { if err := sbx1.Delete(); err != nil { t.Fatal(err) } }() sbx2, err := controller.NewSandbox("c2") defer func() { if err := sbx2.Delete(); err != nil { t.Fatal(err) } runtime.LockOSThread() }() err = ep.Join(sbx1) runtime.LockOSThread() if err != nil { t.Fatal(err) } defer func() { err = ep.Leave(sbx1) runtime.LockOSThread() if err != nil { t.Fatal(err) } }() err = ep.Join(sbx2) if err == nil { t.Fatal("Expected to fail multiple joins for the same endpoint") } if _, ok := err.(types.ForbiddenError); !ok { t.Fatalf("Failed with unexpected error type: %T. Desc: %s", err, err.Error()) } }
explode_data.jsonl/6367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 606 }
[ 2830, 3393, 27380, 32089, 22493, 1330, 1155, 353, 8840, 836, 8, 341, 743, 753, 1944, 6031, 4506, 18990, 641, 4502, 368, 341, 197, 16867, 1273, 6031, 39820, 2271, 3126, 1972, 1155, 8, 741, 197, 630, 9038, 11, 1848, 1669, 1855, 2271, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestResourceSQLGlobalConfigCreateError(t *testing.T) { _, err := qa.ResourceFixture{ Resource: ResourceSQLGlobalConfig(), Create: true, Azure: true, State: map[string]interface{}{ "security_policy": "PASSTHROUGH", "instance_profile_arn": "arn:...", "data_access_config": map[string]interface{}{ "spark.sql.session.timeZone": "UTC", }, }, }.Apply(t) qa.AssertErrorStartsWith(t, err, "can't use instance_profile_arn outside of AWS") }
explode_data.jsonl/48785
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 4783, 6688, 11646, 2648, 4021, 1454, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 88496, 20766, 18930, 515, 197, 79487, 25, 11765, 6688, 11646, 2648, 3148, 197, 75569, 25, 256, 830, 345, 197, 22985, 50203, 25, 262, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetDetailHandler_handle_success(t *testing.T) { db := dbtest.Open(t) defer db.Close() conn := db.Open() defer conn.Close() ctx := context.Background() handler := GetDetailHandler{DB: conn} // step 1: insert test data into database const q = ` INSERT INTO accounts_kyc_status (stellar_address, callback_id, email_address, kyc_submitted_at, approved_at, pending_at, rejected_at, created_at) VALUES ('rejected-address', 'rejected-callback-id', 'xrejected@test.com', $1::timestamptz, NULL, NULL, $1::timestamptz, $4::timestamptz), ('pending-address', 'pending-callback-id', 'ypending@test.com', $2::timestamptz, NULL, $2::timestamptz, NULL, $4::timestamptz), ('approved-address', 'approved-callback-id', 'approved@test.com', $3::timestamptz, $3::timestamptz, NULL, NULL, $4::timestamptz) ` rejectedAt := time.Now().Add(-2 * time.Hour).UTC().Truncate(time.Second) pendingAt := time.Now().Add(-1 * time.Hour).UTC().Truncate(time.Second) approvedAt := time.Now().UTC().Truncate(time.Second) createdAt := time.Now().UTC().Truncate(time.Second) _, err := handler.DB.ExecContext(ctx, q, rejectedAt.Format(time.RFC3339), pendingAt.Format(time.RFC3339), approvedAt.Format(time.RFC3339), createdAt.Format(time.RFC3339)) require.NoError(t, err) // step 2.1: retrieve "rejected" entry with stellar address in := getDetailRequest{StellarAddressOrCallbackID: "rejected-address"} kycGetResp, err := handler.handle(ctx, in) require.NoError(t, err) wantKYCGetResponse := kycGetResponse{ StellarAddress: "rejected-address", CallbackID: "rejected-callback-id", EmailAddress: "xrejected@test.com", CreatedAt: &createdAt, KYCSubmittedAt: &rejectedAt, RejectedAt: &rejectedAt, PendingAt: nil, ApprovedAt: nil, } assert.Equal(t, &wantKYCGetResponse, kycGetResp) // step 2.2: retrieve "rejected" entry with callbackID in = getDetailRequest{StellarAddressOrCallbackID: "rejected-callback-id"} kycGetResp, err = handler.handle(ctx, in) require.NoError(t, err) assert.Equal(t, &wantKYCGetResponse, kycGetResp) // step 
3.1: retrieve "pending" entry with stellar address in = getDetailRequest{StellarAddressOrCallbackID: "pending-address"} kycGetResp, err = handler.handle(ctx, in) require.NoError(t, err) wantKYCGetResponse = kycGetResponse{ StellarAddress: "pending-address", CallbackID: "pending-callback-id", EmailAddress: "ypending@test.com", CreatedAt: &createdAt, KYCSubmittedAt: &pendingAt, RejectedAt: nil, PendingAt: &pendingAt, ApprovedAt: nil, } assert.Equal(t, &wantKYCGetResponse, kycGetResp) // step 3.2: retrieve "pending" entry with callbackID in = getDetailRequest{StellarAddressOrCallbackID: "pending-callback-id"} kycGetResp, err = handler.handle(ctx, in) require.NoError(t, err) assert.Equal(t, &wantKYCGetResponse, kycGetResp) // step 4.1: retrieve "approved" entry with stellar address in = getDetailRequest{StellarAddressOrCallbackID: "approved-address"} kycGetResp, err = handler.handle(ctx, in) require.NoError(t, err) wantKYCGetResponse = kycGetResponse{ StellarAddress: "approved-address", CallbackID: "approved-callback-id", EmailAddress: "approved@test.com", CreatedAt: &createdAt, KYCSubmittedAt: &approvedAt, RejectedAt: nil, PendingAt: nil, ApprovedAt: &approvedAt, } assert.Equal(t, &wantKYCGetResponse, kycGetResp) // step 4.2: retrieve "approved" entry with callbackID in = getDetailRequest{StellarAddressOrCallbackID: "approved-callback-id"} kycGetResp, err = handler.handle(ctx, in) require.NoError(t, err) assert.Equal(t, &wantKYCGetResponse, kycGetResp) }
explode_data.jsonl/7625
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1438 }
[ 2830, 3393, 1949, 10649, 3050, 10630, 18632, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 2927, 1944, 12953, 1155, 340, 16867, 2927, 10421, 741, 32917, 1669, 2927, 12953, 741, 16867, 4534, 10421, 741, 20985, 1669, 2266, 19047, 2822, 53326, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetPublicOptionsTrades(t *testing.T) { t.Parallel() // test optional params result, err := f.GetPublicOptionsTrades(context.Background(), time.Time{}, time.Time{}, "") if err != nil { t.Error(err) } if len(result) != 20 { t.Error("default limit should have returned 20 items") } tmNow := time.Now() result, err = f.GetPublicOptionsTrades(context.Background(), tmNow.AddDate(0, 0, -1), tmNow, "5") if err != nil { t.Error(err) } if len(result) != 5 { t.Error("limit of 5 should return 5 items") } _, err = f.GetPublicOptionsTrades(context.Background(), time.Unix(validFTTBTCEndTime, 0), time.Unix(validFTTBTCStartTime, 0), "5") if err != errStartTimeCannotBeAfterEndTime { t.Errorf("should have thrown errStartTimeCannotBeAfterEndTime, got %v", err) } }
explode_data.jsonl/15215
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 1949, 12676, 3798, 1282, 3452, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 322, 1273, 10101, 3628, 198, 9559, 11, 1848, 1669, 282, 2234, 12676, 3798, 1282, 3452, 5378, 19047, 3148, 197, 21957, 16299, 22655, 88...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestShouldCheckSHA512Password(t *testing.T) { ok, err := CheckPassword("password", "$6$rounds=50000$aFr56HjK3DrB8t3S$zhPQiS85cgBlNhUKKE6n/AHMlpqrvYSnSL3fEVkK0yHFQ.oFFAd8D4OhPAy18K5U61Z2eBhxQXExGU/eknXlY1") assert.NoError(t, err) assert.True(t, ok) }
explode_data.jsonl/40185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 14996, 3973, 33145, 20, 16, 17, 4876, 1155, 353, 8840, 836, 8, 341, 59268, 11, 1848, 1669, 4248, 4876, 445, 3833, 497, 5201, 21, 3, 1049, 82, 28, 20, 15, 15, 15, 15, 39562, 22560, 20, 21, 39, 73, 42, 18, 8847, 33, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBinanceCurrencyPairToBinancePair(t *testing.T) { cp := types.CurrencyPair{Base: "ATOM", Quote: "USDT"} binanceSymbol := currencyPairToBinanceTickerPair(cp) require.Equal(t, binanceSymbol, "atomusdt@ticker") }
explode_data.jsonl/24565
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 33, 24387, 26321, 12443, 1249, 33, 24387, 12443, 1155, 353, 8840, 836, 8, 341, 52018, 1669, 4494, 77186, 12443, 90, 3978, 25, 330, 77932, 497, 24535, 25, 330, 2034, 10599, 16707, 2233, 24387, 15090, 1669, 11413, 12443, 1249, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSetValueAtPathSuccess(t *testing.T) { var s Struct awsutil.SetValueAtPath(&s, "C", "test1") awsutil.SetValueAtPath(&s, "B.B.C", "test2") awsutil.SetValueAtPath(&s, "B.D.C", "test3") assert.Equal(t, "test1", s.C) assert.Equal(t, "test2", s.B.B.C) assert.Equal(t, "test3", s.B.D.C) awsutil.SetValueAtPath(&s, "B.*.C", "test0") assert.Equal(t, "test0", s.B.B.C) assert.Equal(t, "test0", s.B.D.C) var s2 Struct awsutil.SetValueAtPath(&s2, "b.b.c", "test0") assert.Equal(t, "test0", s2.B.B.C) awsutil.SetValueAtPath(&s2, "A", []Struct{{}}) assert.Equal(t, []Struct{{}}, s2.A) str := "foo" s3 := Struct{} awsutil.SetValueAtPath(&s3, "b.b.c", str) assert.Equal(t, "foo", s3.B.B.C) s3 = Struct{B: &Struct{B: &Struct{C: str}}} awsutil.SetValueAtPath(&s3, "b.b.c", nil) assert.Equal(t, "", s3.B.B.C) s3 = Struct{} awsutil.SetValueAtPath(&s3, "b.b.c", nil) assert.Equal(t, "", s3.B.B.C) s3 = Struct{} awsutil.SetValueAtPath(&s3, "b.b.c", &str) assert.Equal(t, "foo", s3.B.B.C) var s4 struct{ Name *string } awsutil.SetValueAtPath(&s4, "Name", str) assert.Equal(t, str, *s4.Name) s4 = struct{ Name *string }{} awsutil.SetValueAtPath(&s4, "Name", nil) assert.Equal(t, (*string)(nil), s4.Name) s4 = struct{ Name *string }{Name: &str} awsutil.SetValueAtPath(&s4, "Name", nil) assert.Equal(t, (*string)(nil), s4.Name) s4 = struct{ Name *string }{} awsutil.SetValueAtPath(&s4, "Name", &str) assert.Equal(t, str, *s4.Name) }
explode_data.jsonl/26252
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 724 }
[ 2830, 3393, 48068, 86422, 7188, 1155, 353, 8840, 836, 8, 341, 2405, 274, 16139, 198, 197, 8635, 1314, 42726, 86422, 2099, 82, 11, 330, 34, 497, 330, 1944, 16, 1138, 197, 8635, 1314, 42726, 86422, 2099, 82, 11, 330, 33, 1785, 727, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertibleTo(t *testing.T) { for _, test := range []struct { v, t Type want bool }{ {Typ[Int], Typ[Int], true}, {Typ[Int], Typ[Float32], true}, {Typ[Int], Typ[String], true}, {newDefined(Typ[Int]), Typ[Int], true}, {newDefined(new(Struct)), new(Struct), true}, {newDefined(Typ[Int]), new(Struct), false}, {Typ[UntypedInt], Typ[Int], true}, {NewSlice(Typ[Int]), NewPointer(NewArray(Typ[Int], 10)), true}, {NewSlice(Typ[Int]), NewArray(Typ[Int], 10), false}, {NewSlice(Typ[Int]), NewPointer(NewArray(Typ[Uint], 10)), false}, // Untyped string values are not permitted by the spec, so the behavior below is undefined. {Typ[UntypedString], Typ[String], true}, } { if got := ConvertibleTo(test.v, test.t); got != test.want { t.Errorf("ConvertibleTo(%v, %v) = %t, want %t", test.v, test.t, got, test.want) } } }
explode_data.jsonl/29390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 88816, 1249, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 5195, 11, 259, 3990, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 90, 12834, 36261, 1125, 17518, 36261, 1125, 830, 1583, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3