text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestStartCommandLineWithRightArgumentLength(t *testing.T) { os.Args = []string{ "bellt", "generate", "--t=123", "--f=456", } err := StartCommandLine(emptyOnlyOneCommand) if err != nil { t.Errorf("Error handling error: want %s, got %s", "nil", err.Error()) } }
explode_data.jsonl/30298
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 3479, 71885, 2354, 5979, 9171, 4373, 1155, 353, 8840, 836, 8, 341, 25078, 51015, 284, 3056, 917, 515, 197, 197, 1, 17250, 83, 756, 197, 197, 1, 19366, 756, 197, 197, 74757, 83, 28, 16, 17, 18, 756, 197, 197, 74757, 69,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTx_CreateBucketIfNotExists(t *testing.T) { db := MustOpenDB() defer db.MustClose() if err := db.Update(func(tx *bolt.Tx) error { // Create bucket. if b, err := tx.CreateBucketIfNotExists([]byte("widgets")); err != nil { t.Fatal(err) } else if b == nil { t.Fatal("expected bucket") } // Create bucket again. if b, err := tx.CreateBucketIfNotExists([]byte("widgets")); err != nil { t.Fatal(err) } else if b == nil { t.Fatal("expected bucket") } return nil }); err != nil { t.Fatal(err) } // Read the bucket through a separate transaction. if err := db.View(func(tx *bolt.Tx) error { if tx.Bucket([]byte("widgets")) == nil { t.Fatal("expected bucket") } return nil }); err != nil { t.Fatal(err) } }
explode_data.jsonl/1690
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 318 }
[ 2830, 3393, 31584, 34325, 36018, 2679, 2623, 15575, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 15465, 5002, 3506, 741, 16867, 2927, 50463, 7925, 741, 743, 1848, 1669, 2927, 16689, 18552, 27301, 353, 52433, 81362, 8, 1465, 341, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_templateClient_GetFromConfigMap(t *testing.T) { g := NewWithT(t) configClient, err := config.New("", config.InjectReader(test.NewFakeReader())) g.Expect(err).NotTo(HaveOccurred()) configMap := &corev1.ConfigMap{ TypeMeta: metav1.TypeMeta{ Kind: "ConfigMap", APIVersion: "v1", }, ObjectMeta: metav1.ObjectMeta{ Namespace: "ns1", Name: "my-template", }, Data: map[string]string{ "prod": template, }, } type fields struct { proxy Proxy configClient config.Client } type args struct { configMapNamespace string configMapName string configMapDataKey string targetNamespace string skipTemplateProcess bool } tests := []struct { name string fields fields args args want string wantErr bool }{ { name: "Return template", fields: fields{ proxy: test.NewFakeProxy().WithObjs(configMap), configClient: configClient, }, args: args{ configMapNamespace: "ns1", configMapName: "my-template", configMapDataKey: "prod", targetNamespace: "", skipTemplateProcess: false, }, want: template, wantErr: false, }, { name: "Config map does not exists", fields: fields{ proxy: test.NewFakeProxy().WithObjs(configMap), configClient: configClient, }, args: args{ configMapNamespace: "ns1", configMapName: "something-else", configMapDataKey: "prod", targetNamespace: "", skipTemplateProcess: false, }, want: "", wantErr: true, }, { name: "Config map key does not exists", fields: fields{ proxy: test.NewFakeProxy().WithObjs(configMap), configClient: configClient, }, args: args{ configMapNamespace: "ns1", configMapName: "my-template", configMapDataKey: "something-else", targetNamespace: "", skipTemplateProcess: false, }, want: "", wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { g := NewWithT(t) processor := yaml.NewSimpleProcessor() tc := newTemplateClient(TemplateClientInput{tt.fields.proxy, tt.fields.configClient, processor}) got, err := tc.GetFromConfigMap(tt.args.configMapNamespace, tt.args.configMapName, tt.args.configMapDataKey, tt.args.targetNamespace, 
tt.args.skipTemplateProcess) if tt.wantErr { g.Expect(err).To(HaveOccurred()) return } g.Expect(err).NotTo(HaveOccurred()) wantTemplate, err := repository.NewTemplate(repository.TemplateInput{ RawArtifact: []byte(tt.want), ConfigVariablesClient: configClient.Variables(), Processor: processor, TargetNamespace: tt.args.targetNamespace, SkipTemplateProcess: tt.args.skipTemplateProcess, }) g.Expect(err).NotTo(HaveOccurred()) g.Expect(got).To(Equal(wantTemplate)) }) } }
explode_data.jsonl/68531
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1276 }
[ 2830, 3393, 8693, 2959, 13614, 3830, 2648, 2227, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 25873, 2959, 11, 1848, 1669, 2193, 7121, 19814, 2193, 41046, 5062, 8623, 7121, 52317, 5062, 12145, 3174, 81893, 3964, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDelBlock(t *testing.T) { q, mem := initEnv(0) defer q.Close() defer mem.Close() delBlock := blk var blockDetail = &types.BlockDetail{Block: delBlock} mem.setHeader(&types.Header{Height: 2, BlockTime: 1e9 + 1}) msg1 := mem.client.NewMessage("mempool", types.EventDelBlock, blockDetail) mem.client.Send(msg1, true) msg2 := mem.client.NewMessage("mempool", types.EventGetMempoolSize, nil) mem.client.Send(msg2, true) reply, err := mem.client.Wait(msg2) if err != nil { t.Error(err) return } size := reply.GetData().(*types.MempoolSize).Size if size != 2 { t.Error("TestDelBlock failed") } }
explode_data.jsonl/16836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 16532, 4713, 1155, 353, 8840, 836, 8, 341, 18534, 11, 1833, 1669, 2930, 14359, 7, 15, 340, 16867, 2804, 10421, 741, 16867, 1833, 10421, 741, 69509, 4713, 1669, 40643, 198, 2405, 2504, 10649, 284, 609, 9242, 28477, 10649, 90,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCheckBranchName(t *testing.T) { ghp, repoPath, err := getTestGitObjectPusher() if repoPath != "" { defer os.RemoveAll(repoPath) } require.Nil(t, err) sampleBaranches := []struct { branchName string valid bool }{ {"release-1.20", true}, // Valid name {"release-chorizo", false}, // Invalid, not a semver {"1.20", false}, // Invalid, has to start with release } for _, testCase := range sampleBaranches { if testCase.valid { require.Nil(t, ghp.checkBranchName(testCase.branchName)) } else { require.NotNil(t, ghp.checkBranchName(testCase.branchName)) } } }
explode_data.jsonl/59772
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 3973, 18197, 675, 1155, 353, 8840, 836, 8, 341, 197, 866, 79, 11, 15867, 1820, 11, 1848, 1669, 633, 2271, 46562, 1190, 16644, 261, 741, 743, 15867, 1820, 961, 1591, 341, 197, 16867, 2643, 84427, 50608, 1820, 340, 197, 532,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestLogsPathMatcher_ValidVarLogPod(t *testing.T) { cfgLogsPath := "/var/log/pods/" cfgResourceType := "pod" sourcePath := "/var/log/pods/namespace_pod-name_%s/container/0.log" if runtime.GOOS == "windows" { cfgLogsPath = "C:\\var\\log\\pods\\" sourcePath = "C:\\var\\log\\pods\\namespace_pod-name_%s\\container\\0.log" } source := fmt.Sprintf(sourcePath, puid) expectedResult := puid executeTestWithResourceType(t, cfgLogsPath, cfgResourceType, source, expectedResult) }
explode_data.jsonl/34426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 51053, 1820, 37554, 97279, 3962, 2201, 23527, 1155, 353, 8840, 836, 8, 341, 50286, 51053, 1820, 1669, 3521, 947, 19413, 4322, 29697, 29555, 50286, 4783, 929, 1669, 330, 39073, 698, 47418, 1820, 1669, 3521, 947, 19413, 4322, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIssue20647(t *testing.T) { db := newTestDB(t, "people") defer closeDB(t, db) ctx, cancel := context.WithCancel(context.Background()) defer cancel() conn, err := db.Conn(ctx) if err != nil { t.Fatal(err) } conn.dc.ci.(*fakeConn).skipDirtySession = true defer conn.Close() stmt, err := conn.PrepareContext(ctx, "SELECT|people|name|") if err != nil { t.Fatal(err) } defer stmt.Close() rows1, err := stmt.QueryContext(ctx) if err != nil { t.Fatal("rows1", err) } defer rows1.Close() rows2, err := stmt.QueryContext(ctx) if err != nil { t.Fatal("rows2", err) } defer rows2.Close() if rows1.dc != rows2.dc { t.Fatal("stmt prepared on Conn does not use same connection") } }
explode_data.jsonl/16026
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 42006, 17, 15, 21, 19, 22, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 330, 16069, 1138, 16867, 3265, 3506, 1155, 11, 2927, 692, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestJudge(t *testing.T) { genA := newGen(factorGenA, testInputGenA, 1) genB := newGen(factorGenB, testInputGenB, 1) assert.Equal(t, 1, judge(genA, genB, 5)) genA = newGen(factorGenA, testInputGenA, 1) genB = newGen(factorGenB, testInputGenB, 1) assert.Equal(t, 588, judge(genA, genB, 40000000)) genA = newGen(factorGenA, testInputGenA, multiplesGenA) genB = newGen(factorGenB, testInputGenB, multiplesGenB) assert.Equal(t, 1, judge(genA, genB, 1056)) genA = newGen(factorGenA, testInputGenA, multiplesGenA) genB = newGen(factorGenB, testInputGenB, multiplesGenB) assert.Equal(t, 309, judge(genA, genB, 5000000)) }
explode_data.jsonl/56729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 60256, 1155, 353, 8840, 836, 8, 341, 82281, 32, 1669, 501, 9967, 95299, 9967, 32, 11, 1273, 2505, 9967, 32, 11, 220, 16, 340, 82281, 33, 1669, 501, 9967, 95299, 9967, 33, 11, 1273, 2505, 9967, 33, 11, 220, 16, 340, 694...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHTTPTransaction(t *testing.T) { tracer, apmtracer, recorder := newTestTracer() defer apmtracer.Close() span := tracer.StartSpan("name") ext.HTTPUrl.Set(span, "/foo?bar=baz") ext.HTTPMethod.Set(span, "POST") ext.HTTPStatusCode.Set(span, 404) span.Finish() apmtracer.Flush(nil) payloads := recorder.Payloads() require.Len(t, payloads.Transactions, 1) transaction := payloads.Transactions[0] assert.Equal(t, "request", transaction.Type) assert.Equal(t, "HTTP 4xx", transaction.Result) assert.Equal(t, &model.Request{ Method: "POST", HTTPVersion: "1.1", URL: model.URL{ Protocol: "http", Path: "/foo", Search: "bar=baz", }, }, transaction.Context.Request) }
explode_data.jsonl/28590
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 9230, 8070, 1155, 353, 8840, 836, 8, 341, 25583, 9584, 11, 1443, 76, 94941, 11, 48835, 1669, 501, 2271, 1282, 9584, 741, 16867, 1443, 76, 94941, 10421, 2822, 197, 1480, 1669, 64306, 12101, 12485, 445, 606, 1138, 95450, 27358...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReverseBytesTo(t *testing.T) { assert := assert.New(t) table := []string{ "carbon.agents.carbon-clickhouse.graphite1.tcp.metricsReceived", "", ".", "carbon..xx", ".hello..world.", } for i := 0; i < len(table); i++ { x := []byte(table[i]) y := make([]byte, len(table[i])) z := reverseBytesOriginal(x) ReverseBytesTo(y, x) assert.Equal(string(z), string(y)) } }
explode_data.jsonl/32219
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 45695, 7078, 1249, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 26481, 1669, 3056, 917, 515, 197, 197, 1, 73341, 28092, 805, 26890, 5970, 28118, 7675, 10763, 632, 16, 85253, 35359, 23260, 756, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLessThan256Range(t *testing.T) { bufSize := 5 readerSize := 10 buf := make([]byte, bufSize) r := NewFakeReader(0, readerSize) n, err := r.Read(buf) if err != nil { t.Errorf("[TestLessThan256Range] Read(buf) should not return an error, got %s", err) } if n != bufSize { t.Errorf("[TestLessThan256Range] expected n:%d, got %d", bufSize, n) } // Check if all returned bytes match what is expected (a range from 0 to 4) ctr := byte(0) for _, v := range buf { if v != ctr { t.Errorf("[TestLessThan256Range] returned bytes expected to be %d at position %d, got %d", ctr, ctr, v) } ctr++ } // Check that we get io.EOF if we read more _, err = r.Read(buf) // Read the remaining 5 bytes if err != nil { t.Errorf("[TestLessThan256Range] Read(buf) should not return an error, got %s", err) } n, err = r.Read(buf) // Read one too many if err == nil { t.Errorf("[TestLessThan256Range] Read(buf) should return an io.EOF error, got nil and n:%d", n) } }
explode_data.jsonl/48667
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 27451, 26067, 17, 20, 21, 6046, 1155, 353, 8840, 836, 8, 341, 26398, 1695, 1669, 220, 20, 198, 61477, 1695, 1669, 220, 16, 15, 271, 26398, 1669, 1281, 10556, 3782, 11, 6607, 1695, 340, 7000, 1669, 1532, 52317, 5062, 7, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetJobRunInfos_ReturnStartTimeForCurrentAssociatedCluster(t *testing.T) { withRepository(func(r *RedisJobRepository) { leasedJob1 := addLeasedJob(t, r, "queue1", "cluster1") startTime := time.Now() plusOneHour := startTime.Add(time.Hour) jobErrors, err := r.UpdateStartTime([]*JobStartInfo{{ JobId: leasedJob1.Id, ClusterId: "cluster2", StartTime: startTime, }}) AssertUpdateStartTimeNoErrors(t, jobErrors, err) jobErrors, err = r.UpdateStartTime([]*JobStartInfo{{ JobId: leasedJob1.Id, ClusterId: "cluster1", StartTime: plusOneHour, }}) AssertUpdateStartTimeNoErrors(t, jobErrors, err) runInfos, err := r.GetJobRunInfos([]string{leasedJob1.Id}) assert.NoError(t, err) assert.Len(t, runInfos, 1) assert.Equal(t, plusOneHour.UTC(), runInfos[leasedJob1.Id].StartTime.UTC()) }) }
explode_data.jsonl/32061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 1949, 12245, 6727, 38059, 53316, 40203, 2461, 5405, 52969, 28678, 1155, 353, 8840, 836, 8, 341, 46948, 4624, 18552, 2601, 353, 48137, 12245, 4624, 8, 341, 197, 197, 4673, 12245, 16, 1669, 912, 2304, 1475, 12245, 1155, 11, 43...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_mtpAuthorization_deserialize(t *testing.T) { testData := []uint8{ 0x00, 0x00, 0x00, 0x4b, // dbiMtpAuthorization 0x00, 0x00, 0x05, 0x30, // mainLength 0xff, 0xff, 0xff, 0xff, // legacyUserId 0xff, 0xff, 0xff, 0xff, // legacyMainDcId 0x00, 0x00, 0x00, 0x00, 0x12, 0x73, 0xab, 0x45, // UserID = 309570373 0x00, 0x00, 0x00, 0x02, // DC = 2 0x00, 0x00, 0x00, 0x05, // 5 keys. } maxCut := len(testData) + 16 for i := byte(0); i < 5; i++ { testData = append(testData, 0, 0, 0, i) // DC ID as BigEndian uint32 key := bytes.Repeat([]byte{i}, 256) testData = append(testData, key...) } t.Run("OK", func(t *testing.T) { a := require.New(t) var m MTPAuthorization a.NoError(m.deserialize(&qtReader{buf: bin.Buffer{Buf: testData}})) a.Equal(uint64(309570373), m.UserID) a.Equal(2, m.MainDC) a.Len(m.Keys, 5) for i := 0; i < 5; i++ { a.Equal(m.Keys[i][0], uint8(i)) } }) t.Run("WrongID", func(t *testing.T) { a := require.New(t) var m MTPAuthorization a.Error(m.deserialize(&qtReader{buf: bin.Buffer{Buf: make([]byte, 4)}})) }) for i := 0; i < maxCut; i += 4 { t.Run(fmt.Sprintf("EOFAfter%d", i), func(t *testing.T) { a := require.New(t) var m MTPAuthorization a.Error(m.deserialize(&qtReader{buf: bin.Buffer{Buf: testData[:i]}})) }) } }
explode_data.jsonl/4113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 644 }
[ 2830, 3393, 717, 790, 18124, 15768, 9050, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 3056, 2496, 23, 515, 197, 197, 15, 87, 15, 15, 11, 220, 15, 87, 15, 15, 11, 220, 15, 87, 15, 15, 11, 220, 15, 87, 19, 65, 11, 442, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRegisterCustomerHandler(t *testing.T) { hf := New("/test") hf.RegisterHandler("POST", "/verify", static.New("verify")) r, _ := http.NewRequest("POST", "http://www.example.com/test/verify", nil) h := hf.FindHandler(r) err := AssertThat(h, NotNilValue()) if err != nil { t.Fatal(err) } resp := mock.NewHttpResponseWriterMock() h.ServeHTTP(resp, r) err = AssertThat(resp.String(), Is("verify")) if err != nil { t.Fatal(err) } }
explode_data.jsonl/10019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 8690, 12792, 3050, 1155, 353, 8840, 836, 8, 341, 9598, 69, 1669, 1532, 4283, 1944, 1138, 9598, 69, 19983, 3050, 445, 2946, 497, 3521, 12446, 497, 1099, 7121, 445, 12446, 5455, 7000, 11, 716, 1669, 1758, 75274, 445, 2946, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetChannelByName(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() defer th.TearDown() Client := th.Client channel, resp := Client.GetChannelByName(th.BasicChannel.Name, th.BasicTeam.Id, "") CheckNoError(t, resp) if channel.Name != th.BasicChannel.Name { t.Fatal("names did not match") } channel, resp = Client.GetChannelByName(th.BasicPrivateChannel.Name, th.BasicTeam.Id, "") CheckNoError(t, resp) if channel.Name != th.BasicPrivateChannel.Name { t.Fatal("names did not match") } Client.RemoveUserFromChannel(th.BasicChannel.Id, th.BasicUser.Id) _, resp = Client.GetChannelByName(th.BasicChannel.Name, th.BasicTeam.Id, "") CheckNoError(t, resp) Client.RemoveUserFromChannel(th.BasicPrivateChannel.Id, th.BasicUser.Id) _, resp = Client.GetChannelByName(th.BasicPrivateChannel.Name, th.BasicTeam.Id, "") CheckForbiddenStatus(t, resp) _, resp = Client.GetChannelByName(GenerateTestChannelName(), th.BasicTeam.Id, "") CheckNotFoundStatus(t, resp) _, resp = Client.GetChannelByName(GenerateTestChannelName(), "junk", "") CheckBadRequestStatus(t, resp) Client.Logout() _, resp = Client.GetChannelByName(th.BasicChannel.Name, th.BasicTeam.Id, "") CheckUnauthorizedStatus(t, resp) user := th.CreateUser() Client.Login(user.Email, user.Password) _, resp = Client.GetChannelByName(th.BasicChannel.Name, th.BasicTeam.Id, "") CheckForbiddenStatus(t, resp) _, resp = th.SystemAdminClient.GetChannelByName(th.BasicChannel.Name, th.BasicTeam.Id, "") CheckNoError(t, resp) }
explode_data.jsonl/65655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 543 }
[ 2830, 3393, 1949, 9629, 16898, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 71550, 11, 9039, 1669, 8423, 2234, 9629, 168...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSet_Pops(t *testing.T) { gtest.C(t, func(t *gtest.T) { s := gset.New(true) s.Add(1, 2, 3, 4) t.Assert(s.Size(), 4) t.Assert(s.Pops(0), nil) t.AssertIN(s.Pops(1), []int{1, 2, 3, 4}) t.Assert(s.Size(), 3) a := s.Pops(6) t.Assert(len(a), 3) t.AssertIN(a, []int{1, 2, 3, 4}) t.Assert(s.Size(), 0) }) gtest.C(t, func(t *gtest.T) { s := gset.New(true) a := []interface{}{1, 2, 3, 4} s.Add(a...) t.Assert(s.Size(), 4) t.Assert(s.Pops(-2), nil) t.AssertIN(s.Pops(-1), a) }) }
explode_data.jsonl/34395
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 308 }
[ 2830, 3393, 1649, 1088, 3721, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 1903, 1669, 342, 746, 7121, 3715, 340, 197, 1903, 1904, 7, 16, 11, 220, 17, 11, 220, 18, 11, 220, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPublishedAuthMiddleware(t *testing.T) { tests := []struct { Name string PublishedName string BasicAuthUsername string BasicAuthPassword string Error error }{ { Name: "empty name", }, { Name: "not found", PublishedName: "aaa", }, { Name: "no auth", PublishedName: "inactive", }, { Name: "auth", PublishedName: "active", Error: echo.ErrUnauthorized, }, { Name: "auth with invalid credentials", PublishedName: "active", BasicAuthUsername: "aaa", BasicAuthPassword: "bbb", Error: echo.ErrUnauthorized, }, { Name: "auth with valid credentials", PublishedName: "active", BasicAuthUsername: "fooo", BasicAuthPassword: "baar", }, } for _, tc := range tests { tc := tc t.Run(tc.Name, func(t *testing.T) { t.Parallel() assert := assert.New(t) req := httptest.NewRequest(http.MethodGet, "/", nil) if tc.BasicAuthUsername != "" { req.Header.Set(echo.HeaderAuthorization, "basic "+base64.StdEncoding.EncodeToString([]byte(tc.BasicAuthUsername+":"+tc.BasicAuthPassword))) } res := httptest.NewRecorder() e := echo.New() c := e.NewContext(req, res) c.SetParamNames("name") c.SetParamValues(tc.PublishedName) m := mockPublishedUsecaseMiddleware(false) err := m(PublishedAuthMiddleware()(func(c echo.Context) error { return c.String(http.StatusOK, "test") }))(c) if tc.Error == nil { assert.NoError(err) assert.Equal(http.StatusOK, res.Code) assert.Equal("test", res.Body.String()) } else { assert.ErrorIs(err, tc.Error) } }) } }
explode_data.jsonl/36777
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 787 }
[ 2830, 3393, 28886, 5087, 24684, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 21297, 1060, 914, 198, 197, 10025, 11669, 675, 257, 914, 198, 197, 12791, 5971, 5087, 11115, 914, 198, 197, 12791, 5971, 5087, 4876, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDB_Inc(t *testing.T) { defer leaktest.AfterTest(t)() s, db := setup(t) defer s.Stopper().Stop() ctx := context.TODO() if _, err := db.Inc(ctx, "aa", 100); err != nil { t.Fatal(err) } result, err := db.Get(ctx, "aa") if err != nil { t.Fatal(err) } checkIntResult(t, 100, result.ValueInt()) }
explode_data.jsonl/20019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 3506, 25972, 66, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 1903, 11, 2927, 1669, 6505, 1155, 340, 16867, 274, 7758, 18487, 1005, 10674, 741, 20985, 1669, 2266, 90988, 2822, 743, 8358, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFSRedirect(t *testing.T) { defer afterTest(t) ts := httptest.NewServer(StripPrefix("/test", FileServer(Dir(".")))) defer ts.Close() for _, data := range fsRedirectTestData { res, err := Get(ts.URL + data.original) if err != nil { t.Fatal(err) } res.Body.Close() if g, e := res.Request.URL.Path, data.redirect; g != e { t.Errorf("redirect from %s: got %s, want %s", data.original, g, e) } } }
explode_data.jsonl/48225
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 8485, 17725, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 57441, 1669, 54320, 70334, 7121, 5475, 7, 5901, 14335, 4283, 1944, 497, 2887, 5475, 5432, 404, 73133, 22788, 16867, 10591, 10421, 2822, 2023, 8358, 821, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRangeDeleteParams_WithID(t *testing.T) { p := NewRangeDeleteParams() p = p.WithID("test-id") require.NotNil(t, p.ID) assert.Equal(t, "test-id", p.ID) }
explode_data.jsonl/38294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 6046, 6435, 4870, 62, 2354, 915, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 6046, 6435, 4870, 741, 3223, 284, 281, 26124, 915, 445, 1944, 12897, 1138, 17957, 93882, 1155, 11, 281, 9910, 340, 6948, 12808, 1155, 11, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSampleDeliveryOrder(t *testing.T) { ts := 10 n := config.DefaultQueueConfig.MaxSamplesPerSend * ts samples := make([]tsdb.RefSample, 0, n) series := make([]tsdb.RefSeries, 0, n) for i := 0; i < n; i++ { name := fmt.Sprintf("test_metric_%d", i%ts) samples = append(samples, tsdb.RefSample{ Ref: uint64(i), T: int64(i), V: float64(i), }) series = append(series, tsdb.RefSeries{ Ref: uint64(i), Labels: tsdbLabels.Labels{tsdbLabels.Label{Name: "__name__", Value: name}}, }) } c := NewTestStorageClient() c.expectSamples(samples, series) dir, err := ioutil.TempDir("", "TestSampleDeliveryOrder") testutil.Ok(t, err) defer os.RemoveAll(dir) m := NewQueueManager(nil, dir, newEWMARate(ewmaWeight, shardUpdateDuration), config.DefaultQueueConfig, nil, nil, c, defaultFlushDeadline) m.StoreSeries(series, 0) m.Start() defer m.Stop() // These should be received by the client. m.Append(samples) c.waitForExpectedSamples(t) }
explode_data.jsonl/1307
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 404 }
[ 2830, 3393, 17571, 38121, 4431, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 220, 16, 15, 198, 9038, 1669, 2193, 13275, 7554, 2648, 14535, 39571, 3889, 11505, 353, 10591, 198, 1903, 4023, 1669, 1281, 10556, 2576, 1999, 18369, 17571, 11, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStartServer(t *testing.T) { certDir := test.PathToDir(t, "server") tlsCert := filepath.Join(certDir, "tls.crt") tlsKey := filepath.Join(certDir, "tls.key") policyDir := test.PathToDir(t, "store") tempDir := t.TempDir() check := func(s *testutil.ServerInfo) (bool, error) { ctx, cancelFunc := context.WithTimeout(context.Background(), 1*time.Second) defer cancelFunc() return s.IsReady(ctx) } testCases := []struct { name string opt testutil.ServerOpt }{ {name: "None"}, {name: "TLS", opt: testutil.WithTLSCertAndKey(tlsCert, tlsKey)}, {name: "Policy Dir", opt: testutil.WithPolicyRepositoryDirectory(policyDir)}, {name: "UDS gRPC", opt: testutil.WithGRPCListenAddr(fmt.Sprintf("unix:%s", filepath.Join(tempDir, "grpc.sock")))}, {name: "UDS HTTP", opt: testutil.WithHTTPListenAddr(fmt.Sprintf("unix:%s", filepath.Join(tempDir, "http.sock")))}, {name: "Admin API", opt: testutil.WithAdminAPI("test", "test")}, {name: "Config Reader", opt: testutil.WithConfig(strings.NewReader(configYAML))}, } for _, tc := range testCases { tc := tc t.Run(tc.name, func(t *testing.T) { s, err := testutil.StartCerbosServer(tc.opt) require.NoError(t, err) defer s.Stop() //nolint:errcheck var ready bool for i := 0; i < 5; i++ { ready, err = check(s) if ready { break } if i < 4 { sleepTime := time.Duration(100*(i+1)) * time.Millisecond time.Sleep(sleepTime) } } require.NoError(t, err) require.True(t, ready) }) } }
explode_data.jsonl/72962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 660 }
[ 2830, 3393, 3479, 5475, 1155, 353, 8840, 836, 8, 341, 1444, 529, 6184, 1669, 1273, 17474, 1249, 6184, 1155, 11, 330, 4030, 1138, 3244, 4730, 36934, 1669, 26054, 22363, 87793, 6184, 11, 330, 34488, 93869, 1138, 3244, 4730, 1592, 1669, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSortConditions(t *testing.T) { for _, p := range permutations { permuted := make([]apis.Condition, len(someConditions)) for i, j := range p { permuted[i] = someConditions[j] } sorted := sortConditions(permuted) assert.DeepEqual(t, sorted, someConditions) } }
explode_data.jsonl/36446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 10231, 35435, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 281, 1669, 2088, 71949, 341, 197, 197, 19913, 2774, 1669, 1281, 10556, 13725, 75134, 11, 2422, 1141, 635, 35435, 1171, 197, 2023, 600, 11, 502, 1669, 2088, 281, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_UpdateManager_UpdateStatus(t *testing.T) { model := NewMemoryModel() manager := newUpdateManager(model) sa := model.StopAreas().New() sa.Name = "Parent" sa.Save() sa2 := model.StopAreas().New() sa2.Name = "Son" sa2.ParentId = sa.id sa2.Save() sa3 := model.StopAreas().New() sa3.Name = "Grandson" sa3.ParentId = sa2.id sa3.Save() event := NewStatusUpdateEvent(sa3.Id(), "test_origin", true) manager.Update(event) stopArea, _ := model.StopAreas().Find(sa.Id()) if status, ok := stopArea.Origins.Origin("test_origin"); !ok || !status { t.Errorf("Parent StopArea status should have been updated, got found origin: %v and status: %v", ok, status) } stopArea2, _ := model.StopAreas().Find(sa2.Id()) if status, ok := stopArea2.Origins.Origin("test_origin"); !ok || !status { t.Errorf("StopArea status should have been updated, got found origin: %v and status: %v", ok, status) } stopArea3, _ := model.StopAreas().Find(sa3.Id()) if status, ok := stopArea3.Origins.Origin("test_origin"); !ok || !status { t.Errorf("StopArea status should have been updated, got found origin: %v and status: %v", ok, status) } }
explode_data.jsonl/49837
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 420 }
[ 2830, 3393, 47393, 2043, 47393, 2522, 1155, 353, 8840, 836, 8, 341, 19727, 1669, 1532, 10642, 1712, 741, 92272, 1669, 501, 4289, 2043, 7635, 692, 1903, 64, 1669, 1614, 30213, 71237, 1005, 3564, 741, 1903, 64, 2967, 284, 330, 8387, 698, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestStringMap walks StringMap through its full mutation API — Get, Insert,
// Update, Upsert, Delete — comparing against a reference map after each step.
// Insert of an existing key and Update of a missing key are expected no-ops;
// Upsert both inserts and overwrites; Delete reports whether a key existed.
func TestStringMap(t *testing.T) {
	origRawMap := map[string]string{"k0": "v0", "k1": "v1", "k2": "v2"}
	origMap := NewStringMap().InitFromMap(origRawMap)
	sm := NewStringMap().InitFromMap(origRawMap)
	assert.EqualValues(t, 3, sm.Len())

	// Lookup of an existing and a missing key.
	val, exist := sm.Get("k2")
	assert.True(t, exist)
	assert.EqualValues(t, "v2", val.Value())
	val, exist = sm.Get("k3")
	assert.False(t, exist)
	assert.EqualValues(t, StringValue{nil}, val)

	// Insert with an existing key leaves the map unchanged.
	sm.Insert("k1", "v1")
	assert.EqualValues(t, origMap.Sort(), sm.Sort())
	// Insert with a new key adds it.
	sm.Insert("k3", "v3")
	assert.EqualValues(t, 4, sm.Len())
	assert.EqualValues(t, NewStringMap().InitFromMap(map[string]string{"k0": "v0", "k1": "v1", "k2": "v2", "k3": "v3"}).Sort(), sm.Sort())
	assert.True(t, sm.Delete("k3"))
	assert.EqualValues(t, 3, sm.Len())
	assert.EqualValues(t, origMap.Sort(), sm.Sort())

	// Update of a missing key is a no-op; of an existing key it overwrites.
	sm.Update("k3", "v3")
	assert.EqualValues(t, 3, sm.Len())
	assert.EqualValues(t, origMap.Sort(), sm.Sort())
	sm.Update("k2", "v3")
	assert.EqualValues(t, 3, sm.Len())
	assert.EqualValues(t, NewStringMap().InitFromMap(map[string]string{"k0": "v0", "k1": "v1", "k2": "v3"}).Sort(), sm.Sort())
	sm.Update("k2", "v2")
	assert.EqualValues(t, 3, sm.Len())
	assert.EqualValues(t, origMap.Sort(), sm.Sort())

	// Upsert inserts missing keys and overwrites existing ones.
	sm.Upsert("k3", "v3")
	assert.EqualValues(t, 4, sm.Len())
	assert.EqualValues(t, NewStringMap().InitFromMap(map[string]string{"k0": "v0", "k1": "v1", "k2": "v2", "k3": "v3"}).Sort(), sm.Sort())
	sm.Upsert("k1", "v5")
	assert.EqualValues(t, 4, sm.Len())
	assert.EqualValues(t, NewStringMap().InitFromMap(map[string]string{"k0": "v0", "k1": "v5", "k2": "v2", "k3": "v3"}).Sort(), sm.Sort())
	sm.Upsert("k1", "v1")
	assert.EqualValues(t, 4, sm.Len())
	assert.EqualValues(t, NewStringMap().InitFromMap(map[string]string{"k0": "v0", "k1": "v1", "k2": "v2", "k3": "v3"}).Sort(), sm.Sort())

	// Delete returns true only the first time a key is removed.
	assert.True(t, sm.Delete("k3"))
	assert.EqualValues(t, 3, sm.Len())
	assert.EqualValues(t, origMap.Sort(), sm.Sort())
	assert.EqualValues(t, false, sm.Delete("k3"))
	assert.EqualValues(t, 3, sm.Len())
	assert.EqualValues(t, origMap.Sort(), sm.Sort())
	assert.True(t, sm.Delete("k0"))
	assert.EqualValues(t, 2, sm.Len())
	assert.EqualValues(t, NewStringMap().InitFromMap(map[string]string{"k1": "v1", "k2": "v2"}).Sort(), sm.Sort())
	assert.True(t, sm.Delete("k2"))
	assert.EqualValues(t, 1, sm.Len())
	assert.EqualValues(t, NewStringMap().InitFromMap(map[string]string{"k1": "v1"}).Sort(), sm.Sort())
	assert.True(t, sm.Delete("k1"))
	assert.EqualValues(t, 0, sm.Len())
}
explode_data.jsonl/38207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1091 }
[ 2830, 3393, 703, 2227, 1155, 353, 8840, 836, 8, 341, 197, 4670, 20015, 2227, 1669, 2415, 14032, 30953, 4913, 74, 15, 788, 330, 85, 15, 497, 330, 74, 16, 788, 330, 85, 16, 497, 330, 74, 17, 788, 330, 85, 17, 16707, 197, 4670, 222...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestComputePullFromWhere exercises defaultConsumer.computePullFromWhere for
// a clustering push consumer. A stored offset (>= 0) is used directly;
// otherwise the result depends on the configured ConsumeFromWhere strategy:
// last offset (queried from the broker), first offset (0), or an offset
// resolved from a timestamp via the broker.
func TestComputePullFromWhere(t *testing.T) {
	Convey("Given a defaultConsumer", t, func() {
		dc := &defaultConsumer{
			model: Clustering,
			cType: _PushConsume,
		}
		ctrl := gomock.NewController(t)
		defer ctrl.Finish()
		offsetStore := NewMockOffsetStore(ctrl)
		dc.storage = offsetStore
		mq := &primitive.MessageQueue{
			Topic: "test",
		}
		namesrvCli := internal.NewMockNamesrvs(ctrl)
		dc.namesrv = namesrvCli
		rmqCli := internal.NewMockRMQClient(ctrl)
		dc.client = rmqCli

		Convey("get effective offset", func() {
			// A stored offset wins regardless of strategy.
			offsetStore.EXPECT().read(gomock.Any(), gomock.Any()).Return(int64(10))
			res := dc.computePullFromWhere(mq)
			assert.Equal(t, int64(10), res)
		})

		Convey("ConsumeFromLastOffset for normal topic", func() {
			// No stored offset (-1): the max offset is fetched from the broker.
			offsetStore.EXPECT().read(gomock.Any(), gomock.Any()).Return(int64(-1))
			dc.option.FromWhere = ConsumeFromLastOffset
			broker := "a"
			namesrvCli.EXPECT().FindBrokerAddrByName(gomock.Any()).Return(broker)
			rmqCli.EXPECT().InvokeSync(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).
				Return(&remote.RemotingCommand{
					ExtFields: map[string]string{
						"offset": "20",
					},
				}, nil)
			res := dc.computePullFromWhere(mq)
			assert.Equal(t, int64(20), res)
		})

		Convey("ConsumeFromFirstOffset for normal topic", func() {
			// No stored offset: first-offset strategy starts from 0.
			offsetStore.EXPECT().read(gomock.Any(), gomock.Any()).Return(int64(-1))
			dc.option.FromWhere = ConsumeFromFirstOffset
			res := dc.computePullFromWhere(mq)
			assert.Equal(t, int64(0), res)
		})

		Convey("ConsumeFromTimestamp for normal topic", func() {
			// No stored offset: the offset is searched by timestamp via the broker.
			offsetStore.EXPECT().read(gomock.Any(), gomock.Any()).Return(int64(-1))
			dc.option.FromWhere = ConsumeFromTimestamp
			dc.option.ConsumeTimestamp = "20060102150405"
			broker := "a"
			namesrvCli.EXPECT().FindBrokerAddrByName(gomock.Any()).Return(broker)
			rmqCli.EXPECT().InvokeSync(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()).
				Return(&remote.RemotingCommand{
					ExtFields: map[string]string{
						"offset": "30",
					},
				}, nil)
			res := dc.computePullFromWhere(mq)
			assert.Equal(t, int64(30), res)
		})
	})
}
explode_data.jsonl/16331
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 900 }
[ 2830, 3393, 46254, 36068, 3830, 9064, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 22043, 264, 1638, 29968, 497, 259, 11, 2915, 368, 341, 197, 87249, 1669, 609, 2258, 29968, 515, 298, 19727, 25, 2435, 36694, 345, 298, 1444, 929, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVoidAuthorization(t *testing.T) { c, _ := NewClient(testClientID, testSecret, APIBaseSandBox) c.GetAccessToken() _, err := c.VoidAuthorization(testAuthID) if err == nil { t.Errorf("Auth is expired, 400 error must be returned") } }
explode_data.jsonl/18444
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 35882, 18124, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 1532, 2959, 8623, 2959, 915, 11, 1273, 19773, 11, 5333, 3978, 47044, 1611, 340, 1444, 2234, 37649, 2822, 197, 6878, 1848, 1669, 272, 21710, 18124, 8623, 5087, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStdCopyWriteAndRead(t *testing.T) { stdOutBytes := []byte(strings.Repeat("o", startingBufLen)) stdErrBytes := []byte(strings.Repeat("e", startingBufLen)) buffer, err := getSrcBuffer(stdOutBytes, stdErrBytes) if err != nil { t.Fatal(err) } written, err := StdCopy(ioutil.Discard, ioutil.Discard, buffer) if err != nil { t.Fatal(err) } expectedTotalWritten := len(stdOutBytes) + len(stdErrBytes) if written != int64(expectedTotalWritten) { t.Fatalf("Expected to have total of %d bytes written, got %d", expectedTotalWritten, written) } }
explode_data.jsonl/52158
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 22748, 12106, 7985, 3036, 4418, 1155, 353, 8840, 836, 8, 341, 6736, 2662, 7078, 1669, 3056, 3782, 51442, 2817, 10979, 445, 78, 497, 5916, 15064, 11271, 1171, 6736, 7747, 7078, 1669, 3056, 3782, 51442, 2817, 10979, 445, 68, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestListEach(t *testing.T) { list := New() list.Add("a", "b", "c") list.Each(func(index int, value interface{}) { switch index { case 0: if actualValue, expectedValue := value, "a"; actualValue != expectedValue { t.Errorf("Got %v expected %v", actualValue, expectedValue) } case 1: if actualValue, expectedValue := value, "b"; actualValue != expectedValue { t.Errorf("Got %v expected %v", actualValue, expectedValue) } case 2: if actualValue, expectedValue := value, "c"; actualValue != expectedValue { t.Errorf("Got %v expected %v", actualValue, expectedValue) } default: t.Errorf("Too many") } }) }
explode_data.jsonl/18293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 852, 4854, 1155, 353, 8840, 836, 8, 341, 14440, 1669, 1532, 741, 14440, 1904, 445, 64, 497, 330, 65, 497, 330, 66, 1138, 14440, 13, 4854, 18552, 7195, 526, 11, 897, 3749, 28875, 341, 197, 8961, 1922, 341, 197, 2722, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestCommitTransactions builds a single-shard in-memory blockchain whose
// genesis funds a test bank account, commits one signed transfer through the
// worker, and verifies that a receipt is produced and exactly one transaction
// lands in the worker's current block.
func TestCommitTransactions(t *testing.T) {
	// Setup a new blockchain with genesis block containing test token on test address
	var (
		database = ethdb.NewMemDatabase()
		gspec    = core.Genesis{
			Config:  chainConfig,
			Factory: blockFactory,
			Alloc:   core.GenesisAlloc{testBankAddress: {Balance: testBankFunds}},
			ShardID: 0,
		}
	)
	gspec.MustCommit(database)
	chain, _ := core.NewBlockChain(database, nil, gspec.Config, chain2.Engine, vm.Config{}, nil)

	// Create a new worker
	worker := New(params.TestChainConfig, chain, chain2.Engine)

	// Generate a test tx: a transfer of a random fraction of one token from
	// the bank account to itself, signed with the bank key.
	baseNonce := worker.GetCurrentState().GetNonce(crypto.PubkeyToAddress(testBankKey.PublicKey))
	randAmount := rand.Float32()
	tx, _ := types.SignTx(types.NewTransaction(baseNonce, testBankAddress, uint32(0), big.NewInt(int64(denominations.One*randAmount)), params.TxGas, nil, nil), types.HomesteadSigner{}, testBankKey)

	// Commit the tx to the worker
	txs := make(map[common.Address]types.PoolTransactions)
	txs[testBankAddress] = types.PoolTransactions{tx}
	err := worker.CommitTransactions(
		txs, testBankAddress,
	)
	if err != nil {
		t.Error(err)
	}
	if len(worker.GetCurrentReceipts()) == 0 {
		t.Error("No receipt is created for new transactions")
	}
	if len(worker.current.txs) != 1 {
		t.Error("Transaction is not committed")
	}
}
explode_data.jsonl/44239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 33441, 48761, 1155, 353, 8840, 836, 8, 341, 197, 322, 18626, 264, 501, 17944, 448, 59366, 2504, 8482, 1273, 3950, 389, 1273, 2621, 198, 2405, 2399, 197, 2698, 2211, 284, 8372, 1999, 7121, 18816, 5988, 741, 197, 3174, 9535, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestX5C_AuthorizeRenew covers both outcomes of the X5C provisioner's
// AuthorizeRenew: renewal must be rejected with HTTP 401 when DisableRenewal
// is set in the provisioner claims, and allowed otherwise.
func TestX5C_AuthorizeRenew(t *testing.T) {
	now := time.Now().Truncate(time.Second)
	type test struct {
		p    *X5C
		code int
		err  error
	}
	tests := map[string]func(*testing.T) test{
		"fail/renew-disabled": func(t *testing.T) test {
			p, err := generateX5C(nil)
			assert.FatalError(t, err)
			// disable renewal
			disable := true
			p.Claims = &Claims{DisableRenewal: &disable}
			p.ctl.Claimer, err = NewClaimer(p.Claims, globalProvisionerClaims)
			assert.FatalError(t, err)
			return test{
				p:    p,
				code: http.StatusUnauthorized,
				err:  errors.Errorf("renew is disabled for provisioner '%s'", p.GetName()),
			}
		},
		"ok": func(t *testing.T) test {
			p, err := generateX5C(nil)
			assert.FatalError(t, err)
			return test{
				p: p,
			}
		},
	}
	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			tc := tt(t)
			// Attempt to renew a certificate valid from now for one hour.
			if err := tc.p.AuthorizeRenew(context.Background(), &x509.Certificate{
				NotBefore: now,
				NotAfter:  now.Add(time.Hour),
			}); err != nil {
				if assert.NotNil(t, tc.err) {
					// Errors are expected to carry an HTTP status code and
					// start with the expected message.
					sc, ok := err.(errs.StatusCoder)
					assert.Fatal(t, ok, "error does not implement StatusCoder interface")
					assert.Equals(t, sc.StatusCode(), tc.code)
					assert.HasPrefix(t, err.Error(), tc.err.Error())
				}
			} else {
				assert.Nil(t, tc.err)
			}
		})
	}
}
explode_data.jsonl/44606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 611 }
[ 2830, 3393, 55, 20, 34, 1566, 1553, 551, 34625, 365, 1155, 353, 8840, 836, 8, 341, 80922, 1669, 882, 13244, 1005, 1282, 26900, 9730, 32435, 340, 13158, 1273, 2036, 341, 197, 3223, 262, 353, 55, 20, 34, 198, 197, 43343, 526, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRestrictionNoneInRealLoader(t *testing.T) { dir, fSys, err := commonSetupForLoaderRestrictionTest() if err != nil { t.Fatalf("unexpected error: %v", err) } defer os.RemoveAll(dir) var l ifc.Loader l = newLoaderOrDie(RestrictionNone, fSys, dir) l = doSanityChecksAndDropIntoBase(t, l) // Reading symlink to exteriorData works. _, err = l.Load("symLinkToExteriorData") if err != nil { t.Fatalf("unexpected error: %v", err) } // Attempt to read "up" works. _, err = l.Load("../exteriorData") if err != nil { t.Fatalf("unexpected error: %v", err) } }
explode_data.jsonl/65705
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 227 }
[ 2830, 3393, 50360, 2479, 4064, 641, 12768, 9181, 1155, 353, 8840, 836, 8, 341, 48532, 11, 282, 32792, 11, 1848, 1669, 4185, 21821, 2461, 9181, 50360, 2479, 2271, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 53859, 1465, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestList(t *testing.T) { var c Client list, err := c.List() if err != nil { t.Fatalf("err: %s", err) } if list == nil { t.Fatal("list should not be nil") } var license *LicenseInfo for _, l := range list.Licenses { if l.ID == "MIT" { license = l break } } if license == nil { t.Fatal("MIT license not found") } }
explode_data.jsonl/30670
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 852, 1155, 353, 8840, 836, 8, 341, 2405, 272, 8423, 198, 14440, 11, 1848, 1669, 272, 5814, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 615, 25, 1018, 82, 497, 1848, 340, 197, 532, 743, 1140, 621, 2092, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCreateDeploymentConfigConflictingNamespace(t *testing.T) { storage := REST{} channel, err := storage.Create(kapi.WithNamespace(kapi.NewContext(), "legal-name"), &api.DeploymentConfig{ ObjectMeta: kapi.ObjectMeta{Name: "foo", Namespace: "some-value"}, }) if channel != nil { t.Error("Expected a nil channel, but we got a value") } checkExpectedNamespaceError(t, err) }
explode_data.jsonl/66994
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 4021, 75286, 2648, 15578, 78208, 22699, 1155, 353, 8840, 836, 8, 341, 197, 16172, 1669, 25414, 31483, 71550, 11, 1848, 1669, 5819, 7251, 5969, 2068, 26124, 22699, 5969, 2068, 7121, 1972, 1507, 330, 6428, 11494, 3975, 609, 2068...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestVotesService_Get wires a stub handler for /votes/dummy and checks that
// Votes.Get issues a GET to the expected URL and unmarshals the transaction
// payload — including the votes asset and timestamp — into GetTransaction.
func TestVotesService_Get(t *testing.T) {
	client, mux, _, teardown := setupTest()
	defer teardown()

	// Canned API response; the JSON body must match the struct asserted below.
	mux.HandleFunc("/votes/dummy", func(writer http.ResponseWriter, request *http.Request) {
		testMethod(t, request, "GET")
		fmt.Fprint(writer, `{ "data": { "id": "dummy", "blockId": "dummy", "type": 3, "amount": 0, "fee": 100000000, "sender": "dummy", "recipient": "dummy", "signature": "dummy", "asset": { "votes": [ "+dummy" ] }, "confirmations": 10, "timestamp": { "epoch": 39862054, "unix": 1529963254, "human": "2018-06-25T21:47:34Z" } } }`)
	})

	responseStruct, response, err := client.Votes.Get(context.Background(), "dummy")
	testGeneralError(t, "Votes.Get", err)
	testResponseUrl(t, "Votes.Get", response, "/api/votes/dummy")
	testResponseStruct(t, "Votes.Get", responseStruct, &GetTransaction{
		Data: Transaction{
			Id:        "dummy",
			BlockId:   "dummy",
			Type:      3,
			Amount:    0,
			Fee:       100000000,
			Sender:    "dummy",
			Recipient: "dummy",
			Signature: "dummy",
			Asset: &TransactionAsset{
				Votes: []string{
					"+dummy",
				},
			},
			Confirmations: 10,
			Timestamp: Timestamp{
				Epoch: 39862054,
				Unix:  1529963254,
				Human: "2018-06-25T21:47:34Z",
			},
		},
	})
}
explode_data.jsonl/79293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 711 }
[ 2830, 3393, 75535, 1860, 13614, 1155, 353, 8840, 836, 8, 341, 25291, 11, 59807, 11, 8358, 49304, 1669, 6505, 2271, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 37835, 3446, 8574, 497, 2915, 38356, 1758, 37508, 11, 1681, 353, 1254, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddRegExpRouteDoesNotMatchForInvalidMethod(t *testing.T) { re := regexp.MustCompile("^/test$") r := NewRouter() r.AddRegExpRoute("test", http.MethodGet, re, dummyHandler) req := events.APIGatewayProxyRequest{ Path: "/test", HTTPMethod: http.MethodPost, } assert.False(t, r.Routes["test"].Match(req), "Expected regexp route matching to return false") }
explode_data.jsonl/4490
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 2212, 50192, 4899, 21468, 2623, 8331, 2461, 7928, 3523, 1155, 353, 8840, 836, 8, 341, 17200, 1669, 41877, 98626, 48654, 14, 1944, 3, 5130, 7000, 1669, 1532, 9523, 741, 7000, 1904, 50192, 4899, 445, 1944, 497, 1758, 20798, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxQueryInvalid(t *testing.T) { db := newTestDB(t, "") defer closeDB(t, db) tx, err := db.Begin() if err != nil { t.Fatal(err) } defer tx.Rollback() _, err = tx.Query("SELECT|t1|name|") if err == nil { t.Fatal("Error expected") } }
explode_data.jsonl/15977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 31584, 2859, 7928, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 14676, 16867, 3265, 3506, 1155, 11, 2927, 692, 46237, 11, 1848, 1669, 2927, 28467, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFindObjectSucceeds(t *testing.T) { ctx := MockCtx{} ctx.FindObjectsInitFunc = findObjectsInitOK ctx.FindObjectsFinalFunc = findObjectsFinalOK ctx.FindObjectsFunc = func(pkcs11.SessionHandle, int) ([]pkcs11.ObjectHandle, bool, error) { return []pkcs11.ObjectHandle{1}, false, nil } s := &Session{ctx, 0} // test FindObject works handle, err := s.FindObject(nil) test.AssertNotError(t, err, "FindObject failed when everything worked as expected") test.AssertEquals(t, handle, pkcs11.ObjectHandle(1)) }
explode_data.jsonl/1159
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 80835, 50, 29264, 82, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 14563, 23684, 16094, 20985, 9998, 11543, 3803, 9626, 284, 1477, 11543, 3803, 3925, 198, 20985, 9998, 11543, 19357, 9626, 284, 1477, 11543, 19357, 3925, 198, 2098...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVictorOpsNoAPIKey(t *testing.T) { _, err := LoadFile("testdata/conf.victorops-no-apikey.yml") if err == nil { t.Fatalf("Expected an error parsing %s: %s", "testdata/conf.victorops-no-apikey.yml", err) } if err.Error() != "no global VictorOps API Key set" { t.Errorf("Expected: %s\nGot: %s", "no global VictorOps API Key set", err.Error()) } }
explode_data.jsonl/72922
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 36125, 269, 38904, 2753, 7082, 1592, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 8893, 1703, 445, 92425, 59241, 3133, 849, 269, 3721, 28366, 23904, 792, 33936, 1138, 743, 1848, 621, 2092, 341, 197, 3244, 30762, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewRequestAll_ReturnsRequestAll(t *testing.T) { b := NewRequestAll(func(parameters APIRequestParameters) (Paginated, error) { return nil, nil }, func(Paginated) {}) assert.NotNil(t, b) }
explode_data.jsonl/44637
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 3564, 1900, 2403, 53316, 82, 1900, 2403, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 1532, 1900, 2403, 18552, 37959, 5333, 1900, 9706, 8, 320, 47712, 15479, 11, 1465, 8, 314, 470, 2092, 11, 2092, 2470, 2915, 5304, 351, 15479...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestEarlyClose inserts 100 rows, splits the table across two regions, and
// repeatedly opens then immediately closes result sets to verify that closing
// a RecordSet early does not leak goroutines; it then injects a coprocessor
// task error via failpoint and checks the error surfaces through Next.
func TestEarlyClose(t *testing.T) {
	var cluster testutils.Cluster
	store, dom, clean := testkit.CreateMockStoreAndDomain(t, mockstore.WithClusterInspector(func(c testutils.Cluster) {
		mockstore.BootstrapWithSingleStore(c)
		cluster = c
	}))
	defer clean()

	tk := testkit.NewTestKit(t, store)
	tk.MustExec("use test")
	tk.MustExec("create table earlyclose (id int primary key)")

	N := 100
	// Insert N rows.
	var values []string
	for i := 0; i < N; i++ {
		values = append(values, fmt.Sprintf("(%d)", i))
	}
	tk.MustExec("insert earlyclose values " + strings.Join(values, ","))

	// Get table ID for split.
	is := dom.InfoSchema()
	tbl, err := is.TableByName(model.NewCIStr("test"), model.NewCIStr("earlyclose"))
	require.NoError(t, err)
	tblID := tbl.Meta().ID

	// Split the table.
	tableStart := tablecodec.GenTableRecordPrefix(tblID)
	cluster.SplitKeys(tableStart, tableStart.PrefixNext(), N/2)

	ctx := context.Background()
	// Open and close the result set many times; each Close must fully
	// release the distsql resources.
	for i := 0; i < N/2; i++ {
		rss, err := tk.Session().Execute(ctx, "select * from earlyclose order by id")
		require.NoError(t, err)
		rs := rss[0]
		req := rs.NewChunk(nil)
		require.NoError(t, rs.Next(ctx, req))
		require.NoError(t, rs.Close())
	}
	// Goroutine should not leak when error happen.
	require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/store/copr/handleTaskOnceError", `return(true)`))
	defer func() {
		require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/store/copr/handleTaskOnceError"))
	}()
	rss, err := tk.Session().Execute(ctx, "select * from earlyclose")
	require.NoError(t, err)
	rs := rss[0]
	req := rs.NewChunk(nil)
	err = rs.Next(ctx, req)
	require.Error(t, err)
	require.NoError(t, rs.Close())
}
explode_data.jsonl/38133
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 647 }
[ 2830, 3393, 41198, 7925, 1155, 353, 8840, 836, 8, 341, 2405, 10652, 1273, 6031, 72883, 198, 57279, 11, 4719, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 3036, 13636, 1155, 11, 7860, 4314, 26124, 28678, 46230, 18552, 1337, 1273, 6031, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewExportedPort_LeaseGrantFails(t *testing.T) { controller := gomock.NewController(t) mockKV := mock_etcd.NewMockKV(controller) mockLease := mock_etcd.NewMockLease(controller) ctx := context.TODO() errExpected := errors.New("No lease for thee") mockLease.EXPECT().Grant(gomock.Eq(ctx), gomock.Eq(int64(30000))).Return( nil, errExpected) exporter, err := NewExporterFromClient(ctx, mockKV, mockLease, 30000) if err != errExpected { t.Error("NewExporterFromClient reports unexpected error: ", err) } if exporter == nil { t.Fatal("NewExporterFromClient returned nil exporter") } }
explode_data.jsonl/66147
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 3564, 16894, 291, 7084, 62, 2304, 519, 67971, 37, 6209, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 77333, 82707, 1669, 7860, 45668, 4385, 7121, 11571, 82707, 40845, 340, 77333, 2304, 51...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestBlockReceiptStorage round-trips two receipts through the receipt store:
// it checks absence in a pristine database, write/read with the block body
// present (metadata is needed to reconstruct receipts), behaviour when the
// body is deleted, raw reads that skip metadata, and a final purge.
func TestBlockReceiptStorage(t *testing.T) {
	db := NewMemoryDatabase()

	// Create a live block since we need metadata to reconstruct the receipt
	tx1 := model.NewTransaction(1, common.HexToAddress("0x1"), big.NewInt(1), 1, big.NewInt(1), nil)
	tx2 := model.NewTransaction(2, common.HexToAddress("0x2"), big.NewInt(2), 2, big.NewInt(2), nil)
	body := &model.Body{Transactions: model.Transactions{tx1, tx2}}

	// Create the two receipts to manage afterwards
	receipt1 := &model.Receipt{
		Status:            model.ReceiptStatusFailed,
		CumulativeGasUsed: 1,
		Logs: []*model.Log{
			{Address: common.BytesToAddress([]byte{0x11})},
			{Address: common.BytesToAddress([]byte{0x01, 0x11})},
		},
		TxHash:          tx1.Hash(),
		ContractAddress: common.BytesToAddress([]byte{0x01, 0x11, 0x11}),
		GasUsed:         111111,
	}
	receipt1.Bloom = model.CreateBloom(model.Receipts{receipt1})
	receipt2 := &model.Receipt{
		PostState:         common.Hash{2}.Bytes(),
		CumulativeGasUsed: 2,
		Logs: []*model.Log{
			{Address: common.BytesToAddress([]byte{0x22})},
			{Address: common.BytesToAddress([]byte{0x02, 0x22})},
		},
		TxHash:          tx2.Hash(),
		ContractAddress: common.BytesToAddress([]byte{0x02, 0x22, 0x22}),
		GasUsed:         222222,
	}
	receipt2.Bloom = model.CreateBloom(model.Receipts{receipt2})
	receipts := []*model.Receipt{receipt1, receipt2}

	// Check that no receipt entries are in a pristine database
	hash := common.BytesToHash([]byte{0x03, 0x14})
	if rs := ReadReceipts(db, hash, 0, config.TestChainConfig); len(rs) != 0 {
		t.Fatalf("non existent receipts returned: %v", rs)
	}
	// Insert the body that corresponds to the receipts
	WriteBody(db, hash, 0, body)
	// Insert the receipt slice into the database and check presence
	WriteReceipts(db, hash, 0, receipts)
	if rs := ReadReceipts(db, hash, 0, config.TestChainConfig); len(rs) == 0 {
		t.Fatalf("no receipts returned")
	} else {
		if err := checkReceiptsRLP(rs, receipts); err != nil {
			t.Fatalf(err.Error())
		}
	}
	// Delete the body and ensure that the receipts are no longer returned (metadata can't be recomputed)
	DeleteBody(db, hash, 0)
	if rs := ReadReceipts(db, hash, 0, config.TestChainConfig); rs != nil {
		t.Fatalf("receipts returned when body was deleted: %v", rs)
	}
	// Ensure that receipts without metadata can be returned without the block body too
	if err := checkReceiptsRLP(ReadRawReceipts(db, hash, 0), receipts); err != nil {
		t.Fatalf(err.Error())
	}
	// Sanity check that body alone without the receipt is a full purge
	WriteBody(db, hash, 0, body)
	DeleteReceipts(db, hash, 0)
	if rs := ReadReceipts(db, hash, 0, config.TestChainConfig); len(rs) != 0 {
		t.Fatalf("deleted receipts returned: %v", rs)
	}
}
explode_data.jsonl/72786
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1060 }
[ 2830, 3393, 4713, 67461, 5793, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 1532, 10642, 5988, 2822, 197, 322, 4230, 264, 3887, 2504, 2474, 582, 1184, 11160, 311, 43828, 279, 22567, 198, 46237, 16, 1669, 1614, 7121, 8070, 7, 16, 11, 418...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestDelNodes(t *testing.T) { services := []*registry.Service{ { Name: "foo", Version: "1.0.0", Nodes: []*registry.Node{ { Id: "foo-123", Address: "localhost", Port: 9999, }, { Id: "foo-321", Address: "localhost", Port: 6666, }, }, }, { Name: "foo", Version: "1.0.0", Nodes: []*registry.Node{ { Id: "foo-123", Address: "localhost", Port: 6666, }, }, }, } nodes := delNodes(services[0].Nodes, services[1].Nodes) if i := len(nodes); i != 1 { t.Errorf("Expected only 1 node, got %d: %+v", i, nodes) } t.Logf("Nodes %+v", nodes) }
explode_data.jsonl/26131
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 16532, 12288, 1155, 353, 8840, 836, 8, 341, 1903, 2161, 1669, 29838, 29172, 13860, 515, 197, 197, 515, 298, 21297, 25, 262, 330, 7975, 756, 298, 77847, 25, 330, 16, 13, 15, 13, 15, 756, 298, 197, 12288, 25, 29838, 29172,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestResolver_AddEventAPI is a table-driven test for the AddEventAPI
// resolver. Each case wires mock persistence, transactioner, service,
// application service, and converter, then checks the returned GraphQL
// definition and error against expectations.
func TestResolver_AddEventAPI(t *testing.T) {
	// given
	testErr := errors.New("Test error")
	id := "bar"
	appId := "1"
	modelAPI := fixMinModelEventAPIDefinition(id, "placeholder")
	gqlAPI := fixGQLEventAPIDefinition(id, "placeholder")
	gqlAPIInput := fixGQLEventAPIDefinitionInput()
	modelAPIInput := fixModelEventAPIDefinitionInput()

	testCases := []struct {
		Name            string
		PersistenceFn   func() *persistenceautomock.PersistenceTx
		TransactionerFn func(persistTx *persistenceautomock.PersistenceTx) *persistenceautomock.Transactioner
		ServiceFn       func() *automock.EventAPIService
		AppServiceFn    func() *automock.ApplicationService
		ConverterFn     func() *automock.EventAPIConverter
		ExpectedAPI     *graphql.EventAPIDefinition
		ExpectedErr     error
	}{
		{
			// Happy path: app exists, Create and Get both succeed.
			Name:            "Success",
			PersistenceFn:   txtest.PersistenceContextThatExpectsCommit,
			TransactionerFn: txtest.TransactionerThatSucceeds,
			ServiceFn: func() *automock.EventAPIService {
				svc := &automock.EventAPIService{}
				svc.On("Create", contextParam, appId, *modelAPIInput).Return(id, nil).Once()
				svc.On("Get", contextParam, id).Return(modelAPI, nil).Once()
				return svc
			},
			AppServiceFn: func() *automock.ApplicationService {
				appSvc := &automock.ApplicationService{}
				appSvc.On("Exist", contextParam, appId).Return(true, nil)
				return appSvc
			},
			ConverterFn: func() *automock.EventAPIConverter {
				conv := &automock.EventAPIConverter{}
				conv.On("InputFromGraphQL", gqlAPIInput).Return(modelAPIInput).Once()
				conv.On("ToGraphQL", modelAPI).Return(gqlAPI).Once()
				return conv
			},
			ExpectedAPI: gqlAPI,
			ExpectedErr: nil,
		},
		{
			// Exist returns false: the resolver must refuse to add.
			Name:            "Returns error when application not exist",
			PersistenceFn:   txtest.PersistenceContextThatDoesntExpectCommit,
			TransactionerFn: txtest.TransactionerThatSucceeds,
			ServiceFn: func() *automock.EventAPIService {
				svc := &automock.EventAPIService{}
				return svc
			},
			AppServiceFn: func() *automock.ApplicationService {
				appSvc := &automock.ApplicationService{}
				appSvc.On("Exist", contextParam, appId).Return(false, nil)
				return appSvc
			},
			ConverterFn: func() *automock.EventAPIConverter {
				conv := &automock.EventAPIConverter{}
				conv.On("InputFromGraphQL", gqlAPIInput).Return(modelAPIInput).Once()
				return conv
			},
			ExpectedAPI: nil,
			ExpectedErr: errors.New("Cannot add EventAPI to not existing Application"),
		},
		{
			// Exist itself errors: the error is propagated.
			Name:            "Returns error when application existence check failed",
			PersistenceFn:   txtest.PersistenceContextThatDoesntExpectCommit,
			TransactionerFn: txtest.TransactionerThatSucceeds,
			ServiceFn: func() *automock.EventAPIService {
				svc := &automock.EventAPIService{}
				return svc
			},
			AppServiceFn: func() *automock.ApplicationService {
				appSvc := &automock.ApplicationService{}
				appSvc.On("Exist", contextParam, appId).Return(false, testErr)
				return appSvc
			},
			ConverterFn: func() *automock.EventAPIConverter {
				conv := &automock.EventAPIConverter{}
				conv.On("InputFromGraphQL", gqlAPIInput).Return(modelAPIInput).Once()
				return conv
			},
			ExpectedAPI: nil,
			ExpectedErr: testErr,
		},
		{
			// Create fails after the existence check passes.
			Name:            "Returns error when EventAPI creation failed",
			PersistenceFn:   txtest.PersistenceContextThatDoesntExpectCommit,
			TransactionerFn: txtest.TransactionerThatSucceeds,
			ServiceFn: func() *automock.EventAPIService {
				svc := &automock.EventAPIService{}
				svc.On("Create", contextParam, appId, *modelAPIInput).Return("", testErr).Once()
				return svc
			},
			AppServiceFn: func() *automock.ApplicationService {
				appSvc := &automock.ApplicationService{}
				appSvc.On("Exist", contextParam, appId).Return(true, nil)
				return appSvc
			},
			ConverterFn: func() *automock.EventAPIConverter {
				conv := &automock.EventAPIConverter{}
				conv.On("InputFromGraphQL", gqlAPIInput).Return(modelAPIInput).Once()
				return conv
			},
			ExpectedAPI: nil,
			ExpectedErr: testErr,
		},
		{
			// Create succeeds but the follow-up Get fails.
			Name:            "Returns error when EventAPI retrieval failed",
			PersistenceFn:   txtest.PersistenceContextThatDoesntExpectCommit,
			TransactionerFn: txtest.TransactionerThatSucceeds,
			ServiceFn: func() *automock.EventAPIService {
				svc := &automock.EventAPIService{}
				svc.On("Create", contextParam, appId, *modelAPIInput).Return(id, nil).Once()
				svc.On("Get", contextParam, id).Return(nil, testErr).Once()
				return svc
			},
			AppServiceFn: func() *automock.ApplicationService {
				appSvc := &automock.ApplicationService{}
				appSvc.On("Exist", contextParam, appId).Return(true, nil)
				return appSvc
			},
			ConverterFn: func() *automock.EventAPIConverter {
				conv := &automock.EventAPIConverter{}
				conv.On("InputFromGraphQL", gqlAPIInput).Return(modelAPIInput).Once()
				return conv
			},
			ExpectedAPI: nil,
			ExpectedErr: testErr,
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.Name, func(t *testing.T) {
			// given
			persistTx := testCase.PersistenceFn()
			tx := testCase.TransactionerFn(persistTx)
			svc := testCase.ServiceFn()
			converter := testCase.ConverterFn()
			appSvc := testCase.AppServiceFn()

			resolver := eventapi.NewResolver(tx, svc, appSvc, converter, nil)

			// when
			result, err := resolver.AddEventAPI(context.TODO(), appId, *gqlAPIInput)

			// then
			assert.Equal(t, testCase.ExpectedAPI, result)
			if testCase.ExpectedErr != nil {
				assert.Contains(t, err.Error(), testCase.ExpectedErr.Error())
			} else {
				require.Nil(t, err)
			}

			// Verify every mock expectation was met.
			persistTx.AssertExpectations(t)
			tx.AssertExpectations(t)
			svc.AssertExpectations(t)
			appSvc.AssertExpectations(t)
			converter.AssertExpectations(t)
		})
	}
}
explode_data.jsonl/2240
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2433 }
[ 2830, 3393, 18190, 21346, 1556, 7082, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 18185, 7747, 1669, 5975, 7121, 445, 2271, 1465, 5130, 15710, 1669, 330, 2257, 698, 28236, 764, 1669, 330, 16, 1837, 19727, 7082, 1669, 5046, 6217, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCallerSubscriberClient_GetTxReceipt(t *testing.T) { response := cltest.MustReadFile(t, "testdata/getTransactionReceipt.json") mockServer, wsCleanup := cltest.NewWSServer(string(response)) defer wsCleanup() config := cltest.NewConfigWithWSServer(t, mockServer) store, cleanup := cltest.NewStoreWithConfig(config) defer cleanup() ec := store.TxManager.(*strpkg.EthTxManager).Client hash := common.HexToHash("0xb903239f8543d04b5dc1ba6579132b143087c68db1b2168786408fcbce568238") receipt, err := ec.GetTxReceipt(hash) assert.NoError(t, err) assert.Equal(t, hash, receipt.Hash) assert.Equal(t, cltest.Int(uint64(11)), receipt.BlockNumber) }
explode_data.jsonl/63841
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 58735, 40236, 2959, 13614, 31584, 67461, 1155, 353, 8840, 836, 8, 341, 21735, 1669, 1185, 1944, 50463, 4418, 1703, 1155, 11, 330, 92425, 23302, 8070, 67461, 4323, 1138, 77333, 5475, 11, 17624, 67335, 1669, 1185, 1944, 7121, 54...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRule runs a comby-backed structural search with a `where` rule and
// checks that only the call site matching the rule survives.
// It requires the comby binary, so it only runs on CI.
func TestRule(t *testing.T) {
	// If we are not on CI skip the test.
	if os.Getenv("CI") == "" {
		t.Skip("Not on CI, skipping comby-dependent test")
	}

	// One Go file with two call sites; the rule should keep only "success".
	input := map[string]string{
		"file.go": "func foo(success) {} func bar(fail) {}",
	}
	zipData, err := testutil.CreateZip(input)
	if err != nil {
		t.Fatal(err)
	}
	zf, cleanup, err := testutil.TempZipFileOnDisk(zipData)
	if err != nil {
		t.Fatal(err)
	}
	defer cleanup()

	p := &protocol.PatternInfo{
		Pattern:         "func :[[fn]](:[args])",
		IncludePatterns: []string{".go"},
		// Rule restricts matches to those whose :[args] hole is "success".
		CombyRule: `where :[args] == "success"`,
	}

	ctx, cancel, sender := newLimitedStreamCollector(context.Background(), 1000000000)
	defer cancel()
	err = structuralSearch(ctx, zf, Subset(p.IncludePatterns), "", p.Pattern, p.CombyRule, p.Languages, "repo", sender)
	if err != nil {
		t.Fatal(err)
	}
	got := sender.collected

	// Expect a single match covering "func foo(success)" (17 bytes at
	// offset 0 on line 0).
	want := []protocol.FileMatch{
		{
			Path:     "file.go",
			LimitHit: false,
			LineMatches: []protocol.LineMatch{
				{
					LineNumber:       0,
					OffsetAndLengths: [][2]int{{0, 17}},
					Preview:          "func foo(success)",
				},
			},
			MatchCount: 1,
		},
	}
	if !reflect.DeepEqual(got, want) {
		t.Fatalf("got file matches %v, want %v", got, want)
	}
}
explode_data.jsonl/5446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 542 }
[ 2830, 3393, 11337, 1155, 353, 8840, 836, 8, 341, 197, 322, 1416, 582, 525, 537, 389, 20694, 10706, 279, 1273, 624, 743, 2643, 64883, 445, 11237, 899, 621, 1591, 341, 197, 3244, 57776, 445, 2623, 389, 20694, 11, 42659, 469, 1694, 42818...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetObjectsToReplicateRemoteHasAll(t *testing.T) { ece, dr, err := getTestEce(nil) if dr != "" { defer os.RemoveAll(dr) } require.Nil(t, err) idb, err := ece.getDB("sdb1") require.Nil(t, err) timestamp := time.Now().UnixNano() body := "just testing" hsh0 := "00000000000000000000000000000001" f, err := idb.TempFile(hsh0, 0, timestamp, int64(len(body)), true) require.Nil(t, err) f.Write([]byte(body)) require.Nil(t, idb.Commit(f, hsh0, 0, timestamp, "PUT", map[string]string{"name": "o1"}, false, "")) hsh1 := "00000000000000000000000000000002" f, err = idb.TempFile(hsh1, 0, timestamp, int64(len(body)), true) require.Nil(t, err) f.Write([]byte(body)) require.Nil(t, idb.Commit(f, hsh1, 0, timestamp, "PUT", map[string]string{"name": "o2"}, false, "")) remoteItems, err := idb.List("", "", "", 0) require.Nil(t, err) ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { d, err := json.Marshal(remoteItems) require.Nil(t, err) w.WriteHeader(200) w.Write(d) })) u, err := url.Parse(ts.URL) require.Nil(t, err) host, ports, err := net.SplitHostPort(u.Host) require.Nil(t, err) port, err := strconv.Atoi(ports) require.Nil(t, err) osc := make(chan ObjectStabilizer) cancel := make(chan struct{}) defer close(cancel) go ece.GetObjectsToReplicate( PriorityRepJob{FromDevice: &ring.Device{Device: "sdb1"}, ToDevice: &ring.Device{Device: "sdb2", Scheme: "http", Port: port, Ip: host}}, osc, cancel) os := <-osc require.Nil(t, os) }
explode_data.jsonl/58666
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 650 }
[ 2830, 3393, 1949, 11543, 1249, 18327, 48795, 24703, 10281, 2403, 1155, 353, 8840, 836, 8, 341, 7727, 346, 11, 1353, 11, 1848, 1669, 633, 2271, 36, 346, 27907, 340, 743, 1353, 961, 1591, 341, 197, 16867, 2643, 84427, 30958, 340, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDetect(t *testing.T) { tcs := []struct { input string expect string }{ { input: "Grand.Designs.S12E06.720p.HDTV.x264", expect: "S12E06", }, { input: "Grand.Designs.s12e6.720p.HDTV.x264", expect: "S12E6", }, { input: "Grand.Designs.s12e.720p.HDTV.x264", expect: "nomatch", }, { input: "Simpsons 07x01 - Who Shot Mr Burns (Part 2) [rl]", expect: "S07E01", }, } for _, tc := range tcs { tc := tc // pin t.Run(tc.input, func(t *testing.T) { res := Detect(tc.input) var output string if res != nil { output = res.String() } else { output = "nomatch" } assert.EqualString(t, output, tc.expect) }) } }
explode_data.jsonl/44958
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 57193, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 22427, 220, 914, 198, 197, 24952, 914, 198, 197, 59403, 197, 197, 515, 298, 22427, 25, 220, 330, 40151, 51872, 82, 808, 16, 17, 36, 15, 21, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_handler_IndexInfo is a generated table-driven scaffold for
// handler.IndexInfo. The case list is still empty (see the TODO blocks);
// the harness already wires before/after hooks, goroutine-leak checking,
// and a default result check.
func Test_handler_IndexInfo(t *testing.T) {
	// args bundles the parameters passed to IndexInfo.
	type args struct {
		w http.ResponseWriter
		r *http.Request
	}
	// fields configures the handler under test.
	type fields struct {
		indexer index.IndexServer
	}
	// want holds the expected status code and error.
	type want struct {
		wantCode int
		err      error
	}
	type test struct {
		name       string
		args       args
		fields     fields
		want       want
		checkFunc  func(want, int, error) error
		beforeFunc func(args)
		afterFunc  func(args)
	}
	// defaultCheckFunc compares the returned code and error against want.
	defaultCheckFunc := func(w want, gotCode int, err error) error {
		if !errors.Is(err, w.err) {
			return errors.Errorf("got_error: \"%#v\",\n\t\t\t\twant: \"%#v\"", err, w.err)
		}
		if !reflect.DeepEqual(gotCode, w.wantCode) {
			return errors.Errorf("got: \"%#v\",\n\t\t\t\twant: \"%#v\"", gotCode, w.wantCode)
		}
		return nil
	}
	tests := []test{
		// TODO test cases
		/*
			{
				name: "test_case_1",
				args: args {
					w: nil,
					r: nil,
				},
				fields: fields {
					indexer: nil,
				},
				want: want{},
				checkFunc: defaultCheckFunc,
			},
		*/

		// TODO test cases
		/*
			func() test {
				return test {
					name: "test_case_2",
					args: args {
						w: nil,
						r: nil,
					},
					fields: fields {
						indexer: nil,
					},
					want: want{},
					checkFunc: defaultCheckFunc,
				}
			}(),
		*/
	}

	for _, tc := range tests {
		test := tc // pin the loop variable for the parallel subtest
		t.Run(test.name, func(tt *testing.T) {
			tt.Parallel()
			// Fail the subtest if it leaks goroutines.
			defer goleak.VerifyNone(tt, goleak.IgnoreCurrent())
			if test.beforeFunc != nil {
				test.beforeFunc(test.args)
			}
			if test.afterFunc != nil {
				defer test.afterFunc(test.args)
			}
			if test.checkFunc == nil {
				test.checkFunc = defaultCheckFunc
			}
			h := &handler{
				indexer: test.fields.indexer,
			}

			gotCode, err := h.IndexInfo(test.args.w, test.args.r)
			if err := test.checkFunc(test.want, gotCode, err); err != nil {
				tt.Errorf("error = %v", err)
			}
		})
	}
}
explode_data.jsonl/40270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1028 }
[ 2830, 3393, 10183, 50361, 1731, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 6692, 1758, 37508, 198, 197, 7000, 353, 1254, 9659, 198, 197, 532, 13158, 5043, 2036, 341, 197, 26327, 261, 1922, 18338, 5475, 198, 197, 532, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateAll(t *testing.T) { if testing.Short() { t.Skip("skip test in short mode") } sess := newSessionForTesting(t) defer sess.Close() for _, name := range AllExperiments() { builder, err := sess.NewExperimentBuilder(name) if err != nil { t.Fatal(err) } exp := builder.NewExperiment() good := (exp.Name() == name) if !good { t.Fatal("unexpected experiment name") } } }
explode_data.jsonl/26306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 164 }
[ 2830, 3393, 4021, 2403, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 20599, 1273, 304, 2805, 3856, 1138, 197, 532, 1903, 433, 1669, 501, 5283, 2461, 16451, 1155, 340, 16867, 21875, 10421, 741, 2023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// Test_genJSONSchema is a generated table-driven scaffold for
// genJSONSchema. The case list is still empty (see the TODO blocks);
// the harness already wires before/after hooks, goroutine-leak checking,
// and a default error check.
func Test_genJSONSchema(t *testing.T) {
	t.Parallel()
	// args bundles the parameters passed to genJSONSchema.
	type args struct {
		path string
	}
	// want holds the expected error.
	type want struct {
		err error
	}
	type test struct {
		name       string
		args       args
		want       want
		checkFunc  func(want, error) error
		beforeFunc func(args)
		afterFunc  func(args)
	}
	// defaultCheckFunc compares the returned error against want.
	defaultCheckFunc := func(w want, err error) error {
		if !errors.Is(err, w.err) {
			return errors.Errorf("got_error: \"%#v\",\n\t\t\t\twant: \"%#v\"", err, w.err)
		}
		return nil
	}
	tests := []test{
		// TODO test cases
		/*
			{
				name: "test_case_1",
				args: args {
					path: "",
				},
				want: want{},
				checkFunc: defaultCheckFunc,
			},
		*/

		// TODO test cases
		/*
			func() test {
				return test {
					name: "test_case_2",
					args: args {
						path: "",
					},
					want: want{},
					checkFunc: defaultCheckFunc,
				}
			}(),
		*/
	}
	for _, tc := range tests {
		test := tc // pin the loop variable for the parallel subtest
		t.Run(test.name, func(tt *testing.T) {
			tt.Parallel()
			// Fail the subtest if it leaks goroutines.
			defer goleak.VerifyNone(tt, goleak.IgnoreCurrent())
			if test.beforeFunc != nil {
				test.beforeFunc(test.args)
			}
			if test.afterFunc != nil {
				defer test.afterFunc(test.args)
			}
			if test.checkFunc == nil {
				test.checkFunc = defaultCheckFunc
			}

			err := genJSONSchema(test.args.path)
			if err := test.checkFunc(test.want, err); err != nil {
				tt.Errorf("error = %v", err)
			}
		})
	}
}
explode_data.jsonl/42609
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 748 }
[ 2830, 3393, 16322, 5370, 8632, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 13158, 2827, 2036, 341, 197, 26781, 914, 198, 197, 532, 13158, 1366, 2036, 341, 197, 9859, 1465, 198, 197, 532, 13158, 1273, 2036, 341, 197, 11609, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSliceWithControlCharactersNeedsToEscape(t *testing.T) { unescaped := generateUnescapedSlice() lowByte := byte(int(' ') - 1) unescapedWithLowByte := append(unescaped, lowByte) assert.True(t, NeedToEscape(unescapedWithLowByte)) highByte := byte(int('~') + 1) unescaped = append(unescaped, highByte) assert.True(t, NeedToEscape(unescaped)) }
explode_data.jsonl/51128
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 33236, 2354, 3273, 37489, 65064, 1249, 48124, 1155, 353, 8840, 836, 8, 341, 20479, 65826, 1669, 6923, 1806, 65826, 33236, 741, 8810, 363, 7153, 1669, 4922, 1548, 492, 16667, 481, 220, 16, 340, 20479, 65826, 2354, 24187, 7153, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMoveAllToActiveOrBackoffQueue_PreEnqueueChecks verifies that
// MoveAllToActiveOrBackoffQueue only moves pods for which the optional
// preEnqueueCheck returns true (a nil check moves everything). It drains
// the backoff queue afterwards and compares the moved pod names.
func TestMoveAllToActiveOrBackoffQueue_PreEnqueueChecks(t *testing.T) {
	// Five pods p0..p4 with priority equal to their index.
	var podInfos []*framework.QueuedPodInfo
	for i := 0; i < 5; i++ {
		pInfo := newQueuedPodInfoForLookup(&v1.Pod{
			ObjectMeta: metav1.ObjectMeta{Name: fmt.Sprintf("p%d", i)},
			Spec:       v1.PodSpec{Priority: pointer.Int32Ptr(int32(i))},
		})
		podInfos = append(podInfos, pInfo)
	}
	tests := []struct {
		name            string
		preEnqueueCheck PreEnqueueCheck
		podInfos        []*framework.QueuedPodInfo
		want            []string
	}{
		{
			name:     "nil PreEnqueueCheck",
			podInfos: podInfos,
			want:     []string{"p0", "p1", "p2", "p3", "p4"},
		},
		{
			name:            "move Pods with priority greater than 2",
			podInfos:        podInfos,
			preEnqueueCheck: func(pod *v1.Pod) bool { return *pod.Spec.Priority >= 2 },
			want:            []string{"p2", "p3", "p4"},
		},
		{
			name:            "move Pods with even priority and greater than 2",
			podInfos:        podInfos,
			preEnqueueCheck: func(pod *v1.Pod) bool { return *pod.Spec.Priority%2 == 0 && *pod.Spec.Priority >= 2 },
			want:            []string{"p2", "p4"},
		},
		{
			// No pod satisfies this check, so want stays nil.
			name:            "move Pods with even and negative priority",
			podInfos:        podInfos,
			preEnqueueCheck: func(pod *v1.Pod) bool { return *pod.Spec.Priority%2 == 0 && *pod.Spec.Priority < 0 },
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			q := NewTestQueue(context.Background(), newDefaultQueueSort())
			// Park every pod as unschedulable before attempting the move.
			for _, podInfo := range tt.podInfos {
				q.AddUnschedulableIfNotPresent(podInfo, q.schedulingCycle)
			}
			q.MoveAllToActiveOrBackoffQueue(TestEvent, tt.preEnqueueCheck)
			// Drain the backoff queue to observe which pods were moved.
			var got []string
			for q.podBackoffQ.Len() != 0 {
				obj, err := q.podBackoffQ.Pop()
				if err != nil {
					t.Fatalf("Fail to pop pod from backoffQ: %v", err)
				}
				queuedPodInfo, ok := obj.(*framework.QueuedPodInfo)
				if !ok {
					t.Fatalf("Fail to covert popped obj (type %T) to *framework.QueuedPodInfo", obj)
				}
				got = append(got, queuedPodInfo.Pod.Name)
			}
			if diff := cmp.Diff(tt.want, got); diff != "" {
				t.Errorf("Unexpected diff (-want, +got):\n%s", diff)
			}
		})
	}
}
explode_data.jsonl/68208
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 988 }
[ 2830, 3393, 9860, 2403, 1249, 5728, 2195, 3707, 1847, 7554, 79561, 1702, 4584, 49820, 1155, 353, 8840, 836, 8, 341, 2405, 7509, 38059, 29838, 3794, 10003, 361, 3260, 23527, 1731, 198, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 20, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNodeExpandVolume(t *testing.T) { d := NewFakeDriver() req := csi.NodeExpandVolumeRequest{} resp, err := d.NodeExpandVolume(context.Background(), &req) assert.Nil(t, resp) if !reflect.DeepEqual(err, status.Error(codes.Unimplemented, "")) { t.Errorf("Unexpected error: %v", err) } }
explode_data.jsonl/36858
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 1955, 38946, 18902, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 1532, 52317, 11349, 741, 24395, 1669, 272, 6321, 21714, 38946, 18902, 1900, 16094, 34653, 11, 1848, 1669, 294, 21714, 38946, 18902, 5378, 19047, 1507, 609, 2958, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestAssignmentsSecretUpdateAndDeletion checks the dispatcher assignment
// stream: the initial message carries the expected task and secret changes,
// while later secret updates and deletions must NOT trigger further stream
// messages (verified with a bounded wait).
func TestAssignmentsSecretUpdateAndDeletion(t *testing.T) {
	t.Parallel()

	gd, err := startDispatcher(DefaultConfig())
	assert.NoError(t, err)
	defer gd.Close()

	expectedSessionID, nodeID := getSessionAndNodeID(t, gd.Clients[0])

	// create the relevant secrets and tasks
	secrets, tasks := makeTasksAndSecrets(t, nodeID)
	err = gd.Store.Update(func(tx store.Tx) error {
		// NOTE(review): the last secret is deliberately NOT created —
		// presumably to cover a task referencing a missing secret; confirm.
		for _, secret := range secrets[:len(secrets)-1] {
			assert.NoError(t, store.CreateSecret(tx, secret))
		}
		for _, task := range tasks {
			assert.NoError(t, store.CreateTask(tx, task))
		}
		return nil
	})
	assert.NoError(t, err)

	stream, err := gd.Clients[0].Assignments(context.Background(), &api.AssignmentsRequest{SessionID: expectedSessionID})
	assert.NoError(t, err)
	defer stream.CloseSend()

	// Give the dispatcher time to build the initial assignment set.
	time.Sleep(100 * time.Millisecond)

	// check the initial task and secret stream
	resp, err := stream.Recv()
	assert.NoError(t, err)

	// FIXME(aaronl): This is hard to maintain.
	assert.Equal(t, 16, len(resp.Changes))
	taskChanges, secretChanges := collectTasksAndSecrets(resp.Changes)
	assert.Len(t, taskChanges, 10) // 10 types of task states >= assigned, 2 types < assigned
	for _, task := range tasks[2:] {
		assert.NotNil(t, taskChanges[idAndAction{id: task.ID, action: api.AssignmentChange_AssignmentActionUpdate}])
	}

	assert.Len(t, secretChanges, 6) // 6 types of task states between assigned and running inclusive
	for _, secret := range secrets[2:8] {
		assert.NotNil(t, secretChanges[idAndAction{id: secret.ID, action: api.AssignmentChange_AssignmentActionUpdate}])
	}

	// updating secrets, used by tasks or not, do not cause any changes
	err = gd.Store.Update(func(tx store.Tx) error {
		for _, secret := range secrets[:len(secrets)-2] {
			secret.Spec.Data = []byte("new secret data")
			assert.NoError(t, store.UpdateSecret(tx, secret))
		}
		return nil
	})
	assert.NoError(t, err)

	// A background receive signals recvChan if any message arrives.
	recvChan := make(chan struct{})
	go func() {
		_, _ = stream.Recv()
		recvChan <- struct{}{}
	}()

	select {
	case <-recvChan:
		assert.Fail(t, "secret update should not trigger dispatcher update")
	case <-time.After(250 * time.Millisecond):
	}

	// deleting secrets, used by tasks or not, do not cause any changes
	err = gd.Store.Update(func(tx store.Tx) error {
		for _, secret := range secrets[:len(secrets)-2] {
			assert.NoError(t, store.DeleteSecret(tx, secret.ID))
		}
		return nil
	})
	assert.NoError(t, err)

	select {
	case <-recvChan:
		assert.Fail(t, "secret delete should not trigger dispatcher update")
	case <-time.After(250 * time.Millisecond):
	}
}
explode_data.jsonl/13851
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 910 }
[ 2830, 3393, 28933, 1368, 19773, 4289, 3036, 1912, 52625, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3174, 67, 11, 1848, 1669, 1191, 21839, 87874, 2648, 2398, 6948, 35699, 1155, 11, 1848, 340, 16867, 32630, 10421, 2822, 42400, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestInitializeHeadTimestamp checks how the head block's min/max time is
// initialized when opening a DB in four situations: empty dir, WAL only,
// existing block only, and block plus WAL.
func TestInitializeHeadTimestamp(t *testing.T) {
	t.Run("clean", func(t *testing.T) {
		dir, err := ioutil.TempDir("", "test_head_init")
		testutil.Ok(t, err)
		defer func() {
			testutil.Ok(t, os.RemoveAll(dir))
		}()

		db, err := Open(dir, nil, nil, nil)
		testutil.Ok(t, err)
		defer db.Close()

		// Should be set to init values if no WAL or blocks exist so far.
		testutil.Equals(t, int64(math.MaxInt64), db.head.MinTime())
		testutil.Equals(t, int64(math.MinInt64), db.head.MaxTime())

		// First added sample initializes the writable range.
		app := db.Appender()
		_, err = app.Add(labels.FromStrings("a", "b"), 1000, 1)
		testutil.Ok(t, err)

		testutil.Equals(t, int64(1000), db.head.MinTime())
		testutil.Equals(t, int64(1000), db.head.MaxTime())
	})
	t.Run("wal-only", func(t *testing.T) {
		dir, err := ioutil.TempDir("", "test_head_init")
		testutil.Ok(t, err)
		defer func() {
			testutil.Ok(t, os.RemoveAll(dir))
		}()

		// Write two series with samples at t=5000 and t=15000 to the WAL.
		testutil.Ok(t, os.MkdirAll(path.Join(dir, "wal"), 0777))
		w, err := wal.New(nil, nil, path.Join(dir, "wal"), false)
		testutil.Ok(t, err)

		var enc RecordEncoder
		err = w.Log(
			enc.Series([]RefSeries{
				{Ref: 123, Labels: labels.FromStrings("a", "1")},
				{Ref: 124, Labels: labels.FromStrings("a", "2")},
			}, nil),
			enc.Samples([]RefSample{
				{Ref: 123, T: 5000, V: 1},
				{Ref: 124, T: 15000, V: 1},
			}, nil),
		)
		testutil.Ok(t, err)
		testutil.Ok(t, w.Close())

		db, err := Open(dir, nil, nil, nil)
		testutil.Ok(t, err)
		defer db.Close()

		// Head range comes from the WAL samples.
		testutil.Equals(t, int64(5000), db.head.MinTime())
		testutil.Equals(t, int64(15000), db.head.MaxTime())
	})
	t.Run("existing-block", func(t *testing.T) {
		dir, err := ioutil.TempDir("", "test_head_init")
		testutil.Ok(t, err)
		defer func() {
			testutil.Ok(t, os.RemoveAll(dir))
		}()

		createBlock(t, dir, genSeries(1, 1, 1000, 2000))

		db, err := Open(dir, nil, nil, nil)
		testutil.Ok(t, err)
		defer db.Close()

		// With only a block, the head starts at the block's max time.
		testutil.Equals(t, int64(2000), db.head.MinTime())
		testutil.Equals(t, int64(2000), db.head.MaxTime())
	})
	t.Run("existing-block-and-wal", func(t *testing.T) {
		dir, err := ioutil.TempDir("", "test_head_init")
		testutil.Ok(t, err)
		defer func() {
			testutil.Ok(t, os.RemoveAll(dir))
		}()

		createBlock(t, dir, genSeries(1, 1, 1000, 6000))

		testutil.Ok(t, os.MkdirAll(path.Join(dir, "wal"), 0777))
		w, err := wal.New(nil, nil, path.Join(dir, "wal"), false)
		testutil.Ok(t, err)

		var enc RecordEncoder
		err = w.Log(
			enc.Series([]RefSeries{
				{Ref: 123, Labels: labels.FromStrings("a", "1")},
				{Ref: 124, Labels: labels.FromStrings("a", "2")},
			}, nil),
			enc.Samples([]RefSample{
				{Ref: 123, T: 5000, V: 1},
				{Ref: 124, T: 15000, V: 1},
			}, nil),
		)
		testutil.Ok(t, err)
		testutil.Ok(t, w.Close())

		r := prometheus.NewRegistry()

		db, err := Open(dir, nil, r, nil)
		testutil.Ok(t, err)
		defer db.Close()

		// The block covers up to 6000, so WAL samples before that are
		// superseded; max time still comes from the WAL.
		testutil.Equals(t, int64(6000), db.head.MinTime())
		testutil.Equals(t, int64(15000), db.head.MaxTime())

		// Check that old series has been GCed.
		testutil.Equals(t, 1.0, prom_testutil.ToFloat64(db.head.metrics.series))
	})
}
explode_data.jsonl/64384
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1422 }
[ 2830, 3393, 9928, 12346, 20812, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 18377, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 1944, 13138, 6137, 1138, 197, 18185, 1314, 54282...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGet_archiveSubdirWildMultiMatch(t *testing.T) { dst := tempDir(t) u := testModule("archive-rooted-multi/archive.tar.gz") u += "//*" if err := Get(dst, u); err == nil { t.Fatal("should error") } else if !strings.Contains(err.Error(), "multiple") { t.Fatalf("err: %s", err) } }
explode_data.jsonl/817
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 1949, 42873, 3136, 3741, 40603, 20358, 8331, 1155, 353, 8840, 836, 8, 341, 52051, 1669, 2730, 6184, 1155, 340, 10676, 1669, 1273, 3332, 445, 16019, 39214, 291, 95669, 71627, 28048, 20963, 1138, 10676, 1421, 330, 21417, 698, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewClient(t *testing.T) { testCases := []struct { config Config envToken string expect string }{ { // specify directly config: Config{Token: "abcdefg"}, envToken: "", expect: "", }, { // specify via env but not to be set env (part 1) config: Config{Token: "GITHUB_TOKEN"}, envToken: "", expect: "github token is missing", }, { // specify via env (part 1) config: Config{Token: "GITHUB_TOKEN"}, envToken: "abcdefg", expect: "", }, { // specify via env but not to be set env (part 2) config: Config{Token: "$GITHUB_TOKEN"}, envToken: "", expect: "github token is missing", }, { // specify via env but not to be set env (part 3) config: Config{Token: "$TFNOTIFY_GITHUB_TOKEN"}, envToken: "", expect: "github token is missing", }, { // specify via env (part 2) config: Config{Token: "$GITHUB_TOKEN"}, envToken: "abcdefg", expect: "", }, { // specify via env (part 3) config: Config{Token: "$TFNOTIFY_GITHUB_TOKEN"}, envToken: "abcdefg", expect: "", }, { // no specification (part 1) config: Config{}, envToken: "", expect: "github token is missing", }, { // no specification (part 2) config: Config{}, envToken: "abcdefg", expect: "github token is missing", }, } for _, testCase := range testCases { if strings.HasPrefix(testCase.config.Token, "$") { key := strings.TrimPrefix(testCase.config.Token, "$") os.Setenv(key, testCase.envToken) } _, err := NewClient(testCase.config) if err == nil { continue } if err.Error() != testCase.expect { t.Errorf("got %q but want %q", err.Error(), testCase.expect) } } }
explode_data.jsonl/37415
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 779 }
[ 2830, 3393, 3564, 2959, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 25873, 256, 5532, 198, 197, 57538, 3323, 914, 198, 197, 24952, 256, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 322, 13837, 5961, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestReversiAnz30(t *testing.T) { r := NewReversiAnz() if r.GetOwnEdgeSideOneCnt() != 0 { t.Errorf("NG") } }
explode_data.jsonl/23053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 58 }
[ 2830, 3393, 693, 3004, 72, 2082, 89, 18, 15, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 693, 3004, 72, 2082, 89, 741, 743, 435, 2234, 14182, 11656, 16384, 3966, 33747, 368, 961, 220, 15, 341, 197, 3244, 13080, 445, 6140, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSchemaWithNoPKs(t *testing.T) { colColl := NewColCollection(nonPkCols...) _, _ = SchemaFromCols(colColl) assert.NotPanics(t, func() { UnkeyedSchemaFromCols(colColl) }) }
explode_data.jsonl/5967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 8632, 2354, 2753, 22242, 82, 1155, 353, 8840, 836, 8, 341, 46640, 15265, 1669, 1532, 6127, 6482, 29191, 58415, 37567, 31218, 197, 6878, 716, 284, 12539, 3830, 37567, 19611, 15265, 692, 6948, 15000, 35693, 1211, 1155, 11, 2915,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGetProductList(t *testing.T) { t.Log("Test catalogrepository products list") catalogRepo := NewCatalogRepository(db) prodList, err := catalogRepo.GetProductList(context.TODO(), nil, []string{"604488100f719d9c76a28fe3"}) assert.Nil(t, err) assert.Equal(t, len(prodList), 2) assert.Equal(t, prodList[0].Name, "Cola") prodList, err = catalogRepo.GetProductList(context.TODO(), []string{"6043d76e94df8de741c2c0d5"}, nil) assert.Nil(t, err) assert.Equal(t, len(prodList), 4) assert.Equal(t, prodList[0].Name, "Cola") prodList, err = catalogRepo.GetProductList(context.TODO(), []string{"6043d76e94df8de741c2c0d5"}, []string{"604488100f719d9c76a28fe7"}) assert.Nil(t, err) assert.Equal(t, len(prodList), 3) assert.Equal(t, prodList[0].Name, "Chicken Barbecue") }
explode_data.jsonl/58787
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 338 }
[ 2830, 3393, 1949, 4816, 852, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 445, 2271, 16403, 23319, 3871, 1140, 5130, 1444, 7750, 25243, 1669, 1532, 41606, 4624, 9791, 692, 197, 19748, 852, 11, 1848, 1669, 16403, 25243, 2234, 4816, 852, 537...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewPeerEndorserMutualTLSNoClientCerts(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() config := mockfab.DefaultMockConfig(mockCtrl) url := "grpcs://0.0.0.0:1234" _, err := newPeerEndorser(getPeerEndorserRequest(url, mockfab.GoodCert, "", config, kap, false, false)) if err != nil { t.Fatalf("Peer conn should be constructed: %s", err) } }
explode_data.jsonl/45086
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 3564, 30888, 3727, 269, 799, 51440, 928, 45439, 2753, 2959, 34, 15546, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 2822, 25873, 1669, 7860, 36855, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRandomThresholdViolation(t *testing.T) { start := time.Now() end := start.Add(time.Hour * time.Duration(2)) measures := randomThresholdViolation(start, end, -80, -50, FabricConfig.ViolationRate) if len(measures) > 1 { // assert violation period violation := measures[1] assert.True(t, violation.InViolation, "second period should be in violation") assert.Less(t, float64(-50), violation.MaxValue, "violation value should be greater than -50") } // assert first period assert.False(t, measures[0].InViolation, "first period should not be in violation") assert.GreaterOrEqual(t, float64(-50), measures[0].MaxValue, "normal value should be less than -50") }
explode_data.jsonl/75438
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 13999, 37841, 74971, 1155, 353, 8840, 836, 8, 341, 21375, 1669, 882, 13244, 741, 6246, 1669, 1191, 1904, 9730, 73550, 353, 882, 33795, 7, 17, 1171, 49294, 23471, 1669, 4194, 37841, 74971, 10639, 11, 835, 11, 481, 23, 15, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestScopeLookupParent(t *testing.T) { imports := make(testImporter) conf := Config{Importer: imports} var info Info makePkg := func(path, src string) { f, err := parseSrc(path, src) if err != nil { t.Fatal(err) } imports[path], err = conf.Check(path, []*syntax.File{f}, &info) if err != nil { t.Fatal(err) } } makePkg("lib", "package lib; var X int") // Each /*name=kind:line*/ comment makes the test look up the // name at that point and checks that it resolves to a decl of // the specified kind and line number. "undef" means undefined. mainSrc := ` /*lib=pkgname:5*/ /*X=var:1*/ /*Pi=const:8*/ /*T=typename:9*/ /*Y=var:10*/ /*F=func:12*/ package main import "lib" import . "lib" const Pi = 3.1415 type T struct{} var Y, _ = lib.X, X func F(){ const pi, e = 3.1415, /*pi=undef*/ 2.71828 /*pi=const:13*/ /*e=const:13*/ type /*t=undef*/ t /*t=typename:14*/ *t print(Y) /*Y=var:10*/ x, Y := Y, /*x=undef*/ /*Y=var:10*/ Pi /*x=var:16*/ /*Y=var:16*/ ; _ = x; _ = Y var F = /*F=func:12*/ F /*F=var:17*/ ; _ = F var a []int for i, x := range a /*i=undef*/ /*x=var:16*/ { _ = i; _ = x } var i interface{} switch y := i.(type) { /*y=undef*/ case /*y=undef*/ int /*y=var:23*/ : case float32, /*y=undef*/ float64 /*y=var:23*/ : default /*y=var:23*/: println(y) } /*y=undef*/ switch int := i.(type) { case /*int=typename:0*/ int /*int=var:31*/ : println(int) default /*int=var:31*/ : } } /*main=undef*/ ` info.Uses = make(map[*syntax.Name]Object) makePkg("main", mainSrc) mainScope := imports["main"].Scope() rx := regexp.MustCompile(`^/\*(\w*)=([\w:]*)\*/$`) base := syntax.NewFileBase("main") syntax.CommentsDo(strings.NewReader(mainSrc), func(line, col uint, text string) { pos := syntax.MakePos(base, line, col) // Syntax errors are not comments. if text[0] != '/' { t.Errorf("%s: %s", pos, text) return } // Parse the assertion in the comment. 
m := rx.FindStringSubmatch(text) if m == nil { t.Errorf("%s: bad comment: %s", pos, text) return } name, want := m[1], m[2] // Look up the name in the innermost enclosing scope. inner := mainScope.Innermost(pos) if inner == nil { t.Errorf("%s: at %s: can't find innermost scope", pos, text) return } got := "undef" if _, obj := inner.LookupParent(name, pos); obj != nil { kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types2.")) got = fmt.Sprintf("%s:%d", kind, obj.Pos().Line()) } if got != want { t.Errorf("%s: at %s: %s resolved to %s, want %s", pos, text, name, got, want) } }) // Check that for each referring identifier, // a lookup of its name on the innermost // enclosing scope returns the correct object. for id, wantObj := range info.Uses { inner := mainScope.Innermost(id.Pos()) if inner == nil { t.Errorf("%s: can't find innermost scope enclosing %q", id.Pos(), id.Value) continue } // Exclude selectors and qualified identifiers---lexical // refs only. (Ideally, we'd see if the AST parent is a // SelectorExpr, but that requires PathEnclosingInterval // from golang.org/x/tools/go/ast/astutil.) if id.Value == "X" { continue } _, gotObj := inner.LookupParent(id.Value, id.Pos()) if gotObj != wantObj { t.Errorf("%s: got %v, want %v", id.Pos(), gotObj, wantObj) continue } } }
explode_data.jsonl/29389
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1438 }
[ 2830, 3393, 10803, 34247, 8387, 1155, 353, 8840, 836, 8, 341, 21918, 82, 1669, 1281, 8623, 77289, 340, 67850, 1669, 5532, 90, 77289, 25, 15202, 532, 2405, 3546, 13074, 198, 77438, 47, 7351, 1669, 2915, 5581, 11, 2286, 914, 8, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAccKeycloakGroupRoles_simultaneousRoleAndAssignmentUpdate(t *testing.T) { t.Parallel() groupName := acctest.RandomWithPrefix("tf-acc") resource.Test(t, resource.TestCase{ ProviderFactories: testAccProviderFactories, PreCheck: func() { testAccPreCheck(t) }, Steps: []resource.TestStep{ { Config: testKeycloakGroupRoles_simultaneousRoleAndAssignmentUpdate(groupName, 1), Check: testAccCheckKeycloakGroupHasRoles("keycloak_group_roles.group_roles", true), }, { Config: testKeycloakGroupRoles_simultaneousRoleAndAssignmentUpdate(groupName, 2), Check: testAccCheckKeycloakGroupHasRoles("keycloak_group_roles.group_roles", true), }, }, }) }
explode_data.jsonl/47895
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 272 }
[ 2830, 3393, 14603, 1592, 88751, 2808, 25116, 18314, 494, 17666, 9030, 3036, 41613, 4289, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 44260, 675, 1669, 1613, 67880, 26709, 2354, 14335, 445, 8935, 12, 4475, 5130, 50346, 8787, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsNullDataScript(t *testing.T) { var hash160 = dcrutil.Hash160([]byte("test")) var overMaxDataCarrierSize = make([]byte, txscript.MaxDataCarrierSize+1) var underMaxDataCarrierSize = make([]byte, txscript.MaxDataCarrierSize/2) rand.Read(overMaxDataCarrierSize) rand.Read(underMaxDataCarrierSize) tests := []struct { name string scriptSource *txscript.ScriptBuilder version uint16 expected bool }{ { name: "OP_RETURN script", scriptSource: txscript.NewScriptBuilder(). AddOp(txscript.OP_RETURN), version: 0, expected: true, }, { name: "OP_RETURN script with unsupported version", scriptSource: txscript.NewScriptBuilder(). AddOp(txscript.OP_RETURN), version: 100, expected: false, }, { name: "OP_RETURN script with data under MaxDataCarrierSize", scriptSource: txscript.NewScriptBuilder(). AddOp(txscript.OP_RETURN).AddData(underMaxDataCarrierSize), version: 0, expected: true, }, { name: "OP_RETURN script with data over MaxDataCarrierSize", scriptSource: txscript.NewScriptBuilder(). AddOp(txscript.OP_RETURN).AddData(overMaxDataCarrierSize), version: 0, expected: false, }, { name: "revocation-tagged p2pkh script", scriptSource: txscript.NewScriptBuilder(). AddOp(txscript.OP_SSRTX).AddOp(txscript.OP_DUP). AddOp(txscript.OP_HASH160).AddData(hash160). AddOp(txscript.OP_EQUALVERIFY).AddOp(txscript.OP_CHECKSIG), version: 0, expected: false, }, } for _, test := range tests { script, err := test.scriptSource.Script() if err != nil { t.Fatalf("%s: unexpected script generation error: %s", test.name, err) } result := stake.IsNullDataScript(test.version, script) if result != test.expected { t.Fatalf("%s: expected %v, got %v", test.name, test.expected, result) } } }
explode_data.jsonl/70518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 757 }
[ 2830, 3393, 98593, 1043, 5910, 1155, 353, 8840, 836, 8, 341, 2405, 5175, 16, 21, 15, 284, 294, 5082, 1314, 15103, 16, 21, 15, 10556, 3782, 445, 1944, 5455, 2405, 916, 5974, 1043, 96294, 1695, 284, 1281, 10556, 3782, 11, 9854, 2282, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUnMarshalProfileRow(t *testing.T) { row := new(tablestore.Row) row.Columns = append(row.Columns, &tablestore.AttributeColumn{ ColumnName: "profile_id", Value: "dfdfdkfmdkfkdfkdm", }, &tablestore.AttributeColumn{ ColumnName: "size", Value: int64(64), }) unMarshalProfileRow(row) }
explode_data.jsonl/71808
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 1806, 55438, 8526, 3102, 1155, 353, 8840, 836, 8, 341, 33967, 1669, 501, 15761, 4314, 14657, 340, 33967, 15165, 284, 8737, 7835, 15165, 11, 609, 2005, 4314, 33775, 2933, 515, 197, 197, 26162, 25, 330, 5365, 842, 756, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew(t *testing.T) { t.Parallel() funcOK := Option{ name: "ok", f: func(*Config) error { return nil }, } funcNG := Option{ name: "ok", f: func(*Config) error { return errForTest }, } tests := []struct { name string options []Option expect error }{ {"success(New)", []Option{funcOK}, nil}, {"success(Option)", []Option{funcNG}, errForTest}, } for _, tt := range tests { tt := tt t.Run(tt.name, func(t *testing.T) { t.Parallel() expect := tt.expect _, actual := New(io.Discard, tt.options...) FailIfNotErrorIs(t, expect, actual) }) } }
explode_data.jsonl/71316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 270 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 29244, 3925, 1669, 6959, 515, 197, 11609, 25, 330, 562, 756, 197, 1166, 25, 2915, 4071, 2648, 8, 1465, 341, 298, 853, 2092, 198, 197, 197, 1583, 197, 532, 2924...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNodeStatusWithCloudProviderNodeIP(t *testing.T) { testKubelet := newTestKubelet(t, false /* controllerAttachDetachEnabled */) defer testKubelet.Cleanup() kubelet := testKubelet.kubelet kubelet.kubeClient = nil // ensure only the heartbeat client is used kubelet.hostname = testKubeletHostname cases := []struct { name string nodeIP net.IP nodeAddresses []v1.NodeAddress expectedAddresses []v1.NodeAddress shouldError bool }{ { name: "A single InternalIP", nodeIP: net.ParseIP("10.1.1.1"), nodeAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, expectedAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, shouldError: false, }, { name: "NodeIP is external", nodeIP: net.ParseIP("55.55.55.55"), nodeAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, expectedAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, shouldError: false, }, { // Accommodating #45201 and #49202 name: "InternalIP and ExternalIP are the same", nodeIP: net.ParseIP("55.55.55.55"), nodeAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "55.55.55.55"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, expectedAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "55.55.55.55"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, shouldError: false, }, { name: "An Internal/ExternalIP, an Internal/ExternalDNS", nodeIP: net.ParseIP("10.1.1.1"), nodeAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: 
v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeInternalDNS, Address: "ip-10-1-1-1.us-west-2.compute.internal"}, {Type: v1.NodeExternalDNS, Address: "ec2-55-55-55-55.us-west-2.compute.amazonaws.com"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, expectedAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeInternalDNS, Address: "ip-10-1-1-1.us-west-2.compute.internal"}, {Type: v1.NodeExternalDNS, Address: "ec2-55-55-55-55.us-west-2.compute.amazonaws.com"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, shouldError: false, }, { name: "An Internal with multiple internal IPs", nodeIP: net.ParseIP("10.1.1.1"), nodeAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeInternalIP, Address: "10.2.2.2"}, {Type: v1.NodeInternalIP, Address: "10.3.3.3"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, expectedAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, shouldError: false, }, { name: "An InternalIP that isn't valid: should error", nodeIP: net.ParseIP("10.2.2.2"), nodeAddresses: []v1.NodeAddress{ {Type: v1.NodeInternalIP, Address: "10.1.1.1"}, {Type: v1.NodeExternalIP, Address: "55.55.55.55"}, {Type: v1.NodeHostName, Address: testKubeletHostname}, }, expectedAddresses: nil, shouldError: true, }, } for _, testCase := range cases { // testCase setup existingNode := v1.Node{ ObjectMeta: metav1.ObjectMeta{Name: testKubeletHostname, Annotations: make(map[string]string)}, Spec: v1.NodeSpec{}, } kubelet.nodeIP = testCase.nodeIP fakeCloud := &fakecloud.FakeCloud{ Addresses: testCase.nodeAddresses, Err: nil, } kubelet.cloud = fakeCloud kubelet.cloudproviderRequestParallelism = make(chan int, 1) kubelet.cloudproviderRequestSync = 
make(chan int) kubelet.cloudproviderRequestTimeout = 10 * time.Second kubelet.nodeIPValidator = func(nodeIP net.IP) error { return nil } // execute method err := kubelet.setNodeAddress(&existingNode) if err != nil && !testCase.shouldError { t.Errorf("Unexpected error for test %s: %q", testCase.name, err) continue } else if err != nil && testCase.shouldError { // expected an error continue } // Sort both sets for consistent equality sortNodeAddresses(testCase.expectedAddresses) sortNodeAddresses(existingNode.Status.Addresses) assert.True( t, apiequality.Semantic.DeepEqual( testCase.expectedAddresses, existingNode.Status.Addresses, ), fmt.Sprintf("Test %s failed %%s", testCase.name), diff.ObjectDiff(testCase.expectedAddresses, existingNode.Status.Addresses), ) } }
explode_data.jsonl/82100
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2283 }
[ 2830, 3393, 1955, 2522, 2354, 16055, 5179, 1955, 3298, 1155, 353, 8840, 836, 8, 341, 18185, 42, 3760, 1149, 1669, 501, 2271, 42, 3760, 1149, 1155, 11, 895, 1391, 6461, 30485, 89306, 5462, 639, 340, 16867, 1273, 42, 3760, 1149, 727, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListActiveWorkers(t *testing.T) { svc := &mockDynamoDB{tableExist: true, item: map[string]*dynamodb.AttributeValue{}} kclConfig := cfg.NewKinesisClientLibConfig("appName", "test", "us-west-2", "abc"). WithLeaseStealing(true) checkpoint := NewDynamoCheckpoint(kclConfig).WithDynamoDB(svc) err := checkpoint.Init() if err != nil { t.Errorf("Checkpoint initialization failed: %+v", err) } shardStatus := map[string]*par.ShardStatus{ "0000": {ID: "0000", AssignedTo: "worker_1", Checkpoint: "", Mux: &sync.RWMutex{}}, "0001": {ID: "0001", AssignedTo: "worker_2", Checkpoint: "", Mux: &sync.RWMutex{}}, "0002": {ID: "0002", AssignedTo: "worker_4", Checkpoint: "", Mux: &sync.RWMutex{}}, "0003": {ID: "0003", AssignedTo: "worker_0", Checkpoint: "", Mux: &sync.RWMutex{}}, "0004": {ID: "0004", AssignedTo: "worker_1", Checkpoint: "", Mux: &sync.RWMutex{}}, "0005": {ID: "0005", AssignedTo: "worker_3", Checkpoint: "", Mux: &sync.RWMutex{}}, "0006": {ID: "0006", AssignedTo: "worker_3", Checkpoint: "", Mux: &sync.RWMutex{}}, "0007": {ID: "0007", AssignedTo: "worker_0", Checkpoint: "", Mux: &sync.RWMutex{}}, "0008": {ID: "0008", AssignedTo: "worker_4", Checkpoint: "", Mux: &sync.RWMutex{}}, "0009": {ID: "0009", AssignedTo: "worker_2", Checkpoint: "", Mux: &sync.RWMutex{}}, "0010": {ID: "0010", AssignedTo: "worker_0", Checkpoint: ShardEnd, Mux: &sync.RWMutex{}}, } workers, err := checkpoint.ListActiveWorkers(shardStatus) if err != nil { t.Error(err) } for workerID, shards := range workers { assert.Equal(t, 2, len(shards)) for _, shard := range shards { assert.Equal(t, workerID, shard.AssignedTo) } } }
explode_data.jsonl/9845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 725 }
[ 2830, 3393, 852, 5728, 74486, 1155, 353, 8840, 836, 8, 341, 1903, 7362, 1669, 609, 16712, 35, 85608, 3506, 90, 2005, 25613, 25, 830, 11, 1509, 25, 2415, 14032, 8465, 67, 83348, 33775, 1130, 6257, 532, 16463, 564, 2648, 1669, 13286, 71...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestExecuteQuery(t *testing.T) { for _, testEnv := range testEnvs { // Query is only supported and tested on the CouchDB testEnv if testEnv.getName() == couchDBtestEnvName { t.Logf("Running test for TestEnv = %s", testEnv.getName()) testLedgerID := "testexecutequery" testEnv.init(t, testLedgerID, nil) testExecuteQuery(t, testEnv) testEnv.cleanup() } } }
explode_data.jsonl/63611
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 17174, 2859, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 14359, 1669, 2088, 1273, 1702, 11562, 341, 197, 197, 322, 11361, 374, 1172, 7248, 323, 12510, 389, 279, 61128, 3506, 1273, 14359, 198, 197, 743, 1273, 14359, 8911,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCollectorAutoscalersDisabledByExplicitReplicaSize(t *testing.T) { // prepare tests := []int32{int32(0), int32(1)} for _, test := range tests { jaeger := v1.NewJaeger(types.NamespacedName{Name: "my-instance"}) jaeger.Spec.Collector.Replicas = &test c := NewCollector(jaeger) // test a := c.Autoscalers() // verify assert.Len(t, a, 0) } }
explode_data.jsonl/59535
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 53694, 19602, 436, 5416, 388, 25907, 1359, 98923, 18327, 15317, 1695, 1155, 353, 8840, 836, 8, 341, 197, 322, 10549, 198, 78216, 1669, 3056, 396, 18, 17, 90, 396, 18, 17, 7, 15, 701, 526, 18, 17, 7, 16, 73822, 2023, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNegotiationNeeded(t *testing.T) { lim := test.TimeOut(time.Second * 30) defer lim.Stop() report := test.CheckRoutines(t) defer report() pc, err := NewPeerConnection(Configuration{}) if err != nil { t.Error(err.Error()) } var wg sync.WaitGroup wg.Add(1) pc.OnNegotiationNeeded(wg.Done) _, err = pc.CreateDataChannel("initial_data_channel", nil) assert.NoError(t, err) wg.Wait() assert.NoError(t, pc.Close()) }
explode_data.jsonl/8651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 47800, 354, 7101, 56706, 1155, 353, 8840, 836, 8, 341, 197, 4659, 1669, 1273, 16299, 2662, 9730, 32435, 353, 220, 18, 15, 340, 16867, 4568, 30213, 2822, 69931, 1669, 1273, 10600, 49, 28628, 1155, 340, 16867, 1895, 2822, 8201...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseWithoutKeyColumn(t *testing.T) { domain := newSplitQuery("test") table := &Table{hashBy: "col6", sortBy: "NA"} queries, err := parseThriftDomain(domain, table.hashBy, table.sortBy) assert.Error(t, err) assert.Nil(t, queries) }
explode_data.jsonl/74844
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 14463, 26040, 1592, 2933, 1155, 353, 8840, 836, 8, 341, 2698, 3121, 1669, 501, 20193, 2859, 445, 1944, 1138, 26481, 1669, 609, 2556, 90, 8296, 1359, 25, 330, 2074, 21, 497, 66913, 25, 330, 7326, 16707, 197, 42835, 11, 1848...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientCloseTelemetryConnection(t *testing.T) { // create server telemetrybuffer and start server tb = NewTelemetryBuffer(hostAgentUrl) err := tb.StartServer() if err == nil { go tb.BufferAndPushData(0) } if !SockExists() { t.Errorf("telemetry sock doesn't exist") } // create client telemetrybuffer and connect to server tb1 := NewTelemetryBuffer(hostAgentUrl) if err := tb1.Connect(); err != nil { t.Errorf("connection to telemetry server failed %v", err) } // Close client connection tb1.Close() time.Sleep(300 * time.Millisecond) if len(tb.connections) != 0 { t.Errorf("All connections not closed as expected") } // Exit server thread and close server connection tb.Cancel() }
explode_data.jsonl/49199
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 2959, 7925, 6639, 35958, 4526, 1155, 353, 8840, 836, 8, 341, 197, 322, 1855, 3538, 61037, 7573, 323, 1191, 3538, 198, 62842, 284, 1532, 6639, 35958, 4095, 19973, 16810, 2864, 340, 9859, 1669, 16363, 12101, 5475, 741, 743, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSuObjectPut(t *testing.T) { assert := assert.T(t).This ob := SuObject{} ob.Set(One, One) // put assert(ob.NamedSize()).Is(1) assert(ob.ListSize()).Is(0) ob.Set(Zero, Zero) // add + migrate assert(ob.NamedSize()).Is(0) assert(ob.ListSize()).Is(2) ob.Set(Zero, SuInt(10)) // set ob.Set(One, SuInt(11)) // set assert(ob.Get(nil, Zero)).Is(SuInt(10)) assert(ob.Get(nil, One)).Is(SuInt(11)) }
explode_data.jsonl/7115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 36459, 1190, 19103, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 836, 1155, 568, 1986, 198, 63353, 1669, 16931, 1190, 16094, 63353, 4202, 7, 3966, 11, 3776, 8, 442, 2182, 198, 6948, 49595, 57872, 1695, 6011, 3872, 7, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckpointSortBySeqno(t *testing.T) { assert := assert.New(t) fmt.Println("============== Test case start: TestCheckpointSortBySeqno =================") defer fmt.Println("============== Test case end: TestCheckpointSortBySeqno =================") var unsortedList CheckpointRecordsList validFailoverLog := uint64(1234) validFailoverLog2 := uint64(12345) failoverLog := &mcc.FailoverLog{[2]uint64{validFailoverLog, 0}, [2]uint64{validFailoverLog2, 0}} //var invalidFailoverLog uint64 = "2345" earlySeqno := uint64(100) laterSeqno := uint64(200) latestSeqno := uint64(300) record := &CheckpointRecord{ Failover_uuid: validFailoverLog, Seqno: earlySeqno, Target_vb_opaque: nil, Target_Seqno: 0, } record2 := &CheckpointRecord{ Failover_uuid: validFailoverLog, Seqno: laterSeqno, } record3 := &CheckpointRecord{ Failover_uuid: validFailoverLog, Seqno: latestSeqno, } unsortedList = append(unsortedList, record) unsortedList = append(unsortedList, record2) unsortedList = append(unsortedList, record3) toSortList := unsortedList.PrepareSortStructure(failoverLog, nil) sort.Sort(toSortList) outputList := toSortList.ToRegularList() for i := 0; i < len(outputList)-1; i++ { assert.True(outputList[i].Seqno > outputList[i+1].Seqno) } }
explode_data.jsonl/25101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 520 }
[ 2830, 3393, 92688, 10231, 1359, 20183, 2152, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 11009, 12419, 445, 38603, 3393, 1142, 1191, 25, 3393, 92688, 10231, 1359, 20183, 2152, 24818, 1138, 16867, 8879, 12419, 445, 386...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIrrelevantNth(t *testing.T) { { opts := defaultOptions() words := []string{"--nth", "..", "-x"} parseOptions(opts, words) postProcessOptions(opts) if len(opts.Nth) != 0 { t.Errorf("nth should be empty: %v", opts.Nth) } } for _, words := range [][]string{{"--nth", "..,3", "+x"}, {"--nth", "3,1..", "+x"}, {"--nth", "..-1,1", "+x"}} { { opts := defaultOptions() parseOptions(opts, words) postProcessOptions(opts) if len(opts.Nth) != 0 { t.Errorf("nth should be empty: %v", opts.Nth) } } { opts := defaultOptions() words = append(words, "-x") parseOptions(opts, words) postProcessOptions(opts) if len(opts.Nth) != 2 { t.Errorf("nth should not be empty: %v", opts.Nth) } } } }
explode_data.jsonl/40865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 361 }
[ 2830, 3393, 48113, 97573, 45, 339, 1155, 353, 8840, 836, 8, 341, 197, 515, 197, 64734, 1669, 1638, 3798, 741, 197, 197, 5761, 1669, 3056, 917, 4913, 313, 51738, 497, 32213, 497, 6523, 87, 16707, 197, 75115, 3798, 30885, 11, 4244, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestServe(t *testing.T) { t.Run("with empty http request body", func(t *testing.T) { k8sAPI, err := k8s.NewFakeAPI() if err != nil { panic(err) } testServer := getConfiguredServer(mockHTTPServer, k8sAPI, nil, nil) in := bytes.NewReader(nil) request := httptest.NewRequest(http.MethodGet, "/", in) recorder := httptest.NewRecorder() testServer.serve(recorder, request) if recorder.Code != http.StatusOK { t.Errorf("HTTP response status mismatch. Expected: %d. Actual: %d", http.StatusOK, recorder.Code) } if reflect.DeepEqual(recorder.Body.Bytes(), []byte("")) { t.Errorf("Content mismatch. Expected HTTP response body to be empty %v", recorder.Body.Bytes()) } }) }
explode_data.jsonl/41101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 60421, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 4197, 4287, 1758, 1681, 2487, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 16463, 23, 82, 7082, 11, 1848, 1669, 595, 23, 82, 7121, 52317, 7082, 741, 197, 743, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestSyncPodDeletesDuplicate verifies that when two running containers map
// to the same pod container name ("foo"), syncing the pod stops exactly one
// of the duplicates and leaves the pod infra (POD) container running.
func TestSyncPodDeletesDuplicate(t *testing.T) {
	dm, fakeDocker := newTestDockerManager()
	pod := makePod("bar", &api.PodSpec{
		Containers: []api.Container{
			{Name: "foo"},
		},
	})
	// "1234" and "4567" are duplicates of container "foo"; "9876" is the pod
	// infra container (name derived from the pod's infra-container hash).
	fakeDocker.SetFakeRunningContainers([]*FakeContainer{
		{
			ID:   "1234",
			Name: "/k8s_foo_bar_new_12345678_1111",
		},
		{
			ID:   "9876",
			Name: "/k8s_POD." + strconv.FormatUint(generatePodInfraContainerHash(pod), 16) + "_bar_new_12345678_2222",
		},
		{
			ID:   "4567",
			Name: "/k8s_foo_bar_new_12345678_3333",
		}})
	runSyncPod(t, dm, fakeDocker, pod, nil, false)
	verifyCalls(t, fakeDocker, []string{
		// Kill the duplicated container.
		"stop",
	})
	// Expect one of the duplicates to be killed.
	if len(fakeDocker.Stopped) != 1 || (fakeDocker.Stopped[0] != "1234" && fakeDocker.Stopped[0] != "4567") {
		t.Errorf("Wrong containers were stopped: %v", fakeDocker.Stopped)
	}
}
explode_data.jsonl/31165
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 389 }
[ 2830, 3393, 12154, 23527, 61317, 53979, 1155, 353, 8840, 836, 8, 341, 2698, 76, 11, 12418, 35, 13659, 1669, 501, 2271, 35, 13659, 2043, 741, 3223, 347, 1669, 1281, 23527, 445, 2257, 497, 609, 2068, 88823, 8327, 515, 197, 197, 74632, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSliceDistinctTo(t *testing.T) { s1 := sliceTestStruct{ Name: "库陈胜", Age: 30, } s2 := sliceTestStruct{ Name: "酷达舒", Age: 29, } s3 := sliceTestStruct{ Name: "库陈胜", Age: 28, } list := []sliceTestStruct{s1, s2, s3} l := isc.SliceDistinctTo(list, func(s sliceTestStruct) string { return s.Name }) t.Logf("%s\n", isc.ToString(l)) b := isc.SliceContains(list, func(s sliceTestStruct) string { return s.Name }, "库陈胜") t.Logf("%v\n", b) }
explode_data.jsonl/23304
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 33236, 72767, 1249, 1155, 353, 8840, 836, 8, 341, 1903, 16, 1669, 15983, 2271, 9422, 515, 197, 21297, 25, 330, 44956, 100348, 99813, 756, 197, 197, 16749, 25, 220, 220, 18, 15, 345, 197, 532, 1903, 17, 1669, 15983, 2271, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestZoektIndexedRepos checks that zoektIndexedRepos partitions a list of
// repository revisions into the subset present in the (fake) Zoekt index and
// the unindexed remainder.
func TestZoektIndexedRepos(t *testing.T) {
	repos := makeRepositoryRevisions(
		"foo/indexed-one@",
		"foo/indexed-two@",
		"foo/indexed-three@",
		"foo/unindexed-one",
		"foo/unindexed-two",
		"foo/multi-rev@a:b",
	)
	// Only the first three repos above are registered in the fake index.
	zoektRepos := map[string]*zoekt.Repository{}
	for _, r := range []*zoekt.Repository{{
		Name:     "foo/indexed-one",
		Branches: []zoekt.RepositoryBranch{{Name: "HEAD", Version: "deadbeef"}},
	}, {
		Name:     "foo/indexed-two",
		Branches: []zoekt.RepositoryBranch{{Name: "HEAD", Version: "deadbeef"}},
	}, {
		Name: "foo/indexed-three",
		Branches: []zoekt.RepositoryBranch{
			{Name: "HEAD", Version: "deadbeef"},
			{Name: "foobar", Version: "deadcow"},
		},
	}} {
		zoektRepos[r.Name] = r
	}
	// makeIndexed clones the repo/rev pairs into fresh values so the expected
	// indexed set does not alias the input slice elements.
	makeIndexed := func(repos []*search.RepositoryRevisions) []*search.RepositoryRevisions {
		var indexed []*search.RepositoryRevisions
		for _, r := range repos {
			rev := &search.RepositoryRevisions{
				Repo: r.Repo,
				Revs: r.Revs,
			}
			indexed = append(indexed, rev)
		}
		return indexed
	}
	cases := []struct {
		name      string
		repos     []*search.RepositoryRevisions
		indexed   []*search.RepositoryRevisions
		unindexed []*search.RepositoryRevisions
	}{{
		name:      "all",
		repos:     repos,
		indexed:   makeIndexed(repos[:3]),
		unindexed: repos[3:],
	}, {
		name:      "one unindexed",
		repos:     repos[3:4],
		indexed:   repos[:0],
		unindexed: repos[3:4],
	}, {
		name:      "one indexed",
		repos:     repos[:1],
		indexed:   makeIndexed(repos[:1]),
		unindexed: repos[:0],
	}}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			indexed, unindexed := zoektIndexedRepos(zoektRepos, tc.repos, nil)
			if diff := cmp.Diff(repoRevsSliceToMap(tc.indexed), indexed.repoRevs); diff != "" {
				t.Error("unexpected indexed:", diff)
			}
			if diff := cmp.Diff(tc.unindexed, unindexed); diff != "" {
				t.Error("unexpected unindexed:", diff)
			}
		})
	}
}
explode_data.jsonl/52663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 892 }
[ 2830, 3393, 57, 78, 17149, 69941, 693, 966, 1155, 353, 8840, 836, 8, 341, 17200, 966, 1669, 1281, 4624, 693, 40015, 1006, 197, 197, 1, 7975, 9022, 291, 18589, 31, 756, 197, 197, 1, 7975, 9022, 291, 37402, 31, 756, 197, 197, 1, 797...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCCPA drives the cookie-sync endpoint with various us_privacy (CCPA)
// consent strings and checks which bidder syncs are returned: an enforced
// opt-out yields no syncs, everything else returns appnexus.
//
// NOTE(review): several case descriptions say "Feature Flag On" while
// enforceCCPA is false — confirm whether the descriptions or the flags are
// the intended pairing.
func TestCCPA(t *testing.T) {
	testCases := []struct {
		description   string
		requestBody   string
		enforceCCPA   bool
		expectedSyncs []string
	}{
		{
			description:   "Feature Flag On & Opt-Out Yes",
			requestBody:   `{"bidders":["appnexus"], "us_privacy":"1-Y-"}`,
			enforceCCPA:   true,
			expectedSyncs: []string{},
		},
		{
			description:   "Feature Flag Off & Opt-Out Yes",
			requestBody:   `{"bidders":["appnexus"], "us_privacy":"1-Y-"}`,
			enforceCCPA:   false,
			expectedSyncs: []string{"appnexus"},
		},
		{
			description:   "Feature Flag On & Opt-Out No",
			requestBody:   `{"bidders":["appnexus"], "us_privacy":"1-N-"}`,
			enforceCCPA:   false,
			expectedSyncs: []string{"appnexus"},
		},
		{
			description:   "Feature Flag On & Opt-Out Unknown",
			requestBody:   `{"bidders":["appnexus"], "us_privacy":"1---"}`,
			enforceCCPA:   false,
			expectedSyncs: []string{"appnexus"},
		},
		{
			description:   "Feature Flag On & Opt-Out Invalid",
			requestBody:   `{"bidders":["appnexus"], "us_privacy":"invalid"}`,
			enforceCCPA:   false,
			expectedSyncs: []string{"appnexus"},
		},
		{
			description:   "Feature Flag On & Opt-Out Not Provided",
			requestBody:   `{"bidders":["appnexus"]}`,
			enforceCCPA:   false,
			expectedSyncs: []string{"appnexus"},
		},
	}
	for _, test := range testCases {
		gdpr := config.GDPR{UsersyncIfAmbiguous: true}
		ccpa := config.CCPA{Enforce: test.enforceCCPA}
		rr := doConfigurablePost(test.requestBody, nil, true, syncersForTest(), gdpr, ccpa)
		assert.Equal(t, http.StatusOK, rr.Code, test.description+":httpResponseCode")
		assert.ElementsMatch(t, test.expectedSyncs, parseSyncs(t, rr.Body.Bytes()), test.description+":syncs")
		assert.Equal(t, "no_cookie", parseStatus(t, rr.Body.Bytes()), test.description+":status")
	}
}
explode_data.jsonl/21687
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 734 }
[ 2830, 3393, 3706, 8041, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 42407, 256, 914, 198, 197, 23555, 5444, 256, 914, 198, 197, 81848, 8833, 3706, 8041, 256, 1807, 198, 197, 42400, 12154, 82, 3056, 917, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMultiMerge(t *testing.T) { i1 := newListPostings(1, 2, 3, 4, 5, 6, 1000, 1001) i2 := newListPostings(2, 4, 5, 6, 7, 8, 999, 1001) i3 := newListPostings(1, 2, 5, 6, 7, 8, 1001, 1200) res, err := ExpandPostings(Merge(i1, i2, i3)) testutil.Ok(t, err) testutil.Equals(t, []uint64{1, 2, 3, 4, 5, 6, 7, 8, 999, 1000, 1001, 1200}, res) }
explode_data.jsonl/13125
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 20358, 52096, 1155, 353, 8840, 836, 8, 341, 8230, 16, 1669, 67418, 4133, 819, 7, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, 220, 21, 11, 220, 16, 15, 15, 15, 11, 220, 16, 15, 15, 16, 340, 8230, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_ConsistentHash measures how evenly the consistent-hash ring spreads
// keys across nodes for several virtual-node counts, printing the per-node
// counts and the standard deviation of the distribution.
func Test_ConsistentHash(t *testing.T) {
	virtualNodeList := []int{100, 150, 200}
	// Test with 10 servers.
	nodeNum := 10
	// 1,000,000 test keys.
	testCount := 1000000
	for _, virtualNode := range virtualNodeList {
		consistentHash := &Consistent{}
		distributeMap := make(map[string]int64)
		for i := 1; i <= nodeNum; i++ {
			serverName := "172.17.0." + strconv.Itoa(i)
			consistentHash.Add(serverName, virtualNode)
			distributeMap[serverName] = 0
		}
		// Distribute the 1,000,000 keys across the ring and count per node.
		for i := 0; i < testCount; i++ {
			testName := "testName"
			serverName := consistentHash.GetNode(testName + strconv.Itoa(i))
			distributeMap[serverName] = distributeMap[serverName] + 1
		}
		var keys []string
		var values []float64
		for k, v := range distributeMap {
			keys = append(keys, k)
			values = append(values, float64(v))
		}
		// Map iteration order is random; sort keys for deterministic output.
		sort.Strings(keys)
		fmt.Printf("####测试%d个结点,一个结点有%d个虚拟结点,%d条测试数据\n", nodeNum, virtualNode, testCount)
		for _, k := range keys {
			fmt.Printf("服务器地址:%s 分布数据数:%d\n", k, distributeMap[k])
		}
		fmt.Printf("标准差:%f\n\n", getStandardDeviation(values))
	}
}
explode_data.jsonl/20003
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 533 }
[ 2830, 3393, 920, 2382, 18128, 6370, 1155, 353, 8840, 836, 8, 341, 9558, 1955, 852, 1669, 3056, 396, 90, 16, 15, 15, 11, 220, 16, 20, 15, 11, 220, 17, 15, 15, 532, 197, 322, 98313, 66635, 16, 15, 53938, 89047, 198, 20831, 4651, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// Test_20210217 is a table-driven test for matrixReshape (named after the
// date of the daily problem): each case supplies a matrix, target row and
// column counts, and the expected reshaped matrix.
func Test_20210217(t *testing.T) {
	type params struct {
		para1 [][]int // input matrix
		para2 int     // target number of rows
		para3 int     // target number of columns
		ans   [][]int // expected result
	}
	qs := []params{
		{
			para1: [][]int{{1, 2}, {3, 4}},
			para2: 1,
			para3: 4,
			ans:   [][]int{{1, 2, 3, 4}},
		},
		{
			// 2x4 does not match the 4 elements; the expected result is the
			// input matrix unchanged.
			para1: [][]int{{1, 2}, {3, 4}},
			para2: 2,
			para3: 4,
			ans:   [][]int{{1, 2}, {3, 4}},
		},
	}
	utils.Segmentation("20210217")
	for _, q := range qs {
		ret, p, r, c := q.ans, q.para1, q.para2, q.para3
		res := matrixReshape(p, r, c)
		fmt.Printf("【input】: %v %d %d \t【output】: %v\n", p, r, c, res)
		if !utils.DeepEqual(ret, res) {
			t.Errorf(`"%v" not equal to "%v"`, res, ret)
		}
	}
}
explode_data.jsonl/11762
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 372 }
[ 2830, 3393, 62, 17, 15, 17, 16, 15, 17, 16, 22, 1155, 353, 8840, 836, 8, 341, 13158, 3628, 2036, 341, 197, 197, 14794, 16, 52931, 396, 198, 197, 197, 14794, 17, 526, 198, 197, 197, 14794, 18, 526, 198, 197, 43579, 256, 52931, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNestedCustomMarshaler(t *testing.T) { result, err := Marshal(nestedCustomMarshalerData) if err != nil { t.Fatal(err) } expected := nestedCustomMarshalerToml if !bytes.Equal(result, expected) { t.Errorf("Bad nested custom marshaler: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result) } }
explode_data.jsonl/46326
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 71986, 10268, 55438, 261, 1155, 353, 8840, 836, 8, 341, 9559, 11, 1848, 1669, 35667, 1445, 9980, 10268, 55438, 261, 1043, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 42400, 1669, 24034, 10268, 5543...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestDistSQLReceiverUpdatesCaches checks that range and lease metadata
// pushed through a DistSQLReceiver is written back into the range descriptor
// cache, so later lookups by start key find the pushed descriptors.
func TestDistSQLReceiverUpdatesCaches(t *testing.T) {
	defer leaktest.AfterTest(t)()
	defer log.Scope(t).Close(t)
	ctx := context.Background()
	size := func() int64 { return 2 << 10 }
	st := cluster.MakeTestingClusterSettings()
	rangeCache := kvcoord.NewRangeDescriptorCache(st, nil /* db */, size, stop.NewStopper())
	r := MakeDistSQLReceiver(
		ctx, nil /* resultWriter */, tree.Rows,
		rangeCache, nil /* txn */, nil /* updateClock */, &SessionTracing{})
	replicas := []roachpb.ReplicaDescriptor{{ReplicaID: 1}, {ReplicaID: 2}, {ReplicaID: 3}}
	descs := []roachpb.RangeDescriptor{
		{RangeID: 1, StartKey: roachpb.RKey("a"), EndKey: roachpb.RKey("c"), InternalReplicas: replicas},
		{RangeID: 2, StartKey: roachpb.RKey("c"), EndKey: roachpb.RKey("e"), InternalReplicas: replicas},
		{RangeID: 3, StartKey: roachpb.RKey("g"), EndKey: roachpb.RKey("z"), InternalReplicas: replicas},
	}
	// Push some metadata and check that the caches are updated with it.
	// First push carries the first two ranges, second push the third.
	status := r.Push(nil /* row */, &execinfrapb.ProducerMetadata{
		Ranges: []roachpb.RangeInfo{
			{
				Desc: descs[0],
				Lease: roachpb.Lease{
					Replica:  roachpb.ReplicaDescriptor{NodeID: 1, StoreID: 1, ReplicaID: 1},
					Start:    hlc.MinTimestamp,
					Sequence: 1,
				},
			},
			{
				Desc: descs[1],
				Lease: roachpb.Lease{
					Replica:  roachpb.ReplicaDescriptor{NodeID: 2, StoreID: 2, ReplicaID: 2},
					Start:    hlc.MinTimestamp,
					Sequence: 1,
				},
			},
		}})
	if status != execinfra.NeedMoreRows {
		t.Fatalf("expected status NeedMoreRows, got: %d", status)
	}
	status = r.Push(nil /* row */, &execinfrapb.ProducerMetadata{
		Ranges: []roachpb.RangeInfo{
			{
				Desc: descs[2],
				Lease: roachpb.Lease{
					Replica:  roachpb.ReplicaDescriptor{NodeID: 3, StoreID: 3, ReplicaID: 3},
					Start:    hlc.MinTimestamp,
					Sequence: 1,
				},
			},
		}})
	if status != execinfra.NeedMoreRows {
		t.Fatalf("expected status NeedMoreRows, got: %d", status)
	}
	// Every pushed descriptor (and its lease) must now be in the cache.
	for i := range descs {
		ri := rangeCache.GetCached(ctx, descs[i].StartKey, false /* inclusive */)
		require.NotNilf(t, ri, "failed to find range for key: %s", descs[i].StartKey)
		require.Equal(t, &descs[i], ri.Desc())
		require.NotNil(t, ri.Lease())
	}
}
explode_data.jsonl/43680
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 948 }
[ 2830, 3393, 23356, 6688, 25436, 37091, 34, 14242, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 20985, 1669, 2266, 19047, 2822, 13832, 1669, 2915, 368, 526, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSetSheetBackground sets a background image on Sheet2 and then sets the
// same image again — presumably the second call exercises replacing an
// already-present background (TODO confirm) — then saves the workbook.
func TestSetSheetBackground(t *testing.T) {
	f, err := OpenFile(filepath.Join("test", "Book1.xlsx"))
	if !assert.NoError(t, err) {
		t.FailNow()
	}
	err = f.SetSheetBackground("Sheet2", filepath.Join("test", "images", "background.jpg"))
	if !assert.NoError(t, err) {
		t.FailNow()
	}
	// Setting the same background a second time must also succeed.
	err = f.SetSheetBackground("Sheet2", filepath.Join("test", "images", "background.jpg"))
	if !assert.NoError(t, err) {
		t.FailNow()
	}
	assert.NoError(t, f.SaveAs(filepath.Join("test", "TestSetSheetBackground.xlsx")))
}
explode_data.jsonl/36965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 1649, 10541, 8706, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 5264, 1703, 34793, 22363, 445, 1944, 497, 330, 7134, 16, 46838, 5455, 743, 753, 2207, 35699, 1155, 11, 1848, 8, 341, 197, 3244, 57243, 7039, 741, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestEncapsulate checks frame encapsulation of two captured payloads,
// verifying the computed FCS bytes and the address/control prefix flag on
// the resulting Frame.
func TestEncapsulate(t *testing.T) {
	type args struct {
		p                    []byte
		hasAddressCtrlPrefix bool
	}
	tests := []struct {
		name string
		args args
		want *Frame
	}{
		{
			name: "real packet 1",
			args: args{
				p:                    []byte{0x08, 0x91},
				hasAddressCtrlPrefix: false,
			},
			want: &Frame{
				Payload:              []byte{0x08, 0x91},
				FCS:                  []byte{0x87, 0x44},
				HasAddressCtrlPrefix: false,
			},
		},
		{
			name: "real packet 2",
			args: args{
				p:                    []byte{0x08, 0xb1},
				hasAddressCtrlPrefix: false,
			},
			want: &Frame{
				Payload:              []byte{0x08, 0xb1},
				FCS:                  []byte{0x85, 0x65},
				HasAddressCtrlPrefix: false,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := Encapsulate(tt.args.p, tt.args.hasAddressCtrlPrefix); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("Encapsulate() = %v, want %v", got, tt.want)
			}
		})
	}
}
explode_data.jsonl/21447
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 556 }
[ 2830, 3393, 7408, 2625, 6334, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 3223, 503, 3056, 3782, 198, 197, 63255, 4286, 15001, 14335, 1807, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMockHashOracle_Register(t *testing.T) { oracle := NewMockHashOracle(numOfClients) oracle.Register(generateSigning(t).Verifier().String()) oracle.Register(generateSigning(t).Verifier().String()) assert.Equal(t, 2, len(oracle.clients)) }
explode_data.jsonl/25843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 11571, 6370, 48663, 73124, 1155, 353, 8840, 836, 8, 341, 197, 69631, 1669, 1532, 11571, 6370, 48663, 8068, 2124, 47174, 340, 197, 69631, 19983, 3268, 13220, 93358, 1155, 568, 82394, 1005, 703, 2398, 197, 69631, 19983, 3268, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestParseNotificationTopic(t *testing.T) { for _, test := range []struct { in string wantProjectID string wantTopicID string }{ {"", "?", "?"}, {"foobar", "?", "?"}, {"//pubsub.googleapis.com/projects/foo", "?", "?"}, {"//pubsub.googleapis.com/projects/my-project/topics/my-topic", "my-project", "my-topic"}, } { gotProjectID, gotTopicID := parseNotificationTopic(test.in) if gotProjectID != test.wantProjectID || gotTopicID != test.wantTopicID { t.Errorf("%q: got (%q, %q), want (%q, %q)", test.in, gotProjectID, gotTopicID, test.wantProjectID, test.wantTopicID) } } }
explode_data.jsonl/57369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 14463, 11196, 26406, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 17430, 310, 914, 198, 197, 50780, 7849, 915, 914, 198, 197, 50780, 26406, 915, 256, 914, 198, 197, 59403, 197, 197, 491...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetRegion(t *testing.T) { aws := mockAvailabilityZone("us-west-2e") zones, ok := aws.Zones() if !ok { t.Fatalf("Unexpected missing zones impl") } zone, err := zones.GetZone() if err != nil { t.Fatalf("unexpected error %v", err) } if zone.Region != "us-west-2" { t.Errorf("Unexpected region: %s", zone.Region) } if zone.FailureDomain != "us-west-2e" { t.Errorf("Unexpected FailureDomain: %s", zone.FailureDomain) } }
explode_data.jsonl/12848
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 1949, 14091, 1155, 353, 8840, 836, 8, 341, 197, 8635, 1669, 7860, 51703, 15363, 445, 355, 37602, 12, 17, 68, 1138, 20832, 3154, 11, 5394, 1669, 31521, 13476, 3154, 741, 743, 753, 562, 341, 197, 3244, 30762, 445, 29430, 740...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestBaseAddressPubKey checks BaseAccount invariants: the address is fixed
// once set at construction (SetAddress then errors), while the pubkey starts
// unset and may be set and later replaced; an empty account accepts an
// address.
func TestBaseAddressPubKey(t *testing.T) {
	_, pub1, addr1 := testdata.KeyTestPubAddr()
	_, pub2, addr2 := testdata.KeyTestPubAddr()
	acc := types.NewBaseAccountWithAddress(addr1)

	// check the address (set) and pubkey (not set)
	require.EqualValues(t, addr1, acc.GetAddress())
	require.EqualValues(t, nil, acc.GetPubKey())

	// can't override address
	err := acc.SetAddress(addr2)
	require.NotNil(t, err)
	require.EqualValues(t, addr1, acc.GetAddress())

	// set the pubkey
	err = acc.SetPubKey(pub1)
	require.Nil(t, err)
	require.Equal(t, pub1, acc.GetPubKey())

	// can override pubkey
	err = acc.SetPubKey(pub2)
	require.Nil(t, err)
	require.Equal(t, pub2, acc.GetPubKey())

	//------------------------------------

	// can set address on empty account
	acc2 := types.BaseAccount{}
	err = acc2.SetAddress(addr2)
	require.Nil(t, err)
	require.EqualValues(t, addr2, acc2.GetAddress())
}
explode_data.jsonl/80928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 3978, 4286, 29162, 1592, 1155, 353, 8840, 836, 8, 341, 197, 6878, 6675, 16, 11, 10789, 16, 1669, 1273, 691, 9610, 2271, 29162, 13986, 741, 197, 6878, 6675, 17, 11, 10789, 17, 1669, 1273, 691, 9610, 2271, 29162, 13986, 741,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDefaultKeyFunc covers defaultKeyFunc's key resolution paths: resolving
// via the token's "kid" header, falling back to the default key id, and
// propagating a resolver error.
func TestDefaultKeyFunc(t *testing.T) {
	defaultKeyID := "default-key"
	tests := []struct {
		description       string
		token             *jwt.Token
		useDefaultKey     bool
		resolveErr        error
		expectedPublicKey interface{}
		expectedErr       error
	}{
		{
			description:       "Success",
			token:             jwt.New(jwt.SigningMethodHS256),
			expectedPublicKey: "public-key",
		},
		{
			description:       "Success with Default Key",
			useDefaultKey:     true,
			token:             jwt.New(jwt.SigningMethodHS256),
			expectedPublicKey: "public-key",
		},
		{
			description: "Resolve Error",
			token:       jwt.New(jwt.SigningMethodHS256),
			resolveErr:  errors.New("resolve error"),
			expectedErr: errors.New("resolve error"),
		},
	}
	for _, tc := range tests {
		t.Run(tc.description, func(t *testing.T) {
			assert := assert.New(t)
			r := new(key.MockResolver)
			pair := new(key.MockPair)
			pair.On("Public").Return(tc.expectedPublicKey).Once()
			if tc.useDefaultKey {
				// Resolver is expected to be called with the default key id.
				r.On("ResolveKey", mock.Anything, defaultKeyID).Return(pair, tc.resolveErr).Once()
			} else {
				// Without the default key, the key id comes from the token's
				// "kid" header.
				tc.token.Header = map[string]interface{}{
					"kid": "some-value",
				}
				r.On("ResolveKey", mock.Anything, "some-value").Return(pair, tc.resolveErr).Once()
			}
			publicKey, err := defaultKeyFunc(context.Background(), defaultKeyID, r)(tc.token)
			assert.Equal(tc.expectedPublicKey, publicKey)
			if tc.expectedErr == nil || err == nil {
				assert.Equal(tc.expectedErr, err)
			} else {
				// Wrapped errors: only require the expected message to appear.
				assert.Contains(err.Error(), tc.expectedErr.Error())
			}
		})
	}
}
explode_data.jsonl/15369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 696 }
[ 2830, 3393, 3675, 1592, 9626, 1155, 353, 8840, 836, 8, 341, 11940, 1592, 915, 1669, 330, 2258, 16173, 1837, 78216, 1669, 3056, 1235, 341, 197, 42407, 981, 914, 198, 197, 43947, 1797, 353, 41592, 32277, 198, 197, 41819, 3675, 1592, 257, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEncodeYAMLNonStringMapKey(t *testing.T) { f := newFixture(t) defer f.TearDown() f.File("Tiltfile", `encode_yaml({1: 'hello'})`) _, err := f.ExecFile("Tiltfile") require.Error(t, err) require.Contains(t, err.Error(), "only string keys are supported in maps. found key '1' of type int64") }
explode_data.jsonl/10619
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 32535, 56, 31102, 8121, 703, 2227, 1592, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 340, 16867, 282, 836, 682, 4454, 2822, 1166, 8576, 445, 51, 2963, 1192, 497, 1565, 6180, 64380, 2306, 16, 25, 364, 14990,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFloatColumnPrecision saves a float64 into a column declared as
// float(255,5) and checks the value round-trips without losing precision.
// Skipped unless GORM_DIALECT is "mysql" or "sqlite".
func TestFloatColumnPrecision(t *testing.T) {
	if dialect := os.Getenv("GORM_DIALECT"); dialect != "mysql" && dialect != "sqlite" {
		t.Skip()
	}
	type FloatTest struct {
		ID         string  `gorm:"primary_key"`
		FloatValue float64 `gorm:"column:float_value" sql:"type:float(255,5);"`
	}
	// Rebuild the table from scratch so the column type is applied.
	DB.DropTable(&FloatTest{})
	DB.AutoMigrate(&FloatTest{})
	data := FloatTest{ID: "uuid", FloatValue: 112.57315}
	if err := DB.Save(&data).Error; err != nil || data.ID != "uuid" || data.FloatValue != 112.57315 {
		t.Errorf("Float value should not lose precision")
	}
}
explode_data.jsonl/28075
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 5442, 2933, 55501, 1155, 353, 8840, 836, 8, 341, 743, 42279, 1669, 2643, 64883, 445, 38, 4365, 1557, 5863, 3965, 5038, 42279, 961, 330, 12272, 1, 1009, 42279, 961, 330, 37042, 1, 341, 197, 3244, 57776, 741, 197, 630, 13158...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestStreamInterceptorEnhancesClient verifies that the stream interceptor
// reads the peer address from the incoming context and makes it available via
// client.FromContext inside the handler's context.
func TestStreamInterceptorEnhancesClient(t *testing.T) {
	// prepare: a stream whose context carries a gRPC peer at 1.1.1.1
	inCtx := peer.NewContext(context.Background(), &peer.Peer{
		Addr: &net.IPAddr{IP: net.IPv4(1, 1, 1, 1)},
	})
	var outContext context.Context
	stream := &mockedStream{
		ctx: inCtx,
	}
	// The handler captures the context it is invoked with for inspection.
	handler := func(srv interface{}, stream grpc.ServerStream) error {
		outContext = stream.Context()
		return nil
	}
	// test
	err := enhanceStreamWithClientInformation(nil, stream, nil, handler)
	// verify: the captured context yields the client with the peer's address
	assert.NoError(t, err)
	cl := client.FromContext(outContext)
	assert.Equal(t, "1.1.1.1", cl.Addr.String())
}
explode_data.jsonl/80336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 3027, 32786, 57468, 3020, 2959, 1155, 353, 8840, 836, 8, 341, 197, 322, 10549, 198, 17430, 23684, 1669, 14397, 7121, 1972, 5378, 19047, 1507, 609, 16537, 1069, 34756, 515, 197, 197, 13986, 25, 609, 4711, 46917, 13986, 90, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWebSocketReverseProxyBackendShutDown checks that when the websocket
// backend shuts down immediately after a connection is established, the
// client's pending Receive fails rather than returning data.
func TestWebSocketReverseProxyBackendShutDown(t *testing.T) {
	shutdown := make(chan struct{})
	// The backend signals on the channel as soon as a websocket connects;
	// the goroutine below then closes the backend server.
	backend := httptest.NewServer(websocket.Handler(func(ws *websocket.Conn) {
		shutdown <- struct{}{}
	}))
	defer backend.Close()
	go func() {
		<-shutdown
		backend.Close()
	}()
	// Get proxy to use for the test
	p := newWebSocketTestProxy(backend.URL, false, 30*time.Second)
	backendProxy := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		p.ServeHTTP(w, r)
	}))
	defer backendProxy.Close()
	// Set up WebSocket client
	url := strings.Replace(backendProxy.URL, "http://", "ws://", 1)
	ws, err := websocket.Dial(url, "", backendProxy.URL)
	if err != nil {
		t.Fatal(err)
	}
	defer ws.Close()
	// Receive must fail because the backend has gone away.
	var actualMsg string
	if rcvErr := websocket.Message.Receive(ws, &actualMsg); rcvErr == nil {
		t.Errorf("we don't get backend shutdown notification")
	}
}
explode_data.jsonl/64230
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 338 }
[ 2830, 3393, 61238, 45695, 16219, 29699, 2016, 332, 4454, 1155, 353, 8840, 836, 8, 341, 36196, 18452, 1669, 1281, 35190, 2036, 37790, 197, 20942, 1669, 54320, 70334, 7121, 5475, 39769, 9556, 31010, 18552, 57786, 353, 83208, 50422, 8, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1