text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestConcurrentConns(t *testing.T) { listener := TcpListener{ ServiceAddress: ":8195", AllowedPendingMessages: 10000, MaxTCPConnections: 2, } listener.parser, _ = parsers.NewInfluxParser() acc := &testutil.Accumulator{} require.NoError(t, listener.Start(acc)) defer listener.Stop() time.Sleep(time.Millisecond * 25) _, err := net.Dial("tcp", "127.0.0.1:8195") assert.NoError(t, err) _, err = net.Dial("tcp", "127.0.0.1:8195") assert.NoError(t, err) // Connection over the limit: conn, err := net.Dial("tcp", "127.0.0.1:8195") assert.NoError(t, err) net.Dial("tcp", "127.0.0.1:8195") buf := make([]byte, 1500) n, err := conn.Read(buf) assert.NoError(t, err) assert.Equal(t, "Telegraf maximum concurrent TCP connections (2) reached, closing.\n"+ "You may want to increase max_tcp_connections in"+ " the Telegraf tcp listener configuration.\n", string(buf[:n])) _, err = conn.Write([]byte(testMsg)) assert.NoError(t, err) time.Sleep(time.Millisecond * 10) assert.Zero(t, acc.NFields()) }
explode_data.jsonl/33002
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 1109, 3231, 1109, 4412, 1155, 353, 8840, 836, 8, 341, 14440, 798, 1669, 64876, 2743, 515, 197, 91619, 4286, 25, 260, 13022, 23, 16, 24, 20, 756, 197, 197, 35382, 32027, 15820, 25, 220, 16, 15, 15, 15, 15, 345, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_computedList(t *testing.T) { m := testModule(t, "plan-computed-list") p := testProvider("aws") p.GetSchemaReturn = &ProviderSchema{ ResourceTypes: map[string]*configschema.Block{ "aws_instance": { Attributes: map[string]*configschema.Attribute{ "compute": {Type: cty.String, Optional: true}, "foo": {Type: cty.String, Optional: true}, "num": {Type: cty.String, Optional: true}, "list": {Type: cty.List(cty.String), Computed: true}, }, }, }, } p.DiffFn = func(info *InstanceInfo, s *InstanceState, c *ResourceConfig) (*InstanceDiff, error) { diff := &InstanceDiff{ Attributes: map[string]*ResourceAttrDiff{}, } computedKeys := map[string]bool{} for _, k := range c.ComputedKeys { computedKeys[k] = true } compute, _ := c.Raw["compute"].(string) if compute != "" { diff.Attributes[compute] = &ResourceAttrDiff{ Old: "", New: "", NewComputed: true, } diff.Attributes["compute"] = &ResourceAttrDiff{ Old: "", New: compute, } } fooOld := s.Attributes["foo"] fooNew, _ := c.Raw["foo"].(string) if fooOld != fooNew { diff.Attributes["foo"] = &ResourceAttrDiff{ Old: fooOld, New: fooNew, NewComputed: computedKeys["foo"], } } numOld := s.Attributes["num"] numNew, _ := c.Raw["num"].(string) if numOld != numNew { diff.Attributes["num"] = &ResourceAttrDiff{ Old: numOld, New: numNew, NewComputed: computedKeys["num"], } } listOld := s.Attributes["list.#"] if listOld == "" { diff.Attributes["list.#"] = &ResourceAttrDiff{ Old: "", New: "", NewComputed: true, } } return diff, nil } ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), }) plan, diags := ctx.Plan() if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } schema := p.GetSchemaReturn.ResourceTypes["aws_instance"] ty := schema.ImpliedType() if len(plan.Changes.Resources) != 2 { t.Fatal("expected 2 changes, got", len(plan.Changes.Resources)) } for _, res := range plan.Changes.Resources { if 
res.Action != plans.Create { t.Fatalf("expected resource creation, got %s", res.Action) } ric, err := res.Decode(ty) if err != nil { t.Fatal(err) } switch i := ric.Addr.String(); i { case "aws_instance.bar": checkVals(t, objectVal(t, schema, map[string]cty.Value{ "list": cty.UnknownVal(cty.List(cty.String)), "foo": cty.UnknownVal(cty.String), }), ric.After) case "aws_instance.foo": checkVals(t, objectVal(t, schema, map[string]cty.Value{ "list": cty.UnknownVal(cty.List(cty.String)), "num": cty.NumberIntVal(2), "compute": cty.StringVal("list.#"), }), ric.After) default: t.Fatal("unknown instance:", i) } } }
explode_data.jsonl/28673
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1332 }
[ 2830, 3393, 1972, 17, 20485, 2965, 19292, 852, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 11476, 19292, 9029, 1138, 3223, 1669, 1273, 5179, 445, 8635, 1138, 3223, 2234, 8632, 5598, 284, 609, 5179, 8632, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidateAlias(t *testing.T) { require := require.New(t) testFilePath := filepath.Join(os.TempDir(), testPath) defer testutil.CleanupPath(testFilePath) alias := "aaa" config.ReadConfig.Aliases = map[string]string{} err := validateAlias(alias) require.NoError(err) config.ReadConfig.Aliases[alias] = "a" err = validateAlias(alias) require.Error(err) alias = strings.Repeat("a", 50) err = validateAlias(alias) require.Error(err) }
explode_data.jsonl/13450
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 17926, 22720, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 18185, 19090, 1669, 26054, 22363, 9638, 65009, 6184, 1507, 1273, 1820, 340, 16867, 1273, 1314, 727, 60639, 1820, 8623, 19090, 340, 197, 14956, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestControlPlaneIsHealthy(t *testing.T) { readyStatus := corev1.PodStatus{ Conditions: []corev1.PodCondition{ { Type: corev1.PodReady, Status: corev1.ConditionTrue, }, }, } workloadCluster := &cluster{ client: &fakeClient{ list: nodeListForTestControlPlaneIsHealthy(), get: map[string]interface{}{ "kube-system/kube-apiserver-first-control-plane": &corev1.Pod{Status: readyStatus}, "kube-system/kube-apiserver-second-control-plane": &corev1.Pod{Status: readyStatus}, "kube-system/kube-apiserver-third-control-plane": &corev1.Pod{Status: readyStatus}, "kube-system/kube-controller-manager-first-control-plane": &corev1.Pod{Status: readyStatus}, "kube-system/kube-controller-manager-second-control-plane": &corev1.Pod{Status: readyStatus}, "kube-system/kube-controller-manager-third-control-plane": &corev1.Pod{Status: readyStatus}, }, }, } health, err := workloadCluster.controlPlaneIsHealthy(context.Background()) if err != nil { t.Fatal(err) } if len(health) == 0 { t.Fatal("no nodes were checked") } if len(health) != len(nodeListForTestControlPlaneIsHealthy().Items) { t.Fatal("not all nodes were checked") } }
explode_data.jsonl/9765
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 508 }
[ 2830, 3393, 3273, 34570, 3872, 96113, 1155, 353, 8840, 836, 8, 341, 197, 2307, 2522, 1669, 6200, 85, 16, 88823, 2522, 515, 197, 197, 35435, 25, 3056, 98645, 16, 88823, 10547, 515, 298, 197, 515, 571, 27725, 25, 256, 6200, 85, 16, 88...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestS6aProxyWithHSS_AIA(t *testing.T) { config := generateS6aProxyConfig() addr := startTestServer(t, config, true) // Set up a connection to the server. conn, err := grpc.Dial(addr, grpc.WithInsecure()) if err != nil { t.Fatalf("GRPC connect error: %v", err) return } defer conn.Close() c := protos.NewS6AProxyClient(conn) req := &protos.AuthenticationInformationRequest{ UserName: test.TEST_IMSI, VisitedPlmn: []byte(test.TEST_PLMN_ID), NumRequestedEutranVectors: 3, ImmediateResponsePreferred: true, } complChan := make(chan error, 1) go func() { t.Logf("TestS6aProxyWithHSS_AIA - AIA RPC Req: %s", req.String()) r, err := c.AuthenticationInformation(context.Background(), req) if err != nil { t.Fatalf("TestS6aProxyWithHSS_AIA - GRPC AIR Error: %v", err) complChan <- err return } t.Logf("GRPC AIA Resp: %#+v", *r) if r.ErrorCode != protos.ErrorCode_UNDEFINED { t.Errorf("Unexpected AIA with PLMN IMSI1 Error Code: %d", r.ErrorCode) } assert.Len(t, r.EutranVectors, 3) assert.Equal(t, []byte("\x15\x9a\xbf\x21\xca\xe2\xbf\x0a\xdb\xcb\xf1\x47\xef\x87\x74\x9d"), r.EutranVectors[0].Rand) assert.Equal(t, []byte("\x63\x82\xb8\x54\x48\x59\x80\x00\xf5\xaf\x37\xa5\xe9\x6d\x76\x58"), r.EutranVectors[1].Autn) assert.Equal(t, []byte("\x74\x60\x79\x2b\x8d\x5e\xb1\x62\xfd\x88\x28\xc2\x1a\x3b\xa0\xc5"+ "\x6e\x06\xed\xbf\x5b\x20\x54\x72\x50\x06\x36\xc5\xfa\xd9\x0b\x84"), r.EutranVectors[2].Kasme) // End complChan <- nil }() select { case err := <-complChan: if err != nil { t.Fatal(err) return } case <-time.After(time.Second * 2): t.Fatal("TestS6aProxyWithHSS_AIA Timed out") return } }
explode_data.jsonl/81464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 889 }
[ 2830, 3393, 50, 21, 64, 16219, 2354, 39, 1220, 1566, 5863, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 6923, 50, 21, 64, 16219, 2648, 741, 53183, 1669, 1191, 2271, 5475, 1155, 11, 2193, 11, 830, 340, 197, 322, 2573, 705, 264, 3633, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDecodeKeyPairs(t *testing.T) { cert := cert{ Domain: domain{"foo.bar"}, Key: "a2V5", Certificate: "Y2VydA==", } if err := decodeKeyPairs(&cert); err != nil { t.Failed() } assert.Equal(t, cert.Key, "key") assert.Equal(t, cert.Certificate, "cert") }
explode_data.jsonl/5440
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 32564, 1592, 54228, 1155, 353, 8840, 836, 8, 1476, 1444, 529, 1669, 2777, 515, 197, 10957, 3121, 25, 414, 7947, 4913, 7975, 22001, 7115, 197, 55242, 25, 260, 330, 64, 17, 53, 20, 756, 197, 6258, 20962, 25, 330, 56, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMinimal_141(t *testing.T) { runTestAWS(t, "minimal-141.example.com", "minimal-141", "v1alpha0", false, 1, true, false, nil) }
explode_data.jsonl/17493
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 57 }
[ 2830, 3393, 88328, 62, 16, 19, 16, 1155, 353, 8840, 836, 8, 341, 56742, 2271, 36136, 1155, 11, 330, 92607, 12, 16, 19, 16, 7724, 905, 497, 330, 92607, 12, 16, 19, 16, 497, 330, 85, 16, 7141, 15, 497, 895, 11, 220, 16, 11, 830,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestMultipleBucketsDeferredBuild(t *testing.T) { log.Printf("In TestMultipleBucketsDeferredBuild()") e := secondaryindex.DropAllSecondaryIndexes(indexManagementAddress) FailTestIfError(e, "Error in DropAllSecondaryIndexes", t) index1 := "buck1_id1" index2 := "buck1_id2" index3 := "buck2_id3" bucket1 := "default" bucket2 := "defertest_buck2" kvutility.FlushBucket(bucket1, "", clusterconfig.Username, clusterconfig.Password, kvaddress) kvutility.EditBucket(bucket1, "", clusterconfig.Username, clusterconfig.Password, kvaddress, "256") kvutility.DeleteBucket(bucket2, "", clusterconfig.Username, clusterconfig.Password, kvaddress) secondaryindex.RemoveClientForBucket(kvaddress, bucket2) kvutility.CreateBucket(bucket2, "sasl", "", clusterconfig.Username, clusterconfig.Password, kvaddress, "256", "11213") tc.ClearMap(docs) time.Sleep(bucketOpWaitDur * time.Second) log.Printf("Setting JSON docs in KV") bucket1docs := generateDocs(50000, "users.prod") bucket2docs := generateDocs(50000, "users.prod") kvutility.SetKeyValues(bucket1docs, bucket1, "", clusterconfig.KVAddress) kvutility.SetKeyValues(bucket2docs, bucket2, "", clusterconfig.KVAddress) UpdateKVDocs(bucket1docs, docs) err := secondaryindex.CreateSecondaryIndexAsync(index1, bucket1, indexManagementAddress, "", []string{"company"}, false, []byte("{\"defer_build\": true}"), true, nil) FailTestIfError(err, "Error in creating the index", t) err = secondaryindex.CreateSecondaryIndexAsync(index2, bucket1, indexManagementAddress, "", []string{"email"}, false, []byte("{\"defer_build\": true}"), true, nil) FailTestIfError(err, "Error in creating the index", t) err = secondaryindex.CreateSecondaryIndexAsync(index3, bucket2, indexManagementAddress, "", []string{"gender"}, false, []byte("{\"defer_build\": true}"), true, nil) FailTestIfError(err, "Error in creating the index", t) client, err := secondaryindex.GetOrCreateClient(indexManagementAddress, "test1client") FailTestIfError(err, "Error while creating client", t) defn1, 
_ := secondaryindex.GetDefnID(client, bucket1, index1) defn2, _ := secondaryindex.GetDefnID(client, bucket1, index2) defn3, _ := secondaryindex.GetDefnID(client, bucket2, index3) err = secondaryindex.BuildIndexesAsync([]uint64{defn1}, indexManagementAddress, defaultIndexActiveTimeout) FailTestIfError(err, "Error from BuildIndexesAsync of index1", t) err = secondaryindex.BuildIndexesAsync([]uint64{defn2, defn3}, indexManagementAddress, defaultIndexActiveTimeout) //FailTestIfNoError(err, "Error from BuildIndexesAsync", t) //if err != nil { // log.Printf("Build index failed as expected for %v and %v. Error = %v", defn2, defn3, err.Error()) //} state, e := client.IndexState(defn3) log.Printf("Index state of %v is %v", defn3, state) FailTestIfError(e, "Error in TestMultipleBucketsDeferredBuild. Build should complete and no error expected for index of second bucket", t) e = secondaryindex.WaitTillIndexActive(defn1, client, defaultIndexActiveTimeout) if e != nil { FailTestIfError(e, "Error in WaitTillIndexActive for first index first bucket", t) } // comment out this test since it depends on timing on when indexer will retry rebuilding index //time.Sleep(1 * time.Second) //err = secondaryindex.BuildIndex(index2, bucket1, indexManagementAddress, defaultIndexActiveTimeout) //FailTestIfNoError(err, "Index2 is expected to build in background. 
Expected failure when trying to build index2 explicitly, but no failure returned.", t) e = secondaryindex.WaitTillIndexActive(defn2, client, defaultIndexActiveTimeout) if e != nil { FailTestIfError(e, "Error in WaitTillIndexActive for second index first bucket", t) } docScanResults := datautility.ExpectedScanResponse_string(bucket1docs, "company", "B", "H", 1) scanResults, err := secondaryindex.Range(index1, bucket1, indexScanAddress, []interface{}{"B"}, []interface{}{"H"}, 1, false, defaultlimit, c.SessionConsistency, nil) FailTestIfError(err, "Error in scan", t) err = tv.Validate(docScanResults, scanResults) FailTestIfError(err, "Error in scan result validation", t) docScanResults = datautility.ExpectedScanResponse_string(bucket1docs, "email", "f", "t", 3) scanResults, err = secondaryindex.Range(index2, bucket1, indexScanAddress, []interface{}{"f"}, []interface{}{"t"}, 3, false, defaultlimit, c.SessionConsistency, nil) FailTestIfError(err, "Error in scan", t) err = tv.Validate(docScanResults, scanResults) FailTestIfError(err, "Error in scan result validation", t) docScanResults = datautility.ExpectedScanResponse_string(bucket2docs, "gender", "female", "female", 3) scanResults, err = secondaryindex.Range(index3, bucket2, indexScanAddress, []interface{}{"female"}, []interface{}{"female"}, 3, false, defaultlimit, c.SessionConsistency, nil) FailTestIfError(err, "Error in scan", t) err = tv.Validate(docScanResults, scanResults) FailTestIfError(err, "Error in scan result validation", t) kvutility.EditBucket(bucket1, "", clusterconfig.Username, clusterconfig.Password, kvaddress, "1024") kvutility.DeleteBucket(bucket2, "", clusterconfig.Username, clusterconfig.Password, kvaddress) secondaryindex.RemoveClientForBucket(kvaddress, bucket2) time.Sleep(5 * time.Second) // Wait for bucket delete to complete }
explode_data.jsonl/59132
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1719 }
[ 2830, 3393, 32089, 33, 38551, 88417, 11066, 1155, 353, 8840, 836, 8, 341, 6725, 19367, 445, 641, 3393, 32089, 33, 38551, 88417, 11066, 368, 5130, 7727, 1669, 14246, 1252, 21688, 2403, 48963, 62229, 7195, 22237, 4286, 340, 12727, 604, 2271...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRunningAddressesWithPublishStatusAddress(t *testing.T) { fk := buildStatusSync() fk.PublishStatusAddress = "127.0.0.1" ra, _ := fk.runningAddresses() if ra == nil { t.Fatalf("returned nil but expected valid []string") } rl := len(ra) if len(ra) != 1 { t.Errorf("returned %v but expected %v", rl, 1) } rv := ra[0] if rv != "127.0.0.1" { t.Errorf("returned %v but expected %v", rv, "127.0.0.1") } }
explode_data.jsonl/63336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 18990, 52290, 2354, 50145, 2522, 4286, 1155, 353, 8840, 836, 8, 341, 1166, 74, 1669, 1936, 2522, 12154, 741, 1166, 74, 83935, 2522, 4286, 284, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 1837, 197, 956, 11, 716, 1669, 626...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUnixSocketProxyPaths(t *testing.T) { greeting := "Hello route %s" tests := []struct { url string prefix string expected string }{ {"", "", fmt.Sprintf(greeting, "/")}, {"/hello", "", fmt.Sprintf(greeting, "/hello")}, {"/foo/bar", "", fmt.Sprintf(greeting, "/foo/bar")}, {"/foo?bar", "", fmt.Sprintf(greeting, "/foo?bar")}, {"/greet?name=john", "", fmt.Sprintf(greeting, "/greet?name=john")}, {"/world?wonderful&colorful", "", fmt.Sprintf(greeting, "/world?wonderful&colorful")}, {"/proxy/hello", "/proxy", fmt.Sprintf(greeting, "/hello")}, {"/proxy/foo/bar", "/proxy", fmt.Sprintf(greeting, "/foo/bar")}, {"/proxy/?foo=bar", "/proxy", fmt.Sprintf(greeting, "/?foo=bar")}, {"/queues/%2F/fetchtasks", "", fmt.Sprintf(greeting, "/queues/%2F/fetchtasks")}, {"/queues/%2F/fetchtasks?foo=bar", "", fmt.Sprintf(greeting, "/queues/%2F/fetchtasks?foo=bar")}, } for _, test := range tests { p, ts := GetHTTPProxy(greeting, test.prefix) actualMsg, err := GetTestServerMessage(p, ts, test.url) if err != nil { t.Fatalf("Getting server message failed - %v", err) } if actualMsg != test.expected { t.Errorf("Expected '%s' but got '%s' instead", test.expected, actualMsg) } } if runtime.GOOS == "windows" { return } for _, test := range tests { p, ts, tmpdir, err := GetSocketProxy(greeting, test.prefix) if err != nil { t.Fatalf("Getting socket proxy failed - %v", err) } actualMsg, err := GetTestServerMessage(p, ts, test.url) if err != nil { os.RemoveAll(tmpdir) t.Fatalf("Getting server message failed - %v", err) } if actualMsg != test.expected { t.Errorf("Expected '%s' but got '%s' instead", test.expected, actualMsg) } os.RemoveAll(tmpdir) } }
explode_data.jsonl/64235
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 737 }
[ 2830, 3393, 55832, 10286, 16219, 26901, 1155, 353, 8840, 836, 8, 341, 3174, 43632, 1669, 330, 9707, 6021, 1018, 82, 1837, 78216, 1669, 3056, 1235, 341, 197, 19320, 414, 914, 198, 197, 3223, 5060, 256, 914, 198, 197, 42400, 914, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestSerializeBasic(t *testing.T) { pr := &pingRequest{ ClientSiteID: "0101-0101", LicenseKey: "mylicense", DeployType: "server", ClientVersionString: "3.12.6", AuthProviders: []string{"foo", "bar"}, ExternalServices: []string{extsvc.KindGitHub, extsvc.KindGitLab}, CodeHostVersions: nil, BuiltinSignupAllowed: true, HasExtURL: false, UniqueUsers: 123, Activity: json.RawMessage([]byte(`{"foo":"bar"}`)), BatchChangesUsage: nil, CodeIntelUsage: nil, CodeMonitoringUsage: nil, CodeHostIntegrationUsage: nil, IDEExtensionsUsage: nil, SearchUsage: nil, GrowthStatistics: nil, SavedSearches: nil, HomepagePanels: nil, SearchOnboarding: nil, InitialAdminEmail: "test@sourcegraph.com", TotalUsers: 234, HasRepos: true, EverSearched: false, EverFindRefs: true, RetentionStatistics: nil, } now := time.Now() payload, err := marshalPing(pr, true, "127.0.0.1", now) if err != nil { t.Fatalf("unexpected error %s", err) } compareJSON(t, payload, `{ "remote_ip": "127.0.0.1", "remote_site_version": "3.12.6", "remote_site_id": "0101-0101", "license_key": "mylicense", "has_update": "true", "unique_users_today": "123", "site_activity": {"foo":"bar"}, "batch_changes_usage": null, "code_intel_usage": null, "new_code_intel_usage": null, "dependency_versions": null, "extensions_usage": null, "code_insights_usage": null, "code_insights_critical_telemetry": null, "code_monitoring_usage": null, "code_host_integration_usage": null, "ide_extensions_usage": null, "cta_usage": null, "search_usage": null, "growth_statistics": null, "saved_searches": null, "search_onboarding": null, "homepage_panels": null, "repositories": null, "retention_statistics": null, "installer_email": "test@sourcegraph.com", "auth_providers": "foo,bar", "ext_services": "GITHUB,GITLAB", "code_host_versions": null, "builtin_signup_allowed": "true", "deploy_type": "server", "total_user_accounts": "234", "has_external_url": "false", "has_repos": "true", "ever_searched": "false", "ever_find_refs": "true", "timestamp": 
"`+now.UTC().Format(time.RFC3339)+`" }`) }
explode_data.jsonl/3334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1172 }
[ 2830, 3393, 15680, 15944, 1155, 353, 8840, 836, 8, 341, 25653, 1669, 609, 9989, 1900, 515, 197, 71724, 17597, 915, 25, 1797, 330, 15, 16, 15, 16, 12, 15, 16, 15, 16, 756, 197, 197, 9827, 1592, 25, 2290, 330, 2408, 13266, 756, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDataFragment(t *testing.T) { msg, err := BuildMessageData("CQCQCQ PD0MZ", DDFormatUTF16, true) if err != nil { t.Fatalf("build message failed: %v", err) } want := &DataFragment{Data: msg} blocks, err := want.DataBlocks(Rate34Data, true) if err != nil { t.Fatalf("encode failed: %v", err) } if blocks == nil { t.Fatal("encode failed: blocks is nil") } if len(blocks) != 2 { t.Fatalf("encode failed: expected 2 blocks, got %d", len(blocks)) } for i, block := range blocks { t.Log(fmt.Sprintf("block %02d:\n%s", i, hex.Dump(block.Data))) } test, err := CombineDataBlocks(blocks) if err != nil { t.Fatalf("decode failed: %v", err) } if !bytes.Equal(test.Data[:len(want.Data)], want.Data) { t.Log(fmt.Sprintf("want:\n%s", hex.Dump(want.Data))) t.Log(fmt.Sprintf("got:\n%s", hex.Dump(test.Data))) t.Fatal("decode failed: data is wrong") } }
explode_data.jsonl/3411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 93200, 9488, 1155, 353, 8840, 836, 8, 341, 21169, 11, 1848, 1669, 7854, 2052, 1043, 445, 34, 61042, 61042, 48, 26474, 15, 96988, 497, 30904, 4061, 8561, 16, 21, 11, 830, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestCreateBreaker(t *testing.T) { destination := &Destination{ Client: Client{ Name: "demo-provider", Namespace: "demo-dev", Version: "v1", }, MaxConnections: 1, HttpMaxPendingRequests: 1, SleepWindow: 1, HttpDetectionInterval: 1, HttpMaxEjectionPercent: 1, HttpConsecutiveErrors: 1, HttpMaxRequestsPerConnection: 1, } config, err := NewClient() destination.Crd = config assert.Equal(t, nil, err) resourceVersion, err := destination.Create() assert.Equal(t, nil, err) log.Info("create destination:", resourceVersion) }
explode_data.jsonl/59911
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 4021, 22524, 261, 1155, 353, 8840, 836, 8, 341, 197, 17997, 1669, 609, 33605, 515, 197, 71724, 25, 8423, 515, 298, 21297, 25, 286, 330, 25762, 81532, 756, 298, 90823, 25, 256, 330, 25762, 25758, 756, 298, 77847, 25, 257, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCompositesWithRefs(t *testing.T) { ref1 := RefTerm(VarTerm("a"), VarTerm("i"), StringTerm("b")) ref2 := RefTerm(VarTerm("c"), IntNumberTerm(0), StringTerm("d"), StringTerm("e"), VarTerm("j")) assertParseOneTerm(t, "ref keys", "[{a[i].b: 8, c[0][\"d\"].e[j]: f}]", ArrayTerm(ObjectTerm(Item(ref1, IntNumberTerm(8)), Item(ref2, VarTerm("f"))))) assertParseOneTerm(t, "ref values", "[{8: a[i].b, f: c[0][\"d\"].e[j]}]", ArrayTerm(ObjectTerm(Item(IntNumberTerm(8), ref1), Item(VarTerm("f"), ref2)))) assertParseOneTerm(t, "ref values (sets)", `{a[i].b, {c[0]["d"].e[j]}}`, SetTerm(ref1, SetTerm(ref2))) }
explode_data.jsonl/50464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 1092, 80139, 2354, 82807, 1155, 353, 8840, 836, 8, 341, 59504, 16, 1669, 8550, 17249, 7, 3962, 17249, 445, 64, 3975, 8735, 17249, 445, 72, 3975, 923, 17249, 445, 65, 5455, 59504, 17, 1669, 8550, 17249, 7, 3962, 17249, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScanField(t *testing.T) { type value struct { Field string Negated bool Advance int } cases := []struct { Input string Negated bool Want value }{ // Valid field. { Input: "repo:foo", Want: value{ Field: "repo", Advance: 5, }, }, { Input: "RepO:foo", Want: value{ Field: "RepO", Advance: 5, }, }, { Input: "after:", Want: value{ Field: "after", Advance: 6, }, }, { Input: "-repo:", Want: value{ Field: "repo", Negated: true, Advance: 6, }, }, // Invalid field. { Input: "", Want: value{ Field: "", Advance: 0, }, }, { Input: "-", Want: value{ Field: "", Advance: 0, }, }, { Input: "-:", Want: value{ Field: "", Advance: 0, }, }, { Input: ":", Want: value{ Field: "", Advance: 0, }, }, { Input: "??:foo", Want: value{ Field: "", Advance: 0, }, }, { Input: "repo", Want: value{ Field: "", Advance: 0, }, }, { Input: "-repo", Want: value{ Field: "", Advance: 0, }, }, { Input: "--repo:", Want: value{ Field: "", Advance: 0, }, }, { Input: ":foo", Want: value{ Field: "", Advance: 0, }, }, } for _, c := range cases { t.Run("scan field", func(t *testing.T) { gotField, gotNegated, gotAdvance := ScanField([]byte(c.Input)) if diff := cmp.Diff(c.Want, value{gotField, gotNegated, gotAdvance}); diff != "" { t.Error(diff) } }) } }
explode_data.jsonl/49623
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 895 }
[ 2830, 3393, 26570, 1877, 1155, 353, 8840, 836, 8, 341, 13158, 897, 2036, 341, 197, 94478, 256, 914, 198, 197, 18317, 791, 657, 1807, 198, 197, 197, 95027, 526, 198, 197, 532, 1444, 2264, 1669, 3056, 1235, 341, 197, 66588, 256, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestKubeenvMetrics(t *testing.T) { if err := applyMixerRule(kubeenvTelemetryRule); err != nil { fatalf(t, "could not create required mixer rule: %v", err) } defer func() { if err := deleteMixerRule(kubeenvTelemetryRule); err != nil { t.Logf("could not clear rule: %v", err) } }() allowRuleSync() if err := visitProductPage(productPageTimeout, http.StatusOK); err != nil { fatalf(t, "Test app setup failure: %v", err) } log.Info("Successfully sent request(s) to /productpage; checking metrics...") allowPrometheusSync() promAPI, err := promAPI() if err != nil { fatalf(t, "Could not build prometheus API client: %v", err) } // instead of trying to find an exact match, we'll loop through all successful requests to ensure no values are "unknown" query := fmt.Sprintf("istio_kube_request_count{%s=\"200\"}", responseCodeLabel) t.Logf("prometheus query: %s", query) value, err := promAPI.Query(context.Background(), query, time.Now()) if err != nil { fatalf(t, "Could not get metrics from prometheus: %v", err) } log.Infof("promvalue := %s", value.String()) if value.Type() != model.ValVector { errorf(t, "Value not a model.Vector; was %s", value.Type().String()) } vec := value.(model.Vector) if got, want := len(vec), 1; got < want { errorf(t, "Found %d istio_kube_request_count metrics, want at least %d", got, want) } for _, sample := range vec { metric := sample.Metric for labelKey, labelVal := range metric { if labelVal == "unknown" { errorf(t, "Unexpected 'unknown' value for label '%s' in sample '%s'", labelKey, sample) } } } }
explode_data.jsonl/81436
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 580 }
[ 2830, 3393, 42, 3760, 3160, 27328, 1155, 353, 8840, 836, 8, 341, 743, 1848, 1669, 3796, 44, 39014, 11337, 5969, 3760, 3160, 6639, 35958, 11337, 1215, 1848, 961, 2092, 341, 197, 1166, 266, 3104, 1155, 11, 330, 28077, 537, 1855, 2567, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSendDB(t *testing.T) { tempDir, err := ioutil.TempDir("", "") if err != nil { t.Fatalf("unexpected error creating temp dir: %s", err) } defer os.RemoveAll(tempDir) var paths []string ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { paths = append(paths, r.URL.Path) if r.URL.Path == "/dbs/42/stitch" { return } if r.URL.Path != "/dbs/42/0" { t.Errorf("unexpected path. want=%s have=%s", "/dbs/42/0", r.URL.Path) } gzipReader, err := gzip.NewReader(r.Body) if err != nil { t.Fatalf("unexpected error decompressing payload: %s", err) } defer gzipReader.Close() contents, err := ioutil.ReadAll(gzipReader) if err != nil { t.Fatalf("unexpected error reading decompressed payload: %s", err) } if diff := cmp.Diff([]byte("payload\n"), contents); diff != "" { t.Errorf("unexpected contents (-want +got):\n%s", diff) } w.Write([]byte(`{"size": 100}`)) })) defer ts.Close() if err := ioutil.WriteFile(filepath.Join(tempDir, "test"), []byte("payload\n"), os.ModePerm); err != nil { t.Fatalf("unexpected error writing temp file: %s", err) } client := &bundleManagerClientImpl{bundleManagerURL: ts.URL, maxPayloadSizeBytes: 10000} if err := client.SendDB(context.Background(), 42, filepath.Join(tempDir, "test")); err != nil { t.Fatalf("unexpected error sending db: %s", err) } }
explode_data.jsonl/21435
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 11505, 3506, 1155, 353, 8840, 836, 8, 341, 16280, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 14676, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 53859, 1465, 6825, 2730, 5419, 25, 1018, 82, 497, 1848, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestFillingDefaultRpcNameAndAddress(t *testing.T) { cfg := &Config{ ClusterMetadata: &ClusterMetadata{ CurrentClusterName: "clusterA", ClusterInformation: map[string]ClusterInformation{ "clusterA": { RPCAddress: "127.0.0.1:7933", }, }, }, PublicClient: PublicClient{ HostPort: "", }, } cfg.fillDefaults() assert.Equal(t, "cadence-frontend", cfg.ClusterMetadata.ClusterInformation["clusterA"].RPCName) assert.Equal(t, "127.0.0.1:7933", cfg.PublicClient.HostPort) }
explode_data.jsonl/46569
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 37, 9400, 3675, 60248, 675, 3036, 4286, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 609, 2648, 515, 197, 197, 28678, 14610, 25, 609, 28678, 14610, 515, 298, 197, 5405, 28678, 675, 25, 330, 18855, 32, 756, 298, 197, 28678, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteControllerAndExpectations(t *testing.T) { client := clientset.NewForConfigOrDie(&restclient.Config{Host: "", ContentConfig: restclient.ContentConfig{GroupVersion: testapi.Default.GroupVersion()}}) manager := NewReplicaSetController(client, controller.NoResyncPeriodFunc, 10, 0) manager.podStoreSynced = alwaysReady rs := newReplicaSet(1, map[string]string{"foo": "bar"}) manager.rsStore.Store.Add(rs) fakePodControl := controller.FakePodControl{} manager.podControl = &fakePodControl // This should set expectations for the ReplicaSet manager.syncReplicaSet(getKey(rs, t)) validateSyncReplicaSet(t, &fakePodControl, 1, 0) fakePodControl.Clear() // Get the ReplicaSet key rsKey, err := controller.KeyFunc(rs) if err != nil { t.Errorf("Couldn't get key for object %+v: %v", rs, err) } // This is to simulate a concurrent addPod, that has a handle on the expectations // as the controller deletes it. podExp, exists, err := manager.expectations.GetExpectations(rsKey) if !exists || err != nil { t.Errorf("No expectations found for ReplicaSet") } manager.rsStore.Delete(rs) manager.syncReplicaSet(getKey(rs, t)) if _, exists, err = manager.expectations.GetExpectations(rsKey); exists { t.Errorf("Found expectaions, expected none since the ReplicaSet has been deleted.") } // This should have no effect, since we've deleted the ReplicaSet. podExp.Add(-1, 0) manager.podStore.Indexer.Replace(make([]interface{}, 0), "0") manager.syncReplicaSet(getKey(rs, t)) validateSyncReplicaSet(t, &fakePodControl, 0, 0) }
explode_data.jsonl/10054
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 525 }
[ 2830, 3393, 6435, 2051, 3036, 17536, 804, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 2943, 746, 7121, 2461, 2648, 2195, 18175, 2099, 3927, 2972, 10753, 90, 9296, 25, 7342, 8883, 2648, 25, 2732, 2972, 12614, 2648, 90, 2808, 5637, 25, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMultipleRulesCoveringSingleRule(t *testing.T) { escalationTest{ ownerRules: []authorizationapi.PolicyRule{ {Verbs: sets.NewString("delete"), Resources: sets.NewString("deployments")}, {Verbs: sets.NewString("delete"), Resources: sets.NewString("builds")}, {Verbs: sets.NewString("update"), Resources: sets.NewString("builds", "deployments")}, }, servantRules: []authorizationapi.PolicyRule{ {Verbs: sets.NewString("delete", "update"), Resources: sets.NewString("builds", "deployments")}, }, expectedCovered: true, expectedUncoveredRules: []authorizationapi.PolicyRule{}, }.test(t) }
explode_data.jsonl/9043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 32089, 26008, 30896, 287, 10888, 11337, 1155, 353, 8840, 836, 8, 341, 80629, 278, 367, 2271, 515, 197, 197, 8118, 26008, 25, 3056, 39554, 2068, 1069, 8018, 11337, 515, 298, 197, 90, 10141, 1279, 25, 7289, 7121, 703, 445, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInjectionIgnoreNotProvided(t *testing.T) { type Aaa1 struct{} type Bbb1 struct { Aaa1 *Aaa1 `inject:""` } b := new(Bbb1) _ = NewDebug(). WithObjects(b). InitializeGraphWithImplicitObjects() fmt.Printf("b=%#v\n", b) assert.NotNil(t, b.Aaa1, "b=%#v", b) }
explode_data.jsonl/72955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 36653, 12497, 2623, 35819, 291, 1155, 353, 8840, 836, 8, 341, 13158, 362, 5305, 16, 2036, 16094, 13158, 425, 6066, 16, 2036, 341, 197, 22985, 5305, 16, 353, 32, 5305, 16, 1565, 32133, 2974, 8805, 197, 630, 2233, 1669, 501,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteFileShare(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() cloud := &Cloud{} mockFileClient := mockfileclient.NewMockInterface(ctrl) mockFileClient.EXPECT().DeleteFileShare(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil).AnyTimes() cloud.FileClient = mockFileClient tests := []struct { rg string acct string name string expectErr bool }{ { rg: "rg", acct: "bar", name: "foo", expectErr: false, }, } for _, test := range tests { mockStorageAccountsClient := mockstorageaccountclient.NewMockInterface(ctrl) cloud.StorageAccountClient = mockStorageAccountsClient err := cloud.DeleteFileShare(test.rg, test.acct, test.name) if test.expectErr && err == nil { t.Errorf("unexpected non-error") continue } if !test.expectErr && err != nil { t.Errorf("unexpected error: %v", err) continue } } }
explode_data.jsonl/10211
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 355 }
[ 2830, 3393, 6435, 1703, 12115, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 12361, 1669, 609, 16055, 16094, 77333, 1703, 2959, 1669, 7860, 1192, 2972, 7121, 11571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestPeersToProto(t *testing.T) { // single node single := &node{ id: testNodeId, address: testNodeAddress, peers: make(map[string]*node), network: newNetwork(Name(testNodeNetName)), status: newStatus(), } topCount := 0 protoPeers := PeersToProto(single, 0) if len(protoPeers.Peers) != topCount { t.Errorf("Expected to find %d nodes, found: %d", topCount, len(protoPeers.Peers)) } // complicated node graph node := testSetup() topCount = 3 // list of ids of nodes of depth 1 i.e. node peers peerIds := make(map[string]bool) // add peer Ids for _, id := range testNodePeerIds { peerIds[id] = true } // depth 1 should give us immmediate neighbours only protoPeers = PeersToProto(node, 1) if len(protoPeers.Peers) != topCount { t.Errorf("Expected to find %d nodes, found: %d", topCount, len(protoPeers.Peers)) } }
explode_data.jsonl/23550
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 343 }
[ 2830, 3393, 10197, 388, 1249, 31549, 1155, 353, 8840, 836, 8, 341, 197, 322, 3175, 2436, 198, 1903, 2173, 1669, 609, 3509, 515, 197, 15710, 25, 414, 1273, 79206, 345, 197, 63202, 25, 1273, 1955, 4286, 345, 197, 197, 375, 388, 25, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEtcdListPods(t *testing.T) { fakeClient := tools.NewFakeEtcdClient(t) ctx := api.NewDefaultContext() key := makePodListKey(ctx) fakeClient.Data[key] = tools.EtcdResponseWithError{ R: &etcd.Response{ Node: &etcd.Node{ Nodes: []*etcd.Node{ { Value: runtime.EncodeOrDie(latest.Codec, &api.Pod{ TypeMeta: api.TypeMeta{ID: "foo"}, DesiredState: api.PodState{Host: "machine"}, }), }, { Value: runtime.EncodeOrDie(latest.Codec, &api.Pod{ TypeMeta: api.TypeMeta{ID: "bar"}, DesiredState: api.PodState{Host: "machine"}, }), }, }, }, }, E: nil, } registry := NewTestEtcdRegistry(fakeClient) pods, err := registry.ListPods(ctx, labels.Everything()) if err != nil { t.Errorf("unexpected error: %v", err) } if len(pods.Items) != 2 || pods.Items[0].ID != "foo" || pods.Items[1].ID != "bar" { t.Errorf("Unexpected pod list: %#v", pods) } if pods.Items[0].CurrentState.Host != "machine" || pods.Items[1].CurrentState.Host != "machine" { t.Errorf("Failed to populate host name.") } }
explode_data.jsonl/8153
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 505 }
[ 2830, 3393, 31860, 4385, 852, 23527, 82, 1155, 353, 8840, 836, 8, 341, 1166, 726, 2959, 1669, 7375, 7121, 52317, 31860, 4385, 2959, 1155, 340, 20985, 1669, 6330, 7121, 3675, 1972, 741, 23634, 1669, 1281, 23527, 852, 1592, 7502, 340, 116...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestContext2Plan_moduleDestroyMultivar(t *testing.T) { m := testModule(t, "plan-module-destroy-multivar") p := testProvider("aws") p.DiffFn = testDiffFn s := MustShimLegacyState(&State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{}, }, &ModuleState{ Path: []string{"root", "child"}, Resources: map[string]*ResourceState{ "aws_instance.foo.0": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar0", }, }, "aws_instance.foo.1": &ResourceState{ Type: "aws_instance", Primary: &InstanceState{ ID: "bar1", }, }, }, }, }, }) ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), State: s, Destroy: true, }) plan, diags := ctx.Plan() if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } schema := p.GetSchemaReturn.ResourceTypes["aws_instance"] ty := schema.ImpliedType() if len(plan.Changes.Resources) != 2 { t.Fatal("expected 2 changes, got", len(plan.Changes.Resources)) } for _, res := range plan.Changes.Resources { ric, err := res.Decode(ty) if err != nil { t.Fatal(err) } switch i := ric.Addr.String(); i { case "module.child.aws_instance.foo[0]", "module.child.aws_instance.foo[1]": if res.Action != plans.Delete { t.Fatalf("resource %s should be removed", i) } default: t.Fatal("unknown instance:", i) } } }
explode_data.jsonl/28692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 706 }
[ 2830, 3393, 1972, 17, 20485, 10750, 14245, 40404, 56398, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 46718, 12, 15518, 96927, 56398, 1138, 3223, 1669, 1273, 5179, 445, 8635, 1138, 3223, 98063, 24911, 284, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestHasOrgVisibleTypePublic(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) owner := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) user3 := AssertExistsAndLoadBean(t, &User{ID: 3}).(*User) const newOrgName = "test-org-public" org := &User{ Name: newOrgName, Visibility: structs.VisibleTypePublic, } AssertNotExistsBean(t, &User{Name: org.Name, Type: UserTypeOrganization}) assert.NoError(t, CreateOrganization(org, owner)) org = AssertExistsAndLoadBean(t, &User{Name: org.Name, Type: UserTypeOrganization}).(*User) test1 := HasOrgVisible(org, owner) test2 := HasOrgVisible(org, user3) test3 := HasOrgVisible(org, nil) assert.Equal(t, test1, true) // owner of org assert.Equal(t, test2, true) // user not a part of org assert.Equal(t, test3, true) // logged out user }
explode_data.jsonl/71077
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 10281, 42437, 5715, 929, 12676, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 2398, 197, 8118, 1669, 5319, 15575, 3036, 5879, 10437, 1155, 11, 609, 1474, 90, 915, 25, 220, 17, 16630, 4071, 1474, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateTaskRunResultTwoResults(t *testing.T) { for _, c := range []struct { desc string podStatus corev1.PodStatus taskRunStatus *v1beta1.TaskRunStatus want []v1beta1.TaskRunResult }{{ desc: "two test results", podStatus: corev1.PodStatus{ ContainerStatuses: []corev1.ContainerStatus{{ State: corev1.ContainerState{ Terminated: &corev1.ContainerStateTerminated{ Message: `[{"key":"resultNameOne","value":"resultValueOne", "type": "TaskRunResult"},{"key":"resultNameTwo","value":"resultValueTwo", "type": "TaskRunResult"}]`, }, }, }}, }, want: []v1beta1.TaskRunResult{{ Name: "resultNameOne", Value: "resultValueOne", }, { Name: "resultNameTwo", Value: "resultValueTwo", }}, }} { t.Run(c.desc, func(t *testing.T) { names.TestingSeed() tr := &v1beta1.TaskRun{} tr.Status.SetCondition(&apis.Condition{ Type: apis.ConditionSucceeded, Status: corev1.ConditionTrue, }) if err := updateTaskRunResourceResult(tr, c.podStatus); err != nil { t.Errorf("updateTaskRunResourceResult: %s", err) } if d := cmp.Diff(c.want, tr.Status.TaskRunResults); d != "" { t.Errorf("updateTaskRunResourceResult %s", diff.PrintWantGot(d)) } }) } }
explode_data.jsonl/60090
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 538 }
[ 2830, 3393, 4289, 6262, 6727, 2077, 11613, 9801, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 272, 1669, 2088, 3056, 1235, 341, 197, 41653, 688, 914, 198, 197, 3223, 347, 2522, 257, 6200, 85, 16, 88823, 2522, 198, 197, 49115, 6727, 2522,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreatePublicToken(t *testing.T) { sandboxResp, _ := testClient.CreateSandboxPublicToken(sandboxInstitution, testProducts) tokenResp, _ := testClient.ExchangePublicToken(sandboxResp.PublicToken) publicTokenResp, err := testClient.CreatePublicToken(tokenResp.AccessToken) assert.Nil(t, err) assert.True(t, strings.HasPrefix(publicTokenResp.PublicToken, "public-sandbox")) }
explode_data.jsonl/12576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 4021, 12676, 3323, 1155, 353, 8840, 836, 8, 341, 1903, 31536, 36555, 11, 716, 1669, 1273, 2959, 7251, 50, 31536, 12676, 3323, 1141, 31536, 641, 10446, 11, 1273, 17746, 340, 43947, 36555, 11, 716, 1669, 1273, 2959, 86997, 126...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGet(t *testing.T) { t.Run("exists", func(t *testing.T) { mockStore := new(store.MockStore) mockStore.On("Get", "test key").Return("test value", true) testServer := server.NewServer(mockStore, newLogger()) response, err := testServer.Get(context.Background(), &api.GetRequest{ Key: "test key", }) require.NotNil(t, response) require.Equal(t, "test value", response.Value) require.True(t, response.Exists) require.Nil(t, err) }) t.Run("does not exist", func(t *testing.T) { mockStore := new(store.MockStore) mockStore.On("Get", "test key").Return("", false) testServer := server.NewServer(mockStore, newLogger()) response, err := testServer.Get(context.Background(), &api.GetRequest{ Key: "test key", }) require.NotNil(t, response) require.Equal(t, "", response.Value) require.False(t, response.Exists) require.Nil(t, err) }) }
explode_data.jsonl/28527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 1949, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16304, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 77333, 6093, 1669, 501, 31200, 24664, 6093, 340, 197, 77333, 6093, 8071, 445, 1949, 497, 330, 1944, 1376, 1827, 559...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_Do(t *testing.T) { tests := []struct { name string cfg config.Metadata mockReceiver *mockEventStoreReceiver sendReq *types.Request wantReq *types.Request wantResp *types.Response wantErr bool }{ { name: "request", cfg: config.Metadata{ Name: "kubemq-target", Kind: "", Properties: map[string]string{ "host": "localhost", "port": "50000", }, }, mockReceiver: &mockEventStoreReceiver{ host: "localhost", port: 50000, channel: "event_stores", timeout: 5 * time.Second, }, sendReq: types.NewRequest(). SetData([]byte("data")). SetMetadataKeyValue("id", "id"). SetMetadataKeyValue("channel", "event_stores"), wantReq: types.NewRequest(). SetData([]byte("data")), wantResp: types.NewResponse(). SetMetadataKeyValue("error", ""). SetMetadataKeyValue("sent", "true"). SetMetadataKeyValue("event_store_id", "id"), wantErr: false, }, { name: "request error - no data", cfg: config.Metadata{ Name: "kubemq-target", Kind: "", Properties: map[string]string{ "host": "localhost", "port": "50000", }, }, mockReceiver: &mockEventStoreReceiver{ host: "localhost", port: 50000, channel: "event_stores", timeout: 5 * time.Second, }, sendReq: types.NewRequest(). SetMetadataKeyValue("id", "id"). SetMetadataKeyValue("channel", "event_stores"), wantReq: nil, wantResp: nil, wantErr: true, }, { name: "request error - bad metadata - empty channel", cfg: config.Metadata{ Name: "kubemq-target", Kind: "", Properties: map[string]string{ "host": "localhost", "port": "50000", }, }, mockReceiver: &mockEventStoreReceiver{ host: "localhost", port: 50000, channel: "event_stores", timeout: 5 * time.Second, }, sendReq: types.NewRequest(). SetData([]byte("data")). SetMetadataKeyValue("id", "id"). 
SetMetadataKeyValue("channel", ""), wantReq: nil, wantResp: nil, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() recRequestCh := make(chan *types.Request, 1) recErrCh := make(chan error, 1) go func() { gotRequest, err := tt.mockReceiver.run(ctx) select { case recErrCh <- err: case recRequestCh <- gotRequest: } }() time.Sleep(time.Second) target := New() err := target.Init(ctx, tt.cfg) require.NoError(t, err) gotResp, err := target.Do(ctx, tt.sendReq) if tt.wantErr { require.Error(t, err) return } require.NoError(t, err) require.EqualValues(t, tt.wantResp, gotResp) select { case gotRequest := <-recRequestCh: require.EqualValues(t, tt.wantReq, gotRequest) case err := <-recErrCh: require.NoError(t, err) case <-ctx.Done(): require.NoError(t, ctx.Err()) } }) } }
explode_data.jsonl/42675
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1413 }
[ 2830, 3393, 2959, 93481, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 50286, 688, 2193, 46475, 198, 197, 77333, 25436, 353, 16712, 1556, 6093, 25436, 198, 197, 32817, 27234, 414, 353, 9242,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHook_SafeName(t *testing.T) { WorkingDir = "/hooks" hookPath := "/hooks/002-cool-hooks/monitor-namespaces.py" hookName, err := filepath.Rel(WorkingDir, hookPath) if err != nil { t.Error(err) } h := NewHook(hookName, hookPath) assert.Equal(t, "002-cool-hooks-monitor-namespaces-py", h.SafeName()) }
explode_data.jsonl/7944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 31679, 1098, 5645, 675, 1155, 353, 8840, 836, 8, 341, 197, 33978, 6184, 284, 3521, 38560, 698, 9598, 1941, 1820, 1669, 3521, 38560, 14, 15, 15, 17, 22471, 337, 78387, 14, 32225, 89044, 27338, 7197, 1837, 9598, 1941, 675, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestZeroList(t *testing.T) { gtest.C(t, func(t *gtest.T) { var l1 = New() l1.PushFront(1) checkList(t, l1, []interface{}{1}) var l2 = New() l2.PushBack(1) checkList(t, l2, []interface{}{1}) var l3 = New() l3.PushFrontList(l1) checkList(t, l3, []interface{}{1}) var l4 = New() l4.PushBackList(l2) checkList(t, l4, []interface{}{1}) }) }
explode_data.jsonl/30892
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 17999, 852, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 2405, 326, 16, 284, 1532, 741, 197, 8810, 16, 34981, 23395, 7, 16, 340, 197, 25157, 852, 1155, 11, 326, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMonFoundInQuorum(t *testing.T) { response := client.MonStatusResponse{} // "a" is in quorum response.Quorum = []int{0} response.MonMap.Mons = []client.MonMapEntry{ {Name: "a", Rank: 0}, {Name: "b", Rank: 1}, {Name: "c", Rank: 2}, } assert.True(t, monFoundInQuorum("a", response)) assert.False(t, monFoundInQuorum("b", response)) assert.False(t, monFoundInQuorum("c", response)) // b and c also in quorum, but not d response.Quorum = []int{0, 1, 2} assert.True(t, monFoundInQuorum("a", response)) assert.True(t, monFoundInQuorum("b", response)) assert.True(t, monFoundInQuorum("c", response)) assert.False(t, monFoundInQuorum("d", response)) }
explode_data.jsonl/57261
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 11095, 6650, 641, 2183, 33006, 1155, 353, 8840, 836, 8, 341, 21735, 1669, 2943, 52211, 2522, 2582, 31483, 197, 322, 330, 64, 1, 374, 304, 922, 33006, 198, 21735, 33907, 33006, 284, 3056, 396, 90, 15, 532, 21735, 52211, 222...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCommandStats(t *testing.T) { e := getTestExporter() setupDBKeys(t, os.Getenv("TEST_REDIS_URI")) defer deleteKeysFromDB(t, os.Getenv("TEST_REDIS_URI")) chM := make(chan prometheus.Metric) go func() { e.Collect(chM) close(chM) }() want := map[string]bool{"test_commands_duration_seconds_total": false, "test_commands_total": false} for m := range chM { for k := range want { if strings.Contains(m.Desc().String(), k) { want[k] = true } } } for k, found := range want { if !found { t.Errorf("didn't find %s", k) } } }
explode_data.jsonl/46994
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 4062, 16635, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 633, 2271, 88025, 2822, 84571, 3506, 8850, 1155, 11, 2643, 64883, 445, 10033, 2192, 21202, 23116, 5455, 16867, 3698, 8850, 3830, 3506, 1155, 11, 2643, 64883, 445, 10033, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFlipServiceInternalAnnotation(t *testing.T) { svc := getTestService("servicea", v1.ProtocolTCP, 80) svcUpdated := flipServiceInternalAnnotation(&svc) if !requiresInternalLoadBalancer(svcUpdated) { t.Errorf("Expected svc to be an internal service") } svcUpdated = flipServiceInternalAnnotation(svcUpdated) if requiresInternalLoadBalancer(svcUpdated) { t.Errorf("Expected svc to be an external service") } svc2 := getInternalTestService("serviceb", 8081) svc2Updated := flipServiceInternalAnnotation(&svc2) if requiresInternalLoadBalancer(svc2Updated) { t.Errorf("Expected svc to be an external service") } svc2Updated = flipServiceInternalAnnotation(svc2Updated) if !requiresInternalLoadBalancer(svc2Updated) { t.Errorf("Expected svc to be an internal service") } }
explode_data.jsonl/50382
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 46808, 1860, 11569, 19711, 1155, 353, 8840, 836, 8, 341, 1903, 7362, 1669, 633, 2271, 1860, 445, 7936, 64, 497, 348, 16, 54096, 49896, 11, 220, 23, 15, 340, 1903, 7362, 16196, 1669, 18287, 1860, 11569, 19711, 2099, 58094, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPopulateCluster_Custom_CIDR(t *testing.T) { c := buildMinimalCluster() c.Spec.NetworkCIDR = "172.20.2.0/24" c.Spec.Subnets = []kopsapi.ClusterSubnetSpec{ {Name: "subnet-us-mock-1a", Zone: "us-mock-1a", CIDR: "172.20.2.0/27"}, {Name: "subnet-us-mock-1b", Zone: "us-mock-1b", CIDR: "172.20.2.32/27"}, {Name: "subnet-us-mock-1c", Zone: "us-mock-1c", CIDR: "172.20.2.64/27"}, } cloud, err := BuildCloud(c) if err != nil { t.Fatalf("error from BuildCloud: %v", err) } err = PerformAssignments(c, cloud) if err != nil { t.Fatalf("error from PerformAssignments: %v", err) } full, err := mockedPopulateClusterSpec(c) if err != nil { t.Fatalf("Unexpected error from PopulateCluster: %v", err) } if full.Spec.NetworkCIDR != "172.20.2.0/24" { t.Fatalf("Unexpected NetworkCIDR: %v", full.Spec.NetworkCIDR) } }
explode_data.jsonl/75035
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 11598, 6334, 28678, 57402, 87525, 49, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1936, 88328, 28678, 741, 1444, 36473, 30149, 54146, 49, 284, 330, 16, 22, 17, 13, 17, 15, 13, 17, 13, 15, 14, 17, 19, 698, 1444, 36473, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStateFromName(t *testing.T) { t.Run("valid state names", func(t *testing.T) { states := []state{ &stateInitial{}, &statePrepareResponse{}, &stateAwaitResponse{}, &stateDone{}, } for _, expected := range states { actual, err := stateFromName(expected.Name()) require.NoError(t, err) require.Equal(t, expected, actual) } }) t.Run("invalid state name", func(t *testing.T) { _, err := stateFromName("invalid") require.Error(t, err) }) }
explode_data.jsonl/11778
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 1397, 3830, 675, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1891, 1584, 5036, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18388, 973, 1669, 3056, 2454, 515, 298, 197, 5, 2454, 6341, 38837, 298, 197, 5, 2454, 50590...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPEGWorking(t *testing.T) { tests := []struct { name string input string ncalls int }{ { name: "Empty", input: "", ncalls: 0}, { name: "Set", input: "Set(2, f=10)", ncalls: 1}, { name: "SetWithColKeySingleQuote", input: `Set('foo', f=10)`, ncalls: 1}, { name: "SetWithColKeyDoubleQuote", input: `Set("foo", f=10)`, ncalls: 1}, { name: "SetTime", input: "Set(2, f=1, 1999-12-31T00:00)", ncalls: 1}, { name: "DoubleSet", input: "Set(1, a=4)Set(2, a=4)", ncalls: 2}, { name: "DoubleSetSpc", input: "Set(1, a=4) Set(2, a=4)", ncalls: 2}, { name: "DoubleSetNewline", input: "Set(1, a=4) \n Set(2, a=4)", ncalls: 2}, { name: "SetWithArbCall", input: "Set(1, a=4)Blerg(z=ha)", ncalls: 2}, { name: "SetArbSet", input: "Set(1, a=4)Blerg(z=ha)Set(2, z=99)", ncalls: 3}, { name: "ArbSetArb", input: "Arb(q=1, a=4)Set(1, z=9)Arb(z=99)", ncalls: 3}, { name: "SetStringArg", input: "Set(1, a=zoom)", ncalls: 1}, { name: "SetManyArgs", input: "Set(1, a=4, b=5)", ncalls: 1}, { name: "SetManyMixedArgs", input: "Set(1, a=4, bsd=haha)", ncalls: 1}, { name: "SetTimestamp", input: "Set(1, a=4, 2017-04-03T19:34)", ncalls: 1}, { name: "Union()", input: "Union()", ncalls: 1}, { name: "UnionOneRow", input: "Union(Row(a=1))", ncalls: 1}, { name: "UnionTwoRows", input: "Union(Row(a=1), Row(z=44))", ncalls: 1}, { name: "UnionNested", input: "Union(Intersect(Row(), Union(Row(), Row())), Row())", ncalls: 1}, { name: "TopN no args", input: "TopN(boondoggle)", ncalls: 1}, { name: "TopN with args", input: "TopN(boon, doggle=9)", ncalls: 1}, { name: "double quoted args", input: `B(a="zm''e")`, ncalls: 1}, { name: "single quoted args", input: `B(a='zm""e')`, ncalls: 1}, { name: "SetRowAttrs", input: "SetRowAttrs(blah, 9, a=47)", ncalls: 1}, { name: "SetRowAttrs2args", input: "SetRowAttrs(blah, 9, a=47, b=bval)", ncalls: 1}, { name: "SetRowAttrsWithRowKeySingleQuote", input: "SetRowAttrs(blah, 'rowKey', a=47)", ncalls: 1}, { name: "SetRowAttrsWithRowKeyDoubleQuote", input: `SetRowAttrs(blah, "rowKey", 
a=47)`, ncalls: 1}, { name: "SetColumnAttrs", input: "SetColumnAttrs(9, a=47)", ncalls: 1}, { name: "SetColumnAttrs2args", input: "SetColumnAttrs(9, a=47, b=bval)", ncalls: 1}, { name: "SetColumnAttrsWithColKeySingleQuote", input: "SetColumnAttrs('colKey', a=47)", ncalls: 1}, { name: "SetColumnAttrsWithColKeyDoubleQuote", input: `SetColumnAttrs("colKey", a=47)`, ncalls: 1}, { name: "Clear", input: "Clear(1, a=53)", ncalls: 1}, { name: "Clear2args", input: "Clear(1, a=53, b=33)", ncalls: 1}, { name: "TopN", input: "TopN(myfield, n=44)", ncalls: 1}, { name: "TopNBitmap", input: "TopN(myfield, Row(a=47), n=10)", ncalls: 1}, { name: "RangeLT", input: "Range(a < 4)", ncalls: 1}, { name: "RangeGT", input: "Range(a > 4)", ncalls: 1}, { name: "RangeLTE", input: "Range(a <= 4)", ncalls: 1}, { name: "RangeGTE", input: "Range(a >= 4)", ncalls: 1}, { name: "RangeEQ", input: "Range(a == 4)", ncalls: 1}, { name: "RangeNEQ", input: "Range(a != null)", ncalls: 1}, { name: "RangeLTLT", input: "Range(4 < a < 9)", ncalls: 1}, { name: "RangeLTLTE", input: "Range(4 < a <= 9)", ncalls: 1}, { name: "RangeLTELT", input: "Range(4 <= a < 9)", ncalls: 1}, { name: "RangeLTELTE", input: "Range(4 <= a <= 9)", ncalls: 1}, { name: "RangeTime", input: "Range(a=4, 2010-07-04T00:00, 2010-08-04T00:00)", ncalls: 1}, { name: "RangeTimeQuotes", input: `Range(a=4, '2010-07-04T00:00', "2010-08-04T00:00")`, ncalls: 1}, { name: "Dashed Frame", input: "Set(1, my-frame=9)", ncalls: 1}, { name: "newlines", input: `Set( 1, my-frame =9)`, ncalls: 1}, } for i, test := range tests { t.Run(test.name+strconv.Itoa(i), func(t *testing.T) { q, err := ParseString(test.input) if err != nil { t.Fatalf("parsing query '%s': %v", test.input, err) } if len(q.Calls) != test.ncalls { t.Fatalf("wrong number of calls for '%s': %#v", test.input, q.Calls) } }) } }
explode_data.jsonl/21701
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2515 }
[ 2830, 3393, 22061, 33978, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 22427, 220, 914, 198, 197, 197, 1016, 5583, 526, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 256, 330, 3522, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOnlyLocalLoadBalancing(t *testing.T) { ipt, fp := buildFakeProxier() svcIP := "10.20.30.41" svcPort := 80 svcNodePort := 3001 svcLBIP := "1.2.3.4" svcPortName := proxy.ServicePortName{ NamespacedName: makeNSN("ns1", "svc1"), Port: "p80", } makeServiceMap(fp, makeTestService(svcPortName.Namespace, svcPortName.Name, func(svc *v1.Service) { svc.Spec.Type = "LoadBalancer" svc.Spec.ClusterIP = svcIP svc.Spec.Ports = []v1.ServicePort{{ Name: svcPortName.Port, Port: int32(svcPort), Protocol: v1.ProtocolTCP, NodePort: int32(svcNodePort), }} svc.Status.LoadBalancer.Ingress = []v1.LoadBalancerIngress{{ IP: svcLBIP, }} svc.Spec.ExternalTrafficPolicy = v1.ServiceExternalTrafficPolicyTypeLocal }), ) epIP := "10.180.0.1" epIP1 := "10.180.1.1" thisHostname := testHostname otherHostname := "other-hostname" tcpProtocol := v1.ProtocolTCP populateEndpointSlices(fp, makeTestEndpointSlice(svcPortName.Namespace, svcPortName.Name, 1, func(eps *discovery.EndpointSlice) { eps.AddressType = discovery.AddressTypeIPv4 eps.Endpoints = []discovery.Endpoint{ { // **local** endpoint address, should be added as RS Addresses: []string{epIP}, NodeName: &thisHostname, }, { // **remote** endpoint address, should not be added as RS Addresses: []string{epIP1}, NodeName: &otherHostname, }} eps.Ports = []discovery.EndpointPort{{ Name: utilpointer.StringPtr(svcPortName.Port), Port: utilpointer.Int32(int32(svcPort)), Protocol: &tcpProtocol, }} }), ) fp.syncProxyRules() // Expect 2 services and 1 destination epVS := &netlinktest.ExpectedVirtualServer{ VSNum: 2, IP: svcLBIP, Port: uint16(svcPort), Protocol: string(v1.ProtocolTCP), RS: []netlinktest.ExpectedRealServer{{ IP: epIP, Port: uint16(svcPort), }}} checkIPVS(t, fp, epVS) // check ipSet rules epIPSet := netlinktest.ExpectedIPSet{ kubeLoadBalancerSet: {{ IP: svcLBIP, Port: svcPort, Protocol: strings.ToLower(string(v1.ProtocolTCP)), SetType: utilipset.HashIPPort, }}, kubeLoadBalancerLocalSet: {{ IP: svcLBIP, Port: svcPort, Protocol: 
strings.ToLower(string(v1.ProtocolTCP)), SetType: utilipset.HashIPPort, }}, } checkIPSet(t, fp, epIPSet) // Check iptables chain and rules epIpt := netlinktest.ExpectedIptablesChain{ string(kubeServicesChain): {{ JumpChain: string(KubeLoadBalancerChain), MatchSet: kubeLoadBalancerSet, }, { JumpChain: string(KubeMarkMasqChain), MatchSet: kubeClusterIPSet, }, { JumpChain: string(KubeNodePortChain), MatchSet: "", }, { JumpChain: "ACCEPT", MatchSet: kubeClusterIPSet, }, { JumpChain: "ACCEPT", MatchSet: kubeLoadBalancerSet, }}, string(KubeLoadBalancerChain): {{ JumpChain: "RETURN", MatchSet: kubeLoadBalancerLocalSet, }, { JumpChain: string(KubeMarkMasqChain), MatchSet: "", }}, } checkIptables(t, ipt, epIpt) }
explode_data.jsonl/44362
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1314 }
[ 2830, 3393, 7308, 7319, 5879, 37889, 8974, 1155, 353, 8840, 836, 8, 341, 8230, 417, 11, 12007, 1669, 1936, 52317, 1336, 87, 1268, 2822, 1903, 7362, 3298, 1669, 330, 16, 15, 13, 17, 15, 13, 18, 15, 13, 19, 16, 698, 1903, 7362, 7084...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEditHistoryUnflushed(t *testing.T) { // Bob writes one file. expectedEdits1 := []expectedEdit{ { "alice,bob", keybase1.FolderType_PRIVATE, "bob", []string{"/keybase/private/alice,bob/a/b"}, nil, }, } // Alice and Bob both write a second file, but alice's is unflushed. expectedEdits2Alice := []expectedEdit{ { "alice,bob", keybase1.FolderType_PRIVATE, "alice", []string{"/keybase/private/alice,bob/a/c"}, nil, }, expectedEdits1[0], } expectedEdits2Bob := []expectedEdit{ { "alice,bob", keybase1.FolderType_PRIVATE, "bob", []string{ "/keybase/private/alice,bob/a/d", "/keybase/private/alice,bob/a/b", }, nil, }, } // Alice runs CR and flushes her journal. expectedEdits3 := []expectedEdit{ expectedEdits2Alice[0], expectedEdits2Bob[0], } expectedEdits4 := []expectedEdit{ { "alice,bob", keybase1.FolderType_PRIVATE, "alice", nil, []string{ "/keybase/private/alice,bob/a/d", "/keybase/private/alice,bob/a/c", "/keybase/private/alice,bob/a/b", }, }, } test(t, journal(), users("alice", "bob"), as(alice, mkdir("a"), ), as(alice, enableJournal(), ), as(bob, mkfile("a/b", "hello"), ), as(bob, checkUserEditHistory(expectedEdits1), ), as(alice, checkUserEditHistory(expectedEdits1), ), as(alice, pauseJournal(), addTime(1*time.Minute), mkfile("a/c", "hello2"), ), as(bob, addTime(1*time.Minute), mkfile("a/d", "hello"), ), as(bob, checkUserEditHistory(expectedEdits2Bob), ), as(alice, noSync(), checkUserEditHistory(expectedEdits2Alice), ), as(alice, noSync(), resumeJournal(), // This should kick off conflict resolution. flushJournal(), ), as(alice, // Extra flush to make sure the edit history messages have // been received by all users. 
flushJournal(), ), as(alice, checkUserEditHistory(expectedEdits3), ), as(bob, checkUserEditHistory(expectedEdits3), ), as(alice, pauseJournal(), addTime(1*time.Minute), rm("a/b"), rm("a/c"), rm("a/d"), rmdir("a"), ), as(alice, checkUnflushedPaths([]string{ "/keybase/private/alice,bob", "/keybase/private/alice,bob/a", }), ), as(alice, noSync(), resumeJournal(), flushJournal(), ), as(alice, checkUserEditHistory(expectedEdits4), ), as(bob, checkUserEditHistory(expectedEdits4), ), ) }
explode_data.jsonl/54760
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1183 }
[ 2830, 3393, 4036, 13424, 1806, 21664, 291, 1155, 353, 8840, 836, 8, 341, 197, 322, 14261, 13914, 825, 1034, 624, 42400, 2715, 1199, 16, 1669, 3056, 7325, 4036, 515, 197, 197, 515, 298, 197, 1, 63195, 8402, 674, 756, 298, 23634, 3152, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetCategoryById(t *testing.T) { fmt.Println("TestGetCategoryById") if testing.Short() { t.Skip("skipping test short mode") } initTest() cat, err := manager.getCategoryByID(1) if err != nil { t.Error(err) } if cat == nil { t.Error("expected category") } p := "" if cat.ParentCategoryId != nil { p = strconv.Itoa(*cat.ParentCategoryId) } fmt.Printf("Cat: %v (id=%d) pc= %v Created: %v \n", cat.Name, cat.CategoryId, p, cat.CreatedDate.Format("2006-01-02T15:04:05")) }
explode_data.jsonl/22085
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 1949, 6746, 2720, 1155, 353, 8840, 836, 8, 341, 11009, 12419, 445, 2271, 1949, 6746, 2720, 1138, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 2805, 3856, 1138, 197, 532, 28248, 2271, 741, 92791, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestEmptyNameConstraints(t *testing.T) { block, _ := pem.Decode([]byte(emptyNameConstraintsPEM)) _, err := ParseCertificate(block.Bytes) if err == nil { t.Fatal("unexpected success") } const expected = "empty name constraints" if str := err.Error(); !strings.Contains(str, expected) { t.Errorf("expected %q in error but got %q", expected, str) } }
explode_data.jsonl/68015
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 3522, 675, 12925, 1155, 353, 8840, 836, 8, 341, 47996, 11, 716, 1669, 54184, 56372, 10556, 3782, 24216, 675, 12925, 1740, 44, 1171, 197, 6878, 1848, 1669, 14775, 33202, 18682, 36868, 340, 743, 1848, 621, 2092, 341, 197, 3244...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStateOversizedBlock(t *testing.T) { config := configSetup(t) ctx, cancel := context.WithCancel(context.Background()) defer cancel() cs1, vss := makeState(ctx, t, makeStateArgs{config: config, validators: 2}) cs1.state.ConsensusParams.Block.MaxBytes = 2000 height, round := cs1.Height, cs1.Round vs2 := vss[1] partSize := types.BlockPartSizeBytes timeoutProposeCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryTimeoutPropose) voteCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryVote) propBlock, err := cs1.createProposalBlock(ctx) require.NoError(t, err) propBlock.Data.Txs = []types.Tx{tmrand.Bytes(2001)} propBlock.Header.DataHash = propBlock.Data.Hash() // make the second validator the proposer by incrementing round round++ incrementRound(vss[1:]...) propBlockParts, err := propBlock.MakePartSet(partSize) require.NoError(t, err) blockID := types.BlockID{Hash: propBlock.Hash(), PartSetHeader: propBlockParts.Header()} proposal := types.NewProposal(height, round, -1, blockID, propBlock.Header.Time) p := proposal.ToProto() err = vs2.SignProposal(ctx, config.ChainID(), p) require.NoError(t, err) proposal.Signature = p.Signature totalBytes := 0 for i := 0; i < int(propBlockParts.Total()); i++ { part := propBlockParts.GetPart(i) totalBytes += len(part.Bytes) } err = cs1.SetProposalAndBlock(ctx, proposal, propBlock, propBlockParts, "some peer") require.NoError(t, err) // start the machine startTestRound(ctx, cs1, height, round) // c1 should log an error with the block part message as it exceeds the consensus params. The // block is not added to cs.ProposalBlock so the node timeouts. 
ensureNewTimeout(t, timeoutProposeCh, height, round, cs1.proposeTimeout(round).Nanoseconds()) // and then should send nil prevote and precommit regardless of whether other validators prevote and // precommit on it ensurePrevoteMatch(t, voteCh, height, round, nil) signAddVotes(ctx, t, cs1, tmproto.PrevoteType, config.ChainID(), blockID, vs2) ensurePrevote(t, voteCh, height, round) ensurePrecommit(t, voteCh, height, round) validatePrecommit(ctx, t, cs1, round, -1, vss[0], nil, nil) signAddVotes(ctx, t, cs1, tmproto.PrecommitType, config.ChainID(), blockID, vs2) }
explode_data.jsonl/54260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 781 }
[ 2830, 3393, 1397, 46, 3004, 1506, 4713, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 2193, 21821, 1155, 340, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 71899, 16, 11, 348, 778, 1669, 1281, 1397, 7502, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewEnvironment(t *testing.T) { context := "" // empty, use current-Context kubeconfig, wantConfig := createFakeKubeconfigFileOrDie(t) var wOut, wErr bytes.Buffer wantEnv := &KubeEnvironment{ config: wantConfig, stdout: &wOut, stderr: &wErr, kubeconfig: kubeconfig, } gotEnv, err := NewEnvironment(kubeconfig, context, &wOut, &wErr) if err != nil { t.Fatal(err) } if diff := cmp.Diff(*wantEnv.GetConfig(), *gotEnv.GetConfig()); diff != "" { t.Errorf("bad config: \n got %v \nwant %v\ndiff %v", gotEnv, wantEnv, diff) } wantEnv.config = nil gotEnv.config = nil if !reflect.DeepEqual(wantEnv, gotEnv) { t.Errorf("bad environment: \n got %v \nwant %v", *gotEnv, *wantEnv) } // verify interface if gotEnv.Stderr() != &wErr { t.Errorf("Stderr() returned wrong io.writer") } if gotEnv.Stdout() != &wOut { t.Errorf("Stdout() returned wrong io.writer") } gotEnv.Printf("stdout %v", "test") wantOut := "stdout test" if gotOut := wOut.String(); gotOut != wantOut { t.Errorf("Printf() printed wrong string: got %v want %v", gotOut, wantOut) } gotEnv.Errorf("stderr %v", "test") wantErr := "stderr test" if gotErr := wErr.String(); gotErr != wantErr { t.Errorf("Errorf() printed wrong string: got %v want %v", gotErr, wantErr) } }
explode_data.jsonl/65418
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 3564, 12723, 1155, 353, 8840, 836, 8, 341, 28413, 1669, 1591, 442, 4287, 11, 990, 1482, 12, 1972, 198, 16463, 3760, 1676, 11, 1366, 2648, 1669, 1855, 52317, 42, 3760, 1676, 1703, 2195, 18175, 1155, 692, 2405, 289, 2662, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRootMappingFsMount(t *testing.T) { c := qt.New(t) fs := NewBaseFileDecorator(afero.NewMemMapFs()) testfile := "test.txt" c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mynoblogcontent", testfile), []byte("some no content"), 0755), qt.IsNil) c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", testfile), []byte("some en content"), 0755), qt.IsNil) c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", testfile), []byte("some sv content"), 0755), qt.IsNil) c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "other.txt"), []byte("some sv content"), 0755), qt.IsNil) c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "no.txt"), []byte("no text"), 0755), qt.IsNil) c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "sv.txt"), []byte("sv text"), 0755), qt.IsNil) bfs := afero.NewBasePathFs(fs, "themes/a").(*afero.BasePathFs) rm := []RootMapping{ // Directories RootMapping{ From: "content/blog", To: "mynoblogcontent", Meta: FileMeta{"lang": "no"}, }, RootMapping{ From: "content/blog", To: "myenblogcontent", Meta: FileMeta{"lang": "en"}, }, RootMapping{ From: "content/blog", To: "mysvblogcontent", Meta: FileMeta{"lang": "sv"}, }, // Files RootMapping{ From: "content/singles/p1.md", To: "singlefiles/no.txt", ToBasedir: "singlefiles", Meta: FileMeta{"lang": "no"}, }, RootMapping{ From: "content/singles/p1.md", To: "singlefiles/sv.txt", ToBasedir: "singlefiles", Meta: FileMeta{"lang": "sv"}, }, } rfs, err := NewRootMappingFs(bfs, rm...) c.Assert(err, qt.IsNil) blog, err := rfs.Stat(filepath.FromSlash("content/blog")) c.Assert(err, qt.IsNil) c.Assert(blog.IsDir(), qt.Equals, true) blogm := blog.(FileMetaInfo).Meta() c.Assert(blogm.Lang(), qt.Equals, "no") // First match f, err := blogm.Open() c.Assert(err, qt.IsNil) defer f.Close() dirs1, err := f.Readdirnames(-1) c.Assert(err, qt.IsNil) // Union with duplicate dir names filtered. 
c.Assert(dirs1, qt.DeepEquals, []string{"test.txt", "test.txt", "other.txt", "test.txt"}) files, err := afero.ReadDir(rfs, filepath.FromSlash("content/blog")) c.Assert(err, qt.IsNil) c.Assert(len(files), qt.Equals, 4) testfilefi := files[1] c.Assert(testfilefi.Name(), qt.Equals, testfile) testfilem := testfilefi.(FileMetaInfo).Meta() c.Assert(testfilem.Filename(), qt.Equals, filepath.FromSlash("themes/a/mynoblogcontent/test.txt")) tf, err := testfilem.Open() c.Assert(err, qt.IsNil) defer tf.Close() b, err := ioutil.ReadAll(tf) c.Assert(err, qt.IsNil) c.Assert(string(b), qt.Equals, "some no content") // Check file mappings single, err := rfs.Stat(filepath.FromSlash("content/singles/p1.md")) c.Assert(err, qt.IsNil) c.Assert(single.IsDir(), qt.Equals, false) singlem := single.(FileMetaInfo).Meta() c.Assert(singlem.Lang(), qt.Equals, "no") // First match singlesDir, err := rfs.Open(filepath.FromSlash("content/singles")) c.Assert(err, qt.IsNil) defer singlesDir.Close() singles, err := singlesDir.Readdir(-1) c.Assert(err, qt.IsNil) c.Assert(singles, qt.HasLen, 2) for i, lang := range []string{"no", "sv"} { fi := singles[i].(FileMetaInfo) c.Assert(fi.Meta().PathFile(), qt.Equals, filepath.FromSlash("themes/a/singlefiles/"+lang+".txt")) c.Assert(fi.Meta().Lang(), qt.Equals, lang) c.Assert(fi.Name(), qt.Equals, "p1.md") } }
explode_data.jsonl/13727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1548 }
[ 2830, 3393, 8439, 6807, 48300, 16284, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 38949, 7121, 1155, 340, 53584, 1669, 1532, 3978, 1703, 47951, 2877, 802, 78, 7121, 18816, 2227, 48300, 12367, 18185, 1192, 1669, 330, 1944, 3909, 1837, 1444, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdatePostTimeLimit(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() post := th.BasicPost.Clone() th.App.Srv().SetLicense(model.NewTestLicense()) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.PostEditTimeLimit = -1 }) _, err := th.App.UpdatePost(th.Context, post, true) require.Nil(t, err) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.PostEditTimeLimit = 1000000000 }) post.Message = model.NewId() _, err = th.App.UpdatePost(th.Context, post, true) require.Nil(t, err, "should allow you to edit the post") th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.PostEditTimeLimit = 1 }) post.Message = model.NewId() _, err = th.App.UpdatePost(th.Context, post, true) require.NotNil(t, err, "should fail on update old post") th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.PostEditTimeLimit = -1 }) }
explode_data.jsonl/26426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 4289, 4133, 1462, 16527, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 51172, 1669, 270, 48868, 4133, 64463, 2822, 70479, 5105, 808, 10553, 1005, 1649, 9827, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStoreWatchExpireEmptyRefresh(t *testing.T) { s := newStore() fc := newFakeClock() s.clock = fc var eidx uint64 s.Create("/foo", false, "bar", false, TTLOptionSet{ExpireTime: fc.Now().Add(500 * time.Millisecond), Refresh: true}) // Should be no-op fc.Advance(200 * time.Millisecond) s.DeleteExpiredKeys(fc.Now()) s.Update("/foo", "", TTLOptionSet{ExpireTime: fc.Now().Add(500 * time.Millisecond), Refresh: true}) w, _ := s.Watch("/", true, false, 2) fc.Advance(700 * time.Millisecond) s.DeleteExpiredKeys(fc.Now()) eidx = 3 // We should skip 2 because a TTL update should occur with no watch notification if set `TTLOptionSet.Refresh` to true testutil.AssertEqual(t, w.StartIndex(), eidx-1) e := nbselect(w.EventChan()) testutil.AssertEqual(t, e.EtcdIndex, eidx) testutil.AssertEqual(t, e.Action, "expire") testutil.AssertEqual(t, e.Node.Key, "/foo") testutil.AssertEqual(t, *e.PrevNode.Value, "bar") }
explode_data.jsonl/44124
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 371 }
[ 2830, 3393, 6093, 14247, 8033, 554, 3522, 14567, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 6093, 741, 1166, 66, 1669, 501, 52317, 26104, 741, 1903, 50546, 284, 25563, 271, 2405, 384, 6361, 2622, 21, 19, 198, 1903, 7251, 4283, 797...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInRepoConfigEnabled(t *testing.T) { testCases := []struct { name string config Config expected bool testing string }{ { name: "Exact match", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "org/repo": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "org/repo", }, { name: "Orgname matches", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "org": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "org/repo", }, { name: "Globally enabled", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "*": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "org/repo", }, { name: "Disabled by default", expected: false, testing: "org/repo", }, { name: "Gerrit format org Hostname matches", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "host-name": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "host-name/extra/repo", }, { name: "Gerrit format org Hostname matches with http", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "host-name": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "http://host-name/extra/repo", }, { name: "Gerrit format Just org Hostname matches", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "host-name": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "host-name", }, { name: "Gerrit format Just org Hostname matches with http", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "host-name": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "http://host-name", }, { name: "Gerrit format Just repo Hostname matches", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "host-name/repo/name": 
utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "host-name/repo/name", }, { name: "Gerrit format Just org Hostname matches with http", config: Config{ ProwConfig: ProwConfig{ InRepoConfig: InRepoConfig{ Enabled: map[string]*bool{ "host-name/repo/name": utilpointer.BoolPtr(true), }, }, }, }, expected: true, testing: "http://host-name/repo/name", }, } for idx := range testCases { tc := testCases[idx] t.Run(tc.name, func(t *testing.T) { t.Parallel() if result := tc.config.InRepoConfigEnabled(tc.testing); result != tc.expected { t.Errorf("Expected %t, got %t", tc.expected, result) } }) } }
explode_data.jsonl/41041
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1551 }
[ 2830, 3393, 641, 25243, 2648, 5462, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 25873, 256, 5532, 198, 197, 42400, 1807, 198, 197, 197, 8840, 220, 914, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPutReflect_struct(t *testing.T) { var a1 testStruct = testStruct{42, "hello", 12.5} expected := make([]byte, 18) lex.PutInt(expected, a1.A) lex.PutString(expected[8:], a1.B) lex.PutFloat32(expected[14:], a1.C) actual := make([]byte, 18) lex.PutReflect(actual, a1) assert.True(t, bytes.Equal(expected, actual)) }
explode_data.jsonl/50147
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 19103, 72789, 15126, 1155, 353, 8840, 836, 8, 341, 2405, 264, 16, 1273, 9422, 284, 1273, 9422, 90, 19, 17, 11, 330, 14990, 497, 220, 16, 17, 13, 20, 630, 42400, 1669, 1281, 10556, 3782, 11, 220, 16, 23, 340, 197, 2571,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIssue14159(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("DROP TABLE IF EXISTS t") tk.MustExec("CREATE TABLE t (v VARCHAR(100))") tk.MustExec("INSERT INTO t VALUES ('3289742893213123732904809')") tk.MustQuery("SELECT * FROM t WHERE v").Check(testkit.Rows("3289742893213123732904809")) }
explode_data.jsonl/65508
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 42006, 16, 19, 16, 20, 24, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPostgresqlIgnoresUnwantedColumnsIntegration(t *testing.T) { if testing.Short() { t.Skip("Skipping integration test in short mode") } p := &Postgresql{ Log: testutil.Logger{}, Service: postgresql.Service{ Address: fmt.Sprintf( "host=%s user=postgres sslmode=disable", testutil.GetLocalHost(), ), }, } var acc testutil.Accumulator require.NoError(t, p.Start(&acc)) require.NoError(t, acc.GatherError(p.Gather)) assert.NotEmpty(t, p.IgnoredColumns()) for col := range p.IgnoredColumns() { assert.False(t, acc.HasMeasurement(col)) } }
explode_data.jsonl/20139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 4133, 81624, 40, 70, 2152, 416, 1806, 86592, 13965, 52464, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 85945, 17590, 1273, 304, 2805, 3856, 1138, 197, 630, 3223, 1669, 609, 4133, 81624, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestZb_CancelOrder(t *testing.T) { r, err := zb.CancelOrder("201802014255365", goex.NewCurrencyPair2("BTC_QC")) t.Log(err) t.Log(r) }
explode_data.jsonl/28912
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 57, 65, 97485, 4431, 1155, 353, 8840, 836, 8, 341, 7000, 11, 1848, 1669, 74355, 36491, 4431, 445, 17, 15, 16, 23, 15, 17, 15, 16, 19, 17, 20, 20, 18, 21, 20, 497, 728, 327, 7121, 26321, 12443, 17, 445, 59118, 13337, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestInvokeCmd(t *testing.T) { defer viper.Reset() defer resetFlags() resetFlags() mockCF, err := getMockChaincodeCmdFactory() assert.NoError(t, err, "Error getting mock chaincode command factory") // Error case 0: no channelID specified cmd := invokeCmd(mockCF) addFlags(cmd) args := []string{"-n", "example02", "-c", "{\"Args\": [\"invoke\",\"a\",\"b\",\"10\"]}"} cmd.SetArgs(args) err = cmd.Execute() assert.Error(t, err, "'peer chaincode invoke' command should have returned error when called without -C flag") // Success case cmd = invokeCmd(mockCF) addFlags(cmd) args = []string{"-n", "example02", "-c", "{\"Args\": [\"invoke\",\"a\",\"b\",\"10\"]}", "-C", "mychannel"} cmd.SetArgs(args) err = cmd.Execute() assert.NoError(t, err, "Run chaincode invoke cmd error") // set timeout for error cases viper.Set("peer.client.connTimeout", 10*time.Millisecond) // Error case 1: no orderer endpoints t.Logf("Start error case 1: no orderer endpoints") getEndorserClient := common.GetEndorserClientFnc getOrdererEndpointOfChain := common.GetOrdererEndpointOfChainFnc getBroadcastClient := common.GetBroadcastClientFnc getDefaultSigner := common.GetDefaultSignerFnc getDeliverClient := common.GetDeliverClientFnc getPeerDeliverClient := common.GetPeerDeliverClientFnc defer func() { common.GetEndorserClientFnc = getEndorserClient common.GetOrdererEndpointOfChainFnc = getOrdererEndpointOfChain common.GetBroadcastClientFnc = getBroadcastClient common.GetDefaultSignerFnc = getDefaultSigner common.GetDeliverClientFnc = getDeliverClient common.GetPeerDeliverClientFnc = getPeerDeliverClient }() common.GetEndorserClientFnc = func(string, string) (pb.EndorserClient, error) { return mockCF.EndorserClients[0], nil } common.GetOrdererEndpointOfChainFnc = func(chainID string, signer msp.SigningIdentity, endorserClient pb.EndorserClient) ([]string, error) { return []string{}, nil } cmd = invokeCmd(nil) addFlags(cmd) args = []string{"-n", "example02", "-c", "{\"Args\": 
[\"invoke\",\"a\",\"b\",\"10\"]}", "-C", "mychannel"} cmd.SetArgs(args) err = cmd.Execute() assert.Error(t, err) // Error case 2: getEndorserClient returns error t.Logf("Start error case 2: getEndorserClient returns error") common.GetEndorserClientFnc = func(string, string) (pb.EndorserClient, error) { return nil, errors.New("error") } err = cmd.Execute() assert.Error(t, err) // Error case 3: getDeliverClient returns error t.Logf("Start error case 3: getDeliverClient returns error") common.GetDeliverClientFnc = func(string, string) (pb.Deliver_DeliverClient, error) { return nil, errors.New("error") } err = cmd.Execute() assert.Error(t, err) // Error case 4 : getPeerDeliverClient returns error t.Logf("Start error case 4: getPeerDeliverClient returns error") common.GetPeerDeliverClientFnc = func(string, string) (api.PeerDeliverClient, error) { return nil, errors.New("error") } err = cmd.Execute() assert.Error(t, err) // Error case 5: getDefaultSignerFnc returns error t.Logf("Start error case 5: getDefaultSignerFnc returns error") common.GetEndorserClientFnc = func(string, string) (pb.EndorserClient, error) { return mockCF.EndorserClients[0], nil } common.GetPeerDeliverClientFnc = func(string, string) (api.PeerDeliverClient, error) { return mockCF.DeliverClients[0], nil } common.GetDefaultSignerFnc = func() (msp.SigningIdentity, error) { return nil, errors.New("error") } err = cmd.Execute() assert.Error(t, err) common.GetDefaultSignerFnc = common.GetDefaultSigner // Error case 6: getOrdererEndpointOfChainFnc returns error t.Logf("Start error case 6: getOrdererEndpointOfChainFnc returns error") common.GetEndorserClientFnc = func(string, string) (pb.EndorserClient, error) { return mockCF.EndorserClients[0], nil } common.GetOrdererEndpointOfChainFnc = func(chainID string, signer msp.SigningIdentity, endorserClient pb.EndorserClient) ([]string, error) { return nil, errors.New("error") } err = cmd.Execute() assert.Error(t, err) // Error case 7: getBroadcastClient returns 
error t.Logf("Start error case 7: getBroadcastClient returns error") common.GetOrdererEndpointOfChainFnc = func(chainID string, signer msp.SigningIdentity, endorserClient pb.EndorserClient) ([]string, error) { return []string{"localhost:9999"}, nil } common.GetBroadcastClientFnc = func() (common.BroadcastClient, error) { return nil, errors.New("error") } err = cmd.Execute() assert.Error(t, err) // Success case t.Logf("Start success case") common.GetBroadcastClientFnc = func() (common.BroadcastClient, error) { return mockCF.BroadcastClient, nil } err = cmd.Execute() assert.NoError(t, err) }
explode_data.jsonl/65802
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1664 }
[ 2830, 3393, 17604, 15613, 1155, 353, 8840, 836, 8, 341, 16867, 95132, 36660, 741, 16867, 7585, 9195, 2822, 70343, 9195, 741, 77333, 9650, 11, 1848, 1669, 633, 11571, 18837, 1851, 15613, 4153, 741, 6948, 35699, 1155, 11, 1848, 11, 330, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResolvePlaceholdersInGlobalVariables(t *testing.T) { ctx := context.Background() wf := unmarshalWF(globalVariablePlaceholders) woc := newWoc(*wf) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowRunning, woc.wf.Status.Phase) pods, err := listPods(woc) assert.NoError(t, err) assert.True(t, len(pods.Items) > 0, "pod was not created successfully") templateString := pods.Items[0].ObjectMeta.Annotations["workflows.argoproj.io/template"] var template wfv1.Template err = json.Unmarshal([]byte(templateString), &template) assert.NoError(t, err) namespaceValue := template.Outputs.Parameters[0].Value assert.NotNil(t, namespaceValue) assert.Equal(t, "testNamespace", namespaceValue.String()) serviceAccountNameValue := template.Outputs.Parameters[1].Value assert.NotNil(t, serviceAccountNameValue) assert.Equal(t, "testServiceAccountName", serviceAccountNameValue.String()) }
explode_data.jsonl/71007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 315 }
[ 2830, 3393, 56808, 17371, 16662, 641, 11646, 22678, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 6692, 69, 1669, 650, 27121, 32131, 31951, 7827, 17371, 16662, 340, 6692, 509, 1669, 501, 54, 509, 4071, 43083, 340, 6692, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAliasInConfigFile(t *testing.T) { // the config file specifies "beard". If we make this an alias for // "hasbeard", we still want the old config file to work with beard. RegisterAlias("beard", "hasbeard") assert.Equal(t, true, Get("hasbeard")) Set("hasbeard", false) assert.Equal(t, false, Get("beard")) }
explode_data.jsonl/5550
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 22720, 641, 2648, 1703, 1155, 353, 8840, 836, 8, 341, 197, 322, 279, 2193, 1034, 29102, 330, 1371, 567, 3263, 220, 1416, 582, 1281, 419, 458, 15534, 369, 198, 197, 322, 330, 4648, 1371, 567, 497, 582, 2058, 1366, 279, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFirstBigSet(t *testing.T) { tests := []struct { num *big.Int ix int }{ {big.NewInt(0), 0}, {big.NewInt(1), 0}, {big.NewInt(2), 1}, {big.NewInt(0x100), 8}, } for _, test := range tests { if ix := FirstBitSet(test.num); ix != test.ix { t.Errorf("FirstBitSet(b%b) = %d, want %d", test.num, ix, test.ix) } } }
explode_data.jsonl/35393
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 5338, 15636, 1649, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 22431, 353, 16154, 7371, 198, 197, 197, 941, 220, 526, 198, 197, 59403, 197, 197, 90, 16154, 7121, 1072, 7, 15, 701, 220, 15, 1583, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConfigSourceManager_ResolveErrors(t *testing.T) { ctx := context.Background() testErr := errors.New("test error") tests := []struct { name string config map[string]interface{} configSourceMap map[string]configsource.ConfigSource }{ { name: "incorrect_cfgsrc_ref", config: map[string]interface{}{ "cfgsrc": "$tstcfgsrc:selector?{invalid}", }, configSourceMap: map[string]configsource.ConfigSource{ "tstcfgsrc": &testConfigSource{}, }, }, { name: "error_on_retrieve", config: map[string]interface{}{ "cfgsrc": "$tstcfgsrc:selector", }, configSourceMap: map[string]configsource.ConfigSource{ "tstcfgsrc": &testConfigSource{ErrOnRetrieve: testErr}, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { manager, err := NewManager(nil) require.NoError(t, err) manager.configSources = tt.configSourceMap res, err := manager.Resolve(ctx, configparser.NewConfigMapFromStringMap(tt.config)) require.Error(t, err) require.Nil(t, res) require.NoError(t, manager.Close(ctx)) }) } }
explode_data.jsonl/34667
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 466 }
[ 2830, 3393, 2648, 3608, 2043, 62, 56808, 13877, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 18185, 7747, 1669, 5975, 7121, 445, 1944, 1465, 5130, 78216, 1669, 3056, 1235, 341, 197, 11609, 310, 914, 198, 197, 25873, 688,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetchConfigFilenames(t *testing.T) { if _, err := fetchConfigFilenames(""); err == nil { t.Errorf("FetchConfigFilenames(): should throw ==> '%v'", "config dir name is empty") return } var noneexistingDir = "cafebabe" if _, err := fetchConfigFilenames(noneexistingDir); err == nil { t.Errorf("FetchConfigFilenames(): should throw ==> '%v'", "open cafebabe: no such file or directory") return } var emptyDir = path.Join(testConfigDir, "cafababe") os.Mkdir(emptyDir, 0644) if _, err := fetchConfigFilenames(emptyDir); err == nil { t.Errorf("FetchConfigFilenames(): should throw ==> '%v'", "Your config dir seems to be empty") return } os.Remove(emptyDir) ////////////////////////////////////////////////////////////////////////////// expected := testFilenames createTestFiles(testConfigDir) actual, err := fetchConfigFilenames(testConfigDir) if err != nil { t.Errorf("FetchConfigFilenames(): err => '%v'", err) return } fcc.RemoveFiles(TestMemberNames, testConfigDir, t) for i := range expected { if expected[i] != actual[i] { t.Errorf("GetFilenames(): expected '%v', actual '%v'", expected[i], actual[i]) } } }
explode_data.jsonl/7669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 414 }
[ 2830, 3393, 20714, 2648, 37, 52768, 1155, 353, 8840, 836, 8, 1476, 743, 8358, 1848, 1669, 7807, 2648, 37, 52768, 97918, 1848, 621, 2092, 341, 197, 3244, 13080, 445, 20714, 2648, 37, 52768, 4555, 1265, 2510, 32296, 7677, 85, 22772, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestTrackFunctionsFailingPOST(t *testing.T) { assert := assert.New(t) httpmock.Activate() defer httpmock.DeactivateAndReset() httpmock.RegisterResponder( "POST", "http://com.acme.collector/com.snowplowanalytics.snowplow/tp2", httpmock.NewStringResponder(404, ""), ) tracker := InitTracker( RequireEmitter(InitEmitter( RequireCollectorUri("com.acme.collector"), OptionRequestType("POST"), OptionStorage(*InitStorageMemory()), OptionCallback(func(g []CallbackResult, b []CallbackResult) { log.Println("Successes: " + IntToString(len(g))) log.Println("Failures: " + IntToString(len(b))) }), )), OptionSubject(InitSubject()), OptionNamespace("namespace"), OptionAppId("app-id"), OptionPlatform("mob"), OptionBase64Encode(false), ) assert.NotNil(tracker) tracker.TrackPageView(PageViewEvent{PageUrl: NewString("acme.com")}) tracker.Emitter.Stop() }
explode_data.jsonl/37652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 15667, 25207, 37, 14277, 2946, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 28080, 16712, 14140, 731, 741, 16867, 1758, 16712, 8934, 16856, 3036, 14828, 2822, 28080, 16712, 19983, 30884, 1006, 197, 197, 1, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCacheTimes(t *testing.T) { var found bool tc := New(50*time.Millisecond, 1*time.Millisecond) tc.Set("a", 1, DefaultExpiration) tc.Set("b", 2, NoExpiration) tc.Set("c", 3, 20*time.Millisecond) tc.Set("d", 4, 70*time.Millisecond) <-time.After(25 * time.Millisecond) _, found = tc.Get("c") if found { t.Error("Found c when it should have been automatically deleted") } <-time.After(30 * time.Millisecond) _, found = tc.Get("a") if found { t.Error("Found a when it should have been automatically deleted") } _, found = tc.Get("b") if !found { t.Error("Did not find b even though it was set to never expire") } _, found = tc.Get("d") if !found { t.Error("Did not find d even though it was set to expire later than the default") } <-time.After(20 * time.Millisecond) _, found = tc.Get("d") if found { t.Error("Found d when it should have been automatically deleted (later than the default)") } }
explode_data.jsonl/63757
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 355 }
[ 2830, 3393, 8233, 18889, 1155, 353, 8840, 836, 8, 341, 2405, 1730, 1807, 271, 78255, 1669, 1532, 7, 20, 15, 77053, 71482, 11, 220, 16, 77053, 71482, 340, 78255, 4202, 445, 64, 497, 220, 16, 11, 7899, 66301, 340, 78255, 4202, 445, 65...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestBaseTable_Instance(t *testing.T) { tests := []struct { name string b *BaseTable want string }{ { name: "1", b: NewBaseTable("db", "schema", "table"), want: "db", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := tt.b.Instance(); got != tt.want { t.Errorf("BaseTable.Instance() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/20039
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 3978, 2556, 70849, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 2233, 262, 353, 3978, 2556, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 16, 756, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRuleForbiddenView(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := []string{ `create view v_today (today) AS SELECT CURRENT_DATE;`, `CREATE VIEW v (col) AS SELECT 'abc';`, } for _, sql := range sqls { q, _ := NewQuery4Audit(sql) rule := q.RuleForbiddenView() if rule.Item != "TBL.006" { t.Error("Rule not match:", rule.Item, "Expect : TBL.006") } } common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76801
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 11337, 69115, 851, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 3056, 917, 515, 197, 197, 63, 3182, 1651, 348, 68717, 320, 30113, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTarUntarWrongPrefix(t *testing.T) { dir, err := ioutil.TempDir("", "input") checkErr(t, err) dir2, err := ioutil.TempDir("", "output") checkErr(t, err) dir = dir + "/" defer func() { os.RemoveAll(dir) os.RemoveAll(dir2) }() filepath := path.Join(dir, "foo") if err := os.MkdirAll(path.Dir(filepath), 0755); err != nil { t.Fatalf("unexpected error: %v", err) } createTmpFile(t, filepath, "sample data") opts := NewCopyOptions(genericclioptions.NewTestIOStreamsDiscard()) writer := &bytes.Buffer{} if err := makeTar(dir, dir, writer); err != nil { t.Fatalf("unexpected error: %v", err) } reader := bytes.NewBuffer(writer.Bytes()) err = opts.untarAll(reader, dir2, "verylongprefix-showing-the-tar-was-tempered-with") if err == nil || !strings.Contains(err.Error(), "tar contents corrupted") { t.Fatalf("unexpected error: %v", err) } }
explode_data.jsonl/58288
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 62733, 20250, 277, 29185, 14335, 1155, 353, 8840, 836, 8, 341, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 1355, 1138, 25157, 7747, 1155, 11, 1848, 340, 48532, 17, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHOTP_HexRFCTestValues(t *testing.T) { otpHex, err := NewHOTP(rfc4226TestSecret, WithLength(8), FormatHex()) assert.NoError(t, err) // Expected results from https://tools.ietf.org/html/rfc4226#page-32 expectedResults := []string{ "4c93cf18", "41397eea", "082fef30", "66ef7655", "61c5938a", "33c083d4", "7256c032", "04e5b397", "2823443f", "2679dc69", } for i, expectedResult := range expectedResults { otp, err := otpHex.At(i) assert.NoError(t, err, "OTP generation failed") assert.Equal(t, expectedResult, otp) } }
explode_data.jsonl/41259
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 39, 90146, 2039, 327, 17612, 76434, 6227, 1155, 353, 8840, 836, 8, 341, 197, 48708, 20335, 11, 1848, 1669, 1532, 39, 90146, 2601, 8316, 19, 17, 17, 21, 2271, 19773, 11, 3085, 4373, 7, 23, 701, 15042, 20335, 12367, 6948, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReaderOnLatestWithBatching(t *testing.T) { client, err := NewClient(ClientOptions{ URL: lookupURL, }) assert.Nil(t, err) defer client.Close() topic := newTopicName() ctx := context.Background() // create producer producer, err := client.CreateProducer(ProducerOptions{ Topic: topic, DisableBatching: false, BatchingMaxMessages: 4, BatchingMaxPublishDelay: 1 * time.Second, }) assert.Nil(t, err) defer producer.Close() // send 10 messages msgIDs := [10]MessageID{} for i := 0; i < 10; i++ { idx := i producer.SendAsync(ctx, &ProducerMessage{ Payload: []byte(fmt.Sprintf("hello-%d", i)), }, func(id MessageID, producerMessage *ProducerMessage, err error) { assert.NoError(t, err) assert.NotNil(t, id) msgIDs[idx] = id }) } err = producer.Flush() assert.NoError(t, err) // create reader on 5th message (not included) reader, err := client.CreateReader(ReaderOptions{ Topic: topic, StartMessageID: LatestMessageID(), StartMessageIDInclusive: false, }) assert.Nil(t, err) defer reader.Close() // Reader should yield no message since it's at the end of the topic ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) msg, err := reader.Next(ctx) assert.Error(t, err) assert.Nil(t, msg) cancel() }
explode_data.jsonl/6386
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 540 }
[ 2830, 3393, 5062, 1925, 31992, 2354, 21074, 287, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 1532, 2959, 46851, 3798, 515, 197, 79055, 25, 18615, 3144, 345, 197, 8824, 6948, 59678, 1155, 11, 1848, 340, 16867, 2943, 10421, 2822,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRandomUint64(t *testing.T) { tries := 1 << 8 // 2^8 watermark := uint64(1 << 56) // 2^56 maxHits := 5 badRNG := "The random number generator on this system is clearly " + "terrible since we got %d values less than %d in %d runs " + "when only %d was expected" numHits := 0 for i := 0; i < tries; i++ { nonce, err := RandomUint64() if err != nil { t.Errorf("RandomUint64 iteration %d failed - err %v", i, err) return } if nonce < watermark { numHits++ } if numHits > maxHits { str := fmt.Sprintf(badRNG, numHits, watermark, tries, maxHits) t.Errorf("Random Uint64 iteration %d failed - %v %v", i, str, numHits) return } } }
explode_data.jsonl/15306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 13999, 21570, 21, 19, 1155, 353, 8840, 836, 8, 341, 197, 4543, 1669, 220, 16, 1115, 220, 23, 1060, 442, 220, 17, 61, 23, 198, 6692, 962, 3987, 1669, 2622, 21, 19, 7, 16, 1115, 220, 20, 21, 8, 442, 220, 17, 61, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSumInt64DataPoints(t *testing.T) { desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Int64NumberKind) labels := label.NewSet() s := sumAgg.New() assert.NoError(t, s.Update(context.Background(), metric.Number(1), &desc)) s.Checkpoint(context.Background(), &desc) if m, err := sum(&desc, &labels, s); assert.NoError(t, err) { assert.Equal(t, []*metricpb.Int64DataPoint{{Value: 1}}, m.Int64DataPoints) assert.Equal(t, []*metricpb.DoubleDataPoint(nil), m.DoubleDataPoints) assert.Equal(t, []*metricpb.HistogramDataPoint(nil), m.HistogramDataPoints) assert.Equal(t, []*metricpb.SummaryDataPoint(nil), m.SummaryDataPoints) } }
explode_data.jsonl/70792
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 9190, 1072, 21, 19, 1043, 11411, 1155, 353, 8840, 836, 8, 341, 41653, 1669, 18266, 7121, 11709, 19814, 18266, 6167, 47023, 10629, 11, 18266, 7371, 21, 19, 2833, 10629, 340, 95143, 1669, 2383, 7121, 1649, 741, 1903, 1669, 262...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestProperties_ValidateInvalidStruct(t *testing.T) { cs := getK8sDefaultContainerService(false) cs.Properties.OrchestratorProfile = &OrchestratorProfile{} expectedMsg := "missing Properties.OrchestratorProfile.OrchestratorType" if err := cs.Validate(false); err == nil || err.Error() != expectedMsg { t.Errorf("expected validation error with message : %s", err.Error()) } }
explode_data.jsonl/17874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 7903, 62, 17926, 7928, 9422, 1155, 353, 8840, 836, 8, 341, 71899, 1669, 633, 42, 23, 82, 3675, 4502, 1860, 3576, 340, 71899, 15945, 90449, 331, 15111, 850, 8526, 284, 609, 2195, 331, 15111, 850, 8526, 16094, 42400, 6611, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrepareRepoContext(t *testing.T) { var out bytes.Buffer logger := zerolog.New(&out) ctx := logger.WithContext(context.Background()) _, logger = PrepareRepoContext(ctx, 42, &github.Repository{ Name: github.String("test"), Owner: &github.User{ Login: github.String("mhaypenny"), }, }) logger.Info().Msg("") var entry struct { ID int64 `json:"github_installation_id"` Owner string `json:"github_repository_owner"` Name string `json:"github_repository_name"` } if err := json.Unmarshal(out.Bytes(), &entry); err != nil { t.Fatalf("invalid log entry: %s: %v", out.String(), err) } assertField(t, "installation ID", int64(42), entry.ID) assertField(t, "repository owner", "mhaypenny", entry.Owner) assertField(t, "repository name", "test", entry.Name) }
explode_data.jsonl/70537
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 50590, 25243, 1972, 1155, 353, 8840, 836, 8, 341, 2405, 700, 5820, 22622, 271, 17060, 1669, 76178, 1609, 7121, 2099, 411, 340, 20985, 1669, 5925, 26124, 1972, 5378, 19047, 12367, 197, 6878, 5925, 284, 31166, 25243, 1972, 7502,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInferGrantType(t *testing.T) { for _, path := range []string{"dex", "okta", "auth0", "onelogin"} { t.Run(path, func(t *testing.T) { rawConfig, err := ioutil.ReadFile("testdata/" + path + ".json") assert.NoError(t, err) var config OIDCConfiguration err = json.Unmarshal(rawConfig, &config) assert.NoError(t, err) grantType := InferGrantType(&config) assert.Equal(t, GrantTypeAuthorizationCode, grantType) var noCodeResponseTypes []string for _, supportedResponseType := range config.ResponseTypesSupported { if supportedResponseType != ResponseTypeCode { noCodeResponseTypes = append(noCodeResponseTypes, supportedResponseType) } } config.ResponseTypesSupported = noCodeResponseTypes grantType = InferGrantType(&config) assert.Equal(t, GrantTypeImplicit, grantType) }) } }
explode_data.jsonl/51089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 308 }
[ 2830, 3393, 641, 802, 67971, 929, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1815, 1669, 2088, 3056, 917, 4913, 91769, 497, 330, 562, 2565, 497, 330, 3242, 15, 497, 330, 263, 50571, 258, 9207, 341, 197, 3244, 16708, 5581, 11, 2915, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTruncateToBitSize(t *testing.T) { tests := []struct{ v, bits, res uint64 }{ {0, 1, 0}, {1, 1, 1}, {0x123, 4, 0x3}, {0xabc, 4, 0xc}, {0x123, 8, 0x23}, {0xabc, 8, 0xbc}, {0x12345678abcdabcd, 64, 0x12345678abcdabcd}, {0xf2345678abcdabcd, 64, 0xf2345678abcdabcd}, } for i, test := range tests { t.Run(fmt.Sprint(i), func(t *testing.T) { res := truncateToBitSize(test.v, test.bits) if res != test.res { t.Fatalf("truncateToBitSize(0x%x, %v)=0x%x, want 0x%x", test.v, test.bits, res, test.res) } }) } }
explode_data.jsonl/73782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 1282, 26900, 1249, 8344, 1695, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 90, 348, 11, 9472, 11, 592, 2622, 21, 19, 335, 515, 197, 197, 90, 15, 11, 220, 16, 11, 220, 15, 1583, 197, 197, 90, 16, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestServiceLoopCatchError injects panics into each stage of the service
// loop — route, responder, transformer, and the dispatch itself (nil
// Handler) — and asserts via assert.NotPanics that serviceLoop recovers
// every time. Each round re-creates the driver stream, sends the
// error-triggering message, sleeps briefly, then sends a harmless message
// to unblock the loop before daemon.ShutDown(). NOTE(review): relies on
// 30ms sleeps for goroutine scheduling — timing-sensitive by design.
func TestServiceLoopCatchError(t *testing.T) { if dipper.Logger == nil { f, _ := os.OpenFile(os.DevNull, os.O_APPEND, 0o777) defer f.Close() dipper.GetLogger("test service", "DEBUG", f, f) } svc := &Service{ name: "testsvc", driverRuntimes: map[string]*driver.Runtime{ "d1": { State: driver.DriverAlive, Handler: driver.NewDriver(map[string]interface{}{ "name": "testdriver1", "type": "builtin", "handlerData": map[string]interface{}{ "shortName": "testdriver1", }, }), }, }, responders: map[string][]MessageResponder{ "test:error1": { func(d *driver.Runtime, m *dipper.Message) { panic(fmt.Errorf("error in responder")) }, }, }, transformers: map[string][]func(*driver.Runtime, *dipper.Message) *dipper.Message{ "test:error2": { func(d *driver.Runtime, m *dipper.Message) *dipper.Message { panic(fmt.Errorf("error in transformer")) }, }, }, Route: func(m *dipper.Message) []RoutedMessage { if m.Channel == "test" && m.Subject == "error0" { panic(fmt.Errorf("error in route")) } return nil }, } svc.driverRuntimes["d1"].Stream = make(chan dipper.Message, 1) svc.driverRuntimes["d1"].Output, _ = os.OpenFile(os.DevNull, os.O_APPEND, 0o777) go func() { assert.NotPanics(t, svc.serviceLoop, "service loop should recover panic in route") }() // injecting error in route svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "error0", } time.Sleep(30 * time.Millisecond) // quiting faster by send an extra message svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "noerror", } daemon.ShutDown() daemon.ShuttingDown = false svc.driverRuntimes["d1"].Stream = make(chan dipper.Message, 1) svc.driverRuntimes["d1"].Output, _ = os.OpenFile(os.DevNull, os.O_APPEND, 0o777) go func() { assert.NotPanics(t, svc.serviceLoop, "service loop should recover panic in responder") }() // injecting error in responder svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "error1", } time.Sleep(30 * time.Millisecond) // quiting faster by send an
extra message svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "noerror", } daemon.ShutDown() daemon.ShuttingDown = false svc.driverRuntimes["d1"].Stream = make(chan dipper.Message, 1) svc.driverRuntimes["d1"].Output, _ = os.OpenFile(os.DevNull, os.O_APPEND, 0o777) go func() { assert.NotPanics(t, svc.serviceLoop, "service loop should recover panic in transformer") }() // injecting error in transformer svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "error2", } time.Sleep(30 * time.Millisecond) // quiting faster by send an extra message svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "noerror", } daemon.ShutDown() daemon.ShuttingDown = false svc.driverRuntimes["d1"].Stream = make(chan dipper.Message, 1) svc.driverRuntimes["d1"].Output, _ = os.OpenFile(os.DevNull, os.O_APPEND, 0o777) // injecting error in process svc.driverRuntimes["d1"].Handler = nil go func() { assert.NotPanics(t, svc.serviceLoop, "service loop should recover panic in process itself") }() svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "error3", } // recover the service object to avoid crash during quiting svc.driverRuntimes["d1"].Handler = driver.NewDriver(map[string]interface{}{ "name": "testdriver1", "type": "builtin", "handlerData": map[string]interface{}{ "shortName": "testdriver1", }, }) time.Sleep(30 * time.Millisecond) // quiting faster by send an extra message svc.driverRuntimes["d1"].Stream <- dipper.Message{ Channel: "test", Subject: "noerror", } daemon.ShutDown() daemon.ShuttingDown = false }
explode_data.jsonl/12388
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1550 }
[ 2830, 3393, 1860, 14620, 57760, 1454, 1155, 353, 8840, 836, 8, 341, 743, 23604, 712, 12750, 621, 2092, 341, 197, 1166, 11, 716, 1669, 2643, 12953, 1703, 9638, 77611, 3280, 11, 2643, 8382, 65851, 11, 220, 15, 78, 22, 22, 22, 340, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetCompactionFailed(t *testing.T) { tmpdir, err := ioutil.TempDir("", "test") testutil.Ok(t, err) defer func() { testutil.Ok(t, os.RemoveAll(tmpdir)) }() blockDir := createBlock(t, tmpdir, genSeries(1, 1, 0, 1)) b, err := OpenBlock(nil, blockDir, nil) testutil.Ok(t, err) testutil.Equals(t, false, b.meta.Compaction.Failed) testutil.Ok(t, b.setCompactionFailed()) testutil.Equals(t, true, b.meta.Compaction.Failed) testutil.Ok(t, b.Close()) b, err = OpenBlock(nil, blockDir, nil) testutil.Ok(t, err) testutil.Equals(t, true, b.meta.Compaction.Failed) testutil.Ok(t, b.Close()) }
explode_data.jsonl/9633
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 262 }
[ 2830, 3393, 1649, 13552, 1311, 9408, 1155, 353, 8840, 836, 8, 341, 20082, 3741, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 1944, 1138, 18185, 1314, 54282, 1155, 11, 1848, 340, 16867, 2915, 368, 341, 197, 18185, 1314, 54282, 1155, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetEnvVar(t *testing.T) { ctx := context.Background() scope := NewReferenceScope(TestTx) for _, v := range setEnvVarTests { err := SetEnvVar(ctx, scope, v.Expr) if err != nil { if len(v.Error) < 1 { t.Errorf("%s: unexpected error %q", v.Name, err) } else if err.Error() != v.Error { t.Errorf("%s: error %q, want error %q", v.Name, err.Error(), v.Error) } continue } if 0 < len(v.Error) { t.Errorf("%s: no error, want error %q", v.Name, v.Error) continue } val := os.Getenv(v.Expr.EnvVar.Name) if val != v.Expect { t.Errorf("%s: value = %s, want %s", v.Name, val, v.Expect) } } }
explode_data.jsonl/50848
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 1649, 14359, 3962, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 66836, 1669, 1532, 8856, 10803, 31159, 31584, 692, 2023, 8358, 348, 1669, 2088, 738, 14359, 3962, 18200, 341, 197, 9859, 1669, 2573, 14359, 3962, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRabbitMQGetMetricSpecForScaling(t *testing.T) { for _, testData := range rabbitMQMetricIdentifiers { meta, err := parseRabbitMQMetadata(&ScalerConfig{ResolvedEnv: sampleRabbitMqResolvedEnv, TriggerMetadata: testData.metadataTestData.metadata, AuthParams: nil}) if err != nil { t.Fatal("Could not parse metadata:", err) } mockRabbitMQScaler := rabbitMQScaler{ metadata: meta, connection: nil, channel: nil, httpClient: http.DefaultClient, } metricSpec := mockRabbitMQScaler.GetMetricSpecForScaling() metricName := metricSpec[0].External.Metric.Name if metricName != testData.name { t.Error("Wrong External metric source name:", metricName, "wanted:", testData.name) } } }
explode_data.jsonl/61599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 268 }
[ 2830, 3393, 49, 19949, 35169, 1949, 54310, 8327, 2461, 59684, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 67348, 1669, 2088, 38724, 35169, 54310, 28301, 11836, 341, 197, 84004, 11, 1848, 1669, 4715, 49, 19949, 35169, 14610, 2099, 59553, 264...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRepositoryRelease_String(t *testing.T) { v := RepositoryRelease{ TagName: String(""), TargetCommitish: String(""), Name: String(""), Body: String(""), Draft: Bool(false), Prerelease: Bool(false), ID: Int64(0), CreatedAt: &Timestamp{}, PublishedAt: &Timestamp{}, URL: String(""), HTMLURL: String(""), AssetsURL: String(""), UploadURL: String(""), ZipballURL: String(""), TarballURL: String(""), Author: &User{}, NodeID: String(""), } want := `github.RepositoryRelease{TagName:"", TargetCommitish:"", Name:"", Body:"", Draft:false, Prerelease:false, ID:0, CreatedAt:github.Timestamp{0001-01-01 00:00:00 +0000 UTC}, PublishedAt:github.Timestamp{0001-01-01 00:00:00 +0000 UTC}, URL:"", HTMLURL:"", AssetsURL:"", UploadURL:"", ZipballURL:"", TarballURL:"", Author:github.User{}, NodeID:""}` if got := v.String(); got != want { t.Errorf("RepositoryRelease.String = %v, want %v", got, want) } }
explode_data.jsonl/33289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 481 }
[ 2830, 3393, 4624, 16077, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 26377, 16077, 515, 197, 197, 22616, 25, 260, 923, 445, 4461, 197, 197, 6397, 33441, 812, 25, 923, 445, 4461, 197, 21297, 25, 310, 923, 445, 4461, 197, 197, 54...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestAdminApplet drives the admin applet over raw APDUs with GoConvey:
// it selects the applet (expecting status 0x9000), rejects over/under-length
// PINs (0x6700), runs the command suite with the PIN both unverified and
// verified, and walks the retry counter down on bad PINs (0x63C2, 0x63C1)
// before a correct PIN restores success. The Reset block clears verification
// state between Convey branches without consuming retries.
// NOTE(review): `_, code, err =` vs `:=` is deliberate — several sends
// reuse the outer closure's variables; keep the shadowing as-is.
func TestAdminApplet(t *testing.T) { Convey("Connecting to applet", t, func(ctx C) { app, err := New() So(err, ShouldBeNil) defer app.Close() Convey("Admin Applet behaves correctly", func(ctx C) { _, code, err := app.Send([]byte{0x00, 0xA4, 0x04, 0x00, 0x05, 0xF0, 0x00, 0x00, 0x00, 0x00}) So(err, ShouldBeNil) So(code, ShouldEqual, 0x9000) Convey("If pin is too long or too short", func(ctx C) { pin := make([]byte, 129) _, code, err := app.Send(append([]byte{0x00, 0x20, 0x00, 0x00, byte(len(pin))}, pin...)) So(err, ShouldBeNil) So(code, ShouldEqual, 0x6700) _, code, err = app.Send([]byte{0x00, 0x20, 0x00, 0x00, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01}) So(err, ShouldBeNil) So(code, ShouldEqual, 0x6700) }) Convey("When pin is not verified", commandTests(false, app)) Convey("When pin is verified", commandTests(true, app)) Convey("If pin verification fails", func(ctx C) { _, code, err = app.Send([]byte{0x00, 0x20, 0x00, 0x00, 0x06, 0x31, 0x32, 0x33, 0x34, 0x35, 0x35}) So(err, ShouldBeNil) So(code, ShouldEqual, 0x63C2) _, code, err := app.Send([]byte{0x00, 0x20, 0x00, 0x00, 0x07, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37}) So(err, ShouldBeNil) So(code, ShouldEqual, 0x63C1) _, code, err = app.Send([]byte{0x00, 0x20, 0x00, 0x00, 0x06, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36}) So(err, ShouldBeNil) So(code, ShouldEqual, 0x9000) }) Reset(func() { // Reset validation status without decreasing the counter _, code, err = app.Send([]byte{0x00, 0x20, 0x00, 0x00, 0x01, 0x00}) So(err, ShouldBeNil) So(code, ShouldEqual, 0x6700) // Read retry counter _, code, err = app.Send([]byte{0x00, 0x20, 0x00, 0x00, 0x00}) So(err, ShouldBeNil) So(code, ShouldEqual, 0x63C3) }) }) }) }
explode_data.jsonl/52257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 923 }
[ 2830, 3393, 7210, 2164, 1149, 1155, 353, 8840, 836, 8, 1476, 93070, 5617, 445, 62924, 311, 906, 1149, 497, 259, 11, 2915, 7502, 356, 8, 1476, 197, 28236, 11, 1848, 1669, 1532, 741, 197, 76912, 3964, 11, 12260, 3430, 19064, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMaxPathLen(t *testing.T) { template := &Certificate{ SerialNumber: big.NewInt(1), Subject: pkix.Name{ CommonName: "Σ Acme Co", }, NotBefore: time.Unix(1000, 0), NotAfter: time.Unix(100000, 0), BasicConstraintsValid: true, IsCA: true, } cert1 := serialiseAndParse(t, template) if m := cert1.MaxPathLen; m != -1 { t.Errorf("Omitting MaxPathLen didn't turn into -1, got %d", m) } if cert1.MaxPathLenZero { t.Errorf("Omitting MaxPathLen resulted in MaxPathLenZero") } template.MaxPathLen = 1 cert2 := serialiseAndParse(t, template) if m := cert2.MaxPathLen; m != 1 { t.Errorf("Setting MaxPathLen didn't work. Got %d but set 1", m) } if cert2.MaxPathLenZero { t.Errorf("Setting MaxPathLen resulted in MaxPathLenZero") } template.MaxPathLen = 0 template.MaxPathLenZero = true cert3 := serialiseAndParse(t, template) if m := cert3.MaxPathLen; m != 0 { t.Errorf("Setting MaxPathLenZero didn't work, got %d", m) } if !cert3.MaxPathLenZero { t.Errorf("Setting MaxPathLen to zero didn't result in MaxPathLenZero") } }
explode_data.jsonl/68005
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 444 }
[ 2830, 3393, 5974, 1820, 11271, 1155, 353, 8840, 836, 8, 341, 22832, 1669, 609, 33202, 515, 197, 93658, 2833, 25, 2409, 7121, 1072, 7, 16, 1326, 197, 197, 13019, 25, 22458, 941, 2967, 515, 298, 90580, 675, 25, 330, 144624, 6381, 2660, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestParseDERCRL(t *testing.T) { derBytes := fromBase64(derCRLBase64) certList, err := ParseDERCRL(derBytes) if err != nil { t.Errorf("error parsing: %s", err) return } numCerts := len(certList.TBSCertList.RevokedCertificates) expected := 88 if numCerts != expected { t.Errorf("bad number of revoked certificates. got: %d want: %d", numCerts, expected) } if certList.HasExpired(time.Unix(1302517272, 0)) { t.Errorf("CRL has expired (but shouldn't have)") } // Can't check the signature here without a package cycle. }
explode_data.jsonl/56553
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 14463, 11391, 34, 4728, 1155, 353, 8840, 836, 8, 341, 197, 1107, 7078, 1669, 504, 3978, 21, 19, 7, 1107, 34, 4728, 3978, 21, 19, 340, 1444, 529, 852, 11, 1848, 1669, 14775, 11391, 34, 4728, 7, 1107, 7078, 340, 743, 184...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHasVar(t *testing.T) { for _, tt := range []struct { name string r *Rule s string want bool }{ { name: "has var", r: &Rule{ Matchers: []orderedMatcher{ &ByteMatch{ Variable: "foovar", }, }, }, s: "foovar", want: true, }, { name: "has var", r: &Rule{ Matchers: []orderedMatcher{ &ByteMatch{ Variable: "barvar", }, }, }, s: "foovar", want: false, }, { name: "no byte matchers", r: &Rule{}, s: "foovar", want: false, }, } { got := tt.r.HasVar(tt.s) if got != tt.want { t.Fatalf("got=%v; want=%v", got, tt.want) } } }
explode_data.jsonl/59698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 10281, 3962, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 914, 198, 197, 7000, 262, 353, 11337, 198, 197, 1903, 262, 914, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTransportClone(t *testing.T) { tr := &Transport{ Proxy: func(*Request) (*url.URL, error) { panic("") }, DialContext: func(ctx context.Context, network, addr string) (net.Conn, error) { panic("") }, Dial: func(network, addr string) (net.Conn, error) { panic("") }, DialTLS: func(network, addr string) (net.Conn, error) { panic("") }, TLSClientConfig: new(tls.Config), TLSHandshakeTimeout: time.Second, DisableKeepAlives: true, DisableCompression: true, MaxIdleConns: 1, MaxIdleConnsPerHost: 1, MaxConnsPerHost: 1, IdleConnTimeout: time.Second, ResponseHeaderTimeout: time.Second, ExpectContinueTimeout: time.Second, ProxyConnectHeader: Header{}, MaxResponseHeaderBytes: 1, ForceAttemptHTTP2: true, TLSNextProto: map[string]func(authority string, c *tls.Conn) RoundTripper{ "foo": func(authority string, c *tls.Conn) RoundTripper { panic("") }, }, ReadBufferSize: 1, WriteBufferSize: 1, } tr2 := tr.Clone() rv := reflect.ValueOf(tr2).Elem() rt := rv.Type() for i := 0; i < rt.NumField(); i++ { sf := rt.Field(i) if !token.IsExported(sf.Name) { continue } if rv.Field(i).IsZero() { t.Errorf("cloned field t2.%s is zero", sf.Name) } } if _, ok := tr2.TLSNextProto["foo"]; !ok { t.Errorf("cloned Transport lacked TLSNextProto 'foo' key") } // But test that a nil TLSNextProto is kept nil: tr = new(Transport) tr2 = tr.Clone() if tr2.TLSNextProto != nil { t.Errorf("Transport.TLSNextProto unexpected non-nil") } }
explode_data.jsonl/14182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 730 }
[ 2830, 3393, 27560, 37677, 1155, 353, 8840, 836, 8, 341, 25583, 1669, 609, 27560, 515, 197, 197, 16219, 25, 1698, 2915, 4071, 1900, 8, 4609, 1085, 20893, 11, 1465, 8, 314, 21975, 39047, 1153, 197, 10957, 530, 1972, 25, 310, 2915, 7502,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypes(t *testing.T) { var type1 Min8Max28NumericText assert.NotNil(t, type1.Validate()) type1 = "test" assert.NotNil(t, type1.Validate()) type1 = "1111111111" assert.Nil(t, type1.Validate()) var type2 InvestigationStatus1Code assert.NotNil(t, type2.Validate()) type2 = "test" assert.NotNil(t, type2.Validate()) type2 = "NOAP" assert.Nil(t, type2.Validate()) var type3 TransactionRequestType1Code assert.NotNil(t, type3.Validate()) type3 = "test" assert.NotNil(t, type3.Validate()) type3 = "OREC" assert.Nil(t, type3.Validate()) var type4 ExternalAccountIdentification1Code assert.NotNil(t, type4.Validate()) type4 = "test" assert.Nil(t, type4.Validate()) var type5 ExternalCashAccountType1Code assert.NotNil(t, type5.Validate()) type5 = "test" assert.Nil(t, type5.Validate()) var type6 ExternalClearingSystemIdentification1Code assert.NotNil(t, type6.Validate()) type6 = "test" assert.Nil(t, type6.Validate()) var type7 ExternalFinancialInstitutionIdentification1Code assert.NotNil(t, type7.Validate()) type7 = "test" assert.Nil(t, type7.Validate()) var type8 ExternalOrganisationIdentification1Code assert.NotNil(t, type8.Validate()) type8 = "test" assert.Nil(t, type8.Validate()) var type9 ExternalPersonIdentification1Code assert.NotNil(t, type9.Validate()) type9 = "test" assert.Nil(t, type9.Validate()) var type10 StatusResponse1Code assert.NotNil(t, type10.Validate()) type10 = "test" assert.NotNil(t, type10.Validate()) type10 = "PART" assert.Nil(t, type10.Validate()) var type11 InvestigatedParties1Code assert.NotNil(t, type11.Validate()) type11 = "test" assert.NotNil(t, type11.Validate()) type11 = "OWNE" assert.Nil(t, type11.Validate()) }
explode_data.jsonl/33449
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 689 }
[ 2830, 3393, 4173, 1155, 353, 8840, 836, 8, 341, 2405, 943, 16, 3386, 23, 5974, 17, 23, 36296, 1178, 198, 6948, 93882, 1155, 11, 943, 16, 47667, 2398, 13158, 16, 284, 330, 1944, 698, 6948, 93882, 1155, 11, 943, 16, 47667, 2398, 13158...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadWaitsForLock(t *testing.T) { t.Parallel() dir, remove := mustTempDir(t) defer remove() path := filepath.Join(dir, "timestamp.txt") f, err := lockedfile.Create(path) if err != nil { t.Fatalf("Create: %v", err) } defer f.Close() const ( part1 = "part 1\n" part2 = "part 2\n" ) _, err = f.WriteString(part1) if err != nil { t.Fatalf("WriteString: %v", err) } t.Logf("WriteString(%q) = <nil>", part1) wait := mustBlock(t, "Read", func() { b, err := lockedfile.Read(path) if err != nil { t.Errorf("Read: %v", err) return } const want = part1 + part2 got := string(b) if got == want { t.Logf("Read(_) = %q", got) } else { t.Errorf("Read(_) = %q, _; want %q", got, want) } }) _, err = f.WriteString(part2) if err != nil { t.Errorf("WriteString: %v", err) } else { t.Logf("WriteString(%q) = <nil>", part2) } f.Close() wait(t) }
explode_data.jsonl/44954
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 423 }
[ 2830, 3393, 4418, 54, 56479, 2461, 11989, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 48532, 11, 4057, 1669, 1969, 12151, 6184, 1155, 340, 16867, 4057, 2822, 26781, 1669, 26054, 22363, 14161, 11, 330, 13035, 3909, 5130, 1166, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestExpandUserNothingToExpand(t *testing.T) { expected := "does/not/expand" path, err := ExpandUser(expected) assert.NilError(t, err) assert.Equal(t, expected, path) }
explode_data.jsonl/34410
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 38946, 1474, 23780, 1249, 38946, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 330, 27057, 62441, 14, 32317, 698, 26781, 11, 1848, 1669, 50141, 1474, 15253, 692, 6948, 59678, 1454, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestRawArtifact(t *testing.T) { a := &RawArtifact{Data: "my-data"} assert.True(t, a.HasLocation()) assert.Error(t, a.SetKey("my-key")) _, err := a.GetKey() assert.Error(t, err) }
explode_data.jsonl/26024
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 20015, 85578, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 609, 20015, 85578, 90, 1043, 25, 330, 2408, 13945, 16707, 6948, 32443, 1155, 11, 264, 16152, 4707, 2398, 6948, 6141, 1155, 11, 264, 4202, 1592, 445, 2408, 16173, 5455,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestKraken_GetUnfinishOrders(t *testing.T) { ords, err := k.GetUnfinishOrders(goex.NewCurrencyPair(goex.XBT, goex.USD)) assert.Nil(t, err) t.Log(ords) }
explode_data.jsonl/44494
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 81165, 3366, 13614, 1806, 30150, 24898, 1155, 353, 8840, 836, 8, 341, 197, 2260, 11, 1848, 1669, 595, 2234, 1806, 30150, 24898, 47415, 327, 7121, 26321, 12443, 47415, 327, 4338, 17602, 11, 728, 327, 13, 26749, 1171, 6948, 59...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestHoard_GetWithError(t *testing.T) { h := Make(ExpiresNever) result, err := h.GetWithError("key", func() (interface{}, error, *Expiration) { return "first", nil, ExpiresNever }) assert.Equal(t, result, "first") assert.Nil(t, err) result, err = h.GetWithError("key2", func() (interface{}, error, *Expiration) { return "second", errors.New("EXTERMINATE!!!"), ExpiresNever }) assert.Equal(t, "second", result) assert.NotNil(t, err) }
explode_data.jsonl/82499
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 39, 33386, 13614, 66102, 1155, 353, 8840, 836, 8, 1476, 9598, 1669, 7405, 7, 65331, 26155, 692, 9559, 11, 1848, 1669, 305, 2234, 66102, 445, 792, 497, 2915, 368, 320, 4970, 22655, 1465, 11, 353, 66301, 8, 341, 197, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogToStdoutSet(t *testing.T) { cfg := &AutomaticLoggingConfig{ Backend: BackendStdout, Spans: true, } p, err := newTraceProcessor(&automaticLoggingProcessor{}, cfg) require.NoError(t, err) require.True(t, p.(*automaticLoggingProcessor).logToStdout) err = p.Start(context.Background(), componenttest.NewNopHost()) require.NoError(t, err) cfg = &AutomaticLoggingConfig{ Backend: BackendLogs, Spans: true, } p, err = newTraceProcessor(&automaticLoggingProcessor{}, cfg) require.NoError(t, err) require.False(t, p.(*automaticLoggingProcessor).logToStdout) }
explode_data.jsonl/73351
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 2201, 1249, 22748, 411, 1649, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 609, 62790, 34575, 2648, 515, 197, 197, 29699, 25, 55260, 22748, 411, 345, 197, 197, 6406, 596, 25, 256, 830, 345, 197, 630, 3223, 11, 1848, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateDecoration(t *testing.T) { defCfg := prowapi.DecorationConfig{ UtilityImages: &prowjobv1.UtilityImages{ CloneRefs: "clone-me", InitUpload: "upload-me", Entrypoint: "enter-me", Sidecar: "official-drink-of-the-org", }, GCSCredentialsSecret: "upload-secret", GCSConfiguration: &prowjobv1.GCSConfiguration{ PathStrategy: prowjobv1.PathStrategyExplicit, DefaultOrg: "so-org", DefaultRepo: "very-repo", }, } cases := []struct { name string container v1.Container config *prowapi.DecorationConfig pass bool }{ { name: "allow no decoration", pass: true, }, { name: "happy case with cmd", config: &defCfg, container: v1.Container{ Command: []string{"hello", "world"}, }, pass: true, }, { name: "happy case with args", config: &defCfg, container: v1.Container{ Args: []string{"hello", "world"}, }, pass: true, }, { name: "reject invalid decoration config", config: &prowapi.DecorationConfig{}, container: v1.Container{ Command: []string{"hello", "world"}, }, }, { name: "reject container that has no cmd, no args", config: &defCfg, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { switch err := validateDecoration(tc.container, tc.config); { case err == nil && !tc.pass: t.Error("validation failed to raise an error") case err != nil && tc.pass: t.Errorf("validation should have passed, got: %v", err) } }) } }
explode_data.jsonl/8074
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 664 }
[ 2830, 3393, 17926, 19431, 1155, 353, 8840, 836, 8, 341, 7452, 42467, 1669, 47558, 2068, 22442, 7614, 2648, 515, 197, 15980, 12189, 14228, 25, 609, 79, 651, 8799, 85, 16, 62652, 14228, 515, 298, 197, 37677, 82807, 25, 220, 330, 19982, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestWithBrowser(t *testing.T) { // an easy way to check if auth works with webserver // to test, run with // $ go test -run TestWithBrowser -- server // configure a browser to use the printed proxy address, use the proxy // and exit with Ctrl-C. It will throw error if your haven't acutally used the proxy if os.Args[len(os.Args)-1] != "server" { return } proxy := goproxy.NewProxyHttpServer() println("proxy localhost port 8082") access := int32(0) proxy.OnRequest().Do(auth.Basic("my_realm", func(user, passwd string) bool { atomic.AddInt32(&access, 1) return user == "user" && passwd == "1234" })) l, err := net.Listen("tcp", "localhost:8082") if err != nil { t.Fatal(err) } ch := make(chan os.Signal) signal.Notify(ch, os.Interrupt) go func() { <-ch l.Close() }() http.Serve(l, proxy) if access <= 0 { t.Error("No one accessed the proxy") } }
explode_data.jsonl/43783
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 2354, 17878, 1155, 353, 8840, 836, 8, 341, 197, 322, 458, 4135, 1616, 311, 1779, 421, 4166, 4278, 448, 3482, 4030, 198, 197, 322, 311, 1273, 11, 1598, 448, 198, 197, 322, 400, 728, 1273, 481, 6108, 3393, 2354, 17878, 117...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestKafkaClient_Configure(t *testing.T) { module := fixtureModule() module.Configure("test", "consumer.test") assert.NotNil(t, module.saramaConfig, "Expected saramaConfig to be populated") assert.Equal(t, "__consumer_offsets", module.offsetsTopic, "Default OffsetTopic value of __consumer_offsets did not get set") }
explode_data.jsonl/34249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 42, 21883, 2959, 15100, 17781, 1155, 353, 8840, 836, 8, 341, 54020, 1669, 12507, 3332, 741, 54020, 78281, 445, 1944, 497, 330, 46764, 5958, 1138, 6948, 93882, 1155, 11, 4688, 514, 637, 64, 2648, 11, 330, 18896, 274, 637, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_readHash_(t *testing.T) { if PrintTestNames { printTestName() } var test = func(input []byte) { var resultHash []byte { buf := bytes.NewBuffer(input) resultHash = readHash(buf) } var expectHash []byte { buf := bytes.NewBuffer(input) expectHash = makeHash(buf.Bytes()) } if !bytes.Equal(resultHash, expectHash) { t.Errorf("\n input:\n\t%v\n%s\n expect:%v\n\t result:\n\t%v\n", input, string(input), expectHash, resultHash) } } TempBufferSize = 100 test(nil) test([]byte("abc")) test([]byte(strings.Repeat("abc", 1024))) }
explode_data.jsonl/70784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 6443, 6370, 8361, 83, 353, 8840, 836, 8, 341, 743, 8213, 2271, 7980, 341, 197, 6900, 2271, 675, 741, 197, 532, 2405, 1273, 284, 2915, 5384, 3056, 3782, 8, 341, 197, 2405, 1102, 6370, 3056, 3782, 198, 197, 197, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMissingHeadRefs(t *testing.T) { require := require.New(t) cwd, err := os.Getwd() require.NoError(err) path := filepath.Join(cwd, "_testdata") lib, err := siva.NewLibrary("siva", osfs.New(path), siva.LibraryOptions{ RootedRepo: true, }) require.NoError(err) pool := gitbase.NewRepositoryPool(cache.NewObjectLRUDefault(), lib) engine := newBaseEngine(pool) session := gitbase.NewSession(pool) ctx := sql.NewContext(context.TODO(), sql.WithSession(session)) _, iter, err := engine.Query(ctx, "SELECT * FROM refs") require.NoError(err) rows, err := sql.RowIterToRows(iter) require.NoError(err) require.Len(rows, 57) }
explode_data.jsonl/13695
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 25080, 12346, 82807, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 1444, 6377, 11, 1848, 1669, 2643, 2234, 6377, 741, 17957, 35699, 3964, 692, 26781, 1669, 26054, 22363, 1337, 6377, 11, 9000, 92425, 5130, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestControllerUpdateEventWithWrongController(t *testing.T) { c, tc := makeController("v1", "Pod") c.Update(simpleOwnedPod("unit", "test")) validateNotSent(t, tc, sourcesv1beta1.ApiServerSourceUpdateRefEventType) }
explode_data.jsonl/39266
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 2051, 4289, 1556, 2354, 29185, 2051, 1155, 353, 8840, 836, 8, 341, 1444, 11, 17130, 1669, 1281, 2051, 445, 85, 16, 497, 330, 23527, 1138, 1444, 16689, 1141, 6456, 57641, 23527, 445, 3843, 497, 330, 1944, 5455, 197, 7067, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNormalizeComponentName(t *testing.T) { if name := "lib.FooBar"; normalizeComponentName(name) != "lib.foobar" { t.Errorf("name is not lib.foobar: %s", name) } if name := "main.FooBar"; normalizeComponentName(name) != "foobar" { t.Errorf("name is not foobar: %s", name) } }
explode_data.jsonl/36074
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 87824, 2189, 675, 1155, 353, 8840, 836, 8, 341, 743, 829, 1669, 330, 2740, 991, 2624, 3428, 5123, 21694, 2189, 675, 3153, 8, 961, 330, 2740, 13, 50267, 1, 341, 197, 3244, 13080, 445, 606, 374, 537, 3051, 13, 50267, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInitializeContainersV3MetadataEndpoint(t *testing.T) { task := Task{ Containers: []*apicontainer.Container{ { Name: "c1", Environment: make(map[string]string), }, }, } container := task.Containers[0] task.initializeContainersV3MetadataEndpoint(utils.NewStaticUUIDProvider("new-uuid")) // Test if the v3 endpoint id is set and the endpoint is injected to env assert.Equal(t, container.GetV3EndpointID(), "new-uuid") assert.Equal(t, container.Environment[apicontainer.MetadataURIEnvironmentVariableName], fmt.Sprintf(apicontainer.MetadataURIFormat, "new-uuid")) }
explode_data.jsonl/37201
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 227 }
[ 2830, 3393, 9928, 74632, 53, 18, 14610, 27380, 1155, 353, 8840, 836, 8, 341, 49115, 1669, 5430, 515, 197, 197, 74632, 25, 29838, 391, 51160, 1743, 33672, 515, 298, 197, 515, 571, 21297, 25, 286, 330, 66, 16, 756, 571, 197, 12723, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetTemplatesRequest_Execute(t *testing.T) { expectedResult := randomGetTemplatesResult() var givenResult []messages.GetTemplatesResult req := test.NewRequest(func(req *http.Request) (res *http.Response, err error) { result := api.Response{ Result: expectedResult, } response, _ := json.Marshal(&result) return &http.Response{ StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewBuffer(response)), }, nil }) givenResult, err := messages.GetTemplates(req). Execute() if err != nil { t.Fatalf(`Error should be nil, "%s" given`, err.Error()) } if !reflect.DeepEqual(expectedResult, givenResult) { t.Fatal("Results should be equal") } }
explode_data.jsonl/54169
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 1949, 51195, 1900, 83453, 1155, 353, 8840, 836, 8, 341, 42400, 2077, 1669, 4194, 1949, 51195, 2077, 741, 2405, 2661, 2077, 3056, 16325, 2234, 51195, 2077, 271, 24395, 1669, 1273, 75274, 18552, 6881, 353, 1254, 9659, 8, 320, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceAccountIsMutuallyExclusiveWithAuth(t *testing.T) { paramUri := fmt.Sprintf("localhost?user=test&pass=yep&useServiceAccount=true") _, err := newHawkularSource(paramUri) if err == nil { t.Errorf("Expected error from newHawkularSource") } }
explode_data.jsonl/55851
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 1860, 7365, 3872, 51440, 1832, 70405, 2354, 5087, 1155, 353, 8840, 836, 8, 341, 36037, 13899, 1669, 8879, 17305, 445, 8301, 30, 872, 53538, 5, 6385, 29368, 747, 5, 810, 1860, 7365, 11265, 1138, 197, 6878, 1848, 1669, 501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSharedMap_ComputeIfAbsent(t *testing.T) { sharedMap := NewSharedMap() actual, loaded := sharedMap.ComputeIfAbsent("1", func(key string) interface{} { return "any" }) assert.False(t, loaded) assert.EqualValues(t, "any", actual) actual, loaded = sharedMap.ComputeIfAbsent("1", func(key string) interface{} { return "foo" }) assert.True(t, loaded) assert.EqualValues(t, "any", actual) }
explode_data.jsonl/77839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 16997, 2227, 16946, 8492, 2679, 80251, 1155, 353, 8840, 836, 8, 341, 197, 6100, 2227, 1669, 1532, 16997, 2227, 741, 88814, 11, 6661, 1669, 6094, 2227, 89237, 2679, 80251, 445, 16, 497, 2915, 4857, 914, 8, 3749, 6257, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWriteHumanReadableWriterError(t *testing.T) { assert := assert.New(t) err := errors.New("test") w := &errorWriter{err} assert.Equal(err, WriteEncodedValue(w, Number(42))) }
explode_data.jsonl/60907
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 7985, 33975, 57938, 6492, 1454, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 9859, 1669, 5975, 7121, 445, 1944, 1138, 6692, 1669, 609, 841, 6492, 90, 615, 532, 6948, 12808, 3964, 11, 9645, 46795, 1130, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1