text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestGetDataRateIndexFromDataRate(t *testing.T) {
for _, tc := range []struct {
Name string
BandID string
DataRate ttnpb.DataRate
ExpectedDataRateIndex int
ErrorAssertion func(error) bool
}{
{
Name: "Valid_EU",
BandID: "EU_863_870",
DataRate: ttnpb.DataRate{Modulation: &ttnpb.DataRate_LoRa{LoRa: &ttnpb.LoRaDataRate{
SpreadingFactor: 7,
Bandwidth: 125000,
}}},
ExpectedDataRateIndex: 5,
},
{
Name: "Valid_EU_FSK",
BandID: "EU_863_870",
DataRate: ttnpb.DataRate{Modulation: &ttnpb.DataRate_FSK{FSK: &ttnpb.FSKDataRate{
BitRate: 50000,
}}},
ExpectedDataRateIndex: 7,
},
{
Name: "Invalid_EU",
BandID: "EU_863_870",
DataRate: ttnpb.DataRate{Modulation: &ttnpb.DataRate_LoRa{LoRa: &ttnpb.LoRaDataRate{
SpreadingFactor: 11,
Bandwidth: 250000,
}}},
ErrorAssertion: func(err error) bool {
return errors.Resemble(err, errDataRate)
},
},
{
Name: "Empty",
BandID: "EU_863_870",
DataRate: ttnpb.DataRate{},
ErrorAssertion: func(err error) bool {
return errors.Resemble(err, errDataRate)
},
},
} {
t.Run(tc.Name, func(t *testing.T) {
a := assertions.New(t)
drIndex, err := getDataRateIndexFromDataRate(tc.BandID, tc.DataRate)
if err != nil {
if tc.ErrorAssertion == nil || !a.So(tc.ErrorAssertion(err), should.BeTrue) {
t.Fatalf("Unexpected error: %v", err)
}
} else if tc.ErrorAssertion != nil {
t.Fatalf("Expected error")
} else {
if !a.So(drIndex, should.Resemble, tc.ExpectedDataRateIndex) {
t.Fatalf("Invalid datarate: %v", drIndex)
}
}
})
}
} | explode_data.jsonl/69426 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 840
} | [
2830,
3393,
68957,
11564,
1552,
3830,
1043,
11564,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17130,
1669,
2088,
3056,
1235,
341,
197,
21297,
1698,
914,
198,
197,
12791,
437,
915,
394,
914,
198,
197,
40927,
11564,
1060,
259,
1517,
16650,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGet(t *testing.T) {
lru := New(int64(0), nil)
lru.Add("key1", String("1234"))
v, ok := lru.Get("key1")
if !ok || string(v.(String)) != "1234" {
t.Fatalf("cache hit key1=1234 failed")
}
_, ok = lru.Get("key2")
if ok {
t.Fatalf("cache miss key2 failed")
}
} | explode_data.jsonl/16543 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 130
} | [
2830,
3393,
1949,
1155,
353,
8840,
836,
8,
341,
8810,
2672,
1669,
1532,
1548,
21,
19,
7,
15,
701,
2092,
340,
8810,
2672,
1904,
445,
792,
16,
497,
923,
445,
16,
17,
18,
19,
5455,
5195,
11,
5394,
1669,
326,
2672,
2234,
445,
792,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestAnonymous(t *testing.T) {
validate := New()
validate.RegisterTagNameFunc(func(fld reflect.StructField) string {
name := strings.SplitN(fld.Tag.Get("json"), ",", 2)[0]
if name == "-" {
return ""
}
return name
})
type Test struct {
Anonymous struct {
A string `validate:"required" json:"EH"`
}
AnonymousB struct {
B string `validate:"required" json:"BEE"`
}
anonymousC struct {
c string `validate:"required"`
}
}
tst := &Test{
Anonymous: struct {
A string `validate:"required" json:"EH"`
}{
A: "1",
},
AnonymousB: struct {
B string `validate:"required" json:"BEE"`
}{
B: "",
},
anonymousC: struct {
c string `validate:"required"`
}{
c: "",
},
}
Equal(t, tst.anonymousC.c, "")
err := validate.Struct(tst)
NotEqual(t, err, nil)
errs := err.(ValidationErrors)
Equal(t, len(errs), 1)
AssertError(t, errs, "Test.AnonymousB.BEE", "Test.AnonymousB.B", "BEE", "B", "required")
fe := getError(errs, "Test.AnonymousB.BEE", "Test.AnonymousB.B")
NotEqual(t, fe, nil)
Equal(t, fe.Field(), "BEE")
Equal(t, fe.StructField(), "B")
s := struct {
c string `validate:"required"`
}{
c: "",
}
err = validate.Struct(s)
Equal(t, err, nil)
} | explode_data.jsonl/77217 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 543
} | [
2830,
3393,
32684,
1155,
353,
8840,
836,
8,
1476,
197,
7067,
1669,
1532,
741,
197,
7067,
19983,
22616,
9626,
18552,
955,
507,
8708,
51445,
1877,
8,
914,
341,
197,
11609,
1669,
9069,
19823,
45,
955,
507,
23676,
2234,
445,
2236,
3975,
8... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestQueryEscape(t *testing.T) {
for _, tt := range queryEscapeTests {
actual := QueryEscape(tt.in)
if tt.out != actual {
t.Errorf("QueryEscape(%q) = %q, want %q", tt.in, actual, tt.out)
}
// for bonus points, verify that escape:unescape is an identity.
roundtrip, err := QueryUnescape(actual)
if roundtrip != tt.in || err != nil {
t.Errorf("QueryUnescape(%q) = %q, %s; want %q, %s", actual, roundtrip, err, tt.in, "[no error]")
}
}
} | explode_data.jsonl/71722 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 198
} | [
2830,
3393,
2859,
48124,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17853,
1669,
2088,
3239,
48124,
18200,
341,
197,
88814,
1669,
11361,
48124,
47152,
1858,
340,
197,
743,
17853,
2532,
961,
5042,
341,
298,
3244,
13080,
445,
2859,
48124,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestInterpretOneCharacterStringLiteralType(t *testing.T) {
t.Parallel()
inter := parseCheckAndInterpret(t, `
fun test(): Type {
let c: String = "x"
return c.getType()
}
`)
result, err := inter.Invoke("test")
require.NoError(t, err)
require.Equal(t,
interpreter.TypeValue{Type: interpreter.PrimitiveStaticTypeString},
result,
)
} | explode_data.jsonl/73422 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 153
} | [
2830,
3393,
3306,
8043,
3966,
12404,
703,
17350,
929,
1155,
353,
8840,
836,
8,
1476,
3244,
41288,
7957,
2822,
58915,
1669,
4715,
3973,
3036,
3306,
8043,
1155,
11,
22074,
262,
2464,
1273,
4555,
3990,
341,
286,
1077,
272,
25,
923,
284,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCacheLargePacket(t *testing.T) {
data := make([]byte, pageBytes*3)
test(t, []testSequence{
{
in: layers.TCP{
SrcPort: 1,
DstPort: 2,
Seq: 1001,
BaseLayer: layers.BaseLayer{Payload: data},
},
want: []Reassembly{},
},
{
in: layers.TCP{
SrcPort: 1,
DstPort: 2,
Seq: 1000,
SYN: true,
BaseLayer: layers.BaseLayer{Payload: []byte{}},
},
want: []Reassembly{
Reassembly{
Start: true,
Bytes: []byte{},
},
Reassembly{
Bytes: data[:pageBytes],
},
Reassembly{
Bytes: data[pageBytes : pageBytes*2],
},
Reassembly{
Bytes: data[pageBytes*2 : pageBytes*3],
},
},
},
})
} | explode_data.jsonl/9657 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 425
} | [
2830,
3393,
8233,
34253,
16679,
1155,
353,
8840,
836,
8,
972,
8924,
1669,
1281,
10556,
3782,
11,
2150,
7078,
9,
18,
1218,
18185,
1155,
11,
3056,
1944,
14076,
1666,
197,
197,
1666,
298,
17430,
25,
13617,
836,
7123,
1666,
571,
7568,
128... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUnmarshalerPanic(t *testing.T) {
defer func() {
if r := recover(); r == nil {
t.Errorf("recover = nil, want error")
}
}()
m := &fakeUnmarshaler{err: errors.New("blah")}
MustUnmarshal(m, nil)
} | explode_data.jsonl/66764 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
1806,
27121,
261,
47,
31270,
1155,
353,
8840,
836,
8,
341,
16867,
2915,
368,
341,
197,
743,
435,
1669,
11731,
2129,
435,
621,
2092,
341,
298,
3244,
13080,
445,
74283,
284,
2092,
11,
1366,
1465,
1138,
197,
197,
532,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestParseIfStatementInit(t *testing.T) {
p := createParser(`if x = 0; x > 5 {
}`)
bvmUtils.Assert(t, isIfStatement(p), "should detect if statement")
parseIfStatement(p)
bvmUtils.AssertNow(t, len(p.errs) == 0, p.errs.Format())
first := p.scope.Next()
bvmUtils.Assert(t, first.Type() == ast.IfStatement, "Asteroid Errors: Node Error: Wrong node type. ")
} | explode_data.jsonl/49712 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 148
} | [
2830,
3393,
14463,
2679,
8636,
3803,
1155,
353,
8840,
836,
8,
341,
3223,
1669,
1855,
6570,
5809,
333,
856,
284,
220,
15,
26,
856,
861,
220,
20,
1476,
197,
27085,
2233,
7338,
4209,
11711,
1155,
11,
374,
2679,
8636,
1295,
701,
330,
54... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestStateToTerminalViewUnresourcedYAMLManifest(t *testing.T) {
m := k8sManifest(t, model.UnresourcedYAMLManifestName, testyaml.SanchoYAML)
state := newState([]model.Manifest{m})
krs := state.ManifestTargets[m.Name].State.K8sRuntimeState()
krs.ApplyFilter = yamlToApplyFilter(t, testyaml.SanchoYAML)
state.ManifestTargets[m.Name].State.RuntimeState = krs
v := StateToTerminalView(*state, &sync.RWMutex{})
assert.Equal(t, 2, len(v.Resources))
r, _ := v.Resource(m.Name)
assert.Equal(t, nil, r.LastBuild().Error)
expectedInfo := view.YAMLResourceInfo{
K8sDisplayNames: []string{"sancho:deployment"},
}
assert.Equal(t, expectedInfo, r.ResourceInfo)
} | explode_data.jsonl/54856 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 271
} | [
2830,
3393,
1397,
1249,
47890,
851,
1806,
416,
53392,
56,
31102,
38495,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
595,
23,
82,
38495,
1155,
11,
1614,
10616,
416,
53392,
56,
31102,
38495,
675,
11,
1273,
41466,
808,
276,
958,
56,
31102,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_forwardProcessBlock(t *testing.T) {
tmpDir, err := ioutil.TempDir("", "")
require.NoError(t, err)
mapper, _ := eosSearch.NewBlockMapper("dfuseiohooks:event", false, "*")
preIndexer := search.NewPreIndexer(mapper, tmpDir)
cases := []struct {
name string
block *pbcodec.Block
expectedMatchCount int
expectedLastBlockRead uint64
cancelContext bool
expectedError string
}{
{
name: "sunny path",
block: newBlock("00000006a", "00000005a", trxID(2), "eosio.token"),
expectedLastBlockRead: uint64(6),
expectedMatchCount: 1,
},
{
name: "canceled context",
block: newBlock("00000006a", "00000005a", trxID(2), "eosio.token"),
cancelContext: true,
expectedMatchCount: 0,
expectedError: "rpc error: code = Canceled desc = context canceled",
},
{
name: "block to young context",
block: newBlock("00000009a", "00000001a", trxID(2), "eosio.token"),
expectedMatchCount: 0,
expectedError: "end of block range",
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
block, err := ToBStreamBlock(c.block)
require.NoError(t, err)
preprocessObj, err := preIndexer.Preprocess(block)
fObj := &forkable.ForkableObject{
Obj: preprocessObj.(*search.SingleIndex),
}
bleveQuery, err := search.NewParsedQuery("account:eosio.token")
matchCollector := search.GetMatchCollector
if matchCollector == nil {
panic(fmt.Errorf("no match collector set, should not happen, you should define a collector"))
}
incomingMatches := make(chan *pb.SearchMatch)
q := searchLive.LiveQuery{
BleveQuery: bleveQuery,
Request: &pb.BackendRequest{
LowBlockNum: 0,
HighBlockNum: uint64(8),
},
}
matchesReceived := make(chan bool)
var matches []*pb.SearchMatch
if c.expectedMatchCount > 0 {
go func() {
select {
case m := <-incomingMatches:
matches = append(matches, m)
if len(matches) == c.expectedMatchCount {
close(matchesReceived)
}
case <-time.After(100 * time.Millisecond):
close(matchesReceived)
}
}()
} else {
close(matchesReceived)
}
ctx := context.Background()
if c.cancelContext {
canceledContext, cancel := context.WithCancel(ctx)
cancel()
ctx = canceledContext
}
q.Ctx = ctx
q.MatchCollector = matchCollector
q.IncomingMatches = incomingMatches
err = q.ForwardProcessBlock(block, fObj)
if c.expectedError != "" {
require.Error(t, err)
require.Equal(t, c.expectedError, err.Error())
return
}
require.NoError(t, err)
<-matchesReceived
assert.Equal(t, c.expectedLastBlockRead, q.LastBlockRead)
assert.Len(t, matches, c.expectedMatchCount)
})
}
} | explode_data.jsonl/14625 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1295
} | [
2830,
3393,
32121,
7423,
4713,
1155,
353,
8840,
836,
8,
341,
20082,
6184,
11,
1848,
1669,
43144,
65009,
6184,
19814,
14676,
17957,
35699,
1155,
11,
1848,
340,
2109,
3106,
11,
716,
1669,
61794,
5890,
7121,
4713,
10989,
445,
2940,
810,
81... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestGetEnabledPairs(t *testing.T) {
cfg := GetConfig()
err := cfg.LoadConfig(ConfigTestFile)
if err != nil {
t.Errorf(
"Test failed. TestGetEnabledPairs. LoadConfig Error: %s", err.Error(),
)
}
_, err = cfg.GetEnabledPairs("asdf")
if err == nil {
t.Error(
"Test failed. TestGetEnabledPairs. Non-existent exchange returned nil error",
)
}
_, err = cfg.GetEnabledPairs("Bitfinex")
if err != nil {
t.Errorf(
"Test failed. TestGetEnabledPairs. Incorrect values. Err: %s", err,
)
}
} | explode_data.jsonl/21890 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 209
} | [
2830,
3393,
1949,
5462,
54228,
1155,
353,
8840,
836,
8,
341,
50286,
1669,
2126,
2648,
741,
9859,
1669,
13286,
13969,
2648,
33687,
2271,
1703,
340,
743,
1848,
961,
2092,
341,
197,
3244,
13080,
1006,
298,
197,
1,
2271,
4641,
13,
3393,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestVersionedCheckpointsSpecialCase3(t *testing.T) {
tree := NewMutableTree(db.NewMemDB(), 0)
tree.Set([]byte("n"), []byte("2wUCUs8q"))
tree.Set([]byte("l"), []byte("WQ7mvMbc"))
tree.SaveVersion()
tree.Set([]byte("N"), []byte("ved29IqU"))
tree.Set([]byte("v"), []byte("01jquVXU"))
tree.SaveVersion()
tree.Set([]byte("l"), []byte("bhIpltPM"))
tree.Set([]byte("B"), []byte("rj97IKZh"))
tree.SaveVersion()
tree.DeleteVersion(2)
tree.GetVersioned([]byte("m"), 1)
} | explode_data.jsonl/25129 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 206
} | [
2830,
3393,
5637,
291,
3973,
7706,
20366,
4207,
18,
1155,
353,
8840,
836,
8,
341,
51968,
1669,
1532,
11217,
6533,
9791,
7121,
18816,
3506,
1507,
220,
15,
692,
51968,
4202,
10556,
3782,
445,
77,
3975,
3056,
3782,
445,
17,
86,
5459,
355... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestStoreMark(t *testing.T) {
assert := assert.New(t)
db := dbm.NewMemDB()
store := NewStore(db)
// before we do anything, priority/pending are empty
priorityEv := store.PriorityEvidence()
pendingEv := store.PendingEvidence(-1)
assert.Equal(0, len(priorityEv))
assert.Equal(0, len(pendingEv))
priority := int64(10)
ev := types.NewMockGoodEvidence(2, 1, []byte("val1"))
added := store.AddNewEvidence(ev, priority)
assert.True(added)
// get the evidence. verify. should be uncommitted
ei := store.GetInfo(ev.Height(), ev.Hash())
assert.Equal(ev, ei.Evidence)
assert.Equal(priority, ei.Priority)
assert.False(ei.Committed)
// new evidence should be returns in priority/pending
priorityEv = store.PriorityEvidence()
pendingEv = store.PendingEvidence(-1)
assert.Equal(1, len(priorityEv))
assert.Equal(1, len(pendingEv))
// priority is now empty
store.MarkEvidenceAsBroadcasted(ev)
priorityEv = store.PriorityEvidence()
pendingEv = store.PendingEvidence(-1)
assert.Equal(0, len(priorityEv))
assert.Equal(1, len(pendingEv))
// priority and pending are now empty
store.MarkEvidenceAsCommitted(ev)
priorityEv = store.PriorityEvidence()
pendingEv = store.PendingEvidence(-1)
assert.Equal(0, len(priorityEv))
assert.Equal(0, len(pendingEv))
// evidence should show committed
newPriority := int64(0)
ei = store.GetInfo(ev.Height(), ev.Hash())
assert.Equal(ev, ei.Evidence)
assert.Equal(newPriority, ei.Priority)
assert.True(ei.Committed)
} | explode_data.jsonl/47406 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 528
} | [
2830,
3393,
6093,
8949,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
692,
20939,
1669,
2927,
76,
7121,
18816,
3506,
741,
57279,
1669,
1532,
6093,
9791,
692,
197,
322,
1573,
582,
653,
4113,
11,
10619,
4322,
2459,
525,
42... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestHeadTracker_ReconnectOnError(t *testing.T) {
t.Parallel()
g := gomega.NewGomegaWithT(t)
db := pgtest.NewGormDB(t)
config := newCfg(t)
orm := headtracker.NewORM(db, cltest.FixtureChainID)
ethClient, sub := cltest.NewEthClientAndSubMockWithDefaultChain(t)
ethClient.On("SubscribeNewHead", mock.Anything, mock.Anything).Return(sub, nil)
ethClient.On("SubscribeNewHead", mock.Anything, mock.Anything).Return(nil, errors.New("cannot reconnect"))
ethClient.On("SubscribeNewHead", mock.Anything, mock.Anything).Return(sub, nil)
ethClient.On("HeadByNumber", mock.Anything, (*big.Int)(nil)).Return(cltest.Head(0), nil)
chErr := make(chan error)
sub.On("Unsubscribe").Return()
sub.On("Err").Return((<-chan error)(chErr))
checker := &cltest.MockHeadTrackable{}
ht := createHeadTrackerWithChecker(ethClient, config, orm, checker)
// connect
assert.Nil(t, ht.Start())
assert.Equal(t, int32(0), checker.OnNewLongestChainCount())
// trigger reconnect loop
chErr <- errors.New("Test error to force reconnect")
g.Eventually(func() int32 { return checker.OnNewLongestChainCount() }).Should(gomega.Equal(int32(1)))
// stop
assert.NoError(t, ht.Stop())
} | explode_data.jsonl/63750 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 428
} | [
2830,
3393,
12346,
31133,
50693,
6459,
74945,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
3174,
1669,
342,
32696,
7121,
38,
32696,
2354,
51,
1155,
692,
20939,
1669,
17495,
1944,
7121,
38,
493,
3506,
1155,
340,
25873,
1669,
501... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTransformImportedTypes(t *testing.T) {
schema := []byte(`
syntax = "proto3";
package test;
import "google/protobuf/any.proto";
message ErrorStatus {
string message = 1;
repeated google.protobuf.Any details = 2;
}
`)
input := new(bytes.Buffer)
input.Write(schema)
output := new(bytes.Buffer)
transformer := proto2gql.NewTransformer(output)
if err := transformer.Transform(input); err != nil {
t.Fatal(err)
}
expected := `
type TestErrorStatus {
message: String
details: [GoogleProtobufAny]
}
`
expected = strings.TrimSpace(expected)
actual := strings.TrimSpace(output.String())
if expected != actual {
t.Fatalf("Expected %s to equal to %s", expected, actual)
}
} | explode_data.jsonl/2071 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 265
} | [
2830,
3393,
8963,
11511,
291,
4173,
1155,
353,
8840,
836,
8,
341,
1903,
3416,
1669,
3056,
3782,
61528,
197,
1903,
13662,
284,
330,
15110,
18,
876,
197,
197,
1722,
1273,
401,
197,
21918,
330,
17485,
14,
32921,
14,
3767,
57322,
3302,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestR4GetOperation(t *testing.T) {
teardown := setup(t, jsonformat.R4)
defer teardown()
orgID := "f5fe538f-c3b5-4454-8774-cd3789f59b9f"
muxCDR.HandleFunc("/store/fhir/"+cdrOrgID+"/Organization/"+orgID, func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/fhir+json;fhirVersion=4.0")
switch r.Method {
case "GET":
if !assert.Equal(t, "application/fhir+json;fhirVersion=4.0", r.Header.Get("Content-Type")) {
w.WriteHeader(http.StatusUnsupportedMediaType)
return
}
if !assert.Equal(t, cdr.APIVersion, r.Header.Get("API-Version")) {
w.WriteHeader(http.StatusPreconditionFailed)
return
}
w.WriteHeader(http.StatusOK)
_, _ = io.WriteString(w, `{
"resourceType": "Organization",
"id": "`+orgID+`",
"meta": {
"versionId": "6dfa7cc8-2000-11ea-91df-bb500f85c5e2",
"lastUpdated": "2019-12-16T12:34:40.544022+00:00"
},
"identifier": [
{
"use": "usual",
"system": "https://identity.philips-healthsuite.com/organization",
"value": "`+orgID+`"
}
],
"active": true,
"name": "Hospital2"
}
`)
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
})
retrieved, resp, err := cdrClient.OperationsR4.Get("Organization/" + orgID)
if !assert.Nil(t, err) {
return
}
if !assert.NotNil(t, resp) {
return
}
if !assert.NotNil(t, retrieved) {
return
}
assert.Equal(t, http.StatusOK, resp.StatusCode)
org := retrieved.GetOrganization()
assert.Equal(t, "Hospital2", org.Name.Value)
} | explode_data.jsonl/40619 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 690
} | [
2830,
3393,
49,
19,
1949,
8432,
1155,
353,
8840,
836,
8,
341,
197,
665,
37496,
1669,
6505,
1155,
11,
2951,
2243,
2013,
19,
340,
16867,
49304,
2822,
87625,
915,
1669,
330,
69,
20,
1859,
20,
18,
23,
69,
1786,
18,
65,
20,
12,
19,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestParseIPPortInvalid(t *testing.T) {
testcases := []string{
"",
"C0A80016",
"C0A800:1234",
"FOOBARBA:1234",
"C0A80016:0CEA:1234",
}
for _, s := range testcases {
ip, port, err := parseIPPort(s)
if ip != nil || port != uint16(0) || err == nil {
t.Errorf("Expected error for input %s, have ip = %s, port = %v, err = %v", s, ip, port, err)
}
}
} | explode_data.jsonl/52484 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 178
} | [
2830,
3393,
14463,
3298,
7084,
7928,
1155,
353,
8840,
836,
8,
341,
18185,
23910,
1669,
3056,
917,
515,
197,
197,
39680,
197,
197,
46316,
15,
32,
23,
15,
15,
16,
21,
756,
197,
197,
46316,
15,
32,
23,
15,
15,
25,
16,
17,
18,
19,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestNewIndexDatabase(t *testing.T) {
ctrl := gomock.NewController(t)
defer func() {
_ = fileutil.RemoveDir(testPath)
ctrl.Finish()
}()
mockMetadata := metadb.NewMockMetadata(ctrl)
mockMetadata.EXPECT().DatabaseName().Return("test").AnyTimes()
db, err := NewIndexDatabase(context.TODO(), testPath, mockMetadata, nil, nil)
assert.NoError(t, err)
assert.NotNil(t, db)
// can't new duplicate
db2, err := NewIndexDatabase(context.TODO(), testPath, nil, nil, nil)
assert.Error(t, err)
assert.Nil(t, db2)
err = db.Close()
assert.NoError(t, err)
} | explode_data.jsonl/33820 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 223
} | [
2830,
3393,
3564,
1552,
5988,
1155,
353,
8840,
836,
8,
341,
84381,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
2915,
368,
341,
197,
197,
62,
284,
1034,
1314,
13270,
6184,
8623,
1820,
692,
197,
84381,
991,
18176,
741,
197,
6982... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSyncJobRunPresenter_HappyPath(t *testing.T) {
newAddress := common.HexToAddress("0x9FBDa871d559710256a2502A2517b794B482Db40")
requestID := common.HexToHash("0xcafe")
txHash := common.HexToHash("0xdeadbeef")
task0RunID := models.NewID()
task1RunID := models.NewID()
job := models.JobSpec{ID: models.NewID()}
runRequest := models.RunRequest{
Payment: assets.NewLink(2),
RequestID: &requestID,
TxHash: &txHash,
Requester: &newAddress,
}
run := models.MakeJobRun(&job, time.Now(), &models.Initiator{Type: models.InitiatorRunLog}, big.NewInt(0), &runRequest)
run.TaskRuns = []models.TaskRun{
models.TaskRun{
ID: task0RunID,
Status: models.RunStatusPendingIncomingConfirmations,
ObservedIncomingConfirmations: clnull.Uint32From(1),
MinRequiredIncomingConfirmations: clnull.Uint32From(3),
},
models.TaskRun{
ID: task1RunID,
Status: models.RunStatusErrored,
Result: models.RunResult{ErrorMessage: null.StringFrom("yikes fam")},
ObservedIncomingConfirmations: clnull.Uint32From(1),
MinRequiredIncomingConfirmations: clnull.Uint32From(3),
},
}
p := SyncJobRunPresenter{JobRun: &run}
bytes, err := p.MarshalJSON()
require.NoError(t, err)
var data map[string]interface{}
err = json.Unmarshal(bytes, &data)
require.NoError(t, err)
assert.Equal(t, data["runId"], run.ID.String())
assert.Equal(t, data["jobId"], job.ID.String())
assert.Equal(t, data["status"], "in_progress")
assert.Contains(t, data, "error")
assert.Contains(t, data, "createdAt")
assert.Equal(t, data["payment"], "2")
assert.Equal(t, data["finishedAt"], nil)
assert.Contains(t, data, "tasks")
initiator, ok := data["initiator"].(map[string]interface{})
require.True(t, ok)
assert.Equal(t, initiator["type"], "runlog")
assert.Equal(t, initiator["requestId"], "0x000000000000000000000000000000000000000000000000000000000000cafe")
assert.Equal(t, initiator["txHash"], "0x00000000000000000000000000000000000000000000000000000000deadbeef")
assert.Equal(t, initiator["requester"], newAddress.Hex())
tasks, ok := data["tasks"].([]interface{})
require.True(t, ok)
require.Len(t, tasks, 2)
task0, ok := tasks[0].(map[string]interface{})
require.True(t, ok)
assert.Equal(t, task0["index"], float64(0))
assert.Contains(t, task0, "type")
assert.Equal(t, "pending_incoming_confirmations", task0["status"])
assert.Equal(t, task0["error"], nil)
assert.Equal(t, float64(1), task0["confirmations"])
assert.Equal(t, float64(3), task0["minimumConfirmations"])
task1, ok := tasks[1].(map[string]interface{})
require.True(t, ok)
assert.Equal(t, task1["index"], float64(1))
assert.Contains(t, task1, "type")
assert.Equal(t, task1["status"], "errored")
assert.Equal(t, task1["error"], "yikes fam")
} | explode_data.jsonl/66338 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1213
} | [
2830,
3393,
12154,
12245,
6727,
33849,
2039,
11144,
1820,
1155,
353,
8840,
836,
8,
341,
8638,
4286,
1669,
4185,
91538,
1249,
4286,
445,
15,
87,
24,
37,
9548,
64,
23,
22,
16,
67,
20,
20,
24,
22,
16,
15,
17,
20,
21,
64,
17,
20,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestZeroRat(t *testing.T) {
var x, y, z Rat
y.SetFrac64(0, 42)
if x.Cmp(&y) != 0 {
t.Errorf("x and y should be both equal and zero")
}
if s := x.String(); s != "0/1" {
t.Errorf("got x = %s, want 0/1", s)
}
if s := x.RatString(); s != "0" {
t.Errorf("got x = %s, want 0", s)
}
z.Add(&x, &y)
if s := z.RatString(); s != "0" {
t.Errorf("got x+y = %s, want 0", s)
}
z.Sub(&x, &y)
if s := z.RatString(); s != "0" {
t.Errorf("got x-y = %s, want 0", s)
}
z.Mul(&x, &y)
if s := z.RatString(); s != "0" {
t.Errorf("got x*y = %s, want 0", s)
}
// check for division by zero
defer func() {
if s := recover(); s == nil || s.(string) != "division by zero" {
panic(s)
}
}()
z.Quo(&x, &y)
} | explode_data.jsonl/35065 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 369
} | [
2830,
3393,
17999,
49,
266,
1155,
353,
8840,
836,
8,
341,
2405,
856,
11,
379,
11,
1147,
40577,
198,
14522,
4202,
37,
19959,
21,
19,
7,
15,
11,
220,
19,
17,
692,
743,
856,
727,
1307,
2099,
88,
8,
961,
220,
15,
341,
197,
3244,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestContains(t *testing.T) {
items := []string{"item1", "item2", "item3"}
require.True(t, contains(items, "item2"))
require.False(t, contains(items, "item5"))
} | explode_data.jsonl/4334 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 64
} | [
2830,
3393,
23805,
1155,
353,
8840,
836,
8,
341,
46413,
1669,
3056,
917,
4913,
1203,
16,
497,
330,
1203,
17,
497,
330,
1203,
18,
16707,
17957,
32443,
1155,
11,
5610,
24337,
11,
330,
1203,
17,
5455,
17957,
50757,
1155,
11,
5610,
24337,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestParseEnv(t *testing.T) {
tests := []struct {
envArray []string
expected []api.EnvVar
expectErr bool
test string
}{
{
envArray: []string{
"THIS_ENV=isOK",
"HAS_COMMAS=foo,bar",
"HAS_EQUALS=jJnro54iUu75xNy==",
},
expected: []api.EnvVar{
{
Name: "THIS_ENV",
Value: "isOK",
},
{
Name: "HAS_COMMAS",
Value: "foo,bar",
},
{
Name: "HAS_EQUALS",
Value: "jJnro54iUu75xNy==",
},
},
expectErr: false,
test: "test case 1",
},
{
envArray: []string{
"WITH_OUT_EQUALS",
},
expected: []api.EnvVar{},
expectErr: true,
test: "test case 2",
},
{
envArray: []string{
"WITH_OUT_VALUES=",
},
expected: []api.EnvVar{
{
Name: "WITH_OUT_VALUES",
Value: "",
},
},
expectErr: false,
test: "test case 3",
},
{
envArray: []string{
"=WITH_OUT_NAME",
},
expected: []api.EnvVar{},
expectErr: true,
test: "test case 4",
},
}
for _, test := range tests {
envs, err := parseEnvs(test.envArray)
if !test.expectErr && err != nil {
t.Errorf("unexpected error: %v (%s)", err, test.test)
}
if test.expectErr && err != nil {
continue
}
if !reflect.DeepEqual(envs, test.expected) {
t.Errorf("\nexpected:\n%#v\nsaw:\n%#v (%s)", test.expected, envs, test.test)
}
}
} | explode_data.jsonl/64189 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 764
} | [
2830,
3393,
14463,
14359,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
57538,
1857,
220,
3056,
917,
198,
197,
42400,
220,
3056,
2068,
81214,
3962,
198,
197,
24952,
7747,
1807,
198,
197,
18185,
414,
914,
198,
197,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestInvalidJSONUnmarshal(t *testing.T) {
cases := []struct {
name string
req map[string]interface{}
target interface{}
}{
{
name: "InvalidTTLSegments",
req: map[string]interface{}{
"ttl": "1.2.3s",
},
target: &AndroidConfig{},
},
{
name: "IncorrectTTLSeconds",
req: map[string]interface{}{
"ttl": "abcs",
},
target: &AndroidConfig{},
},
{
name: "IncorrectTTLNanoseconds",
req: map[string]interface{}{
"ttl": "10.abcs",
},
target: &AndroidConfig{},
},
{
name: "InvalidApsAlert",
req: map[string]interface{}{
"alert": 10,
},
target: &Aps{},
},
{
name: "InvalidApsSound",
req: map[string]interface{}{
"sound": 10,
},
target: &Aps{},
},
}
for _, tc := range cases {
b, err := json.Marshal(tc.req)
if err != nil {
t.Errorf("Marshal(%s) = %v; want = nil", tc.name, err)
}
if err := json.Unmarshal(b, tc.target); err == nil {
t.Errorf("Unmarshal(%s) = %v; want = error", tc.name, err)
}
}
} | explode_data.jsonl/75175 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 505
} | [
2830,
3393,
7928,
5370,
1806,
27121,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
11609,
256,
914,
198,
197,
24395,
262,
2415,
14032,
31344,
16094,
197,
28861,
3749,
16094,
197,
59403,
197,
197,
515,
298,
11609,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestShowMessageParams(t *testing.T) {
t.Parallel()
const (
want = `{"message":"error message","type":1}`
wantUnknown = `{"message":"unknown message","type":0}`
)
wantType := ShowMessageParams{
Message: "error message",
Type: MessageTypeError,
}
wantTypeUnkonwn := ShowMessageParams{
Message: "unknown message",
Type: MessageType(0),
}
t.Run("Marshal", func(t *testing.T) {
t.Parallel()
tests := []struct {
name string
field ShowMessageParams
want string
wantMarshalErr bool
wantErr bool
}{
{
name: "Valid",
field: wantType,
want: want,
wantMarshalErr: false,
wantErr: false,
},
{
name: "Unknown",
field: wantTypeUnkonwn,
want: wantUnknown,
wantMarshalErr: false,
wantErr: false,
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := json.Marshal(&tt.field)
if (err != nil) != tt.wantMarshalErr {
t.Fatal(err)
}
if diff := cmp.Diff(tt.want, string(got)); (diff != "") != tt.wantErr {
t.Errorf("%s: wantErr: %t\n(-want +got)\n%s", tt.name, tt.wantErr, diff)
}
})
}
})
t.Run("Unmarshal", func(t *testing.T) {
t.Parallel()
tests := []struct {
name string
field string
want ShowMessageParams
wantUnmarshalErr bool
wantErr bool
}{
{
name: "Valid",
field: want,
want: wantType,
wantUnmarshalErr: false,
wantErr: false,
},
{
name: "Unknown",
field: wantUnknown,
want: wantTypeUnkonwn,
wantUnmarshalErr: false,
wantErr: false,
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
var got ShowMessageParams
if err := json.Unmarshal([]byte(tt.field), &got); (err != nil) != tt.wantUnmarshalErr {
t.Fatal(err)
}
if diff := cmp.Diff(tt.want, got); (diff != "") != tt.wantErr {
t.Errorf("%s: wantErr: %t\n(-want +got)\n%s", tt.name, tt.wantErr, diff)
}
})
}
})
} | explode_data.jsonl/16158 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1241
} | [
2830,
3393,
7812,
2052,
4870,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
4777,
2399,
197,
50780,
286,
284,
1565,
4913,
1994,
3252,
841,
1943,
2198,
1313,
788,
16,
31257,
197,
50780,
13790,
284,
1565,
4913,
1994,
3252,
16088,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestWorkflowTemplateServer_DeleteClusterWorkflowTemplate(t *testing.T) {
server, ctx := getClusterWorkflowTemplateServer()
t.Run("Labelled", func(t *testing.T) {
_, err := server.DeleteClusterWorkflowTemplate(ctx, &clusterwftmplpkg.ClusterWorkflowTemplateDeleteRequest{
Name: "cluster-workflow-template-whalesay-template2",
})
assert.NoError(t, err)
})
t.Run("Unlabelled", func(t *testing.T) {
_, err := server.DeleteClusterWorkflowTemplate(ctx, &clusterwftmplpkg.ClusterWorkflowTemplateDeleteRequest{
Name: "cluster-workflow-template-whalesay-template",
})
assert.Error(t, err)
})
} | explode_data.jsonl/48139 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 225
} | [
2830,
3393,
62768,
7275,
5475,
57418,
28678,
62768,
7275,
1155,
353,
8840,
836,
8,
341,
41057,
11,
5635,
1669,
633,
28678,
62768,
7275,
5475,
741,
3244,
16708,
445,
2476,
832,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
197,
6878,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPrintStorageVersion(t *testing.T) {
commonEncodingVersion := "v1"
tests := []struct {
sv apiserverinternal.StorageVersion
expected []metav1.TableRow
}{
{
sv: apiserverinternal.StorageVersion{
ObjectMeta: metav1.ObjectMeta{
Name: "empty",
CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
},
Status: apiserverinternal.StorageVersionStatus{},
},
// Columns: Name, CommonEncodingVersion, StorageVersions, Age
expected: []metav1.TableRow{{Cells: []interface{}{"empty", "<unset>", "<unset>", "0s"}}},
},
{
sv: apiserverinternal.StorageVersion{
ObjectMeta: metav1.ObjectMeta{
Name: "valid",
CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
},
Status: apiserverinternal.StorageVersionStatus{
StorageVersions: []apiserverinternal.ServerStorageVersion{
{
APIServerID: "1",
EncodingVersion: "v1",
DecodableVersions: []string{"v1"},
},
{
APIServerID: "2",
EncodingVersion: "v1",
DecodableVersions: []string{"v1", "v2"},
},
},
CommonEncodingVersion: &commonEncodingVersion,
},
},
// Columns: Name, CommonEncodingVersion, StorageVersions, Age
expected: []metav1.TableRow{{Cells: []interface{}{"valid", "v1", "1=v1,2=v1", "0s"}}},
},
{
sv: apiserverinternal.StorageVersion{
ObjectMeta: metav1.ObjectMeta{
Name: "disagree",
CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
},
Status: apiserverinternal.StorageVersionStatus{
StorageVersions: []apiserverinternal.ServerStorageVersion{
{
APIServerID: "1",
EncodingVersion: "v1",
DecodableVersions: []string{"v1"},
},
{
APIServerID: "2",
EncodingVersion: "v1",
DecodableVersions: []string{"v1", "v2"},
},
{
APIServerID: "3",
EncodingVersion: "v2",
DecodableVersions: []string{"v2"},
},
},
},
},
// Columns: Name, CommonEncodingVersion, StorageVersions, Age
expected: []metav1.TableRow{{Cells: []interface{}{"disagree", "<unset>", "1=v1,2=v1,3=v2", "0s"}}},
},
{
sv: apiserverinternal.StorageVersion{
ObjectMeta: metav1.ObjectMeta{
Name: "agreeWithMore",
CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
},
Status: apiserverinternal.StorageVersionStatus{
StorageVersions: []apiserverinternal.ServerStorageVersion{
{
APIServerID: "1",
EncodingVersion: "v1",
DecodableVersions: []string{"v1"},
},
{
APIServerID: "2",
EncodingVersion: "v1",
DecodableVersions: []string{"v1", "v2"},
},
{
APIServerID: "3",
EncodingVersion: "v1",
DecodableVersions: []string{"v1", "v2"},
},
{
APIServerID: "4",
EncodingVersion: "v1",
DecodableVersions: []string{"v1", "v2", "v3alpha1"},
},
},
CommonEncodingVersion: &commonEncodingVersion,
},
},
// Columns: Name, CommonEncodingVersion, StorageVersions, Age
expected: []metav1.TableRow{{Cells: []interface{}{"agreeWithMore", "v1", "1=v1,2=v1,3=v1 + 1 more...", "0s"}}},
},
}
for i, test := range tests {
rows, err := printStorageVersion(&test.sv, printers.GenerateOptions{})
if err != nil {
t.Fatal(err)
}
for i := range rows {
rows[i].Object.Object = nil
}
if !reflect.DeepEqual(test.expected, rows) {
t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows))
}
}
} | explode_data.jsonl/72311 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1788
} | [
2830,
3393,
8994,
5793,
5637,
1155,
353,
8840,
836,
8,
341,
83825,
14690,
5637,
1669,
330,
85,
16,
698,
78216,
1669,
3056,
1235,
341,
197,
1903,
85,
981,
97723,
2836,
10481,
43771,
5637,
198,
197,
42400,
3056,
4059,
402,
16,
18257,
31... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestClustersReplicationFactorMin(t *testing.T) {
ctx := context.Background()
if nClusters < 5 {
t.Skip("Need at least 5 peers")
}
clusters, mock := createClusters(t)
defer shutdownClusters(t, clusters, mock)
for _, c := range clusters {
c.config.ReplicationFactorMin = nClusters - 1
c.config.ReplicationFactorMax = nClusters
}
// Shutdown two peers
clusters[nClusters-1].Shutdown(ctx)
waitForLeaderAndMetrics(t, clusters)
clusters[nClusters-2].Shutdown(ctx)
waitForLeaderAndMetrics(t, clusters)
h := test.Cid1
_, err := clusters[0].Pin(ctx, h, api.PinOptions{})
if err == nil {
t.Error("Pin should have failed as rplMin cannot be satisfied")
}
t.Log(err)
if !strings.Contains(err.Error(), fmt.Sprintf("not enough peers to allocate CID")) {
t.Fatal(err)
}
} | explode_data.jsonl/66615 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 295
} | [
2830,
3393,
94992,
18327,
1693,
20661,
6217,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
2266,
19047,
741,
743,
308,
94992,
366,
220,
20,
341,
197,
3244,
57776,
445,
23657,
518,
3245,
220,
20,
25029,
1138,
197,
630,
39407,
14605,
11,
7... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestPrometheus_GetDescIsStable(t *testing.T) {
metric := &Prometheus{
Name: "test-metric",
Labels: []*MetricLabel{
{Key: "foo", Value: "bar"},
{Key: "hello", Value: "World"},
},
Histogram: &Histogram{
Buckets: []Amount{{"10"}, {"20"}, {"30"}},
},
}
stableDesc := metric.GetDesc()
for i := 0; i < 10; i++ {
if !assert.Equal(t, stableDesc, metric.GetDesc()) {
break
}
}
} | explode_data.jsonl/26047 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 181
} | [
2830,
3393,
35186,
39705,
13614,
11065,
3872,
623,
480,
1155,
353,
8840,
836,
8,
341,
2109,
16340,
1669,
609,
35186,
39705,
515,
197,
21297,
25,
330,
1944,
1448,
16340,
756,
197,
197,
23674,
25,
29838,
54310,
2476,
515,
298,
197,
90,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestResourceSpansWireCompatibility(t *testing.T) {
// This test verifies that OTLP ProtoBufs generated using goproto lib in
// opentelemetry-proto repository OTLP ProtoBufs generated using gogoproto lib in
// this repository are wire compatible.
// Generate ResourceSpans as pdata struct.
pdataRS := generateTestResourceSpans()
// Marshal its underlying ProtoBuf to wire.
wire1, err := gogoproto.Marshal(pdataRS.orig)
assert.NoError(t, err)
assert.NotNil(t, wire1)
// Unmarshal from the wire to OTLP Protobuf in goproto's representation.
var goprotoMessage emptypb.Empty
err = goproto.Unmarshal(wire1, &goprotoMessage)
assert.NoError(t, err)
// Marshal to the wire again.
wire2, err := goproto.Marshal(&goprotoMessage)
assert.NoError(t, err)
assert.NotNil(t, wire2)
// Unmarshal from the wire into gogoproto's representation.
var gogoprotoRS2 otlptrace.ResourceSpans
err = gogoproto.Unmarshal(wire2, &gogoprotoRS2)
assert.NoError(t, err)
// Now compare that the original and final ProtoBuf messages are the same.
// This proves that goproto and gogoproto marshaling/unmarshaling are wire compatible.
assert.EqualValues(t, pdataRS.orig, &gogoprotoRS2)
} | explode_data.jsonl/79479 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 432
} | [
2830,
3393,
4783,
6406,
596,
37845,
85880,
1155,
353,
8840,
836,
8,
341,
197,
322,
1096,
1273,
87856,
429,
8605,
12567,
57677,
15064,
82,
7907,
1667,
342,
45926,
983,
3051,
304,
198,
197,
322,
1179,
6817,
35958,
9838,
983,
12542,
8605,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestVariableFunctionScoping(t *testing.T) {
withTestProcess("testvariables", t, func(p *proc.Target, fixture protest.Fixture) {
err := p.Continue()
assertNoError(err, t, "Continue() returned an error")
evalVariable(p, t, "a1")
evalVariable(p, t, "a2")
// Move scopes, a1 exists here by a2 does not
err = p.Continue()
assertNoError(err, t, "Continue() returned an error")
evalVariable(p, t, "a1")
_, err = evalVariableOrError(p, "a2")
if err == nil {
t.Fatalf("Can eval out of scope variable a2")
}
})
} | explode_data.jsonl/56229 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 207
} | [
2830,
3393,
7827,
5152,
3326,
33707,
1155,
353,
8840,
836,
8,
341,
46948,
2271,
7423,
445,
1944,
18616,
497,
259,
11,
2915,
1295,
353,
15782,
35016,
11,
12507,
8665,
991,
12735,
8,
341,
197,
9859,
1669,
281,
2451,
6232,
741,
197,
6948... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestInfoInvalidResponseJSONError(t *testing.T) {
client := &Client{
client: newMockClient(func(req *http.Request) (*http.Response, error) {
return &http.Response{
StatusCode: http.StatusOK,
Body: ioutil.NopCloser(bytes.NewReader([]byte("invalid json"))),
}, nil
}),
}
_, err := client.Info(context.Background())
if err == nil || !strings.Contains(err.Error(), "invalid character") {
t.Fatalf("expected a 'invalid character' error, got %v", err)
}
} | explode_data.jsonl/70205 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 185
} | [
2830,
3393,
1731,
7928,
2582,
5370,
1454,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
609,
2959,
515,
197,
25291,
25,
501,
11571,
2959,
18552,
6881,
353,
1254,
9659,
8,
4609,
1254,
12574,
11,
1465,
8,
341,
298,
853,
609,
1254,
12574,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPayload_Type(t *testing.T) {
t.Parallel()
payload := Payload{
"type": "test",
}
t.Run("normal test", func(t *testing.T) {
result := payload.Type()
assert.Equal(t, "test", result)
})
} | explode_data.jsonl/29917 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 88
} | [
2830,
3393,
29683,
13729,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
76272,
1669,
52916,
515,
197,
197,
44470,
788,
330,
1944,
756,
197,
630,
3244,
16708,
445,
8252,
1273,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
9559,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestModelAvg_Forward(t *testing.T) {
model := newTestModel(Avg)
g := ag.NewGraph()
ctx := nn.Context{Graph: g, Mode: nn.Training}
// == Forward
x1 := g.NewVariable(mat.NewVecDense([]mat.Float{0.5, 0.6}), true)
x2 := g.NewVariable(mat.NewVecDense([]mat.Float{0.7, -0.4}), true)
x3 := g.NewVariable(mat.NewVecDense([]mat.Float{0.0, -0.7}), true)
y := nn.Reify(ctx, model).(*Model).Forward(x1, x2, x3)
assert.InDeltaSlice(t, []mat.Float{0.0912345, -0.292413, -0.2596035}, y[0].Value().Data(), 1.0e-06)
assert.InDeltaSlice(t, []mat.Float{-0.5168655, -0.018346, -0.256866}, y[1].Value().Data(), 1.0e-06)
assert.InDeltaSlice(t, []mat.Float{0.1928965, 0.0044645, 0.2589785}, y[2].Value().Data(), 1.0e-06)
} | explode_data.jsonl/67750 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 343
} | [
2830,
3393,
1712,
39447,
84368,
1606,
1155,
353,
8840,
836,
8,
341,
19727,
1669,
501,
2271,
1712,
4346,
7239,
340,
3174,
1669,
933,
7121,
11212,
741,
20985,
1669,
10883,
9328,
90,
11212,
25,
342,
11,
14562,
25,
10883,
8240,
2056,
630,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIntList(t *testing.T) {
for idx, tc := range sharedIntCases {
t.Run(fmt.Sprintf("int/%d", idx), func(t *testing.T) {
tt := assert.WrapTB(t)
out, skip := tc.AsInts()
if skip {
t.Skip()
}
var v IntList
fs := cmdy.NewFlagSet()
fs.Var(&v, "s", "test")
err := fs.Parse(tc.in)
if tc.ok {
tt.MustOK(err)
tt.MustEqual(IntList(out), v)
} else {
tt.MustAssert(err != nil)
}
})
}
} | explode_data.jsonl/58341 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 232
} | [
2830,
3393,
1072,
852,
1155,
353,
8840,
836,
8,
341,
2023,
7187,
11,
17130,
1669,
2088,
6094,
1072,
37302,
341,
197,
3244,
16708,
28197,
17305,
445,
396,
12627,
67,
497,
7187,
701,
2915,
1155,
353,
8840,
836,
8,
341,
298,
3244,
83,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_hasCycleUsingSet(t *testing.T) {
t.Parallel()
t.Run("cycle in position 1", func(t *testing.T) {
t.Parallel()
tail := &Node{
Value: -4,
Next: nil,
}
list := &Node{
Value: 3,
Next: &Node{
Value: 2,
Next: &Node{
Value: 0,
Next: tail,
},
},
}
tail.Next = list.Next
assert.Equal(t, true, hasCycleUsingSet(list))
assert.Equal(t, true, hasCycleUsingFloydsTortoiseAndHare(list))
})
t.Run("cycle in position 0", func(t *testing.T) {
t.Parallel()
tail := &Node{
Value: 2,
Next: nil,
}
list := &Node{
Value: 1,
Next: tail,
}
tail.Next = list
assert.Equal(t, true, hasCycleUsingSet(list))
assert.Equal(t, true, hasCycleUsingFloydsTortoiseAndHare(list))
})
t.Run("no cycle", func(t *testing.T) {
t.Parallel()
list := &Node{
Value: 1,
Next: nil,
}
assert.Equal(t, false, hasCycleUsingSet(list))
assert.Equal(t, false, hasCycleUsingFloydsTortoiseAndHare(list))
})
} | explode_data.jsonl/17960 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 475
} | [
2830,
3393,
21778,
44820,
16429,
1649,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
3244,
16708,
445,
21621,
304,
2309,
220,
16,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
3244,
41288,
7957,
2822,
197,
3244,
604,
1669,
60... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestContainerSpecTty(t *testing.T) {
testID := "test-id"
testSandboxID := "sandbox-id"
testPid := uint32(1234)
containerConfig, sandboxConfig, imageConfig, specCheck := getCreateContainerTestData()
ociRuntime := config.Runtime{}
c := newTestCRIService()
for _, tty := range []bool{true, false} {
containerConfig.Tty = tty
spec, err := c.containerSpec(testID, testSandboxID, testPid, "", containerConfig, sandboxConfig, imageConfig, nil, ociRuntime)
require.NoError(t, err)
specCheck(t, testID, testSandboxID, testPid, spec)
assert.Equal(t, tty, spec.Process.Terminal)
if tty {
assert.Contains(t, spec.Process.Env, "TERM=xterm")
} else {
assert.NotContains(t, spec.Process.Env, "TERM=xterm")
}
}
} | explode_data.jsonl/6405 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 286
} | [
2830,
3393,
4502,
8327,
51,
1881,
1155,
353,
8840,
836,
8,
341,
18185,
915,
1669,
330,
1944,
12897,
698,
18185,
50,
31536,
915,
1669,
330,
76756,
12897,
698,
18185,
32339,
1669,
2622,
18,
17,
7,
16,
17,
18,
19,
340,
53290,
2648,
11,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestNodeCount(t *testing.T) {
graphSpec := types.GraphSpec{
Edges: []types.Edge{{Source: "n1", Target: "n2"}, {Source: "n1", Target: "n3"}, {Source: "n2", Target: "n4"}},
}
if NodeCount(&graphSpec) != 4 {
t.Error("")
}
} | explode_data.jsonl/62516 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 100
} | [
2830,
3393,
1955,
2507,
1155,
353,
8840,
836,
8,
341,
66616,
8327,
1669,
4494,
40237,
8327,
515,
197,
197,
41122,
25,
3056,
9242,
13,
11656,
2979,
3608,
25,
330,
77,
16,
497,
13483,
25,
330,
77,
17,
14345,
314,
3608,
25,
330,
77,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestTiernanSimpleCyclesInto(t *testing.T) {
g := graph.NewSimpleDirectedGraphWithFeatures(graph.DeterministicIteration)
g.AddVertex("a")
g.AddVertex("b")
g.AddVertex("c")
g.Connect("a", "b")
g.Connect("b", "c")
g.Connect("c", "a")
var cycles [][]string
assert.NotPanics(t, func() {
TiernanSimpleCyclesOf(g).CyclesInto(&cycles)
})
assert.Equal(t, [][]string{{"a", "b", "c"}}, cycles)
} | explode_data.jsonl/67942 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 173
} | [
2830,
3393,
45351,
932,
276,
16374,
34,
15805,
26591,
1155,
353,
8840,
836,
8,
341,
3174,
1669,
4771,
7121,
16374,
92669,
11212,
2354,
21336,
24312,
909,
16483,
4532,
53101,
340,
3174,
1904,
8320,
445,
64,
1138,
3174,
1904,
8320,
445,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFullParseJWE(t *testing.T) {
// Messages that should succeed to parse
successes := []string{
// Flattened serialization, single recipient
"{\"protected\":\"eyJhbGciOiJYWVoiLCJlbmMiOiJYWVoifQo\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
// Unflattened serialization, single recipient
"{\"protected\":\"\",\"unprotected\":{\"enc\":\"XYZ\"},\"recipients\":[{\"header\":{\"alg\":\"XYZ\"},\"encrypted_key\":\"QUJD\"}],\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
}
for i := range successes {
_, err := ParseEncrypted(successes[i])
if err != nil {
t.Error("Unble to parse valid message", err, successes[i])
}
}
// Messages that should fail to parse
failures := []string{
// Empty
"{}",
// Invalid JSON
"{XX",
// Invalid protected header
"{\"protected\":\"###\"}",
// Invalid protected header
"{\"protected\":\"e1gK\"}",
// Invalid encrypted key
"{\"protected\":\"e30\",\"encrypted_key\":\"###\"}",
// Invalid IV
"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"###\"}",
// Invalid ciphertext
"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"###\"}",
// Invalid tag
"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"###\"}",
// Invalid AAD
"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\",\"aad\":\"###\"}",
// Missing alg/enc headers
"{\"protected\":\"e30\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
// Missing enc header
"{\"protected\":\"eyJhbGciOiJYWVoifQ\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
// Missing alg header
"{\"protected\":\"eyJlbmMiOiJYWVoifQ\",\"encrypted_key\":\"QUJD\",\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
// Unflattened serialization, single recipient, invalid encrypted_key
"{\"protected\":\"\",\"recipients\":[{\"header\":{\"alg\":\"XYZ\", \"enc\":\"XYZ\"},\"encrypted_key\":\"###\"}],\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
// Unflattened serialization, single recipient, missing alg
"{\"protected\":\"eyJhbGciOiJYWVoifQ\",\"recipients\":[{\"encrypted_key\":\"QUJD\"}],\"iv\":\"QUJD\",\"ciphertext\":\"QUJD\",\"tag\":\"QUJD\"}",
}
for i := range failures {
_, err := ParseEncrypted(failures[i])
if err == nil {
t.Error("Able to parse invalid message", err, failures[i])
}
}
} | explode_data.jsonl/14776 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1057
} | [
2830,
3393,
9432,
14463,
41,
12457,
1155,
353,
8840,
836,
8,
972,
197,
322,
26729,
429,
1265,
11996,
311,
4715,
319,
30553,
288,
1669,
3056,
917,
1666,
197,
197,
322,
2988,
1587,
6758,
47443,
11,
3175,
21713,
319,
197,
197,
14129,
210... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestGetFileRef(t *testing.T) {
ts := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
t.Errorf("Bad method: %s", r.Method)
}
if r.URL.Path != "/repos/k8s/kuber/contents/foo/bar.txt" {
t.Errorf("Bad request path: %s", r.URL)
}
if r.URL.RawQuery != "ref=12345" {
t.Errorf("Bad request query: %s", r.URL.RawQuery)
}
c := &Content{
Content: base64.StdEncoding.EncodeToString([]byte("abcde")),
}
b, err := json.Marshal(&c)
if err != nil {
t.Fatalf("Didn't expect error: %v", err)
}
fmt.Fprint(w, string(b))
}))
defer ts.Close()
c := getClient(ts.URL)
if content, err := c.GetFile("k8s", "kuber", "foo/bar.txt", "12345"); err != nil {
t.Errorf("Didn't expect error: %v", err)
} else if string(content) != "abcde" {
t.Errorf("Wrong content -- expect: abcde, got: %s", string(content))
}
} | explode_data.jsonl/6282 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 406
} | [
2830,
3393,
1949,
1703,
3945,
1155,
353,
8840,
836,
8,
341,
57441,
1669,
54320,
70334,
7121,
13470,
1220,
2836,
19886,
89164,
18552,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
341,
197,
743,
435,
20798,
961,
1758,
20798,
1949,
341,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestSignHeader(t *testing.T) {
//create only what we need to test the seal
var testRaftId uint16 = 5
config := &node.Config{Name: "unit-test", DataDir: ""}
nodeKey := config.NodeKey()
raftProtocolManager := &ProtocolManager{raftId: testRaftId}
raftService := &RaftService{nodeKey: nodeKey, raftProtocolManager: raftProtocolManager}
minter := minter{eth: raftService}
//create some fake header to sign
fakeParentHash := common.HexToHash("0xc2c1dc1be8054808c69e06137429899d")
header := &types.Header{
ParentHash: fakeParentHash,
Number: big.NewInt(1),
Difficulty: big.NewInt(1),
GasLimit: uint64(0),
GasUsed: uint64(0),
Coinbase: minter.coinbase,
Time: uint64(time.Now().UnixNano()),
}
headerHash := header.Hash()
extraDataBytes := minter.buildExtraSeal(headerHash)
var seal *extraSeal
err := rlp.DecodeBytes(extraDataBytes[:], &seal)
if err != nil {
t.Fatalf("Unable to decode seal: %s", err.Error())
}
// Check raftId
sealRaftId, err := hexutil.DecodeUint64("0x" + string(seal.RaftId)) //add the 0x prefix
if err != nil {
t.Errorf("Unable to get RaftId: %s", err.Error())
}
if sealRaftId != uint64(testRaftId) {
t.Errorf("RaftID does not match. Expected: %d, Actual: %d", testRaftId, sealRaftId)
}
//Identify who signed it
sig := seal.Signature
pubKey, err := crypto.SigToPub(headerHash.Bytes(), sig)
if err != nil {
t.Fatalf("Unable to get public key from signature: %s", err.Error())
}
//Compare derived public key to original public key
if pubKey.X.Cmp(nodeKey.X) != 0 {
t.Errorf("Signature incorrect!")
}
} | explode_data.jsonl/13615 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 623
} | [
2830,
3393,
7264,
4047,
1155,
353,
8840,
836,
8,
341,
197,
322,
3182,
1172,
1128,
582,
1184,
311,
1273,
279,
25349,
198,
2405,
1273,
55535,
723,
764,
2622,
16,
21,
284,
220,
20,
198,
25873,
1669,
609,
3509,
10753,
63121,
25,
330,
38... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestReposService_List(t *testing.T) {
var s repos
ctx := testContext()
wantRepos := []*types.Repo{
{Name: "r1"},
{Name: "r2"},
}
calledList := db.Mocks.Repos.MockList(t, "r1", "r2")
repos, err := s.List(ctx, db.ReposListOptions{})
if err != nil {
t.Fatal(err)
}
if !*calledList {
t.Error("!calledList")
}
if !reflect.DeepEqual(repos, wantRepos) {
t.Errorf("got %+v, want %+v", repos, wantRepos)
}
} | explode_data.jsonl/77406 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 202
} | [
2830,
3393,
693,
966,
1860,
27104,
1155,
353,
8840,
836,
8,
341,
2405,
274,
45774,
198,
20985,
1669,
1273,
1972,
2822,
50780,
693,
966,
1669,
29838,
9242,
2817,
5368,
515,
197,
197,
63121,
25,
330,
81,
16,
7115,
197,
197,
63121,
25,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestGrep(t *testing.T) {
u := gptest.NewUnitTester(t)
defer u.Remove()
ctx := context.Background()
ctx = ctxutil.WithAlwaysYes(ctx, true)
ctx = ctxutil.WithInteractive(ctx, false)
act, err := newMock(ctx, u)
require.NoError(t, err)
require.NotNil(t, act)
buf := &bytes.Buffer{}
out.Stdout = buf
defer func() {
out.Stdout = os.Stdout
}()
c := gptest.CliCtx(ctx, t, "foo")
t.Run("empty store", func(t *testing.T) {
defer buf.Reset()
assert.NoError(t, act.Grep(c))
})
t.Run("add some secret", func(t *testing.T) {
defer buf.Reset()
sec := &secrets.Plain{}
sec.SetPassword("foobar")
sec.WriteString("foobar")
assert.NoError(t, act.Store.Set(ctx, "foo", sec))
})
t.Run("should find existing", func(t *testing.T) {
defer buf.Reset()
assert.NoError(t, act.Grep(c))
})
t.Run("RE2", func(t *testing.T) {
defer buf.Reset()
c := gptest.CliCtxWithFlags(ctx, t, map[string]string{"regexp": "true"}, "f..bar")
assert.NoError(t, act.Grep(c))
})
} | explode_data.jsonl/60983 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 451
} | [
2830,
3393,
38,
9995,
1155,
353,
8840,
836,
8,
341,
10676,
1669,
342,
70334,
7121,
4562,
58699,
1155,
340,
16867,
575,
13270,
2822,
20985,
1669,
2266,
19047,
741,
20985,
284,
5635,
1314,
26124,
37095,
9454,
7502,
11,
830,
340,
20985,
28... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAsyncProducer(t *testing.T) {
// the default for producers is a fire-and-forget model that doesn't return
// successes
t.Run("Without Successes", func(t *testing.T) {
t.Skip("Skipping test because sarama.MockBroker doesn't work with versions >= sarama.V0_11_0_0 " +
"https://github.com/Shopify/sarama/issues/1665")
mt := mocktracer.Start()
defer mt.Stop()
broker := newMockBroker(t)
cfg := sarama.NewConfig()
cfg.Version = sarama.V0_11_0_0
producer, err := sarama.NewAsyncProducer([]string{broker.Addr()}, cfg)
if err != nil {
t.Fatal(err)
}
producer = WrapAsyncProducer(nil, producer)
msg1 := &sarama.ProducerMessage{
Topic: "my_topic",
Value: sarama.StringEncoder("test 1"),
}
producer.Input() <- msg1
waitForSpans(mt, 1, time.Second*10)
spans := mt.FinishedSpans()
assert.Len(t, spans, 1)
{
s := spans[0]
assert.Equal(t, "kafka", s.Tag(ext.ServiceName))
assert.Equal(t, "queue", s.Tag(ext.SpanType))
assert.Equal(t, "Produce Topic my_topic", s.Tag(ext.ResourceName))
assert.Equal(t, "kafka.produce", s.OperationName())
assert.Equal(t, int32(0), s.Tag("partition"))
assert.Equal(t, int64(0), s.Tag("offset"))
}
})
t.Run("With Successes", func(t *testing.T) {
t.Skip("Skipping test because sarama.MockBroker doesn't work with versions >= sarama.V0_11_0_0 " +
"https://github.com/Shopify/sarama/issues/1665")
mt := mocktracer.Start()
defer mt.Stop()
broker := newMockBroker(t)
cfg := sarama.NewConfig()
cfg.Version = sarama.V0_11_0_0
cfg.Producer.Return.Successes = true
producer, err := sarama.NewAsyncProducer([]string{broker.Addr()}, cfg)
if err != nil {
t.Fatal(err)
}
producer = WrapAsyncProducer(cfg, producer)
msg1 := &sarama.ProducerMessage{
Topic: "my_topic",
Value: sarama.StringEncoder("test 1"),
}
producer.Input() <- msg1
<-producer.Successes()
spans := mt.FinishedSpans()
assert.Len(t, spans, 1)
{
s := spans[0]
assert.Equal(t, "kafka", s.Tag(ext.ServiceName))
assert.Equal(t, "queue", s.Tag(ext.SpanType))
assert.Equal(t, "Produce Topic my_topic", s.Tag(ext.ResourceName))
assert.Equal(t, "kafka.produce", s.OperationName())
assert.Equal(t, int32(0), s.Tag("partition"))
assert.Equal(t, int64(0), s.Tag("offset"))
}
})
} | explode_data.jsonl/28006 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1002
} | [
2830,
3393,
6525,
45008,
1155,
353,
8840,
836,
8,
341,
197,
322,
279,
1638,
369,
23308,
374,
264,
3940,
9777,
15193,
455,
1614,
429,
3171,
944,
470,
198,
197,
322,
47088,
198,
3244,
16708,
445,
26040,
13047,
288,
497,
2915,
1155,
353,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestExecErrorsWhenTheSpecifiedCommandDoesNotExist(t *testing.T) {
t.Parallel()
p := script.Exec("doesntexist")
p.Wait()
if p.Error() == nil {
t.Error("want error running non-existent command")
}
} | explode_data.jsonl/51501 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 77
} | [
2830,
3393,
10216,
13877,
4498,
785,
8327,
1870,
4062,
21468,
45535,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
3223,
1669,
5316,
30798,
445,
27057,
406,
28575,
1138,
3223,
28384,
741,
743,
281,
6141,
368,
621,
2092,
341,
197... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func Test_Pagination_PrevNext_PrevAnd1NextLink(t *testing.T) {
doc := testutil.CreateHTML()
body := dom.QuerySelector(doc, "body")
root := testutil.CreateDiv(0)
dom.AppendChild(body, root)
prevAnchor := testutil.CreateAnchor("prev", "prev page")
nextAnchor := testutil.CreateAnchor("page2", "next page")
dom.AppendChild(root, prevAnchor)
dom.AppendChild(root, nextAnchor)
assertDefaultDocumenOutlink(t, doc, prevAnchor, nextAnchor)
} | explode_data.jsonl/10829 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 170
} | [
2830,
3393,
1088,
10353,
1088,
7282,
5847,
1088,
7282,
3036,
16,
5847,
3939,
1155,
353,
8840,
836,
8,
341,
59536,
1669,
1273,
1314,
7251,
5835,
741,
35402,
1669,
4719,
15685,
5877,
19153,
11,
330,
2599,
5130,
33698,
1669,
1273,
1314,
72... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUnmarshalPublish(t *testing.T) {
testCases := map[string]struct {
inContent string
wantedPublish PublishConfig
wantedErr error
}{
"Valid publish yaml": {
inContent: `topics:
- name: tests
allowed_workers:
- hello
`,
wantedPublish: PublishConfig{
Topics: []Topic{
{
Name: aws.String("tests"),
AllowedWorkers: []string{"hello"},
},
},
},
},
"Empty workers don't appear in topic": {
inContent: `topics:
- name: tests
`,
wantedPublish: PublishConfig{
Topics: []Topic{
{
Name: aws.String("tests"),
},
},
},
},
"Error when unmarshalable": {
inContent: `topics:
- name: tests
allowed_workers:
- hello
- name: orders
`,
wantedErr: errors.New("yaml: line 1: did not find expected '-' indicator"),
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
p := PublishConfig{}
err := yaml.Unmarshal([]byte(tc.inContent), &p)
if tc.wantedErr != nil {
require.EqualError(t, err, tc.wantedErr.Error())
} else {
require.NoError(t, err)
require.Equal(t, tc.wantedPublish, p)
}
})
}
} | explode_data.jsonl/79738 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 551
} | [
2830,
3393,
1806,
27121,
50145,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
2415,
14032,
60,
1235,
341,
197,
17430,
2762,
257,
914,
198,
197,
6692,
7566,
50145,
23499,
2648,
198,
197,
6692,
7566,
7747,
257,
1465,
198,
197,
59403,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestAppSdkVersionByPartition(t *testing.T) {
testJavaError(t, "sdk_version must have a value when the module is located at vendor or product", `
android_app {
name: "foo",
srcs: ["a.java"],
vendor: true,
platform_apis: true,
}
`)
testJava(t, `
android_app {
name: "bar",
srcs: ["b.java"],
platform_apis: true,
}
`)
for _, enforce := range []bool{true, false} {
bp := `
android_app {
name: "foo",
srcs: ["a.java"],
product_specific: true,
platform_apis: true,
}
`
errorHandler := android.FixtureExpectsNoErrors
if enforce {
errorHandler = android.FixtureExpectsAtLeastOneErrorMatchingPattern("sdk_version must have a value when the module is located at vendor or product")
}
android.GroupFixturePreparers(
PrepareForTestWithJavaDefaultModules,
android.FixtureModifyProductVariables(func(variables android.FixtureProductVariables) {
variables.EnforceProductPartitionInterface = proptools.BoolPtr(enforce)
}),
).
ExtendWithErrorHandler(errorHandler).
RunTestWithBp(t, bp)
}
} | explode_data.jsonl/58490 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 427
} | [
2830,
3393,
2164,
57175,
5637,
1359,
49978,
1155,
353,
8840,
836,
8,
341,
18185,
15041,
1454,
1155,
11,
330,
51295,
9438,
1969,
614,
264,
897,
979,
279,
4688,
374,
7407,
518,
20728,
476,
1985,
497,
22074,
197,
197,
5954,
8191,
341,
29... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestContext2Plan_taintIgnoreChanges(t *testing.T) {
m := testModule(t, "plan-taint-ignore-changes")
p := testProvider("aws")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
"vars": {Type: cty.String, Optional: true},
"type": {Type: cty.String, Computed: true},
},
},
},
}
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"vars": "foo",
"type": "aws_instance",
},
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
schema := p.GetSchemaReturn.ResourceTypes["aws_instance"]
ty := schema.ImpliedType()
if len(plan.Changes.Resources) != 1 {
t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
}
for _, res := range plan.Changes.Resources {
ric, err := res.Decode(ty)
if err != nil {
t.Fatal(err)
}
switch i := ric.Addr.String(); i {
case "aws_instance.foo":
if res.Action != plans.DeleteThenCreate {
t.Fatalf("resource %s should be replaced", i)
}
checkVals(t, objectVal(t, schema, map[string]cty.Value{
"id": cty.StringVal("foo"),
"vars": cty.StringVal("foo"),
"type": cty.StringVal("aws_instance"),
}), ric.Before)
checkVals(t, objectVal(t, schema, map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"vars": cty.StringVal("foo"),
"type": cty.StringVal("aws_instance"),
}), ric.After)
default:
t.Fatal("unknown instance:", i)
}
}
} | explode_data.jsonl/28701 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 959
} | [
2830,
3393,
1972,
17,
20485,
528,
1641,
12497,
11317,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
1273,
3332,
1155,
11,
330,
10393,
2385,
1641,
43171,
11582,
5520,
1138,
3223,
1669,
1273,
5179,
445,
8635,
1138,
3223,
2234,
8632,
5598,
284... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestDockerPullImage(t *testing.T) {
image := "test_image"
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "pull", image).Return(bytes.Buffer{}, nil)
d := executables.NewDocker(executable)
err := d.PullImage(ctx, image)
if err != nil {
t.Fatalf("Docker.PullImage() error = %v, want nil", err)
}
} | explode_data.jsonl/6802 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 159
} | [
2830,
3393,
35,
13659,
36068,
1906,
1155,
353,
8840,
836,
8,
341,
31426,
1669,
330,
1944,
4954,
1837,
20985,
1669,
2266,
19047,
741,
77333,
15001,
1669,
342,
316,
1176,
7121,
2051,
1155,
692,
67328,
5922,
1669,
7860,
11748,
332,
4788,
7... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestReadArchiveMultipleUpdates(t *testing.T) {
// first create archive, that we will be able to read
updateTestDir, _ := ioutil.TempDir("", "update")
defer os.RemoveAll(updateTestDir)
archive, err := WriteRootfsImageArchive(updateTestDir, RootfsImageStructMultiple)
assert.NoError(t, err)
assert.NotEqual(t, "", archive)
// open archive file
f, err := os.Open(archive)
defer f.Close()
assert.NoError(t, err)
assert.NotNil(t, f)
aReader := NewReader(f)
p, err := aReader.Read()
assert.NoError(t, err)
assert.Len(t, p, 2)
} | explode_data.jsonl/35295 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 205
} | [
2830,
3393,
4418,
42502,
32089,
37091,
1155,
353,
8840,
836,
8,
341,
197,
322,
1156,
1855,
18132,
11,
429,
582,
686,
387,
2952,
311,
1349,
198,
27175,
2271,
6184,
11,
716,
1669,
43144,
65009,
6184,
19814,
330,
2386,
1138,
16867,
2643,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNodeMapUnmarshalYAML(t *testing.T) {
var tests = []struct {
name string
mockUnmarshal func(interface{}) error
expected NodeMap
wantErr bool
}{
{
name: "ValidUnmarshal",
mockUnmarshal: func(v interface{}) error {
result, ok := v.(*map[string]string)
if !ok {
return errors.New("invalid input type")
}
(*result)[nodeEndpoint1] = nodeAccountId1.String()
(*result)[nodeEndpoint2] = nodeAccountId2.String()
(*result)[nodeEndpoint3] = nodeAccountId3.String()
return nil
},
expected: NodeMap{
nodeEndpoint1: nodeAccountId1,
nodeEndpoint2: nodeAccountId2,
nodeEndpoint3: nodeAccountId3,
},
wantErr: false,
},
{
name: "UnmarshalError",
mockUnmarshal: func(v interface{}) error {
return errors.New("unknown error")
},
expected: nil,
wantErr: true,
},
{
name: "InvalidAccountIDString",
mockUnmarshal: func(v interface{}) error {
result, ok := v.(*map[string]string)
if !ok {
return errors.New("invalid input type")
}
(*result)[nodeEndpoint1] = "0.a.3"
(*result)[nodeEndpoint2] = "x"
(*result)[nodeEndpoint3] = "10"
return nil
},
expected: nil,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
nodeMap := make(NodeMap)
err := nodeMap.UnmarshalYAML(tt.mockUnmarshal)
if tt.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
assert.EqualValues(t, tt.expected, nodeMap)
}
})
}
} | explode_data.jsonl/66731 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 720
} | [
2830,
3393,
1955,
2227,
1806,
27121,
56,
31102,
1155,
353,
8840,
836,
8,
341,
2405,
7032,
284,
3056,
1235,
341,
197,
11609,
688,
914,
198,
197,
77333,
1806,
27121,
2915,
75487,
28875,
1465,
198,
197,
42400,
414,
6018,
2227,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestWriteConcernWithOptions(t *testing.T) {
t.Run("on nil WriteConcern", func(t *testing.T) {
var wc *writeconcern.WriteConcern
wc = wc.WithOptions(writeconcern.WMajority())
require.Equal(t, wc.GetW().(string), "majority")
})
t.Run("on existing WriteConcern", func(t *testing.T) {
wc := writeconcern.New(writeconcern.W(1), writeconcern.J(true))
require.Equal(t, wc.GetW().(int), 1)
require.Equal(t, wc.GetJ(), true)
wc = wc.WithOptions(writeconcern.WMajority())
require.Equal(t, wc.GetW().(string), "majority")
require.Equal(t, wc.GetJ(), true)
})
t.Run("with multiple options", func(t *testing.T) {
wc := writeconcern.New(writeconcern.W(1), writeconcern.J(true))
require.Equal(t, wc.GetW().(int), 1)
require.Equal(t, wc.GetJ(), true)
require.Equal(t, wc.GetWTimeout(), time.Duration(0))
wc = wc.WithOptions(writeconcern.WMajority(), writeconcern.WTimeout(time.Second))
require.Equal(t, wc.GetW().(string), "majority")
require.Equal(t, wc.GetJ(), true)
require.Equal(t, wc.GetWTimeout(), time.Second)
})
} | explode_data.jsonl/44437 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 452
} | [
2830,
3393,
7985,
62142,
74238,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
263,
2092,
9645,
62142,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
2405,
26548,
353,
4934,
443,
28544,
4073,
62142,
271,
197,
88858,
284,
26548,
26124,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCapability_FormatToCapability(t *testing.T) {
assert := assert.New(t)
// Ensure all ffmpeg-enumerated formats are represented during conversion
for _, format := range ffmpeg.ExtensionFormats {
_, err := formatToCapability(format)
assert.Nil(err)
}
// ensure error is triggered for unrepresented values
c, err := formatToCapability(-100)
assert.Equal(Capability_Invalid, c)
assert.Equal(capFormatConv, err)
} | explode_data.jsonl/74082 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 140
} | [
2830,
3393,
63746,
72999,
1249,
63746,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
340,
197,
322,
29279,
678,
84912,
20767,
3389,
657,
19856,
525,
15251,
2337,
14409,
198,
2023,
8358,
3561,
1669,
2088,
84912,
59715,
44599,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestPostXappPush(t *testing.T) {
var tests = []struct {
description string
request models.XappRequest
jsonRequest string
jsonResponse string
expectedOutput *models.XappResponse
expectedError error
httpStatusCode int
}{
{
description: "successfully create push",
request: models.XappRequest{
UserToken: "token",
Subtitle: "subtitle",
Body: "body",
Data: anyjson.AnyJson{
"test_json": "TestJson",
"integer": 3,
"float64": float64(1.2),
},
},
jsonRequest: `{
"user_token": "token",
"subtitle": "subtitle",
"body": "body",
"data": {
"test_json": "TestJson",
"integer": 3,
"float64": 1.2
}
}`,
jsonResponse: `{
"pushed": true
}`,
expectedOutput: &models.XappResponse{Pushed: true},
expectedError: nil,
httpStatusCode: 200,
},
{
description: "invalid request body",
request: models.XappRequest{
UserToken: "",
Subtitle: "",
Body: "body",
Data: anyjson.AnyJson{},
},
jsonRequest: "",
jsonResponse: "",
expectedOutput: nil,
expectedError: &invalidPushRequestError{},
httpStatusCode: 0,
},
{
description: "error creating push",
request: models.XappRequest{
UserToken: "token",
Subtitle: "subtitle",
Body: "body",
Data: anyjson.AnyJson{"test_json": "TestJson"},
},
jsonRequest: `{
"user_token": "token",
"subtitle": "subtitle",
"body": "body"
}`,
jsonResponse: `{
"error": {
"reference": "42d58b17-ee92-419d-b8ec-15797d10c4ed",
"code": 400
}
}`,
expectedOutput: nil,
expectedError: &xumm.ErrorResponse{ErrorResponseBody: xumm.ErrorResponseBody{Reference: "42d58b17-ee92-419d-b8ec-15797d10c4ed", Code: 400}},
httpStatusCode: 400,
},
{
description: "test request body serialisation when body is empty",
request: models.XappRequest{
UserToken: "token",
Subtitle: "subtitle",
Data: anyjson.AnyJson{},
},
jsonRequest: `{
"user_token": "token",
"subtitle": "subtitle"
}`,
jsonResponse: `{
"pushed": true
}`,
expectedOutput: &models.XappResponse{Pushed: true},
expectedError: nil,
httpStatusCode: 200,
},
}
for _, tt := range tests {
t.Run(tt.description, func(t *testing.T) {
m := &testutils.MockClient{}
m.DoFunc = testutils.MockResponse(tt.jsonResponse, tt.httpStatusCode, m)
cfg, err := xumm.NewConfig(xumm.WithHttpClient(m), xumm.WithAuth("testApiKey", "testApiSecret"))
assert.NoError(t, err)
xapp := &Xapp{
Cfg: cfg,
}
xp, err := xapp.PostXappPush(tt.request)
if tt.expectedError != nil {
assert.Nil(t, xp)
assert.Error(t, err)
assert.EqualError(t, err, tt.expectedError.Error())
} else {
body, _ := ioutil.ReadAll(m.Spy.Body)
assert.JSONEq(t, tt.jsonRequest, string(body))
assert.Equal(t, xp, tt.expectedOutput)
assert.Equal(t, http.Header{
"X-API-Key": {"testApiKey"},
"X-API-Secret": {"testApiSecret"},
"Content-Type": {"application/json"},
}, m.Spy.Header)
assert.NoError(t, err)
}
})
}
} | explode_data.jsonl/67192 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1490
} | [
2830,
3393,
4133,
55,
676,
16644,
1155,
353,
8840,
836,
8,
1476,
2405,
7032,
284,
3056,
1235,
341,
197,
42407,
262,
914,
198,
197,
23555,
286,
4119,
4338,
676,
1900,
198,
197,
30847,
1900,
262,
914,
198,
197,
30847,
2582,
256,
914,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestExpansionsInHighlevelConfig(t *testing.T) {
camroot, err := osutil.GoPackagePath("camlistore.org")
if err != nil {
t.Fatalf("failed to find camlistore.org GOPATH root: %v", err)
}
const keyID = "26F5ABDA"
os.Setenv("TMP_EXPANSION_TEST", keyID)
os.Setenv("TMP_EXPANSION_SECRING", filepath.Join(camroot, filepath.FromSlash("pkg/jsonsign/testdata/test-secring.gpg")))
conf, err := serverinit.Load([]byte(`
{
"auth": "localhost",
"listen": ":4430",
"https": false,
"identity": ["_env", "${TMP_EXPANSION_TEST}"],
"identitySecretRing": ["_env", "${TMP_EXPANSION_SECRING}"],
"googlecloudstorage": ":camlistore-dev-blobs",
"kvIndexFile": "/tmp/camli-index.kvdb"
}
`))
if err != nil {
t.Fatal(err)
}
got := fmt.Sprintf("%#v", conf)
if !strings.Contains(got, keyID) {
t.Errorf("Expected key %s in resulting low-level config. Got: %s", keyID, got)
}
} | explode_data.jsonl/9279 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 390
} | [
2830,
3393,
8033,
596,
908,
641,
11976,
3294,
2648,
1155,
353,
8840,
836,
8,
341,
1444,
309,
2888,
11,
1848,
1669,
2643,
1314,
67131,
13100,
1820,
445,
11599,
1607,
460,
2659,
1138,
743,
1848,
961,
2092,
341,
197,
3244,
30762,
445,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestPropertyDrop(t *testing.T) {
// GIVEN
graphName := "mygraph"
g := NewGraph(graphName)
require.NotNil(t, g)
v := NewVertexG(g)
require.NotNil(t, v)
p := NewPropertyV(v)
require.NotNil(t, p)
// WHEN
qb := p.Drop()
// THEN
assert.NotNil(t, qb)
assert.Equal(t, fmt.Sprintf("%s.drop()", graphName), p.String())
} | explode_data.jsonl/38215 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 153
} | [
2830,
3393,
3052,
19871,
1155,
353,
8840,
836,
8,
1476,
197,
322,
89836,
198,
66616,
675,
1669,
330,
2408,
4439,
698,
3174,
1669,
1532,
11212,
24312,
675,
340,
17957,
93882,
1155,
11,
342,
340,
5195,
1669,
1532,
8320,
38,
3268,
340,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClearAll(t *testing.T) {
tt := test.Start(t).Scenario("kahuna")
defer tt.Finish()
is := sys(tt)
err := is.ClearAll()
tt.Require.NoError(err)
// ensure no ledgers
var found int
err = tt.HorizonSession().GetRaw(&found, "SELECT COUNT(*) FROM history_ledgers")
tt.Require.NoError(err)
tt.Assert.Equal(0, found)
} | explode_data.jsonl/69625 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 135
} | [
2830,
3393,
14008,
2403,
1155,
353,
8840,
836,
8,
341,
3244,
83,
1669,
1273,
12101,
1155,
568,
54031,
445,
83502,
8565,
1138,
16867,
17853,
991,
18176,
741,
19907,
1669,
5708,
47152,
692,
9859,
1669,
374,
13524,
2403,
2822,
3244,
83,
81... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestWithHTTPAuth(t *testing.T) {
t.Parallel()
Convey("Given an auth provider and dialer", t, func() {
dialer := &mockDialerStruct{}
Convey("And Dial is called with username and password", func() {
_, err := mockDial(dialer, WithHTTPAuth(func(request *http.Request) error {
return nil
}))
Convey("Then no error should be encountered", func() {
So(err, ShouldBeNil)
})
})
})
} | explode_data.jsonl/53407 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 160
} | [
2830,
3393,
2354,
9230,
5087,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
93070,
5617,
445,
22043,
458,
4166,
9109,
323,
27860,
261,
497,
259,
11,
2915,
368,
341,
197,
2698,
530,
261,
1669,
609,
16712,
35,
530,
261,
9422,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestChanOf(t *testing.T) {
// check construction and use of type not in binary
type T string
ct := ChanOf(BothDir, TypeOf(T("")))
v := MakeChan(ct, 2)
runtime.GC()
v.Send(ValueOf(T("hello")))
runtime.GC()
v.Send(ValueOf(T("world")))
runtime.GC()
sv1, _ := v.Recv()
sv2, _ := v.Recv()
s1 := sv1.String()
s2 := sv2.String()
if s1 != "hello" || s2 != "world" {
t.Errorf("constructed chan: have %q, %q, want %q, %q", s1, s2, "hello", "world")
}
// check that type already in binary is found
type T1 int
checkSameType(t, Zero(ChanOf(BothDir, TypeOf(T1(1)))).Interface(), (chan T1)(nil))
} | explode_data.jsonl/29608 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 259
} | [
2830,
3393,
46019,
2124,
1155,
353,
8840,
836,
8,
341,
197,
322,
1779,
8086,
323,
990,
315,
943,
537,
304,
7868,
198,
13158,
350,
914,
198,
89216,
1669,
41302,
2124,
7,
20629,
6184,
11,
3990,
2124,
4140,
445,
29836,
5195,
1669,
7405,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestCustomer_CreatePaymentSource_Acct(t *testing.T) {
key := "test api key"
mockResponse := new(invdendpoint.BankAccount)
mockResponse.Id = int64(1234)
mockResponse.Last4 = "4242"
mockResponse.Object = "bank_account"
mockResponse.Verified = true
server, err := invdmockserver.New(200, mockResponse, "json", true)
if err != nil {
t.Fatal(err)
}
defer server.Close()
conn := mockConnection(key, server)
defaultEntity := conn.NewCustomer()
intermediate := conn.NewPaymentSource()
subjectEntity, err := defaultEntity.CreatePaymentSource(intermediate)
if err != nil {
t.Fatal("Error:", err)
}
if subjectEntity == nil {
t.Fatal("subjectEntity does not exist", err)
}
if !subjectEntity.Verified {
t.Fatal("Did not instantiate correctly", err)
}
} | explode_data.jsonl/15020 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 277
} | [
2830,
3393,
12792,
34325,
20188,
3608,
1566,
66,
302,
1155,
353,
8840,
836,
8,
341,
23634,
1669,
330,
1944,
6330,
1376,
1837,
77333,
2582,
1669,
501,
5900,
16598,
32540,
1785,
1180,
7365,
340,
77333,
2582,
6444,
284,
526,
21,
19,
7,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestModelRegister(t *testing.T) {
RegisterModel((*Model)(nil))
for k := range modelRegistry {
t.Log(k)
}
model := MakeInstance("github.com/lonelypale/goutils/database/mongodb.Model")
t.Log(model)
} | explode_data.jsonl/53025 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 83
} | [
2830,
3393,
1712,
8690,
1155,
353,
8840,
836,
8,
341,
79096,
1712,
26609,
1712,
2376,
8385,
4390,
2023,
595,
1669,
2088,
1614,
15603,
341,
197,
3244,
5247,
5969,
340,
197,
630,
19727,
1669,
7405,
2523,
445,
5204,
905,
14,
12212,
974,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestReactorBasic(t *testing.T) {
N := 4
css, cleanup := randConsensusNet(N, "consensus_reactor_test", newMockTickerFunc(true), newCounter)
defer cleanup()
reactors, blocksSubs, eventBuses := startConsensusNet(t, css, N)
defer stopConsensusNet(log.TestingLogger(), reactors, eventBuses)
// wait till everyone makes the first new block
timeoutWaitGroup(t, N, func(j int) {
<-blocksSubs[j].Out()
}, css)
} | explode_data.jsonl/7890 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 153
} | [
2830,
3393,
693,
5621,
15944,
1155,
353,
8840,
836,
8,
341,
18317,
1669,
220,
19,
198,
1444,
778,
11,
21290,
1669,
10382,
15220,
13626,
6954,
8204,
11,
330,
6254,
13626,
1288,
5621,
4452,
497,
501,
11571,
87278,
9626,
3715,
701,
501,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestImageTagInvalidReference(t *testing.T) {
client := &Client{
client: newMockClient(errorMock(http.StatusInternalServerError, "Server error")),
}
err := client.ImageTag(context.Background(), "image_id", "aa/asdf$$^/aa")
if err == nil || err.Error() != `Error parsing reference: "aa/asdf$$^/aa" is not a valid repository/tag` {
t.Fatalf("expected ErrReferenceInvalidFormat, got %v", err)
}
} | explode_data.jsonl/42538 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 139
} | [
2830,
3393,
1906,
5668,
7928,
8856,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
609,
2959,
515,
197,
25291,
25,
501,
11571,
2959,
6390,
11571,
19886,
66760,
11,
330,
5475,
1465,
30154,
197,
630,
9859,
1669,
2943,
7528,
5668,
5378,
19047,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAsValue(t *testing.T) {
intValue, ok := asValue("10").(int)
if ok != true {
t.Error()
}
if intValue != 10 {
t.Error()
}
floatValue, ok := asValue("0.12").(float64)
if ok != true {
t.Error()
}
if floatValue != 0.12 {
t.Error()
}
boolValue, ok := asValue("true").(bool)
if ok != true {
t.Error()
}
if boolValue != true {
t.Error()
}
stringValue, ok := asValue("test string").(string)
if ok != true {
t.Error()
}
if stringValue != "test string" {
t.Error()
}
} | explode_data.jsonl/13415 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 223
} | [
2830,
3393,
2121,
1130,
1155,
353,
8840,
836,
8,
341,
2084,
1130,
11,
5394,
1669,
438,
1130,
445,
16,
15,
1827,
7,
396,
340,
743,
5394,
961,
830,
341,
197,
3244,
6141,
741,
197,
532,
743,
53077,
961,
220,
16,
15,
341,
197,
3244,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestReverseProxy(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stderr)
testHeaderValue := []string{"header-value"}
testHeaders := http.Header{
"X-Header-1": testHeaderValue,
"X-Header-2": testHeaderValue,
"X-Header-3": testHeaderValue,
}
testTrailerValue := []string{"trailer-value"}
testTrailers := http.Header{
"X-Trailer-1": testTrailerValue,
"X-Trailer-2": testTrailerValue,
"X-Trailer-3": testTrailerValue,
}
verifyHeaderValues := func(actual http.Header, expected http.Header) bool {
if actual == nil {
t.Error("Expected headers")
return true
}
for k := range expected {
if expected.Get(k) != actual.Get(k) {
t.Errorf("Expected header '%s' to be proxied properly", k)
return true
}
}
return false
}
verifyHeadersTrailers := func(headers http.Header, trailers http.Header) {
if verifyHeaderValues(headers, testHeaders) || verifyHeaderValues(trailers, testTrailers) {
t.FailNow()
}
}
requestReceived := false
backend := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// read the body (even if it's empty) to make Go parse trailers
io.Copy(ioutil.Discard, r.Body)
verifyHeadersTrailers(r.Header, r.Trailer)
requestReceived = true
// Set headers.
copyHeader(w.Header(), testHeaders)
// Only announce one of the trailers to test wether
// unannounced trailers are proxied correctly.
for k := range testTrailers {
w.Header().Set("Trailer", k)
break
}
w.WriteHeader(http.StatusOK)
w.Write([]byte("Hello, client"))
// Set trailers.
shallowCopyTrailers(w.Header(), testTrailers, true)
}))
defer backend.Close()
// set up proxy
p := &Proxy{
Next: httpserver.EmptyNext, // prevents panic in some cases when test fails
Upstreams: []Upstream{newFakeUpstream(backend.URL, false, 30*time.Second)},
}
// Create the fake request body.
// This will copy "trailersToSet" to r.Trailer right before it is closed and
// thus test for us wether unannounced client trailers are proxied correctly.
body := &trailerTestStringReader{
Reader: *strings.NewReader("test"),
trailersToSet: testTrailers,
}
// Create the fake request with the above body.
r := httptest.NewRequest("GET", "/", body)
r.Trailer = make(http.Header)
body.request = r
copyHeader(r.Header, testHeaders)
// Only announce one of the trailers to test wether
// unannounced trailers are proxied correctly.
for k, v := range testTrailers {
r.Trailer[k] = v
break
}
w := httptest.NewRecorder()
p.ServeHTTP(w, r)
res := w.Result()
if !requestReceived {
t.Error("Expected backend to receive request, but it didn't")
}
verifyHeadersTrailers(res.Header, res.Trailer)
// Make sure {upstream} placeholder is set
r.Body = ioutil.NopCloser(strings.NewReader("test"))
rr := httpserver.NewResponseRecorder(testResponseRecorder{
ResponseWriterWrapper: &httpserver.ResponseWriterWrapper{ResponseWriter: httptest.NewRecorder()},
})
rr.Replacer = httpserver.NewReplacer(r, rr, "-")
p.ServeHTTP(rr, r)
if got, want := rr.Replacer.Replace("{upstream}"), backend.URL; got != want {
t.Errorf("Expected custom placeholder {upstream} to be set (%s), but it wasn't; got: %s", want, got)
}
} | explode_data.jsonl/64225 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1181
} | [
2830,
3393,
45695,
16219,
1155,
353,
8840,
836,
8,
341,
6725,
4202,
5097,
1956,
30158,
909,
47560,
340,
16867,
1487,
4202,
5097,
9638,
77319,
692,
18185,
97721,
1669,
3056,
917,
4913,
2708,
19083,
16707,
18185,
10574,
1669,
1758,
15753,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestMetadata_Offline(t *testing.T) {
t.Run("unavailable in offline mode", func(t *testing.T) {
service := APIService{
config: &configuration.Configuration{Mode: configuration.Offline},
}
resp, err := service.ConstructionMetadata(
context.Background(),
&types.ConstructionMetadataRequest{},
)
assert.Nil(t, resp)
assert.Equal(t, errors.ErrUnavailableOffline.Code, err.Code)
})
} | explode_data.jsonl/44644 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 146
} | [
2830,
3393,
14610,
94377,
1056,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
359,
10334,
304,
26166,
3856,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
52934,
1669,
10106,
95125,
515,
298,
25873,
25,
609,
21138,
17334,
90,
3636,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMetaIndexEntriesSorted(t *testing.T) {
f, err := build(DefaultCompression, nil, /* filter policy */
TableFilter, nil, nil, 4096, 4096)
require.NoError(t, err)
r, err := NewReader(f, ReaderOptions{})
require.NoError(t, err)
b, err := r.readBlock(r.metaIndexBH, nil /* transform */, nil /* attrs */)
require.NoError(t, err)
defer b.Release()
i, err := newRawBlockIter(bytes.Compare, b.Get())
require.NoError(t, err)
var keys []string
for valid := i.First(); valid; valid = i.Next() {
keys = append(keys, string(i.Key().UserKey))
}
if !sort.StringsAreSorted(keys) {
t.Fatalf("metaindex block out of order: %v", keys)
}
require.NoError(t, i.Close())
require.NoError(t, r.Close())
} | explode_data.jsonl/40343 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 276
} | [
2830,
3393,
12175,
1552,
24533,
51051,
1155,
353,
8840,
836,
8,
341,
1166,
11,
1848,
1669,
1936,
87874,
81411,
11,
2092,
11,
1391,
4051,
4842,
735,
197,
197,
2556,
5632,
11,
2092,
11,
2092,
11,
220,
19,
15,
24,
21,
11,
220,
19,
15... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestBuildInfo(t *testing.T) {
t.Parallel()
Convey("A testing BuildInfoProvider", t, func() {
c := context.Background()
c = memory.Use(c)
c = rawpresentation.InjectFakeLogdogClient(c, fakelogs.NewClient())
testSvc := testSwarmingService{
host: "swarming.example.com",
req: swarming.SwarmingRpcsTaskRequest{
Properties: &swarming.SwarmingRpcsTaskProperties{
Command: []string{"kitchen", "foo", "bar", "-logdog-project", "testproject", "baz"},
},
Tags: []string{
"allow_milo:1",
},
},
res: swarming.SwarmingRpcsTaskResult{
TaskId: "12340",
State: TaskRunning,
Tags: []string{
"allow_milo:1",
"foo:1",
"bar:2",
},
TryNumber: 1,
},
}
bip := BuildInfoProvider{
swarmingServiceFunc: func(context.Context, string) (SwarmingService, error) {
return &testSvc, nil
},
}
biReq := milo.BuildInfoRequest{
Build: &milo.BuildInfoRequest_Swarming_{
Swarming: &milo.BuildInfoRequest_Swarming{
Task: "12340",
},
},
}
Convey("Will fail to load a non-Kitchen build.", func() {
testSvc.req.Properties.Command = []string{"not", "kitchen"}
_, err := bip.GetBuildInfo(c, biReq.GetSwarming(), "")
So(err, ShouldBeRPCNotFound)
})
Convey("Will fail to load Kitchen without LogDog and no project hint.", func() {
testSvc.req.Properties.Command = []string{"kitchen"}
_, err := bip.GetBuildInfo(c, biReq.GetSwarming(), "")
So(err, ShouldBeRPCNotFound)
})
})
} | explode_data.jsonl/51974 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 642
} | [
2830,
3393,
11066,
1731,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
93070,
5617,
445,
32,
7497,
7854,
1731,
5179,
497,
259,
11,
2915,
368,
341,
197,
1444,
1669,
2266,
19047,
741,
197,
1444,
284,
4938,
9046,
1337,
340,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLockRESTlient(t *testing.T) {
endpoint, err := NewEndpoint("http://localhost:9000")
if err != nil {
t.Fatalf("unexpected error %v", err)
}
lkClient := newlockRESTClient(endpoint)
if !lkClient.IsOnline() {
t.Fatalf("unexpected error. connection failed")
}
// Attempt all calls.
_, err = lkClient.RLock(context.Background(), dsync.LockArgs{})
if err == nil {
t.Fatal("Expected for Rlock to fail")
}
_, err = lkClient.Lock(context.Background(), dsync.LockArgs{})
if err == nil {
t.Fatal("Expected for Lock to fail")
}
_, err = lkClient.RUnlock(dsync.LockArgs{})
if err == nil {
t.Fatal("Expected for RUnlock to fail")
}
_, err = lkClient.Unlock(dsync.LockArgs{})
if err == nil {
t.Fatal("Expected for Unlock to fail")
}
} | explode_data.jsonl/72403 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 304
} | [
2830,
3393,
11989,
38307,
1451,
1155,
353,
8840,
836,
8,
341,
6246,
2768,
11,
1848,
1669,
1532,
27380,
445,
1254,
1110,
8301,
25,
24,
15,
15,
15,
1138,
743,
1848,
961,
2092,
341,
197,
3244,
30762,
445,
53859,
1465,
1018,
85,
497,
18... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestService_Serialize(t *testing.T) {
t.Run(
"when 'rawArg' is specified, it returns it serialized to base64",
func(t *testing.T) {
rawArg := []byte("foobar")
svc := NewBase64Service()
actualReturn, actualErr := svc.Serialize(rawArg)
require.Nil(t, actualErr)
expectedReturn := []byte("Zm9vYmFy")
assert.Equal(t, expectedReturn, actualReturn)
},
)
} | explode_data.jsonl/15602 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 158
} | [
2830,
3393,
1860,
1098,
9050,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
1006,
197,
197,
1,
9309,
364,
1041,
2735,
6,
374,
5189,
11,
432,
4675,
432,
32916,
311,
2331,
21,
19,
756,
197,
29244,
1155,
353,
8840,
836,
8,
341,
298,
765... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDescribeResourceQuota(t *testing.T) {
fake := fake.NewSimpleClientset(&api.ResourceQuota{
ObjectMeta: metav1.ObjectMeta{
Name: "bar",
Namespace: "foo",
},
Status: api.ResourceQuotaStatus{
Hard: api.ResourceList{
api.ResourceName(api.ResourceCPU): resource.MustParse("1"),
api.ResourceName(api.ResourceLimitsCPU): resource.MustParse("2"),
api.ResourceName(api.ResourceLimitsMemory): resource.MustParse("2G"),
api.ResourceName(api.ResourceMemory): resource.MustParse("1G"),
api.ResourceName(api.ResourceRequestsCPU): resource.MustParse("1"),
api.ResourceName(api.ResourceRequestsMemory): resource.MustParse("1G"),
},
Used: api.ResourceList{
api.ResourceName(api.ResourceCPU): resource.MustParse("0"),
api.ResourceName(api.ResourceLimitsCPU): resource.MustParse("0"),
api.ResourceName(api.ResourceLimitsMemory): resource.MustParse("0G"),
api.ResourceName(api.ResourceMemory): resource.MustParse("0G"),
api.ResourceName(api.ResourceRequestsCPU): resource.MustParse("0"),
api.ResourceName(api.ResourceRequestsMemory): resource.MustParse("0G"),
},
},
})
c := &describeClient{T: t, Namespace: "foo", Interface: fake}
d := ResourceQuotaDescriber{c}
out, err := d.Describe("foo", "bar", printers.DescriberSettings{ShowEvents: true})
if err != nil {
t.Errorf("unexpected error: %v", err)
}
expectedOut := []string{"bar", "foo", "limits.cpu", "2", "limits.memory", "2G", "requests.cpu", "1", "requests.memory", "1G"}
for _, expected := range expectedOut {
if !strings.Contains(out, expected) {
t.Errorf("expected to find %q in output: %q", expected, out)
}
}
} | explode_data.jsonl/34948 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 678
} | [
2830,
3393,
74785,
4783,
2183,
6089,
1155,
353,
8840,
836,
8,
341,
1166,
726,
1669,
12418,
7121,
16374,
2959,
746,
2099,
2068,
20766,
2183,
6089,
515,
197,
23816,
12175,
25,
77520,
16,
80222,
515,
298,
21297,
25,
414,
330,
2257,
756,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestReplaceTextLinks(t *testing.T) {
scenarios := map[string]string{
`This is a link to example.org`: `This is a link to example.org`,
`This is a link to ftp://example.org`: `This is a link to ftp://example.org`,
`This is a link to www.example.org`: `This is a link to www.example.org`,
`This is a link to http://example.org`: `This is a link to <a href="http://example.org">http://example.org</a>`,
`This is a link to http://example.org, end of sentence.`: `This is a link to <a href="http://example.org">http://example.org</a>, end of sentence.`,
`This is a link to https://example.org`: `This is a link to <a href="https://example.org">https://example.org</a>`,
`This is a link to https://www.example.org/path/to?q=s`: `This is a link to <a href="https://www.example.org/path/to?q=s">https://www.example.org/path/to?q=s</a>`,
`This is a link to https://example.org/index#hash-tag, http://example.org/.`: `This is a link to <a href="https://example.org/index#hash-tag">https://example.org/index#hash-tag</a>, <a href="http://example.org/">http://example.org/</a>.`,
}
for input, expected := range scenarios {
actual := replaceTextLinks(input)
if actual != expected {
t.Errorf(`Unexpected link replacement, got "%s" instead of "%s"`, actual, expected)
}
}
} | explode_data.jsonl/21468 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 708
} | [
2830,
3393,
23107,
1178,
24089,
1155,
353,
8840,
836,
8,
341,
29928,
60494,
1669,
2415,
14032,
30953,
515,
197,
197,
63,
1986,
374,
264,
2656,
311,
3110,
2659,
44622,
13463,
1565,
1986,
374,
264,
2656,
311,
3110,
2659,
12892,
197,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestReadChallengeTx_forbidsMuxedAccounts(t *testing.T) {
kp0 := newKeypair0()
tx, err := BuildChallengeTx(
kp0.Seed(),
kp0.Address(),
"testwebauth.stellar.org",
"testanchor.stellar.org",
network.TestNetworkPassphrase,
time.Hour,
)
env := tx.ToXDR()
assert.NoError(t, err)
aid := xdr.MustAddress(kp0.Address())
muxedAccount := xdr.MuxedAccount{
Type: xdr.CryptoKeyTypeKeyTypeMuxedEd25519,
Med25519: &xdr.MuxedAccountMed25519{
Id: 0xcafebabe,
Ed25519: *aid.Ed25519,
},
}
*env.V1.Tx.Operations[0].SourceAccount = muxedAccount
challenge, err := marshallBase64(env, env.Signatures())
assert.NoError(t, err)
_, _, _, err = ReadChallengeTx(
challenge,
kp0.Address(),
network.TestNetworkPassphrase,
"testwebauth.stellar.org",
[]string{"testanchor.stellar.org"},
)
errorMessage := "only valid Ed25519 accounts are allowed in challenge transactions"
assert.Contains(t, err.Error(), errorMessage)
} | explode_data.jsonl/20715 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 398
} | [
2830,
3393,
4418,
62078,
31584,
5478,
65,
3365,
44,
2200,
291,
41369,
1155,
353,
8840,
836,
8,
341,
16463,
79,
15,
1669,
501,
6608,
1082,
1310,
15,
741,
46237,
11,
1848,
1669,
7854,
62078,
31584,
1006,
197,
16463,
79,
15,
5732,
291,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestStream_gotoNextWriteFrame(t *testing.T) {
t.Run("test", func(t *testing.T) {
assert := base.NewAssert(t)
for i := 0; i < streamBlockSize*(streamFrameArrayInitSize+2); i++ {
stream := NewStream()
if i >= streamPosBody {
stream.SetWritePos(i)
}
curSeg := stream.writeSeg
assert(stream.writeSeg).Equals(len(stream.frames) - 1)
stream.gotoNextWriteFrame()
assert(stream.writeSeg).Equals(len(stream.frames) - 1)
assert(stream.writeSeg).Equals(curSeg + 1)
stream.Release()
}
})
} | explode_data.jsonl/21190 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 216
} | [
2830,
3393,
3027,
97732,
5847,
7985,
4369,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
1944,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
6948,
1669,
2331,
7121,
8534,
1155,
340,
197,
2023,
600,
1669,
220,
15,
26,
600,
366,
426... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestArtistService_Create(t *testing.T) {
s := NewArtistService(newMockArtistDAO())
artist, err := s.Create(nil, &models.Artist{
Name: "ddd",
})
if assert.Nil(t, err) && assert.NotNil(t, artist) {
assert.Equal(t, 4, artist.Id)
assert.Equal(t, "ddd", artist.Name)
}
// dao error
_, err = s.Create(nil, &models.Artist{
Id: 100,
Name: "ddd",
})
assert.NotNil(t, err)
// validation error
_, err = s.Create(nil, &models.Artist{
Name: "",
})
assert.NotNil(t, err)
} | explode_data.jsonl/70210 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 221
} | [
2830,
3393,
40309,
1860,
34325,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
1532,
40309,
1860,
1755,
11571,
40309,
19532,
2398,
197,
18622,
11,
1848,
1669,
274,
7251,
27907,
11,
609,
6507,
50064,
380,
515,
197,
21297,
25,
330,
40360,
756,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestGetHeaders(t *testing.T) {
testCases := []struct {
desc string
labels map[string]string
expected *types.Headers
}{
{
desc: "should return nil when no custom headers options are set",
labels: map[string]string{},
expected: nil,
},
{
desc: "should return a struct when all custom headers options are set",
labels: map[string]string{
TraefikFrontendRequestHeaders: "Access-Control-Allow-Methods:POST,GET,OPTIONS || Content-type: application/json; charset=utf-8",
TraefikFrontendResponseHeaders: "Access-Control-Allow-Methods:POST,GET,OPTIONS || Content-type: application/json; charset=utf-8",
TraefikFrontendSSLProxyHeaders: "Access-Control-Allow-Methods:POST,GET,OPTIONS || Content-type: application/json; charset=utf-8",
TraefikFrontendAllowedHosts: "foo,bar,bor",
TraefikFrontendHostsProxyHeaders: "foo,bar,bor",
TraefikFrontendSSLHost: "foo",
TraefikFrontendCustomFrameOptionsValue: "foo",
TraefikFrontendContentSecurityPolicy: "foo",
TraefikFrontendPublicKey: "foo",
TraefikFrontendReferrerPolicy: "foo",
TraefikFrontendCustomBrowserXSSValue: "foo",
TraefikFrontendSTSSeconds: "666",
TraefikFrontendSSLRedirect: "true",
TraefikFrontendSSLForceHost: "true",
TraefikFrontendSSLTemporaryRedirect: "true",
TraefikFrontendSTSIncludeSubdomains: "true",
TraefikFrontendSTSPreload: "true",
TraefikFrontendForceSTSHeader: "true",
TraefikFrontendFrameDeny: "true",
TraefikFrontendContentTypeNosniff: "true",
TraefikFrontendBrowserXSSFilter: "true",
TraefikFrontendIsDevelopment: "true",
},
expected: &types.Headers{
CustomRequestHeaders: map[string]string{
"Access-Control-Allow-Methods": "POST,GET,OPTIONS",
"Content-Type": "application/json; charset=utf-8",
},
CustomResponseHeaders: map[string]string{
"Access-Control-Allow-Methods": "POST,GET,OPTIONS",
"Content-Type": "application/json; charset=utf-8",
},
SSLProxyHeaders: map[string]string{
"Access-Control-Allow-Methods": "POST,GET,OPTIONS",
"Content-Type": "application/json; charset=utf-8",
},
AllowedHosts: []string{"foo", "bar", "bor"},
HostsProxyHeaders: []string{"foo", "bar", "bor"},
SSLHost: "foo",
CustomFrameOptionsValue: "foo",
ContentSecurityPolicy: "foo",
PublicKey: "foo",
ReferrerPolicy: "foo",
CustomBrowserXSSValue: "foo",
STSSeconds: 666,
SSLForceHost: true,
SSLRedirect: true,
SSLTemporaryRedirect: true,
STSIncludeSubdomains: true,
STSPreload: true,
ForceSTSHeader: true,
FrameDeny: true,
ContentTypeNosniff: true,
BrowserXSSFilter: true,
IsDevelopment: true,
},
},
}
for _, test := range testCases {
test := test
t.Run(test.desc, func(t *testing.T) {
t.Parallel()
actual := GetHeaders(test.labels)
assert.Equal(t, test.expected, actual)
})
}
} | explode_data.jsonl/51865 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1599
} | [
2830,
3393,
1949,
10574,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
41653,
257,
914,
198,
197,
95143,
256,
2415,
14032,
30953,
198,
197,
42400,
353,
9242,
43968,
198,
197,
59403,
197,
197,
515,
298,
41653,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestHttpRequest(t *testing.T) {
if err := setupDir(); err != nil {
t.Fatal(err)
}
defer os.RemoveAll(testDir)
d := &Daemon{}
err := d.Init()
if err != nil {
t.Fatal(err)
}
defer d.Stop()
c := http.Client{Transport: &http.Transport{Dial: DevLxdDialer{Path: fmt.Sprintf("%s/devapollo/sock", testDir)}.DevLxdDial}}
raw, err := c.Get("http://1.0")
if err != nil {
t.Fatal(err)
}
if raw.StatusCode != 500 {
t.Fatal(err)
}
resp, err := ioutil.ReadAll(raw.Body)
if err != nil {
t.Fatal(err)
}
if !strings.Contains(string(resp), pidNotInContainerErr.Error()) {
t.Fatal("resp error not expected: ", string(resp))
}
} | explode_data.jsonl/9187 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 287
} | [
2830,
3393,
26362,
1155,
353,
8840,
836,
8,
341,
743,
1848,
1669,
6505,
6184,
2129,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
532,
16867,
2643,
84427,
8623,
6184,
692,
2698,
1669,
609,
89177,
16094,
9859,
1669,
294,
26849,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestAnnotationsForInvalidCandidate(t *testing.T) {
candidate := NewCandidate(
&Context{
Event: &github.CheckSuiteEvent{},
}, &github.CommitFile{
BlobURL: github.String("https://github.com/octocat/Hello-World/blob/837db83be4137ca555d9a5598d0a1ea2987ecfee/deployment.yaml"),
Filename: github.String("deployment.yaml"),
}, nil)
filePath, _ := filepath.Abs("../fixtures/invalid.yaml")
fileContents, _ := ioutil.ReadFile(filePath)
candidate.setBytes(&fileContents)
annotations := candidate.Validate()
want := []*github.CheckRunAnnotation{
{
Path: github.String("deployment.yaml"),
BlobHRef: github.String("https://github.com/octocat/Hello-World/blob/837db83be4137ca555d9a5598d0a1ea2987ecfee/deployment.yaml"),
StartLine: github.Int(1),
EndLine: github.Int(1),
AnnotationLevel: github.String("failure"),
Title: github.String("Error validating Deployment against master schema"),
Message: github.String("spec.replicas: Invalid type. Expected: integer, given: string"),
RawDetails: github.String("* context: (root).spec.replicas\n* expected: integer\n* field: spec.replicas\n* given: string\n"),
},
{
Path: github.String("deployment.yaml"),
BlobHRef: github.String("https://github.com/octocat/Hello-World/blob/837db83be4137ca555d9a5598d0a1ea2987ecfee/deployment.yaml"),
StartLine: github.Int(1),
EndLine: github.Int(1),
AnnotationLevel: github.String("failure"),
Title: github.String("Error validating Deployment against master schema"),
Message: github.String("template: template is required"),
RawDetails: github.String("* context: (root).spec\n* field: template\n* property: template\n"),
}}
if len(annotations) != len(want) {
t.Errorf("a total of %d annotations were returned, wanted %d", len(annotations), len(want))
}
for i, annotation := range annotations {
if diff := deep.Equal(annotation, want[i]); diff != nil {
t.Error(diff)
}
}
} | explode_data.jsonl/53893 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 835
} | [
2830,
3393,
21418,
2461,
7928,
63901,
1155,
353,
8840,
836,
8,
341,
1444,
17050,
1669,
1532,
63901,
1006,
197,
197,
5,
1972,
515,
298,
56055,
25,
609,
5204,
10600,
28000,
1556,
38837,
197,
197,
2137,
609,
5204,
53036,
1703,
515,
298,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestGoBuildQualifyImport(t *testing.T) {
base, err := random.Image(1024, 1)
if err != nil {
t.Fatalf("random.Image() = %v", err)
}
repoDir, err := repoRootDir()
if err != nil {
t.Fatalf("could not get Git repository root directory")
}
tests := []struct {
description string
rawImportpath string
dir string
qualifiedImportpath string
expectError bool
}{
{
description: "strict qualified import path",
rawImportpath: "ko://github.com/google/ko",
dir: "",
qualifiedImportpath: "ko://github.com/google/ko",
expectError: false,
},
{
description: "strict qualified import path in subdirectory of go.mod",
rawImportpath: "ko://github.com/google/ko/test",
dir: "",
qualifiedImportpath: "ko://github.com/google/ko/test",
expectError: false,
},
{
description: "non-strict qualified import path",
rawImportpath: "github.com/google/ko",
dir: "",
qualifiedImportpath: "ko://github.com/google/ko",
expectError: false,
},
{
description: "non-strict local import path in repository root directory",
rawImportpath: "./test",
dir: repoDir,
qualifiedImportpath: "ko://github.com/google/ko/test",
expectError: false,
},
{
description: "non-strict local import path in subdirectory",
rawImportpath: ".",
dir: filepath.Join(repoDir, "test"),
qualifiedImportpath: "ko://github.com/google/ko/test",
expectError: false,
},
{
description: "non-existent non-strict local import path",
rawImportpath: "./does-not-exist",
dir: "/",
qualifiedImportpath: "should return error",
expectError: true,
},
}
for _, test := range tests {
t.Run(test.description, func(t *testing.T) {
ng, err := NewGo(context.Background(), test.dir, WithBaseImages(func(context.Context, string) (name.Reference, Result, error) { return nil, base, nil }))
if err != nil {
t.Fatalf("NewGo() = %v", err)
}
gotImportpath, err := ng.QualifyImport(test.rawImportpath)
if err != nil && test.expectError {
return
}
if err != nil && !test.expectError {
t.Errorf("QualifyImport(dir=%q)(%q) was error (%v), want nil error", test.dir, test.rawImportpath, err)
}
if err == nil && test.expectError {
t.Errorf("QualifyImport(dir=%q)(%q) was nil error, want non-nil error", test.dir, test.rawImportpath)
}
if gotImportpath != test.qualifiedImportpath {
t.Errorf("QualifyImport(dir=%q)(%q) = (%q, nil), want (%q, nil)", test.dir, test.rawImportpath, gotImportpath, test.qualifiedImportpath)
}
})
}
} | explode_data.jsonl/2478 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1253
} | [
2830,
3393,
10850,
11066,
31029,
1437,
11511,
1155,
353,
8840,
836,
8,
341,
24195,
11,
1848,
1669,
4194,
7528,
7,
16,
15,
17,
19,
11,
220,
16,
340,
743,
1848,
961,
2092,
341,
197,
3244,
30762,
445,
11463,
7528,
368,
284,
1018,
85,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClose(t *testing.T) {
data := "data"
s, server := newServiceAndServer(t)
go server.Start()
l := New(s)
var wg sync.WaitGroup
wg.Add(1)
go func() {
assert.NotNil(t, l.Start())
wg.Done()
}()
_, _, err := server.Execute("log", data)
assert.Nil(t, err)
assert.Nil(t, l.Close())
_, _, err = server.Execute("log", data)
assert.NotNil(t, err)
wg.Wait()
} | explode_data.jsonl/61163 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 176
} | [
2830,
3393,
7925,
1155,
353,
8840,
836,
8,
341,
8924,
1669,
330,
691,
1837,
1903,
11,
3538,
1669,
501,
1860,
3036,
5475,
1155,
340,
30680,
3538,
12101,
2822,
8810,
1669,
1532,
1141,
340,
2405,
63581,
12811,
28384,
2808,
271,
72079,
1904... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestConstructBlockVolumeSpec(t *testing.T) {
tmpDir, plug := getBlockPlugin(t)
defer os.RemoveAll(tmpDir)
podPath := filepath.Join(tmpDir, testPodPath)
spec, err := plug.ConstructBlockVolumeSpec(types.UID("poduid"), testPVName, podPath)
if err != nil {
t.Errorf("ConstructBlockVolumeSpec() failed: %v", err)
}
if spec == nil {
t.Fatalf("ConstructBlockVolumeSpec() returned nil")
}
volName := spec.Name()
if volName != testPVName {
t.Errorf("Expected volume name %q, got %q", testPVName, volName)
}
if spec.Volume != nil {
t.Errorf("Volume object returned, expected nil")
}
pv := spec.PersistentVolume
if pv == nil {
t.Fatalf("PersistentVolume object nil")
}
if spec.PersistentVolume.Spec.VolumeMode == nil {
t.Fatalf("Volume mode has not been set.")
}
if *spec.PersistentVolume.Spec.VolumeMode != v1.PersistentVolumeBlock {
t.Errorf("Unexpected volume mode %q", *spec.PersistentVolume.Spec.VolumeMode)
}
ls := pv.Spec.PersistentVolumeSource.Local
if ls == nil {
t.Fatalf("LocalVolumeSource object nil")
}
} | explode_data.jsonl/14260 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 380
} | [
2830,
3393,
28468,
4713,
18902,
8327,
1155,
353,
8840,
836,
8,
341,
20082,
6184,
11,
19633,
1669,
633,
4713,
11546,
1155,
340,
16867,
2643,
84427,
10368,
6184,
692,
3223,
347,
1820,
1669,
26054,
22363,
10368,
6184,
11,
1273,
23527,
1820,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestRemittanceOriginatorContactPhoneNumberAlphaNumeric(t *testing.T) {
ro := mockRemittanceOriginator()
ro.ContactPhoneNumber = "®"
err := ro.Validate()
require.EqualError(t, err, fieldError("ContactPhoneNumber", ErrNonAlphanumeric, ro.ContactPhoneNumber).Error())
} | explode_data.jsonl/32941 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 85
} | [
2830,
3393,
6590,
87191,
13298,
850,
8732,
45820,
19384,
36296,
1155,
353,
8840,
836,
8,
341,
197,
299,
1669,
7860,
6590,
87191,
13298,
850,
741,
197,
299,
53975,
45820,
284,
330,
11909,
1837,
9859,
1669,
926,
47667,
2822,
17957,
12808,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestVSphereKubernetes120BottleRocketThreeWorkersConformanceFlow(t *testing.T) {
test := framework.NewClusterE2ETest(
t,
framework.NewVSphere(t, framework.WithBottleRocket120()),
framework.WithClusterFiller(api.WithKubernetesVersion(v1alpha1.Kube120)),
framework.WithClusterFiller(api.WithWorkerNodeCount(3)),
)
runConformanceFlow(test)
} | explode_data.jsonl/34518 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 129
} | [
2830,
3393,
26050,
8023,
42,
29827,
16,
17,
15,
33,
62118,
80017,
19641,
74486,
1109,
11795,
18878,
1155,
353,
8840,
836,
8,
341,
18185,
1669,
12626,
7121,
28678,
36,
17,
1348,
477,
1006,
197,
3244,
345,
197,
1166,
5794,
7121,
26050,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServiceAccountTokenAutoCreate(t *testing.T) {
c, _, stopFunc := startServiceAccountTestServer(t)
defer stopFunc()
ns := "test-service-account-token-creation"
name := "my-service-account"
// Create namespace
_, err := c.Core().Namespaces().Create(&api.Namespace{ObjectMeta: api.ObjectMeta{Name: ns}})
if err != nil {
t.Fatalf("could not create namespace: %v", err)
}
// Create service account
serviceAccount, err := c.Core().ServiceAccounts(ns).Create(&api.ServiceAccount{ObjectMeta: api.ObjectMeta{Name: name}})
if err != nil {
t.Fatalf("Service Account not created: %v", err)
}
// Get token
token1Name, token1, err := getReferencedServiceAccountToken(c, ns, name, true)
if err != nil {
t.Fatal(err)
}
// Delete token
err = c.Core().Secrets(ns).Delete(token1Name, nil)
if err != nil {
t.Fatalf("Could not delete token: %v", err)
}
// Get recreated token
token2Name, token2, err := getReferencedServiceAccountToken(c, ns, name, true)
if err != nil {
t.Fatal(err)
}
if token1Name == token2Name {
t.Fatalf("Expected new auto-created token name")
}
if token1 == token2 {
t.Fatalf("Expected new auto-created token value")
}
// Trigger creation of a new referenced token
serviceAccount, err = c.Core().ServiceAccounts(ns).Get(name)
if err != nil {
t.Fatal(err)
}
serviceAccount.Secrets = []api.ObjectReference{}
_, err = c.Core().ServiceAccounts(ns).Update(serviceAccount)
if err != nil {
t.Fatal(err)
}
// Get rotated token
token3Name, token3, err := getReferencedServiceAccountToken(c, ns, name, true)
if err != nil {
t.Fatal(err)
}
if token3Name == token2Name {
t.Fatalf("Expected new auto-created token name")
}
if token3 == token2 {
t.Fatalf("Expected new auto-created token value")
}
// Delete service account
err = c.Core().ServiceAccounts(ns).Delete(name, nil)
if err != nil {
t.Fatal(err)
}
// Wait for tokens to be deleted
tokensToCleanup := sets.NewString(token1Name, token2Name, token3Name)
err = wait.Poll(time.Second, 10*time.Second, func() (bool, error) {
// Get all secrets in the namespace
secrets, err := c.Core().Secrets(ns).List(api.ListOptions{})
// Retrieval errors should fail
if err != nil {
return false, err
}
for _, s := range secrets.Items {
if tokensToCleanup.Has(s.Name) {
// Still waiting for tokens to be cleaned up
return false, nil
}
}
// All clean
return true, nil
})
if err != nil {
t.Fatalf("Error waiting for tokens to be deleted: %v", err)
}
} | explode_data.jsonl/67129 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 927
} | [
2830,
3393,
1860,
7365,
3323,
13253,
4021,
1155,
353,
8840,
836,
8,
341,
1444,
11,
8358,
2936,
9626,
1669,
1191,
1860,
7365,
2271,
5475,
1155,
340,
16867,
2936,
9626,
2822,
84041,
1669,
330,
1944,
23461,
49982,
34841,
12,
37375,
698,
11... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestDatastore(t *testing.T) {
client := initDatastoreClient(t)
taskRepo := task.NewTaskRepository(client)
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
now := time.Unix(time.Now().Unix(), 0)
desc := "hello"
id, err := taskRepo.Insert(ctx, &task.Task{
ID: datastore.NameKey("Task", uuid.New().String(), nil),
Desc: desc,
Created: now,
Done: true,
})
if err != nil {
t.Fatalf("failed to put item: %+v", err)
}
ret, err := taskRepo.Get(ctx, id)
if err != nil {
t.Fatalf("failed to get item: %+v", err)
}
compareTask(t, &task.Task{
ID: id,
Desc: desc,
Created: now,
Done: true,
}, ret)
rets, err := taskRepo.GetMulti(ctx, []*datastore.Key{id})
if err != nil {
t.Fatalf("failed to get item: %+v", err)
}
if len(rets) != 1 {
t.Errorf("GetMulti should return 1 item: %+v", err)
}
compareTask(t, &task.Task{
ID: id,
Desc: desc,
Created: now,
Done: true,
}, rets[0])
compareTask(t, &task.Task{
ID: id,
Desc: desc,
Created: now,
Done: true,
}, ret)
if err := taskRepo.DeleteByID(ctx, id); err != nil {
t.Fatalf("delete failed: %+v", err)
}
if _, err := taskRepo.Get(ctx, id); err != datastore.ErrNoSuchEntity {
t.Fatalf("Get deleted item should return ErrNoSuchEntity: %+v", err)
}
} | explode_data.jsonl/6549 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 602
} | [
2830,
93200,
4314,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
68733,
4314,
2959,
1155,
692,
49115,
25243,
1669,
3383,
7121,
6262,
4624,
12805,
692,
20985,
11,
9121,
1669,
2266,
26124,
7636,
5378,
19047,
1507,
220,
18,
15,
77053,
32435,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestUpdateFeeAdjustments(t *testing.T) {
t.Parallel()
aliceChannel, bobChannel, cleanUp, err := CreateTestChannels(
channeldb.SingleFunderTweaklessBit,
)
if err != nil {
t.Fatalf("unable to create test channels: %v", err)
}
defer cleanUp()
// First, we'll grab the current base fee rate as we'll be using this
// to make relative adjustments int he fee rate.
baseFeeRate := aliceChannel.channelState.LocalCommitment.FeePerKw
// We'll first try to increase the fee rate 5x, this should be able to
// be committed without any issue.
newFee := chainfee.SatPerKWeight(baseFeeRate * 5)
if err := aliceChannel.UpdateFee(newFee); err != nil {
t.Fatalf("unable to alice update fee: %v", err)
}
if err := bobChannel.ReceiveUpdateFee(newFee); err != nil {
t.Fatalf("unable to bob update fee: %v", err)
}
// With the fee updates applied, we'll now initiate a state transition
// to ensure the fee update is locked in.
if err := ForceStateTransition(aliceChannel, bobChannel); err != nil {
t.Fatalf("unable to create new commitment: %v", err)
}
// We'll now attempt to increase the fee rate 1,000,000x of the base
// fee. This should result in an error as Alice won't be able to pay
// this new fee rate.
newFee = chainfee.SatPerKWeight(baseFeeRate * 1000000)
if err := aliceChannel.UpdateFee(newFee); err == nil {
t.Fatalf("alice should reject the fee rate")
}
// Finally, we'll attempt to adjust the fee down and use a fee which is
// smaller than the initial base fee rate. The fee application and
// state transition should proceed without issue.
newFee = chainfee.SatPerKWeight(baseFeeRate / 10)
if err := aliceChannel.UpdateFee(newFee); err != nil {
t.Fatalf("unable to alice update fee: %v", err)
}
if err := bobChannel.ReceiveUpdateFee(newFee); err != nil {
t.Fatalf("unable to bob update fee: %v", err)
}
if err := ForceStateTransition(aliceChannel, bobChannel); err != nil {
t.Fatalf("unable to create new commitment: %v", err)
}
} | explode_data.jsonl/72467 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 676
} | [
2830,
3393,
4289,
41941,
38616,
1368,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
197,
63195,
9629,
11,
35192,
9629,
11,
4240,
2324,
11,
1848,
1669,
4230,
2271,
35925,
1006,
197,
23049,
1020,
783,
65,
23119,
37,
7995,
51,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestUnmarshalInvalidPointerKind(t *testing.T) {
a := 1
err := Unmarshal([]byte{}, &a)
if err == nil {
t.Fatal("unmarshal should err when given an invalid pointer type")
}
} | explode_data.jsonl/46341 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 72
} | [
2830,
3393,
1806,
27121,
7928,
9084,
10629,
1155,
353,
8840,
836,
8,
341,
11323,
1669,
220,
16,
198,
9859,
1669,
1230,
27121,
10556,
3782,
22655,
609,
64,
340,
743,
1848,
621,
2092,
341,
197,
3244,
26133,
445,
359,
27121,
1265,
1848,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestMinimumNumber(t *testing.T) {
t.Parallel()
tests := []struct {
name string
len int32
pass string
expected int32
}{
{"Test case 00", int32(3), "Ab1", int32(3)},
{"Test case 01", int32(11), "#HackerRank", int32(1)},
{"Test case 38", int32(5), "jnhqj", int32(3)},
}
for _, tc := range tests {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
res := MinimumNumber(tc.len, tc.pass)
assert.Equal(t, tc.expected, res)
})
}
} | explode_data.jsonl/67679 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 228
} | [
2830,
3393,
28695,
2833,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
78216,
1669,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
33111,
414,
526,
18,
17,
198,
197,
41431,
257,
914,
198,
197,
42400,
526,
18,
17,
198,
197... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPlanCacheSnapshot(t *testing.T) {
store, clean := testkit.CreateMockStore(t)
defer clean()
orgEnable := core.PreparedPlanCacheEnabled()
defer core.SetPreparedPlanCache(orgEnable)
core.SetPreparedPlanCache(true)
se, err := session.CreateSession4TestWithOpt(store, &session.Opt{
PreparedPlanCache: kvcache.NewSimpleLRUCache(100, 0.1, math.MaxUint64),
})
require.NoError(t, err)
tk := testkit.NewTestKitWithSession(t, store, se)
tk.MustExec("use test")
tk.MustExec("drop table if exists t")
tk.MustExec("create table t(id int)")
tk.MustExec("insert into t values (1),(2),(3),(4)")
// For mocktikv, safe point is not initialized, we manually insert it for snapshot to use.
timeSafe := time.Now().Add(-48 * 60 * 60 * time.Second).Format("20060102-15:04:05 -0700 MST")
safePointSQL := `INSERT HIGH_PRIORITY INTO mysql.tidb VALUES ('tikv_gc_safe_point', '%[1]s', '')
ON DUPLICATE KEY
UPDATE variable_value = '%[1]s'`
tk.MustExec(fmt.Sprintf(safePointSQL, timeSafe))
tk.MustExec("prepare stmt from 'select * from t where id=?'")
tk.MustExec("set @p = 1")
tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("0"))
tk.MustQuery("execute stmt using @p").Check(testkit.Rows("1"))
tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("0"))
tk.MustQuery("execute stmt using @p").Check(testkit.Rows("1"))
tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1"))
// Record the current tso.
tk.MustExec("begin")
tso := tk.Session().GetSessionVars().TxnCtx.StartTS
tk.MustExec("rollback")
require.True(t, tso > 0)
// Insert one more row with id = 1.
tk.MustExec("insert into t values (1)")
tk.MustExec(fmt.Sprintf("set @@tidb_snapshot = '%d'", tso))
tk.MustQuery("select * from t where id = 1").Check(testkit.Rows("1"))
tk.MustQuery("execute stmt using @p").Check(testkit.Rows("1"))
tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1"))
} | explode_data.jsonl/5527 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 749
} | [
2830,
3393,
20485,
8233,
15009,
1155,
353,
8840,
836,
8,
341,
57279,
11,
4240,
1669,
1273,
8226,
7251,
11571,
6093,
1155,
340,
16867,
4240,
741,
87625,
11084,
1669,
6200,
28770,
7212,
20485,
8233,
5462,
741,
16867,
6200,
4202,
4703,
7212,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParse(t *testing.T) {
tokenizer := jwt.New(secret)
token, err := tokenizer.Issue(key())
require.Nil(t, err, fmt.Sprintf("issuing key expected to succeed: %s", err))
userKey := key()
userKey.Type = authn.APIKey
userKey.ExpiresAt = time.Now().UTC().Add(-1 * time.Minute).Round(time.Second)
userToken, err := tokenizer.Issue(userKey)
require.Nil(t, err, fmt.Sprintf("issuing user key expected to succeed: %s", err))
expKey := key()
expKey.ExpiresAt = time.Now().UTC().Add(-1 * time.Minute).Round(time.Second)
expToken, err := tokenizer.Issue(expKey)
require.Nil(t, err, fmt.Sprintf("issuing expired key expected to succeed: %s", err))
cases := []struct {
desc string
key authn.Key
token string
err error
}{
{
desc: "parse valid key",
key: key(),
token: token,
err: nil,
},
{
desc: "parse ivalid key",
key: authn.Key{},
token: "invalid",
err: authn.ErrUnauthorizedAccess,
},
{
desc: "parse expired key",
key: authn.Key{},
token: expToken,
err: authn.ErrKeyExpired,
},
{
desc: "parse expired user key",
key: userKey,
token: userToken,
err: nil,
},
}
for _, tc := range cases {
key, err := tokenizer.Parse(tc.token)
assert.True(t, errors.Contains(err, tc.err), fmt.Sprintf("%s expected %s, got %s", tc.desc, tc.err, err))
assert.Equal(t, tc.key, key, fmt.Sprintf("%s expected %v, got %v", tc.desc, tc.key, key))
}
} | explode_data.jsonl/64410 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 622
} | [
2830,
3393,
14463,
1155,
353,
8840,
836,
8,
341,
43947,
3135,
1669,
24589,
7121,
75862,
692,
43947,
11,
1848,
1669,
45958,
2447,
83890,
4857,
2398,
17957,
59678,
1155,
11,
1848,
11,
8879,
17305,
445,
1038,
7471,
1376,
3601,
311,
11996,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestThousandSep(t *testing.T) {
st := []struct {
name string
num int
exp string
}{
{"num=0", 0, "0"},
{"testcase1", 123, "123"},
{"testcase2", 1234, "1.234"},
{"testcase3", 1000000, "1.000.000"},
}
for _, tt := range st {
t.Run(tt.name, func(t *testing.T) {
out := addSeps(tt.num)
if out != tt.exp {
t.Fatalf("with input num:%d wanted %s but got %s", tt.num, tt.exp, out)
}
t.Log("pass")
})
}
} | explode_data.jsonl/61173 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 220
} | [
2830,
3393,
1001,
51849,
41114,
1155,
353,
8840,
836,
8,
341,
18388,
1669,
3056,
1235,
341,
197,
11609,
914,
198,
197,
22431,
220,
526,
198,
197,
48558,
220,
914,
198,
197,
59403,
197,
197,
4913,
2413,
28,
15,
497,
220,
15,
11,
330,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func Test_getVMExtensionUpdateSupport(t *testing.T) {
// Update disabled
ctx := log.NewSyncLogger(log.NewLogfmtLogger(os.Stdout))
mm := createMockVMExtensionEnvironmentManager()
ii, _ := GetInitializationInfo("yaba", "5.0", true, testEnableCallback)
ext, err := getVMExtensionInternal(ctx, ii, mm)
require.NoError(t, err, "getVMExtensionInternal failed")
require.NotNil(t, ext)
// Verify this is a noop
updateNormalCallbackCalled = false
cmd := ext.exec.cmds["update"]
require.NotNil(t, cmd)
_, err = cmd.f(ctx, ext)
require.NoError(t, err, "updateCallback failed")
require.False(t, updateNormalCallbackCalled)
// Update enabled
ii.UpdateCallback = testUpdateCallbackNormal
ext, err = getVMExtensionInternal(ctx, ii, mm)
require.NoError(t, err, "getVMExtensionInternal failed")
require.NotNil(t, ext)
// Verify this is not a noop
cmd = ext.exec.cmds["update"]
require.NotNil(t, cmd)
_, err = cmd.f(ctx, ext)
require.NoError(t, err, "updateCallback failed")
require.True(t, updateNormalCallbackCalled)
} | explode_data.jsonl/18579 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 368
} | [
2830,
3393,
3062,
11187,
12049,
4289,
7916,
1155,
353,
8840,
836,
8,
341,
197,
322,
5549,
8386,
198,
20985,
1669,
1487,
7121,
12154,
7395,
12531,
7121,
2201,
12501,
7395,
9638,
83225,
1171,
2109,
76,
1669,
1855,
11571,
11187,
12049,
12723... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMetricsServiceV2ListLogMetricsError(t *testing.T) {
errCode := codes.Internal
mockMetrics.err = grpc.Errorf(errCode, "test error")
var formattedParent string = MetricsProjectPath("[PROJECT]")
var request = &loggingpb.ListLogMetricsRequest{
Parent: formattedParent,
}
c, err := NewMetricsClient(context.Background(), clientOpt)
if err != nil {
t.Fatal(err)
}
resp, err := c.ListLogMetrics(context.Background(), request).Next()
if c := grpc.Code(err); c != errCode {
t.Errorf("got error code %q, want %q", c, errCode)
}
_ = resp
} | explode_data.jsonl/77778 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 203
} | [
2830,
3393,
27328,
1860,
53,
17,
852,
2201,
27328,
1454,
1155,
353,
8840,
836,
8,
341,
9859,
2078,
1669,
13912,
32579,
198,
77333,
27328,
18441,
284,
47900,
13080,
3964,
2078,
11,
330,
1944,
1465,
5130,
2405,
23126,
8387,
914,
284,
5419... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_getShardSets(t *testing.T) {
t.Parallel()
c := &Cluster{
Vtctld: &vtctldProxy{
VtctldClient: &fakevtctldclient.VtctldClient{
GetKeyspaceResults: map[string]struct {
Response *vtctldatapb.GetKeyspaceResponse
Error error
}{
"ks1": {
Response: &vtctldatapb.GetKeyspaceResponse{
Keyspace: &vtctldatapb.Keyspace{
Name: "ks1",
Keyspace: &topodatapb.Keyspace{},
},
},
},
"ks2": {
Response: &vtctldatapb.GetKeyspaceResponse{
Keyspace: &vtctldatapb.Keyspace{
Name: "ks2",
Keyspace: &topodatapb.Keyspace{},
},
},
},
"ks3": {
Error: topo.NewError(topo.NoNode, "ks3"), /* we need to fail in a particular way */
},
},
GetKeyspacesResults: struct {
Keyspaces []*vtctldatapb.Keyspace
Error error
}{
Keyspaces: []*vtctldatapb.Keyspace{
{
Name: "ks1",
Keyspace: &topodatapb.Keyspace{},
},
{
Name: "ks2",
Keyspace: &topodatapb.Keyspace{},
},
},
},
FindAllShardsInKeyspaceResults: map[string]struct {
Response *vtctldatapb.FindAllShardsInKeyspaceResponse
Error error
}{
"ks1": {
Response: &vtctldatapb.FindAllShardsInKeyspaceResponse{
Shards: map[string]*vtctldatapb.Shard{
"-80": {
Keyspace: "ks1",
Name: "-80",
Shard: &topodatapb.Shard{},
},
"80-": {
Keyspace: "ks1",
Name: "80-",
Shard: &topodatapb.Shard{},
},
},
},
},
"ks2": {
Response: &vtctldatapb.FindAllShardsInKeyspaceResponse{
Shards: map[string]*vtctldatapb.Shard{
"-": {
Keyspace: "ks2",
Name: "-",
Shard: &topodatapb.Shard{},
},
},
},
},
},
},
},
topoReadPool: pools.NewRPCPool(5, 0, nil),
}
require.NoError(t, c.Vtctld.Dial(context.Background()))
tests := []struct {
name string
keyspaces []string
keyspaceShards []string
result map[string]sets.String
shouldErr bool
}{
{
name: "all keyspaces and shards",
keyspaces: nil,
keyspaceShards: nil,
result: map[string]sets.String{
"ks1": sets.NewString("-80", "80-"),
"ks2": sets.NewString("-"),
},
},
{
name: "keyspaceShards filter",
keyspaces: nil,
keyspaceShards: []string{"ks1/-80", "ks2/-"},
result: map[string]sets.String{
"ks1": sets.NewString("-80"),
"ks2": sets.NewString("-"),
},
},
{
name: "keyspace and shards filters",
keyspaces: []string{"ks1"},
keyspaceShards: []string{"ks1/80-"},
result: map[string]sets.String{
"ks1": sets.NewString("80-"),
},
},
{
name: "skipped non-existing shards and keyspaces",
keyspaces: nil,
keyspaceShards: []string{"ks1/-" /* does not exist */, "ks1/-80", "ks1/80-", "ks3/-" /* does not exist */},
result: map[string]sets.String{
"ks1": sets.NewString("-80", "80-"),
},
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
result, err := c.getShardSets(context.Background(), tt.keyspaces, tt.keyspaceShards)
if tt.shouldErr {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.Equal(t, tt.result, result)
})
}
} | explode_data.jsonl/57720 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1876
} | [
2830,
3393,
3062,
2016,
567,
30175,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
1444,
1669,
609,
28678,
515,
197,
17446,
83,
302,
507,
25,
609,
9708,
302,
507,
16219,
515,
298,
17446,
83,
302,
507,
2959,
25,
609,
30570,
9... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestDetectKvOrderViolation(t *testing.T) {
var errOrderViolation = errors.New("Detected Order Violation")
defer testutil.AfterTest(t)
clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 3})
defer clus.Terminate(t)
cfg := clientv3.Config{
Endpoints: []string{
clus.Members[0].GRPCAddr(),
clus.Members[1].GRPCAddr(),
clus.Members[2].GRPCAddr(),
},
}
cli, err := clientv3.New(cfg)
if err != nil {
t.Fatal(err)
}
ctx := context.TODO()
if _, err = clus.Client(0).Put(ctx, "foo", "bar"); err != nil {
t.Fatal(err)
}
// ensure that the second member has the current revision for the key foo
if _, err = clus.Client(1).Get(ctx, "foo"); err != nil {
t.Fatal(err)
}
// stop third member in order to force the member to have an outdated revision
clus.Members[2].Stop(t)
time.Sleep(1 * time.Second) // give enough time for operation
_, err = cli.Put(ctx, "foo", "buzz")
if err != nil {
t.Fatal(err)
}
// perform get request against the first member, in order to
// set up kvOrdering to expect "foo" revisions greater than that of
// the third member.
orderingKv := NewKV(cli.KV,
func(op clientv3.Op, resp clientv3.OpResponse, prevRev int64) error {
return errOrderViolation
})
_, err = orderingKv.Get(ctx, "foo")
if err != nil {
t.Fatal(err)
}
// ensure that only the third member is queried during requests
clus.Members[0].Stop(t)
clus.Members[1].Stop(t)
clus.Members[2].Restart(t)
// force OrderingKv to query the third member
cli.SetEndpoints(clus.Members[2].GRPCAddr())
_, err = orderingKv.Get(ctx, "foo", clientv3.WithSerializable())
if err != errOrderViolation {
t.Fatalf("expected %v, got %v", errOrderViolation, err)
}
} | explode_data.jsonl/66030 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 659
} | [
2830,
3393,
57193,
42,
85,
4431,
74971,
1155,
353,
8840,
836,
8,
341,
2405,
1848,
4431,
74971,
284,
5975,
7121,
445,
17076,
1569,
7217,
29455,
367,
5130,
16867,
1273,
1314,
36892,
2271,
1155,
340,
197,
4163,
1669,
17590,
7121,
28678,
53... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMatch(t *testing.T) {
var (
exactMatcher, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{MatchPattern: &v3matcherpb.StringMatcher_Exact{Exact: "exact"}})
prefixMatcher, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{MatchPattern: &v3matcherpb.StringMatcher_Prefix{Prefix: "prefix"}})
suffixMatcher, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{MatchPattern: &v3matcherpb.StringMatcher_Suffix{Suffix: "suffix"}})
regexMatcher, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{MatchPattern: &v3matcherpb.StringMatcher_SafeRegex{SafeRegex: &v3matcherpb.RegexMatcher{Regex: "good?regex?"}}})
containsMatcher, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{MatchPattern: &v3matcherpb.StringMatcher_Contains{Contains: "contains"}})
exactMatcherIgnoreCase, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{
MatchPattern: &v3matcherpb.StringMatcher_Exact{Exact: "exact"},
IgnoreCase: true,
})
prefixMatcherIgnoreCase, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{
MatchPattern: &v3matcherpb.StringMatcher_Prefix{Prefix: "prefix"},
IgnoreCase: true,
})
suffixMatcherIgnoreCase, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{
MatchPattern: &v3matcherpb.StringMatcher_Suffix{Suffix: "suffix"},
IgnoreCase: true,
})
containsMatcherIgnoreCase, _ = StringMatcherFromProto(&v3matcherpb.StringMatcher{
MatchPattern: &v3matcherpb.StringMatcher_Contains{Contains: "contains"},
IgnoreCase: true,
})
)
tests := []struct {
desc string
matcher StringMatcher
input string
wantMatch bool
}{
{
desc: "exact match success",
matcher: exactMatcher,
input: "exact",
wantMatch: true,
},
{
desc: "exact match failure",
matcher: exactMatcher,
input: "not-exact",
},
{
desc: "exact match success with ignore case",
matcher: exactMatcherIgnoreCase,
input: "EXACT",
wantMatch: true,
},
{
desc: "exact match failure with ignore case",
matcher: exactMatcherIgnoreCase,
input: "not-exact",
},
{
desc: "prefix match success",
matcher: prefixMatcher,
input: "prefixIsHere",
wantMatch: true,
},
{
desc: "prefix match failure",
matcher: prefixMatcher,
input: "not-prefix",
},
{
desc: "prefix match success with ignore case",
matcher: prefixMatcherIgnoreCase,
input: "PREFIXisHere",
wantMatch: true,
},
{
desc: "prefix match failure with ignore case",
matcher: prefixMatcherIgnoreCase,
input: "not-PREFIX",
},
{
desc: "suffix match success",
matcher: suffixMatcher,
input: "hereIsThesuffix",
wantMatch: true,
},
{
desc: "suffix match failure",
matcher: suffixMatcher,
input: "suffix-is-not-here",
},
{
desc: "suffix match success with ignore case",
matcher: suffixMatcherIgnoreCase,
input: "hereIsTheSuFFix",
wantMatch: true,
},
{
desc: "suffix match failure with ignore case",
matcher: suffixMatcherIgnoreCase,
input: "SUFFIX-is-not-here",
},
{
desc: "regex match success",
matcher: regexMatcher,
input: "goodregex",
wantMatch: true,
},
{
desc: "regex match failure",
matcher: regexMatcher,
input: "regex-is-not-here",
},
{
desc: "contains match success",
matcher: containsMatcher,
input: "IScontainsHERE",
wantMatch: true,
},
{
desc: "contains match failure",
matcher: containsMatcher,
input: "con-tains-is-not-here",
},
{
desc: "contains match success with ignore case",
matcher: containsMatcherIgnoreCase,
input: "isCONTAINShere",
wantMatch: true,
},
{
desc: "contains match failure with ignore case",
matcher: containsMatcherIgnoreCase,
input: "CON-TAINS-is-not-here",
},
}
for _, test := range tests {
t.Run(test.desc, func(t *testing.T) {
if gotMatch := test.matcher.Match(test.input); gotMatch != test.wantMatch {
t.Errorf("StringMatcher.Match(%s) returned %v, want %v", test.input, gotMatch, test.wantMatch)
}
})
}
} | explode_data.jsonl/38090 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1868
} | [
2830,
3393,
8331,
1155,
353,
8840,
836,
8,
341,
2405,
2399,
197,
8122,
531,
37554,
11,
716,
1843,
284,
923,
37554,
3830,
31549,
2099,
85,
18,
70826,
16650,
6431,
37554,
90,
8331,
15760,
25,
609,
85,
18,
70826,
16650,
6431,
37554,
62,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// Test_scsi_WaitUdevSymlink is a table-driven test for Scsi.WaitUdevSymlink.
// It verifies that the method resolves the expected /dev/disk/by-id symlink
// for both plain SCSI devices and device-mapper (dm) devices, and that it
// reports an error when the symlink is missing or points at a different device.
func Test_scsi_WaitUdevSymlink(t *testing.T) {
// args bundles the inputs passed to WaitUdevSymlink in each test case.
type args struct {
ctx context.Context
deviceName string
wwn string
}
ctx := context.Background()
// One argument set per device flavor; both use the same WWID fixture.
devArgs := args{ctx: ctx, deviceName: mh.ValidDeviceName, wwn: mh.ValidWWID}
dmArgs := args{ctx: ctx, deviceName: mh.ValidDMName, wwn: mh.ValidWWID}
// Symlink paths WaitUdevSymlink is expected to evaluate for each flavor.
devPath := fmt.Sprintf("/dev/disk/by-id/scsi-%s", mh.ValidWWID)
dmPath := fmt.Sprintf("/dev/disk/by-id/dm-uuid-mpath-%s", mh.ValidWWID)
ctrl := gomock.NewController(t)
defer ctrl.Finish()
// mock configures expectations on the mocked filePath helper;
// each case's stateSetter arms it differently before the call.
mock := mh.MockHelper{
Ctrl: ctrl,
}
tests := []struct {
name string
fields scsiFields
// stateSetter arms the mock expectations for this case
// against the freshly built scsiFields.
stateSetter func(fields scsiFields)
args args
wantErr bool
}{
{
// Happy path: symlink resolves to the expected SCSI device.
name: "dev found",
fields: getDefaultSCSIFields(ctrl),
stateSetter: func(fields scsiFields) {
mock.FilePathEvalSymlinksCallPath = devPath
mock.FilePathEvalSymlinksOKReturn = mh.ValidDevicePath
mock.FilePathEvalSymlinksOK(fields.filePath)
},
args: devArgs,
wantErr: false,
},
{
// EvalSymlinks fails (symlink absent) -> error expected.
name: "not found",
fields: getDefaultSCSIFields(ctrl),
stateSetter: func(fields scsiFields) {
mock.FilePathEvalSymlinksErr(fields.filePath)
},
args: devArgs,
wantErr: true,
},
{
// Symlink resolves, but to a different device path -> error expected.
name: "symlink point to unexpected device",
fields: getDefaultSCSIFields(ctrl),
stateSetter: func(fields scsiFields) {
mock.FilePathEvalSymlinksOKReturn = mh.ValidDevicePath2
mock.FilePathEvalSymlinksOK(fields.filePath)
},
args: devArgs,
wantErr: true,
},
{
// Happy path for a device-mapper name: dm-uuid-mpath symlink resolves.
name: "dm found",
fields: getDefaultSCSIFields(ctrl),
stateSetter: func(fields scsiFields) {
mock.FilePathEvalSymlinksCallPath = dmPath
mock.FilePathEvalSymlinksOKReturn = mh.ValidDMPath
mock.FilePathEvalSymlinksOK(fields.filePath)
},
args: dmArgs,
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Build the Scsi under test from the per-case fields.
s := &Scsi{
fileReader: tt.fields.fileReader,
filePath: tt.fields.filePath,
os: tt.fields.os,
osexec: tt.fields.osexec,
singleCall: tt.fields.singleCall,
}
// Arm mock expectations before invoking the method under test.
tt.stateSetter(tt.fields)
if err := s.WaitUdevSymlink(tt.args.ctx, tt.args.deviceName, tt.args.wwn); (err != nil) != tt.wantErr {
t.Errorf("WaitUdevSymlink() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1100
} | [
2830,
3393,
643,
63229,
2763,
1315,
52,
3583,
34667,
44243,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
20985,
286,
2266,
9328,
198,
197,
54719,
675,
914,
198,
197,
197,
1250,
77,
286,
914,
198,
197,
630,
20985,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDaoReportLog(t *testing.T) {
var (
c = context.TODO()
id = int64(0)
)
convey.Convey("ReportLog", t, func(ctx convey.C) {
res, tids, err := d.ReportLog(c, id)
ctx.Convey("Then err should be nil.res,tids should not be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
ctx.So(tids, convey.ShouldNotBeNil)
ctx.So(res, convey.ShouldNotBeNil)
})
})
} | explode_data.jsonl/51302 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 177
} | [
2830,
3393,
12197,
10361,
2201,
1155,
353,
8840,
836,
8,
341,
2405,
2399,
197,
1444,
220,
284,
2266,
90988,
741,
197,
15710,
284,
526,
21,
19,
7,
15,
340,
197,
340,
37203,
5617,
4801,
5617,
445,
10361,
2201,
497,
259,
11,
2915,
7502... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDataSetsGet(t *testing.T) {
req := client.NewDataSetsGetRequest()
resp, err := req.Do()
if err != nil {
t.Error(err)
}
b, _ := json.MarshalIndent(resp, "", " ")
log.Println(string(b))
} | explode_data.jsonl/18871 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 90
} | [
2830,
93200,
30175,
1949,
1155,
353,
8840,
836,
8,
341,
24395,
1669,
2943,
7121,
1043,
30175,
1949,
1900,
741,
34653,
11,
1848,
1669,
4232,
33596,
741,
743,
1848,
961,
2092,
341,
197,
3244,
6141,
3964,
340,
197,
630,
2233,
11,
716,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.