text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestApplyComponentCreateSuccess(t *testing.T) {
// resolve empty policy
empty := newTestData(t, builder.NewPolicyBuilder())
actualState := empty.resolution()
// resolve full policy
desired := newTestData(t, makePolicyBuilder())
// apply changes
applier := NewEngineApply(
desired.policy(),
desired.resolution(),
actual.NewNoOpActionStateUpdater(actualState),
desired.external(),
mockRegistry(true, false),
diff.NewPolicyResolutionDiff(desired.resolution(), actualState).ActionPlan,
event.NewLog(logrus.DebugLevel, "test-apply"),
action.NewApplyResultUpdaterImpl(),
)
// check actual state
assert.Equal(t, 0, len(actualState.ComponentInstanceMap), "Actual state should be empty")
// check that policy apply finished with expected results
actualState = applyAndCheck(t, applier, action.ApplyResult{Success: 4, Failed: 0, Skipped: 0})
// check that actual state got updated
assert.Equal(t, 2, len(actualState.ComponentInstanceMap), "Actual state should not be empty after apply()")
} | explode_data.jsonl/74187 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 310
} | [
2830,
3393,
28497,
2189,
4021,
7188,
1155,
353,
8840,
836,
8,
341,
197,
322,
8830,
4287,
4842,
198,
197,
3194,
1669,
501,
83920,
1155,
11,
7363,
7121,
13825,
3297,
2398,
88814,
1397,
1669,
4287,
86431,
2822,
197,
322,
8830,
2480,
4842,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUnauthorizedSeek(t *testing.T) {
mm := newMockMultichainManager()
for i := 1; i < ledgerSize; i++ {
l := mm.chains[systemChainID].ledger
l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", i))}}, ""))
}
mm.chains[systemChainID].policyManager.Policy.Err = fmt.Errorf("Fail to evaluate policy")
m := newMockD()
defer close(m.recvChan)
ds := NewHandlerImpl(mm)
go ds.Handle(m)
m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(0)), Stop: seekSpecified(uint64(0)), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY})
select {
case deliverReply := <-m.sendChan:
if deliverReply.GetStatus() != cb.Status_FORBIDDEN {
t.Fatalf("Received wrong error on the reply channel")
}
case <-time.After(time.Second):
t.Fatalf("Timed out waiting to get all blocks")
}
} | explode_data.jsonl/36260 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 339
} | [
2830,
3393,
51181,
39350,
1155,
353,
8840,
836,
8,
341,
2109,
76,
1669,
501,
11571,
40404,
713,
466,
2043,
741,
2023,
600,
1669,
220,
16,
26,
600,
366,
46933,
1695,
26,
600,
1027,
341,
197,
8810,
1669,
9465,
5329,
1735,
58,
8948,
18... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestMulti(t *testing.T) {
connection, err := OpenConnection("multi-conn", "tcp", "localhost:6379", 1, nil)
assert.NoError(t, err)
queue, err := connection.OpenQueue("multi-q")
assert.NoError(t, err)
_, err = queue.PurgeReady()
assert.NoError(t, err)
for i := 0; i < 20; i++ {
err := queue.Publish(fmt.Sprintf("multi-d%d", i))
assert.NoError(t, err)
}
count, err := queue.readyCount()
assert.NoError(t, err)
assert.Equal(t, int64(20), count)
count, err = queue.unackedCount()
assert.NoError(t, err)
assert.Equal(t, int64(0), count)
assert.NoError(t, queue.StartConsuming(10, time.Millisecond))
time.Sleep(2 * time.Millisecond)
count, err = queue.readyCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
count, err = queue.unackedCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
consumer := NewTestConsumer("multi-cons")
consumer.AutoAck = false
consumer.AutoFinish = false
_, err = queue.AddConsumer("multi-cons", consumer)
assert.NoError(t, err)
time.Sleep(10 * time.Millisecond)
count, err = queue.readyCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
count, err = queue.unackedCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
assert.NoError(t, consumer.LastDelivery.Ack())
time.Sleep(10 * time.Millisecond)
count, err = queue.readyCount()
assert.NoError(t, err)
assert.Equal(t, int64(9), count)
count, err = queue.unackedCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
consumer.Finish()
time.Sleep(10 * time.Millisecond)
count, err = queue.readyCount()
assert.NoError(t, err)
assert.Equal(t, int64(9), count)
count, err = queue.unackedCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
assert.NoError(t, consumer.LastDelivery.Ack())
time.Sleep(10 * time.Millisecond)
count, err = queue.readyCount()
assert.NoError(t, err)
assert.Equal(t, int64(8), count)
count, err = queue.unackedCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
consumer.Finish()
time.Sleep(10 * time.Millisecond)
count, err = queue.readyCount()
assert.NoError(t, err)
assert.Equal(t, int64(8), count)
count, err = queue.unackedCount()
assert.NoError(t, err)
assert.Equal(t, int64(10), count)
queue.StopConsuming()
assert.NoError(t, connection.stopHeartbeat())
} | explode_data.jsonl/44665 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 925
} | [
2830,
3393,
20358,
1155,
353,
8840,
836,
8,
341,
54590,
11,
1848,
1669,
5264,
4526,
445,
26268,
12,
5148,
497,
330,
27161,
497,
330,
8301,
25,
21,
18,
22,
24,
497,
220,
16,
11,
2092,
340,
6948,
35699,
1155,
11,
1848,
340,
46993,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestMemory_Configure(t *testing.T) {
config := `{"trust_domain":"example.com", "ttl":"1h", "key_size":2048}`
pluginConfig := &spi.ConfigureRequest{
Configuration: config,
}
m := &memoryPlugin{
mtx: &sync.RWMutex{},
}
resp, err := m.Configure(pluginConfig)
assert.Nil(t, err)
assert.Equal(t, &spi.ConfigureResponse{}, resp)
} | explode_data.jsonl/73850 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 140
} | [
2830,
3393,
10642,
15100,
17781,
1155,
353,
8840,
836,
8,
341,
25873,
1669,
1565,
4913,
56655,
20111,
3252,
8687,
905,
497,
330,
62858,
3252,
16,
71,
497,
330,
792,
2368,
788,
17,
15,
19,
23,
31257,
197,
9138,
2648,
1669,
609,
39157,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestKVPutAtMostOnce(t *testing.T) {
defer testutil.AfterTest(t)
clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 1})
defer clus.Terminate(t)
if _, err := clus.Client(0).Put(context.TODO(), "k", "1"); err != nil {
t.Fatal(err)
}
for i := 0; i < 10; i++ {
clus.Members[0].DropConnections()
donec := make(chan struct{})
go func() {
defer close(donec)
for i := 0; i < 10; i++ {
clus.Members[0].DropConnections()
time.Sleep(5 * time.Millisecond)
}
}()
_, err := clus.Client(0).Put(context.TODO(), "k", "v")
<-donec
if err != nil {
break
}
}
resp, err := clus.Client(0).Get(context.TODO(), "k")
if err != nil {
t.Fatal(err)
}
if resp.Kvs[0].Version > 11 {
t.Fatalf("expected version <= 10, got %+v", resp.Kvs[0])
}
} | explode_data.jsonl/16414 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 369
} | [
2830,
3393,
42,
13378,
332,
1655,
13319,
12522,
1155,
353,
8840,
836,
8,
341,
16867,
1273,
1314,
36892,
2271,
1155,
340,
197,
4163,
1669,
17590,
7121,
28678,
53,
18,
1155,
11,
609,
60168,
72883,
2648,
90,
1695,
25,
220,
16,
3518,
1686... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestClient_Do(t *testing.T) {
t.Parallel()
cases := []struct {
name string
request *http.Request
Option []Option
}{
{
"normal",
func() *http.Request { r, _ := http.NewRequest("GET", "https://example.com/", nil); return r }(),
[]Option{},
},
{
"large request",
func() *http.Request {
r, _ := http.NewRequest("POST", "https://example.com/", strings.NewReader(strings.Repeat("t", 10)))
return r
}(),
[]Option{WithRequestLogThreshold(1)},
},
{
"large response",
func() *http.Request { r, _ := http.NewRequest("GET", "https://example.com/", nil); return r }(),
[]Option{WithResponseLogThreshold(1)},
},
{
"error",
func() *http.Request { r, _ := http.NewRequest("GET", "https://non-exist-domain.com/", nil); return r }(),
[]Option{},
},
}
for _, c := range cases {
c := c
t.Run(c.name, func(t *testing.T) {
t.Parallel()
tracer := mocktracer.New()
client := NewClient(tracer, c.Option...)
resp, err := client.Do(c.request)
if err != nil {
assert.True(t, tracer.FinishedSpans()[0].Tags()["error"].(bool))
return
}
defer resp.Body.Close()
assert.NotEmpty(t, tracer.FinishedSpans())
byt, _ := ioutil.ReadAll(resp.Body)
assert.Len(t, byt, 1256)
})
}
} | explode_data.jsonl/29963 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 556
} | [
2830,
3393,
2959,
93481,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
1444,
2264,
1669,
3056,
1235,
341,
197,
11609,
262,
914,
198,
197,
23555,
353,
1254,
9659,
198,
197,
197,
5341,
220,
3056,
5341,
198,
197,
59403,
197,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGoMagic(t *testing.T) {
now, _ := time.Parse(time.RFC3339, "2017-02-01T16:06:19+08:00")
tests := []struct {
data string
exp string
}{
{
data: "select x@(MM)@(DD) from dbtable@(hh)-@(mm)",
exp: "select x0201 from dbtable16-06",
},
{
data: "select x@(M)@(D) from dbtable@(h)-@(m)",
exp: "select x21 from dbtable16-6",
},
{
data: "@(YY)",
exp: "17",
},
{
data: "@(YYYY)@(MM)",
exp: "201702",
},
{
data: "hhhhh",
exp: "hhhhh",
},
}
for _, ti := range tests {
got := goMagic(ti.data, now)
assert.EqualValues(t, ti.exp, got)
}
} | explode_data.jsonl/61888 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 316
} | [
2830,
3393,
10850,
43538,
1155,
353,
8840,
836,
8,
341,
80922,
11,
716,
1669,
882,
8937,
9730,
2013,
6754,
18,
18,
18,
24,
11,
330,
17,
15,
16,
22,
12,
15,
17,
12,
15,
16,
51,
16,
21,
25,
15,
21,
25,
16,
24,
10,
15,
23,
25... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestParseCertificateWithRSAESOAEPPublicKey(t *testing.T) {
wantKey := &rsa.PublicKey{
E: 65537,
N: bigFromHexString("8e7983105063a3f1c2e77d7c8b4f411db9c76f11366ccbb11757001fa51805bbbf68b6dccd9ad28a82c6a831e1591f06181c2e328c261cf9a14ff1704dc3261cc16da751760141969330a75b3fc5ae185ac76d636a97a339838bd042aa7c5999f63f946c6987b0e8be8f5908d08563f62bc6ee9510e36752bd3b2e7b55281bc92a8dcc8ba8a30d6aaa7580b672d83802449d34bfea0434edea4dbc3a9b201f3de0bf5ab2ca96a5254f4e76a58adc8d3bc385be94e93e0052e4066f238a9c1195eaacda02a6dc78393063340054e3ce99754a8770c8efcca45ad8fc999f7aaa67a8a83960141e5f4b892d3af333f09b6d13e60900e0ea8bd3b5eea30f4f2b889b"),
}
der, _ := pem.Decode([]byte(oaepCertPEM))
if der == nil {
t.Fatalf("Failed to decode PEM cert")
}
cert, err := ParseCertificate(der.Bytes)
if err != nil {
t.Fatalf("Failed to parse certificate: %s", err)
}
if cert.PublicKeyAlgorithm != RSAESOAEP {
t.Errorf("Parsed key algorithm was not RSAESOAEP")
}
parsedKey, ok := cert.PublicKey.(*rsa.PublicKey)
if !ok {
t.Fatalf("Parsed key was not an RSA key: %s", err)
}
if wantKey.E != parsedKey.E ||
wantKey.N.Cmp(parsedKey.N) != 0 {
t.Fatal("Parsed key differs from expected key")
}
} | explode_data.jsonl/67990 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 609
} | [
2830,
3393,
14463,
33202,
2354,
73564,
1570,
46,
13669,
4406,
475,
1592,
1155,
353,
8840,
836,
8,
341,
50780,
1592,
1669,
609,
60869,
49139,
1592,
515,
197,
22784,
25,
220,
21,
20,
20,
18,
22,
345,
197,
18317,
25,
2409,
3830,
49137,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestArgs(t *testing.T) {
c := testConfig(t)
fun := c.Fun("entry",
Bloc("entry",
Valu("a", OpConst64, c.config.Types.Int64, 14, nil),
Valu("b", OpConst64, c.config.Types.Int64, 26, nil),
Valu("sum", OpAdd64, c.config.Types.Int64, 0, nil, "a", "b"),
Valu("mem", OpInitMem, types.TypeMem, 0, nil),
Goto("exit")),
Bloc("exit",
Exit("mem")))
sum := fun.values["sum"]
for i, name := range []string{"a", "b"} {
if sum.Args[i] != fun.values[name] {
t.Errorf("arg %d for sum is incorrect: want %s, got %s",
i, sum.Args[i], fun.values[name])
}
}
} | explode_data.jsonl/66552 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 269
} | [
2830,
3393,
4117,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
1273,
2648,
1155,
340,
90126,
1669,
272,
991,
359,
445,
4085,
756,
197,
12791,
1074,
445,
4085,
756,
298,
197,
2208,
84,
445,
64,
497,
10672,
19167,
21,
19,
11,
272,
5423,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestWalk(t *testing.T) {
// The root of the walk must be a name with an actual basename, not just ".".
// Walk uses Lstat to obtain the name of the root, and Lstat on platforms
// other than Plan 9 reports the name "." instead of the actual base name of
// the directory. (See https://golang.org/issue/42115.)
type file struct {
path string
name string
size int64
mode fs.FileMode
isDir bool
}
testCases := []struct {
name string
overlay string
root string
wantFiles []file
}{
{"no overlay", `
{}
-- dir/file.txt --
`,
"dir",
[]file{
{"dir", "dir", 0, fs.ModeDir | 0700, true},
{"dir/file.txt", "file.txt", 0, 0600, false},
},
},
{"overlay with different file", `
{
"Replace": {
"dir/file.txt": "dir/other.txt"
}
}
-- dir/file.txt --
-- dir/other.txt --
contents of other file
`,
"dir",
[]file{
{"dir", "dir", 0, fs.ModeDir | 0500, true},
{"dir/file.txt", "file.txt", 23, 0600, false},
{"dir/other.txt", "other.txt", 23, 0600, false},
},
},
{"overlay with new file", `
{
"Replace": {
"dir/file.txt": "dir/other.txt"
}
}
-- dir/other.txt --
contents of other file
`,
"dir",
[]file{
{"dir", "dir", 0, fs.ModeDir | 0500, true},
{"dir/file.txt", "file.txt", 23, 0600, false},
{"dir/other.txt", "other.txt", 23, 0600, false},
},
},
{"overlay with new directory", `
{
"Replace": {
"dir/subdir/file.txt": "dir/other.txt"
}
}
-- dir/other.txt --
contents of other file
`,
"dir",
[]file{
{"dir", "dir", 0, fs.ModeDir | 0500, true},
{"dir/other.txt", "other.txt", 23, 0600, false},
{"dir/subdir", "subdir", 0, fs.ModeDir | 0500, true},
{"dir/subdir/file.txt", "file.txt", 23, 0600, false},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
initOverlay(t, tc.overlay)
var got []file
Walk(tc.root, func(path string, info fs.FileInfo, err error) error {
got = append(got, file{path, info.Name(), info.Size(), info.Mode(), info.IsDir()})
return nil
})
if len(got) != len(tc.wantFiles) {
t.Errorf("Walk: saw %#v in walk; want %#v", got, tc.wantFiles)
}
for i := 0; i < len(got) && i < len(tc.wantFiles); i++ {
wantPath := filepath.FromSlash(tc.wantFiles[i].path)
if got[i].path != wantPath {
t.Errorf("path of file #%v in walk, got %q, want %q", i, got[i].path, wantPath)
}
if got[i].name != tc.wantFiles[i].name {
t.Errorf("name of file #%v in walk, got %q, want %q", i, got[i].name, tc.wantFiles[i].name)
}
if got[i].mode&(fs.ModeDir|0700) != tc.wantFiles[i].mode {
t.Errorf("mode&(fs.ModeDir|0700) for mode of file #%v in walk, got %v, want %v", i, got[i].mode&(fs.ModeDir|0700), tc.wantFiles[i].mode)
}
if got[i].isDir != tc.wantFiles[i].isDir {
t.Errorf("isDir for file #%v in walk, got %v, want %v", i, got[i].isDir, tc.wantFiles[i].isDir)
}
if tc.wantFiles[i].isDir {
continue // don't check size for directories
}
if got[i].size != tc.wantFiles[i].size {
t.Errorf("size of file #%v in walk, got %v, want %v", i, got[i].size, tc.wantFiles[i].size)
}
}
})
}
} | explode_data.jsonl/56052 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1441
} | [
2830,
3393,
48849,
1155,
353,
8840,
836,
8,
341,
197,
322,
576,
3704,
315,
279,
4227,
1969,
387,
264,
829,
448,
458,
5042,
38196,
11,
537,
1101,
22760,
624,
197,
322,
12554,
5711,
444,
9878,
311,
6851,
279,
829,
315,
279,
3704,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRest_DeleteMe(t *testing.T) {
ts, srv, teardown := startupT(t)
defer teardown()
client := http.Client{}
req, err := http.NewRequest(http.MethodPost, fmt.Sprintf("%s/api/v1/deleteme?site=remark42", ts.URL), nil)
assert.NoError(t, err)
req.Header.Add("X-JWT", devToken)
resp, err := client.Do(req)
assert.NoError(t, err)
assert.Equal(t, 200, resp.StatusCode)
body, err := ioutil.ReadAll(resp.Body)
assert.NoError(t, resp.Body.Close())
assert.NoError(t, err)
m := map[string]string{}
err = json.Unmarshal(body, &m)
assert.NoError(t, err)
assert.Equal(t, "remark42", m["site"])
assert.Equal(t, "dev", m["user_id"])
tkn := m["token"]
claims, err := srv.Authenticator.TokenService().Parse(tkn)
assert.NoError(t, err)
assert.Equal(t, "dev", claims.User.ID)
assert.Equal(t, "https://demo.remark42.com/web/deleteme.html?token="+tkn, m["link"])
req, err = http.NewRequest(http.MethodPost, fmt.Sprintf("%s/api/v1/deleteme?site=remark42", ts.URL), nil)
assert.NoError(t, err)
resp, err = client.Do(req)
assert.NoError(t, err)
assert.Equal(t, 401, resp.StatusCode)
assert.NoError(t, resp.Body.Close())
} | explode_data.jsonl/37404 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 489
} | [
2830,
3393,
12416,
57418,
7823,
1155,
353,
8840,
836,
8,
341,
57441,
11,
43578,
11,
49304,
1669,
20567,
51,
1155,
340,
16867,
49304,
2822,
25291,
1669,
1758,
11716,
16094,
24395,
11,
1848,
1669,
1758,
75274,
19886,
20798,
4133,
11,
8879,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBhattacharyya(t *testing.T) {
for i, test := range []struct {
p []float64
q []float64
res float64
}{
{
p: []float64{0.5, 0.1, 0.3, 0.1},
q: []float64{0.1, 0.4, 0.25, 0.25},
res: 0.15597338718671386,
},
{
p: []float64{0.4, 0.6, 0.0},
q: []float64{0.2, 0.2, 0.6},
res: 0.46322207765351153,
},
{
p: []float64{0.1, 0.1, 0.0, 0.8},
q: []float64{0.6, 0.3, 0.0, 0.1},
res: 0.3552520032137785,
},
} {
resultpq := Bhattacharyya(test.p, test.q)
resultqp := Bhattacharyya(test.q, test.p)
if math.Abs(resultpq-test.res) > 1e-10 {
t.Errorf("Bhattacharyya distance mismatch in case %d. Expected %v, Found %v", i, test.res, resultpq)
}
if math.Abs(resultpq-resultqp) > 1e-10 {
t.Errorf("Bhattacharyya distance is assymmetric in case %d.", i)
}
}
// Bhattacharyya should panic if the inputs have different length
if !panics(func() { Bhattacharyya(make([]float64, 2), make([]float64, 3)) }) {
t.Errorf("Bhattacharyya did not panic with length mismatch")
}
} | explode_data.jsonl/1773 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 529
} | [
2830,
3393,
33,
71,
16330,
658,
7755,
1155,
353,
8840,
836,
8,
341,
2023,
600,
11,
1273,
1669,
2088,
3056,
1235,
341,
197,
3223,
256,
3056,
3649,
21,
19,
198,
197,
18534,
256,
3056,
3649,
21,
19,
198,
197,
10202,
2224,
21,
19,
198... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSplitCodec(t *testing.T) {
assert.NotPanics(t, func() {
q := new(query)
q.Begin = []byte("ABC")
id := q.Encode()
assert.Equal(t, []byte{0x3, 0x41, 0x42, 0x43, 0x0, 0x0}, id)
out, err := decodeQuery(id)
assert.NoError(t, err)
assert.Equal(t, []byte("ABC"), out.Begin)
})
} | explode_data.jsonl/74842 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 144
} | [
2830,
3393,
20193,
36913,
1155,
353,
8840,
836,
8,
341,
6948,
15000,
35693,
1211,
1155,
11,
2915,
368,
341,
197,
18534,
1669,
501,
10741,
340,
197,
18534,
28467,
284,
3056,
3782,
445,
25411,
5130,
197,
15710,
1669,
2804,
50217,
741,
197... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGetProfileRefreshSettings(t *testing.T) {
cfg := verifyGetCfg
refresh, interval := getProfileRefreshSettings(cfg)
assert.Equal(t, true, refresh)
assert.Equal(t, 5*time.Minute, interval)
cfg.RefreshInterval = schema.ProfileRefreshDisabled
refresh, interval = getProfileRefreshSettings(cfg)
assert.Equal(t, false, refresh)
assert.Equal(t, time.Duration(0), interval)
cfg.RefreshInterval = schema.ProfileRefreshAlways
refresh, interval = getProfileRefreshSettings(cfg)
assert.Equal(t, true, refresh)
assert.Equal(t, time.Duration(0), interval)
} | explode_data.jsonl/20208 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 192
} | [
2830,
3393,
1949,
8526,
14567,
6086,
1155,
353,
8840,
836,
8,
341,
50286,
1669,
10146,
1949,
42467,
271,
197,
17168,
11,
9873,
1669,
633,
8526,
14567,
6086,
28272,
692,
6948,
12808,
1155,
11,
830,
11,
10408,
340,
6948,
12808,
1155,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSampler(t *testing.T) {
for _, lvl := range []Level{DebugLevel, InfoLevel, WarnLevel, ErrorLevel, DPanicLevel, PanicLevel, FatalLevel} {
sampler, logs := fakeSampler(DebugLevel, time.Minute, 2, 3)
// Ensure that counts aren't shared between levels.
probeLevel := DebugLevel
if lvl == DebugLevel {
probeLevel = InfoLevel
}
for i := 0; i < 10; i++ {
writeSequence(sampler, 1, probeLevel)
}
// Clear any output.
logs.TakeAll()
for i := 1; i < 10; i++ {
writeSequence(sampler, i, lvl)
}
assertSequence(t, logs.TakeAll(), lvl, 1, 2, 5, 8)
}
} | explode_data.jsonl/39112 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 231
} | [
2830,
3393,
66048,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
46129,
1669,
2088,
3056,
4449,
90,
7939,
4449,
11,
13074,
4449,
11,
67746,
4449,
11,
4600,
4449,
11,
31757,
31270,
4449,
11,
83740,
4449,
11,
65629,
4449,
92,
341,
197,
1903... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func Test_procSubscribePush_pushSupport(t *testing.T) {
chain, mock33 := createBlockChainWithFalgSet(t, false, false)
defer mock33.Close()
subscribe := new(types.PushSubscribeReq)
err := chain.procSubscribePush(subscribe)
assert.Equal(t, types.ErrPushNotSupport, err)
} | explode_data.jsonl/61710 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 95
} | [
2830,
3393,
24436,
28573,
16644,
14218,
7916,
1155,
353,
8840,
836,
8,
341,
197,
8819,
11,
7860,
18,
18,
1669,
1855,
4713,
18837,
2354,
37,
23881,
1649,
1155,
11,
895,
11,
895,
340,
16867,
7860,
18,
18,
10421,
741,
28624,
6273,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFindIssueByIDOtherErr(t *testing.T) {
i := domain.Issue{
ID: 1,
Title: "test-title",
Description: "test-description",
Status: 1,
ProjectID: 1,
}
cucm, iucm, lucm, pucm, m := prepareMocksAndRUC()
iucm.On("FindByID", i.ID).Return(i, errors.New("test error"))
c, _ := prepareHTTP(echo.GET, "/api/issues/:id", nil)
c.SetParamNames("id")
c.SetParamValues("1")
err := m.FindIssueByID(c)
assert.NotNil(t, err)
assert.Equal(t, "test error", err.Error())
checkAssertions(t, cucm, iucm, lucm, pucm)
} | explode_data.jsonl/60169 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 255
} | [
2830,
3393,
9885,
42006,
60572,
11409,
7747,
1155,
353,
8840,
836,
8,
341,
8230,
1669,
7947,
2447,
83890,
515,
197,
29580,
25,
688,
220,
16,
345,
197,
92233,
25,
981,
330,
1944,
8816,
756,
197,
47414,
25,
330,
1944,
42830,
756,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAttrsAppendLD(t *testing.T) {
c := setupTest([]string{"append", "attrs", "--host", "orion-ld", "--id", "urn:ngsi-ld:Product:010", "--data", "{\"specialOffer\":{\"value\": true}}", "--context", "[\"http://context\"]"})
reqRes := helper.MockHTTPReqRes{}
reqRes.Res.StatusCode = http.StatusNoContent
reqRes.ReqData = []byte(`{"@context":["http://context"],"specialOffer":{"value":true}}`)
reqRes.Path = "/ngsi-ld/v1/entities/urn:ngsi-ld:Product:010/attrs"
helper.SetClientHTTP(c, reqRes)
err := attrsAppend(c, c.Ngsi, c.Client)
assert.NoError(t, err)
} | explode_data.jsonl/33065 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 229
} | [
2830,
3393,
53671,
23877,
12335,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
6505,
2271,
10556,
917,
4913,
5090,
497,
330,
20468,
497,
14482,
3790,
497,
330,
269,
290,
12,
507,
497,
14482,
307,
497,
330,
399,
25,
968,
6321,
12,
507,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCheckCSRFToken(t *testing.T) {
t.Run("should allow a POST request with a valid CSRF token header", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
h := &Handler{
RequireSession: true,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationCookie
c := &Context{
App: th.App,
}
r, _ := http.NewRequest(http.MethodPost, "", nil)
r.Header.Set(model.HEADER_CSRF_TOKEN, token)
session := &model.Session{
Props: map[string]string{
"csrf": token,
},
}
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, session)
assert.True(t, checked)
assert.True(t, passed)
assert.Nil(t, c.Err)
})
t.Run("should allow a POST request with an X-Requested-With header", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
h := &Handler{
RequireSession: true,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationCookie
c := &Context{
App: th.App,
Log: th.App.Log(),
}
r, _ := http.NewRequest(http.MethodPost, "", nil)
r.Header.Set(model.HEADER_REQUESTED_WITH, model.HEADER_REQUESTED_WITH_XML)
session := &model.Session{
Props: map[string]string{
"csrf": token,
},
}
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, session)
assert.True(t, checked)
assert.True(t, passed)
assert.Nil(t, c.Err)
})
t.Run("should not allow a POST request with an X-Requested-With header with strict CSRF enforcement enabled", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
mockStore := th.App.Srv().Store.(*mocks.Store)
mockUserStore := mocks.UserStore{}
mockUserStore.On("Count", mock.Anything).Return(int64(10), nil)
mockPostStore := mocks.PostStore{}
mockPostStore.On("GetMaxPostSize").Return(65535, nil)
mockSystemStore := mocks.SystemStore{}
mockSystemStore.On("GetByName", "UpgradedFromTE").Return(&model.System{Name: "UpgradedFromTE", Value: "false"}, nil)
mockSystemStore.On("GetByName", "InstallationDate").Return(&model.System{Name: "InstallationDate", Value: "10"}, nil)
mockSystemStore.On("GetByName", "FirstServerRunTimestamp").Return(&model.System{Name: "FirstServerRunTimestamp", Value: "10"}, nil)
mockStore.On("User").Return(&mockUserStore)
mockStore.On("Post").Return(&mockPostStore)
mockStore.On("System").Return(&mockSystemStore)
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ServiceSettings.ExperimentalStrictCSRFEnforcement = true
})
h := &Handler{
RequireSession: true,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationCookie
c := &Context{
App: th.App,
Log: th.App.Log(),
}
r, _ := http.NewRequest(http.MethodPost, "", nil)
r.Header.Set(model.HEADER_REQUESTED_WITH, model.HEADER_REQUESTED_WITH_XML)
session := &model.Session{
Props: map[string]string{
"csrf": token,
},
}
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, session)
assert.True(t, checked)
assert.False(t, passed)
assert.NotNil(t, c.Err)
})
t.Run("should not allow a POST request without either header", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
h := &Handler{
RequireSession: true,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationCookie
c := &Context{
App: th.App,
}
r, _ := http.NewRequest(http.MethodPost, "", nil)
session := &model.Session{
Props: map[string]string{
"csrf": token,
},
}
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, session)
assert.True(t, checked)
assert.False(t, passed)
assert.NotNil(t, c.Err)
})
t.Run("should not check GET requests", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
h := &Handler{
RequireSession: true,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationCookie
c := &Context{
App: th.App,
}
r, _ := http.NewRequest(http.MethodGet, "", nil)
session := &model.Session{
Props: map[string]string{
"csrf": token,
},
}
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, session)
assert.False(t, checked)
assert.False(t, passed)
assert.Nil(t, c.Err)
})
t.Run("should not check a request passing the auth token in a header", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
h := &Handler{
RequireSession: true,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationHeader
c := &Context{
App: th.App,
}
r, _ := http.NewRequest(http.MethodPost, "", nil)
session := &model.Session{
Props: map[string]string{
"csrf": token,
},
}
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, session)
assert.False(t, checked)
assert.False(t, passed)
assert.Nil(t, c.Err)
})
t.Run("should not check a request passing a nil session", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
h := &Handler{
RequireSession: false,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationCookie
c := &Context{
App: th.App,
}
r, _ := http.NewRequest(http.MethodPost, "", nil)
r.Header.Set(model.HEADER_CSRF_TOKEN, token)
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, nil)
assert.False(t, checked)
assert.False(t, passed)
assert.Nil(t, c.Err)
})
t.Run("should check requests for handlers that don't require a session but have one", func(t *testing.T) {
th := SetupWithStoreMock(t)
defer th.TearDown()
h := &Handler{
RequireSession: false,
TrustRequester: false,
}
token := "token"
tokenLocation := app.TokenLocationCookie
c := &Context{
App: th.App,
}
r, _ := http.NewRequest(http.MethodPost, "", nil)
r.Header.Set(model.HEADER_CSRF_TOKEN, token)
session := &model.Session{
Props: map[string]string{
"csrf": token,
},
}
checked, passed := h.checkCSRFToken(c, r, token, tokenLocation, session)
assert.True(t, checked)
assert.True(t, passed)
assert.Nil(t, c.Err)
})
} | explode_data.jsonl/11787 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2518
} | [
2830,
3393,
3973,
6412,
17612,
3323,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
5445,
2138,
264,
12869,
1681,
448,
264,
2697,
78595,
3950,
4247,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
70479,
1669,
18626,
2354,
6093,
11571,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestJsonEncodable(t *testing.T) {
var s customJsonEncodable
vm := New()
vm.Set("s", &s)
ret, err := vm.RunString("JSON.stringify(s)")
if err != nil {
t.Fatal(err)
}
if !ret.StrictEquals(vm.ToValue("\"Test\"")) {
t.Fatalf("Expected \"Test\", got: %v", ret)
}
} | explode_data.jsonl/10489 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 127
} | [
2830,
3393,
5014,
7408,
69129,
1155,
353,
8840,
836,
8,
341,
2405,
274,
2526,
5014,
7408,
69129,
271,
54879,
1669,
1532,
741,
54879,
4202,
445,
82,
497,
609,
82,
692,
11262,
11,
1848,
1669,
10995,
16708,
703,
445,
5370,
10052,
1141,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestACL_CloneDestroy(t *testing.T) {
if CONSUL_ROOT == "" {
t.SkipNow()
}
c := makeClient(t)
c.config.Token = CONSUL_ROOT
acl := c.ACL()
id, wm, err := acl.Clone(CONSUL_ROOT, nil)
if err != nil {
t.Fatalf("err: %v", err)
}
if wm.RequestTime == 0 {
t.Fatalf("bad: %v", wm)
}
if id == "" {
t.Fatalf("invalid: %v", id)
}
wm, err = acl.Destroy(id, nil)
if err != nil {
t.Fatalf("err: %v", err)
}
if wm.RequestTime == 0 {
t.Fatalf("bad: %v", wm)
}
} | explode_data.jsonl/44593 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 246
} | [
2830,
3393,
55393,
85110,
603,
14245,
1155,
353,
8840,
836,
8,
341,
743,
72906,
1094,
16197,
621,
1591,
341,
197,
3244,
57776,
7039,
741,
197,
532,
1444,
1669,
1281,
2959,
1155,
340,
1444,
5423,
32277,
284,
72906,
1094,
16197,
198,
1132... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestThrottler_CreateStoreRecord(t *testing.T) {
throttleID, throttlePeriod, store := setup()
store.On("Get", throttleID).Return("", false, time.Millisecond*10, nil)
store.On("Set", throttleID, "1", throttlePeriod).Return(true, nil)
subject := throttles.NewThrottler(store)
result, duration, err := subject.CanTrigger(throttleID, throttlePeriod)
assert.True(t, result)
assert.Equal(t, time.Duration(0), duration)
assert.Nil(t, err)
store.AssertCalled(t, "Get", throttleID)
store.AssertCalled(t, "Set", throttleID, "1", throttlePeriod)
} | explode_data.jsonl/70680 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 199
} | [
2830,
3393,
1001,
46689,
1536,
34325,
6093,
6471,
1155,
353,
8840,
836,
8,
341,
70479,
27535,
915,
11,
42166,
23750,
11,
3553,
1669,
6505,
2822,
57279,
8071,
445,
1949,
497,
42166,
915,
568,
5598,
19814,
895,
11,
882,
71482,
9,
16,
15... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSet(t *testing.T) {
w := httptest.NewRecorder()
recordRequest := NewResponseRecorder(w)
reader := strings.NewReader(`{"username": "dennis"}`)
request, err := http.NewRequest("POST", "http://localhost", reader)
if err != nil {
t.Fatalf("Request Formation Failed: %s\n", err.Error())
}
repl := NewReplacer(request, recordRequest, "")
repl.Set("host", "getcaddy.com")
repl.Set("method", "GET")
repl.Set("status", "201")
repl.Set("variable", "value")
if repl.Replace("This host is {host}") != "This host is getcaddy.com" {
t.Error("Expected host replacement failed")
}
if repl.Replace("This request method is {method}") != "This request method is GET" {
t.Error("Expected method replacement failed")
}
if repl.Replace("The response status is {status}") != "The response status is 201" {
t.Error("Expected status replacement failed")
}
if repl.Replace("The value of variable is {variable}") != "The value of variable is value" {
t.Error("Expected variable replacement failed")
}
} | explode_data.jsonl/29032 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 338
} | [
2830,
3393,
1649,
1155,
353,
8840,
836,
8,
341,
6692,
1669,
54320,
70334,
7121,
47023,
741,
71952,
1900,
1669,
1532,
2582,
47023,
3622,
340,
61477,
1669,
9069,
68587,
5809,
4913,
5113,
788,
330,
67,
16156,
1,
5541,
692,
23555,
11,
1848,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestGenerateModel_WithCustomTag(t *testing.T) {
specDoc, err := loads.Spec("../fixtures/codegen/todolist.models.yml")
require.NoError(t, err)
definitions := specDoc.Spec().Definitions
k := "WithCustomTag"
schema := definitions[k]
opts := opts()
genModel, err := makeGenDefinition(k, "models", schema, specDoc, opts)
require.NoError(t, err)
buf := bytes.NewBuffer(nil)
require.NoError(t, opts.templates.MustGet("model").Execute(buf, genModel))
assertInCode(t, "mytag:\"foo,bar\"", buf.String())
} | explode_data.jsonl/2504 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 192
} | [
2830,
3393,
31115,
1712,
62,
2354,
10268,
5668,
1155,
353,
8840,
836,
8,
341,
98100,
9550,
11,
1848,
1669,
20907,
36473,
17409,
45247,
46928,
4370,
5523,
347,
34675,
8235,
33936,
1138,
17957,
35699,
1155,
11,
1848,
692,
7452,
4054,
82,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestListRecipientTransferSchedules(t *testing.T) {
client := testutil.NewFixedClient(t)
var schds omise.ScheduleList
client.MustDo(&schds, &ListRecipientTransferSchedules{
RecipientID: "recp_test_50894vc13y8z4v51iuc",
})
r.Len(t, schds.Data, 1)
r.Equal(t, "schd_57zhl296uxc7yiun6xx", schds.Data[0].ID)
r.NotNil(t, schds.Data[0].Transfer)
r.Nil(t, schds.Data[0].Charge)
} | explode_data.jsonl/19942 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 178
} | [
2830,
3393,
852,
74432,
21970,
50,
49613,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
1273,
1314,
7121,
13520,
2959,
1155,
340,
2405,
5699,
5356,
7861,
1064,
87081,
852,
198,
25291,
50463,
5404,
2099,
21062,
5356,
11,
609,
852,
74432,
21... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestProjectDownloadChunk_overdriveStatus(t *testing.T) {
t.Parallel()
now := time.Now()
pcws := new(projectChunkWorkerSet)
pcws.staticErasureCoder = modules.NewRSCodeDefault()
pdc := new(projectDownloadChunk)
pdc.workerSet = pcws
pdc.availablePieces = [][]*pieceDownload{
{
{expectedCompleteTime: now.Add(-1 * time.Minute)},
{expectedCompleteTime: now.Add(-3 * time.Minute)},
},
{
{expectedCompleteTime: now.Add(-2 * time.Minute)},
},
}
// verify we return the correct amount of overdrive workers that need to be
// launched if no pieces have launched yet, also verify last return time
toLaunch, returnTime := pdc.overdriveStatus()
if toLaunch != modules.RenterDefaultDataPieces {
t.Fatal("unexpected")
}
if returnTime != (time.Time{}) {
t.Fatal("unexpected", returnTime)
}
// launch a piece and verify we get 1 worker to launch due to the return
// time being in the past
pdc.availablePieces[0][0].launched = true
toLaunch, returnTime = pdc.overdriveStatus()
if toLaunch != 1 {
t.Fatal("unexpected")
}
if returnTime != now.Add(-1*time.Minute) {
t.Fatal("unexpected")
}
// add a piecedownload that returns somewhere in the future
pdc.availablePieces[1] = append(pdc.availablePieces[1], &pieceDownload{
launched: true,
expectedCompleteTime: now.Add(time.Minute),
})
toLaunch, returnTime = pdc.overdriveStatus()
if toLaunch != 0 {
t.Fatal("unexpected")
}
if returnTime != now.Add(time.Minute) {
t.Fatal("unexpected")
}
} | explode_data.jsonl/32996 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 555
} | [
2830,
3393,
7849,
11377,
28304,
15431,
31967,
2522,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
80922,
1669,
882,
13244,
2822,
82013,
8915,
1669,
501,
21168,
28304,
21936,
1649,
340,
82013,
8915,
26181,
19462,
3970,
35290,
284,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestClientCertificateCredential_GetTokenInvalidCredentials(t *testing.T) {
srv, close := mock.NewTLSServer()
defer close()
srv.SetResponse(mock.WithStatusCode(http.StatusUnauthorized))
options := ClientCertificateCredentialOptions{}
options.AuthorityHost = srv.URL()
options.HTTPClient = srv
cred, err := NewClientCertificateCredential(tenantID, clientID, certificatePath, &options)
if err != nil {
t.Fatalf("Did not expect an error but received one: %v", err)
}
_, err = cred.GetToken(context.Background(), policy.TokenRequestOptions{Scopes: []string{scope}})
if err == nil {
t.Fatalf("Expected to receive a nil error, but received: %v", err)
}
var authFailed *AuthenticationFailedError
if !errors.As(err, &authFailed) {
t.Fatalf("Expected: AuthenticationFailedError, Received: %T", err)
}
} | explode_data.jsonl/26258 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 281
} | [
2830,
3393,
2959,
33202,
48265,
13614,
3323,
7928,
27025,
1155,
353,
8840,
836,
8,
341,
1903,
10553,
11,
3265,
1669,
7860,
7121,
13470,
1220,
2836,
741,
16867,
3265,
741,
1903,
10553,
4202,
2582,
30389,
26124,
15872,
19886,
10538,
51181,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestRestoreRealIP(t *testing.T) {
h := func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(r.RemoteAddr))
}
handler := RestoreRealIP(http.HandlerFunc(h))
rr := httptest.NewRecorder()
r := httptest.NewRequest(http.MethodGet, "/", nil)
r.RemoteAddr = "8.8.8.8"
r.Header.Set("CF-Connecting-IP", "1.1.1.1")
handler.ServeHTTP(rr, r)
assert.Equal(t, "1.1.1.1", rr.Body.String())
} | explode_data.jsonl/44436 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 180
} | [
2830,
3393,
56284,
12768,
3298,
1155,
353,
8840,
836,
8,
341,
9598,
1669,
2915,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
341,
197,
6692,
4073,
10556,
3782,
2601,
51434,
13986,
1171,
197,
532,
53326,
1669,
43820,
12768,
3298,
1988... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCreate(t *testing.T) {
wantRandom := "i-want-random-data-not-this-special-string"
type args struct {
ctx context.Context
mg resource.Managed
}
type want struct {
mg resource.Managed
cre managed.ExternalCreation
err error
}
cases := map[string]struct {
handler http.Handler
kube client.Client
args args
want want
}{
"Successful": {
handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if diff := cmp.Diff(http.MethodPost, r.Method); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
i := &container.Cluster{}
b, err := ioutil.ReadAll(r.Body)
if diff := cmp.Diff(err, nil); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
err = json.Unmarshal(b, i)
if diff := cmp.Diff(err, nil); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
w.WriteHeader(http.StatusOK)
_ = r.Body.Close()
_ = json.NewEncoder(w).Encode(&container.Operation{})
}),
args: args{
mg: cluster(),
},
want: want{
mg: cluster(withConditions(xpv1.Creating())),
cre: managed.ExternalCreation{ConnectionDetails: managed.ConnectionDetails{
xpv1.ResourceCredentialsSecretPasswordKey: []byte(wantRandom),
}},
err: nil,
},
},
"SuccessfulSkipCreate": {
handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if diff := cmp.Diff(http.MethodPost, r.Method); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
i := &container.Cluster{}
b, err := ioutil.ReadAll(r.Body)
if diff := cmp.Diff(err, nil); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
err = json.Unmarshal(b, i)
if diff := cmp.Diff(err, nil); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
// Return bad request for create to demonstrate that
// http call is never made.
w.WriteHeader(http.StatusBadRequest)
_ = r.Body.Close()
_ = json.NewEncoder(w).Encode(&container.Operation{})
}),
args: args{
mg: cluster(withProviderStatus(v1beta2.ClusterStateProvisioning)),
},
want: want{
mg: cluster(
withConditions(xpv1.Creating()),
withProviderStatus(v1beta2.ClusterStateProvisioning),
),
cre: managed.ExternalCreation{},
err: nil,
},
},
"AlreadyExists": {
handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
_ = r.Body.Close()
if diff := cmp.Diff(http.MethodPost, r.Method); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
w.WriteHeader(http.StatusConflict)
_ = json.NewEncoder(w).Encode(&container.Operation{})
}),
args: args{
mg: cluster(),
},
want: want{
mg: cluster(withConditions(xpv1.Creating())),
err: errors.Wrap(gError(http.StatusConflict, ""), errCreateCluster),
},
},
"Failed": {
handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
_ = r.Body.Close()
if diff := cmp.Diff(http.MethodPost, r.Method); diff != "" {
t.Errorf("r: -want, +got:\n%s", diff)
}
w.WriteHeader(http.StatusBadRequest)
_ = json.NewEncoder(w).Encode(&container.Operation{})
}),
args: args{
mg: cluster(),
},
want: want{
mg: cluster(withConditions(xpv1.Creating())),
err: errors.Wrap(gError(http.StatusBadRequest, ""), errCreateCluster),
},
},
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
server := httptest.NewServer(tc.handler)
defer server.Close()
s, _ := container.NewService(context.Background(), option.WithEndpoint(server.URL), option.WithoutAuthentication())
e := clusterExternal{
kube: tc.kube,
projectID: projectID,
cluster: s,
}
_, err := e.Create(tc.args.ctx, tc.args.mg)
if tc.want.err != nil && err != nil {
// the case where our mock server returns error.
if diff := cmp.Diff(tc.want.err.Error(), err.Error()); diff != "" {
t.Errorf("Create(...): -want, +got:\n%s", diff)
}
} else {
if diff := cmp.Diff(tc.want.err, err); diff != "" {
t.Errorf("Create(...): -want, +got:\n%s", diff)
}
}
if diff := cmp.Diff(tc.want.mg, tc.args.mg); diff != "" {
t.Errorf("Create(...): -want, +got:\n%s", diff)
}
})
}
} | explode_data.jsonl/69614 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1907
} | [
2830,
3393,
4021,
1155,
353,
8840,
836,
8,
341,
50780,
13999,
1669,
330,
72,
2630,
517,
91675,
13945,
29169,
60191,
92248,
30881,
1837,
13158,
2827,
2036,
341,
197,
20985,
2266,
9328,
198,
197,
2109,
70,
220,
5101,
29902,
3279,
198,
197... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestListNeverRateLimiter(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
vmListErr := &retry.Error{
RawError: fmt.Errorf("azure cloud provider rate limited(%s) for operation %q", "read", "VMList"),
Retriable: true,
}
armClient := mockarmclient.NewMockInterface(ctrl)
vmClient := getTestVMClientWithNeverRateLimiter(armClient)
result, rerr := vmClient.List(context.TODO(), "rg")
assert.Equal(t, 0, len(result))
assert.NotNil(t, rerr)
assert.Equal(t, vmListErr, rerr)
} | explode_data.jsonl/16692 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 199
} | [
2830,
3393,
852,
26155,
11564,
43,
17700,
1155,
353,
8840,
836,
8,
341,
84381,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
23743,
991,
18176,
2822,
54879,
852,
7747,
1669,
609,
44848,
6141,
515,
197,
11143,
672,
1454,
25,
220,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFindListenerByName(t *testing.T) {
setup()
defer tearDown()
addrStr := "127.0.0.1:8083"
name := "listener4"
cfg := baseListenerConfig(addrStr, name)
if ln := GetListenerAdapterInstance().FindListenerByName(testServerName, name); ln != nil {
t.Fatal("find listener name failed, expected not found")
}
if err := GetListenerAdapterInstance().AddOrUpdateListener(testServerName, cfg); err != nil {
t.Fatalf("update listener failed, %v", err)
}
if ln := GetListenerAdapterInstance().FindListenerByName(testServerName, name); ln == nil {
t.Fatal("expected find listener, but not")
}
} | explode_data.jsonl/9409 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 204
} | [
2830,
3393,
9885,
2743,
16898,
1155,
353,
8840,
836,
8,
341,
84571,
741,
16867,
32825,
2822,
53183,
2580,
1669,
330,
16,
17,
22,
13,
15,
13,
15,
13,
16,
25,
23,
15,
23,
18,
698,
11609,
1669,
330,
35039,
19,
698,
50286,
1669,
2331,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestParseURL(t *testing.T) {
src := `http://username:password@hostname:9090/path?arg=value#anchor`
expect := map[string]string{
"scheme": "http",
"host": "hostname",
"port": "9090",
"user": "username",
"pass": "password",
"path": "/path",
"query": "arg=value",
"fragment": "anchor",
}
gtest.C(t, func(t *gtest.T) {
component := 0
for k, v := range []string{"all", "scheme", "host", "port", "user", "pass", "path", "query", "fragment"} {
if v == "all" {
component = -1
} else {
component = 1 << (uint(k - 1))
}
res, err := gurl.ParseURL(src, component)
if err != nil {
t.Errorf("ParseURL failed. component:%v, err:%v", component, err)
return
}
if v == "all" {
t.Assert(res, expect)
} else {
t.Assert(res[v], expect[v])
}
}
})
} | explode_data.jsonl/52551 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 405
} | [
2830,
3393,
14463,
3144,
1155,
353,
8840,
836,
8,
341,
41144,
1669,
1565,
1254,
1110,
5113,
25,
3833,
31,
27806,
25,
24,
15,
24,
15,
50976,
30,
858,
46538,
2,
17109,
3989,
24952,
1669,
2415,
14032,
30953,
515,
197,
197,
40787,
8058,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestLiveUpdateLocalContainerFallBackOn(t *testing.T) {
f := newBDFixture(t, k8s.EnvDockerDesktop, container.RuntimeDocker)
defer f.TearDown()
lu := assembleLiveUpdate(SanchoSyncSteps(f), SanchoRunSteps, true, []string{"a.txt"}, f)
tCase := testCase{
manifest: manifestbuilder.New(f, "sancho").
WithK8sYAML(SanchoYAML).
WithImageTarget(NewSanchoDockerBuildImageTarget(f)).
WithLiveUpdate(lu).
Build(),
changedFiles: []string{"a.txt"},
expectDockerBuildCount: 1, // we did a Docker build instead of an in-place update!
expectDockerPushCount: 0,
expectDockerCopyCount: 0,
expectDockerExecCount: 0,
expectDockerRestartCount: 0,
expectK8sDeploy: true, // Because we fell back to image builder, we also did a k8s deploy
logsContain: []string{"Detected change to fall_back_on file", "a.txt"},
}
runTestCase(t, f, tCase)
} | explode_data.jsonl/35174 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 376
} | [
2830,
3393,
20324,
4289,
7319,
4502,
49772,
3707,
1925,
1155,
353,
8840,
836,
8,
341,
1166,
1669,
501,
33,
5262,
12735,
1155,
11,
595,
23,
82,
81214,
35,
13659,
23597,
11,
5476,
16706,
35,
13659,
340,
16867,
282,
836,
682,
4454,
2822,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestOptionalTokensToBeTokens(t *testing.T) {
var tok *token.Token
Implements(t, tok, &Optional{})
var forward *token.ForwardToken
Implements(t, forward, &Optional{})
} | explode_data.jsonl/53289 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 64
} | [
2830,
3393,
15309,
29300,
1249,
3430,
29300,
1155,
353,
8840,
836,
8,
341,
2405,
9628,
353,
5839,
32277,
271,
197,
1427,
4674,
1155,
11,
9628,
11,
609,
15309,
6257,
692,
2405,
4637,
353,
5839,
26676,
1606,
3323,
271,
197,
1427,
4674,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestColumnPruning(t *testing.T) {
var (
input []string
output []map[int][]string
)
planSuiteUnexportedData.GetTestCases(t, &input, &output)
s := createPlannerSuite()
ctx := context.Background()
for i, tt := range input {
comment := fmt.Sprintf("case:%v sql:\"%s\"", i, tt)
stmt, err := s.p.ParseOneStmt(tt, "", "")
require.NoError(t, err, comment)
p, _, err := BuildLogicalPlanForTest(ctx, s.ctx, stmt, s.is)
require.NoError(t, err)
lp, err := logicalOptimize(ctx, flagPredicatePushDown|flagPrunColumns|flagPrunColumnsAgain, p.(LogicalPlan))
require.NoError(t, err)
testdata.OnRecord(func() {
output[i] = make(map[int][]string)
})
checkDataSourceCols(lp, t, output[i], comment)
}
} | explode_data.jsonl/50215 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 304
} | [
2830,
3393,
2933,
3533,
37202,
1155,
353,
8840,
836,
8,
341,
2405,
2399,
197,
22427,
220,
3056,
917,
198,
197,
21170,
3056,
2186,
18640,
45725,
917,
198,
197,
340,
197,
10393,
28000,
1806,
1533,
291,
1043,
2234,
2271,
37302,
1155,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSelectFilter(t *testing.T) {
if testing.Short() {
t.Skip()
}
engine.se.Reload(context.Background())
execStatements(t, []string{
"create table t1(id1 int, id2 int, val varbinary(128), primary key(id1))",
})
defer execStatements(t, []string{
"drop table t1",
})
engine.se.Reload(context.Background())
filter := &binlogdatapb.Filter{
Rules: []*binlogdatapb.Rule{{
Match: "t1",
Filter: "select id2, val from t1 where in_keyrange(id2, 'hash', '-80')",
}},
}
testcases := []testcase{{
input: []string{
"begin",
"insert into t1 values (4, 1, 'aaa')",
"insert into t1 values (2, 4, 'aaa')",
"commit",
},
output: [][]string{{
`begin`,
`type:FIELD field_event:{table_name:"t1" fields:{name:"id2" type:INT32 table:"t1" org_table:"t1" database:"vttest" org_name:"id2" column_length:11 charset:63 column_type:"int(11)"} fields:{name:"val" type:VARBINARY table:"t1" org_table:"t1" database:"vttest" org_name:"val" column_length:128 charset:63 column_type:"varbinary(128)"}}`,
`type:ROW row_event:{table_name:"t1" row_changes:{after:{lengths:1 lengths:3 values:"1aaa"}}}`,
`gtid`,
`commit`,
}},
}}
runCases(t, filter, testcases, "", nil)
} | explode_data.jsonl/10414 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 506
} | [
2830,
3393,
3379,
5632,
1155,
353,
8840,
836,
8,
341,
743,
7497,
55958,
368,
341,
197,
3244,
57776,
741,
197,
532,
80118,
4523,
38939,
2731,
5378,
19047,
12367,
67328,
93122,
1155,
11,
3056,
917,
515,
197,
197,
1,
3182,
1965,
259,
16,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGettingAuthToken(t *testing.T) {
if !h.ConfigExists(cnfgPath) {
t.Skip("No auth config provided")
}
err := h.CheckAuth(
&AuthCnfg{},
cnfgPath,
[]string{"SiteURL"},
)
if err != nil {
t.Error(err)
}
} | explode_data.jsonl/31318 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 102
} | [
2830,
3393,
28655,
84708,
1155,
353,
8840,
836,
8,
341,
743,
753,
71,
10753,
15575,
92456,
4817,
1820,
8,
341,
197,
3244,
57776,
445,
2753,
4166,
2193,
3897,
1138,
197,
532,
9859,
1669,
305,
10600,
5087,
1006,
197,
197,
5,
5087,
34,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestNoServiceNaming(t *testing.T) {
testutils.WithTestServer(t, nil, func(t testing.TB, ts *testutils.TestServer) {
ctx, cancel := NewContext(time.Second)
defer cancel()
_, _, _, err := raw.Call(ctx, ts.Server(), ts.HostPort(), "", "Echo", []byte("Headers"), []byte("Body"))
assert.Equal(t, ErrNoServiceName, err)
ts.AssertRelayStats(relaytest.NewMockStats())
})
} | explode_data.jsonl/78183 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 149
} | [
2830,
3393,
2753,
1860,
85410,
1155,
353,
8840,
836,
8,
341,
18185,
6031,
26124,
2271,
5475,
1155,
11,
2092,
11,
2915,
1155,
7497,
836,
33,
11,
10591,
353,
1944,
6031,
8787,
5475,
8,
341,
197,
20985,
11,
9121,
1669,
1532,
1972,
9730,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTopLevelAwaitIIFE(t *testing.T) {
default_suite.expectBundled(t, bundled{
files: map[string]string{
"/entry.js": `
await foo;
for await (foo of bar) ;
`,
},
entryPaths: []string{"/entry.js"},
options: config.Options{
Mode: config.ModeBundle,
OutputFormat: config.FormatIIFE,
AbsOutputFile: "/out.js",
},
expectedScanLog: `entry.js: error: Top-level await is currently not supported with the "iife" output format
entry.js: error: Top-level await is currently not supported with the "iife" output format
`,
})
} | explode_data.jsonl/38566 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 222
} | [
2830,
3393,
5366,
4449,
37352,
40,
39677,
1155,
353,
8840,
836,
8,
341,
11940,
57239,
25952,
33,
1241,
832,
1155,
11,
51450,
515,
197,
74075,
25,
2415,
14032,
30953,
515,
298,
197,
3115,
4085,
2857,
788,
22074,
571,
34436,
15229,
280,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTransportAutomaticHTTP2_DialerAndTLSConfigSupportsHTTP2AndTLSConfig(t *testing.T) {
testTransportAutoHTTP(t, &Transport{
ForceAttemptHTTP2: true,
TLSClientConfig: new(tls.Config),
}, true)
} | explode_data.jsonl/14144 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 79
} | [
2830,
3393,
27560,
62790,
9230,
17,
1557,
530,
261,
3036,
45439,
2648,
7916,
82,
9230,
17,
3036,
45439,
2648,
1155,
353,
8840,
836,
8,
341,
18185,
27560,
13253,
9230,
1155,
11,
609,
27560,
515,
197,
197,
18573,
47052,
9230,
17,
25,
83... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestFSRecoverWithoutIndexFiles(t *testing.T) {
cleanupFSDatastore(t)
defer cleanupFSDatastore(t)
fs := createDefaultFileStore(t)
defer fs.Close()
limits := testDefaultStoreLimits
limits.MaxMsgs = 8
if err := fs.SetLimits(&limits); err != nil {
t.Fatalf("Unexpected error setting limits: %v", err)
}
total := limits.MaxMsgs + 1
payload := []byte("hello")
msgs := make([]*pb.MsgProto, 0, total)
cs := storeCreateChannel(t, fs, "foo")
for i := 0; i < total; i++ {
msgs = append(msgs, storeMsg(t, cs, "foo", uint64(i+1), payload))
}
msgStore := cs.Msgs.(*FileMsgStore)
// Get the index file names
fs.RLock()
idxFileNames := make([]string, 0, len(msgStore.files))
for _, sl := range msgStore.files {
idxFileNames = append(idxFileNames, sl.idxFile.name)
}
fs.RUnlock()
// Close store
fs.Close()
// Remove the index files
for _, fn := range idxFileNames {
if err := os.Remove(fn); err != nil {
t.Fatalf("Error removing file %q: %v", fn, err)
}
}
// Restart store
fs, state := openDefaultFileStore(t)
defer fs.Close()
cs = getRecoveredChannel(t, state, "foo")
for i := 0; i < total; i++ {
m := msgStoreLookup(t, cs.Msgs, uint64(i+1))
if !reflect.DeepEqual(m, msgs[i]) {
t.Fatalf("Expected to get message %v, got %v", msgs[i], m)
}
}
} | explode_data.jsonl/7764 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 530
} | [
2830,
3393,
8485,
693,
3688,
26040,
1552,
10809,
1155,
353,
8840,
836,
8,
341,
1444,
60639,
8485,
1043,
4314,
1155,
340,
16867,
21290,
8485,
1043,
4314,
1155,
692,
53584,
1669,
1855,
3675,
1703,
6093,
1155,
340,
16867,
8619,
10421,
2822,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func TestDeleteHost(t *testing.T) {
RegisterMockDriver(t)
api := tests.NewMockAPI(t)
if _, err := createHost(api, defaultMachineConfig); err != nil {
t.Errorf("createHost failed: %v", err)
}
if err := DeleteHost(api); err != nil {
t.Fatalf("Unexpected error deleting host: %v", err)
}
} | explode_data.jsonl/4187 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 112
} | [
2830,
3393,
6435,
9296,
1155,
353,
8840,
836,
8,
341,
79096,
11571,
11349,
1155,
340,
54299,
1669,
7032,
7121,
11571,
7082,
1155,
340,
743,
8358,
1848,
1669,
1855,
9296,
24827,
11,
1638,
21605,
2648,
1215,
1848,
961,
2092,
341,
197,
324... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestMultipleOfferAnswer(t *testing.T) {
firstPeerConn, err := NewPeerConnection(Configuration{})
if err != nil {
t.Errorf("New PeerConnection: got error: %v", err)
}
if _, err = firstPeerConn.CreateOffer(nil); err != nil {
t.Errorf("First Offer: got error: %v", err)
}
if _, err = firstPeerConn.CreateOffer(nil); err != nil {
t.Errorf("Second Offer: got error: %v", err)
}
secondPeerConn, err := NewPeerConnection(Configuration{})
if err != nil {
t.Errorf("New PeerConnection: got error: %v", err)
}
secondPeerConn.OnICECandidate(func(i *ICECandidate) {
})
if _, err = secondPeerConn.CreateOffer(nil); err != nil {
t.Errorf("First Offer: got error: %v", err)
}
if _, err = secondPeerConn.CreateOffer(nil); err != nil {
t.Errorf("Second Offer: got error: %v", err)
}
closePairNow(t, firstPeerConn, secondPeerConn)
} | explode_data.jsonl/8649 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 331
} | [
2830,
3393,
32089,
39462,
16141,
1155,
353,
8840,
836,
8,
341,
42190,
30888,
9701,
11,
1848,
1669,
1532,
30888,
4526,
45443,
37790,
743,
1848,
961,
2092,
341,
197,
3244,
13080,
445,
3564,
45147,
4526,
25,
2684,
1465,
25,
1018,
85,
497,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNetIPCollect(t *testing.T) {
t.Log("Testing Collect")
zerolog.SetGlobalLevel(zerolog.Disabled)
c, err := NewNetIPCollector("")
if err != nil {
t.Fatalf("expected NO error, got (%s)", err)
}
if err := c.Collect(context.Background()); err != nil {
t.Fatalf("expected NO error, got (%s)", err)
}
metrics := c.Flush()
if metrics == nil {
t.Fatal("expected error")
}
if len(metrics) == 0 {
t.Fatalf("expected metrics, got %v", metrics)
}
} | explode_data.jsonl/36365 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 180
} | [
2830,
3393,
6954,
3298,
47504,
1155,
353,
8840,
836,
8,
341,
3244,
5247,
445,
16451,
20513,
5130,
197,
7070,
1609,
4202,
11646,
4449,
7,
7070,
1609,
89576,
692,
1444,
11,
1848,
1669,
1532,
6954,
3298,
53694,
31764,
743,
1848,
961,
2092,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestInfoServerNameIsSettable(t *testing.T) {
opts := DefaultOptions()
opts.Port = 4222
opts.ClientAdvertise = "nats.example.com"
opts.ServerName = "test_server_name"
s := New(opts)
defer s.Shutdown()
if s.info.Name != "test_server_name" {
t.Fatalf("server info hostname is incorrect, got: '%v' expected: 'test_server_name'", s.info.Name)
}
} | explode_data.jsonl/3603 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 144
} | [
2830,
3393,
1731,
5475,
675,
3872,
1649,
2005,
1155,
353,
8840,
836,
8,
341,
64734,
1669,
7899,
3798,
741,
64734,
43013,
284,
220,
19,
17,
17,
17,
198,
64734,
11716,
2589,
67787,
284,
330,
77,
1862,
7724,
905,
698,
64734,
22997,
675,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestDefaultRequestPrefix(t *testing.T) {
t.Parallel()
var idGen = &request.ID{}
finalCH := mw.ChainFunc(func(w http.ResponseWriter, r *http.Request) {
id := w.Header().Get(request.HeaderIDKeyName)
assert.Contains(t, id, "/")
}, idGen.With())
const regex = ".+/[A-Za-z0-9]+-[0-9]+"
matchr := regexp.MustCompile(regex)
bgwork.Wait(10, func(idx int) {
req := httptest.NewRequest("GET", "/", nil)
hpu := cstesting.NewHTTPParallelUsers(5, 10, 500, time.Millisecond)
hpu.AssertResponse = func(rec *httptest.ResponseRecorder) {
id := rec.Header().Get(request.HeaderIDKeyName)
if !matchr.MatchString(id) {
panic(fmt.Sprintf("ID %q does not match %q", id, regex))
}
}
hpu.ServeHTTP(req, finalCH)
})
assert.Exactly(t, 500, int(*idGen.Count), "ID counts do not match")
} | explode_data.jsonl/39365 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 339
} | [
2830,
3393,
3675,
1900,
14335,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
2405,
877,
9967,
284,
609,
2035,
9910,
31483,
14213,
2149,
1669,
52810,
98269,
9626,
18552,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
341,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGitGetter_GetFile(t *testing.T) {
if !testHasGit {
t.Log("git not found, skipping")
t.Skip()
}
g := new(GitGetter)
dst := tempFile(t)
repo := testGitRepo(t, "file")
repo.commitFile("file.txt", "hello")
// Download the file
repo.url.Path = filepath.Join(repo.url.Path, "file.txt")
if err := g.GetFile(dst, repo.url); err != nil {
t.Fatalf("err: %s", err)
}
// Verify the main file exists
if _, err := os.Stat(dst); err != nil {
t.Fatalf("err: %s", err)
}
assertContents(t, dst, "hello")
} | explode_data.jsonl/39697 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 230
} | [
2830,
3393,
46562,
31485,
13614,
1703,
1155,
353,
8840,
836,
8,
341,
743,
753,
1944,
10281,
46562,
341,
197,
3244,
5247,
445,
12882,
537,
1730,
11,
42659,
1138,
197,
3244,
57776,
741,
197,
630,
3174,
1669,
501,
6699,
275,
31485,
340,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestResourcePermissionsDelete(t *testing.T) {
d, err := qa.ResourceFixture{
Fixtures: []qa.HTTPFixture{
{
Method: http.MethodGet,
Resource: "/api/2.0/permissions/clusters/abc",
Response: ObjectACL{
ObjectID: "/clusters/abc",
ObjectType: "clusters",
AccessControlList: []AccessControl{
{
UserName: TestingUser,
AllPermissions: []Permission{
{
PermissionLevel: "CAN_READ",
Inherited: false,
},
},
},
{
UserName: TestingAdminUser,
AllPermissions: []Permission{
{
PermissionLevel: "CAN_MANAGE",
Inherited: false,
},
},
},
},
},
},
{
Method: http.MethodPut,
Resource: "/api/2.0/permissions/clusters/abc",
ExpectedRequest: ObjectACL{},
},
},
Resource: ResourcePermissions(),
Delete: true,
ID: "/clusters/abc",
}.Apply(t)
assert.NoError(t, err, err)
assert.Equal(t, "/clusters/abc", d.Id())
} | explode_data.jsonl/50873 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 542
} | [
2830,
3393,
4783,
23851,
6435,
1155,
353,
8840,
836,
8,
341,
2698,
11,
1848,
1669,
88496,
20766,
18930,
515,
197,
12727,
941,
18513,
25,
3056,
15445,
27358,
18930,
515,
298,
197,
515,
571,
84589,
25,
256,
1758,
20798,
1949,
345,
571,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_CLG_Input_KnownInputSequence(t *testing.T) {
newCLG := MustNew()
newCtx := context.MustNew()
newStorageCollection := testMustNewStorageCollection(t)
// Create record for the test input.
informationID := "123"
newInput := "test input"
informationIDKey := fmt.Sprintf("information-sequence:%s:information-id", newInput)
err := newStorageCollection.General().Set(informationIDKey, informationID)
if err != nil {
t.Fatal("expected", nil, "got", err)
}
// Set prepared storage to CLG we want to test.
newCLG.(*clg).StorageCollection = newStorageCollection
// Execute CLG.
err = newCLG.(*clg).calculate(newCtx, newInput)
if err != nil {
t.Fatal("expected", nil, "got", err)
}
// Check if the information ID was set to the context.
injectedInformationID, _ := newCtx.GetInformationID()
if informationID != injectedInformationID {
t.Fatal("expected", informationID, "got", injectedInformationID)
}
} | explode_data.jsonl/52849 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 309
} | [
2830,
3393,
6843,
38,
48653,
10102,
4169,
2505,
14076,
1155,
353,
8840,
836,
8,
341,
8638,
3140,
38,
1669,
15465,
3564,
741,
8638,
23684,
1669,
2266,
50463,
3564,
741,
8638,
5793,
6482,
1669,
1273,
31776,
3564,
5793,
6482,
1155,
692,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestRadarSearchMissingKeywordNameAndType(t *testing.T) {
c, _ := NewClient(WithAPIKey(apiKey))
r := &RadarSearchRequest{
Location: &LatLng{1, 2},
Radius: 1000,
}
_, err := c.RadarSearch(context.Background(), r)
if err == nil {
t.Errorf("Error expected: maps: Keyword, Name and Type are missing")
}
if "maps: Keyword, Name and Type are missing" != err.Error() {
t.Errorf("Wrong error returned \"%v\"", err)
}
} | explode_data.jsonl/76307 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 167
} | [
2830,
3393,
49,
36080,
5890,
25080,
34481,
675,
3036,
929,
1155,
353,
8840,
836,
8,
341,
1444,
11,
716,
1669,
1532,
2959,
7,
2354,
7082,
1592,
24827,
1592,
1171,
7000,
1669,
609,
49,
36080,
5890,
1900,
515,
197,
197,
4707,
25,
609,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_WorkspaceFeatures_Status_WhenPropertiesConverted_RoundTripsWithoutLoss(t *testing.T) {
t.Parallel()
parameters := gopter.DefaultTestParameters()
parameters.MaxSize = 10
properties := gopter.NewProperties(parameters)
properties.Property(
"Round trip from WorkspaceFeatures_Status to WorkspaceFeatures_Status via AssignPropertiesToWorkspaceFeaturesStatus & AssignPropertiesFromWorkspaceFeaturesStatus returns original",
prop.ForAll(RunPropertyAssignmentTestForWorkspaceFeaturesStatus, WorkspaceFeaturesStatusGenerator()))
properties.TestingRun(t, gopter.NewFormatedReporter(false, 240, os.Stdout))
} | explode_data.jsonl/43372 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 175
} | [
2830,
3393,
87471,
8746,
21336,
36449,
62,
4498,
7903,
61941,
2568,
795,
21884,
1690,
26040,
39838,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
67543,
1669,
728,
73137,
13275,
2271,
9706,
741,
67543,
14535,
1695,
284,
220,
16,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRecover_sendRecoverEmail(t *testing.T) {
t.Parallel()
r, _, _ := testSetup()
mailer := mocks.NewMockMailer()
r.EmailSubjectPrefix = "foo "
r.RootURL = "bar"
r.Mailer = mailer
r.sendRecoverEmail(r.NewContext(), "a@b.c", "abc=")
if len(mailer.Last.To) != 1 {
t.Error("Expected 1 to email")
}
if mailer.Last.To[0] != "a@b.c" {
t.Error("Unexpected to email:", mailer.Last.To[0])
}
if mailer.Last.Subject != "foo Password Reset" {
t.Error("Unexpected subject:", mailer.Last.Subject)
}
url := fmt.Sprintf("%s/recover/complete?token=abc%%3D", r.RootURL)
if !strings.Contains(mailer.Last.HTMLBody, url) {
t.Error("Expected HTMLBody to contain url:", url)
}
if !strings.Contains(mailer.Last.TextBody, url) {
t.Error("Expected TextBody to contain url:", url)
}
} | explode_data.jsonl/61500 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 333
} | [
2830,
3393,
693,
3688,
13565,
693,
3688,
4781,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
7000,
11,
8358,
716,
1669,
1273,
21821,
2822,
2109,
38782,
1669,
68909,
7121,
11571,
47608,
741,
7000,
24066,
13019,
14335,
284,
330,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func Test_Router_DomainControllerRest(t *testing.T) {
p := ports.PopRand()
s := g.Server(p)
d := s.Domain("localhost, local")
d.BindControllerRest("/", new(DomainControllerRest))
s.SetPort(p)
s.SetDumpRouteMap(false)
s.Start()
defer s.Shutdown()
// 等待启动完成
time.Sleep(time.Second)
gtest.Case(t, func() {
client := ghttp.NewClient()
client.SetPrefix(fmt.Sprintf("http://127.0.0.1:%d", p))
gtest.Assert(client.GetContent("/"), "Not Found")
gtest.Assert(client.PutContent("/"), "Not Found")
gtest.Assert(client.PostContent("/"), "Not Found")
gtest.Assert(client.DeleteContent("/"), "Not Found")
gtest.Assert(client.PatchContent("/"), "Not Found")
gtest.Assert(client.OptionsContent("/"), "Not Found")
resp1, err := client.Head("/")
if err == nil {
defer resp1.Close()
}
gtest.Assert(err, nil)
gtest.Assert(resp1.Header.Get("head-ok"), "")
gtest.Assert(client.GetContent("/none-exist"), "Not Found")
})
gtest.Case(t, func() {
client := ghttp.NewClient()
client.SetPrefix(fmt.Sprintf("http://localhost:%d", p))
gtest.Assert(client.GetContent("/"), "1Controller Get2")
gtest.Assert(client.PutContent("/"), "1Controller Put2")
gtest.Assert(client.PostContent("/"), "1Controller Post2")
gtest.Assert(client.DeleteContent("/"), "1Controller Delete2")
gtest.Assert(client.PatchContent("/"), "1Controller Patch2")
gtest.Assert(client.OptionsContent("/"), "1Controller Options2")
resp1, err := client.Head("/")
if err == nil {
defer resp1.Close()
}
gtest.Assert(err, nil)
gtest.Assert(resp1.Header.Get("head-ok"), "1")
gtest.Assert(client.GetContent("/none-exist"), "Not Found")
})
gtest.Case(t, func() {
client := ghttp.NewClient()
client.SetPrefix(fmt.Sprintf("http://local:%d", p))
gtest.Assert(client.GetContent("/"), "1Controller Get2")
gtest.Assert(client.PutContent("/"), "1Controller Put2")
gtest.Assert(client.PostContent("/"), "1Controller Post2")
gtest.Assert(client.DeleteContent("/"), "1Controller Delete2")
gtest.Assert(client.PatchContent("/"), "1Controller Patch2")
gtest.Assert(client.OptionsContent("/"), "1Controller Options2")
resp1, err := client.Head("/")
if err == nil {
defer resp1.Close()
}
gtest.Assert(err, nil)
gtest.Assert(resp1.Header.Get("head-ok"), "1")
gtest.Assert(client.GetContent("/none-exist"), "Not Found")
})
} | explode_data.jsonl/24933 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 955
} | [
2830,
3393,
2568,
2676,
1557,
3121,
2051,
12416,
1155,
353,
8840,
836,
8,
341,
3223,
1669,
20325,
47424,
56124,
741,
1903,
1669,
342,
22997,
1295,
340,
2698,
1669,
274,
20442,
445,
8301,
11,
2205,
1138,
2698,
32451,
2051,
12416,
35460,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestAWSGetSecretPreviousVersion(t *testing.T) {
mockAwsSecretManager := &mockAWSSecretManagerClient{
secretData: &secretsmanager.GetSecretValueOutput{
SecretString: aws.String("test-secret"),
VersionStages: []*string{aws.String("AWSPREVIOUS")},
},
}
secretValueInput := &secretsmanager.GetSecretValueInput{
SecretId: aws.String("test-secret"),
VersionStage: aws.String("AWSPREVIOUS"),
}
secretData, err := awsSecretsManager.GetSecretData(mockAwsSecretManager, secretValueInput)
if err != nil {
t.Fatalf("error getting secret data: %v", err)
}
assert.Equal(t, secretData["API_KEY"], "old123abc")
} | explode_data.jsonl/74986 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 232
} | [
2830,
3393,
36136,
1949,
19773,
21291,
5637,
1155,
353,
8840,
836,
8,
341,
77333,
47359,
19773,
2043,
1669,
609,
16712,
14419,
1220,
50856,
2043,
2959,
515,
197,
197,
20474,
1043,
25,
609,
325,
52710,
13297,
2234,
19773,
1130,
5097,
515,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestRenderTemplate(t *testing.T) {
slb := NewSLB(&SLBOpts{NgxCfg: "fixture/nginx.conf", SvcCfg: "fixture/svc.yaml"})
slb.readConf()
text := slb.getNginxConf() + defaultTmpl
tmpl, err := template.New("ngxConf").Parse(text)
if err != nil {
logrus.Fatalf("new template error: %+v", err)
}
var buf bytes.Buffer
if err = tmpl.Execute(&buf, slb.Backend); err != nil {
logrus.Fatalf("tmpl exec error: %+v", err)
}
expected := slb.getFileContent("fixture/expected.conf")
assert.Equal(t, buf.String(), expected)
} | explode_data.jsonl/16639 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 220
} | [
2830,
3393,
6750,
7275,
1155,
353,
8840,
836,
8,
341,
1903,
21123,
1669,
1532,
7984,
33,
2099,
7984,
4677,
12754,
90,
20897,
12125,
4817,
25,
330,
59612,
69261,
13937,
497,
328,
7362,
42467,
25,
330,
59612,
2687,
7362,
33406,
23625,
190... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestMyPoGo_GetStringLength(t *testing.T) {
dataContext := ast.NewDataContext()
pogo := &MyPoGo{}
err := dataContext.Add("Pogo", pogo)
if err != nil {
t.Fatal(err)
}
lib := ast.NewKnowledgeLibrary()
ruleBuilder := builder.NewRuleBuilder(lib)
err = ruleBuilder.BuildRuleFromResource("Test", "0.1.1", pkg.NewBytesResource([]byte(rule2)))
assert.NoError(t, err)
kb := lib.NewKnowledgeBaseInstance("Test", "0.1.1")
eng1 := &engine.GruleEngine{MaxCycle: 1}
err = eng1.Execute(dataContext, kb)
assert.NoError(t, err)
assert.Equal(t, "String len above 0", pogo.Result)
} | explode_data.jsonl/81840 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 230
} | [
2830,
3393,
5050,
32904,
10850,
13614,
44421,
1155,
353,
8840,
836,
8,
341,
8924,
1972,
1669,
11763,
7121,
81137,
741,
3223,
23500,
1669,
609,
5050,
32904,
10850,
16094,
9859,
1669,
821,
1972,
1904,
445,
47,
23500,
497,
281,
23500,
340,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestVindexHexTypes(t *testing.T) {
defer cluster.PanicHandler(t)
ctx := context.Background()
conn, err := mysql.Connect(ctx, &vtParams)
require.Nil(t, err)
defer conn.Close()
utils.Exec(t, conn, "INSERT INTO thex (id, field) VALUES "+
"(0x01,1), "+
"(x'a5',2), "+
"(0x48656c6c6f20476f7068657221,3), "+
"(x'c26caa1a5eb94096d29a1bec',4)")
result := utils.Exec(t, conn, "select id, field from thex order by id")
expected :=
"[[VARBINARY(\"\\x01\") INT64(1)] " +
"[VARBINARY(\"Hello Gopher!\") INT64(3)] " +
"[VARBINARY(\"\\xa5\") INT64(2)] " +
"[VARBINARY(\"\\xc2l\\xaa\\x1a^\\xb9@\\x96Қ\\x1b\\xec\") INT64(4)]]"
assert.Equal(t, expected, fmt.Sprintf("%v", result.Rows))
} | explode_data.jsonl/4243 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 340
} | [
2830,
3393,
53,
1252,
20335,
4173,
1155,
353,
8840,
836,
8,
341,
16867,
10652,
1069,
31270,
3050,
1155,
340,
20985,
1669,
2266,
19047,
741,
32917,
11,
1848,
1669,
10564,
43851,
7502,
11,
609,
9708,
4870,
340,
17957,
59678,
1155,
11,
184... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSafeTransferFromParameters(t *testing.T) {
// succeeds no data param
opts := &SafeTransferOptions{
Sender: senderT,
Recipient: recipientT,
TokenID: tokenIDT,
}
params, err := SafeTransferFromParameters(opts)
assert.NoError(t, err)
assert.Equal(t, "0x42842e0e000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001", hexutil.Encode(params.Calldata))
assert.Equal(t, "0x00", utils.ToHex(params.Value))
// succeeds data param
opts = &SafeTransferOptions{
Sender: senderT,
Recipient: recipientT,
TokenID: tokenIDT,
Data: common.FromHex("0x0000000000000000000000000000000000009004"),
}
params, err = SafeTransferFromParameters(opts)
assert.NoError(t, err)
assert.Equal(t, "0xb88d4fde000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000009004000000000000000000000000", hexutil.Encode(params.Calldata))
assert.Equal(t, "0x00", utils.ToHex(params.Value))
} | explode_data.jsonl/58638 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 360
} | [
2830,
3393,
25663,
21970,
3830,
9706,
1155,
353,
8840,
836,
8,
341,
197,
322,
50081,
902,
821,
1685,
198,
64734,
1669,
609,
25663,
21970,
3798,
515,
197,
7568,
1659,
25,
262,
4646,
51,
345,
197,
197,
74432,
25,
21713,
51,
345,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEntry_CursorColumn_Wrap(t *testing.T) {
entry := widget.NewMultiLineEntry()
entry.SetText("a\nb")
assert.Equal(t, 0, entry.CursorRow)
assert.Equal(t, 0, entry.CursorColumn)
// go to end of line
right := &fyne.KeyEvent{Name: fyne.KeyRight}
entry.TypedKey(right)
assert.Equal(t, 0, entry.CursorRow)
assert.Equal(t, 1, entry.CursorColumn)
// wrap to new line
entry.TypedKey(right)
assert.Equal(t, 1, entry.CursorRow)
assert.Equal(t, 0, entry.CursorColumn)
// and back
left := &fyne.KeyEvent{Name: fyne.KeyLeft}
entry.TypedKey(left)
assert.Equal(t, 0, entry.CursorRow)
assert.Equal(t, 1, entry.CursorColumn)
} | explode_data.jsonl/12316 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 266
} | [
2830,
3393,
5874,
920,
3823,
2933,
2763,
4611,
1155,
353,
8840,
836,
8,
341,
48344,
1669,
9086,
7121,
20358,
2460,
5874,
741,
48344,
92259,
445,
64,
1699,
65,
1138,
6948,
12808,
1155,
11,
220,
15,
11,
4343,
29929,
3102,
340,
6948,
128... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFrameToSeriesSlice(t *testing.T) {
tests := []struct {
name string
frame *data.Frame
seriesSlice legacydata.DataTimeSeriesSlice
Err require.ErrorAssertionFunc
}{
{
name: "a wide series",
frame: data.NewFrame("",
data.NewField("Time", nil, []time.Time{
time.Date(2020, 1, 2, 3, 4, 0, 0, time.UTC),
time.Date(2020, 1, 2, 3, 4, 30, 0, time.UTC),
}),
data.NewField(`Values Int64s`, data.Labels{"Animal Factor": "cat"}, []*int64{
nil,
pointer.Int64(3),
}),
data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []float64{
2.0,
4.0,
})),
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "Values Int64s {Animal Factor=cat}",
Tags: map[string]string{"Animal Factor": "cat"},
Points: legacydata.DataTimeSeriesPoints{
legacydata.DataTimePoint{null.FloatFrom(math.NaN()), null.FloatFrom(1577934240000)},
legacydata.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1577934270000)},
},
},
legacydata.DataTimeSeries{
Name: "Values Floats {Animal Factor=sloth}",
Tags: map[string]string{"Animal Factor": "sloth"},
Points: legacydata.DataTimeSeriesPoints{
legacydata.DataTimePoint{null.FloatFrom(2), null.FloatFrom(1577934240000)},
legacydata.DataTimePoint{null.FloatFrom(4), null.FloatFrom(1577934270000)},
},
},
},
Err: require.NoError,
},
{
name: "empty wide series",
frame: data.NewFrame("",
data.NewField("Time", nil, []time.Time{}),
data.NewField(`Values Int64s`, data.Labels{"Animal Factor": "cat"}, []*int64{}),
data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []float64{})),
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "Values Int64s {Animal Factor=cat}",
Tags: map[string]string{"Animal Factor": "cat"},
Points: legacydata.DataTimeSeriesPoints{},
},
legacydata.DataTimeSeries{
Name: "Values Floats {Animal Factor=sloth}",
Tags: map[string]string{"Animal Factor": "sloth"},
Points: legacydata.DataTimeSeriesPoints{},
},
},
Err: require.NoError,
},
{
name: "empty labels",
frame: data.NewFrame("",
data.NewField("Time", data.Labels{}, []time.Time{}),
data.NewField(`Values`, data.Labels{}, []float64{})),
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "Values",
Points: legacydata.DataTimeSeriesPoints{},
},
},
Err: require.NoError,
},
{
name: "display name from data source",
frame: data.NewFrame("",
data.NewField("Time", data.Labels{}, []time.Time{}),
data.NewField(`Values`, data.Labels{"Rating": "10"}, []*int64{}).SetConfig(&data.FieldConfig{
DisplayNameFromDS: "sloth",
})),
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "sloth",
Points: legacydata.DataTimeSeriesPoints{},
Tags: map[string]string{"Rating": "10"},
},
},
Err: require.NoError,
},
{
name: "prefer display name over data source display name",
frame: data.NewFrame("",
data.NewField("Time", data.Labels{}, []time.Time{}),
data.NewField(`Values`, data.Labels{}, []*int64{}).SetConfig(&data.FieldConfig{
DisplayName: "sloth #1",
DisplayNameFromDS: "sloth #2",
})),
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "sloth #1",
Points: legacydata.DataTimeSeriesPoints{},
},
},
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
seriesSlice, err := FrameToSeriesSlice(tt.frame)
tt.Err(t, err)
if diff := cmp.Diff(tt.seriesSlice, seriesSlice, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
} | explode_data.jsonl/32631 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1724
} | [
2830,
3393,
4369,
1249,
25544,
33236,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
286,
914,
198,
197,
34465,
981,
353,
691,
36434,
198,
197,
197,
19880,
33236,
19588,
691,
3336,
1462,
25544,
33236,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGocloak_ListAddRemoveDefaultClientScopes(t *testing.T) {
t.Parallel()
cfg := GetConfig(t)
client := NewClientWithDebug(t)
token := GetAdminToken(t, client)
defer ClearRealmCache(t, client)
scope := ClientScope{
Protocol: "openid-connect",
ClientScopeAttributes: &ClientScopeAttributes{
IncludeInTokenScope: "true",
},
}
tearDown, scopeID := CreateClientScope(t, client, &scope)
defer tearDown()
scopesBeforeAdding, err := client.GetClientsDefaultScopes(
token.AccessToken,
cfg.GoCloak.Realm,
gocloakClientID,
)
assert.NoError(t, err, "GetClientsDefaultScopes failed")
err = client.AddDefaultScopeToClient(
token.AccessToken,
cfg.GoCloak.Realm,
gocloakClientID,
scopeID,
)
assert.NoError(t, err, "AddDefaultScopeToClient failed")
scopesAfterAdding, err := client.GetClientsDefaultScopes(
token.AccessToken,
cfg.GoCloak.Realm,
gocloakClientID,
)
assert.NoError(t, err, "GetClientsDefaultScopes failed")
assert.NotEqual(t, len(scopesBeforeAdding), len(scopesAfterAdding), "scope should have been added")
err = client.RemoveDefaultScopeFromClient(
token.AccessToken,
cfg.GoCloak.Realm,
gocloakClientID,
scopeID,
)
assert.NoError(t, err, "RemoveDefaultScopeFromClient failed")
scopesAfterRemoving, err := client.GetClientsDefaultScopes(
token.AccessToken,
cfg.GoCloak.Realm,
gocloakClientID,
)
assert.NoError(t, err, "GetClientsDefaultScopes failed")
assert.Equal(t, len(scopesAfterRemoving), len(scopesBeforeAdding), "scope should have been removed")
} | explode_data.jsonl/79526 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 570
} | [
2830,
3393,
38,
509,
385,
585,
27104,
2212,
13021,
3675,
2959,
3326,
18523,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
50286,
1669,
2126,
2648,
1155,
340,
25291,
1669,
1532,
2959,
2354,
7939,
1155,
340,
43947,
1669,
2126,
721... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestResourcesGroupInfo(t *testing.T) {
testCases := []struct {
discoveryErr error
resourceErr error
resourcesLists []*metav1.APIResourceList
expectedGroupCount int
expectedErr error
}{
{
discoveryErr: sgerrors.ErrNotFound,
expectedErr: sgerrors.ErrNotFound,
},
{
resourceErr: sgerrors.ErrNotFound,
expectedErr: sgerrors.ErrNotFound,
},
{
resourcesLists: []*metav1.APIResourceList{
{
GroupVersion: "",
APIResources: []metav1.APIResource{
{
Name: "name-1",
Kind: "kind1",
},
{
Name: "name-2",
Kind: "kind2",
},
},
},
{
GroupVersion: "/",
APIResources: []metav1.APIResource{
{
Name: "name-2",
Kind: "kind2",
},
},
},
},
expectedGroupCount: 2,
},
}
for _, testCase := range testCases {
m := &mockServerResourceGetter{
resources: testCase.resourcesLists,
err: testCase.resourceErr,
}
svc := Service{
discoveryClientFn: func(k *model.Kube) (ServerResourceGetter, error) {
return m, testCase.discoveryErr
},
}
groups, err := svc.resourcesGroupInfo(&model.Kube{})
if errors.Cause(err) != testCase.expectedErr {
t.Errorf("expected error %v actual %v",
testCase.expectedErr, err)
}
if len(groups) != testCase.expectedGroupCount {
t.Errorf("expected group count %d actual %d",
testCase.expectedGroupCount, len(groups))
}
}
} | explode_data.jsonl/2002 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 718
} | [
2830,
3393,
11277,
2808,
1731,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
34597,
7449,
7747,
981,
1465,
198,
197,
50346,
7747,
286,
1465,
198,
197,
10202,
2360,
37848,
257,
29838,
4059,
402,
16,
24922,
4783,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIssue103(t *testing.T) {
d, err := NewDocumentFromReader(strings.NewReader("<html><title>Scientists Stored These Images in DNA—Then Flawlessly Retrieved Them</title></html>"))
if err != nil {
t.Error(err)
}
text := d.Find("title").Text()
for i, r := range text {
t.Logf("%d: %d - %q\n", i, r, string(r))
}
t.Log(text)
} | explode_data.jsonl/56750 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 134
} | [
2830,
3393,
42006,
16,
15,
18,
1155,
353,
8840,
836,
8,
341,
2698,
11,
1848,
1669,
1532,
7524,
3830,
5062,
51442,
68587,
9639,
1551,
1784,
2102,
29,
71226,
93785,
4220,
11779,
304,
15552,
2293,
12209,
2988,
672,
15740,
57791,
47938,
522... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestNewImage(t *testing.T) {
buf := strings.NewReader(strings.Repeat("0", MaxImageFileSize+1))
_, err := NewImage(buf)
assert.ErrorIs(t, err, ErrImageTooLarge)
} | explode_data.jsonl/57566 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 63
} | [
2830,
3393,
3564,
1906,
1155,
353,
8840,
836,
8,
341,
26398,
1669,
9069,
68587,
51442,
2817,
10979,
445,
15,
497,
7487,
1906,
67649,
10,
16,
1171,
197,
6878,
1848,
1669,
1532,
1906,
10731,
692,
6948,
6141,
3872,
1155,
11,
1848,
11,
15... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestStreamAbort(t *testing.T) {
// Scenarios: node 1 is connected to node 2 in 2 channels,
// and the consumer of the communication calls receive.
// The two sub-scenarios happen:
// 1) The server certificate of node 2 changes in the first channel
// 2) Node 2 is evicted from the membership of the first channel
// In both of the scenarios, the Recv() call should be aborted
node2 := newTestNode(t)
defer node2.stop()
invalidNodeInfo := cluster.RemoteNode{
ID: node2.nodeInfo.ID,
ServerTLSCert: []byte{1, 2, 3},
ClientTLSCert: []byte{1, 2, 3},
}
for _, tst := range []struct {
testName string
membership []cluster.RemoteNode
expectedError string
}{
{
testName: "Evicted from membership",
membership: nil,
expectedError: "rpc error: code = Canceled desc = context canceled",
},
{
testName: "Changed TLS certificate",
membership: []cluster.RemoteNode{invalidNodeInfo},
expectedError: "rpc error: code = Canceled desc = context canceled",
},
} {
t.Run(tst.testName, func(t *testing.T) {
testStreamAbort(t, node2, tst.membership, tst.expectedError)
})
}
node2.handler.AssertNumberOfCalls(t, "OnSubmit", 2)
} | explode_data.jsonl/39834 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 454
} | [
2830,
3393,
3027,
85891,
1155,
353,
8840,
836,
8,
341,
197,
322,
2463,
60494,
25,
2436,
220,
16,
374,
8433,
311,
2436,
220,
17,
304,
220,
17,
11744,
345,
197,
322,
323,
279,
11502,
315,
279,
10535,
6738,
5258,
624,
197,
322,
576,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestProcesses_EnforceTTL(t *testing.T) {
p := &processes{maxLen: 10}
cfg, repo, _ := testcfg.BuildWithRepo(t)
cutoff := time.Now()
key0 := mustCreateKey(t, "0", repo)
value0, cancel := mustCreateCacheable(t, cfg, repo)
p.Add(key0, value0, cutoff.Add(-time.Hour), cancel)
key1 := mustCreateKey(t, "1", repo)
value1, cancel := mustCreateCacheable(t, cfg, repo)
p.Add(key1, value1, cutoff.Add(-time.Millisecond), cancel)
key2 := mustCreateKey(t, "2", repo)
value2, cancel := mustCreateCacheable(t, cfg, repo)
p.Add(key2, value2, cutoff.Add(time.Millisecond), cancel)
key3 := mustCreateKey(t, "3", repo)
value3, cancel := mustCreateCacheable(t, cfg, repo)
p.Add(key3, value3, cutoff.Add(time.Hour), cancel)
requireProcessesValid(t, p)
// We expect this cutoff to cause eviction of key0 and key1 but no other keys.
p.EnforceTTL(cutoff)
requireProcessesValid(t, p)
for i, v := range []cacheable{value0, value1} {
require.True(t, v.isClosed(), "value %d %v should be closed", i, v)
}
require.Equal(t, []key{key2, key3}, keys(t, p), "remaining keys after EnforceTTL")
p.EnforceTTL(cutoff)
requireProcessesValid(t, p)
require.Equal(t, []key{key2, key3}, keys(t, p), "remaining keys after second EnforceTTL")
} | explode_data.jsonl/81952 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 489
} | [
2830,
3393,
92727,
62,
1702,
8833,
51,
13470,
1155,
353,
8840,
836,
8,
341,
3223,
1669,
609,
4630,
288,
90,
2810,
11271,
25,
220,
16,
15,
630,
50286,
11,
15867,
11,
716,
1669,
1273,
14072,
25212,
2354,
25243,
1155,
692,
1444,
27440,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestNewSource(t *testing.T) {
if tr := os.Getenv("TRAVIS"); len(tr) > 0 {
return
}
conf := config.NewConfig()
conf.Load(NewSource())
if mongodbHost := conf.Get("mongodb", "host").String("localhost"); mongodbHost != "127.0.0.1" {
t.Errorf("expected %v and got %v", "127.0.0.1", mongodbHost)
}
if configPort := conf.Get("config", "port").Int(1337); configPort != 1337 {
t.Errorf("expected %v and got %v", "1337", configPort)
}
} | explode_data.jsonl/18357 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 186
} | [
2830,
3393,
3564,
3608,
1155,
353,
8840,
836,
8,
341,
743,
489,
1669,
2643,
64883,
445,
2378,
98716,
5038,
2422,
7624,
8,
861,
220,
15,
341,
197,
853,
198,
197,
630,
67850,
1669,
2193,
7121,
2648,
2822,
67850,
13969,
35063,
3608,
1236... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestDeleteSubteamAdmin(t *testing.T) {
tc, owner, admin, _, root, sub := memberSetupSubteam(t)
defer tc.Cleanup()
assertRole(tc, root, owner.Username, keybase1.TeamRole_OWNER)
assertRole(tc, root, admin.Username, keybase1.TeamRole_ADMIN)
_, err := AddMember(context.TODO(), tc.G, sub, admin.Username, keybase1.TeamRole_ADMIN)
if err != nil {
t.Fatal(err)
}
assertRole(tc, sub, owner.Username, keybase1.TeamRole_NONE)
assertRole(tc, sub, admin.Username, keybase1.TeamRole_ADMIN)
// switch to `admin` user
tc.G.Logout(context.TODO())
if err := admin.Login(tc.G); err != nil {
t.Fatal(err)
}
if err := Delete(context.Background(), tc.G, &teamsUI{}, sub); err != nil {
t.Fatal(err)
}
_, err = GetTeamByNameForTest(context.Background(), tc.G, sub, false, false)
if err == nil {
t.Fatal("no error getting deleted team")
}
aerr, ok := err.(libkb.AppStatusError)
if !ok {
t.Fatalf("error type: %T (%s), expected libkb.AppStatusError", err, err)
}
if aerr.Code != int(keybase1.StatusCode_SCTeamReadError) {
t.Errorf("error status code: %d, expected %d (%s)", aerr.Code, keybase1.StatusCode_SCTeamReadError, aerr)
}
} | explode_data.jsonl/26469 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 467
} | [
2830,
3393,
6435,
3136,
9196,
7210,
1155,
353,
8840,
836,
8,
341,
78255,
11,
6372,
11,
3986,
11,
8358,
3704,
11,
1186,
1669,
4462,
21821,
3136,
9196,
1155,
340,
16867,
17130,
727,
60639,
2822,
6948,
9030,
44415,
11,
3704,
11,
6372,
42... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestBuildInfoLabels(t *testing.T) {
type args struct {
obj *metav1.ObjectMeta
}
tests := []struct {
name string
args args
want []string
want1 []string
}{
{
name: "empty labels map",
args: args{
obj: &metav1.ObjectMeta{
Labels: nil,
},
},
want: []string{},
want1: []string{},
},
{
name: "2 labels in map",
args: args{
obj: &metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
"tic": "tac",
},
},
},
want: []string{"foo", "tic"},
want1: []string{"bar", "tac"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, got1 := BuildInfoLabels(tt.args.obj)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("BuildInfoLabels() got = %#v, want %#v", got, tt.want)
}
if !reflect.DeepEqual(got1, tt.want1) {
t.Errorf("BuildInfoLabels() got1 = %#v, want %#v", got1, tt.want1)
}
})
}
} | explode_data.jsonl/47256 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 474
} | [
2830,
3393,
11066,
1731,
23674,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
22671,
353,
4059,
402,
16,
80222,
198,
197,
532,
78216,
1669,
3056,
1235,
341,
197,
11609,
220,
914,
198,
197,
31215,
220,
2827,
198,
197,
5078... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestWordCountWithIsCJKLanguageFalse(t *testing.T) {
t.Parallel()
settings := map[string]interface{}{
"hasCJKLanguage": true,
}
assertFunc := func(t *testing.T, ext string, pages page.Pages) {
p := pages[0]
if p.WordCount() != 75 {
t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.Plain(), 74, p.WordCount())
}
if p.Summary() != simplePageWithIsCJKLanguageFalseSummary {
t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(),
simplePageWithIsCJKLanguageFalseSummary, p.Summary())
}
}
testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePageWithIsCJKLanguageFalse)
} | explode_data.jsonl/60625 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 259
} | [
2830,
3393,
10879,
2507,
2354,
3872,
89349,
13806,
4049,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
62930,
1669,
2415,
14032,
31344,
67066,
197,
197,
1,
4648,
89349,
13806,
788,
830,
345,
197,
630,
6948,
9626,
1669,
2915,
115... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestNewSignalsForwarderWaitWindows(t *testing.T) {
t.Parallel()
expectedWait := 5
terragruntOptions := options.NewTerragruntOptionsForTest("")
cmd := exec.Command(`..\testdata\test_sigint_wait.bat`, strconv.Itoa(expectedWait))
cmdChannel := make(chan error)
runChannel := make(chan error)
signalChannel := NewSignalsForwarder(forwardSignals, cmd, terragruntOptions.Logger, cmdChannel)
defer signalChannel.Close()
go func() {
runChannel <- cmd.Run()
}()
time.Sleep(1000 * time.Millisecond)
// start := time.Now()
// Note: sending interrupt on Windows is not supported by Windows and not implemented in Go
cmd.Process.Signal(os.Kill)
err := <-runChannel
cmdChannel <- err
assert.Error(t, err)
// Since we can't send an interrupt on Windows, our test script won't handle it gracefully and exit after the expected wait time,
// so this part of the test process cannot be done on Windows
// retCode, err := GetExitCode(err)
// assert.Nil(t, err)
// assert.Equal(t, retCode, expectedWait)
// assert.WithinDuration(t, start.Add(time.Duration(expectedWait)*time.Second), time.Now(), time.Second,
// "Expected to wait 5 (+/-1) seconds after SIGINT")
} | explode_data.jsonl/5180 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 383
} | [
2830,
3393,
3564,
96659,
25925,
261,
14190,
13164,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
42400,
14190,
1669,
220,
20,
271,
197,
465,
4101,
81,
3850,
3798,
1669,
2606,
7121,
51402,
68305,
3850,
3798,
2461,
2271,
31764,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIndexEditorUniqueMultipleNil(t *testing.T) {
format := types.Format_Default
db, err := dbfactory.MemFactory{}.CreateDB(context.Background(), format, nil, nil)
require.NoError(t, err)
colColl := schema.NewColCollection(
schema.NewColumn("pk", 0, types.IntKind, true),
schema.NewColumn("v1", 1, types.IntKind, false))
tableSch, err := schema.SchemaFromCols(colColl)
require.NoError(t, err)
index, err := tableSch.Indexes().AddIndexByColNames("idx_unique", []string{"v1"}, schema.IndexProperties{IsUnique: true, Comment: ""})
require.NoError(t, err)
indexSch := index.Schema()
emptyMap, err := types.NewMap(context.Background(), db)
require.NoError(t, err)
indexEditor := NewIndexEditor(context.Background(), index, emptyMap, tableSch)
for i := 0; i < 3; i++ {
dRow, err := row.New(format, indexSch, row.TaggedValues{
0: types.NullValue,
1: types.Int(i),
})
require.NoError(t, err)
fullKey, partialKey, value, err := dRow.ReduceToIndexKeys(index)
require.NoError(t, err)
require.NoError(t, indexEditor.InsertRow(context.Background(), fullKey, partialKey, value))
}
newIndexData, err := indexEditor.Map(context.Background())
require.NoError(t, err)
if assert.Equal(t, uint64(3), newIndexData.Len()) {
index := 0
_ = newIndexData.IterAll(context.Background(), func(key, value types.Value) error {
dReadRow, err := row.FromNoms(indexSch, key.(types.Tuple), value.(types.Tuple))
require.NoError(t, err)
dReadVals, err := dReadRow.TaggedValues()
require.NoError(t, err)
assert.Equal(t, row.TaggedValues{
1: types.Int(index), // We don't encode NULL values
}, dReadVals)
index++
return nil
})
}
} | explode_data.jsonl/27207 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 632
} | [
2830,
3393,
1552,
9410,
22811,
32089,
19064,
1155,
353,
8840,
836,
8,
341,
59416,
1669,
4494,
9978,
60336,
198,
20939,
11,
1848,
1669,
2927,
21618,
1321,
336,
4153,
46391,
4021,
3506,
5378,
19047,
1507,
3561,
11,
2092,
11,
2092,
340,
17... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSBKeyTypeUnmarshalJSON(t *testing.T) {
var kt sbKeyType
testInvalidJSONInput(t, &kt)
// Valid values.
for _, v := range []sbKeyType{
SBKeyTypeGPGKeys,
SBKeyTypeSignedByGPGKeys,
SBKeyTypeX509Certificates,
SBKeyTypeSignedByX509CAs,
} {
kt = sbKeyType("")
err := json.Unmarshal([]byte(`"`+string(v)+`"`), &kt)
assert.NoError(t, err)
}
// Invalid values
kt = sbKeyType("")
err := json.Unmarshal([]byte(`""`), &kt)
assert.Error(t, err)
kt = sbKeyType("")
err = json.Unmarshal([]byte(`"this is invalid"`), &kt)
assert.Error(t, err)
} | explode_data.jsonl/36507 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 260
} | [
2830,
3393,
16680,
97964,
1806,
27121,
5370,
1155,
353,
8840,
836,
8,
341,
2405,
18541,
7898,
97964,
271,
18185,
7928,
5370,
2505,
1155,
11,
609,
5840,
692,
197,
322,
7818,
2750,
624,
2023,
8358,
348,
1669,
2088,
3056,
16892,
97964,
515... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestStream_GetPriority(t *testing.T) {
t.Run("test", func(t *testing.T) {
assert := base.NewAssert(t)
for i := 0; i < 1000; i++ {
v := NewStream()
priority := uint8(rand.Uint32())
v.SetPriority(priority)
assert(v.GetPriority()).Equals(priority)
v.Release()
}
})
} | explode_data.jsonl/21163 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 131
} | [
2830,
3393,
3027,
13614,
20555,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
1944,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
6948,
1669,
2331,
7121,
8534,
1155,
340,
197,
2023,
600,
1669,
220,
15,
26,
600,
366,
220,
16,
15,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestValidEnvironment(t *testing.T) {
config, err := loadYAML(`
version: "3"
services:
dict-env:
image: busybox
environment:
FOO: "1"
BAR: 2
BAZ: 2.5
QUUX:
list-env:
image: busybox
environment:
- FOO=1
- BAR=2
- BAZ=2.5
- QUUX=
`)
assert.NoError(t, err)
expected := map[string]string{
"FOO": "1",
"BAR": "2",
"BAZ": "2.5",
"QUUX": "",
}
assert.Equal(t, 2, len(config.Services))
for _, service := range config.Services {
assert.Equal(t, expected, service.Environment)
}
} | explode_data.jsonl/16388 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 280
} | [
2830,
3393,
4088,
12723,
1155,
353,
8840,
836,
8,
341,
25873,
11,
1848,
1669,
2795,
56,
31102,
61528,
4366,
25,
330,
18,
698,
12779,
510,
220,
6451,
67462,
510,
262,
2168,
25,
13028,
2011,
198,
262,
4573,
510,
414,
434,
19499,
25,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestDutchNationalFlagWithoutPivot(t *testing.T) {
colors := []Color{
Blue,
Red,
Red,
Blue,
White,
Red,
White,
}
DutchNationalFlagWithoutPivot(colors)
expected := []Color{
Red,
Red,
Red,
White,
White,
Blue,
Blue,
}
for i, e := range colors {
assert.Equal(t, expected[i], e)
}
} | explode_data.jsonl/12219 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 150
} | [
2830,
3393,
35,
14061,
30812,
12135,
26040,
47,
16084,
1155,
353,
8840,
836,
8,
341,
21481,
82,
1669,
3056,
1636,
515,
197,
197,
10331,
345,
197,
197,
6033,
345,
197,
197,
6033,
345,
197,
197,
10331,
345,
197,
197,
14075,
345,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestDuplicateGlobal(t *testing.T) {
if globalV4 == nil {
t.Skip("no globalV4 addresses configured")
return
}
var trA Transport
var trB Transport
listenerA, err := trA.Listen(globalV4)
if err != nil {
t.Fatal(err)
}
defer listenerA.Close()
listenerB1, err := trB.Listen(globalV4)
if err != nil {
t.Fatal(err)
}
defer listenerB1.Close()
listenerB2, err := trB.Listen(globalV4)
if err != nil {
t.Fatal(err)
}
defer listenerB2.Close()
// Check which port we're using
port := dialOne(t, &trB, listenerA)
// Check consistency
for i := 0; i < 10; i++ {
dialOne(t, &trB, listenerA, port)
}
} | explode_data.jsonl/3803 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 264
} | [
2830,
3393,
53979,
11646,
1155,
353,
8840,
836,
8,
341,
743,
3644,
53,
19,
621,
2092,
341,
197,
3244,
57776,
445,
2152,
3644,
53,
19,
14230,
19755,
1138,
197,
853,
198,
197,
630,
2405,
489,
32,
16742,
198,
2405,
489,
33,
16742,
198,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestFeatureSpecForStandAlone(t *testing.T) {
testCases := []struct {
name string
confFile string
featureName Feature
featureEnabled bool
}{
{
name: "Feature is enabled",
confFile: "./testdata/feature_config.yaml",
featureName: Feature("Actor.Reentrancy"),
featureEnabled: true,
},
{
name: "Feature is disabled",
confFile: "./testdata/feature_config.yaml",
featureName: Feature("Test.Feature"),
featureEnabled: false,
},
{
name: "Feature is disabled if missing",
confFile: "./testdata/feature_config.yaml",
featureName: Feature("Test.Missing"),
featureEnabled: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
config, _, err := LoadStandaloneConfiguration(tc.confFile)
assert.NoError(t, err)
assert.Equal(t, tc.featureEnabled, IsFeatureEnabled(config.Spec.Features, tc.featureName))
})
}
} | explode_data.jsonl/9033 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 421
} | [
2830,
3393,
13859,
8327,
2461,
48733,
2101,
603,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
1843,
914,
198,
197,
67850,
1703,
981,
914,
198,
197,
1166,
15208,
675,
262,
19998,
198,
197,
1166,
15208,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestOrderingServiceConnFailure(t *testing.T) {
testOrderingServiceConnFailure(t, blockDelivererConsumerWithRecv)
testOrderingServiceConnFailure(t, blockDelivererConsumerWithSend)
assert.Equal(t, 0, connNumber)
} | explode_data.jsonl/38311 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 70
} | [
2830,
3393,
4431,
287,
1860,
9701,
17507,
1155,
353,
8840,
836,
8,
341,
18185,
4431,
287,
1860,
9701,
17507,
1155,
11,
2504,
16532,
1524,
261,
29968,
2354,
63483,
340,
18185,
4431,
287,
1860,
9701,
17507,
1155,
11,
2504,
16532,
1524,
26... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestValidateSchema(t *testing.T) {
for _, test := range []struct {
name string
format string
fileDecl string
finalOutput *transform.Decl
err string
}{
{
name: "format not supported",
format: "mp3",
fileDecl: "",
finalOutput: nil,
err: "schema not supported",
},
{
name: "json schema validation fail",
format: fileFormatEDI,
fileDecl: `{
"file_declaration": {
"segment_delimiter": "\n",
"element_delimiter": "*",
"segment_declarations": [
{
"name": "ISA",
"is_target": true,
"max": -2,
"elements": [
{ "name": "e1", "index": 1 }
]
}
]
}
}`,
finalOutput: nil,
err: `schema 'test' validation failed: file_declaration.segment_declarations.0.max: Must be greater than or equal to -1`,
},
{
name: "in code schema validation fail",
format: fileFormatEDI,
fileDecl: `{
"file_declaration": {
"segment_delimiter": "\n",
"element_delimiter": "*",
"segment_declarations": [
{
"name": "ISA",
"is_target": true,
"max": 0,
"elements": [
{ "name": "e1", "index": 1 }
]
}
]
}
}`,
finalOutput: nil,
err: `schema 'test': segment 'ISA' has 'min' value 1 > 'max' value 0`,
},
{
name: "FINAL_OUTPUT is nil",
format: fileFormatEDI,
fileDecl: `{
"file_declaration": {
"segment_delimiter": "\n",
"element_delimiter": "*",
"segment_declarations": [
{
"name": "ISA",
"is_target": true,
"elements": [
{ "name": "e1", "index": 1 }
]
}
]
}
}`,
finalOutput: nil,
err: `schema 'test': 'FINAL_OUTPUT' is missing`,
},
{
name: "FINAL_OUTPUT xpath is invalid",
format: fileFormatEDI,
fileDecl: `{
"file_declaration": {
"segment_delimiter": "\n",
"element_delimiter": "*",
"segment_declarations": [
{
"name": "ISA",
"is_target": true,
"elements": [
{ "name": "e1", "index": 1 }
]
}
]
}
}`,
finalOutput: &transform.Decl{XPath: strs.StrPtr("[")},
err: `schema 'test': 'FINAL_OUTPUT.xpath' (value: '[') is invalid, err: expression must evaluate to a node-set`,
},
{
name: "success",
format: fileFormatEDI,
fileDecl: `{
"file_declaration": {
"segment_delimiter": "\n",
"element_delimiter": "*",
"segment_declarations": [
{
"name": "ISA",
"is_target": true,
"elements": [
{ "name": "e2", "index": 2 },
{ "name": "e1", "index": 1 }
]
}
]
}
}`,
finalOutput: &transform.Decl{XPath: strs.StrPtr(".")},
err: ``,
},
} {
t.Run(test.name, func(t *testing.T) {
rt, err := NewEDIFileFormat("test").ValidateSchema(test.format, []byte(test.fileDecl), test.finalOutput)
if test.err != "" {
assert.Error(t, err)
assert.Equal(t, test.err, err.Error())
assert.Nil(t, rt)
} else {
assert.NoError(t, err)
cupaloy.SnapshotT(t, jsons.BPM(rt))
}
})
}
} | explode_data.jsonl/47478 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1700
} | [
2830,
3393,
17926,
8632,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1669,
2088,
3056,
1235,
341,
197,
11609,
286,
914,
198,
197,
59416,
414,
914,
198,
197,
17661,
21629,
262,
914,
198,
197,
14213,
5097,
353,
4701,
8934,
564,
198,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestFilters_building(t *testing.T) {
filterTest(
t,
`
tables:
buildings:
fields:
- name: id
type: id
- key: building
name: building
type: string
filters:
reject:
building: ["no","none"]
require_regexp:
'addr:housenumber': '^\d+[a-zA-Z,]*$'
building: '^[a-z_]+$'
mapping:
building:
- __any__
type: linestring
`,
// Accept
[]element.Tags{
element.Tags{"building": "yes", "addr:housenumber": "1a"},
element.Tags{"building": "house", "addr:housenumber": "131"},
element.Tags{"building": "residential", "addr:housenumber": "21"},
element.Tags{"building": "garage", "addr:housenumber": "0"},
element.Tags{"building": "hut", "addr:housenumber": "99999999"},
element.Tags{"building": "_", "addr:housenumber": "333"},
element.Tags{"building": "__any__", "addr:housenumber": "333"},
element.Tags{"building": "__nil__", "addr:housenumber": "333"},
element.Tags{"building": "y", "addr:housenumber": "1abcdefg"},
element.Tags{"building": "tower_block", "addr:housenumber": "1A"},
element.Tags{"building": "shed", "name": "N4", "addr:housenumber": "1AAA"},
element.Tags{"building": "office", "name": "N4", "addr:housenumber": "0XYAB,"},
},
// Reject
[]element.Tags{
element.Tags{"building": "yes", "addr:housenumber": "aaaaa-number"},
element.Tags{"building": "house", "addr:housenumber": "1-3a"},
element.Tags{"building": "house", "addr:housenumber": "❤"},
element.Tags{"building": "house", "addr:housenumber": "two"},
element.Tags{"building": "residential", "addr:housenumber": "x21"},
element.Tags{"building": "", "addr:housenumber": "111"},
element.Tags{"building": "no"},
element.Tags{"building": "no", "addr:housenumber": "1a"},
element.Tags{"building": "No", "addr:housenumber": "1a"},
element.Tags{"building": "NO", "addr:housenumber": "1a"},
element.Tags{"building": "none"},
element.Tags{"building": "none", "addr:housenumber": "0"},
element.Tags{"building": "nONe", "addr:housenumber": "0"},
element.Tags{"building": "No"},
element.Tags{"building": "NO"},
element.Tags{"building": "NONe"},
element.Tags{"building": "Garage"},
element.Tags{"building": "Hut"},
element.Tags{"building": "Farm"},
element.Tags{"building": "tower-block"},
element.Tags{"building": "❤"},
element.Tags{"building": "Ümlåütê"},
element.Tags{"building": "木"},
element.Tags{"building": "SheD", "name": "N4"},
element.Tags{"building": "oFFice", "name": "N4"},
element.Tags{"admin_level": "2"},
element.Tags{"admin_level": "4"},
element.Tags{"boundary": "administrative"},
element.Tags{"boundary": "maritime"},
element.Tags{"name": "maritime"},
},
)
} | explode_data.jsonl/25613 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1138
} | [
2830,
3393,
28351,
82397,
1155,
353,
8840,
836,
8,
1476,
50108,
2271,
1006,
197,
3244,
345,
197,
197,
3989,
32110,
510,
220,
13702,
510,
262,
5043,
510,
262,
481,
829,
25,
877,
198,
414,
943,
25,
877,
198,
262,
481,
1376,
25,
4752,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMakeLocations(t *testing.T) {
db, mock, err := sqlmock.New()
if err != nil {
t.Fatalf("an error '%s' was not expected when opening a stub database connection", err)
}
defer db.Close()
cdn := "mycdn"
mock.ExpectBegin()
expectedEdgeLocs, expectedRouterLocs := ExpectedMakeLocations()
MockMakeLocations(mock, expectedEdgeLocs, expectedRouterLocs, cdn)
mock.ExpectCommit()
dbCtx, _ := context.WithTimeout(context.TODO(), time.Duration(10)*time.Second)
tx, err := db.BeginTx(dbCtx, nil)
if err != nil {
t.Fatalf("creating transaction: %v", err)
}
defer tx.Commit()
actualEdgeLocs, actualRouterLocs, err := makeLocations(cdn, tx)
if err != nil {
t.Fatalf("makeLocations expected: nil error, actual: %v", err)
}
if !reflect.DeepEqual(expectedEdgeLocs, actualEdgeLocs) {
t.Errorf("makeLocations expected: %+v, actual: %+v", expectedEdgeLocs, actualEdgeLocs)
}
if !reflect.DeepEqual(expectedRouterLocs, actualRouterLocs) {
t.Errorf("makeLocations expected: %+v, actual: %+v", expectedRouterLocs, actualRouterLocs)
}
} | explode_data.jsonl/25152 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 400
} | [
2830,
3393,
8078,
43037,
1155,
353,
8840,
836,
8,
341,
20939,
11,
7860,
11,
1848,
1669,
5704,
16712,
7121,
741,
743,
1848,
961,
2092,
341,
197,
3244,
30762,
445,
276,
1465,
7677,
82,
6,
572,
537,
3601,
979,
8568,
264,
13633,
4625,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestPowN(t *testing.T) {
st := []struct {
name string
x float64
n int
exp float64
}{
{"x=0", 0.0, 123, 0.0},
{"x=1", 1, 1234, 1},
{"n is 2^-31", 2.0, math.MinInt32, 0.0},
{"testcase1", 2.0, 10, 1024.0},
}
for _, tt := range st {
t.Run(tt.name, func(t *testing.T) {
out := powN(tt.x, tt.n)
if out != tt.exp {
t.Fatalf("with input x:%f, x:%d wanted %02f, but got %02f", tt.x, tt.n, tt.exp, out)
}
t.Log("pass")
})
}
} | explode_data.jsonl/44787 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 265
} | [
2830,
3393,
66584,
45,
1155,
353,
8840,
836,
8,
341,
18388,
1669,
3056,
1235,
341,
197,
11609,
914,
198,
197,
10225,
262,
2224,
21,
19,
198,
197,
9038,
262,
526,
198,
197,
48558,
220,
2224,
21,
19,
198,
197,
59403,
197,
197,
4913,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestInteg_00_DescribeAgents is a live AWS integration test: it calls the
// Application Discovery Service DescribeAgents API with an empty input and
// only asserts that the call itself returns no error.
func TestInteg_00_DescribeAgents(t *testing.T) {
	// Bound the whole API call so a hung network request fails fast.
	ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancelFn()
	sess := integration.SessionWithDefaultRegion("us-west-2")
	svc := applicationdiscoveryservice.New(sess)
	params := &applicationdiscoveryservice.DescribeAgentsInput{}
	_, err := svc.DescribeAgentsWithContext(ctx, params, func(r *request.Request) {
		// Drop client-side parameter validation so the empty input is sent as-is.
		r.Handlers.Validate.RemoveByName("core.ValidateParametersHandler")
	})
	if err != nil {
		t.Errorf("expect no error, got %v", err)
	}
} | explode_data.jsonl/39721 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 184
} | [
2830,
3393,
1072,
791,
62,
15,
15,
98054,
3114,
91804,
1155,
353,
8840,
836,
8,
341,
20985,
11,
9121,
24911,
1669,
2266,
26124,
7636,
5378,
19047,
1507,
220,
20,
77053,
32435,
340,
16867,
9121,
24911,
2822,
1903,
433,
1669,
17590,
20674... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestError_ResultString exercises the three outcomes of Error.ResultString:
// a decodable result_xdr extra, a missing extra, and an extra of the wrong
// type.
func TestError_ResultString(t *testing.T) {
	const xdr = "AAAAAAAAAMj/////AAAAAgAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAAAAA="
	var herr Error
	// setProblem resets the error to a transaction_failed problem carrying
	// the given extras map.
	setProblem := func(extras map[string]interface{}) {
		herr.Problem.Type = "transaction_failed"
		herr.Problem.Extras = extras
	}
	// happy path: transaction_failed with the appropriate extra fields
	setProblem(map[string]interface{}{"result_xdr": xdr})
	trs, err := herr.ResultString()
	if assert.NoError(t, err) {
		assert.Equal(t, xdr, trs)
	}
	// sad path: missing result_xdr extra
	setProblem(map[string]interface{}{})
	_, err = herr.ResultString()
	assert.Equal(t, ErrResultNotPopulated, err)
	// sad path: unparseable result_xdr extra
	setProblem(map[string]interface{}{"result_xdr": 1234})
	_, err = herr.ResultString()
	assert.Error(t, err)
} | explode_data.jsonl/12139 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 328
} | [
2830,
3393,
1454,
49596,
703,
1155,
353,
8840,
836,
8,
341,
2405,
54739,
4600,
271,
197,
322,
6247,
1815,
25,
7745,
35060,
448,
279,
8311,
4960,
5043,
198,
9598,
615,
7763,
10121,
10184,
284,
330,
13528,
35060,
698,
9598,
615,
7763,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestSendMessage checks that SendMessage fails before the service is
// opened, and runs without complaint once OpenSvc has succeeded.
func TestSendMessage(t *testing.T) {
	svc := New()
	evt := logevent.LogEvent{}
	// Sending before OpenSvc must be rejected.
	if err := svc.SendMessage(evt); err == nil {
		t.Error("expected error from SendMessage() but got nil")
	}
	if err := svc.OpenSvc(); err != nil {
		t.Errorf("OpenSvc() returned unexpected error %v", err)
	}
	defer svc.CloseSvc()
	svc.SendMessage(evt)
} | explode_data.jsonl/71385 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 132
} | [
2830,
3393,
80863,
1155,
353,
8840,
836,
8,
341,
22671,
1669,
1532,
741,
6725,
1556,
1669,
1487,
3087,
5247,
1556,
31483,
9859,
1669,
2839,
66330,
12531,
1556,
340,
743,
1848,
621,
2092,
341,
197,
3244,
6141,
445,
7325,
1465,
504,
62292... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestIntArray_Join verifies that joining an int array with "." yields the
// dot-separated decimal representation of its elements.
func TestIntArray_Join(t *testing.T) {
	gtest.C(t, func(t *gtest.T) {
		values := []int{0, 1, 2, 3, 4, 5, 6}
		arr := garray.NewIntArrayFrom(values)
		t.Assert(arr.Join("."), "0.1.2.3.4.5.6")
	})
} | explode_data.jsonl/47613 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 108
} | [
2830,
3393,
95338,
10598,
1961,
1155,
353,
8840,
836,
8,
341,
3174,
1944,
727,
1155,
11,
2915,
1155,
353,
82038,
836,
8,
341,
197,
11323,
16,
1669,
3056,
396,
90,
15,
11,
220,
16,
11,
220,
17,
11,
220,
18,
11,
220,
19,
11,
220,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestParam verifies that a registered transformer receives the parameter
// portion of the struct tag (the "#$_" from `ltrim=#$_`) and can use it to
// mutate the tagged field in place.
func TestParam(t *testing.T) {
	type Test struct {
		String string `r:"ltrim=#$_"`
	}
	tr := New()
	tr.SetTagName("r")
	tr.Register("ltrim", func(ctx context.Context, t *Transformer, value reflect.Value, param string) error {
		// Strip every leading rune listed in the tag parameter.
		value.SetString(strings.TrimLeft(value.String(), param))
		return nil
	})
	in := Test{String: "_test"}
	err := tr.Struct(context.Background(), &in)
	Equal(t, err, nil)
	Equal(t, in.String, "test")
} | explode_data.jsonl/43622 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 166
} | [
2830,
3393,
2001,
1155,
353,
8840,
836,
8,
1476,
13158,
3393,
2036,
341,
197,
4980,
914,
1565,
81,
2974,
75,
10666,
45131,
6401,
8805,
197,
630,
8196,
1669,
1532,
741,
8196,
4202,
22616,
445,
81,
1138,
8196,
19983,
445,
75,
10666,
497... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestCommissionValidate table-tests Commission.Validate: every rate must
// lie within [0%, 100%], the rate may not exceed the max rate, and the max
// change rate may not exceed the max rate either.
func TestCommissionValidate(t *testing.T) {
	testCases := []struct {
		input types.Commission
		expectErr bool
	}{
		// invalid commission; max rate < 0%
		{types.NewCommission(sdk.ZeroDec(), sdk.MustNewDecFromStr("-1.00"), sdk.ZeroDec()), true},
		// invalid commission; max rate > 100%
		{types.NewCommission(sdk.ZeroDec(), sdk.MustNewDecFromStr("2.00"), sdk.ZeroDec()), true},
		// invalid commission; rate < 0%
		{types.NewCommission(sdk.MustNewDecFromStr("-1.00"), sdk.ZeroDec(), sdk.ZeroDec()), true},
		// invalid commission; rate > max rate
		{types.NewCommission(sdk.MustNewDecFromStr("0.75"), sdk.MustNewDecFromStr("0.50"), sdk.ZeroDec()), true},
		// invalid commission; max change rate < 0%
		{types.NewCommission(sdk.OneDec(), sdk.OneDec(), sdk.MustNewDecFromStr("-1.00")), true},
		// invalid commission; max change rate > max rate
		{types.NewCommission(sdk.OneDec(), sdk.MustNewDecFromStr("0.75"), sdk.MustNewDecFromStr("0.90")), true},
		// valid commission
		{types.NewCommission(sdk.MustNewDecFromStr("0.20"), sdk.OneDec(), sdk.MustNewDecFromStr("0.10")), false},
	}
	for i, tc := range testCases {
		err := tc.input.Validate()
		// Only the presence/absence of an error is asserted, not its message.
		require.Equal(t, tc.expectErr, err != nil, "unexpected result; tc #%d, input: %v", i, tc.input)
	}
} | explode_data.jsonl/72075 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 479
} | [
2830,
3393,
73750,
17926,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
22427,
257,
4494,
2961,
2728,
198,
197,
24952,
7747,
1807,
198,
197,
59403,
197,
197,
322,
8318,
12123,
26,
1932,
4379,
366,
220,
15,
13... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestCallBackHandle drives nacosWatcher.callBackHandle with successive
// subscription snapshots and checks that the watcher emits the expected
// create/update/delete registry events through Next().
func TestCallBackHandle(t *testing.T) {
	// Watcher with a buffered result channel so callBackHandle never blocks.
	nw := nacosWatcher{
		nr: &nacosRegistry{},
		exit: make(chan bool),
		next: make(chan *registry.Result, 10),
		services: make(map[string][]*registry.Service),
		cacheServices: make(map[string][]model.SubscribeService),
	}
	//Create action
	t.Run("CallBackHandleCreate", func(t *testing.T) {
		// First snapshot for a service name yields a "create" event.
		services := make([]model.SubscribeService, 1)
		services[0] = model.SubscribeService{
			InstanceId: "1",
			Ip: "127.0.0.1",
			Port: 1234,
			ServiceName: "DEMO",
		}
		nw.callBackHandle(services, nil)
		result, err := nw.Next()
		assert.True(t, result.Action == "create" && result.Service.Name == "DEMO" && err == nil)
	})
	//Update action
	t.Run("CallBackHandleUpdate", func(t *testing.T) {
		services := make([]model.SubscribeService, 1)
		services[0] = model.SubscribeService{
			InstanceId: "1",
			Ip: "127.0.0.1",
			Port: 1234,
			ServiceName: "DEMO1",
		}
		nw.callBackHandle(services, nil)
		result, err := nw.Next()
		assert.True(t, result.Action == "create" && result.Service.Name == "DEMO1" && err == nil)
		// Same instance id with a changed port yields an "update" event.
		services = make([]model.SubscribeService, 1)
		services[0] = model.SubscribeService{
			InstanceId: "1",
			Ip: "127.0.0.1",
			Port: 12345,
			ServiceName: "DEMO1",
		}
		nw.callBackHandle(services, nil)
		result, err = nw.Next()
		assert.True(t, result.Action == "update" && result.Service.Name == "DEMO1" && result.Service.Nodes[0].Address == "127.0.0.1:12345")
		assert.Nil(t, err)
	})
	//Delete action
	t.Run("CallBackHandleDelete", func(t *testing.T) {
		// Register two instances first...
		services := make([]model.SubscribeService, 2)
		services[0] = model.SubscribeService{
			InstanceId: "1",
			Ip: "127.0.0.1",
			Port: 1234,
			ServiceName: "DEMO1",
		}
		services[1] = model.SubscribeService{
			InstanceId: "2",
			Ip: "127.0.0.1",
			Port: 12345,
			ServiceName: "DEMO1",
		}
		nw.callBackHandle(services, nil)
		result, err := nw.Next()
		assert.True(t, result.Action == "create" && result.Service.Name == "DEMO1" && err == nil)
		// ...then send a snapshot with only one; the missing node (port
		// 12345) must surface as a "delete" event.
		services = make([]model.SubscribeService, 1)
		services[0] = model.SubscribeService{
			InstanceId: "1",
			Ip: "127.0.0.1",
			Port: 1234,
			ServiceName: "DEMO1",
		}
		nw.callBackHandle(services, nil)
		result, err = nw.Next()
		assert.True(t, result.Action == "delete" && result.Service.Name == "DEMO1" && result.Service.Nodes[0].Address == "127.0.0.1:12345")
		assert.Nil(t, err)
	})
} | explode_data.jsonl/47999 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1133
} | [
2830,
3393,
67273,
6999,
1155,
353,
8840,
836,
8,
341,
9038,
86,
1669,
308,
41647,
47248,
515,
197,
9038,
81,
25,
310,
609,
77,
41647,
15603,
38837,
197,
14519,
25,
688,
1281,
35190,
1807,
1326,
197,
28144,
25,
688,
1281,
35190,
353,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestServerPreferredResourcesRetries runs discovery against a fake API
// server whose /apis/extensions/v1beta1 endpoint fails a configurable
// number of times, and checks how many resource groups survive: one
// transient failure is retried away, two failures drop the group and
// surface a group-discovery error.
func TestServerPreferredResourcesRetries(t *testing.T) {
	stable := metav1.APIResourceList{
		GroupVersion: "v1",
		APIResources: []metav1.APIResource{
			{Name: "pods", Namespaced: true, Kind: "Pod"},
		},
	}
	beta := metav1.APIResourceList{
		GroupVersion: "extensions/v1",
		APIResources: []metav1.APIResource{
			{Name: "deployments", Namespaced: true, Kind: "Deployment"},
		},
	}
	// response builds a handler that fails the extensions endpoint the
	// first numErrors times it is hit; i is captured per-handler.
	response := func(numErrors int) http.HandlerFunc {
		var i = 0
		return func(w http.ResponseWriter, req *http.Request) {
			var list interface{}
			switch req.URL.Path {
			case "/apis/extensions/v1beta1":
				// Simulate a flaky group endpoint.
				if i < numErrors {
					i++
					w.WriteHeader(http.StatusInternalServerError)
					return
				}
				list = &beta
			case "/api/v1":
				list = &stable
			case "/api":
				list = &metav1.APIVersions{
					Versions: []string{
						"v1",
					},
				}
			case "/apis":
				list = &metav1.APIGroupList{
					Groups: []metav1.APIGroup{
						{
							Name: "extensions",
							Versions: []metav1.GroupVersionForDiscovery{
								{GroupVersion: "extensions/v1beta1", Version: "v1beta1"},
							},
							PreferredVersion: metav1.GroupVersionForDiscovery{
								GroupVersion: "extensions/v1beta1",
								Version: "v1beta1",
							},
						},
					},
				}
			default:
				t.Logf("unexpected request: %s", req.URL.Path)
				w.WriteHeader(http.StatusNotFound)
				return
			}
			output, err := json.Marshal(list)
			if err != nil {
				t.Errorf("unexpected encoding error: %v", err)
				return
			}
			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusOK)
			w.Write(output)
		}
	}
	tests := []struct {
		responseErrors int
		expectResources int
		expectedError func(err error) bool
	}{
		{
			responseErrors: 1,
			expectResources: 2,
			expectedError: func(err error) bool {
				return err == nil
			},
		},
		{
			responseErrors: 2,
			expectResources: 1,
			expectedError: IsGroupDiscoveryFailedError,
		},
	}
	for i, tc := range tests {
		server := httptest.NewServer(http.HandlerFunc(response(tc.responseErrors)))
		defer server.Close()
		client := NewDiscoveryClientForConfigOrDie(&restclient.Config{Host: server.URL})
		resources, err := client.ServerPreferredResources()
		if !tc.expectedError(err) {
			t.Errorf("case %d: unexpected error: %v", i, err)
		}
		got, err := GroupVersionResources(resources)
		if err != nil {
			t.Errorf("case %d: unexpected error: %v", i, err)
		}
		if len(got) != tc.expectResources {
			t.Errorf("case %d: expect %d resources, got %#v", i, tc.expectResources, got)
		}
		server.Close()
	}
} | explode_data.jsonl/19757 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1143
} | [
2830,
3393,
5475,
22482,
11277,
12020,
4019,
1155,
353,
8840,
836,
8,
341,
18388,
480,
1669,
77520,
16,
24922,
4783,
852,
515,
197,
197,
2808,
5637,
25,
330,
85,
16,
756,
197,
197,
7082,
11277,
25,
3056,
4059,
402,
16,
24922,
4783,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
// TestJsonMarshalBadAdvertise forces the internal JSON marshal step to fail
// and verifies Advertise surfaces that exact error to the caller.
func TestJsonMarshalBadAdvertise(t *testing.T) {
	// In-memory ZooKeeper test server wired through a static connector.
	s := zktest.New()
	z, ch, _ := s.Connect()
	b := zkplus.NewBuilder().PathPrefix("/test").Connector(&zkplus.StaticConnector{C: z, Ch: ch})
	d1, _ := New(BuilderConnector(b), "TestAdvertise1", &Config{})
	e := errors.New("nope")
	// Swap in a marshal func that always fails.
	d1.jsonMarshal = func(v interface{}) ([]byte, error) {
		return nil, e
	}
	// errors.Tail unwraps to the root cause, which must be our sentinel.
	require.Equal(t, e, errors.Tail(d1.Advertise("TestAdvertiseService", "", (uint16)(1234))))
} | explode_data.jsonl/46863 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 183
} | [
2830,
3393,
5014,
55438,
17082,
2589,
67787,
1155,
353,
8840,
836,
8,
341,
1903,
1669,
94528,
1944,
7121,
741,
20832,
11,
521,
11,
716,
1669,
274,
43851,
741,
2233,
1669,
94528,
7138,
7121,
3297,
1005,
1820,
14335,
4283,
1944,
1827,
359... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestDiscardStatsMove writes an oversized discard-stats entry, forces a
// value-log GC rewrite of the file holding it, and verifies the stats
// survive a DB close/reopen cycle.
func TestDiscardStatsMove(t *testing.T) {
	dir, err := ioutil.TempDir("", "badger-test")
	require.NoError(t, err)
	ops := getTestOptions(dir)
	// One entry per value-log file, so each write rolls to a new file.
	ops.ValueLogMaxEntries = 1
	db, err := Open(ops)
	require.NoError(t, err)
	// Build a stats map big enough that its JSON encoding exceeds the
	// value threshold and therefore lands in the value log.
	stat := make(map[uint32]int64, ops.ValueThreshold+10)
	for i := uint32(0); i < uint32(ops.ValueThreshold+10); i++ {
		stat[i] = 0
	}
	db.vlog.lfDiscardStats.Lock()
	db.vlog.lfDiscardStats.m = stat
	encodedDS, _ := json.Marshal(db.vlog.lfDiscardStats.m)
	db.vlog.lfDiscardStats.Unlock()
	entries := []*Entry{{
		Key: y.KeyWithTs(lfDiscardStatsKey, 1),
		// The discard stat value is more than value threshold.
		Value: encodedDS,
	}}
	// Push discard stats entry to the write channel.
	req, err := db.sendToWriteCh(entries)
	require.NoError(t, err)
	req.Wait()
	// Unset discard stats. We've already pushed the stats. If we don't unset it then it will be
	// pushed again on DB close. Also, the first insertion was in vlog file 1, this insertion would
	// be in value log file 3.
	db.vlog.lfDiscardStats.Lock()
	db.vlog.lfDiscardStats.m = nil
	db.vlog.lfDiscardStats.Unlock()
	// Push more entries so that we get more than 1 value log files.
	require.NoError(t, db.Update(func(txn *Txn) error {
		e := NewEntry([]byte("f"), []byte("1"))
		return txn.SetEntry(e)
	}))
	require.NoError(t, db.Update(func(txn *Txn) error {
		e := NewEntry([]byte("ff"), []byte("1"))
		return txn.SetEntry(e)
	}))
	tr := trace.New("Badger.ValueLog", "GC")
	// Use first value log file for GC. This value log file contains the discard stats.
	lf := db.vlog.filesMap[0]
	require.NoError(t, db.vlog.rewrite(lf, tr))
	require.NoError(t, db.Close())
	db, err = Open(ops)
	// discardStats will be populated using vlog.populateDiscardStats(), which pushes discard stats
	// to vlog.lfDiscardStats.flushChan. Hence wait for some time, for discard stats to be updated.
	time.Sleep(1 * time.Second)
	require.NoError(t, err)
	db.vlog.lfDiscardStats.RLock()
	require.Equal(t, stat, db.vlog.lfDiscardStats.m)
	db.vlog.lfDiscardStats.RUnlock()
	require.NoError(t, db.Close())
} | explode_data.jsonl/39104 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 786
} | [
2830,
3393,
23477,
567,
16635,
9860,
1155,
353,
8840,
836,
8,
341,
48532,
11,
1848,
1669,
43144,
65009,
6184,
19814,
330,
13855,
1389,
16839,
1138,
17957,
35699,
1155,
11,
1848,
340,
197,
3721,
1669,
633,
2271,
3798,
14161,
340,
197,
37... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// Test_JavaScript_Partial renders a JavaScript template that pulls in a
// partial, and checks both the reported content type and the final output.
func Test_JavaScript_Partial(t *testing.T) {
	const tmpl = "let a = 1;\n<%= partial(\"part.js\") %>"
	const part = "alert('Hi <%= name %>!');"
	req := require.New(t)
	eng := NewEngine()
	// Partials are looked up by their underscore-prefixed name.
	req.NoError(eng.TemplatesBox.AddString(jsTemplate, tmpl))
	req.NoError(eng.TemplatesBox.AddString("_part.js", part))
	handler := eng.JavaScript(jsTemplate)
	req.Equal("application/javascript", handler.ContentType())
	var out bytes.Buffer
	req.NoError(handler.Render(&out, Data{"name": "Yonghwan"}))
	req.Equal("let a = 1;\nalert('Hi Yonghwan!');", out.String())
} | explode_data.jsonl/44614 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 217
} | [
2830,
3393,
10598,
2907,
5910,
1088,
20894,
1155,
353,
8840,
836,
8,
341,
4777,
79839,
284,
330,
1149,
264,
284,
220,
16,
17882,
77,
27,
35205,
7130,
36014,
4480,
2857,
62705,
1018,
19134,
4777,
949,
284,
330,
5083,
492,
13048,
90194,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestMultistoreSnapshot_Checksum pins the exact byte stream produced by
// multistore snapshots: each chunk's SHA-256 must match the golden hashes
// below, so any encoding change is caught and forces a format bump.
func TestMultistoreSnapshot_Checksum(t *testing.T) {
	// Chunks from different nodes must fit together, so all nodes must produce identical chunks.
	// This checksum test makes sure that the byte stream remains identical. If the test fails
	// without having changed the data (e.g. because the Protobuf or zlib encoding changes),
	// snapshottypes.CurrentFormat must be bumped.
	store := newMultiStoreWithGeneratedData(dbm.NewMemDB(), 5, 10000)
	version := uint64(store.LastCommitID().Version)
	testcases := []struct {
		format uint32
		chunkHashes []string
	}{
		{1, []string{
			"503e5b51b657055b77e88169fadae543619368744ad15f1de0736c0a20482f24",
			"e1a0daaa738eeb43e778aefd2805e3dd720798288a410b06da4b8459c4d8f72e",
			"aa048b4ee0f484965d7b3b06822cf0772cdcaad02f3b1b9055e69f2cb365ef3c",
			"7921eaa3ed4921341e504d9308a9877986a879fe216a099c86e8db66fcba4c63",
			"a4a864e6c02c9fca5837ec80dc84f650b25276ed7e4820cf7516ced9f9901b86",
			"ca2879ac6e7205d257440131ba7e72bef784cd61642e32b847729e543c1928b9",
		}},
	}
	for _, tc := range testcases {
		tc := tc
		t.Run(fmt.Sprintf("Format %v", tc.format), func(t *testing.T) {
			chunks, err := store.Snapshot(version, tc.format)
			require.NoError(t, err)
			hashes := []string{}
			// Hash each chunk as it is streamed out.
			for chunk := range chunks {
				hasher := sha256.New()
				_, err := io.Copy(hasher, chunk)
				require.NoError(t, err)
				hashes = append(hashes, hex.EncodeToString(hasher.Sum(nil)))
			}
			assert.Equal(t, tc.chunkHashes, hashes,
				"Snapshot output for format %v has changed", tc.format)
		})
	}
} | explode_data.jsonl/44874 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 676
} | [
2830,
3393,
40404,
380,
460,
15009,
28188,
1242,
1155,
353,
8840,
836,
8,
341,
197,
322,
910,
15296,
504,
2155,
7798,
1969,
4946,
3786,
11,
773,
678,
7798,
1969,
8193,
19516,
26757,
624,
197,
322,
1096,
32529,
1273,
3643,
2704,
429,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestRedis_BitOpNot covers BitOpNot: a misconfigured Redis handle must
// fail, while the working client must store the bitwise complement of key1
// ("\x00" -> "\xff") in destKey and report a destination length of 1.
func TestRedis_BitOpNot(t *testing.T) {
	runOnRedis(t, func(client *Redis) {
		// Seed key1 with a single NUL byte so NOT flips it to 0xff.
		err := client.Set("key1", "\u0000")
		assert.Nil(t, err)
		// NOTE(review): a Redis built with an empty second argument is
		// expected to fail the call — confirm what that argument denotes.
		_, err = NewRedis(client.Addr, "").BitOpNot("destKey", "key1")
		assert.NotNil(t, err)
		val, err := client.BitOpNot("destKey", "key1")
		assert.Nil(t, err)
		assert.Equal(t, int64(1), val)
		valStr, err := client.Get("destKey")
		assert.Nil(t, err)
		assert.Equal(t, "\xff", valStr)
	})
} | explode_data.jsonl/39175 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 200
} | [
2830,
3393,
48137,
1668,
275,
7125,
2623,
1155,
353,
8840,
836,
8,
341,
56742,
1925,
48137,
1155,
11,
2915,
12805,
353,
48137,
8,
341,
197,
9859,
1669,
2943,
4202,
445,
792,
16,
497,
2917,
84,
15,
15,
15,
15,
1138,
197,
6948,
59678,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestBaseChunkSeries checks that baseChunkSeries iterates only the series
// whose refs appear in the postings list (skipping unknown refs) and yields
// each one's label set and chunk metadata unchanged.
func TestBaseChunkSeries(t *testing.T) {
	// refdSeries is a series fixture keyed by its index ref.
	type refdSeries struct {
		lset labels.Labels
		chunks []chunks.Meta
		ref uint64
	}
	cases := []struct {
		series []refdSeries
		// Postings should be in the sorted order of the series
		postings []uint64
		expIdxs []int
	}{
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{"a", "a"}}...),
					chunks: []chunks.Meta{
						{Ref: 29}, {Ref: 45}, {Ref: 245}, {Ref: 123}, {Ref: 4232}, {Ref: 5344},
						{Ref: 121},
					},
					ref: 12,
				},
				{
					lset: labels.New([]labels.Label{{"a", "a"}, {"b", "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset: labels.New([]labels.Label{{"b", "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref: 1,
				},
				{
					lset: labels.New([]labels.Label{{"b", "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 829}, {Ref: 239}, {Ref: 2349}, {Ref: 659}, {Ref: 269},
					},
					ref: 108,
				},
			},
			postings: []uint64{12, 13, 10, 108}, // 13 doesn't exist and should just be skipped over.
			expIdxs: []int{0, 1, 3},
		},
		{
			series: []refdSeries{
				{
					lset: labels.New([]labels.Label{{"a", "a"}, {"b", "b"}}...),
					chunks: []chunks.Meta{
						{Ref: 82}, {Ref: 23}, {Ref: 234}, {Ref: 65}, {Ref: 26},
					},
					ref: 10,
				},
				{
					lset: labels.New([]labels.Label{{"b", "c"}}...),
					chunks: []chunks.Meta{{Ref: 8282}},
					ref: 3,
				},
			},
			// Empty postings: the iterator must produce nothing.
			postings: []uint64{},
			expIdxs: []int{},
		},
	}
	for _, tc := range cases {
		// Load the fixtures into an in-memory index.
		mi := newMockIndex()
		for _, s := range tc.series {
			mi.AddSeries(s.ref, s.lset, s.chunks...)
		}
		bcs := &baseChunkSeries{
			p: index.NewListPostings(tc.postings),
			index: mi,
			tombstones: newMemTombstones(),
		}
		i := 0
		for bcs.Next() {
			lset, chks, _ := bcs.At()
			idx := tc.expIdxs[i]
			testutil.Equals(t, tc.series[idx].lset, lset)
			testutil.Equals(t, tc.series[idx].chunks, chks)
			i++
		}
		// Every expected series must have been visited, with no error.
		testutil.Equals(t, len(tc.expIdxs), i)
		testutil.Ok(t, bcs.Err())
	}
	return
} | explode_data.jsonl/68061 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1112
} | [
2830,
3393,
3978,
28304,
25544,
1155,
353,
8840,
836,
8,
341,
13158,
2053,
67,
25544,
2036,
341,
197,
8810,
746,
256,
9201,
4679,
82,
198,
197,
23049,
15296,
3056,
84263,
58806,
271,
197,
59504,
2622,
21,
19,
198,
197,
630,
1444,
2264... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestCmd builds the command with a mock updater-flags source and verifies
// the consul runner is wired with that exact mock and a non-nil endpoint.
func TestCmd(t *testing.T) {
	ctrl := gomock.NewController(assert.Tracing(t))
	defer ctrl.Finish()
	updaterFlags := rotor.NewMockUpdaterFromFlags(ctrl)
	cmd := Cmd(updaterFlags)
	cmd.Flags.Parse([]string{})
	runner := cmd.Runner.(*consulRunner)
	assert.SameInstance(t, runner.updaterFlags, updaterFlags)
	assert.NonNil(t, runner.endpoint)
} | explode_data.jsonl/71902 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 142
} | [
2830,
3393,
15613,
1155,
353,
8840,
836,
8,
341,
84381,
1669,
342,
316,
1176,
7121,
2051,
75846,
8240,
4527,
1155,
1171,
16867,
23743,
991,
18176,
2822,
77333,
79854,
3830,
9195,
1669,
62025,
7121,
11571,
79854,
3830,
9195,
62100,
692,
25... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestHeadTracker_ResubscribeOnSubscriptionError verifies that closing the
// head subscription's channel makes the head tracker reconnect exactly
// once: connect count goes 1 -> 2, disconnect count 0 -> 1, and no new
// longest-chain callbacks fire.
func TestHeadTracker_ResubscribeOnSubscriptionError(t *testing.T) {
	t.Parallel()
	g := gomega.NewGomegaWithT(t)
	store, cleanup := cltest.NewStore(t)
	defer cleanup()
	logger := store.Config.CreateProductionLogger()
	ethClient := new(mocks.Client)
	sub := new(mocks.Subscription)
	store.EthClient = ethClient
	// Capture each subscription's headers channel so the test can close it.
	chchHeaders := make(chan chan<- *models.Head, 1)
	ethClient.On("ChainID", mock.Anything).Maybe().Return(store.Config.ChainID(), nil)
	ethClient.On("SubscribeNewHead", mock.Anything, mock.Anything).
		Run(func(args mock.Arguments) { chchHeaders <- args.Get(1).(chan<- *models.Head) }).
		Return(sub, nil)
	sub.On("Unsubscribe").Return()
	sub.On("Err").Return(nil)
	checker := &cltest.MockHeadTrackable{}
	ht := services.NewHeadTracker(logger, store, []strpkg.HeadTrackable{checker}, cltest.NeverSleeper{})
	// connect
	assert.Nil(t, ht.Start())
	g.Eventually(func() int32 { return checker.ConnectedCount() }).Should(gomega.Equal(int32(1)))
	assert.Equal(t, int32(0), checker.DisconnectedCount())
	assert.Equal(t, int32(0), checker.OnNewLongestChainCount())
	headers := <-chchHeaders
	// trigger reconnect loop
	close(headers)
	// Exactly one reconnect: count reaches 2 and then stays there.
	g.Eventually(func() int32 { return checker.ConnectedCount() }).Should(gomega.Equal(int32(2)))
	g.Consistently(func() int32 { return checker.ConnectedCount() }).Should(gomega.Equal(int32(2)))
	assert.Equal(t, int32(1), checker.DisconnectedCount())
	assert.Equal(t, int32(0), checker.OnNewLongestChainCount())
	// stop
	assert.NoError(t, ht.Stop())
} | explode_data.jsonl/9097 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 555
} | [
2830,
3393,
12346,
31133,
92815,
392,
6273,
1925,
33402,
1454,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
3174,
1669,
342,
32696,
7121,
38,
32696,
2354,
51,
1155,
692,
57279,
11,
21290,
1669,
1185,
1944,
7121,
6093,
1155,
340... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestUnmarshalNestedAnonymousStructs checks that Unmarshal fills a field
// reached through two levels of anonymous (embedded) structs as well as the
// outer struct's own tagged field.
func TestUnmarshalNestedAnonymousStructs(t *testing.T) {
	type Nested struct {
		Value string `toml:"nested_field"`
	}
	type Deep struct {
		Nested
	}
	type Document struct {
		Deep
		Value string `toml:"own_field"`
	}
	input := []byte(`nested_field = "nested value"` + "\n" + `own_field = "own value"`)
	var doc Document
	if err := Unmarshal(input, &doc); err != nil {
		t.Fatal("should not error")
	}
	if doc.Value != "own value" || doc.Nested.Value != "nested value" {
		t.Fatal("unexpected values")
	}
} | explode_data.jsonl/46354 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 198
} | [
2830,
3393,
1806,
27121,
71986,
32684,
9422,
82,
1155,
353,
8840,
836,
8,
341,
13158,
71742,
2036,
341,
197,
47399,
914,
1565,
37401,
75,
2974,
59271,
5013,
8805,
197,
532,
13158,
18183,
2036,
341,
197,
18317,
9980,
198,
197,
532,
13158... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.