text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestPut(t *testing.T) {
idStore := NewIdentityMapper(msgCryptoService, dummyID, noopPurgeTrigger, msgCryptoService)
identity := []byte("yacovm")
identity2 := []byte("not-yacovm")
identity3 := []byte("invalidIdentity")
msgCryptoService.revokedIdentities[string(identity3)] = struct{}{}
pkiID := msgCryptoService.GetPKIidOfCert(api.PeerIdentityType(identity))
pkiID2 := msgCryptoService.GetPKIidOfCert(api.PeerIdentityType(identity2))
pkiID3 := msgCryptoService.GetPKIidOfCert(api.PeerIdentityType(identity3))
assert.NoError(t, idStore.Put(pkiID, identity))
assert.NoError(t, idStore.Put(pkiID, identity))
assert.Error(t, idStore.Put(nil, identity))
assert.Error(t, idStore.Put(pkiID2, nil))
assert.Error(t, idStore.Put(pkiID2, identity))
assert.Error(t, idStore.Put(pkiID, identity2))
assert.Error(t, idStore.Put(pkiID3, identity3))
} | explode_data.jsonl/6633 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 338
} | [
2830,
3393,
19103,
1155,
353,
8840,
836,
8,
341,
15710,
6093,
1669,
1532,
18558,
10989,
8119,
58288,
1860,
11,
17292,
915,
11,
60829,
47,
39823,
17939,
11,
3750,
58288,
1860,
340,
197,
16912,
1669,
3056,
3782,
445,
88,
580,
859,
76,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBlockService_Online_External(t *testing.T) {
cfg := &configuration.Configuration{
Mode: configuration.Online,
}
mockIndexer := &mocks.Indexer{}
servicer := NewBlockAPIService(cfg, mockIndexer)
ctx := context.Background()
blockResponse := &types.BlockResponse{
Block: &types.Block{
BlockIdentifier: &types.BlockIdentifier{
Index: 100,
Hash: "block 100",
},
},
}
otherTxs := []*types.TransactionIdentifier{}
for i := 0; i < 200; i++ {
otherTxs = append(otherTxs, &types.TransactionIdentifier{
Hash: fmt.Sprintf("tx%d", i),
})
}
blockResponse.OtherTransactions = otherTxs
mockIndexer.On(
"GetBlockLazy",
ctx,
(*types.PartialBlockIdentifier)(nil),
).Return(
blockResponse,
nil,
).Once()
b, err := servicer.Block(ctx, &types.BlockRequest{})
assert.Nil(t, err)
assert.Equal(t, blockResponse, b)
for _, otherTx := range b.OtherTransactions {
tx := &types.Transaction{
TransactionIdentifier: otherTx,
}
mockIndexer.On(
"GetBlockTransaction",
ctx,
blockResponse.Block.BlockIdentifier,
otherTx,
).Return(
tx,
nil,
).Once()
bTx, err := servicer.BlockTransaction(ctx, &types.BlockTransactionRequest{
BlockIdentifier: blockResponse.Block.BlockIdentifier,
TransactionIdentifier: otherTx,
})
assert.Nil(t, err)
assert.Equal(t, &types.BlockTransactionResponse{
Transaction: tx,
}, bTx)
}
mockIndexer.AssertExpectations(t)
} | explode_data.jsonl/24697 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 570
} | [
2830,
3393,
4713,
1860,
62,
19598,
62,
25913,
1155,
353,
8840,
836,
8,
341,
50286,
1669,
609,
21138,
17334,
515,
197,
197,
3636,
25,
6546,
8071,
1056,
345,
197,
532,
77333,
1552,
261,
1669,
609,
16712,
82,
18338,
261,
16094,
1903,
648... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestShell(t *testing.T) {
t.Parallel()
ctx := context.New(config.Project{})
ctx.CurrentDirectory = "TEST"
sh := Shell{
Context: ctx,
Commands: []Command{
{
ReturnCode: 0,
Stdout: "TEST",
Stderr: "TEST",
},
{
ReturnCode: 128,
Stdout: "TEST",
Stderr: "TEST",
},
},
}
dir := sh.CurrentDirectory()
assert.Equal(t, dir, ctx.CurrentDirectory)
exists := sh.Exists("echo")
assert.True(t, exists)
sh.SupportedPrograms = map[string]bool{
"echo": false,
}
exists = sh.Exists("echo")
assert.False(t, exists)
cmd := sh.NewCommand("echo", "true")
assert.NotNil(t, cmd)
proc, err := sh.Exec(cmd)
assert.NoError(t, err)
assert.NotNil(t, proc)
proc, err = sh.Exec(cmd)
assert.EqualError(t, err, "128")
assert.NotNil(t, proc)
sh.expectOverflowError = true
expectedErr := ErrCommandOverflow{
Index: 2,
Len: 2,
Command: cmd.String(),
}
_, err = sh.Exec(cmd)
assert.EqualError(t, err, expectedErr.Error())
} | explode_data.jsonl/37354 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 461
} | [
2830,
3393,
25287,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
20985,
1669,
2266,
7121,
8754,
30944,
37790,
20985,
11517,
9310,
284,
330,
10033,
698,
36196,
1669,
29402,
515,
197,
70871,
25,
5635,
345,
197,
197,
30479,
25,
30... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAPI_GetAuthorizationURL(t *testing.T) {
t.Parallel()
ctx := context.Background()
a := apiWithTempDB(t)
_, mock := tconf.MockedProvider(t, a.config, "")
a.ext.UseProviders(mock)
// no context
_, err := a.GetAuthorizationURL(ctx, provider.Unknown)
assert.Error(t, err)
// no request context
_, err = a.GetAuthorizationURL(context.Background(), provider.Unknown)
assert.Error(t, err)
// bad provider
ctx.SetProvider("bad")
_, err = a.GetAuthorizationURL(ctx, "bad")
assert.Error(t, err)
// internal provider
ctx.SetProvider(a.Provider())
_, err = a.GetAuthorizationURL(ctx, a.Provider())
assert.Error(t, err)
// disabled provider
ctx.SetProvider(provider.BitBucket)
_, err = a.GetAuthorizationURL(ctx, provider.BitBucket)
assert.Error(t, err)
// valid external provider
p := provider.Name(mock.Name())
ctx.SetProvider(p)
authURL, err := a.GetAuthorizationURL(ctx, p)
assert.NoError(t, err)
_, err = url.Parse(authURL)
assert.NoError(t, err)
} | explode_data.jsonl/75406 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 368
} | [
2830,
3393,
7082,
13614,
18124,
3144,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
20985,
1669,
2266,
19047,
741,
11323,
1669,
6330,
2354,
12151,
3506,
1155,
340,
197,
6878,
7860,
1669,
259,
6135,
24664,
291,
5179,
1155,
11,
26... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNewTelegramClientIfTokenEmpty(t *testing.T) {
client, err := NewTelegramClient("", "", 0, &duration.Service{}, &TelegramSenderImpl{})
assert.NoError(t, err)
assert.Nil(t, client.Bot)
} | explode_data.jsonl/50720 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 73
} | [
2830,
3393,
3564,
72244,
2959,
2679,
3323,
3522,
1155,
353,
8840,
836,
8,
341,
25291,
11,
1848,
1669,
1532,
72244,
2959,
19814,
7342,
220,
15,
11,
609,
17021,
13860,
22655,
609,
72244,
20381,
9673,
37790,
6948,
35699,
1155,
11,
1848,
34... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestNumberDataPoint_IntVal(t *testing.T) {
ms := NewNumberDataPoint()
assert.EqualValues(t, int64(0), ms.IntVal())
testValIntVal := int64(17)
ms.SetIntVal(testValIntVal)
assert.EqualValues(t, testValIntVal, ms.IntVal())
} | explode_data.jsonl/32726 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 92
} | [
2830,
3393,
2833,
1043,
2609,
32054,
2208,
1155,
353,
8840,
836,
8,
341,
47691,
1669,
1532,
2833,
1043,
2609,
741,
6948,
12808,
6227,
1155,
11,
526,
21,
19,
7,
15,
701,
9829,
7371,
2208,
2398,
18185,
2208,
1072,
2208,
1669,
526,
21,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReadFileFromUserHomeDir(t *testing.T) {
hds := NewHomedirService()
content := []byte(`t`)
pathToFile := "testfile"
user, _ := user.Current()
_, err := hds.ReadFileFromUserHomeDir(pathToFile)
assert.Error(t, err)
err = hds.WriteFileToUserHomeDir(content, pathToFile)
strcontent, err := hds.ReadFileFromUserHomeDir(pathToFile)
assert.NotEmpty(t, strcontent)
assert.Nil(t, err)
os.RemoveAll(filepath.Join(user.HomeDir, pathToFile))
} | explode_data.jsonl/35863 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 175
} | [
2830,
3393,
4418,
1703,
3830,
1474,
7623,
6184,
1155,
353,
8840,
836,
8,
341,
9598,
5356,
1669,
1532,
39,
24139,
404,
1860,
741,
27751,
1669,
3056,
3782,
5809,
83,
24183,
26781,
41550,
1669,
330,
1944,
1192,
698,
19060,
11,
716,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestWriter(t *testing.T) {
w := New()
b := &bytes.Buffer{}
w.Out = b
w.Start()
for i := 0; i < 2; i++ {
_, _ = fmt.Fprintln(w, "foo")
}
w.Stop()
_, _ = fmt.Fprintln(b, "bar")
want := "foo\nfoo\nbar\n"
if b.String() != want {
t.Fatalf("want %q, got %q", want, b.String())
}
} | explode_data.jsonl/44304 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 148
} | [
2830,
3393,
6492,
1155,
353,
8840,
836,
8,
341,
6692,
1669,
1532,
741,
2233,
1669,
609,
9651,
22622,
16094,
6692,
47178,
284,
293,
198,
6692,
12101,
741,
2023,
600,
1669,
220,
15,
26,
600,
366,
220,
17,
26,
600,
1027,
341,
197,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestSameInodes(t *testing.T) {
port1 := Port{Proto: "tcp", Port: 100, Process: "proc", inode: "inode1"}
port2 := Port{Proto: "tcp", Port: 100, Process: "proc", inode: "inode1"}
portProto := Port{Proto: "udp", Port: 100, Process: "proc", inode: "inode1"}
portPort := Port{Proto: "tcp", Port: 101, Process: "proc", inode: "inode1"}
portInode := Port{Proto: "tcp", Port: 100, Process: "proc", inode: "inode2"}
portProcess := Port{Proto: "tcp", Port: 100, Process: "other", inode: "inode1"}
tests := []struct {
name string
a, b List
want bool
}{
{
"identical",
List{port1, port1},
List{port2, port2},
true,
},
{
"proto differs",
List{port1, port1},
List{port2, portProto},
false,
},
{
"port differs",
List{port1, port1},
List{port2, portPort},
false,
},
{
"inode differs",
List{port1, port1},
List{port2, portInode},
false,
},
{
// SameInodes does not check the Process field
"Process differs",
List{port1, port1},
List{port2, portProcess},
true,
},
}
for _, tt := range tests {
got := tt.a.SameInodes(tt.b)
if got != tt.want {
t.Errorf("%s: Equal = %v; want %v", tt.name, got, tt.want)
}
}
} | explode_data.jsonl/53619 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 561
} | [
2830,
3393,
19198,
641,
2539,
1155,
353,
8840,
836,
8,
341,
52257,
16,
1669,
5776,
90,
31549,
25,
330,
27161,
497,
5776,
25,
220,
16,
15,
15,
11,
8603,
25,
330,
15782,
497,
34803,
25,
330,
52239,
16,
16707,
52257,
17,
1669,
5776,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestKafkaClient_startKafkaConsumer_FailCreateConsumer(t *testing.T) {
module := fixtureModule()
module.Configure("test", "consumer.test")
// Set up the mock to return the leader broker for a test topic and partition
testError := errors.New("test error")
client := &helpers.MockSaramaClient{}
client.On("NewConsumerFromClient").Return((*helpers.MockSaramaConsumer)(nil), testError)
client.On("Close").Return(nil)
err := module.startKafkaConsumer(client)
client.AssertExpectations(t)
assert.Equal(t, testError, err, "Expected startKafkaConsumer to return error")
} | explode_data.jsonl/34254 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 186
} | [
2830,
3393,
42,
21883,
2959,
4906,
42,
21883,
29968,
1400,
604,
4021,
29968,
1155,
353,
8840,
836,
8,
341,
54020,
1669,
12507,
3332,
741,
54020,
78281,
445,
1944,
497,
330,
46764,
5958,
5130,
197,
322,
2573,
705,
279,
7860,
311,
470,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTransportIdleConnCrash(t *testing.T) {
defer afterTest(t)
tr := &Transport{}
c := &Client{Transport: tr}
unblockCh := make(chan bool, 1)
ts := httptest.NewServer(HandlerFunc(func(w ResponseWriter, r *Request) {
<-unblockCh
tr.CloseIdleConnections()
}))
defer ts.Close()
didreq := make(chan bool)
go func() {
res, err := c.Get(ts.URL)
if err != nil {
t.Error(err)
} else {
res.Body.Close() // returns idle conn
}
didreq <- true
}()
unblockCh <- true
<-didreq
} | explode_data.jsonl/4892 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 215
} | [
2830,
3393,
27560,
41370,
9701,
16001,
988,
1155,
353,
8840,
836,
8,
341,
16867,
1283,
2271,
1155,
340,
25583,
1669,
609,
27560,
16094,
1444,
1669,
609,
2959,
90,
27560,
25,
489,
630,
20479,
4574,
1143,
1669,
1281,
35190,
1807,
11,
220,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClientSetBuildConfigurationTemplate(t *testing.T) {
client, err := NewRealTestClient(t)
require.NoError(t, err, "Expected no error")
err = client.DeleteBuildConfiguration("Empty_TestClientSetBuildConfigurationTemplate")
require.NoError(t, err, "Expected no error")
err = client.DeleteBuildConfiguration("Empty_TestClientSetBuildConfigurationTemplateTemplate")
require.NoError(t, err, "Expected no error")
config := &types.BuildConfiguration{
ProjectID: "Empty",
Name: "TestClientSetBuildConfigurationTemplateTemplate",
TemplateFlag: true,
}
err = client.CreateBuildConfiguration(config)
require.NoError(t, err, "Expected no error")
require.NotNil(t, config, "Create to return config")
config = &types.BuildConfiguration{
ProjectID: "Empty",
Name: "TestClientSetBuildConfigurationTemplate",
}
err = client.CreateBuildConfiguration(config)
require.NoError(t, err, "Expected no error")
require.NotNil(t, config, "Create to return config")
assert.Equal(t, "", string(config.TemplateID), "Expected create to return empty TemplateID")
err = client.SetBuildConfigurationTemplate("Empty_TestClientSetBuildConfigurationTemplate", "Empty_TestClientSetBuildConfigurationTemplateTemplate")
require.NoError(t, err, "Expected no error")
config, err = client.GetBuildConfiguration("Empty_TestClientSetBuildConfigurationTemplate")
require.NoError(t, err, "Expected no error")
require.NotNil(t, config, "Get to return config")
assert.Equal(t, "Empty_TestClientSetBuildConfigurationTemplateTemplate", string(config.TemplateID), "Expected get to return templateID")
} | explode_data.jsonl/40091 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 477
} | [
2830,
3393,
2959,
1649,
11066,
7688,
7275,
1155,
353,
8840,
836,
8,
341,
25291,
11,
1848,
1669,
1532,
12768,
2271,
2959,
1155,
340,
17957,
35699,
1155,
11,
1848,
11,
330,
18896,
902,
1465,
1138,
9859,
284,
2943,
18872,
11066,
7688,
445,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDownloadTo_TLS(t *testing.T) {
// Set up mock server w/ tls enabled
srv, err := repotest.NewTempServerWithCleanup(t, "testdata/*.tgz*")
srv.Stop()
if err != nil {
t.Fatal(err)
}
srv.StartTLS()
defer srv.Stop()
if err := srv.CreateIndex(); err != nil {
t.Fatal(err)
}
if err := srv.LinkIndices(); err != nil {
t.Fatal(err)
}
repoConfig := filepath.Join(srv.Root(), "repositories.yaml")
repoCache := srv.Root()
c := ChartDownloader{
Out: os.Stderr,
Verify: VerifyAlways,
Keyring: "testdata/helm-test-key.pub",
RepositoryConfig: repoConfig,
RepositoryCache: repoCache,
Getters: getter.All(&cli.EnvSettings{
RepositoryConfig: repoConfig,
RepositoryCache: repoCache,
}),
Options: []getter.Option{},
}
cname := "test/signtest"
dest := srv.Root()
where, v, err := c.DownloadTo(cname, "", dest)
if err != nil {
t.Fatal(err)
}
target := filepath.Join(dest, "signtest-0.1.0.tgz")
if expect := target; where != expect {
t.Errorf("Expected download to %s, got %s", expect, where)
}
if v.FileHash == "" {
t.Error("File hash was empty, but verification is required.")
}
if _, err := os.Stat(target); err != nil {
t.Error(err)
}
} | explode_data.jsonl/1372 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 537
} | [
2830,
3393,
11377,
1249,
69067,
1155,
353,
8840,
836,
8,
341,
197,
322,
2573,
705,
7860,
3538,
289,
14,
55026,
8970,
198,
1903,
10553,
11,
1848,
1669,
2064,
354,
477,
7121,
12151,
5475,
2354,
67335,
1155,
11,
330,
92425,
23540,
41428,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 8 |
func TestWithoutVersion(t *testing.T) {
version := make(map[string]string)
version["tekton.dev/name:foo"] = "v1"
out := FindVersion(version)
test.AssertOutput(t, "", out)
} | explode_data.jsonl/20214 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 67
} | [
2830,
3393,
26040,
5637,
1155,
353,
8840,
836,
8,
341,
74954,
1669,
1281,
9147,
14032,
30953,
692,
74954,
1183,
42713,
777,
21523,
75992,
25,
7975,
1341,
284,
330,
85,
16,
1837,
13967,
1669,
7379,
5637,
37770,
692,
18185,
11711,
5097,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestLimit(t *testing.T) {
for _, tc := range tcs {
Prepare(tc.proc, tc.arg)
tc.proc.Reg.InputBatch = newBatch(t, tc.types, tc.proc, Rows)
Call(tc.proc, tc.arg)
tc.proc.Reg.InputBatch = newBatch(t, tc.types, tc.proc, Rows)
Call(tc.proc, tc.arg)
tc.proc.Reg.InputBatch = &batch.Batch{}
Call(tc.proc, tc.arg)
tc.proc.Reg.InputBatch = nil
Call(tc.proc, tc.arg)
}
} | explode_data.jsonl/58457 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 187
} | [
2830,
3393,
16527,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17130,
1669,
2088,
259,
4837,
341,
197,
197,
50590,
44415,
83430,
11,
17130,
21186,
340,
197,
78255,
83430,
8989,
16130,
21074,
284,
501,
21074,
1155,
11,
17130,
23226,
11,
17... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGenerateQueueAttributes(t *testing.T) {
cases := map[string]struct {
in v1beta1.QueueParameters
out map[string]string
}{
"FilledInput": {
in: *sqsParams(),
out: map[string]string{
v1beta1.AttributeDelaySeconds: strconv.FormatInt(delaySeconds, 10),
v1beta1.AttributeKmsMasterKeyID: kmsMasterKeyID,
},
},
"RedrivePolicy": {
in: *sqsParams(func(p *v1beta1.QueueParameters) {
p.RedrivePolicy = &v1beta1.RedrivePolicy{
DeadLetterTargetARN: &arn,
MaxReceiveCount: maxReceiveCount,
}
}),
out: map[string]string{
v1beta1.AttributeDelaySeconds: strconv.FormatInt(delaySeconds, 10),
v1beta1.AttributeRedrivePolicy: `{"deadLetterTargetArn":"arn","maxReceiveCount":5}`,
v1beta1.AttributeKmsMasterKeyID: kmsMasterKeyID,
},
},
"EmptyInput": {
in: v1beta1.QueueParameters{},
out: nil,
},
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
r := GenerateQueueAttributes(&tc.in)
if diff := cmp.Diff(r, tc.out); diff != "" {
t.Errorf("GenerateQueueAttributes(...): -want, +got:\n%s", diff)
}
})
}
} | explode_data.jsonl/82682 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 500
} | [
2830,
3393,
31115,
7554,
10516,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
2415,
14032,
60,
1235,
341,
197,
17430,
220,
348,
16,
19127,
16,
50251,
9706,
198,
197,
13967,
2415,
14032,
30953,
198,
197,
59403,
197,
197,
1,
76046,
25... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestZetaIncrementally(t *testing.T) {
defer leaktest.AfterTest(t)()
if testing.Short() {
t.Skip("short")
}
// Theta cannot be 1 by definition, so this is a safe initial value.
oldTheta := 1.0
var oldZetaN float64
var oldN uint64
for _, test := range tests {
// If theta has changed, recompute from scratch
if test.theta != oldTheta {
var err error
oldZetaN, err = computeZetaFromScratch(test.n, test.theta)
if err != nil {
t.Fatalf("Failed to compute zeta(%d,%f): %s", test.n, test.theta, err)
}
oldN = test.n
continue
}
computedZeta, err := computeZetaIncrementally(oldN, test.n, test.theta, oldZetaN)
if err != nil {
t.Fatalf("Failed to compute zeta(%d,%f) incrementally: %s", test.n, test.theta, err)
}
if math.Abs(computedZeta-test.expected) > 0.000000001 {
t.Fatalf("expected %6.4f, got %6.4f", test.expected, computedZeta)
}
oldZetaN = computedZeta
oldN = test.n
}
} | explode_data.jsonl/54585 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 401
} | [
2830,
3393,
57,
1915,
38311,
745,
1155,
353,
8840,
836,
8,
341,
16867,
23352,
1944,
36892,
2271,
1155,
8,
741,
743,
7497,
55958,
368,
341,
197,
3244,
57776,
445,
8676,
1138,
197,
532,
197,
322,
68710,
4157,
387,
220,
16,
553,
7271,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestE2E(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecsWithDefaultAndCustomReporters(t, "capa-e2e", []Reporter{framework.CreateJUnitReporterForProw(e2eCtx.Settings.ArtifactFolder)})
} | explode_data.jsonl/1361 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 73
} | [
2830,
3393,
36,
17,
36,
1155,
353,
8840,
836,
8,
341,
79096,
19524,
3050,
7832,
604,
340,
85952,
8327,
16056,
3675,
3036,
10268,
10361,
388,
1155,
11,
330,
11346,
64,
5655,
17,
68,
497,
3056,
52766,
90,
3794,
7251,
56248,
52766,
2461,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestGetReadingsByValueDescriptorNamesError(t *testing.T) {
reset()
myMock := &dbMock.DBClient{}
myMock.On("ReadingsByValueDescriptorNames", mock.Anything, mock.Anything).Return([]models.Reading{}, fmt.Errorf("some error"))
dbClient = myMock
_, err := getReadingsByValueDescriptorNames([]string{"error"}, 0, logger.NewMockClient())
if err == nil {
t.Errorf("Expected error in getting readings by value descriptor names")
}
} | explode_data.jsonl/48182 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 149
} | [
2830,
3393,
1949,
4418,
819,
1359,
1130,
11709,
7980,
1454,
1155,
353,
8840,
836,
8,
341,
70343,
741,
13624,
11571,
1669,
609,
1999,
11571,
22537,
2959,
31483,
13624,
11571,
8071,
445,
4418,
819,
1359,
1130,
11709,
7980,
497,
7860,
13311,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestInRepoConfigEnabled(t *testing.T) {
testCases := []struct {
name string
config Config
expected bool
}{
{
name: "Exact match",
config: Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{
Enabled: map[string]*bool{
"org/repo": utilpointer.BoolPtr(true),
},
},
},
},
expected: true,
},
{
name: "Orgname matches",
config: Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{
Enabled: map[string]*bool{
"org": utilpointer.BoolPtr(true),
},
},
},
},
expected: true,
},
{
name: "Globally enabled",
config: Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{
Enabled: map[string]*bool{
"*": utilpointer.BoolPtr(true),
},
},
},
},
expected: true,
},
{
name: "Disabled by default",
expected: false,
},
}
for idx := range testCases {
tc := testCases[idx]
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
if result := tc.config.InRepoConfigEnabled("org/repo"); result != tc.expected {
t.Errorf("Expected %t, got %t", tc.expected, result)
}
})
}
} | explode_data.jsonl/8095 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 584
} | [
2830,
3393,
641,
25243,
2648,
5462,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
25873,
256,
5532,
198,
197,
42400,
1807,
198,
197,
59403,
197,
197,
515,
298,
11609,
25,
330,
57954... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestValidateVirtualServer(t *testing.T) {
virtualServer := v1.VirtualServer{
ObjectMeta: meta_v1.ObjectMeta{
Name: "cafe",
Namespace: "default",
},
Spec: v1.VirtualServerSpec{
Host: "example.com",
TLS: &v1.TLS{
Secret: "abc",
},
Upstreams: []v1.Upstream{
{
Name: "first",
Service: "service-1",
LBMethod: "random",
Port: 80,
MaxFails: createPointerFromInt(8),
MaxConns: createPointerFromInt(16),
Keepalive: createPointerFromInt(32),
},
{
Name: "second",
Service: "service-2",
Port: 80,
},
},
Routes: []v1.Route{
{
Path: "/first",
Action: &v1.Action{
Pass: "first",
},
},
{
Path: "/second",
Action: &v1.Action{
Pass: "second",
},
},
},
},
}
err := ValidateVirtualServer(&virtualServer, false)
if err != nil {
t.Errorf("ValidateVirtualServer() returned error %v for valid input %v", err, virtualServer)
}
} | explode_data.jsonl/65818 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 511
} | [
2830,
3393,
17926,
33026,
5475,
1155,
353,
8840,
836,
8,
341,
9558,
5475,
1669,
348,
16,
95979,
5475,
515,
197,
23816,
12175,
25,
8823,
2273,
16,
80222,
515,
298,
21297,
25,
414,
330,
924,
1859,
756,
298,
90823,
25,
330,
2258,
756,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestAzureRMNATGateway(t *testing.T) {
t.Parallel()
if testing.Short() {
t.Skip("skipping test in short mode")
}
tftest.GoldenFileResourceTests(t, "nat_gateway_test")
} | explode_data.jsonl/25781 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 72
} | [
2830,
3393,
78107,
23652,
45,
828,
40709,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
743,
7497,
55958,
368,
341,
197,
3244,
57776,
445,
4886,
5654,
1273,
304,
2805,
3856,
1138,
197,
630,
3244,
723,
477,
1224,
813,
268,
1703... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestSegmentReporter(t *testing.T) {
logger := zap.L().Named("telemetry")
conf := &Conf{}
t.Run("state", func(t *testing.T) {
t.Run("no_state", func(t *testing.T) {
fsys := afero.NewMemMapFs()
mockClient := newMockAnalyticsClient()
r := newAnalyticsReporterWithClient(mockClient, conf, &mocks.Store{}, fsys, logger)
r.reportServerLaunch()
require.NoError(t, r.Stop())
mockClient.WaitForClose()
require.Len(t, mockClient.Events(), 2)
exists, err := afero.Exists(fsys, stateFile)
require.NoError(t, err)
require.True(t, exists)
})
t.Run("corrupt_state", func(t *testing.T) {
fsys := afero.NewMemMapFs()
require.NoError(t, afero.WriteFile(fsys, stateFile, []byte("rubbish"), 0o600))
mockClient := newMockAnalyticsClient()
r := newAnalyticsReporterWithClient(mockClient, conf, &mocks.Store{}, fsys, logger)
r.reportServerLaunch()
require.NoError(t, r.Stop())
mockClient.WaitForClose()
require.Len(t, mockClient.Events(), 2)
state, err := afero.ReadFile(fsys, stateFile)
require.NoError(t, err)
require.NotEqual(t, []byte("rubbish"), state)
})
t.Run("read_only_fs", func(t *testing.T) {
fsys := afero.NewReadOnlyFs(afero.NewMemMapFs())
mockClient := newMockAnalyticsClient()
r := newAnalyticsReporterWithClient(mockClient, conf, &mocks.Store{}, fsys, logger)
r.reportServerLaunch()
require.NoError(t, r.Stop())
mockClient.WaitForClose()
require.Len(t, mockClient.Events(), 2)
exists, err := afero.Exists(fsys, stateFile)
require.NoError(t, err)
require.False(t, exists)
})
})
} | explode_data.jsonl/56419 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 676
} | [
2830,
3393,
21086,
52766,
1155,
353,
8840,
836,
8,
341,
17060,
1669,
32978,
1214,
1005,
15810,
445,
665,
35958,
1138,
67850,
1669,
609,
15578,
31483,
3244,
16708,
445,
2454,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
3244,
16708,
445... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_spanDataToThrift(t *testing.T) {
now := time.Now()
answerValue := int64(42)
keyValue := "value"
resultValue := true
statusCodeValue := int64(2)
doubleValue := float64(123.456)
boolTrue := true
statusMessage := "error"
tests := []struct {
name string
data *trace.SpanData
want *jaeger.Span
}{
{
name: "no parent",
data: &trace.SpanData{
SpanContext: trace.SpanContext{
TraceID: trace.TraceID{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16},
SpanID: trace.SpanID{1, 2, 3, 4, 5, 6, 7, 8},
},
Name: "/foo",
StartTime: now,
EndTime: now,
Attributes: map[string]interface{}{
"double": doubleValue,
"key": keyValue,
},
Annotations: []trace.Annotation{
{
Time: now,
Message: statusMessage,
Attributes: map[string]interface{}{
"answer": answerValue,
},
},
{
Time: now,
Message: statusMessage,
Attributes: map[string]interface{}{
"result": resultValue,
},
},
},
Status: trace.Status{Code: trace.StatusCodeUnknown, Message: "error"},
},
want: &jaeger.Span{
TraceIdLow: 651345242494996240,
TraceIdHigh: 72623859790382856,
SpanId: 72623859790382856,
OperationName: "/foo",
StartTime: now.UnixNano() / 1000,
Duration: 0,
Tags: []*jaeger.Tag{
{Key: "double", VType: jaeger.TagType_DOUBLE, VDouble: &doubleValue},
{Key: "key", VType: jaeger.TagType_STRING, VStr: &keyValue},
{Key: "error", VType: jaeger.TagType_BOOL, VBool: &boolTrue},
{Key: "status.code", VType: jaeger.TagType_LONG, VLong: &statusCodeValue},
{Key: "status.message", VType: jaeger.TagType_STRING, VStr: &statusMessage},
},
Logs: []*jaeger.Log{
{Timestamp: now.UnixNano() / 1000, Fields: []*jaeger.Tag{
{Key: "answer", VType: jaeger.TagType_LONG, VLong: &answerValue},
{Key: "message", VType: jaeger.TagType_STRING, VStr: &statusMessage},
}},
{Timestamp: now.UnixNano() / 1000, Fields: []*jaeger.Tag{
{Key: "result", VType: jaeger.TagType_BOOL, VBool: &resultValue},
{Key: "message", VType: jaeger.TagType_STRING, VStr: &statusMessage},
}},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := spanDataToThrift(tt.data)
sort.Slice(got.Tags, func(i, j int) bool {
return got.Tags[i].Key < got.Tags[j].Key
})
sort.Slice(tt.want.Tags, func(i, j int) bool {
return tt.want.Tags[i].Key < tt.want.Tags[j].Key
})
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("spanDataToThrift()\nGot:\n%v\nWant;\n%v", got, tt.want)
}
})
}
} | explode_data.jsonl/19733 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1320
} | [
2830,
3393,
37382,
1043,
1249,
1001,
41380,
1155,
353,
8840,
836,
8,
341,
80922,
1669,
882,
13244,
2822,
72570,
1130,
1669,
526,
21,
19,
7,
19,
17,
340,
23634,
1130,
1669,
330,
957,
698,
9559,
1130,
1669,
830,
198,
23847,
2078,
1130,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFilterOutSchedulable(t *testing.T) {
p1 := BuildTestPod("p1", 1500, 200000)
p2 := BuildTestPod("p2", 3000, 200000)
p3 := BuildTestPod("p3", 100, 200000)
unschedulablePods := []*apiv1.Pod{p1, p2, p3}
scheduledPod1 := BuildTestPod("s1", 100, 200000)
scheduledPod2 := BuildTestPod("s2", 1500, 200000)
scheduledPod1.Spec.NodeName = "node1"
scheduledPod2.Spec.NodeName = "node1"
node := BuildTestNode("node1", 2000, 2000000)
SetNodeReadyState(node, true, time.Time{})
predicateChecker := simulator.NewTestPredicateChecker()
res := FilterOutSchedulable(unschedulablePods, []*apiv1.Node{node}, []*apiv1.Pod{scheduledPod1}, predicateChecker)
assert.Equal(t, 1, len(res))
assert.Equal(t, p2, res[0])
res2 := FilterOutSchedulable(unschedulablePods, []*apiv1.Node{node}, []*apiv1.Pod{scheduledPod1, scheduledPod2}, predicateChecker)
assert.Equal(t, 2, len(res2))
assert.Equal(t, p1, res2[0])
assert.Equal(t, p2, res2[1])
} | explode_data.jsonl/44988 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 398
} | [
2830,
3393,
5632,
2662,
50,
2397,
360,
480,
1155,
353,
8840,
836,
8,
341,
3223,
16,
1669,
7854,
2271,
23527,
445,
79,
16,
497,
220,
16,
20,
15,
15,
11,
220,
17,
15,
15,
15,
15,
15,
340,
3223,
17,
1669,
7854,
2271,
23527,
445,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMenuNoLogOutIngressSecurityNone(t *testing.T) {
uiOpts := map[string]interface{}{}
spec := &v1.JaegerSpec{Ingress: v1.JaegerIngressSpec{Security: v1.IngressSecurityNoneExplicit}}
enableLogOut(uiOpts, spec)
assert.NotContains(t, uiOpts, "menu")
} | explode_data.jsonl/21866 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 102
} | [
2830,
3393,
3514,
2753,
2201,
2662,
641,
2483,
15352,
4064,
1155,
353,
8840,
836,
8,
341,
37278,
43451,
1669,
2415,
14032,
31344,
6257,
16094,
98100,
1669,
609,
85,
16,
3503,
64,
1878,
8327,
90,
641,
2483,
25,
348,
16,
3503,
64,
1878,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIsVendor(t *testing.T) {
tests := []struct {
name string
path string
expected bool
}{
{name: "TestIsVendor_1", path: "foo/bar", expected: false},
{name: "TestIsVendor_2", path: "foo/vendor/foo", expected: true},
{name: "TestIsVendor_3", path: ".sublime-project", expected: true},
{name: "TestIsVendor_4", path: "leaflet.draw-src.js", expected: true},
{name: "TestIsVendor_5", path: "foo/bar/MochiKit.js", expected: true},
{name: "TestIsVendor_6", path: "foo/bar/dojo.js", expected: true},
{name: "TestIsVendor_7", path: "foo/env/whatever", expected: true},
{name: "TestIsVendor_8", path: "foo/.imageset/bar", expected: true},
{name: "TestIsVendor_9", path: "Vagrantfile", expected: true},
}
for _, test := range tests {
is := IsVendor(test.path)
assert.Equal(t, is, test.expected, fmt.Sprintf("%v: is = %v, expected: %v", test.name, is, test.expected))
}
} | explode_data.jsonl/20379 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 373
} | [
2830,
3393,
3872,
44691,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
26781,
257,
914,
198,
197,
42400,
1807,
198,
197,
59403,
197,
197,
47006,
25,
330,
2271,
3872,
44691,
62,
16,
497,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestParseTcp_splitResponse(t *testing.T) {
var private protos.ProtocolData
dns := newDns(testing.Verbose())
tcpQuery := elasticATcp
q := tcpQuery.request
r0 := tcpQuery.response[:1]
r1 := tcpQuery.response[1:10]
r2 := tcpQuery.response[10:]
tcptuple := testTcpTuple()
packet := newPacket(forward, q)
private = dns.Parse(packet, tcptuple, tcp.TcpDirectionOriginal, private)
assert.Equal(t, 1, dns.transactions.Size(), "There should be one transaction.")
packet = newPacket(reverse, r0)
private = dns.Parse(packet, tcptuple, tcp.TcpDirectionReverse, private)
assert.Equal(t, 1, dns.transactions.Size(), "There should be one transaction.")
packet = newPacket(reverse, r1)
dns.Parse(packet, tcptuple, tcp.TcpDirectionReverse, private)
assert.Equal(t, 1, dns.transactions.Size(), "There should be one transaction.")
packet = newPacket(reverse, r2)
dns.Parse(packet, tcptuple, tcp.TcpDirectionReverse, private)
assert.Empty(t, dns.transactions.Size(), "There should be no transaction.")
m := expectResult(t, dns)
assert.Equal(t, "tcp", mapValue(t, m, "transport"))
assert.Equal(t, len(tcpQuery.request), mapValue(t, m, "bytes_in"))
assert.Equal(t, len(tcpQuery.response), mapValue(t, m, "bytes_out"))
assert.NotNil(t, mapValue(t, m, "responsetime"))
assert.Equal(t, common.OK_STATUS, mapValue(t, m, "status"))
assert.Nil(t, mapValue(t, m, "notes"))
assertMapStrData(t, m, tcpQuery)
} | explode_data.jsonl/68708 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 552
} | [
2830,
3393,
14463,
77536,
17052,
2582,
1155,
353,
8840,
836,
8,
341,
2405,
869,
1724,
436,
54096,
1043,
198,
2698,
4412,
1669,
501,
35,
4412,
8623,
287,
42505,
8297,
2398,
3244,
4672,
2859,
1669,
35473,
828,
4672,
198,
18534,
1669,
2805... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMapProxy_ForceUnlockWithNonSerializableKey(t *testing.T) {
err := mp.ForceUnlock(student{})
AssertErrorNotNil(t, err, "forceUnlock did not return an error for nonserializable key")
mp.Clear()
} | explode_data.jsonl/57059 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 70
} | [
2830,
3393,
2227,
16219,
1400,
16316,
49679,
2354,
8121,
29268,
1592,
1155,
353,
8840,
836,
8,
341,
9859,
1669,
10490,
991,
16316,
49679,
39004,
37790,
18017,
1454,
96144,
1155,
11,
1848,
11,
330,
8833,
49679,
1521,
537,
470,
458,
1465,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestFlexibleUnionDeclConformsNonNullable(t *testing.T) {
decl, ok := testSchema(t).lookupDeclByName("ExampleFlexibleUnion", false)
if !ok {
t.Fatalf("lookupDeclByName failed")
}
unionDecl := decl.(*UnionDecl)
checkConforms(t,
context{},
unionDecl,
[]conformTest{
conformOk{gidlir.Record{
Name: "ExampleFlexibleUnion",
Fields: []gidlir.Field{
{Key: gidlir.FieldKey{Name: "s"}, Value: "foo"},
},
}},
conformOk{gidlir.Record{
Name: "ExampleFlexibleUnion",
Fields: []gidlir.Field{
{
Key: gidlir.FieldKey{UnknownOrdinal: 2},
Value: gidlir.UnknownData{},
},
},
}},
conformFail{gidlir.Record{
Name: "ExampleFlexibleUnion",
Fields: []gidlir.Field{
{Key: gidlir.FieldKey{Name: "DefinitelyNotS"}, Value: "foo"},
},
}, "field DefinitelyNotS: unknown"},
conformFail{gidlir.Record{
Name: "DefinitelyNotExampleFlexibleUnion",
Fields: []gidlir.Field{
{Key: gidlir.FieldKey{Name: "s"}, Value: "foo"},
},
}, "expecting union test.mixer/ExampleFlexibleUnion"},
conformFail{gidlir.Record{
Name: "ExampleFlexibleUnion",
Fields: []gidlir.Field{
{
Key: gidlir.FieldKey{UnknownOrdinal: 1},
Value: gidlir.UnknownData{},
},
},
}, "field name must be used rather than ordinal 1"},
conformFail{nil, "expecting non-null union"},
conformFail{"foo", "expecting union"},
conformFail{0, "expecting union"},
},
)
} | explode_data.jsonl/21403 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 684
} | [
2830,
3393,
75147,
32658,
21629,
1109,
9807,
8121,
15703,
1155,
353,
8840,
836,
8,
341,
197,
10005,
11,
5394,
1669,
1273,
8632,
1155,
568,
21020,
21629,
16898,
445,
13314,
75147,
32658,
497,
895,
340,
743,
753,
562,
341,
197,
3244,
3076... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestValidateLogMetricFilterName(t *testing.T) {
validNames := []string{
"YadaHereAndThere",
"Valid-5Metric_Name",
"This . is also %% valid@!)+(",
"1234",
strings.Repeat("W", 512),
}
for _, v := range validNames {
_, errors := validateLogMetricFilterName(v, "name")
if len(errors) != 0 {
t.Fatalf("%q should be a valid Log Metric Filter Name: %q", v, errors)
}
}
invalidNames := []string{
"Here is a name with: colon",
"and here is another * invalid name",
"*",
// length > 512
strings.Repeat("W", 513),
}
for _, v := range invalidNames {
_, errors := validateLogMetricFilterName(v, "name")
if len(errors) == 0 {
t.Fatalf("%q should be an invalid Log Metric Filter Name", v)
}
}
} | explode_data.jsonl/78568 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 292
} | [
2830,
3393,
17926,
2201,
54310,
5632,
675,
1155,
353,
8840,
836,
8,
341,
56322,
7980,
1669,
3056,
917,
515,
197,
197,
1,
56,
2584,
8420,
3036,
3862,
756,
197,
197,
1,
4088,
12,
20,
54310,
19015,
756,
197,
197,
21520,
659,
374,
1083,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestContentManager_Handlers(t *testing.T) {
controller := gomock.NewController(t)
defer controller.Finish()
dashConfig := configFake.NewMockDash(controller)
moduleManager := moduleFake.NewMockManagerInterface(controller)
logger := log.NopLogger()
manager := api.NewContentManager(moduleManager, dashConfig, logger)
AssertHandlers(t, manager, []string{
api.RequestSetContentPath,
action.RequestSetNamespace,
api.CheckLoading,
})
} | explode_data.jsonl/1647 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 144
} | [
2830,
3393,
2762,
2043,
2039,
437,
9254,
1155,
353,
8840,
836,
8,
341,
61615,
1669,
342,
316,
1176,
7121,
2051,
1155,
340,
16867,
6461,
991,
18176,
2822,
2698,
988,
2648,
1669,
2193,
52317,
7121,
11571,
42263,
40845,
340,
54020,
2043,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestInspectDataType(t *testing.T) {
for i, tt := range []struct {
v interface{}
typ influxql.DataType
}{
{float64(100), influxql.Float},
{int64(100), influxql.Integer},
{int32(100), influxql.Integer},
{100, influxql.Integer},
{true, influxql.Boolean},
{"string", influxql.String},
{time.Now(), influxql.Time},
{time.Second, influxql.Duration},
{nil, influxql.Unknown},
} {
if typ := influxql.InspectDataType(tt.v); tt.typ != typ {
t.Errorf("%d. %v (%s): unexpected type: %s", i, tt.v, tt.typ, typ)
continue
}
}
} | explode_data.jsonl/24801 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 240
} | [
2830,
3393,
58533,
22653,
1155,
353,
8840,
836,
8,
341,
2023,
600,
11,
17853,
1669,
2088,
3056,
1235,
341,
197,
5195,
256,
3749,
16094,
197,
25314,
52852,
1470,
77277,
198,
197,
59403,
197,
197,
90,
3649,
21,
19,
7,
16,
15,
15,
701,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestInitKibanaConfig(t *testing.T) {
b, err := NewBeat("filebeat", "testidx", "0.9")
if err != nil {
panic(err)
}
assert.Equal(t, "filebeat", b.Info.Beat)
assert.Equal(t, "testidx", b.Info.IndexPrefix)
assert.Equal(t, "0.9", b.Info.Version)
cfg, err := cfgfile.Load("../test/filebeat_test.yml", nil)
err = cfg.Unpack(&b.Config)
assert.NoError(t, err)
kibanaConfig, err := initKibanaConfig(b.Config)
assert.NoError(t, err)
username, err := kibanaConfig.String("username", -1)
password, err := kibanaConfig.String("password", -1)
protocol, err := kibanaConfig.String("protocol", -1)
host, err := kibanaConfig.String("host", -1)
assert.Equal(t, "elastic-test-username", username)
assert.Equal(t, "elastic-test-password", password)
assert.Equal(t, "https", protocol)
assert.Equal(t, "127.0.0.1:5601", host)
} | explode_data.jsonl/47188 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 375
} | [
2830,
3393,
3803,
42,
579,
3362,
2648,
1155,
353,
8840,
836,
8,
972,
2233,
11,
1848,
1669,
1532,
43658,
445,
1192,
22227,
497,
330,
1944,
6361,
497,
330,
15,
13,
24,
6060,
743,
1848,
961,
2092,
972,
197,
30764,
3964,
1218,
197,
2570... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestBuildError(t *testing.T) {
testClient(t, func(e *cli.Engine, i *mocksdk.Interface) {
i.On("SystemGet").Return(fxSystem(), nil)
i.On("ObjectStore", "app1", mock.AnythingOfType("string"), mock.Anything, structs.ObjectStoreOptions{}).Return(&fxObject, nil).Run(func(args mock.Arguments) {
require.Regexp(t, `tmp/[0-9a-f]{30}\.tgz`, args.Get(1).(string))
})
i.On("BuildCreate", "app1", "object://test", structs.BuildCreateOptions{}).Return(nil, fmt.Errorf("err1"))
res, err := testExecute(e, "build ./testdata/httpd -a app1", nil)
require.NoError(t, err)
require.Equal(t, 1, res.Code)
res.RequireStderr(t, []string{"ERROR: err1"})
res.RequireStdout(t, []string{
"Packaging source... OK",
"Uploading source... OK",
"Starting build... ",
})
})
} | explode_data.jsonl/65787 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 323
} | [
2830,
3393,
11066,
1454,
1155,
353,
8840,
836,
8,
341,
18185,
2959,
1155,
11,
2915,
2026,
353,
19521,
54424,
11,
600,
353,
16712,
51295,
41065,
8,
341,
197,
8230,
8071,
445,
2320,
1949,
1827,
5598,
955,
87,
2320,
1507,
2092,
340,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestExecute(t *testing.T) {
// TODO: add tests to validate output of Execute()
table := []struct {
name string
executor Executor
wantErr error
}{
{
name: "test LoadIacDir error",
executor: Executor{
dirPath: "./testdata/testdir",
iacProvider: MockIacProvider{err: errMockLoadIacDir},
},
wantErr: errMockLoadIacDir,
},
{
name: "test LoadIacDir no error",
executor: Executor{
dirPath: "./testdata/testdir",
iacProvider: MockIacProvider{err: nil},
policyEngines: []policy.Engine{MockPolicyEngine{err: nil}},
},
wantErr: nil,
},
{
name: "test LoadIacFile error",
executor: Executor{
filePath: "./testdata/testfile",
iacProvider: MockIacProvider{err: errMockLoadIacFile},
},
wantErr: errMockLoadIacFile,
},
{
name: "test LoadIacFile no error",
executor: Executor{
filePath: "./testdata/testfile",
iacProvider: MockIacProvider{err: nil},
policyEngines: []policy.Engine{MockPolicyEngine{err: nil}},
},
wantErr: nil,
},
{
name: "test SendNofitications no error",
executor: Executor{
iacProvider: MockIacProvider{err: nil},
notifiers: []notifications.Notifier{&MockNotifier{err: nil}},
policyEngines: []policy.Engine{MockPolicyEngine{err: nil}},
},
wantErr: nil,
},
{
name: "test SendNofitications mock error",
executor: Executor{
iacProvider: MockIacProvider{err: nil},
notifiers: []notifications.Notifier{&MockNotifier{err: errMockNotifier}},
policyEngines: []policy.Engine{MockPolicyEngine{err: nil}},
},
wantErr: errMockNotifier,
},
{
name: "test policy enginer no error",
executor: Executor{
iacProvider: MockIacProvider{err: nil},
notifiers: []notifications.Notifier{&MockNotifier{err: nil}},
policyEngines: []policy.Engine{MockPolicyEngine{err: nil}},
},
wantErr: nil,
},
{
name: "test policy engine error",
executor: Executor{
iacProvider: MockIacProvider{err: nil},
notifiers: []notifications.Notifier{&MockNotifier{err: nil}},
policyEngines: []policy.Engine{MockPolicyEngine{err: errMockPolicyEngine}},
},
wantErr: errMockPolicyEngine,
},
}
for _, tt := range table {
t.Run(tt.name, func(t *testing.T) {
_, gotErr := tt.executor.Execute()
if !reflect.DeepEqual(gotErr, tt.wantErr) {
t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr)
}
})
}
} | explode_data.jsonl/65421 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1109
} | [
2830,
3393,
17174,
1155,
353,
8840,
836,
8,
1476,
197,
322,
5343,
25,
912,
7032,
311,
9593,
2550,
315,
20848,
741,
26481,
1669,
3056,
1235,
341,
197,
11609,
257,
914,
198,
197,
67328,
4831,
56032,
198,
197,
50780,
7747,
220,
1465,
198... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestHttpParser_eatBody_connclose(t *testing.T) {
logp.TestingSetup(logp.WithSelectors("http", "httpdetailed"))
http := httpModForTests(nil)
http.parserConfig.sendHeaders = true
http.parserConfig.sendAllHeaders = true
data := []byte("HTTP/1.1 200 ok\r\n" +
"user-agent: curl/7.35.0\r\n" +
"host: localhost:9000\r\n" +
"accept: */*\r\n" +
"authorization: Company 1\r\n" +
"connection: close\r\n" +
"\r\n" +
"0123456789")
st := &stream{data: data, message: new(message)}
ok, complete := testParseStream(http, st, 0)
assert.True(t, ok)
assert.False(t, complete)
assert.Equal(t, st.bodyReceived, 10)
ok, complete = testParseStream(http, st, 5)
assert.True(t, ok)
assert.False(t, complete)
assert.Equal(t, st.bodyReceived, 15)
ok, complete = testParseStream(http, st, 5)
assert.True(t, ok)
assert.False(t, complete)
assert.Equal(t, st.bodyReceived, 20)
} | explode_data.jsonl/16492 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 378
} | [
2830,
3393,
2905,
6570,
2204,
266,
5444,
3382,
1016,
1469,
1155,
353,
8840,
836,
8,
341,
6725,
79,
8787,
287,
21821,
12531,
79,
26124,
96995,
445,
1254,
497,
330,
1254,
67,
10111,
28075,
28080,
1669,
1758,
4459,
2461,
18200,
27907,
340,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMakeVirtualServiceSpec_CorrectMetadata(t *testing.T) {
ci := &v1alpha1.ClusterIngress{
ObjectMeta: metav1.ObjectMeta{
Name: "test-ingress",
Labels: map[string]string{
serving.RouteLabelKey: "test-route",
serving.RouteNamespaceLabelKey: "test-ns",
},
},
Spec: v1alpha1.IngressSpec{},
}
expected := metav1.ObjectMeta{
Name: "test-ingress",
Namespace: system.Namespace(),
Labels: map[string]string{
networking.IngressLabelKey: "test-ingress",
serving.RouteLabelKey: "test-route",
serving.RouteNamespaceLabelKey: "test-ns",
},
OwnerReferences: []metav1.OwnerReference{
*kmeta.NewControllerRef(ci),
},
}
meta := MakeVirtualService(ci, []string{}).ObjectMeta
if diff := cmp.Diff(expected, meta); diff != "" {
t.Errorf("Unexpected metadata (-want +got): %v", diff)
}
} | explode_data.jsonl/68604 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 354
} | [
2830,
3393,
8078,
33026,
1860,
8327,
920,
27034,
14610,
1155,
353,
8840,
836,
8,
341,
1444,
72,
1669,
609,
85,
16,
7141,
16,
72883,
641,
2483,
515,
197,
23816,
12175,
25,
77520,
16,
80222,
515,
298,
21297,
25,
330,
1944,
83905,
673,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSpanSetMetric(t *testing.T) {
for name, tt := range map[string]func(assert *assert.Assertions, span *span){
"init": func(assert *assert.Assertions, span *span) {
assert.Equal(2, len(span.Metrics))
_, ok := span.Metrics[keySamplingPriority]
assert.True(ok)
_, ok = span.Metrics[keySamplingPriorityRate]
assert.True(ok)
},
"float": func(assert *assert.Assertions, span *span) {
span.SetTag("temp", 72.42)
assert.Equal(72.42, span.Metrics["temp"])
},
"int": func(assert *assert.Assertions, span *span) {
span.SetTag("bytes", 1024)
assert.Equal(1024.0, span.Metrics["bytes"])
},
"max": func(assert *assert.Assertions, span *span) {
span.SetTag("bytes", intUpperLimit-1)
assert.Equal(float64(intUpperLimit-1), span.Metrics["bytes"])
},
"min": func(assert *assert.Assertions, span *span) {
span.SetTag("bytes", intLowerLimit+1)
assert.Equal(float64(intLowerLimit+1), span.Metrics["bytes"])
},
"toobig": func(assert *assert.Assertions, span *span) {
span.SetTag("bytes", intUpperLimit)
assert.Equal(0.0, span.Metrics["bytes"])
assert.Equal(fmt.Sprint(intUpperLimit), span.Meta["bytes"])
},
"toosmall": func(assert *assert.Assertions, span *span) {
span.SetTag("bytes", intLowerLimit)
assert.Equal(0.0, span.Metrics["bytes"])
assert.Equal(fmt.Sprint(intLowerLimit), span.Meta["bytes"])
},
"finished": func(assert *assert.Assertions, span *span) {
span.Finish()
span.SetTag("finished.test", 1337)
assert.Equal(2, len(span.Metrics))
_, ok := span.Metrics["finished.test"]
assert.False(ok)
},
} {
t.Run(name, func(t *testing.T) {
assert := assert.New(t)
tracer := newTracer(withTransport(newDefaultTransport()))
span := tracer.newRootSpan("http.request", "mux.router", "/")
tt(assert, span)
})
}
} | explode_data.jsonl/42847 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 778
} | [
2830,
3393,
12485,
1649,
54310,
1155,
353,
8840,
836,
8,
341,
2023,
829,
11,
17853,
1669,
2088,
2415,
14032,
60,
2830,
75846,
353,
2207,
46312,
11,
9390,
353,
1480,
1264,
197,
197,
1,
2327,
788,
2915,
75846,
353,
2207,
46312,
11,
9390... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAdminServer_UndeleteTree(t *testing.T) {
ctx := context.Background()
ts, err := setupAdminServer(ctx, t)
if err != nil {
t.Fatalf("setupAdminServer() failed: %v", err)
}
defer ts.closeAll()
tests := []struct {
desc string
baseTree *trillian.Tree
}{
{desc: "logTree", baseTree: testonly.LogTree},
{desc: "mapTree", baseTree: testonly.MapTree},
}
for _, test := range tests {
createdTree, err := ts.adminClient.CreateTree(ctx, &trillian.CreateTreeRequest{Tree: test.baseTree})
if err != nil {
t.Fatalf("%v: CreateTree() returned err = %v", test.desc, err)
}
deletedTree, err := ts.adminClient.DeleteTree(ctx, &trillian.DeleteTreeRequest{TreeId: createdTree.TreeId})
if err != nil {
t.Fatalf("%v: DeleteTree() returned err = %v", test.desc, err)
}
undeletedTree, err := ts.adminClient.UndeleteTree(ctx, &trillian.UndeleteTreeRequest{TreeId: deletedTree.TreeId})
if err != nil {
t.Errorf("%v: UndeleteTree() returned err = %v", test.desc, err)
continue
}
if got, want := undeletedTree, createdTree; !proto.Equal(got, want) {
diff := pretty.Compare(got, want)
t.Errorf("%v: post-UndeleteTree() diff (-got +want):\n%v", test.desc, diff)
}
storedTree, err := ts.adminClient.GetTree(ctx, &trillian.GetTreeRequest{TreeId: deletedTree.TreeId})
if err != nil {
t.Fatalf("%v: GetTree() returned err = %v", test.desc, err)
}
if got, want := storedTree, createdTree; !proto.Equal(got, want) {
diff := pretty.Compare(got, want)
t.Errorf("%v: post-GetTree() diff (-got +want):\n%v", test.desc, diff)
}
}
} | explode_data.jsonl/43983 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 631
} | [
2830,
3393,
7210,
5475,
6665,
42341,
1617,
6533,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
2266,
19047,
2822,
57441,
11,
1848,
1669,
6505,
7210,
5475,
7502,
11,
259,
340,
743,
1848,
961,
2092,
341,
197,
3244,
30762,
445,
15188,
7210,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestChangingVersion_ResultsInRollingUpdateStrategyType(t *testing.T) {
mdb := newTestReplicaSet()
mgr := client.NewManager(&mdb)
mgrClient := mgr.GetClient()
r := NewReconciler(mgr)
res, err := r.Reconcile(context.TODO(), reconcile.Request{NamespacedName: mdb.NamespacedName()})
assertReconciliationSuccessful(t, res, err)
// fetch updated resource after first reconciliation
_ = mgrClient.Get(context.TODO(), mdb.NamespacedName(), &mdb)
sts := appsv1.StatefulSet{}
err = mgrClient.Get(context.TODO(), types.NamespacedName{Name: mdb.Name, Namespace: mdb.Namespace}, &sts)
assert.NoError(t, err)
assert.Equal(t, appsv1.RollingUpdateStatefulSetStrategyType, sts.Spec.UpdateStrategy.Type)
mdbRef := &mdb
mdbRef.Spec.Version = "4.2.3"
_ = mgrClient.Update(context.TODO(), &mdb)
// agents start the upgrade, they are not all ready
sts.Status.UpdatedReplicas = 1
sts.Status.ReadyReplicas = 2
err = mgrClient.Update(context.TODO(), &sts)
assert.NoError(t, err)
_ = mgrClient.Get(context.TODO(), mdb.NamespacedName(), &sts)
// reconcilliation is successful
res, err = r.Reconcile(context.TODO(), reconcile.Request{NamespacedName: types.NamespacedName{Namespace: mdb.Namespace, Name: mdb.Name}})
assertReconciliationSuccessful(t, res, err)
sts = appsv1.StatefulSet{}
err = mgrClient.Get(context.TODO(), types.NamespacedName{Name: mdb.Name, Namespace: mdb.Namespace}, &sts)
assert.NoError(t, err)
assert.Equal(t, appsv1.RollingUpdateStatefulSetStrategyType, sts.Spec.UpdateStrategy.Type,
"The StatefulSet should have be re-configured to use RollingUpdates after it reached the ready state")
} | explode_data.jsonl/80675 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 593
} | [
2830,
3393,
59046,
5637,
62,
9801,
641,
32355,
287,
4289,
19816,
929,
1155,
353,
8840,
836,
8,
341,
2109,
1999,
1669,
501,
2271,
18327,
15317,
1649,
741,
2109,
901,
1669,
2943,
7121,
2043,
2099,
78127,
340,
2109,
901,
2959,
1669,
57897,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBulkHash4(t *testing.T) {
for _, x := range deflateTests {
y := x.out
if len(y) < minMatchLength {
continue
}
y = append(y, y...)
for j := 4; j < len(y); j++ {
y := y[:j]
dst := make([]uint32, len(y)-minMatchLength+1)
for i := range dst {
dst[i] = uint32(i + 100)
}
bulkHash4(y, dst)
for i, got := range dst {
want := hash4(y[i:])
if got != want && got == uint32(i)+100 {
t.Errorf("Len:%d Index:%d, want 0x%08x but not modified", len(y), i, want)
} else if got != want {
t.Errorf("Len:%d Index:%d, got 0x%08x want:0x%08x", len(y), i, got, want)
}
}
}
}
} | explode_data.jsonl/81403 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 325
} | [
2830,
3393,
88194,
6370,
19,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
856,
1669,
2088,
92689,
18200,
341,
197,
14522,
1669,
856,
2532,
198,
197,
743,
2422,
7021,
8,
366,
1308,
8331,
4373,
341,
298,
11664,
198,
197,
197,
532,
197,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestConvertFromState(t *testing.T) {
tests := []struct {
src scm.State
dst string
}{
{
src: scm.StateCanceled,
dst: "error",
},
{
src: scm.StateError,
dst: "error",
},
{
src: scm.StateFailure,
dst: "failure",
},
{
src: scm.StatePending,
dst: "pending",
},
{
src: scm.StateRunning,
dst: "pending",
},
{
src: scm.StateSuccess,
dst: "success",
},
{
src: scm.StateUnknown,
dst: "error",
},
}
for _, test := range tests {
if got, want := convertFromState(test.src), test.dst; got != want {
t.Errorf("Want state %v converted to %s", test.src, test.dst)
}
}
} | explode_data.jsonl/29878 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 328
} | [
2830,
3393,
12012,
3830,
1397,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
41144,
85520,
18942,
198,
197,
52051,
914,
198,
197,
59403,
197,
197,
515,
298,
41144,
25,
85520,
18942,
63263,
345,
298,
52051,
25,
330,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestGetPingOffset(t *testing.T) {
for k := uint(0); k < 8; k++ {
var beacon time.Duration
pingNb := 1 << k
pingPeriod := pingPeriodBase / pingNb
for test := 0; test < 100000; test++ {
offset, err := GetPingOffset(beacon, lorawan.DevAddr{}, pingNb)
if err != nil {
t.Fatal(err)
}
if offset > pingPeriod-1 {
t.Errorf("unexpected offset %d at pingNb %d test %d", offset, pingNb, test)
}
beacon += beaconPeriod
}
}
} | explode_data.jsonl/58811 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 202
} | [
2830,
3393,
1949,
69883,
6446,
1155,
353,
8840,
836,
8,
341,
2023,
595,
1669,
2622,
7,
15,
1215,
595,
366,
220,
23,
26,
595,
1027,
341,
197,
2405,
51302,
882,
33795,
198,
197,
3223,
287,
85007,
1669,
220,
16,
1115,
595,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestBadScanStructArgs(t *testing.T) {
x := []interface{}{"A", "b"}
test := func(v interface{}) {
if err := redis.ScanStruct(x, v); err == nil {
t.Errorf("Expect error for ScanStruct(%T, %T)", x, v)
}
}
test(nil)
var v0 *struct{}
test(v0)
var v1 int
test(&v1)
x = x[:1]
v2 := struct{ A string }{}
test(&v2)
} | explode_data.jsonl/44283 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 157
} | [
2830,
3393,
17082,
26570,
9422,
4117,
1155,
353,
8840,
836,
8,
341,
10225,
1669,
3056,
4970,
6257,
4913,
32,
497,
330,
65,
16707,
18185,
1669,
2915,
3747,
3749,
28875,
341,
197,
743,
1848,
1669,
20870,
54874,
9422,
2075,
11,
348,
1215,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestNetworkSetMacAddress(t *testing.T) {
if testing.Short() {
return
}
tl := testLink{name: "tstEth", linkType: "dummy"}
macaddr := "22:ce:e0:99:63:6f"
addLink(t, tl.name, tl.linkType)
defer deleteLink(t, tl.name)
ifcBeforeSet := readLink(t, tl.name)
if err := NetworkSetMacAddress(ifcBeforeSet, macaddr); err != nil {
t.Fatalf("Could not set %s MAC address on %#v interface: err", macaddr, tl, err)
}
ifcAfterSet := readLink(t, tl.name)
if ifcAfterSet.HardwareAddr.String() != macaddr {
t.Fatalf("Could not set %s MAC address on %#v interface", macaddr, tl)
}
} | explode_data.jsonl/76203 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 242
} | [
2830,
3393,
12320,
1649,
19552,
4286,
1155,
353,
8840,
836,
8,
341,
743,
7497,
55958,
368,
341,
197,
853,
198,
197,
630,
3244,
75,
1669,
1273,
3939,
47006,
25,
330,
83,
267,
65390,
497,
2656,
929,
25,
330,
31390,
16707,
2109,
580,
6... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestSignatureValidationCreatorValidateError(t *testing.T) {
validateErr := status.New(status.EndorserClientStatus, status.SignatureVerificationFailed.ToInt32(), "", nil)
// Sample request
request := Request{ChaincodeID: "testCC", Fcn: "invoke", Args: [][]byte{[]byte("query"), []byte("b")}}
requestContext := prepareRequestContext(request, Opts{}, t)
handler := NewQueryHandler()
mockPeer1 := &fcmocks.MockPeer{MockName: "Peer1", MockURL: "http://peer1.com", MockRoles: []string{}, MockCert: nil, MockMSP: "Org1MSP", Status: 200, Payload: []byte("value")}
clientContext := setupContextForSignatureValidation(nil, validateErr, []fab.Peer{mockPeer1}, t)
handler.Handle(requestContext, clientContext)
verifyExpectedError(requestContext, validateErr.Error(), t)
} | explode_data.jsonl/5373 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 264
} | [
2830,
3393,
25088,
13799,
31865,
17926,
1454,
1155,
353,
8840,
836,
8,
341,
197,
7067,
7747,
1669,
2639,
7121,
13838,
18569,
269,
799,
2959,
2522,
11,
2639,
41152,
1568,
62339,
9408,
15071,
18,
17,
1507,
7342,
2092,
340,
197,
322,
19143... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGenericArrayValueUnsupported(t *testing.T) {
_, err := GenericArray{true}.Value()
if err == nil {
t.Fatal("Expected error for bool")
}
if !strings.Contains(err.Error(), "bool to array") {
t.Errorf("Expected type to be mentioned, got %q", err)
}
} | explode_data.jsonl/5348 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 97
} | [
2830,
3393,
19964,
1857,
1130,
41884,
1155,
353,
8840,
836,
8,
341,
197,
6878,
1848,
1669,
21281,
1857,
90,
1866,
7810,
1130,
2822,
743,
1848,
621,
2092,
341,
197,
3244,
26133,
445,
18896,
1465,
369,
1807,
1138,
197,
532,
743,
753,
18... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_IntStrMap_Merge(t *testing.T) {
m1 := gmap.NewIntStrMap()
m2 := gmap.NewIntStrMap()
m1.Set(1, "a")
m2.Set(2, "b")
m1.Merge(m2)
gtest.Assert(m1.Map(), map[int]string{1: "a", 2: "b"})
} | explode_data.jsonl/7645 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 110
} | [
2830,
3393,
32054,
2580,
2227,
1245,
10080,
1155,
353,
8840,
836,
8,
341,
2109,
16,
1669,
342,
2186,
7121,
1072,
2580,
2227,
741,
2109,
17,
1669,
342,
2186,
7121,
1072,
2580,
2227,
741,
2109,
16,
4202,
7,
16,
11,
330,
64,
1138,
2109... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLocale(t *testing.T) {
trans := New()
expected := "fr_NE"
if trans.Locale() != expected {
t.Errorf("Expected '%s' Got '%s'", expected, trans.Locale())
}
} | explode_data.jsonl/78399 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 70
} | [
2830,
3393,
19231,
1155,
353,
8840,
836,
8,
1476,
72453,
1669,
1532,
741,
42400,
1669,
330,
1626,
14039,
1837,
743,
1356,
59094,
368,
961,
3601,
341,
197,
3244,
13080,
445,
18896,
7677,
82,
6,
24528,
7677,
82,
22772,
3601,
11,
1356,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestUtilityFunctions(t *testing.T) {
env := NewTestVDBEnv(t)
defer env.Cleanup()
db, err := env.DBProvider.GetDBHandle("testutilityfunctions")
assert.NoError(t, err)
// BytesKeySupported should be true for goleveldb
byteKeySupported := db.BytesKeySupported()
assert.True(t, byteKeySupported)
// ValidateKeyValue should return nil for a valid key and value
assert.NoError(t, db.ValidateKeyValue("testKey", []byte("testValue")), "leveldb should accept all key-values")
} | explode_data.jsonl/63390 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 164
} | [
2830,
3393,
19199,
25207,
1155,
353,
8840,
836,
8,
341,
57538,
1669,
1532,
2271,
53,
3506,
14359,
1155,
340,
16867,
6105,
727,
60639,
2822,
20939,
11,
1848,
1669,
6105,
22537,
5179,
2234,
3506,
6999,
445,
1944,
30900,
21409,
1138,
6948,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDataCustomTypes(t *testing.T) {
d := DataCustom{}
ind := reflect.Indirect(reflect.ValueOf(&d))
for name, value := range DataValues {
e := ind.FieldByName(name)
if !e.IsValid() {
continue
}
e.Set(reflect.ValueOf(value).Convert(e.Type()))
}
id, err := dORM.Insert(&d)
throwFail(t, err)
throwFail(t, AssertIs(id, 1))
d = DataCustom{ID: 1}
err = dORM.Read(&d)
throwFail(t, err)
ind = reflect.Indirect(reflect.ValueOf(&d))
for name, value := range DataValues {
e := ind.FieldByName(name)
if !e.IsValid() {
continue
}
vu := e.Interface()
value = reflect.ValueOf(value).Convert(e.Type()).Interface()
throwFail(t, AssertIs(vu == value, true), value, vu)
}
} | explode_data.jsonl/18123 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 295
} | [
2830,
93200,
10268,
4173,
1155,
353,
8840,
836,
8,
341,
2698,
1669,
2885,
10268,
16094,
197,
484,
1669,
8708,
13,
1425,
1226,
13321,
767,
6167,
2124,
2099,
67,
4390,
2023,
829,
11,
897,
1669,
2088,
2885,
6227,
341,
197,
7727,
1669,
12... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestNumberOfRoutingShardsOverwrite(t *testing.T) {
beatVersion := "6.1.0"
beatName := "testbeat"
config := TemplateConfig{
Settings: TemplateSettings{
Index: map[string]interface{}{"number_of_routing_shards": 5},
},
}
// Test it exists in 6.1
template, err := New(beatVersion, beatName, "6.1.0", config)
assert.NoError(t, err)
data := template.Generate(nil, nil)
shards, err := data.GetValue("settings.index.number_of_routing_shards")
assert.NoError(t, err)
assert.Equal(t, 5, shards.(int))
} | explode_data.jsonl/70489 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 198
} | [
2830,
3393,
40619,
24701,
2016,
2347,
1918,
4934,
1155,
353,
8840,
836,
8,
1476,
197,
22227,
5637,
1669,
330,
21,
13,
16,
13,
15,
698,
197,
22227,
675,
1669,
330,
1944,
22227,
698,
25873,
1669,
14355,
2648,
515,
197,
197,
6086,
25,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_BigqueryConfig(t *testing.T) {
t.Run("Check to call the set environment variable.", func(t *testing.T) {
bqDataset := "xxx"
bqTable := "yyy"
if err := os.Setenv("BIGQUERY_DATASET", bqDataset); err != nil {
t.Fatalf("Failed to set file BIGQUERY_DATASET environment variables.")
}
if err := os.Setenv("BIGQUERY_TABLE", bqTable); err != nil {
t.Fatalf("Failed to set file BIGQUERY_TABLE environment variables.")
}
bqCfg := BigqueryConfig()
if e, a := bqCfg.DataSet, bqDataset; !reflect.DeepEqual(e, a) {
t.Fatal("Environment variable BIGQUERY_DATASET is not acquired correctly.")
}
if e, a := bqCfg.Table, bqTable; !reflect.DeepEqual(e, a) {
t.Fatal("Environment variable BIGQUERY_TABLE is not acquired correctly.")
}
})
} | explode_data.jsonl/44879 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 307
} | [
2830,
3393,
1668,
343,
1631,
2648,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
3973,
311,
1618,
279,
738,
4573,
3890,
10465,
2915,
1155,
353,
8840,
836,
8,
341,
197,
2233,
80,
33363,
1669,
330,
24048,
698,
197,
2233,
80,
2556,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestParseTs_shouldFailOnWrongFormat(t *testing.T) {
currentTime := time.Now().UTC().Truncate(time.Second)
viper.Set(endTSEnvVar, currentTime.Format(time.RFC822))
parsedTime, err := ParseTS(endTSEnvVar)
viper.Set(endTSEnvVar, nil)
assert.Error(t, err)
assert.Nil(t, parsedTime)
} | explode_data.jsonl/1904 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 121
} | [
2830,
3393,
14463,
52793,
43378,
19524,
1925,
29185,
4061,
1155,
353,
8840,
836,
8,
341,
20121,
1462,
1669,
882,
13244,
1005,
21183,
1005,
1282,
26900,
9730,
32435,
340,
5195,
12858,
4202,
15076,
51,
925,
36941,
3962,
11,
39199,
9978,
973... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAuthorization_Conditions(t *testing.T) {
framework.NewTest(t).
Run(func(ctx framework.TestContext) {
nsA := namespace.NewOrFail(t, ctx, namespace.Config{
Prefix: "v1beta1-conditions-a",
Inject: true,
})
nsB := namespace.NewOrFail(t, ctx, namespace.Config{
Prefix: "v1beta1-conditions-b",
Inject: true,
})
nsC := namespace.NewOrFail(t, ctx, namespace.Config{
Prefix: "v1beta1-conditions-c",
Inject: true,
})
portC := 8090
var a, b, c echo.Instance
echoboot.NewBuilder(ctx).
With(&a, util.EchoConfig("a", nsA, false, nil)).
With(&b, util.EchoConfig("b", nsB, false, nil)).
With(&c, echo.Config{
Service: "c",
Namespace: nsC,
Subsets: []echo.SubsetConfig{{}},
Ports: []echo.Port{
{
Name: "http",
Protocol: protocol.HTTP,
InstancePort: portC,
},
},
}).
BuildOrFail(t)
args := map[string]string{
"NamespaceA": nsA.Name(),
"NamespaceB": nsB.Name(),
"NamespaceC": nsC.Name(),
"IpA": getWorkload(a, t).Address(),
"IpB": getWorkload(b, t).Address(),
"IpC": getWorkload(c, t).Address(),
"PortC": fmt.Sprintf("%d", portC),
}
policies := tmpl.EvaluateAllOrFail(t, args, file.AsStringOrFail(t, "testdata/authz/v1beta1-conditions.yaml.tmpl"))
ctx.Config().ApplyYAMLOrFail(t, "", policies...)
defer ctx.Config().DeleteYAMLOrFail(t, "", policies...)
newTestCase := func(from echo.Instance, path string, headers map[string]string, expectAllowed bool) rbacUtil.TestCase {
return rbacUtil.TestCase{
Request: connection.Checker{
From: from,
Options: echo.CallOptions{
Target: c,
PortName: "http",
Scheme: scheme.HTTP,
Path: path,
},
},
Headers: headers,
ExpectAllowed: expectAllowed,
}
}
cases := []rbacUtil.TestCase{
newTestCase(a, "/request-headers", map[string]string{"x-foo": "foo"}, true),
newTestCase(b, "/request-headers", map[string]string{"x-foo": "foo"}, true),
newTestCase(a, "/request-headers", map[string]string{"x-foo": "bar"}, false),
newTestCase(b, "/request-headers", map[string]string{"x-foo": "bar"}, false),
newTestCase(a, "/request-headers", nil, false),
newTestCase(b, "/request-headers", nil, false),
newTestCase(a, "/source-ip-a", nil, true),
newTestCase(b, "/source-ip-a", nil, false),
newTestCase(a, "/source-ip-b", nil, false),
newTestCase(b, "/source-ip-b", nil, true),
newTestCase(a, "/source-namespace-a", nil, true),
newTestCase(b, "/source-namespace-a", nil, false),
newTestCase(a, "/source-namespace-b", nil, false),
newTestCase(b, "/source-namespace-b", nil, true),
newTestCase(a, "/source-principal-a", nil, true),
newTestCase(b, "/source-principal-a", nil, false),
newTestCase(a, "/source-principal-b", nil, false),
newTestCase(b, "/source-principal-b", nil, true),
newTestCase(a, "/destination-ip-good", nil, true),
newTestCase(b, "/destination-ip-good", nil, true),
newTestCase(a, "/destination-ip-bad", nil, false),
newTestCase(b, "/destination-ip-bad", nil, false),
newTestCase(a, "/destination-port-good", nil, true),
newTestCase(b, "/destination-port-good", nil, true),
newTestCase(a, "/destination-port-bad", nil, false),
newTestCase(b, "/destination-port-bad", nil, false),
newTestCase(a, "/connection-sni-good", nil, true),
newTestCase(b, "/connection-sni-good", nil, true),
newTestCase(a, "/connection-sni-bad", nil, false),
newTestCase(b, "/connection-sni-bad", nil, false),
newTestCase(a, "/other", nil, false),
newTestCase(b, "/other", nil, false),
}
rbacUtil.RunRBACTest(t, cases)
})
} | explode_data.jsonl/41499 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1729
} | [
2830,
3393,
18124,
920,
2111,
5930,
1155,
353,
8840,
836,
8,
341,
1166,
5794,
7121,
2271,
1155,
4292,
197,
85952,
18552,
7502,
12626,
8787,
1972,
8,
341,
298,
84041,
32,
1669,
4473,
7121,
46059,
1155,
11,
5635,
11,
4473,
10753,
515,
5... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSupportsECDSA(t *testing.T) {
tests := []struct {
CipherSuites []uint16
SignatureSchemes []tls.SignatureScheme
SupportedCurves []tls.CurveID
ecdsaOk bool
}{
{[]uint16{
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
}, nil, nil, false},
{[]uint16{
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
}, nil, nil, true},
// SignatureSchemes limits, not extends, CipherSuites
{[]uint16{
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
}, []tls.SignatureScheme{
tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256,
}, nil, false},
{[]uint16{
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
}, []tls.SignatureScheme{
tls.PKCS1WithSHA256,
}, nil, false},
{[]uint16{
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
}, []tls.SignatureScheme{
tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256,
}, nil, true},
{[]uint16{
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
}, []tls.SignatureScheme{
tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256,
}, []tls.CurveID{
tls.CurveP521,
}, false},
{[]uint16{
tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
}, []tls.SignatureScheme{
tls.PKCS1WithSHA256, tls.ECDSAWithP256AndSHA256,
}, []tls.CurveID{
tls.CurveP256,
tls.CurveP521,
}, true},
}
for i, tt := range tests {
result := supportsECDSA(&tls.ClientHelloInfo{
CipherSuites: tt.CipherSuites,
SignatureSchemes: tt.SignatureSchemes,
SupportedCurves: tt.SupportedCurves,
})
if result != tt.ecdsaOk {
t.Errorf("%d: supportsECDSA = %v; want %v", i, result, tt.ecdsaOk)
}
}
} | explode_data.jsonl/65062 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 942
} | [
2830,
3393,
7916,
82,
7498,
72638,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
6258,
10558,
62898,
288,
257,
3056,
2496,
16,
21,
198,
197,
197,
25088,
50,
66346,
3056,
34488,
41152,
1568,
28906,
198,
197,
7568,
12... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAnonymityPrincipalV11(t *testing.T) {
msp1, err := setupWithVersion("testdata/idemix/MSP1OU1", "MSP1OU1", msp2.MSPv1_1)
assert.NoError(t, err)
id1, err := getDefaultSigner(msp1)
assert.NoError(t, err)
principalBytes, err := proto.Marshal(&msp.MSPIdentityAnonymity{AnonymityType: msp.MSPIdentityAnonymity_NOMINAL})
assert.NoError(t, err)
principal := &msp.MSPPrincipal{
PrincipalClassification: msp.MSPPrincipal_ANONYMITY,
Principal: principalBytes}
err = id1.SatisfiesPrincipal(principal)
assert.Error(t, err)
assert.Contains(t, err.Error(), "Anonymity MSP Principals are unsupported in MSPv1_1")
} | explode_data.jsonl/46040 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 273
} | [
2830,
3393,
2082,
7831,
487,
31771,
53,
16,
16,
1155,
353,
8840,
836,
8,
341,
47691,
79,
16,
11,
1848,
1669,
6505,
2354,
5637,
445,
92425,
38146,
336,
941,
10270,
4592,
16,
11922,
16,
497,
330,
44,
4592,
16,
11922,
16,
497,
296,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEntry_CursorRow(t *testing.T) {
entry := widget.NewMultiLineEntry()
entry.SetText("test")
assert.Equal(t, 0, entry.CursorRow)
// only 1 line, do nothing
down := &fyne.KeyEvent{Name: fyne.KeyDown}
entry.TypedKey(down)
assert.Equal(t, 0, entry.CursorRow)
// 2 lines, this should increment
entry.SetText("test\nrows")
entry.TypedKey(down)
assert.Equal(t, 1, entry.CursorRow)
up := &fyne.KeyEvent{Name: fyne.KeyUp}
entry.TypedKey(up)
assert.Equal(t, 0, entry.CursorRow)
// don't go beyond top
entry.TypedKey(up)
assert.Equal(t, 0, entry.CursorRow)
} | explode_data.jsonl/12319 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 238
} | [
2830,
3393,
5874,
920,
3823,
3102,
1155,
353,
8840,
836,
8,
341,
48344,
1669,
9086,
7121,
20358,
2460,
5874,
741,
48344,
92259,
445,
1944,
1138,
6948,
12808,
1155,
11,
220,
15,
11,
4343,
29929,
3102,
692,
197,
322,
1172,
220,
16,
1555... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestQuickSort(t *testing.T) {
ints := []int{1, 2, 8, 9, 0, -9, 8}
QuickSortInts(ints)
if !checkIntsInOrder(ints) {
t.Error("quick sort failed!")
}
students := []Student{
{
Name: "李明",
Age: 13,
},
{
Name: "小花",
Age: 15,
},
{
Name: "王明",
Age: 14,
},
{
Name: "小亮",
Age: 20,
},
{
Name: "志明",
Age: 17,
},
}
QuickSortSlice(students, func(i, j int) bool {
return students[i].Age < students[j].Age
})
if !checkStudentInOrder(students) {
t.Error("QuickSortSlice failed!")
}
// use less function to implement descending sort
ints = []int{1, 2, 8, 9, 0, -9, 8}
QuickSortSlice(ints, func(i, j int) bool {
return ints[i] > ints[j]
})
} | explode_data.jsonl/77664 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 363
} | [
2830,
3393,
24318,
10231,
1155,
353,
8840,
836,
8,
1476,
2084,
82,
1669,
3056,
396,
90,
16,
11,
220,
17,
11,
220,
23,
11,
220,
24,
11,
220,
15,
11,
481,
24,
11,
220,
23,
532,
197,
24318,
10231,
1072,
82,
1548,
82,
340,
743,
75... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIgnoreReadOnlyColRWSets(t *testing.T) {
// Scenario: The transaction has some ColRWSets that have only reads and no writes,
// These should be ignored and not considered as missing private data that needs to be retrieved
// from the transient store or other peers.
// The gossip and transient store mocks in this test aren't initialized with
// actions, so if the coordinator attempts to fetch private data from the
// transient store or other peers, the test would fail.
// Also - we check that at commit time - the coordinator concluded that
// no missing private data was found.
peerSelfSignedData := common.SignedData{
Identity: []byte{0, 1, 2},
Signature: []byte{3, 4, 5},
Data: []byte{6, 7, 8},
}
cs := createcollectionStore(peerSelfSignedData).thatAcceptsAll()
var commitHappened bool
assertCommitHappened := func() {
assert.True(t, commitHappened)
commitHappened = false
}
committer := &mocks.Committer{}
committer.On("CommitWithPvtData", mock.Anything).Run(func(args mock.Arguments) {
blockAndPrivateData := args.Get(0).(*ledger.BlockAndPvtData)
// Ensure there is no private data to commit
assert.Empty(t, blockAndPrivateData.PvtData)
// Ensure there is no missing private data
assert.Empty(t, blockAndPrivateData.MissingPvtData)
commitHappened = true
}).Return(nil)
store := &mockTransientStore{t: t}
fetcher := &fetcherMock{t: t}
hash := util2.ComputeSHA256([]byte("rws-pre-image"))
bf := &blockFactory{
channelID: "test",
}
// The block contains a read only private data transaction
block := bf.AddReadOnlyTxn("tx1", "ns3", hash, "c3", "c2").create()
metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics
coordinator := NewCoordinator(Support{
CollectionStore: cs,
Committer: committer,
Fetcher: fetcher,
TransientStore: store,
Validator: &validatorMock{},
}, peerSelfSignedData, metrics, testConfig)
// We pass a nil private data slice to indicate no pre-images though the block contains
// private data reads.
err := coordinator.StoreBlock(block, nil)
assert.NoError(t, err)
assertCommitHappened()
} | explode_data.jsonl/36191 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 715
} | [
2830,
3393,
12497,
20914,
6127,
49,
7433,
1415,
1155,
353,
8840,
836,
8,
341,
197,
322,
58663,
25,
576,
7745,
702,
1045,
4254,
49,
7433,
1415,
429,
614,
1172,
15804,
323,
902,
13914,
345,
197,
322,
4220,
1265,
387,
12036,
323,
537,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTransferInputAmount(t *testing.T) {
in := TransferInput{
Amt: 1,
Input: Input{
SigIndices: []uint32{0, 1},
},
}
if amount := in.Amount(); amount != 1 {
t.Fatalf("Input.Amount returned the wrong amount. Result: %d ; Expected: %d", amount, 1)
}
} | explode_data.jsonl/31619 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 108
} | [
2830,
3393,
21970,
2505,
10093,
1155,
353,
8840,
836,
8,
341,
17430,
1669,
23200,
2505,
515,
197,
22985,
2501,
25,
220,
16,
345,
197,
66588,
25,
5571,
515,
298,
7568,
343,
31941,
25,
3056,
2496,
18,
17,
90,
15,
11,
220,
16,
1583,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestApp01myCustomerHndlrRowInsert(t *testing.T) {
var td *TestData_App01myCustomer
var rcd App01myCustomer.App01myCustomer
//expectedBody := ""
t.Logf("TestCustomerRowInsert()...\n")
td = &TestData_App01myCustomer{}
td.Setup(t)
// Insert a "Z" record.
rcd.TestData(25) // "Z"
keys := rcd.KeysToValue()
data := rcd.FieldsToValue()
urlStr := fmt.Sprintf("/Customer/insert?%s", keys)
t.Logf("\tSetting up to insert (%d)\"%s\" row...\n", len(keys), keys)
td.PostReq(urlStr, data)
// Now get the Response and check it.
td.CheckStatus(http.StatusOK)
t.Logf("\t actualHeader: %q\n", td.Resp.Header)
actualBody := td.ResponseBody()
t.Logf("\t actualBody: %s\n", string(actualBody))
//TODO: Update this (right now, output is too much.)
//if expectedBody != string(actualBody) {
//t.Errorf("Expected the message '%s'\n", expectedBody)
//}
t.Logf("TestCustomerRowInsert() - End of Test\n\n\n")
} | explode_data.jsonl/63215 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 438
} | [
2830,
3393,
2164,
15,
16,
2408,
12792,
39,
303,
19018,
3102,
13780,
1155,
353,
8840,
836,
8,
341,
262,
762,
17941,
688,
353,
83920,
36117,
15,
16,
2408,
12792,
198,
262,
762,
435,
4385,
260,
1845,
15,
16,
2408,
12792,
5105,
15,
16,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCacheMiss(t *testing.T) {
c := cache.New()
c.SetWithExpiration("test-key", "test-value", maxTime)
_, ok := c.Get("nothing")
if ok {
t.Error("cache hit")
}
} | explode_data.jsonl/70365 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 73
} | [
2830,
3393,
8233,
35312,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
6500,
7121,
2822,
1444,
4202,
2354,
66301,
445,
1944,
16173,
497,
330,
1944,
19083,
497,
1932,
1462,
692,
197,
6878,
5394,
1669,
272,
2234,
445,
41212,
1138,
743,
5394,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestProcessFlags(t *testing.T) {
ctx := &context.Context{
Version: "1.2.3",
}
ctx.Git.CurrentTag = "5.6.7"
artifact := &artifact.Artifact{
Name: "name",
Goos: "darwin",
Goarch: "amd64",
Goarm: "7",
Extra: map[string]interface{}{
"Binary": "binary",
},
}
source := []string{
"flag",
"{{.Version}}",
"{{.Os}}",
"{{.Arch}}",
"{{.Arm}}",
"{{.Binary}}",
"{{.ArtifactName}}",
}
expected := []string{
"-testflag=flag",
"-testflag=1.2.3",
"-testflag=darwin",
"-testflag=amd64",
"-testflag=7",
"-testflag=binary",
"-testflag=name",
}
flags, err := processFlags(ctx, artifact, []string{}, source, "-testflag=")
require.NoError(t, err)
require.Len(t, flags, 7)
require.Equal(t, expected, flags)
} | explode_data.jsonl/54155 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 351
} | [
2830,
3393,
7423,
9195,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
609,
2147,
9328,
515,
197,
77847,
25,
330,
16,
13,
17,
13,
18,
756,
197,
532,
20985,
1224,
275,
11517,
5668,
284,
330,
20,
13,
21,
13,
22,
1837,
197,
63722,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestNewServer builds a Pilot server from NewPilotArgs for each table case,
// starts it on dynamically assigned ports, and verifies that the configured
// DomainSuffix (or the Kubernetes default when empty) is reflected by the
// running server's environment.
func TestNewServer(t *testing.T) {
	// All of the settings to apply and verify. Currently just testing domain suffix,
	// but we should expand this list.
	cases := []struct {
		name           string
		domain         string
		expectedDomain string
	}{
		{
			name:           "default domain",
			domain:         "",
			expectedDomain: constants.DefaultKubernetesDomain,
		},
		{
			name:           "override domain",
			domain:         "mydomain.com",
			expectedDomain: "mydomain.com",
		},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			// Scratch directory for the file-based config store; removed when
			// the subtest ends.
			configDir, err := ioutil.TempDir("", "TestNewServer")
			if err != nil {
				t.Fatal(err)
			}
			defer func() {
				_ = os.RemoveAll(configDir)
			}()
			args := NewPilotArgs(func(p *PilotArgs) {
				p.Namespace = "istio-system"
				p.DiscoveryOptions = DiscoveryServiceOptions{
					// Dynamically assign all ports.
					HTTPAddr:       ":0",
					MonitoringAddr: ":0",
					GrpcAddr:       ":0",
				}
				p.Config = ConfigArgs{
					ControllerOptions: kubecontroller.Options{
						DomainSuffix: c.domain,
					},
					FileDir: configDir,
				}
				meshCfg := mesh.DefaultMeshConfig()
				p.MeshConfig = &meshCfg
				// Use the config store for service entries as well.
				p.Service = ServiceArgs{
					// A ServiceEntry registry is added by default, which is what we want. Don't include any other registries.
					Registries: []string{},
				}
				// Include all of the default plugins for integration with Mixer, etc.
				p.Plugins = DefaultPlugins
				p.ShutdownDuration = 1 * time.Millisecond
			})
			g := NewGomegaWithT(t)
			s, err := NewServer(args)
			g.Expect(err).To(Succeed())
			// Start the server, and make sure it has fully shut down before
			// the subtest returns.
			stop := make(chan struct{})
			g.Expect(s.Start(stop)).To(Succeed())
			defer func() {
				close(stop)
				s.WaitUntilCompletion()
			}()
			g.Expect(s.environment.GetDomainSuffix()).To(Equal(c.expectedDomain))
		})
	}
} | explode_data.jsonl/9021 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 832
} | [
2830,
3393,
3564,
5475,
1155,
353,
8840,
836,
8,
341,
197,
322,
2009,
315,
279,
5003,
311,
3796,
323,
10146,
13,
24150,
1101,
7497,
7947,
20525,
345,
197,
322,
714,
582,
1265,
9225,
419,
1140,
624,
1444,
2264,
1669,
3056,
1235,
341,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestAcls_DeleteAclSuccess verifies that Client.DeleteAcl issues an HTTP
// DELETE to /clusters/cluster-1/acls and treats a 200 response carrying the
// matched ACL bindings as success (no error returned).
func TestAcls_DeleteAclSuccess(t *testing.T) {
	mock := MockHttpClient{}
	mk := MockKafkaClient{}
	// Stub transport: assert the verb and URI the client uses, then hand back
	// a canned JSON listing of ACL bindings with status 200. The raw-string
	// body below is part of the test contract and must stay byte-identical.
	mock.DoRequestFn = func(method string, uri string, reqBody io.Reader) (responseBody []byte, statusCode int, status string, err error) {
		assert.Equal(t, http.MethodDelete, method, "Expected method 'Delete', got %s", method)
		assert.Equal(t, "/clusters/cluster-1/acls", uri)
		return []byte(`
{
"data": [
{
"kind": "KafkaAcl",
"metadata": {
"self": "http://localhost:9391/v3/clusters/cluster-1/acls?resource_type=TOPIC&resource_name=topic-&pattern_type=PREFIXED&principal=alice&host=*&operation=ALL&permission=ALLOW"
},
"cluster_id": "cluster-1",
"resource_type": "TOPIC",
"resource_name": "topic-",
"pattern_type": "PREFIXED",
"principal": "alice",
"host": "*",
"operation": "ALL",
"permission": "ALLOW"
},
{
"kind": "KafkaAcl",
"metadata": {
"self": "http://localhost:9391/v3/clusters/cluster-1/acls?resource_type=CLUSTER&resource_name=cluster-1&pattern_type=LITERAL&principal=bob&host=*&operation=DESCRIBE&permission=DENY"
},
"cluster_id": "cluster-1",
"resource_type": "CLUSTER",
"resource_name": "cluster-2",
"pattern_type": "LITERAL",
"principal": "alice",
"host": "*",
"operation": "DESCRIBE",
"permission": "DENY"
}
]
}
`), 200, "200", nil
	}
	clusterAdmin, _ := mk.NewSaramaClusterAdmin()
	c := NewClient(&mock, &mk, clusterAdmin)
	err := c.DeleteAcl("cluster-1", "")
	assert.NoError(t, err)
} | explode_data.jsonl/10191 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 730
} | [
2830,
3393,
32,
18074,
57418,
32,
564,
7188,
1155,
353,
8840,
836,
8,
341,
77333,
1669,
14563,
26316,
16094,
2109,
74,
1669,
14563,
42,
21883,
2959,
16094,
77333,
33596,
1900,
24911,
284,
2915,
17262,
914,
11,
13071,
914,
11,
4232,
5444... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestServicePlanCompletionHandler drives the shell-completion handler for
// service plans: with no service class named on the command line it offers
// nothing, and once a class is named it offers that class's plan names (one
// or many), served from faked service-catalog list reactors.
func TestServicePlanCompletionHandler(t *testing.T) {
	serviceClassList := &scv1beta1.ClusterServiceClassList{
		Items: []scv1beta1.ClusterServiceClass{testingutil.FakeClusterServiceClass("class name", "dummy")},
	}
	tests := []struct {
		name                 string
		returnedServiceClass *scv1beta1.ClusterServiceClassList
		returnedServicePlan  []scv1beta1.ClusterServicePlan
		output               []string
		parsedArgs           parsedArgs
	}{
		{
			name: "Case 0: no service name supplied",
			parsedArgs: parsedArgs{
				original: complete.Args{
					Completed: []string{"create"},
				},
			},
			output: []string{},
		},
		{
			name:                 "Case 1: single plan exists",
			returnedServiceClass: serviceClassList,
			returnedServicePlan:  []scv1beta1.ClusterServicePlan{testingutil.FakeClusterServicePlan("default", 1)},
			parsedArgs: parsedArgs{
				original: complete.Args{
					Completed: []string{"create", "class name"},
				},
			},
			output: []string{"default"},
		},
		{
			name:                 "Case 2: multiple plans exist",
			returnedServiceClass: serviceClassList,
			returnedServicePlan: []scv1beta1.ClusterServicePlan{
				testingutil.FakeClusterServicePlan("plan1", 1),
				testingutil.FakeClusterServicePlan("plan2", 2),
			},
			parsedArgs: parsedArgs{
				original: complete.Args{
					Completed: []string{"create", "class name"},
				},
			},
			output: []string{"plan1", "plan2"},
		},
	}
	for _, tt := range tests {
		client, fakeClientSet := occlient.FakeNew()
		context := genericclioptions.NewFakeContext("project", "app", "component", client)
		// Serve the canned class/plan lists from the fake clientset so the
		// handler never touches a real cluster.
		fakeClientSet.ServiceCatalogClientSet.PrependReactor("list", "clusterserviceclasses", func(action ktesting.Action) (handled bool, ret runtime.Object, err error) {
			return true, tt.returnedServiceClass, nil
		})
		fakeClientSet.ServiceCatalogClientSet.PrependReactor("list", "clusterserviceplans", func(action ktesting.Action) (handled bool, ret runtime.Object, err error) {
			return true, &scv1beta1.ClusterServicePlanList{Items: tt.returnedServicePlan}, nil
		})
		completions := ServicePlanCompletionHandler(nil, tt.parsedArgs, context)
		// Sort the output and expected output in order to avoid false negatives (since ordering of the results is not important)
		sort.Strings(completions)
		sort.Strings(tt.output)
		if !reflect.DeepEqual(tt.output, completions) {
			t.Errorf("expected output: %#v,got: %#v", tt.output, completions)
		}
	}
} | explode_data.jsonl/3545 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 926
} | [
2830,
3393,
1860,
20485,
33190,
3050,
1155,
353,
8840,
836,
8,
341,
52934,
1957,
852,
1669,
609,
2388,
85,
16,
19127,
16,
72883,
1860,
1957,
852,
515,
197,
197,
4353,
25,
3056,
2388,
85,
16,
19127,
16,
72883,
1860,
1957,
90,
8840,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestSendRstOnListenerRxSynAckV6 checks that an IPv6 endpoint in the LISTEN
// state answers an unsolicited SYN|ACK segment with a RST whose sequence
// number equals the ACK number of the offending segment (200 here).
func TestSendRstOnListenerRxSynAckV6(t *testing.T) {
	c := context.New(t, defaultMTU)
	defer c.Cleanup()
	c.CreateV6Endpoint(true)
	if err := c.EP.Bind(tcpip.FullAddress{Port: context.StackPort}); err != nil {
		t.Fatal("Bind failed:", err)
	}
	if err := c.EP.Listen(10); err != nil {
		t.Fatal("Listen failed:", err)
	}
	// Inject a SYN|ACK aimed at the listener. No handshake is in progress,
	// so the segment is unacceptable.
	c.SendV6Packet(nil, &context.Headers{
		SrcPort: context.TestPort,
		DstPort: context.StackPort,
		Flags:   header.TCPFlagSyn | header.TCPFlagAck,
		SeqNum:  100,
		AckNum:  200,
	})
	// Expect a RST back, sequenced at the segment's AckNum (200).
	checker.IPv6(t, c.GetV6Packet(), checker.TCP(
		checker.DstPort(context.TestPort),
		checker.TCPFlags(header.TCPFlagRst),
		checker.TCPSeqNum(200)))
} | explode_data.jsonl/75936 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 297
} | [
2830,
3393,
11505,
49,
267,
1925,
2743,
50639,
37134,
55559,
53,
21,
1155,
353,
8840,
836,
8,
341,
1444,
1669,
2266,
7121,
1155,
11,
1638,
8505,
52,
340,
16867,
272,
727,
60639,
2822,
1444,
7251,
53,
21,
27380,
3715,
692,
743,
1848,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestServerErrNilHandler verifies that starting a server without a handler
// is rejected: (*Server).Start(nil) must return a non-nil error.
func TestServerErrNilHandler(t *testing.T) {
	server := NewServer(RedisClientOpt{Addr: ":6379"}, Config{LogLevel: testLogLevel})
	if startErr := server.Start(nil); startErr == nil {
		t.Error("Starting server with nil handler: (*Server).Start(nil) did not return error")
		// The server started by mistake; shut it down so the test does not
		// leak a running instance.
		server.Stop()
	}
} | explode_data.jsonl/81845 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 102
} | [
2830,
3393,
5475,
7747,
19064,
3050,
1155,
353,
8840,
836,
8,
341,
1903,
10553,
1669,
1532,
5475,
2785,
41825,
2959,
21367,
90,
13986,
25,
13022,
21,
18,
22,
24,
14345,
5532,
90,
72676,
25,
1273,
72676,
3518,
9859,
1669,
43578,
12101,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestCaptiveGetLedger_NextLedgerIsDifferentToLedgerFromBuffer feeds the
// captive-core meta pipe with ledgers 64 and 65, then jumps to 68, skipping
// 66. GetLedger(66) must therefore fail with an "unexpected ledger sequence"
// error rather than silently returning the wrong ledger.
func TestCaptiveGetLedger_NextLedgerIsDifferentToLedgerFromBuffer(t *testing.T) {
	metaChan := make(chan metaResult, 100)
	for i := 64; i <= 65; i++ {
		meta := buildLedgerCloseMeta(testLedgerHeader{sequence: uint32(i)})
		metaChan <- metaResult{
			LedgerCloseMeta: &meta,
		}
	}
	{
		// Deliberately out-of-order: ledger 68 arrives where 66 is expected.
		meta := buildLedgerCloseMeta(testLedgerHeader{sequence: uint32(68)})
		metaChan <- metaResult{
			LedgerCloseMeta: &meta,
		}
	}
	ctx := context.Background()
	mockRunner := &stellarCoreRunnerMock{}
	mockRunner.On("catchup", uint32(65), uint32(66)).Return(nil)
	mockRunner.On("getMetaPipe").Return((<-chan metaResult)(metaChan))
	mockRunner.On("context").Return(ctx)
	mockRunner.On("close").Return(nil)
	mockArchive := &historyarchive.MockArchive{}
	mockArchive.
		On("GetRootHAS").
		Return(historyarchive.HistoryArchiveState{
			CurrentLedger: uint32(200),
		}, nil)
	captiveBackend := CaptiveStellarCore{
		archive: mockArchive,
		stellarCoreRunnerFactory: func(_ stellarCoreRunnerMode) (stellarCoreRunnerInterface, error) {
			return mockRunner, nil
		},
		checkpointManager: historyarchive.NewCheckpointManager(64),
	}
	err := captiveBackend.PrepareRange(ctx, BoundedRange(65, 66))
	assert.NoError(t, err)
	_, err = captiveBackend.GetLedger(ctx, 66)
	assert.EqualError(t, err, "unexpected ledger sequence (expected=66 actual=68)")
	// TODO assertions should work - to be fixed in a separate PR.
	// _, err = captiveBackend.GetLedger(ctx, 66)
	// assert.EqualError(t, err, "session is closed, call PrepareRange first")
	mockArchive.AssertExpectations(t)
	mockRunner.AssertExpectations(t)
} | explode_data.jsonl/7325 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 572
} | [
2830,
3393,
34,
27781,
1949,
60850,
1389,
1604,
427,
60850,
1389,
3872,
69123,
1249,
60850,
1389,
3830,
4095,
1155,
353,
8840,
836,
8,
341,
84004,
46019,
1669,
1281,
35190,
8823,
2077,
11,
220,
16,
15,
15,
692,
2023,
600,
1669,
220,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestIncr exercises the counter operations of the in-memory cache: Set
// seeds "k3" with "1", Incr/Decr move the counter by one, IncrN/DecrN move
// it by an arbitrary step, and Get returns the stored string value.
func TestIncr(t *testing.T) {
	is := assert.New(t)

	// Seed the key with "1" and a one-minute TTL.
	is.Nil(cMem.Set("k3", "1", time.Minute))

	// Increment by one: 1 -> 2.
	got, err := cMem.Incr("k3")
	is.Nil(err)
	is.EqualValues(2, got)

	// Decrement by one: 2 -> 1.
	got, err = cMem.Decr("k3")
	is.Nil(err)
	is.EqualValues(1, got)

	// Increment by three: 1 -> 4.
	got, err = cMem.IncrN("k3", 3)
	is.Nil(err)
	is.EqualValues(4, got)

	// Decrement by three: 4 -> 1.
	got, err = cMem.DecrN("k3", 3)
	is.Nil(err)
	is.EqualValues(1, got)

	// Read the value back as a string.
	stored, err := cMem.Get("k3")
	is.Nil(err)
	is.Equal("1", stored)
} | explode_data.jsonl/74759 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 267
} | [
2830,
3393,
641,
5082,
1155,
353,
8840,
836,
8,
341,
6948,
1669,
2060,
7121,
1155,
340,
197,
322,
2573,
198,
9859,
1669,
272,
18816,
4202,
445,
74,
18,
497,
330,
16,
497,
882,
75770,
340,
6948,
59678,
3964,
692,
197,
322,
66826,
198... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestResizeParams verifies resizeParams against a 64x128 source image:
// fractional Width/Height options scale relative to the source, sizes larger
// than the source only resize when ScaleUp is set, and sizes equal to the
// source dimensions request no resize at all.
func TestResizeParams(t *testing.T) {
	src := image.NewNRGBA(image.Rect(0, 0, 64, 128))
	testCases := []struct {
		opt    Options
		w, h   int
		resize bool
	}{
		{Options{Width: 0.5}, 32, 0, true},
		{Options{Height: 0.5}, 0, 64, true},
		{Options{Width: 0.5, Height: 0.5}, 32, 64, true},
		{Options{Width: 100, Height: 200}, 0, 0, false},
		{Options{Width: 100, Height: 200, ScaleUp: true}, 100, 200, true},
		{Options{Width: 64}, 0, 0, false},
		{Options{Height: 128}, 0, 0, false},
	}
	for _, tc := range testCases {
		gotW, gotH, gotResize := resizeParams(src, tc.opt)
		if gotW != tc.w || gotH != tc.h || gotResize != tc.resize {
			t.Errorf("resizeParams(%v) returned (%d,%d,%t), want (%d,%d,%t)", tc.opt, gotW, gotH, gotResize, tc.w, tc.h, tc.resize)
		}
	}
} | explode_data.jsonl/11689 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 340
} | [
2830,
3393,
30561,
4870,
1155,
353,
8840,
836,
8,
341,
41144,
1669,
2168,
7121,
45,
58927,
10075,
32153,
7,
15,
11,
220,
15,
11,
220,
21,
19,
11,
220,
16,
17,
23,
1171,
78216,
1669,
3056,
1235,
341,
197,
64838,
262,
14566,
198,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
// TestExportedFields loads the apidiff "exported_fields" fixture package and
// checks that exportedSelectableFields on struct S reports the expected set
// of selectable fields with their expected types. (The field named "S" of
// type *S presumably comes from the fixture's embedding — confirm against
// the testdata package.)
func TestExportedFields(t *testing.T) {
	pkg, err := load(t, "golang.org/x/tools/internal/apidiff/testdata/exported_fields", "")
	if err != nil {
		t.Fatal(err)
	}
	// typeof resolves a named type from the fixture package's scope.
	typeof := func(name string) types.Type {
		return pkg.Types.Scope().Lookup(name).Type()
	}
	s := typeof("S")
	su := s.(*types.Named).Underlying().(*types.Struct)
	ef := exportedSelectableFields(su)
	// Expected field name -> type pairs.
	wants := []struct {
		name string
		typ  types.Type
	}{
		{"A1", typeof("A1")},
		{"D", types.Typ[types.Bool]},
		{"E", types.Typ[types.Int]},
		{"F", typeof("F")},
		{"S", types.NewPointer(s)},
	}
	if got, want := len(ef), len(wants); got != want {
		t.Errorf("got %d fields, want %d\n%+v", got, want, ef)
	}
	for _, w := range wants {
		if got := ef[w.name]; got != nil && !types.Identical(got.Type(), w.typ) {
			t.Errorf("%s: got %v, want %v", w.name, got.Type(), w.typ)
		}
	}
} | explode_data.jsonl/39381 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 381
} | [
2830,
3393,
16894,
291,
8941,
1155,
353,
8840,
836,
8,
341,
3223,
7351,
11,
1848,
1669,
2795,
1155,
11,
330,
70,
37287,
2659,
10776,
45714,
30968,
24670,
307,
3092,
12697,
691,
66948,
291,
12132,
497,
14676,
743,
1848,
961,
2092,
341,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestPolicy_SetSecureDefaults runs SetSecureDefaults over a table of
// policies. The table is currently empty (cases still to be written), so
// for now this is only a scaffold.
func TestPolicy_SetSecureDefaults(t *testing.T) {
	testCases := []struct {
		name string
		p    *Policy
	}{
		// TODO: Add test cases.
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			tc.p.SetSecureDefaults()
		})
	}
} | explode_data.jsonl/10348 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 107
} | [
2830,
3393,
13825,
14812,
49813,
16273,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
914,
198,
197,
3223,
262,
353,
13825,
198,
197,
59403,
197,
197,
322,
5343,
25,
2691,
1273,
5048,
624,
197,
532,
2023,
835... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestOAuthComplete exercises the /login/gitlab/complete OAuth callback end
// to end. It first checks the failure paths (SSO disabled, malformed state,
// state without a hash), then registers Mattermost itself as a fake GitLab
// provider and walks the authorize/complete round trip for the
// email-to-SSO, login, missing-action, and signup state actions.
func TestOAuthComplete(t *testing.T) {
	if testing.Short() {
		t.SkipNow()
	}
	th := Setup().InitBasic()
	defer th.TearDown()
	Client := th.Client
	// Snapshot the GitLab/OAuth settings so the deferred block below can
	// restore them regardless of how the test exits.
	gitLabSettingsEnable := th.App.Config().GitLabSettings.Enable
	gitLabSettingsAuthEndpoint := th.App.Config().GitLabSettings.AuthEndpoint
	gitLabSettingsId := th.App.Config().GitLabSettings.Id
	gitLabSettingsSecret := th.App.Config().GitLabSettings.Secret
	gitLabSettingsTokenEndpoint := th.App.Config().GitLabSettings.TokenEndpoint
	gitLabSettingsUserApiEndpoint := th.App.Config().GitLabSettings.UserApiEndpoint
	enableOAuthServiceProvider := th.App.Config().ServiceSettings.EnableOAuthServiceProvider
	defer func() {
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.GitLabSettings.Enable = gitLabSettingsEnable })
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.GitLabSettings.AuthEndpoint = gitLabSettingsAuthEndpoint })
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.GitLabSettings.Id = gitLabSettingsId })
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.GitLabSettings.Secret = gitLabSettingsSecret })
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.GitLabSettings.TokenEndpoint = gitLabSettingsTokenEndpoint })
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.GitLabSettings.UserApiEndpoint = gitLabSettingsUserApiEndpoint })
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.ServiceSettings.EnableOAuthServiceProvider = enableOAuthServiceProvider })
	}()
	// Completion must fail while GitLab SSO is disabled.
	r, err := HttpGet(Client.Url+"/login/gitlab/complete?code=123", Client.HttpClient, "", true)
	assert.NotNil(t, err)
	closeBody(r)
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.Enable = true })
	// A garbage state parameter must also be rejected.
	r, err = HttpGet(Client.Url+"/login/gitlab/complete?code=123&state=!#$#F@#Yˆ&~ñ", Client.HttpClient, "", true)
	assert.NotNil(t, err)
	closeBody(r)
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.AuthEndpoint = Client.Url + "/oauth/authorize" })
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.Id = model.NewId() })
	stateProps := map[string]string{}
	stateProps["action"] = model.OAUTH_ACTION_LOGIN
	stateProps["team_id"] = th.BasicTeam.Id
	stateProps["redirect_to"] = *th.App.Config().GitLabSettings.AuthEndpoint
	// Well-formed state but without the settings hash: rejected.
	state := base64.StdEncoding.EncodeToString([]byte(model.MapToJson(stateProps)))
	r, err = HttpGet(Client.Url+"/login/gitlab/complete?code=123&state="+url.QueryEscape(state), Client.HttpClient, "", true)
	assert.NotNil(t, err)
	closeBody(r)
	// With the hash present, the request still errors (code=123 is bogus).
	stateProps["hash"] = utils.HashSha256(*th.App.Config().GitLabSettings.Id)
	state = base64.StdEncoding.EncodeToString([]byte(model.MapToJson(stateProps)))
	r, err = HttpGet(Client.Url+"/login/gitlab/complete?code=123&state="+url.QueryEscape(state), Client.HttpClient, "", true)
	assert.NotNil(t, err)
	closeBody(r)
	// We are going to use mattermost as the provider emulating gitlab
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableOAuthServiceProvider = true })
	defaultRolePermissions := th.SaveDefaultRolePermissions()
	defer func() {
		th.RestoreDefaultRolePermissions(defaultRolePermissions)
	}()
	th.AddPermissionToRole(model.PERMISSION_MANAGE_OAUTH.Id, model.TEAM_USER_ROLE_ID)
	th.AddPermissionToRole(model.PERMISSION_MANAGE_OAUTH.Id, model.SYSTEM_USER_ROLE_ID)
	// Register an OAuth app whose callbacks point back at this server's
	// signup/login completion endpoints.
	oauthApp := &model.OAuthApp{
		Name:        "TestApp5" + model.NewId(),
		Homepage:    "https://nowhere.com",
		Description: "test",
		CallbackUrls: []string{
			Client.Url + "/signup/" + model.SERVICE_GITLAB + "/complete",
			Client.Url + "/login/" + model.SERVICE_GITLAB + "/complete",
		},
		IsTrusted: true,
	}
	oauthApp = Client.Must(Client.CreateOAuthApp(oauthApp)).(*model.OAuthApp)
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.Id = oauthApp.Id })
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.Secret = oauthApp.ClientSecret })
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.AuthEndpoint = Client.Url + "/oauth/authorize" })
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.TokenEndpoint = Client.Url + "/oauth/access_token" })
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.GitLabSettings.UserApiEndpoint = Client.ApiUrl + "/users/me" })
	provider := &MattermostTestProvider{}
	authRequest := &model.AuthorizeRequest{
		ResponseType: model.AUTHCODE_RESPONSE_TYPE,
		ClientId:     oauthApp.Id,
		RedirectUri:  oauthApp.CallbackUrls[0],
		Scope:        "all",
		State:        "123",
	}
	// Email-to-SSO flow, before the provider is registered.
	redirect, resp := Client.AuthorizeOAuthApp(authRequest)
	CheckNoError(t, resp)
	rurl, _ := url.Parse(redirect)
	code := rurl.Query().Get("code")
	stateProps["action"] = model.OAUTH_ACTION_EMAIL_TO_SSO
	delete(stateProps, "team_id")
	stateProps["redirect_to"] = *th.App.Config().GitLabSettings.AuthEndpoint
	stateProps["hash"] = utils.HashSha256(*th.App.Config().GitLabSettings.Id)
	stateProps["redirect_to"] = "/oauth/authorize"
	state = base64.StdEncoding.EncodeToString([]byte(model.MapToJson(stateProps)))
	if r, err := HttpGet(Client.Url+"/login/"+model.SERVICE_GITLAB+"/complete?code="+url.QueryEscape(code)+"&state="+url.QueryEscape(state), Client.HttpClient, "", false); err == nil {
		closeBody(r)
	}
	// Same flow once Mattermost is registered as the gitlab provider.
	einterfaces.RegisterOauthProvider(model.SERVICE_GITLAB, provider)
	redirect, resp = Client.AuthorizeOAuthApp(authRequest)
	CheckNoError(t, resp)
	rurl, _ = url.Parse(redirect)
	code = rurl.Query().Get("code")
	if r, err := HttpGet(Client.Url+"/login/"+model.SERVICE_GITLAB+"/complete?code="+url.QueryEscape(code)+"&state="+url.QueryEscape(state), Client.HttpClient, "", false); err == nil {
		closeBody(r)
	}
	// Link the basic user to the gitlab auth service, then retry the flow.
	if result := <-th.App.Srv.Store.User().UpdateAuthData(
		th.BasicUser.Id, model.SERVICE_GITLAB, &th.BasicUser.Email, th.BasicUser.Email, true); result.Err != nil {
		t.Fatal(result.Err)
	}
	// Login action.
	redirect, resp = Client.AuthorizeOAuthApp(authRequest)
	CheckNoError(t, resp)
	rurl, _ = url.Parse(redirect)
	code = rurl.Query().Get("code")
	stateProps["action"] = model.OAUTH_ACTION_LOGIN
	state = base64.StdEncoding.EncodeToString([]byte(model.MapToJson(stateProps)))
	if r, err := HttpGet(Client.Url+"/login/"+model.SERVICE_GITLAB+"/complete?code="+url.QueryEscape(code)+"&state="+url.QueryEscape(state), Client.HttpClient, "", false); err == nil {
		closeBody(r)
	}
	// No action in the state.
	redirect, resp = Client.AuthorizeOAuthApp(authRequest)
	CheckNoError(t, resp)
	rurl, _ = url.Parse(redirect)
	code = rurl.Query().Get("code")
	delete(stateProps, "action")
	state = base64.StdEncoding.EncodeToString([]byte(model.MapToJson(stateProps)))
	if r, err := HttpGet(Client.Url+"/login/"+model.SERVICE_GITLAB+"/complete?code="+url.QueryEscape(code)+"&state="+url.QueryEscape(state), Client.HttpClient, "", false); err == nil {
		closeBody(r)
	}
	// Signup action.
	redirect, resp = Client.AuthorizeOAuthApp(authRequest)
	CheckNoError(t, resp)
	rurl, _ = url.Parse(redirect)
	code = rurl.Query().Get("code")
	stateProps["action"] = model.OAUTH_ACTION_SIGNUP
	state = base64.StdEncoding.EncodeToString([]byte(model.MapToJson(stateProps)))
	if r, err := HttpGet(Client.Url+"/login/"+model.SERVICE_GITLAB+"/complete?code="+url.QueryEscape(code)+"&state="+url.QueryEscape(state), Client.HttpClient, "", false); err == nil {
		closeBody(r)
	}
} | explode_data.jsonl/30135 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2718
} | [
2830,
3393,
57850,
12548,
1155,
353,
8840,
836,
8,
341,
743,
7497,
55958,
368,
341,
197,
3244,
57776,
7039,
741,
197,
630,
70479,
1669,
18626,
1005,
3803,
15944,
741,
16867,
270,
836,
682,
4454,
2822,
71724,
1669,
270,
11716,
271,
90731... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestExtractHostname checks extractHostname against a table of raw inputs
// and their expected sanitized hostnames: empty input, full URLs (scheme,
// path, query, and fragment stripped), and strings of special characters
// that are normalized with underscores.
func TestExtractHostname(t *testing.T) {
	expectations := map[string]string{
		"": "",
		"http://www.example.org/": "www.example.org",
		"++#+++#jhlkadsrezu 33 553q ++++##$§&": "jhlkadsrezu_33_553q",
		"www.example.org/?foo=bar#abc":         "www.example.org",
		"a test":                               "a_test",
	}
	for input, want := range expectations {
		assert.Equal(t, want, extractHostname(input))
	}
} | explode_data.jsonl/23332 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 242
} | [
2830,
3393,
28959,
88839,
1155,
353,
8840,
836,
8,
341,
2023,
304,
11,
700,
1669,
2088,
2415,
14032,
30953,
515,
197,
197,
28796,
10589,
8324,
197,
197,
76932,
1110,
2136,
7724,
2659,
14,
788,
1060,
330,
2136,
7724,
2659,
756,
197,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
// TestCreateService checks that CreateService issues a POST to /service/
// carrying every argument in the request payload, and decodes the service
// returned by the server.
func TestCreateService(t *testing.T) {
	client, server := newClientAndServer(func(w http.ResponseWriter, r *http.Request) {
		// Verify the HTTP verb and endpoint.
		testutils.AssertEqual(t, r.Method, "POST")
		testutils.AssertEqual(t, r.URL.Path, "/service/")

		// Verify the payload carries each argument passed to CreateService.
		var req models.CreateServiceRequest
		Unmarshal(t, r, &req)
		testutils.AssertEqual(t, req.ServiceName, "name")
		testutils.AssertEqual(t, req.EnvironmentID, "environmentID")
		testutils.AssertEqual(t, req.DeployID, "deployID")
		testutils.AssertEqual(t, req.LoadBalancerID, "loadBalancerID")

		// Respond with a canned service for the client to decode.
		MarshalAndWrite(t, w, models.Service{ServiceID: "id"}, 200)
	})
	defer server.Close()

	service, err := client.CreateService("name", "environmentID", "deployID", "loadBalancerID")
	if err != nil {
		t.Fatal(err)
	}
	testutils.AssertEqual(t, service.ServiceID, "id")
} | explode_data.jsonl/24178 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 309
} | [
2830,
3393,
4021,
1860,
1155,
353,
8840,
836,
8,
341,
53326,
1669,
2915,
3622,
1758,
37508,
11,
435,
353,
1254,
9659,
8,
341,
197,
18185,
6031,
11711,
2993,
1155,
11,
435,
20798,
11,
330,
2946,
1138,
197,
18185,
6031,
11711,
2993,
115... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestCustomErrorWriter verifies that the custom error writer serves the
// skyfile's registered error page for each status code in ErrorPages (the
// page body is the 14-byte slice at that subfile's offset within the backing
// data), and falls back to writing the error message itself for a status
// code with no registered page (401).
// NOTE(review): Length is 60 while the backing data is 70 bytes (5 x 14) —
// confirm whether that mismatch is intentional.
func TestCustomErrorWriter(t *testing.T) {
	t.Parallel()
	// Five 14-byte error pages laid out back-to-back in `data` below.
	subfiles := skymodules.SkyfileSubfiles{
		"400.html": skymodules.SkyfileSubfileMetadata{
			Filename:    "400.html",
			ContentType: "text/html",
			Offset:      0,
			Len:         14,
		},
		"404.html": skymodules.SkyfileSubfileMetadata{
			Filename:    "404.html",
			ContentType: "text/html",
			Offset:      14,
			Len:         14,
		},
		"418.html": skymodules.SkyfileSubfileMetadata{
			Filename:    "418.html",
			ContentType: "text/html",
			Offset:      28,
			Len:         14,
		},
		"500.html": skymodules.SkyfileSubfileMetadata{
			Filename:    "500.html",
			ContentType: "text/html",
			Offset:      42,
			Len:         14,
		},
		"502.html": skymodules.SkyfileSubfileMetadata{
			Filename:    "502.html",
			ContentType: "text/html",
			Offset:      56,
			Len:         14,
		},
	}
	// Status code -> error page path mapping.
	eps := map[int]string{
		400: "/400.html",
		404: "/404.html",
		418: "/418.html",
		500: "/500.html",
		502: "/502.html",
	}
	meta := skymodules.SkyfileMetadata{
		Filename:   t.Name(),
		Length:     60,
		Subfiles:   subfiles,
		ErrorPages: eps,
	}
	data := []byte("FileContent400FileContent404FileContent418FileContent500FileContent502")
	rawMD, err := json.Marshal(meta)
	if err != nil {
		t.Fatal(err)
	}
	streamer := renter.SkylinkStreamerFromSlice(data, meta, rawMD, skymodules.Skylink{}, skymodules.SkyfileLayout{})
	ew := newCustomErrorWriter(meta, streamer)
	w := newTestHTTPWriter()
	// test all errorpage codes
	for code := range eps {
		codeStr := strconv.Itoa(code)
		ew.WriteError(w, Error{"This is an error with status " + codeStr}, code)
		sf, exists := subfiles[codeStr+".html"]
		if !exists {
			t.Fatalf("Expected to find a subfile with name %s", codeStr+".html")
		}
		// The served body must be exactly the subfile's slice of `data`.
		expectedData := data[sf.Offset : sf.Offset+sf.Len]
		if !reflect.DeepEqual(expectedData, w.WrittenContent()) {
			t.Fatalf("Expected content '%s', got '%s'", string(expectedData), string(w.WrittenContent()))
		}
	}
	// test a non-errorpages code
	errmsg := "we want to see this"
	ew.WriteError(w, Error{errmsg}, 401)
	if !strings.Contains(string(w.WrittenContent()), errmsg) {
		t.Fatalf("Expected content to contain '%s', got '%s'", errmsg, string(w.WrittenContent()))
	}
} | explode_data.jsonl/67180 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 950
} | [
2830,
3393,
10268,
1454,
6492,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
28624,
7198,
1669,
1901,
1600,
347,
2425,
808,
7891,
1192,
3136,
7198,
515,
197,
197,
1,
19,
15,
15,
2564,
788,
1901,
1600,
347,
2425,
808,
7891,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
// TestClient_CreateUser spins up an httptest server that plays the remote
// API: it asserts the request hits the user-creation path for the realm and
// answers 201. CreateUser must then report no error.
func TestClient_CreateUser(t *testing.T) {
	user := getDummyUser()
	realm := getDummyRealm()

	// Fake server: verify the request path, then acknowledge with 201 Created.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		assert.Equal(t, fmt.Sprintf(UserCreatePath, realm.Spec.Realm.Realm), req.URL.Path)
		w.WriteHeader(201)
	}))
	defer server.Close()

	client := Client{
		requester: server.Client(),
		URL:       server.URL,
		token:     "dummy",
	}

	assert.NoError(t, client.CreateUser(user, realm.Spec.Realm.Realm))
} | explode_data.jsonl/49874 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 232
} | [
2830,
3393,
2959,
34325,
1474,
1155,
353,
8840,
836,
8,
341,
197,
322,
2661,
198,
19060,
1669,
633,
43344,
1474,
741,
17200,
7673,
1669,
633,
43344,
64290,
2822,
53326,
1669,
1758,
89164,
18552,
3622,
1758,
37508,
11,
4232,
353,
1254,
9... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestRemoveGoPath checks that removeGoPath strips the GOPATH/src (and any
// vendor/) prefix from a fully qualified type path, for both Windows-style
// and slash-separated paths. It temporarily overrides the package-level
// defaultGoPath and separator globals and restores them via defer.
func TestRemoveGoPath(t *testing.T) {
	oldGoPath := defaultGoPath
	oldSep := separator
	defer func() {
		defaultGoPath = oldGoPath
		separator = oldSep
	}()
	cases := []struct {
		typ    string
		result string
		gopath []string
		sep    rune
	}{
		{
			`E:\workspace\gopath\src\github.com\networkteam\go-kallax\tests\fixtures.AliasString`,
			"github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			[]string{
				`E:\workspace\gopath`,
			},
			'\\',
		},
		{
			"/home/workspace/gopath/src/github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			"github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			[]string{
				"/home/foo/go",
				"/home/workspace/gopath",
			},
			'/',
		},
		{
			// A nested ".../go/src/..." inside the path must not confuse the
			// prefix stripping.
			"/go/src/foo/go/src/fixtures.AliasString",
			"foo/go/src/fixtures.AliasString",
			[]string{
				"/go",
			},
			'/',
		},
		{
			"/home/workspace/gopath/src/foo/bar/vendor/github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			"github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			[]string{
				"/home/foo/go",
				"/home/workspace/gopath",
			},
			'/',
		},
		{
			// "vendor" appearing inside the GOPATH itself is not a vendor dir.
			"/home/vendor/workspace/gopath/src/github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			"github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			[]string{
				"/home/foo/go",
				"/home/vendor/workspace/gopath",
			},
			'/',
		},
		{
			"/home/vendor/workspace/gopath/src/vendor/github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			"github.com/networkteam/go-kallax/tests/fixtures.AliasString",
			[]string{
				"/home/foo/go",
				"/home/vendor/workspace/gopath",
			},
			'/',
		},
	}
	for _, c := range cases {
		defaultGoPath = c.gopath
		separator = c.sep
		require.Equal(t, c.result, removeGoPath(c.typ), c.typ)
	}
} | explode_data.jsonl/43072 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 829
} | [
2830,
3393,
13021,
10850,
1820,
1155,
353,
8840,
836,
8,
341,
61828,
10850,
1820,
1669,
1638,
10850,
1820,
198,
61828,
41114,
1669,
24792,
198,
16867,
2915,
368,
341,
197,
11940,
10850,
1820,
284,
2310,
10850,
1820,
198,
197,
197,
40120,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBuildWithRemoveAndForceRemove(t *testing.T) {
skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME")
defer setupTest(t)()
cases := []struct {
name string
dockerfile string
numberOfIntermediateContainers int
rm bool
forceRm bool
}{
{
name: "successful build with no removal",
dockerfile: `FROM busybox
RUN exit 0
RUN exit 0`,
numberOfIntermediateContainers: 2,
rm: false,
forceRm: false,
},
{
name: "successful build with remove",
dockerfile: `FROM busybox
RUN exit 0
RUN exit 0`,
numberOfIntermediateContainers: 0,
rm: true,
forceRm: false,
},
{
name: "successful build with remove and force remove",
dockerfile: `FROM busybox
RUN exit 0
RUN exit 0`,
numberOfIntermediateContainers: 0,
rm: true,
forceRm: true,
},
{
name: "failed build with no removal",
dockerfile: `FROM busybox
RUN exit 0
RUN exit 1`,
numberOfIntermediateContainers: 2,
rm: false,
forceRm: false,
},
{
name: "failed build with remove",
dockerfile: `FROM busybox
RUN exit 0
RUN exit 1`,
numberOfIntermediateContainers: 1,
rm: true,
forceRm: false,
},
{
name: "failed build with remove and force remove",
dockerfile: `FROM busybox
RUN exit 0
RUN exit 1`,
numberOfIntermediateContainers: 0,
rm: true,
forceRm: true,
},
}
client := testEnv.APIClient()
ctx := context.Background()
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
t.Parallel()
dockerfile := []byte(c.dockerfile)
buff := bytes.NewBuffer(nil)
tw := tar.NewWriter(buff)
assert.NilError(t, tw.WriteHeader(&tar.Header{
Name: "Dockerfile",
Size: int64(len(dockerfile)),
}))
_, err := tw.Write(dockerfile)
assert.NilError(t, err)
assert.NilError(t, tw.Close())
resp, err := client.ImageBuild(ctx, buff, types.ImageBuildOptions{Remove: c.rm, ForceRemove: c.forceRm, NoCache: true})
assert.NilError(t, err)
defer resp.Body.Close()
filter, err := buildContainerIdsFilter(resp.Body)
assert.NilError(t, err)
remainingContainers, err := client.ContainerList(ctx, types.ContainerListOptions{Filters: filter, All: true})
assert.NilError(t, err)
assert.Equal(t, c.numberOfIntermediateContainers, len(remainingContainers), "Expected %v remaining intermediate containers, got %v", c.numberOfIntermediateContainers, len(remainingContainers))
})
}
} | explode_data.jsonl/82579 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1402
} | [
2830,
3393,
11066,
2354,
13021,
3036,
18573,
13021,
1155,
353,
8840,
836,
8,
341,
1903,
13389,
32901,
1155,
11,
1273,
14359,
909,
64,
7291,
1731,
13,
4233,
499,
621,
330,
27077,
497,
330,
81019,
1138,
16867,
6505,
2271,
1155,
8,
2822,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNewHTTPLogWriter(t *testing.T) {
cancel, hlw, err := buildTestHTTPLogWriter()
defer cancel()
assert.Nil(t, err)
assert.NotNil(t, hlw)
assert.Implements(t, (*LogWriter)(nil), hlw)
} | explode_data.jsonl/31962 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 88
} | [
2830,
3393,
3564,
9230,
2201,
6492,
1155,
353,
8840,
836,
8,
341,
84441,
11,
49586,
86,
11,
1848,
1669,
1936,
2271,
9230,
2201,
6492,
741,
16867,
9121,
2822,
6948,
59678,
1155,
11,
1848,
340,
6948,
93882,
1155,
11,
49586,
86,
340,
694... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestCookieStore_Set_Get(t *testing.T) {
opt := CookieOption{}
cookieman := New(&fakeCipher{}, opt)
name := "vim"
value := "vim vim vim"
w := httptest.NewRecorder()
vimStore := cookieman.NewCookieStore(name, nil)
if vimStore.Name() != name {
t.Errorf("CookieStore.Name() = %q, want %q", vimStore.Name(), name)
}
if err := vimStore.Set(w, []byte(value)); err != nil {
t.Error(err)
}
response := w.Result()
gotSetCookie := response.Header.Get("Set-Cookie")
wantSetCookie := fmt.Sprintf("%s=%s", name, base64.URLEncoding.EncodeToString([]byte(value)))
if gotSetCookie != wantSetCookie {
t.Errorf("CookieStore.Get: Set-Cookie value: got %q, want %q", gotSetCookie, wantSetCookie)
}
req := GetRequestWithCookie(w)
b, err := vimStore.Get(req)
if err != nil {
t.Fatal(err)
}
if got := string(b); got != value {
t.Errorf("CookieStore.Get: got %q, want %q", got, value)
}
} | explode_data.jsonl/48008 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 369
} | [
2830,
3393,
20616,
6093,
14812,
13614,
1155,
353,
8840,
836,
8,
341,
64838,
1669,
24356,
5341,
16094,
197,
1015,
38191,
15977,
1669,
1532,
2099,
30570,
79460,
22655,
3387,
340,
11609,
1669,
330,
41194,
698,
16309,
1669,
330,
41194,
36157,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestMysqlWithData(t *testing.T) {
client, err := GetClinet()
assert.Nil(t, err)
// geting the curnnet dir.
dir, err := os.Getwd()
if err != nil {
panic(err)
}
dataDir := strings.Replace(dir, " ", "\\ ", -1) + "/data/sql"
tests := []Continer{&Mysql{"root", "dbname", "root", "", dataDir, "5.6"},
&Mysql{"root", "dbname", "root", "", dataDir, "latest"}}
for _, m := range tests {
i, cid, err := m.CreateContiner(client)
defer m.RemoveContiner(client, cid)
assert.Nil(t, err)
db, ok := i.(*sql.DB)
assert.Equal(t, true, ok)
err = db.Ping()
assert.Nil(t, err)
rows, err := db.Query("SELECT * FROM world.City")
assert.Nil(t, err)
assert.True(t, rows.Next(), "expected true got ", err)
db.Close()
}
} | explode_data.jsonl/18766 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 330
} | [
2830,
3393,
44,
14869,
80356,
1155,
353,
8840,
836,
8,
341,
25291,
11,
1848,
1669,
2126,
67033,
295,
741,
6948,
59678,
1155,
11,
1848,
340,
197,
322,
633,
287,
279,
272,
399,
4711,
5419,
624,
48532,
11,
1848,
1669,
2643,
2234,
6377,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestGetBoardById_asJson(t *testing.T) {
board := testData.EmptyBoard
req := httptest.NewRequest("GET", fmt.Sprintf("/boards/%d", board.ID), nil)
req.Header.Set("Accept", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
httpassert.Success(t, w)
httpassert.JsonObject(t, w)
responseJson := app.Board{}
if err := json.NewDecoder(w.Body).Decode(&responseJson); err != nil {
t.Fatal(err)
}
if responseJson.ID != board.ID {
t.Error("response ID does not match board ID")
}
if responseJson.Width != board.Width {
t.Error("response Width does not match board")
}
if responseJson.Height != board.Height {
t.Error("response Height does not match board")
}
} | explode_data.jsonl/12542 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 263
} | [
2830,
3393,
1949,
11932,
2720,
11898,
5014,
1155,
353,
8840,
836,
8,
341,
59868,
1669,
67348,
11180,
11932,
271,
24395,
1669,
54320,
70334,
75274,
445,
3806,
497,
8879,
17305,
4283,
19270,
12627,
67,
497,
4479,
9910,
701,
2092,
340,
24395... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestConvertToTimeSeries(t *testing.T) {
// Setup exporter with default quantiles and histogram buckets
exporter := Exporter{
config: Config{
Quantiles: []float64{0.5, 0.9, .99},
},
}
// Test conversions based on aggregation type
tests := []struct {
name string
input export.CheckpointSet
want []*prompb.TimeSeries
wantLength int
}{
{
name: "convertFromSum",
input: getSumCheckpoint(t, 1, 2, 3, 4, 5),
want: wantSumCheckpointSet,
wantLength: 1,
},
{
name: "convertFromLastValue",
input: getLastValueCheckpoint(t, 1, 2, 3, 4, 5),
want: wantLastValueCheckpointSet,
wantLength: 1,
},
{
name: "convertFromMinMaxSumCount",
input: getMMSCCheckpoint(t, 123.456, 876.543),
want: wantMMSCCheckpointSet,
wantLength: 4,
},
{
name: "convertFromHistogram",
input: getHistogramCheckpoint(t),
want: wantHistogramCheckpointSet,
wantLength: 6,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := exporter.ConvertToTimeSeries(tt.input)
want := tt.want
// Check for errors and for the correct number of timeseries.
assert.Nil(t, err, "ConvertToTimeSeries error")
assert.Len(t, got, tt.wantLength, "Incorrect number of timeseries")
// The TimeSeries cannot be compared easily using assert.ElementsMatch or
// cmp.Equal since both the ordering of the timeseries and the ordering of the
// attributes inside each timeseries can change. To get around this, all the
// attributes and samples are added to maps first. There aren't many attributes or
// samples, so this nested loop shouldn't be a bottleneck.
gotAttributes := make(map[string]bool)
wantAttributes := make(map[string]bool)
gotSamples := make(map[string]bool)
wantSamples := make(map[string]bool)
for i := 0; i < len(got); i++ {
for _, attribute := range got[i].Labels {
gotAttributes[attribute.String()] = true
}
for _, attribute := range want[i].Labels {
wantAttributes[attribute.String()] = true
}
for _, sample := range got[i].Samples {
gotSamples[sample.String()] = true
}
for _, sample := range want[i].Samples {
wantSamples[sample.String()] = true
}
}
assert.Equal(t, wantAttributes, gotAttributes)
assert.Equal(t, wantSamples, gotSamples)
})
}
} | explode_data.jsonl/8036 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 989
} | [
2830,
3393,
12012,
1249,
1462,
25544,
1155,
353,
8840,
836,
8,
341,
197,
322,
18626,
57378,
448,
1638,
10272,
3658,
323,
30281,
42112,
198,
59440,
261,
1669,
22191,
261,
515,
197,
25873,
25,
5532,
515,
298,
197,
44220,
3658,
25,
3056,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestElderLedger(t *testing.T) {
tt := test.Start(t).Scenario("base")
defer tt.Finish()
q := &Q{tt.HorizonSession()}
var seq int
err := q.ElderLedger(&seq)
if tt.Assert.NoError(err) {
tt.Assert.Equal(1, seq)
}
} | explode_data.jsonl/7260 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 105
} | [
2830,
3393,
36,
76869,
60850,
1389,
1155,
353,
8840,
836,
8,
341,
3244,
83,
1669,
1273,
12101,
1155,
568,
54031,
445,
3152,
1138,
16867,
17853,
991,
18176,
741,
18534,
1669,
609,
48,
90,
5566,
3839,
269,
16973,
5283,
368,
630,
2405,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestGetBalanceHistory(t *testing.T) {
t.Parallel()
_, err := b.GetBalanceHistory("USD", time.Time{}, time.Time{}, 1, "deposit")
if err == nil {
t.Error("Test Failed - GetBalanceHistory() error")
}
} | explode_data.jsonl/79955 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 78
} | [
2830,
3393,
1949,
21190,
13424,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
197,
6878,
1848,
1669,
293,
2234,
21190,
13424,
445,
26749,
497,
882,
16299,
22655,
882,
16299,
22655,
220,
16,
11,
330,
60514,
1138,
743,
1848,
621,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestConsulFingerprint(t *testing.T) {
fp := NewConsulFingerprint(testlog.HCLogger(t))
node := &structs.Node{
Attributes: make(map[string]string),
}
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
fmt.Fprintln(w, mockConsulResponse)
}))
defer ts.Close()
conf := config.DefaultConfig()
conf.ConsulConfig.Addr = strings.TrimPrefix(ts.URL, "http://")
request := &FingerprintRequest{Config: conf, Node: node}
var response FingerprintResponse
err := fp.Fingerprint(request, &response)
if err != nil {
t.Fatalf("Failed to fingerprint: %s", err)
}
if !response.Detected {
t.Fatalf("expected response to be applicable")
}
assertNodeAttributeContains(t, response.Attributes, "consul.server")
assertNodeAttributeContains(t, response.Attributes, "consul.version")
assertNodeAttributeContains(t, response.Attributes, "consul.revision")
assertNodeAttributeContains(t, response.Attributes, "unique.consul.name")
assertNodeAttributeContains(t, response.Attributes, "consul.datacenter")
if _, ok := response.Links["consul"]; !ok {
t.Errorf("Expected a link to consul, none found")
}
} | explode_data.jsonl/67917 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 417
} | [
2830,
3393,
15220,
360,
37,
47918,
1155,
353,
8840,
836,
8,
341,
65219,
1669,
1532,
15220,
360,
37,
47918,
8623,
839,
3839,
34,
7395,
1155,
1171,
20831,
1669,
609,
1235,
82,
21714,
515,
197,
197,
10516,
25,
1281,
9147,
14032,
30953,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReconcileWithFailingConditionChecks(t *testing.T) {
// TestReconcileWithFailingConditionChecks runs "Reconcile" on a PipelineRun that has a task with
// multiple conditions, some that fails. It verifies that reconcile is successful, taskruns are
// created and the status is updated. It checks that the correct events are sent.
names.TestingSeed()
conditions := []*v1alpha1.Condition{parse.MustParseCondition(t, `
metadata:
name: always-false
namespace: foo
spec:
check:
args:
- bar
image: foo
`)}
pipelineRunName := "test-pipeline-run-with-conditions"
prccs := make(map[string]*v1beta1.PipelineRunConditionCheckStatus)
conditionCheckName := pipelineRunName + "task-2-always-false"
prccs[conditionCheckName] = &v1beta1.PipelineRunConditionCheckStatus{
ConditionName: "always-false-0",
Status: &v1beta1.ConditionCheckStatus{},
}
ps := []*v1beta1.Pipeline{parse.MustParsePipeline(t, `
metadata:
name: test-pipeline
namespace: foo
spec:
tasks:
- name: task-1
taskRef:
name: hello-world
- conditions:
- conditionRef: always-false
name: task-2
taskRef:
name: hello-world
- name: task-3
runAfter:
- task-1
taskRef:
name: hello-world
`)}
prs := []*v1beta1.PipelineRun{parse.MustParsePipelineRun(t, `
metadata:
annotations:
PipelineRunAnnotation: PipelineRunValue
name: test-pipeline-run-with-conditions
namespace: foo
spec:
pipelineRef:
name: test-pipeline
serviceAccountName: test-sa
status:
conditions:
- message: Not all Tasks in the Pipeline have finished executing
reason: Running
status: Unknown
type: Succeeded
taskRuns:
test-pipeline-run-with-conditionstask-1:
pipelineTaskName: task-1
test-pipeline-run-with-conditionstask-2:
conditionChecks:
test-pipeline-run-with-conditionstask-2-always-false:
conditionName: always-false-0
status:
check: {}
pipelineTaskName: task-2
`)}
ts := []*v1beta1.Task{simpleHelloWorldTask}
trs := []*v1beta1.TaskRun{
parse.MustParseTaskRun(t, `
metadata:
labels:
tekton.dev/memberOf: tasks
tekton.dev/pipeline: test-pipeine-run-with-conditions
tekton.dev/pipelineRun: test-pipeline
name: test-pipeline-run-with-conditionstask-1
namespace: foo
ownerReferences:
- kind: kind
name: name
spec:
taskRef:
name: hello-world
status:
conditions:
- lastTransitionTime: null
status: "True"
type: Succeeded
`),
parse.MustParseTaskRun(t, `
metadata:
labels:
tekton.dev/conditionCheck: test-pipeline-run-with-conditionstask-2-always-false
tekton.dev/conditionName: always-false
tekton.dev/pipeline: test-pipeine-run-with-conditions
tekton.dev/pipelineRun: test-pipeline
name: test-pipeline-run-with-conditionstask-2-always-false
namespace: foo
ownerReferences:
- kind: kind
name: name
spec:
taskSpec: {}
status:
conditions:
- lastTransitionTime: null
status: "False"
type: Succeeded
`),
}
d := test.Data{
PipelineRuns: prs,
Pipelines: ps,
Tasks: ts,
TaskRuns: trs,
Conditions: conditions,
}
prt := newPipelineRunTest(d, t)
defer prt.Cancel()
wantEvents := []string{
"Normal Started",
"Normal Running Tasks Completed: 1 \\(Failed: 0, Cancelled 0\\), Incomplete: 1, Skipped: 1",
}
_, clients := prt.reconcileRun("foo", pipelineRunName, wantEvents, false)
actions := clients.Pipeline.Actions()
if len(actions) < 2 {
t.Fatalf("Expected client to have at least two action implementation but it has %d", len(actions))
}
// Check that the expected TaskRun was created
actual := getTaskRunCreations(t, actions)[0]
expectedTaskRunObjectMeta := taskRunObjectMeta("test-pipeline-run-with-conditions-task-3", "foo", "test-pipeline-run-with-conditions", "test-pipeline", "task-3", false)
expectedTaskRunObjectMeta.Annotations["PipelineRunAnnotation"] = "PipelineRunValue"
expectedTaskRun := mustParseTaskRunWithObjectMeta(t, expectedTaskRunObjectMeta, `
spec:
resources: {}
serviceAccountName: test-sa
taskRef:
name: hello-world
timeout: 1h0m0s
`)
if d := cmp.Diff(actual, expectedTaskRun, ignoreTypeMeta); d != "" {
t.Errorf("expected to see ConditionCheck TaskRun %v created. Diff %s", expectedTaskRun, diff.PrintWantGot(d))
}
} | explode_data.jsonl/27311 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1657
} | [
2830,
3393,
693,
40446,
457,
2354,
37,
14277,
10547,
49820,
1155,
353,
8840,
836,
8,
341,
197,
322,
3393,
693,
40446,
457,
2354,
37,
14277,
10547,
49820,
8473,
330,
693,
40446,
457,
1,
389,
264,
40907,
6727,
429,
702,
264,
3383,
448,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestNodes(t *testing.T) {
t.Parallel()
ctx, cancel := testAllocate(t, "table.html")
defer cancel()
tests := []struct {
sel string
by QueryOption
n int
}{
{`/html/body/table/tbody[1]/tr[2]/td`, BySearch, 3},
{`body > table > tbody:nth-child(2) > tr:nth-child(2) > td:not(:last-child)`, ByQueryAll, 2},
{`body > table > tbody:nth-child(2) > tr:nth-child(2) > td`, ByQuery, 1},
{`#footer`, ByID, 1},
{`document.querySelector("body > table > tbody:nth-child(2) > tr:nth-child(2) > td:nth-child(1)")`, ByJSPath, 1},
}
for i, test := range tests {
var nodes []*cdp.Node
if err := Run(ctx, Nodes(test.sel, &nodes, test.by)); err != nil {
t.Fatalf("test %d got error: %v", i, err)
}
if len(nodes) != test.n {
t.Errorf("test %d expected to have %d nodes: got %d", i, test.n, len(nodes))
}
}
} | explode_data.jsonl/59464 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 388
} | [
2830,
3393,
12288,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
20985,
11,
9121,
1669,
1273,
75380,
1155,
11,
330,
2005,
2564,
1138,
16867,
9121,
2822,
78216,
1669,
3056,
1235,
341,
197,
1903,
301,
914,
198,
197,
197,
1694,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestSubscriptions(t *testing.T) {
t.Run("Get subscription list test", func(t *testing.T) {
Init(clientId)
_, err := Subscriptions(topicName)
if err != nil {
t.Error(err)
} else {
log.Println("Get subscription list test is successful.")
}
})
} | explode_data.jsonl/4858 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 104
} | [
2830,
3393,
3136,
29966,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
1949,
15142,
1140,
1273,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
98762,
12805,
764,
692,
197,
197,
6878,
1848,
1669,
3719,
29966,
43839,
675,
692,
197,
743... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestSpanStatus_Code(t *testing.T) {
ms := NewSpanStatus()
assert.EqualValues(t, StatusCode(0), ms.Code())
testValCode := StatusCode(1)
ms.SetCode(testValCode)
assert.EqualValues(t, testValCode, ms.Code())
} | explode_data.jsonl/63303 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 85
} | [
2830,
3393,
12485,
2522,
38084,
1155,
353,
8840,
836,
8,
341,
47691,
1669,
1532,
12485,
2522,
741,
6948,
12808,
6227,
1155,
11,
53403,
7,
15,
701,
9829,
20274,
2398,
18185,
2208,
2078,
1669,
53403,
7,
16,
340,
47691,
4202,
2078,
8623,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestCreateRun(t *testing.T) {
logger := zerolog.New(os.Stderr)
fakeStorer := &fakes.FakeStorer{}
imagePullOutput := &bytes.Buffer{}
imagePullOutput.WriteString("success")
logsOutput := &bytes.Buffer{}
logsOutput.WriteString("I haz logs.")
containerOkWaitChannel := make(chan containertypes.ContainerWaitOKBody)
createOutput := containertypes.ContainerCreateCreatedBody{
ID: "new-container-id",
}
apiClient := &mockDockerClient{
imagePullOutput: io.NopCloser(imagePullOutput),
createOutput: createOutput,
logsOutput: io.NopCloser(logsOutput),
containerOkChan: containerOkWaitChannel,
}
r := Runner{
Dependencies: Dependencies{
Storer: fakeStorer,
Logger: logger,
},
Config: Config{
DefaultMaximumCommandRuntime: 15,
},
cli: apiClient,
}
fakeStorer.GetReturns(&models.Plugin{
ID: 1,
Name: "test",
Type: models.Container,
Container: &models.ContainerPlugin{
Image: "test-image",
},
}, nil)
go func() {
apiClient.containerOkChan <- containertypes.ContainerWaitOKBody{
StatusCode: 0,
}
}()
err := r.Run(context.Background(), "test", []string{"arg1", "arg2"})
assert.NoError(t, err)
} | explode_data.jsonl/20590 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 441
} | [
2830,
3393,
4021,
6727,
1155,
353,
8840,
836,
8,
341,
17060,
1669,
76178,
1609,
7121,
9638,
77319,
340,
1166,
726,
623,
14827,
1669,
609,
69,
2050,
991,
726,
623,
14827,
16094,
31426,
36068,
5097,
1669,
609,
9651,
22622,
16094,
31426,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLastHasNoEffectGivenLessThanNInputLines(t *testing.T) {
t.Parallel()
input := "a\nb\nc\n"
want := "a\nb\nc\n"
got, err := script.Echo(input).Last(4).String()
if err != nil {
t.Fatal(err)
}
if want != got {
t.Error(cmp.Diff(want, got))
}
} | explode_data.jsonl/51492 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 121
} | [
2830,
3393,
5842,
10281,
2753,
7738,
22043,
27451,
26067,
45,
2505,
16794,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
22427,
1669,
330,
64,
1699,
65,
59,
1016,
1699,
698,
50780,
1669,
330,
64,
1699,
65,
59,
1016,
1699,
698,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestIn(t *testing.T) {
const record = "{a:[1 (int32),2 (int32),3 (int32)] (=0),s:|[4 (int32),5 (int32),6 (int32)]| (=1)} (=2)"
testSuccessful(t, "1 in a", record, zbool(true))
testSuccessful(t, "0 in a", record, zbool(false))
testSuccessful(t, "1 in s", record, zbool(false))
testSuccessful(t, "4 in s", record, zbool(true))
testSuccessful(t, `"boo" in a`, record, zbool(false))
testSuccessful(t, `"boo" in s`, record, zbool(false))
} | explode_data.jsonl/2310 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 191
} | [
2830,
3393,
641,
1155,
353,
8840,
836,
8,
341,
4777,
3255,
284,
13868,
64,
7259,
16,
320,
396,
18,
17,
701,
17,
320,
396,
18,
17,
701,
18,
320,
396,
18,
17,
7252,
38738,
15,
701,
82,
25,
74723,
19,
320,
396,
18,
17,
701,
20,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEntry_SelectAll(t *testing.T) {
e, window := setupImageTest(t, true)
defer teardownImageTest(window)
c := window.Canvas()
c.Focus(e)
e.SetText("First Row\nSecond Row\nThird Row")
test.AssertRendersToMarkup(t, "entry/select_all_initial.xml", c)
shortcut := &fyne.ShortcutSelectAll{}
e.TypedShortcut(shortcut)
assert.Equal(t, 2, e.CursorRow)
assert.Equal(t, 9, e.CursorColumn)
test.AssertRendersToMarkup(t, "entry/select_all_selected.xml", c)
} | explode_data.jsonl/12353 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 192
} | [
2830,
3393,
5874,
58073,
2403,
1155,
353,
8840,
836,
8,
341,
7727,
11,
3241,
1669,
6505,
1906,
2271,
1155,
11,
830,
340,
16867,
49304,
1906,
2271,
15906,
340,
1444,
1669,
3241,
54121,
2822,
1444,
40141,
2026,
340,
7727,
92259,
445,
5338... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestInstrumentationLibraryMetrics_CopyTo(t *testing.T) {
ms := NewInstrumentationLibraryMetrics()
generateTestInstrumentationLibraryMetrics().CopyTo(ms)
assert.EqualValues(t, generateTestInstrumentationLibraryMetrics(), ms)
} | explode_data.jsonl/32677 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 71
} | [
2830,
3393,
56324,
367,
16915,
27328,
77637,
1249,
1155,
353,
8840,
836,
8,
341,
47691,
1669,
1532,
56324,
367,
16915,
27328,
741,
3174,
13220,
2271,
56324,
367,
16915,
27328,
1005,
12106,
1249,
35680,
340,
6948,
12808,
6227,
1155,
11,
69... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestSOCKS5(t *testing.T) {
endSystem, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
t.Fatalf("net.Listen failed: %v", err)
}
defer endSystem.Close()
gateway, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
t.Fatalf("net.Listen failed: %v", err)
}
defer gateway.Close()
var wg sync.WaitGroup
wg.Add(1)
go socks5Gateway(t, gateway, endSystem, socks5IP4, &wg)
proxy, err := SOCKS5("tcp", gateway.Addr().String(), nil, Direct)
if err != nil {
t.Fatalf("SOCKS5 failed: %v", err)
}
if c, err := proxy.Dial("tcp", endSystem.Addr().String()); err != nil {
t.Fatalf("SOCKS5.Dial failed: %v", err)
} else {
c.Close()
}
wg.Wait()
} | explode_data.jsonl/72578 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 304
} | [
2830,
3393,
13880,
3021,
50,
20,
1155,
353,
8840,
836,
8,
341,
6246,
2320,
11,
1848,
1669,
4179,
68334,
445,
27161,
497,
330,
16,
17,
22,
13,
15,
13,
15,
13,
16,
25,
15,
1138,
743,
1848,
961,
2092,
341,
197,
3244,
30762,
445,
47... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.