text stringlengths 93 16.4k | id stringlengths 20 40 | metadata dict | input_ids listlengths 45 2.05k | attention_mask listlengths 45 2.05k | complexity int64 1 9 |
|---|---|---|---|---|---|
func TestPathComponents(t *testing.T) {
var tests = []struct {
p string
c []string
virtual bool
rel bool
win bool
}{
{
p: "/foo/bar/baz",
c: []string{"foo", "bar", "baz"},
},
{
p: "/foo/bar/baz",
c: []string{"foo", "bar", "baz"},
rel: true,
},
{
p: "foo/bar/baz",
c: []string{"foo", "bar", "baz"},
},
{
p: "foo/bar/baz",
c: []string{"foo", "bar", "baz"},
rel: true,
},
{
p: "../foo/bar/baz",
c: []string{"foo", "bar", "baz"},
},
{
p: "../foo/bar/baz",
c: []string{"..", "foo", "bar", "baz"},
rel: true,
},
{
p: "c:/foo/bar/baz",
c: []string{"c", "foo", "bar", "baz"},
virtual: true,
rel: true,
win: true,
},
{
p: "c:/foo/../bar/baz",
c: []string{"c", "bar", "baz"},
virtual: true,
win: true,
},
{
p: `c:\foo\..\bar\baz`,
c: []string{"c", "bar", "baz"},
virtual: true,
win: true,
},
{
p: "c:/foo/../bar/baz",
c: []string{"c", "bar", "baz"},
virtual: true,
rel: true,
win: true,
},
{
p: `c:\foo\..\bar\baz`,
c: []string{"c", "bar", "baz"},
virtual: true,
rel: true,
win: true,
},
}
for _, test := range tests {
t.Run("", func(t *testing.T) {
if test.win && runtime.GOOS != "windows" {
t.Skip("skip test on unix")
}
c, v := pathComponents(fs.Local{}, filepath.FromSlash(test.p), test.rel)
if !cmp.Equal(test.c, c) {
t.Error(test.c, c)
}
if v != test.virtual {
t.Errorf("unexpected virtual prefix count returned, want %v, got %v", test.virtual, v)
}
})
}
} | explode_data.jsonl/2036 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 960
} | [
2830,
3393,
1820,
10443,
1155,
353,
8840,
836,
8,
341,
2405,
7032,
284,
3056,
1235,
341,
197,
3223,
981,
914,
198,
197,
1444,
981,
3056,
917,
198,
197,
9558,
1807,
198,
197,
197,
3748,
257,
1807,
198,
197,
68452,
257,
1807,
198,
197... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestAssignUnderscore(t *testing.T) {
gopClTest(t, `import log "fmt"
_, err := log.Println("Hello")
`, `package main
import fmt "fmt"
func main() {
_, err := fmt.Println("Hello")
}
`)
} | explode_data.jsonl/73692 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 83
} | [
2830,
3393,
28933,
19957,
388,
2153,
1155,
353,
8840,
836,
8,
341,
3174,
453,
5066,
2271,
1155,
11,
1565,
474,
1487,
330,
12501,
1837,
6878,
1848,
1669,
1487,
12419,
445,
9707,
1138,
7808,
1565,
1722,
1887,
271,
474,
8879,
330,
12501,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestUpdateClusterTemplateEmptyTime(t *testing.T) {
th.SetupHTTP()
defer th.TeardownHTTP()
HandleUpdateClusterTemplateEmptyTimeSuccessfully(t)
updateOpts := []clustertemplates.UpdateOptsBuilder{
clustertemplates.UpdateOpts{
Op: clustertemplates.ReplaceOp,
Path: "/master_lb_enabled",
Value: "True",
},
clustertemplates.UpdateOpts{
Op: clustertemplates.ReplaceOp,
Path: "/registry_enabled",
Value: "True",
},
}
sc := fake.ServiceClient()
sc.Endpoint = sc.Endpoint + "v1/"
actual, err := clustertemplates.Update(sc, "7d85f602-a948-4a30-afd4-e84f47471c15", updateOpts).Extract()
th.AssertNoErr(t, err)
th.AssertDeepEquals(t, ExpectedUpdateClusterTemplate_EmptyTime, *actual)
} | explode_data.jsonl/20369 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 299
} | [
2830,
3393,
4289,
28678,
7275,
3522,
1462,
1155,
353,
8840,
836,
8,
341,
70479,
39820,
9230,
741,
16867,
270,
94849,
37496,
9230,
2822,
197,
6999,
4289,
28678,
7275,
3522,
1462,
35959,
1155,
692,
27175,
43451,
1669,
3056,
564,
590,
529,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestBuildTrafficConfigurationVanillaScaledToZero(t *testing.T) {
tts := v1.TrafficTarget{
ConfigurationName: inactiveConfig.Name,
Percent: ptr.Int64(100),
}
expected := &Config{
Targets: map[string]RevisionTargets{
DefaultTarget: {{
TrafficTarget: v1.TrafficTarget{
ConfigurationName: inactiveConfig.Name,
RevisionName: inactiveRev.Name,
Percent: ptr.Int64(100),
LatestRevision: ptr.Bool(true),
},
Active: false,
Protocol: net.ProtocolHTTP1,
}},
},
revisionTargets: []RevisionTarget{{
TrafficTarget: v1.TrafficTarget{
ConfigurationName: inactiveConfig.Name,
RevisionName: inactiveRev.Name,
Percent: ptr.Int64(100),
LatestRevision: ptr.Bool(true),
},
Active: false,
Protocol: net.ProtocolHTTP1,
}},
Configurations: map[string]*v1.Configuration{
inactiveConfig.Name: inactiveConfig,
},
Revisions: map[string]*v1.Revision{
inactiveRev.Name: inactiveRev,
},
}
if tc, err := BuildTrafficConfiguration(configLister, revLister, testRouteWithTrafficTargets(WithSpecTraffic(tts))); err != nil {
t.Error("Unexpected error", err)
} else if got, want := tc, expected; !cmp.Equal(want, got, cmpOpts...) {
t.Error("Unexpected traffic diff (-want +got):", cmp.Diff(want, got, cmpOpts...))
}
} | explode_data.jsonl/37873 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 566
} | [
2830,
3393,
11066,
87229,
7688,
45224,
6241,
94201,
1249,
17999,
1155,
353,
8840,
836,
8,
341,
3244,
2576,
1669,
348,
16,
836,
956,
20615,
6397,
515,
197,
197,
7688,
675,
25,
31799,
2648,
2967,
345,
197,
197,
32010,
25,
1843,
10087,
7... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAssetUploadServiceJobPostsubmit(t *testing.T) {
// WHEN
jobConfig, err := tester.ReadJobConfig("./../../../../prow/jobs/kyma/components/asset-upload-service/asset-upload-service.yaml")
// THEN
require.NoError(t, err)
assert.Len(t, jobConfig.Postsubmits, 1)
kymaPost, ex := jobConfig.Postsubmits["kyma-project/kyma"]
assert.True(t, ex)
assert.Len(t, kymaPost, 1)
expName := "post-master-kyma-components-asset-upload-service"
actualPost := tester.FindPostsubmitJobByName(kymaPost, expName, "master")
require.NotNil(t, actualPost)
assert.Equal(t, expName, actualPost.Name)
assert.Equal(t, []string{"master"}, actualPost.Branches)
assert.Equal(t, 10, actualPost.MaxConcurrency)
assert.True(t, actualPost.Decorate)
assert.Equal(t, "github.com/kyma-project/kyma", actualPost.PathAlias)
tester.AssertThatHasExtraRefTestInfra(t, actualPost.JobBase.UtilityConfig, "master")
tester.AssertThatHasPresets(t, actualPost.JobBase, tester.PresetDindEnabled, tester.PresetDockerPushRepo, tester.PresetGcrPush, tester.PresetBuildMaster)
assert.Equal(t, "^components/asset-upload-service/", actualPost.RunIfChanged)
tester.AssertThatJobRunIfChanged(t, *actualPost, "components/asset-upload-service/some_random_file.go")
tester.AssertThatExecGolangBuildpack(t, actualPost.JobBase, tester.ImageGolangBuildpack1_11, "/home/prow/go/src/github.com/kyma-project/kyma/components/asset-upload-service")
} | explode_data.jsonl/61138 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 530
} | [
2830,
3393,
16604,
13844,
1860,
12245,
4133,
5955,
1155,
353,
8840,
836,
8,
341,
197,
322,
33633,
198,
68577,
2648,
11,
1848,
1669,
37111,
6503,
12245,
2648,
13988,
84257,
79,
651,
4437,
5481,
14109,
1600,
64,
20261,
14,
9852,
47169,
23... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRedis_BitCount(t *testing.T) {
runOnRedis(t, func(client *Redis) {
for i := 0; i < 11; i++ {
err := client.SetBit("key", int64(i), 1)
assert.Nil(t, err)
}
_, err := NewRedis(client.Addr, "").BitCount("key", 0, -1)
assert.NotNil(t, err)
val, err := client.BitCount("key", 0, -1)
assert.Nil(t, err)
assert.Equal(t, int64(11), val)
val, err = client.BitCount("key", 0, 0)
assert.Nil(t, err)
assert.Equal(t, int64(8), val)
val, err = client.BitCount("key", 1, 1)
assert.Nil(t, err)
assert.Equal(t, int64(3), val)
val, err = client.BitCount("key", 0, 1)
assert.Nil(t, err)
assert.Equal(t, int64(11), val)
val, err = client.BitCount("key", 2, 2)
assert.Nil(t, err)
assert.Equal(t, int64(0), val)
})
} | explode_data.jsonl/39173 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 366
} | [
2830,
3393,
48137,
1668,
275,
2507,
1155,
353,
8840,
836,
8,
341,
56742,
1925,
48137,
1155,
11,
2915,
12805,
353,
48137,
8,
341,
197,
2023,
600,
1669,
220,
15,
26,
600,
366,
220,
16,
16,
26,
600,
1027,
341,
298,
9859,
1669,
2943,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func Test_gitHubReleaseAdapter_Validate(t *testing.T) {
type args struct {
cfg monitor.AdapterConfig
}
tests := []struct {
name string
args args
wantErr error
}{
{
name: "no_repo_set",
args: args{
cfg: monitor.AdapterConfig{GitHubRelease: monitor.GitHubRelease{
Owner: "test",
Repo: "",
}},
},
wantErr: ErrRepoEmpty,
},
{
name: "no_owner_set",
args: args{
cfg: monitor.AdapterConfig{GitHubRelease: monitor.GitHubRelease{
Owner: "",
Repo: "test",
}},
},
wantErr: ErrOwnerEmpty,
},
{
name: "all_set",
args: args{
cfg: monitor.AdapterConfig{GitHubRelease: monitor.GitHubRelease{
Owner: "test",
Repo: "test",
}},
},
wantErr: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := newGitHubReleaseAdapter(nil, nil)
if err := a.Validate(tt.args.cfg); err != tt.wantErr {
t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
} | explode_data.jsonl/40577 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 492
} | [
2830,
3393,
68801,
19316,
16077,
5940,
62,
17926,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
50286,
8718,
34190,
2648,
198,
197,
532,
78216,
1669,
3056,
1235,
341,
197,
11609,
262,
914,
198,
197,
31215,
262,
2827,
198,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestRuneWrapNeeded(t *testing.T) {
tests := []struct {
desc string
r rune
posX int
width int
want bool
}{
{
desc: "half-width rune, falls within canvas",
r: 'a',
posX: 2,
width: 3,
want: false,
},
{
desc: "full-width rune, falls within canvas",
r: '世',
posX: 1,
width: 3,
want: false,
},
{
desc: "half-width rune, falls outside of canvas, wrapping configured",
r: 'a',
posX: 3,
width: 3,
want: true,
},
{
desc: "full-width rune, starts in and falls outside of canvas, wrapping configured",
r: '世',
posX: 3,
width: 3,
want: true,
},
{
desc: "full-width rune, starts outside of canvas, wrapping configured",
r: '世',
posX: 3,
width: 3,
want: true,
},
{
desc: "doesn't wrap for newline characters",
r: '\n',
posX: 3,
width: 3,
want: false,
},
}
for _, tc := range tests {
t.Run(tc.desc, func(t *testing.T) {
got := runeWrapNeeded(tc.r, tc.posX, tc.width)
if got != tc.want {
t.Errorf("runeWrapNeeded => got %v, want %v", got, tc.want)
}
})
}
} | explode_data.jsonl/62777 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 581
} | [
2830,
3393,
49,
2886,
26787,
56706,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
41653,
220,
914,
198,
197,
7000,
257,
63499,
198,
197,
28164,
55,
220,
526,
198,
197,
24219,
526,
198,
197,
50780,
220,
1807,
198,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestWriteResponse(t *testing.T) {
testRecorder := httptest.NewRecorder()
writeResponse(testRecorder, testMessage, nil)
if testRecorder.Code != statusOk {
t.Fail()
t.Logf(fmt.Sprint("expected: ", statusOk, ", found: ", testRecorder.Code))
}
var resultMessage string
errResultMessage := json.NewDecoder(testRecorder.Body).Decode(&resultMessage)
if errResultMessage != nil {
t.Fail()
t.Logf(errResultMessage.Error())
}
if resultMessage != testMessage {
t.Fail()
t.Logf(fmt.Sprint("expected: ", testMessage, ", found: ", resultMessage))
}
} | explode_data.jsonl/48435 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 203
} | [
2830,
3393,
7985,
2582,
1155,
353,
8840,
836,
8,
341,
18185,
47023,
1669,
54320,
70334,
7121,
47023,
741,
24945,
2582,
8623,
47023,
11,
1273,
2052,
11,
2092,
692,
743,
1273,
47023,
20274,
961,
2639,
11578,
341,
197,
3244,
57243,
741,
19... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestNaNTest(t *testing.T) {
for _, nkind := range []number.Kind{number.Float64Kind, number.Int64Kind} {
t.Run(nkind.String(), func(t *testing.T) {
for _, mkind := range []sdkapi.InstrumentKind{
sdkapi.CounterInstrumentKind,
sdkapi.HistogramInstrumentKind,
sdkapi.GaugeObserverInstrumentKind,
} {
desc := metric.NewDescriptor(
"name",
mkind,
nkind,
)
testRangeNaN(t, &desc)
}
})
}
} | explode_data.jsonl/23272 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 210
} | [
2830,
3393,
22831,
2271,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
308,
15314,
1669,
2088,
3056,
4082,
54199,
90,
4082,
29794,
21,
19,
10629,
11,
1372,
7371,
21,
19,
10629,
92,
341,
197,
3244,
16708,
1445,
15314,
6431,
1507,
2915,
115... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestCredentialsRequestAzureReconcile(t *testing.T) {
schemeutils.SetupScheme(scheme.Scheme)
codec, err := minterv1.NewCodec()
if err != nil {
fmt.Printf("error creating codec: %v", err)
t.FailNow()
return
}
tests := []struct {
name string
existing []runtime.Object
expectErr bool
mockAzureAppClient func(mockCtrl *gomock.Controller) *mockazure.MockAppClient
validate func(client.Client, *testing.T)
// Expected conditions on the credentials request:
expectedConditions []ExpectedCondition
// Expected conditions on the credentials cluster operator:
expectedCOConditions []ExpectedCOCondition
}{
{
name: "new credential",
existing: []runtime.Object{
testOperatorConfig(""),
createTestNamespace(testNamespace),
createTestNamespace(testSecretNamespace),
testAzureCredsSecret(constants.CloudCredSecretNamespace, constants.AzureCloudCredSecretName),
testAzureCredentialsRequest(t),
},
mockAzureAppClient: func(mockCtrl *gomock.Controller) *mockazure.MockAppClient {
return mockazure.NewMockAppClient(mockCtrl)
},
validate: func(c client.Client, t *testing.T) {
targetSecret := getCredRequestTargetSecret(c)
require.NotNil(t, targetSecret, "expected non-empty target secret to exist")
// most of these checks are done at the actuator-specific testing, so just a high level sanity check here...
assert.Equal(t, testAzureClientID, string(targetSecret.Data[azureactuator.AzureClientID]), "unexpected AzureClientID field set in target secret")
cr := getCredRequest(c)
assert.NotNil(t, cr)
assert.True(t, cr.Status.Provisioned)
assert.Equal(t, int64(testCRGeneration), int64(cr.Status.LastSyncGeneration))
assert.NotNil(t, cr.Status.LastSyncTimestamp)
},
},
{
name: "orphaned cloud resources",
existing: []runtime.Object{
testOperatorConfig(""),
createTestNamespace(testNamespace),
createTestNamespace(testSecretNamespace),
testAzureCredsSecret(constants.CloudCredSecretNamespace, constants.AzureCloudCredSecretName),
testAzureCredentialsRequestNeedingCleanup(t),
testAzureTargetSecret(testSecretNamespace, testSecretName, "mintedAzureClientID"),
},
mockAzureAppClient: func(mockCtrl *gomock.Controller) *mockazure.MockAppClient {
mockAzureAppClient := mockazure.NewMockAppClient(mockCtrl)
mockAzureAppClient.EXPECT().List(gomock.Any(), gomock.Any()).Return(
[]graphrbac.Application{}, fmt.Errorf("Azure AD Graph API has been sunset"),
)
// No Delete() call b/c of List() error above
return mockAzureAppClient
},
validate: func(c client.Client, t *testing.T) {
targetSecret := getCredRequestTargetSecret(c)
require.NotNil(t, targetSecret, "expected non-empty target secret to exist")
// most of these checks are done at the actuator-specific testing, so just a high level sanity check here...
assert.Equal(t, testAzureClientID, string(targetSecret.Data[azureactuator.AzureClientID]), "unexpected AzureClientID field set in target secret")
cr := getCredRequest(c)
assert.NotNil(t, cr)
assert.True(t, cr.Status.Provisioned)
assert.Equal(t, int64(testCRGeneration), int64(cr.Status.LastSyncGeneration))
assert.NotNil(t, cr.Status.LastSyncTimestamp)
},
expectedConditions: []ExpectedCondition{
{
conditionType: minterv1.OrphanedCloudResource,
reason: cloudResourceOrphaned,
status: corev1.ConditionTrue,
},
},
},
{
name: "clear orphaned cloud resources condition",
existing: []runtime.Object{
testOperatorConfig(""),
createTestNamespace(testNamespace),
createTestNamespace(testSecretNamespace),
testAzureCredsSecret(constants.CloudCredSecretNamespace, constants.AzureCloudCredSecretName),
testAzureCredentialsRequestWithOrphanedCloudResource(t),
testAzureTargetSecret(testSecretNamespace, testSecretName, testAzureClientID),
},
mockAzureAppClient: func(mockCtrl *gomock.Controller) *mockazure.MockAppClient {
mockAzureAppClient := mockazure.NewMockAppClient(mockCtrl)
mockAzureAppClient.EXPECT().List(gomock.Any(), gomock.Any()).Return(
[]graphrbac.Application{testAzureMintedAppRegistration}, nil,
)
mockAzureAppClient.EXPECT().Delete(gomock.Any(), testAzureAppRegObjectID)
return mockAzureAppClient
},
validate: func(c client.Client, t *testing.T) {
targetSecret := getCredRequestTargetSecret(c)
require.NotNil(t, targetSecret, "expected non-empty target secret to exist")
// most of these checks are done at the actuator-specific testing, so just a high level sanity check here...
assert.Equal(t, testAzureClientID, string(targetSecret.Data[azureactuator.AzureClientID]), "unexpected AzureClientID field set in target secret")
cr := getCredRequest(c)
assert.NotNil(t, cr)
assert.True(t, cr.Status.Provisioned)
assert.Equal(t, int64(testCRGeneration), int64(cr.Status.LastSyncGeneration))
assert.NotNil(t, cr.Status.LastSyncTimestamp)
},
expectedConditions: []ExpectedCondition{
{
conditionType: minterv1.OrphanedCloudResource,
reason: cloudResourceCleaned,
status: corev1.ConditionFalse,
},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
mockAzureAppClient := mockazure.NewMockAppClient(mockCtrl)
if test.mockAzureAppClient != nil {
mockAzureAppClient = test.mockAzureAppClient(mockCtrl)
}
fakeClient := fake.NewClientBuilder().WithRuntimeObjects(test.existing...).Build()
azureActuator := azureactuator.NewFakeActuator(
fakeClient,
codec,
func(logger log.FieldLogger, clientID, clientSecret, tenantID, subscriptionID string) (*azureactuator.AzureCredentialsMinter, error) {
return azureactuator.NewFakeAzureCredentialsMinter(logger,
clientID,
clientSecret,
tenantID,
subscriptionID,
mockAzureAppClient,
)
},
)
rcr := &ReconcileCredentialsRequest{
Client: fakeClient,
Actuator: azureActuator,
platformType: configv1.AzurePlatformType,
}
_, err := rcr.Reconcile(context.TODO(), reconcile.Request{
NamespacedName: types.NamespacedName{
Name: testCRName,
Namespace: testNamespace,
},
})
if test.validate != nil {
test.validate(fakeClient, t)
}
if err != nil && !test.expectErr {
t.Errorf("Unexpected error: %v", err)
}
if err == nil && test.expectErr {
t.Errorf("Expected error but got none")
}
cr := getCredRequest(fakeClient)
assert.Equal(t, len(test.expectedConditions), len(cr.Status.Conditions), "number of expected conditions doesn't match actual number of conditions")
for _, condition := range test.expectedConditions {
foundCondition := utils.FindCredentialsRequestCondition(cr.Status.Conditions, condition.conditionType)
assert.NotNil(t, foundCondition)
assert.Exactly(t, condition.status, foundCondition.Status)
assert.Exactly(t, condition.reason, foundCondition.Reason)
}
if test.expectedCOConditions != nil {
logger := log.WithFields(log.Fields{"controller": controllerName})
currentConditions, err := rcr.GetConditions(logger)
require.NoError(t, err, "failed getting conditions")
for _, expectedCondition := range test.expectedCOConditions {
foundCondition := utils.FindClusterOperatorCondition(currentConditions, expectedCondition.conditionType)
require.NotNil(t, foundCondition)
assert.Equal(t, string(expectedCondition.status), string(foundCondition.Status), "condition %s had unexpected status", expectedCondition.conditionType)
if expectedCondition.reason != "" {
assert.Exactly(t, expectedCondition.reason, foundCondition.Reason)
}
}
}
})
}
} | explode_data.jsonl/36215 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 3029
} | [
2830,
3393,
27025,
1900,
78107,
693,
40446,
457,
1155,
353,
8840,
836,
8,
341,
1903,
8058,
6031,
39820,
28906,
1141,
8058,
92719,
692,
43343,
66,
11,
1848,
1669,
28337,
648,
16,
7121,
36913,
741,
743,
1848,
961,
2092,
341,
197,
11009,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestAuthHeaderGetsSet(t *testing.T) {
t.Parallel()
testCases := []struct {
name string
mod func(*client)
expectedHeader http.Header
}{
{
name: "Empty token, no auth header",
mod: func(c *client) { c.getToken = func() []byte { return []byte{} } },
},
{
name: "Token, auth header",
mod: func(c *client) { c.getToken = func() []byte { return []byte("sup") } },
expectedHeader: http.Header{"Authorization": []string{"Bearer sup"}},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fake := &fakeHttpClient{}
c := &client{delegate: &delegate{client: fake}, logger: logrus.NewEntry(logrus.New())}
tc.mod(c)
if _, err := c.doRequest("POST", "/hello", "", nil); err != nil {
t.Fatalf("unexpected error: %v", err)
}
if tc.expectedHeader == nil {
tc.expectedHeader = http.Header{}
}
tc.expectedHeader["Accept"] = []string{"application/vnd.github.v3+json"}
// Bazel injects some stuff in here, exclude it from comparison so both bazel test
// and go test yield the same result.
delete(fake.received[0].Header, "User-Agent")
if diff := cmp.Diff(tc.expectedHeader, fake.received[0].Header); diff != "" {
t.Errorf("expected header differs from actual: %s", diff)
}
})
}
} | explode_data.jsonl/6306 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 543
} | [
2830,
3393,
5087,
4047,
49358,
1649,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
1843,
914,
198,
197,
42228,
310,
2915,
4071,
2972,
340,
197,
42400,
4047,
1758,
15753,
198,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestWSReadPingFrame(t *testing.T) {
for _, test := range []struct {
name string
payload []byte
}{
{"without payload", nil},
{"with payload", []byte("optional payload")},
} {
t.Run(test.name, func(t *testing.T) {
c, ri, tr := testWSSetupForRead()
ping := testWSCreateClientMsg(wsPingMessage, 1, true, false, test.payload)
rb := append([]byte(nil), ping...)
bufs, err := c.wsRead(ri, tr, rb)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
if n := len(bufs); n != 0 {
t.Fatalf("Unexpected buffer returned: %v", n)
}
// A PONG should have been queued with the payload of the ping
c.mu.Lock()
nb, _ := c.collapsePtoNB()
c.mu.Unlock()
if n := len(nb); n == 0 {
t.Fatalf("Expected buffers, got %v", n)
}
if expected := 2 + len(test.payload); expected != len(nb[0]) {
t.Fatalf("Expected buffer to be %v bytes long, got %v", expected, len(nb[0]))
}
b := nb[0][0]
if b&wsFinalBit == 0 {
t.Fatalf("Control frame should have been the final flag, it was not set: %v", b)
}
if b&byte(wsPongMessage) == 0 {
t.Fatalf("Should have been a PONG, it wasn't: %v", b)
}
if len(test.payload) > 0 {
if !bytes.Equal(nb[0][2:], test.payload) {
t.Fatalf("Unexpected content: %s", nb[0][2:])
}
}
})
}
} | explode_data.jsonl/42695 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 601
} | [
2830,
3393,
7433,
4418,
69883,
4369,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1669,
2088,
3056,
1235,
341,
197,
11609,
262,
914,
198,
197,
76272,
3056,
3782,
198,
197,
59403,
197,
197,
4913,
28996,
7729,
497,
2092,
1583,
197,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 9 |
func TestColouriseStatus(t *testing.T) {
for input, colour := range map[string]func(...interface{}) string{
"ROLLBACK_FAILED": console.Red,
"SOMETHING_ELSE_FAILED": console.Red,
"ROLLBACK_SUCCEEDED": console.Red,
"SOMETHING_ROLLBACK": console.Red,
"BANANA_IN_PROGRESS": console.Blue,
"SOMETHING_COMPLETE": console.Green,
"ANOTHER THING": console.Plain,
} {
actual := ColouriseStatus(input)
expected := colour(input)
if actual != expected {
fmt.Printf("Got '%s'. Want: '%s'.\n", actual, expected)
t.Fail()
}
}
} | explode_data.jsonl/36894 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 255
} | [
2830,
3393,
33281,
1064,
2522,
1155,
353,
8840,
836,
8,
341,
2023,
1946,
11,
12463,
1669,
2088,
2415,
14032,
60,
2830,
18850,
4970,
28875,
914,
515,
197,
197,
1,
22284,
15839,
22775,
788,
981,
2339,
20943,
345,
197,
197,
66310,
1898,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFactory(t *testing.T) {
tests := map[string]struct {
expectedGen IDGenerator
expectedErr error
}{
"elasticsearch": {
ESTimeBasedUUIDGenerator(),
nil,
},
"foobar": {
nil,
makeErrUnknownType("foobar"),
},
}
for name, test := range tests {
t.Run(name, func(t *testing.T) {
typ := name
gen, err := Factory(typ)
if test.expectedGen != nil {
assert.Equal(t, test.expectedGen, gen)
}
if test.expectedErr != nil {
assert.EqualError(t, err, test.expectedErr.Error())
}
})
}
} | explode_data.jsonl/19889 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 237
} | [
2830,
3393,
4153,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
2415,
14032,
60,
1235,
341,
197,
42400,
9967,
3034,
12561,
198,
197,
42400,
7747,
1465,
198,
197,
59403,
197,
197,
1,
301,
27791,
788,
341,
298,
197,
5177,
545,
28715,
24754... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestAccAzureRMLoadBalancerNatRule_complete(t *testing.T) {
data := acceptance.BuildTestData(t, "azurerm_lb_nat_rule", "test")
r := LoadBalancerNatRule{}
data.ResourceTest(t, r, []acceptance.TestStep{
{
Config: r.complete(data, "Standard"),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).ExistsInAzure(r),
),
},
data.ImportStep(),
})
} | explode_data.jsonl/29069 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 155
} | [
2830,
3393,
14603,
78107,
49,
2668,
2731,
93825,
65214,
11337,
27675,
1155,
353,
8840,
836,
8,
341,
8924,
1669,
25505,
25212,
83920,
1155,
11,
330,
1370,
324,
4195,
63601,
38169,
21124,
497,
330,
1944,
1138,
7000,
1669,
8893,
93825,
65214... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNamespaceTick(t *testing.T) {
ctrl := xtest.NewController(t)
defer ctrl.Finish()
ns, closer := newTestNamespace(t)
defer closer()
for i := range testShardIDs {
shard := NewMockdatabaseShard(ctrl)
shard.EXPECT().Tick(context.NewNoOpCanncellable(), gomock.Any(), gomock.Any()).Return(tickResult{}, nil)
ns.shards[testShardIDs[i].ID()] = shard
}
// Only asserting the expected methods are called
require.NoError(t, ns.Tick(context.NewNoOpCanncellable(), xtime.Now()))
} | explode_data.jsonl/35338 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 191
} | [
2830,
3393,
22699,
22213,
1155,
353,
8840,
836,
8,
341,
84381,
1669,
856,
1944,
7121,
2051,
1155,
340,
16867,
23743,
991,
18176,
2822,
84041,
11,
12128,
1669,
501,
2271,
22699,
1155,
340,
16867,
12128,
741,
2023,
600,
1669,
2088,
1273,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestFindImportGoPath(t *testing.T) {
goroot, err := ioutil.TempDir("", "goimports-")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(goroot)
origStdlib := stdlib
defer func() {
stdlib = origStdlib
}()
stdlib = nil
withEmptyGoPath(func() {
// Test against imaginary bits/bytes package in std lib
bytesDir := filepath.Join(goroot, "src", "pkg", "bits", "bytes")
for _, tag := range build.Default.ReleaseTags {
// Go 1.4 rearranged the GOROOT tree to remove the "pkg" path component.
if tag == "go1.4" {
bytesDir = filepath.Join(goroot, "src", "bits", "bytes")
}
}
if err := os.MkdirAll(bytesDir, 0755); err != nil {
t.Fatal(err)
}
bytesSrcPath := filepath.Join(bytesDir, "bytes.go")
bytesPkgPath := "bits/bytes"
bytesSrc := []byte(`package bytes
type Buffer2 struct {}
`)
if err := ioutil.WriteFile(bytesSrcPath, bytesSrc, 0775); err != nil {
t.Fatal(err)
}
build.Default.GOROOT = goroot
got, rename, err := findImportGoPath("bytes", map[string]bool{"Buffer2": true}, "x.go")
if err != nil {
t.Fatal(err)
}
if got != bytesPkgPath || rename {
t.Errorf(`findImportGoPath("bytes", Buffer2 ...)=%q, %t, want "%s", false`, got, rename, bytesPkgPath)
}
got, rename, err = findImportGoPath("bytes", map[string]bool{"Missing": true}, "x.go")
if err != nil {
t.Fatal(err)
}
if got != "" || rename {
t.Errorf(`findImportGoPath("bytes", Missing ...)=%q, %t, want "", false`, got, rename)
}
})
} | explode_data.jsonl/12425 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 618
} | [
2830,
3393,
9885,
11511,
10850,
1820,
1155,
353,
8840,
836,
8,
341,
3174,
269,
1905,
11,
1848,
1669,
43144,
65009,
6184,
19814,
330,
3346,
62888,
12,
1138,
743,
1848,
961,
2092,
341,
197,
3244,
26133,
3964,
340,
197,
532,
16867,
2643,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIsolateKeyFlags(t *testing.T) {
t.Run("Valid flags succeeds", testIsolateKeyFlagsValidFlagsSucceeds)
t.Run("Missing wallet fails", testIsolateKeyFlagsMissingWalletFails)
t.Run("Missing public key fails", testIsolateKeyFlagsMissingPubKeyFails)
} | explode_data.jsonl/24141 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 80
} | [
2830,
3393,
3872,
33066,
1592,
9195,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
4088,
8042,
50081,
497,
1273,
3872,
33066,
1592,
9195,
4088,
9195,
50,
29264,
82,
340,
3244,
16708,
445,
25080,
15085,
14525,
497,
1273,
3872,
33066,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParseNotChar(t *testing.T) {
testCases := []struct {
input string
expected string
}{
{
"\"",
"",
},
{
"123abc$\"",
"123abc$",
},
}
for _, testCase := range testCases {
p := NewHtmlParser(testCase.input)
actual, err := parseNotChar(p, '"')
if err != nil {
t.Errorf("raise error: %s", err.Error())
}
if testCase.expected != actual {
t.Errorf("expect %s, actual %s", testCase.expected, actual)
}
}
} | explode_data.jsonl/34979 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 210
} | [
2830,
3393,
14463,
2623,
4768,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
3056,
1235,
341,
197,
22427,
262,
914,
198,
197,
42400,
914,
198,
197,
59403,
197,
197,
515,
298,
197,
1,
95901,
298,
197,
39680,
197,
197,
1583,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestWithStart(t *testing.T) {
startCalled := false
start := func(context.Context, component.Host) error { startCalled = true; return nil }
bp := newBaseProcessor(testFullName, WithStart(start))
assert.NoError(t, bp.Start(context.Background(), componenttest.NewNopHost()))
assert.True(t, startCalled)
} | explode_data.jsonl/4006 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 103
} | [
2830,
3393,
2354,
3479,
1155,
353,
8840,
836,
8,
341,
21375,
20960,
1669,
895,
198,
21375,
1669,
2915,
5378,
9328,
11,
3692,
29840,
8,
1465,
314,
1191,
20960,
284,
830,
26,
470,
2092,
555,
2233,
79,
1669,
501,
3978,
22946,
8623,
36217... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNodes_Any(t *testing.T) {
assert.False(t, Nodes{"": NodeStatus{Name: "foo"}}.Any(func(node NodeStatus) bool { return node.Name == "bar" }))
assert.True(t, Nodes{"": NodeStatus{Name: "foo"}}.Any(func(node NodeStatus) bool { return node.Name == "foo" }))
} | explode_data.jsonl/26036 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 100
} | [
2830,
3393,
12288,
1566,
3834,
1155,
353,
8840,
836,
8,
341,
6948,
50757,
1155,
11,
52501,
4913,
788,
6018,
2522,
63121,
25,
330,
7975,
30975,
13,
8610,
18552,
6958,
6018,
2522,
8,
1807,
314,
470,
2436,
2967,
621,
330,
2257,
1,
28712,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTTL(t *testing.T) {
Convey("Subject: Test ES TTL\n", t, func() {
defer func() {
r := recover()
So(r, ShouldEqual, nil)
}()
es.DeleteExpiredData()
})
} | explode_data.jsonl/64116 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 80
} | [
2830,
3393,
51,
13470,
1155,
353,
8840,
836,
8,
341,
93070,
5617,
445,
13019,
25,
3393,
19287,
78532,
1699,
497,
259,
11,
2915,
368,
341,
197,
16867,
2915,
368,
341,
298,
7000,
1669,
11731,
741,
298,
76912,
2601,
11,
12260,
2993,
11,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestPrimitivePutDate(t *testing.T) {
client := newPrimitiveClient()
a, err := time.Parse("2006-01-02", "0001-01-01")
if err != nil {
t.Fatalf("Unable to parse date string: %v", err)
}
b, err := time.Parse("2006-01-02", "2016-02-29")
if err != nil {
t.Fatalf("Unable to parse leap year date string: %v", err)
}
resp, err := client.PutDate(context.Background(), DateWrapper{Field: &a, Leap: &b}, nil)
if err != nil {
t.Fatalf("PutDate: %v", err)
}
if s := resp.RawResponse.StatusCode; s != http.StatusOK {
t.Fatalf("unexpected status code %d", s)
}
} | explode_data.jsonl/61683 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 232
} | [
2830,
3393,
33313,
19103,
1916,
1155,
353,
8840,
836,
8,
341,
25291,
1669,
501,
33313,
2959,
741,
11323,
11,
1848,
1669,
882,
8937,
445,
17,
15,
15,
21,
12,
15,
16,
12,
15,
17,
497,
330,
15,
15,
15,
16,
12,
15,
16,
12,
15,
16,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestClientTimeout(t *testing.T) {
ts := createGetServer(t)
defer ts.Close()
c := dc().SetTimeout(time.Second * 3)
_, err := c.R().Get(ts.URL + "/set-timeout-test")
assertEqual(t, true, strings.Contains(strings.ToLower(err.Error()), "timeout"))
} | explode_data.jsonl/39337 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 98
} | [
2830,
3393,
2959,
7636,
1155,
353,
8840,
836,
8,
341,
57441,
1669,
1855,
1949,
5475,
1155,
340,
16867,
10591,
10421,
2822,
1444,
1669,
19402,
1005,
1649,
7636,
9730,
32435,
353,
220,
18,
340,
197,
6878,
1848,
1669,
272,
2013,
1005,
1949... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClientOnResponseError(t *testing.T) {
ts := createAuthServer(t)
defer ts.Close()
tests := []struct {
name string
setup func(*Client)
isError bool
hasResponse bool
}{
{
name: "successful_request",
},
{
name: "http_status_error",
setup: func(client *Client) {
client.SetAuthToken("BAD")
},
},
{
name: "before_request_error",
setup: func(client *Client) {
client.OnBeforeRequest(func(client *Client, request *Request) error {
return fmt.Errorf("before request")
})
},
isError: true,
},
{
name: "before_request_error_retry",
setup: func(client *Client) {
client.SetRetryCount(3).OnBeforeRequest(func(client *Client, request *Request) error {
return fmt.Errorf("before request")
})
},
isError: true,
},
{
name: "after_response_error",
setup: func(client *Client) {
client.OnAfterResponse(func(client *Client, response *Response) error {
return fmt.Errorf("after response")
})
},
isError: true,
hasResponse: true,
},
{
name: "after_response_error_retry",
setup: func(client *Client) {
client.SetRetryCount(3).OnAfterResponse(func(client *Client, response *Response) error {
return fmt.Errorf("after response")
})
},
isError: true,
hasResponse: true,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
var assertErrorHook = func(r *Request, err error) {
assertNotNil(t, r)
v, ok := err.(*ResponseError)
assertEqual(t, test.hasResponse, ok)
if ok {
assertNotNil(t, v.Response)
assertNotNil(t, v.Err)
}
}
var hook1, hook2 int
c := New().outputLogTo(ioutil.Discard).
SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true}).
SetAuthToken("004DDB79-6801-4587-B976-F093E6AC44FF").
SetRetryCount(0).
SetRetryMaxWaitTime(time.Microsecond).
AddRetryCondition(func(response *Response, err error) bool {
if err != nil {
return true
}
return response.IsError()
}).
OnError(func(r *Request, err error) {
assertErrorHook(r, err)
hook1++
}).
OnError(func(r *Request, err error) {
assertErrorHook(r, err)
hook2++
})
if test.setup != nil {
test.setup(c)
}
_, err := c.R().Get(ts.URL + "/profile")
if test.isError {
assertNotNil(t, err)
assertEqual(t, 1, hook1)
assertEqual(t, 1, hook2)
} else {
assertError(t, err)
}
})
}
} | explode_data.jsonl/39360 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1142
} | [
2830,
3393,
2959,
1925,
2582,
1454,
1155,
353,
8840,
836,
8,
341,
57441,
1669,
1855,
5087,
5475,
1155,
340,
16867,
10591,
10421,
2822,
78216,
1669,
3056,
1235,
341,
197,
11609,
286,
914,
198,
197,
84571,
981,
2915,
4071,
2959,
340,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIssue26873(t *testing.T) {
store, clean := testkit.CreateMockStore(t)
defer clean()
orgEnable := core.PreparedPlanCacheEnabled()
defer core.SetPreparedPlanCache(orgEnable)
core.SetPreparedPlanCache(true)
se, err := session.CreateSession4TestWithOpt(store, &session.Opt{
PreparedPlanCache: kvcache.NewSimpleLRUCache(100, 0.1, math.MaxUint64),
})
require.NoError(t, err)
tk := testkit.NewTestKitWithSession(t, store, se)
tk.MustExec("use test")
tk.MustExec("drop table if exists t")
tk.MustExec("create table t(a int primary key, b int, c int)")
tk.MustExec("prepare stmt from 'select * from t where a = 2 or a = ?'")
tk.MustExec("set @p = 3")
tk.MustQuery("execute stmt using @p").Check(testkit.Rows())
tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("0"))
tk.MustQuery("execute stmt using @p").Check(testkit.Rows())
tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows("1"))
} | explode_data.jsonl/5529 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 347
} | [
2830,
3393,
42006,
17,
21,
23,
22,
18,
1155,
353,
8840,
836,
8,
341,
57279,
11,
4240,
1669,
1273,
8226,
7251,
11571,
6093,
1155,
340,
16867,
4240,
741,
87625,
11084,
1669,
6200,
28770,
7212,
20485,
8233,
5462,
741,
16867,
6200,
4202,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServiceDiscoveryWorkloadInstance(t *testing.T) {
store, sd, events, stopFn := initServiceDiscovery()
defer stopFn()
// Setup a couple of workload instances for test. These will be selected by the `selector` SE
fi1 := &model.WorkloadInstance{
Name: selector.Name,
Namespace: selector.Namespace,
Endpoint: &model.IstioEndpoint{
Address: "2.2.2.2",
Labels: map[string]string{"app": "wle"},
ServiceAccount: spiffe.MustGenSpiffeURI(selector.Name, "default"),
TLSMode: model.IstioMutualTLSModeLabel,
},
}
fi2 := &model.WorkloadInstance{
Name: "some-other-name",
Namespace: selector.Namespace,
Endpoint: &model.IstioEndpoint{
Address: "3.3.3.3",
Labels: map[string]string{"app": "wle"},
ServiceAccount: spiffe.MustGenSpiffeURI(selector.Name, "default"),
TLSMode: model.IstioMutualTLSModeLabel,
},
}
t.Run("service entry", func(t *testing.T) {
// Add just the ServiceEntry with selector. We should see no instances
createConfigs([]*model.Config{selector}, store, t)
instances := []*model.ServiceInstance{}
expectProxyInstances(t, sd, instances, "2.2.2.2")
expectServiceInstances(t, sd, selector, 0, instances)
expectEvents(t, events,
Event{kind: "svcupdate", host: "selector.com", namespace: selector.Namespace},
Event{kind: "eds", host: "selector.com", namespace: selector.Namespace},
Event{kind: "xds"})
})
t.Run("add workload instance", func(t *testing.T) {
// Add a workload instance, we expect this to update
callInstanceHandlers([]*model.WorkloadInstance{fi1}, sd, model.EventAdd, t)
instances := []*model.ServiceInstance{
makeInstanceWithServiceAccount(selector, "2.2.2.2", 444,
selector.Spec.(*networking.ServiceEntry).Ports[0], map[string]string{"app": "wle"}, "default"),
makeInstanceWithServiceAccount(selector, "2.2.2.2", 445,
selector.Spec.(*networking.ServiceEntry).Ports[1], map[string]string{"app": "wle"}, "default"),
}
expectProxyInstances(t, sd, instances, "2.2.2.2")
expectServiceInstances(t, sd, selector, 0, instances)
expectEvents(t, events, Event{kind: "eds", host: "selector.com", namespace: selector.Namespace, endpoints: 2})
})
t.Run("another workload instance", func(t *testing.T) {
// Add a different instance
callInstanceHandlers([]*model.WorkloadInstance{fi2}, sd, model.EventAdd, t)
instances := []*model.ServiceInstance{
makeInstanceWithServiceAccount(selector, "2.2.2.2", 444,
selector.Spec.(*networking.ServiceEntry).Ports[0], map[string]string{"app": "wle"}, "default"),
makeInstanceWithServiceAccount(selector, "2.2.2.2", 445,
selector.Spec.(*networking.ServiceEntry).Ports[1], map[string]string{"app": "wle"}, "default"),
}
expectProxyInstances(t, sd, instances, "2.2.2.2")
instances = append(instances,
makeInstanceWithServiceAccount(selector, "3.3.3.3", 444,
selector.Spec.(*networking.ServiceEntry).Ports[0], map[string]string{"app": "wle"}, "default"),
makeInstanceWithServiceAccount(selector, "3.3.3.3", 445,
selector.Spec.(*networking.ServiceEntry).Ports[1], map[string]string{"app": "wle"}, "default"))
expectServiceInstances(t, sd, selector, 0, instances)
expectEvents(t, events, Event{kind: "eds", host: "selector.com", namespace: selector.Namespace, endpoints: 4})
})
t.Run("delete workload instance", func(t *testing.T) {
// Delete the instances, it should be gone
callInstanceHandlers([]*model.WorkloadInstance{fi2}, sd, model.EventDelete, t)
instances := []*model.ServiceInstance{
makeInstanceWithServiceAccount(selector, "2.2.2.2", 444,
selector.Spec.(*networking.ServiceEntry).Ports[0], map[string]string{"app": "wle"}, "default"),
makeInstanceWithServiceAccount(selector, "2.2.2.2", 445,
selector.Spec.(*networking.ServiceEntry).Ports[1], map[string]string{"app": "wle"}, "default"),
}
expectProxyInstances(t, sd, instances, "2.2.2.2")
expectServiceInstances(t, sd, selector, 0, instances)
expectEvents(t, events, Event{kind: "eds", host: "selector.com", namespace: selector.Namespace, endpoints: 2})
// Delete the other instance
callInstanceHandlers([]*model.WorkloadInstance{fi1}, sd, model.EventDelete, t)
instances = []*model.ServiceInstance{}
expectServiceInstances(t, sd, selector, 0, instances)
expectProxyInstances(t, sd, instances, "2.2.2.2")
expectEvents(t, events, Event{kind: "eds", host: "selector.com", namespace: selector.Namespace, endpoints: 0})
// Add the instance back
callInstanceHandlers([]*model.WorkloadInstance{fi1}, sd, model.EventAdd, t)
instances = []*model.ServiceInstance{
makeInstanceWithServiceAccount(selector, "2.2.2.2", 444,
selector.Spec.(*networking.ServiceEntry).Ports[0], map[string]string{"app": "wle"}, "default"),
makeInstanceWithServiceAccount(selector, "2.2.2.2", 445,
selector.Spec.(*networking.ServiceEntry).Ports[1], map[string]string{"app": "wle"}, "default"),
}
expectProxyInstances(t, sd, instances, "2.2.2.2")
expectServiceInstances(t, sd, selector, 0, instances)
expectEvents(t, events, Event{kind: "eds", host: "selector.com", namespace: selector.Namespace, endpoints: 2})
})
} | explode_data.jsonl/12837 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1944
} | [
2830,
3393,
1860,
67400,
6776,
1078,
2523,
1155,
353,
8840,
836,
8,
341,
57279,
11,
20585,
11,
4357,
11,
2936,
24911,
1669,
2930,
1860,
67400,
741,
16867,
2936,
24911,
2822,
197,
322,
18626,
264,
5625,
315,
53596,
13121,
369,
1273,
13,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMatchesApi(t *testing.T) {
web := setupTestWeb(t)
match1 := model.Match{Type: "qualification", DisplayName: "1", Time: time.Unix(0, 0), Red1: 1, Red2: 2, Red3: 3,
Blue1: 4, Blue2: 5, Blue3: 6, Blue1IsSurrogate: true, Blue2IsSurrogate: true, Blue3IsSurrogate: true}
match2 := model.Match{Type: "qualification", DisplayName: "2", Time: time.Unix(600, 0), Red1: 7, Red2: 8, Red3: 9,
Blue1: 10, Blue2: 11, Blue3: 12, Red1IsSurrogate: true, Red2IsSurrogate: true, Red3IsSurrogate: true}
match3 := model.Match{Type: "practice", DisplayName: "1", Time: time.Now(), Red1: 6, Red2: 5, Red3: 4,
Blue1: 3, Blue2: 2, Blue3: 1}
web.arena.Database.CreateMatch(&match1)
web.arena.Database.CreateMatch(&match2)
web.arena.Database.CreateMatch(&match3)
matchResult1 := model.BuildTestMatchResult(match1.Id, 1)
web.arena.Database.CreateMatchResult(matchResult1)
recorder := web.getHttpResponse("/api/matches/qualification")
assert.Equal(t, 200, recorder.Code)
assert.Equal(t, "application/json", recorder.HeaderMap["Content-Type"][0])
var matchesData []MatchWithResult
err := json.Unmarshal([]byte(recorder.Body.String()), &matchesData)
assert.Nil(t, err)
if assert.Equal(t, 2, len(matchesData)) {
assert.Equal(t, match1.Id, matchesData[0].Match.Id)
assert.Equal(t, *matchResult1, matchesData[0].Result.MatchResult)
assert.Equal(t, match2.Id, matchesData[1].Match.Id)
assert.Nil(t, matchesData[1].Result)
}
} | explode_data.jsonl/75528 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 569
} | [
2830,
3393,
42470,
6563,
1155,
353,
8840,
836,
8,
341,
97250,
1669,
6505,
2271,
5981,
1155,
692,
47706,
16,
1669,
1614,
36062,
90,
929,
25,
330,
71909,
497,
81783,
25,
330,
16,
497,
4120,
25,
882,
10616,
941,
7,
15,
11,
220,
15,
7... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func Test_downloadRepositoryCRDs(t *testing.T) {
renderer := buildTestRenderer()
crds, err := renderer.downloadRepositoryCRDs(context.Background(), RemoteRepositoryDefinition{
Path: "config/crd",
Owner: "giantswarm",
Provider: "common",
Name: "apiextensions",
Reference: "v3.35.0",
})
require.Nil(t, err, err)
require.Len(t, crds, 30)
} | explode_data.jsonl/3689 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 152
} | [
2830,
3393,
35939,
4624,
8973,
66950,
1155,
353,
8840,
836,
8,
341,
83509,
1669,
1936,
2271,
11541,
741,
91492,
5356,
11,
1848,
1669,
19715,
35381,
4624,
8973,
66950,
5378,
19047,
1507,
20738,
4624,
10398,
515,
197,
69640,
25,
414,
330,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestUFuturesNewOrder(t *testing.T) {
t.Parallel()
if !areTestAPIKeysSet() || !canManipulateRealOrders {
t.Skip("skipping test: api keys not set or canManipulateRealOrders set to false")
}
_, err := b.UFuturesNewOrder(context.Background(), currency.NewPair(currency.BTC, currency.USDT), "BUY", "", "LIMIT", "GTC", "", "", "", "", 1, 1, 0, 0, 0, false)
if err != nil {
t.Error(err)
}
} | explode_data.jsonl/76570 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 161
} | [
2830,
3393,
20538,
74606,
3564,
4431,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
743,
753,
546,
2271,
7082,
8850,
1649,
368,
1369,
753,
4814,
92876,
6334,
12768,
24898,
341,
197,
3244,
57776,
445,
4886,
5654,
1273,
25,
6330,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestNativeCtorNewTarget(t *testing.T) {
const SCRIPT = `
function NewTarget() {
}
var o = Reflect.construct(Number, [1], NewTarget);
o.__proto__ === NewTarget.prototype && o.toString() === "[object Number]";
`
testScript1(SCRIPT, valueTrue, t)
} | explode_data.jsonl/10519 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 93
} | [
2830,
3393,
20800,
34,
10980,
3564,
6397,
1155,
353,
8840,
836,
8,
341,
4777,
53679,
284,
22074,
7527,
1532,
6397,
368,
341,
197,
630,
2405,
297,
284,
34598,
84254,
42999,
11,
508,
16,
1125,
1532,
6397,
317,
22229,
4847,
15110,
563,
2... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_JSHash(t *testing.T) {
var x uint32 = 498688898
gtest.C(t, func(t *gtest.T) {
j := ghash.JSHash(strBasic)
t.Assert(j, x)
})
} | explode_data.jsonl/60232 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 77
} | [
2830,
3393,
77846,
6370,
1155,
353,
8840,
836,
8,
341,
2405,
856,
2622,
18,
17,
284,
220,
19,
24,
23,
21,
23,
23,
23,
24,
23,
198,
3174,
1944,
727,
1155,
11,
2915,
1155,
353,
82038,
836,
8,
341,
197,
12428,
1669,
342,
8296,
3503... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestCaptiveRunFromParams(t *testing.T) {
var tests = []struct {
from uint32
runFrom uint32
ledgerArchives uint32
}{
// Before and including 1st checkpoint:
{2, 2, 3},
{3, 2, 3},
{3, 2, 3},
{4, 2, 3},
{62, 2, 3},
{63, 2, 3},
// Starting from 64 we go normal path: between 1st and 2nd checkpoint:
{64, 63, 64},
{65, 64, 65},
{66, 65, 66},
{126, 125, 126},
// between 2nd and 3rd checkpoint... and so on.
{127, 126, 127},
{128, 127, 128},
{129, 128, 129},
}
for _, tc := range tests {
t.Run(fmt.Sprintf("from_%d", tc.from), func(t *testing.T) {
tt := assert.New(t)
mockArchive := &historyarchive.MockArchive{}
mockArchive.
On("GetLedgerHeader", uint32(tc.ledgerArchives)).
Return(xdr.LedgerHeaderHistoryEntry{
Header: xdr.LedgerHeader{
PreviousLedgerHash: xdr.Hash{1, 1, 1, 1},
},
}, nil)
captiveBackend := CaptiveStellarCore{
archive: mockArchive,
checkpointManager: historyarchive.NewCheckpointManager(64),
}
ctx := context.Background()
runFrom, ledgerHash, err := captiveBackend.runFromParams(ctx, tc.from)
tt.NoError(err)
tt.Equal(tc.runFrom, runFrom, "runFrom")
tt.Equal("0101010100000000000000000000000000000000000000000000000000000000", ledgerHash)
mockArchive.AssertExpectations(t)
})
}
} | explode_data.jsonl/7334 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 597
} | [
2830,
3393,
34,
27781,
6727,
3830,
4870,
1155,
353,
8840,
836,
8,
341,
2405,
7032,
284,
3056,
1235,
341,
197,
42727,
1843,
2622,
18,
17,
198,
197,
56742,
3830,
286,
2622,
18,
17,
198,
197,
197,
50704,
18727,
1886,
2622,
18,
17,
198,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReadYMLConfig(t *testing.T) {
var c TestConfig
err := GetConfig(&c, "config_test.yml")
assert.Equal(t, nil, err)
assert.Equal(t, "configItem1", c.First)
assert.Equal(t, "configItem2", c.Second)
} | explode_data.jsonl/3113 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 88
} | [
2830,
3393,
4418,
56,
2668,
2648,
1155,
353,
8840,
836,
8,
341,
2405,
272,
3393,
2648,
271,
9859,
1669,
2126,
2648,
2099,
66,
11,
330,
1676,
4452,
33936,
5130,
6948,
12808,
1155,
11,
2092,
11,
1848,
340,
6948,
12808,
1155,
11,
330,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestJson2Struct_slice_interface(t *testing.T) {
ret, err := Json2Struct("auto_name", `[1,2,3,"232343"]`, GoStructType)
fmt.Printf("ret:\n %s, %v\n", ret, err)
} | explode_data.jsonl/72761 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 75
} | [
2830,
3393,
5014,
17,
9422,
26488,
20546,
1155,
353,
8840,
836,
8,
341,
11262,
11,
1848,
1669,
8308,
17,
9422,
445,
3902,
1269,
497,
77644,
16,
11,
17,
11,
18,
1335,
17,
18,
17,
18,
19,
18,
1341,
7808,
5994,
9422,
929,
340,
11009,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestInitializeShouldCreateExtraDirectories(t *testing.T) {
const expectedExtraDirectory = "/testdir"
extraDirectories := []string{expectedExtraDirectory}
extraMountPoints := []*MountPoint{}
dir := filepath.Join(tmpDir, "TestInitializeShouldCreateExtraDirectories")
chroot := NewChroot(dir, isExistingDir)
err := chroot.Initialize(emptyPath, extraDirectories, extraMountPoints)
assert.NoError(t, err)
defer chroot.Close(defaultLeaveOnDisk)
fullPath := filepath.Join(chroot.RootDir(), expectedExtraDirectory)
_, err = os.Stat(fullPath)
assert.True(t, !os.IsNotExist(err))
} | explode_data.jsonl/38800 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 190
} | [
2830,
3393,
9928,
14996,
4021,
11612,
56397,
1155,
353,
8840,
836,
8,
341,
4777,
3601,
11612,
9310,
284,
3521,
1944,
3741,
1837,
8122,
2172,
56397,
1669,
3056,
917,
90,
7325,
11612,
9310,
532,
8122,
2172,
16284,
11411,
1669,
29838,
16284,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestIsRoundRobin(t *testing.T) {
var (
sc1 = testSubConns[0]
sc2 = testSubConns[1]
sc3 = testSubConns[2]
)
testCases := []struct {
desc string
want []balancer.SubConn
got []balancer.SubConn
pass bool
}{
{
desc: "0 element",
want: []balancer.SubConn{},
got: []balancer.SubConn{},
pass: true,
},
{
desc: "1 element RR",
want: []balancer.SubConn{sc1},
got: []balancer.SubConn{sc1, sc1, sc1, sc1},
pass: true,
},
{
desc: "1 element not RR",
want: []balancer.SubConn{sc1},
got: []balancer.SubConn{sc1, sc2, sc1},
pass: false,
},
{
desc: "2 elements RR",
want: []balancer.SubConn{sc1, sc2},
got: []balancer.SubConn{sc1, sc2, sc1, sc2, sc1, sc2},
pass: true,
},
{
desc: "2 elements RR different order from want",
want: []balancer.SubConn{sc2, sc1},
got: []balancer.SubConn{sc1, sc2, sc1, sc2, sc1, sc2},
pass: true,
},
{
desc: "2 elements RR not RR, mistake in first iter",
want: []balancer.SubConn{sc1, sc2},
got: []balancer.SubConn{sc1, sc1, sc1, sc2, sc1, sc2},
pass: false,
},
{
desc: "2 elements RR not RR, mistake in second iter",
want: []balancer.SubConn{sc1, sc2},
got: []balancer.SubConn{sc1, sc2, sc1, sc1, sc1, sc2},
pass: false,
},
{
desc: "2 elements weighted RR",
want: []balancer.SubConn{sc1, sc1, sc2},
got: []balancer.SubConn{sc1, sc1, sc2, sc1, sc1, sc2},
pass: true,
},
{
desc: "2 elements weighted RR different order",
want: []balancer.SubConn{sc1, sc1, sc2},
got: []balancer.SubConn{sc1, sc2, sc1, sc1, sc2, sc1},
pass: true,
},
{
desc: "3 elements RR",
want: []balancer.SubConn{sc1, sc2, sc3},
got: []balancer.SubConn{sc1, sc2, sc3, sc1, sc2, sc3, sc1, sc2, sc3},
pass: true,
},
{
desc: "3 elements RR different order",
want: []balancer.SubConn{sc1, sc2, sc3},
got: []balancer.SubConn{sc3, sc2, sc1, sc3, sc2, sc1},
pass: true,
},
{
desc: "3 elements weighted RR",
want: []balancer.SubConn{sc1, sc1, sc1, sc2, sc2, sc3},
got: []balancer.SubConn{sc1, sc2, sc3, sc1, sc2, sc1, sc1, sc2, sc3, sc1, sc2, sc1},
pass: true,
},
{
desc: "3 elements weighted RR not RR, mistake in first iter",
want: []balancer.SubConn{sc1, sc1, sc1, sc2, sc2, sc3},
got: []balancer.SubConn{sc1, sc2, sc1, sc1, sc2, sc1, sc1, sc2, sc3, sc1, sc2, sc1},
pass: false,
},
{
desc: "3 elements weighted RR not RR, mistake in second iter",
want: []balancer.SubConn{sc1, sc1, sc1, sc2, sc2, sc3},
got: []balancer.SubConn{sc1, sc2, sc3, sc1, sc2, sc1, sc1, sc1, sc3, sc1, sc2, sc1},
pass: false,
},
}
for _, tC := range testCases {
t.Run(tC.desc, func(t *testing.T) {
err := isRoundRobin(tC.want, (&testClosure{r: tC.got}).next)
if err == nil != tC.pass {
t.Errorf("want pass %v, want %v, got err %v", tC.pass, tC.want, err)
}
})
}
} | explode_data.jsonl/47449 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1433
} | [
2830,
3393,
3872,
27497,
76671,
1155,
353,
8840,
836,
8,
341,
2405,
2399,
197,
29928,
16,
284,
1273,
3136,
1109,
4412,
58,
15,
921,
197,
29928,
17,
284,
1273,
3136,
1109,
4412,
58,
16,
921,
197,
29928,
18,
284,
1273,
3136,
1109,
441... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestDecodeLogLevel(t *testing.T) {
assert.Equal(t, zerolog.InfoLevel, log.DecodeLogLevel("bla"), "Unknown level should match InfoLevel")
assert.Equal(t, zerolog.DebugLevel, log.DecodeLogLevel("debug"), "String debug does not match DebugLevel")
assert.Equal(t, zerolog.InfoLevel, log.DecodeLogLevel("info"), "String info does not match InfoLevel")
assert.Equal(t, zerolog.WarnLevel, log.DecodeLogLevel("warn"), "String warn does not match WarnLevel")
assert.Equal(t, zerolog.ErrorLevel, log.DecodeLogLevel("error"), "String error does not match ErrorLevel")
assert.Equal(t, zerolog.FatalLevel, log.DecodeLogLevel("fatal"), "String fatal does not match FatalLevel")
} | explode_data.jsonl/9754 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 223
} | [
2830,
3393,
32564,
72676,
1155,
353,
8840,
836,
8,
341,
6948,
12808,
1155,
11,
76178,
1609,
20132,
4449,
11,
1487,
56372,
72676,
445,
64726,
3975,
330,
13790,
2188,
1265,
2432,
13074,
4449,
1138,
6948,
12808,
1155,
11,
76178,
1609,
20345,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNodeHandlerUpdate(t *testing.T) {
tests := []struct {
name string
id string
reqBody string
expCode int
expBody string
}{
{
name: "Request to update a node should return not implemented.",
reqBody: "",
id: "",
expCode: 500,
expBody: `{"error":"not implemented"}`,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
assert := assert.New(t)
// Mocks.
mcv1 := &mcliclusterv1.NodeClientInterface{}
nh := webapiclusterv1.NewNodeHandler(serializer.DefaultSerializer, mcv1)
b := bytes.NewBufferString(test.reqBody)
r := httptest.NewRequest("POST", "http://test", b)
w := httptest.NewRecorder()
nh.Update(w, r, test.id)
assert.Equal(test.expCode, w.Code)
assert.Equal(test.expBody, strings.TrimSuffix(w.Body.String(), "\n"))
})
}
} | explode_data.jsonl/62160 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 364
} | [
2830,
3393,
1955,
3050,
4289,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
262,
914,
198,
197,
15710,
414,
914,
198,
197,
24395,
5444,
914,
198,
197,
48558,
2078,
526,
198,
197,
48558,
5444,
914,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestEvents_Exists issues a live Events.Exists call against the host
// configured via MNUBO_* environment variables and checks that the
// call succeeds.
// NOTE(review): `results` is a fixed-size [1]EntitiesExist array, so
// len(results) is always 1 and the length comparison below can never
// fail — presumably a slice was intended; verify against the API.
func TestEvents_Exists(t *testing.T) {
	// Client credentials and target host come from the environment.
	m := NewClient(os.Getenv("MNUBO_CLIENT_ID"), os.Getenv("MNUBO_CLIENT_SECRET"), os.Getenv("MNUBO_HOST"))
	var results [1]EntitiesExist
	cases := []struct {
		Error          error
		ExpectedLength int
	}{
		{
			// The Exists call runs here, at table-construction time.
			Error:          m.Events.Exists([]string{uuid.New().String()}, &results[0]),
			ExpectedLength: 1,
		},
	}
	for i, c := range cases {
		if c.Error != nil {
			t.Errorf("%d, client call failed: %+v", i, c.Error)
		}
		if len(results) != c.ExpectedLength {
			t.Errorf("%d, expecting length: %d, got %d", i, c.ExpectedLength, len(results))
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 271
} | [
2830,
3393,
7900,
62,
15575,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
1532,
2959,
9638,
64883,
445,
44,
3926,
4677,
22521,
3450,
3975,
2643,
64883,
445,
44,
3926,
4677,
22521,
31408,
3975,
2643,
64883,
445,
44,
3926,
4677,
17213,
28075... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestSetLoggerNoClient verifies that SetLogger stores the supplied
// logger in the package-level logger variable even when no client has
// been established.
func TestSetLoggerNoClient(t *testing.T) {
	// Reset package-level state so later tests start from a clean slate.
	defer func() {
		client = nil
		logger = nil
	}()
	Convey("Given a logger", t, func() {
		l := logging.NewBasicLogger()
		Convey("When SetLogger is called with no client established", func() {
			SetLogger(l)
			Convey("Then the logger var should be set", func() {
				So(logger, ShouldNotBeNil)
			})
		})
	})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 143
} | [
2830,
3393,
1649,
7395,
2753,
2959,
1155,
353,
8840,
836,
8,
341,
16867,
2915,
368,
341,
197,
25291,
284,
2092,
198,
197,
17060,
284,
2092,
198,
197,
69826,
93070,
5617,
445,
22043,
264,
5925,
497,
259,
11,
2915,
368,
341,
197,
8810,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestLoadReader(t *testing.T) {
var conf = `
nfpms:
- homepage: http://goreleaser.github.io
`
buf := strings.NewReader(conf)
prop, err := LoadReader(buf)
assert.NoError(t, err)
assert.Equal(t, "http://goreleaser.github.io", prop.NFPMs[0].Homepage, "yaml did not load correctly")
} | explode_data.jsonl/38048 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 115
} | [
2830,
3393,
5879,
5062,
1155,
353,
8840,
836,
8,
341,
2405,
2335,
284,
22074,
77,
10852,
1011,
510,
220,
481,
34589,
25,
1758,
1110,
70,
460,
273,
12080,
11021,
4245,
198,
3989,
26398,
1669,
9069,
68587,
29879,
340,
79244,
11,
1848,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestStackDependencyGraph deploys the stack_dependencies program and
// validates the recorded dependency graph: the "first" resource has no
// dependencies and the "second" resource depends exactly on "first".
func TestStackDependencyGraph(t *testing.T) {
	integration.ProgramTest(t, &integration.ProgramTestOptions{
		Dir:          "stack_dependencies",
		Dependencies: []string{"@pulumi/pulumi"},
		Quick:        true,
		ExtraRuntimeValidation: func(t *testing.T, stackInfo integration.RuntimeValidationStackInfo) {
			assert.NotNil(t, stackInfo.Deployment)
			latest := stackInfo.Deployment
			// At minimum the two dynamic resources must be present.
			assert.True(t, len(latest.Resources) >= 2)
			sawFirst := false
			sawSecond := false
			for _, res := range latest.Resources {
				urn := string(res.URN)
				if strings.Contains(urn, "dynamic:Resource::first") {
					// The first resource doesn't depend on anything.
					assert.Equal(t, 0, len(res.Dependencies))
					sawFirst = true
				} else if strings.Contains(urn, "dynamic:Resource::second") {
					// The second resource uses an Output property of the first resource, so it
					// depends directly on first.
					assert.Equal(t, 1, len(res.Dependencies))
					assert.True(t, strings.Contains(string(res.Dependencies[0]), "dynamic:Resource::first"))
					sawSecond = true
				}
			}
			// Both resources must have been found in the deployment.
			assert.True(t, sawFirst && sawSecond)
		},
	})
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 436
} | [
2830,
3393,
4336,
36387,
11212,
1155,
353,
8840,
836,
8,
341,
2084,
17376,
80254,
2271,
1155,
11,
609,
60168,
80254,
2271,
3798,
515,
197,
197,
6184,
25,
688,
330,
7693,
71841,
756,
197,
197,
48303,
25,
3056,
917,
4913,
31,
79,
65482,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestContextRenderJSONPWithoutCallback(t *testing.T) {
w := httptest.NewRecorder()
c, _ := CreateTestContext(w)
c.Request, _ = http.NewRequest("GET", "http://example.com", nil)
c.JSONP(http.StatusCreated, H{"foo": "bar"})
assert.Equal(t, http.StatusCreated, w.Code)
assert.Equal(t, "{\"foo\":\"bar\"}\n", w.Body.String())
assert.Equal(t, "application/json; charset=utf-8", w.Header().Get("Content-Type"))
} | explode_data.jsonl/26771 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 160
} | [
2830,
3393,
1972,
6750,
5370,
47,
26040,
7494,
1155,
353,
8840,
836,
8,
341,
6692,
1669,
54320,
70334,
7121,
47023,
741,
1444,
11,
716,
1669,
4230,
2271,
1972,
3622,
340,
1444,
9659,
11,
716,
284,
1758,
75274,
445,
3806,
497,
330,
125... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestTeamsService_AddTeamRepoBySlug_invalidOwner(t *testing.T) {
client, _, _, teardown := setup()
defer teardown()
ctx := context.Background()
_, err := client.Teams.AddTeamRepoBySlug(ctx, "o", "s", "%", "r", nil)
testURLParseError(t, err)
} | explode_data.jsonl/4539 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 103
} | [
2830,
3393,
60669,
1860,
21346,
14597,
25243,
1359,
54968,
31433,
13801,
1155,
353,
8840,
836,
8,
341,
25291,
11,
8358,
8358,
49304,
1669,
6505,
741,
16867,
49304,
2822,
20985,
1669,
2266,
19047,
741,
197,
6878,
1848,
1669,
2943,
94849,
4... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServiceCreatedAfterImported(t *testing.T) {
for _, mode := range []EndpointMode{EndpointsOnly, EndpointSliceOnly} {
t.Run(mode.String(), func(t *testing.T) {
c, ic, cleanup := newTestServiceImportCache(mode)
defer cleanup()
ic.createServiceImport(t, mcsapi.ClusterSetIP, serviceImportVIPs)
ic.createKubeService(t, c)
// Check that the service has been assigned ClusterSet IPs.
ic.checkServiceInstances(t)
})
}
} | explode_data.jsonl/50092 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 166
} | [
2830,
3393,
1860,
11694,
6025,
11511,
291,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
3856,
1669,
2088,
3056,
27380,
3636,
90,
80786,
7308,
11,
47269,
33236,
7308,
92,
341,
197,
3244,
16708,
31356,
6431,
1507,
2915,
1155,
353,
8840,
836,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestServerCORS(t *testing.T) {
true := true
tests := []struct {
expectedStatus int
origin string
allowedOrigins []string
}{
{
expectedStatus: http.StatusForbidden,
origin: "http://www.example.com",
allowedOrigins: []string{"http://notmydomain.com", "http://neitherthisone.com"},
},
{
expectedStatus: http.StatusForbidden,
origin: "http://www.example.com",
allowedOrigins: []string{""},
},
{
expectedStatus: http.StatusForbidden,
origin: "http://www.example.com",
allowedOrigins: []string{"example.com"},
},
{
expectedStatus: http.StatusAccepted,
origin: "whatever",
allowedOrigins: []string{"http://notmydomain.com", "*"},
},
{
expectedStatus: http.StatusAccepted,
origin: "http://www.example.co.uk",
allowedOrigins: []string{"http://*.example.co*"},
},
{
expectedStatus: http.StatusAccepted,
origin: "https://www.example.com",
allowedOrigins: []string{"http://*example.com", "https://*example.com"},
},
}
var teardown = func() {}
defer teardown() // in case test crashes. calling teardown twice is ok
for idx, test := range tests {
ucfg, err := common.NewConfigFrom(m{"rum": m{"enabled": true, "allow_origins": test.allowedOrigins}})
assert.NoError(t, err)
var apm *beater
apm, teardown, err = setupServer(t, ucfg, nil)
require.NoError(t, err)
baseUrl, client := apm.client(false)
for _, endpoint := range []struct {
url, contentType string
testData []byte
}{
{RumTransactionsURL, "application/json", testData},
{V2RumURL, "application/x-ndjson", testDataV2},
} {
req, err := http.NewRequest("POST", baseUrl+endpoint.url, bytes.NewReader(endpoint.testData))
req.Header.Set("Origin", test.origin)
req.Header.Set("Content-Type", endpoint.contentType)
assert.NoError(t, err)
res, err := client.Do(req)
assert.Equal(t, test.expectedStatus, res.StatusCode, fmt.Sprintf("Failed at idx %v; %s", idx, body(t, res)))
}
teardown()
}
} | explode_data.jsonl/4943 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 835
} | [
2830,
3393,
5475,
34,
9821,
1155,
353,
8840,
836,
8,
341,
42808,
1669,
830,
198,
78216,
1669,
3056,
1235,
341,
197,
42400,
2522,
526,
198,
197,
197,
8611,
260,
914,
198,
197,
197,
20967,
62726,
1330,
3056,
917,
198,
197,
59403,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
// TestValidatePresubmits checks validatePresubmits against duplicated
// contexts, duplicated job names, branch-scoped duplicates (which are
// allowed), and several invalid job/trigger/reporting configurations,
// comparing the exact error text in each case.
func TestValidatePresubmits(t *testing.T) {
	t.Parallel()
	testCases := []struct {
		name          string
		presubmits    []Presubmit
		expectedError string
	}{
		{
			name: "Duplicate context causes error",
			presubmits: []Presubmit{
				{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "repeated"}},
				{JobBase: JobBase{Name: "b"}, Reporter: Reporter{Context: "repeated"}},
			},
			expectedError: `[jobs b and a report to the same GitHub context "repeated", jobs a and b report to the same GitHub context "repeated"]`,
		},
		{
			name: "Duplicate context on different branch doesn't cause error",
			presubmits: []Presubmit{
				{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "repeated"}, Brancher: Brancher{Branches: []string{"master"}}},
				{JobBase: JobBase{Name: "b"}, Reporter: Reporter{Context: "repeated"}, Brancher: Brancher{Branches: []string{"next"}}},
			},
		},
		{
			name: "Duplicate jobname causes error",
			presubmits: []Presubmit{
				{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "foo"}},
				{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "bar"}},
			},
			expectedError: "duplicated presubmit job: a",
		},
		{
			name: "Duplicate jobname on different branches doesn't cause error",
			presubmits: []Presubmit{
				{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "foo"}, Brancher: Brancher{Branches: []string{"master"}}},
				{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "foo"}, Brancher: Brancher{Branches: []string{"next"}}},
			},
		},
		{
			name:          "Invalid JobBase causes error",
			presubmits:    []Presubmit{{Reporter: Reporter{Context: "foo"}}},
			expectedError: `invalid presubmit job : name: must match regex "^[A-Za-z0-9-._]+$"`,
		},
		{
			name:          "Invalid triggering config causes error",
			presubmits:    []Presubmit{{Trigger: "some-trigger", JobBase: JobBase{Name: "my-job"}, Reporter: Reporter{Context: "foo"}}},
			expectedError: `Either both of job.Trigger and job.RerunCommand must be set, wasnt the case for job "my-job"`,
		},
		{
			name:          "Invalid reporting config causes error",
			presubmits:    []Presubmit{{JobBase: JobBase{Name: "my-job"}}},
			expectedError: "invalid presubmit job my-job: job is set to report but has no context configured",
		},
	}
	for _, tc := range testCases {
		// An empty expectedError means the config must validate cleanly.
		var errMsg string
		err := validatePresubmits(tc.presubmits, "")
		if err != nil {
			errMsg = err.Error()
		}
		if errMsg != tc.expectedError {
			t.Errorf("expected error '%s', got error '%s'", tc.expectedError, errMsg)
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 997
} | [
2830,
3393,
17926,
14367,
392,
44703,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
18185,
37302,
1669,
3056,
1235,
341,
197,
11609,
688,
914,
198,
197,
3223,
416,
392,
44703,
262,
3056,
14367,
392,
1763,
198,
197,
42400,
1454,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
// TestSetAgentProfileDefaultsOnAzureStack verifies that on an Azure
// Stack (custom cloud) cluster SetPropertiesDefaults fills in the
// agent-pool fault-domain count and accelerated-networking settings,
// and that an already-set PlatformFaultDomainCount is left untouched.
// Endpoint-metadata lookups are intercepted with httpmock.
func TestSetAgentProfileDefaultsOnAzureStack(t *testing.T) {
	location := "testlocation"
	oldFaultDomainCount := 2
	//Test setMasterProfileDefaults with portal url
	mockCS := getMockBaseContainerService("1.11.6")
	mockCS.Properties.CustomCloudProfile = &CustomCloudProfile{
		PortalURL: "https://portal.testlocation.contoso.com",
	}
	mockCS.Location = location
	mockCS.Properties.MasterProfile.AvailabilityProfile = ""
	mockCS.Properties.MasterProfile.Count = 1
	httpmock.Activate()
	defer httpmock.DeactivateAndReset()
	// Serve the cloud's endpoint metadata so defaulting can resolve it.
	httpmock.RegisterResponder("GET", fmt.Sprintf("%smetadata/endpoints?api-version=1.0", fmt.Sprintf("https://management.%s.contoso.com/", location)),
		func(req *http.Request) (*http.Response, error) {
			resp := httpmock.NewStringResponse(200, `{"galleryEndpoint":"https://galleryartifacts.hosting.testlocation.contoso.com/galleryartifacts/","graphEndpoint":"https://graph.testlocation.contoso.com/","portalEndpoint":"https://portal.testlocation.contoso.com/","authentication":{"loginEndpoint":"https://adfs.testlocation.contoso.com/adfs","audiences":["https://management.adfs.azurestack.testlocation/ce080287-be51-42e5-b99e-9de760fecae7"]}}`)
			return resp, nil
		},
	)
	mockCS.SetPropertiesDefaults(false, false)
	// Unset values must be filled with the Azure Stack defaults.
	for _, pool := range mockCS.Properties.AgentPoolProfiles {
		if (*pool.PlatformFaultDomainCount) != DefaultAzureStackFaultDomainCount {
			t.Fatalf("PlatformFaultDomainCount did not have the expected value, got %d, expected %d",
				(*pool.PlatformFaultDomainCount), DefaultAzureStackFaultDomainCount)
		}
		if (*pool.AcceleratedNetworkingEnabled) != DefaultAzureStackAcceleratedNetworking {
			t.Fatalf("AcceleratedNetworkingEnabled did not have the expected value, got %t, expected %t",
				(*pool.AcceleratedNetworkingEnabled), DefaultAzureStackAcceleratedNetworking)
		}
		if (*pool.AcceleratedNetworkingEnabledWindows) != DefaultAzureStackAcceleratedNetworking {
			t.Fatalf("AcceleratedNetworkingEnabledWindows did not have the expected value, got %t, expected %t",
				(*pool.AcceleratedNetworkingEnabledWindows), DefaultAzureStackAcceleratedNetworking)
		}
	}
	// Check scenario where value is already set.
	mockCS.Properties.CustomCloudProfile = &CustomCloudProfile{
		PortalURL: "https://portal.testlocation.contoso.com",
	}
	mockCS.Properties.MasterProfile.AvailabilityProfile = ""
	mockCS.Properties.MasterProfile.Count = 1
	for _, pool := range mockCS.Properties.AgentPoolProfiles {
		pool.PlatformFaultDomainCount = &oldFaultDomainCount
	}
	mockCS.Location = location
	// Re-register the responder for the second defaulting pass.
	httpmock.DeactivateAndReset()
	httpmock.Activate()
	httpmock.RegisterResponder("GET", fmt.Sprintf("%smetadata/endpoints?api-version=1.0", fmt.Sprintf("https://management.%s.contoso.com/", location)),
		func(req *http.Request) (*http.Response, error) {
			resp := httpmock.NewStringResponse(200, `{"galleryEndpoint":"https://galleryartifacts.hosting.testlocation.contoso.com/galleryartifacts/","graphEndpoint":"https://graph.testlocation.contoso.com/","portalEndpoint":"https://portal.testlocation.contoso.com/","authentication":{"loginEndpoint":"https://adfs.testlocation.contoso.com/","audiences":["https://management.adfs.azurestack.testlocation/ce080287-be51-42e5-b99e-9de760fecae7"]}}`)
			return resp, nil
		},
	)
	mockCS.SetPropertiesDefaults(false, false)
	// A pre-set fault-domain count must survive defaulting.
	for _, pool := range mockCS.Properties.AgentPoolProfiles {
		if (*pool.PlatformFaultDomainCount) != oldFaultDomainCount {
			t.Fatalf("PlatformFaultDomainCount did not have the expected value, got %d, expected %d",
				(*pool.PlatformFaultDomainCount), oldFaultDomainCount)
		}
	}
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1184
} | [
2830,
3393,
1649,
16810,
8526,
16273,
1925,
78107,
4336,
1155,
353,
8840,
836,
8,
341,
53761,
1669,
330,
1944,
2527,
698,
61828,
58780,
13636,
2507,
1669,
220,
17,
198,
197,
322,
2271,
738,
18041,
8526,
16273,
448,
23132,
2515,
198,
773... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func Test_sqlCollectionDataQueryOptions(t *testing.T) {
var q resources.CollectionQuery
t.Log("Test 1: get an error for no collection id")
if _, err := sqlCollectionDataQueryOptions(q); err == nil {
t.Error("no error returned")
}
// Setup for remaining tests
q.CollectionID = "81f6f8c8-061c-4cb0-97e6-98b317ee5c93"
t.Log("Test 2: get an error for invalid timestamp")
q.AddedAfter = nil
q.AddedAfter = []string{"20111"}
if _, err := sqlCollectionDataQueryOptions(q); err == nil {
t.Error("no error returned")
}
q.AddedAfter = nil
t.Log("Test 3: get an error for invalid stix id")
q.STIXID = []string{"foo--1234"}
if _, err := sqlCollectionDataQueryOptions(q); err == nil {
t.Error("no error returned")
}
q.STIXID = nil
t.Log("Test 4: get an error for invalid stix type")
q.STIXType = []string{"indicatorr"}
if _, err := sqlCollectionDataQueryOptions(q); err == nil {
t.Error("no error returned")
}
q.STIXType = nil
t.Log("Test 5: get an error for invalid stix version")
q.STIXVersion = []string{"200111"}
if _, err := sqlCollectionDataQueryOptions(q); err == nil {
t.Error("no error returned")
}
q.STIXVersion = nil
} | explode_data.jsonl/19848 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 422
} | [
2830,
3393,
18063,
6482,
1043,
2859,
3798,
1155,
353,
8840,
836,
8,
341,
2405,
2804,
4963,
28629,
2859,
271,
3244,
5247,
445,
2271,
220,
16,
25,
633,
458,
1465,
369,
902,
4426,
877,
1138,
743,
8358,
1848,
1669,
5704,
6482,
1043,
2859,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 6 |
func TestNodeGetVolumeStats(t *testing.T) {
// Create server and client connection
s := newTestServer(t)
defer s.Stop()
size := int64(4 * 1024 * 1024)
used := int64(1 * 1024 * 1024)
available := size - used
id := "myvol123"
vol := &api.Volume{
AttachPath: []string{"/test"},
Id: id,
Locator: &api.VolumeLocator{
Name: id,
},
Spec: &api.VolumeSpec{
Size: uint64(size),
},
Usage: uint64(used),
Status: api.VolumeStatus_VOLUME_STATUS_UP,
}
gomock.InOrder(
s.MockDriver().
EXPECT().
Inspect([]string{id}).
Return([]*api.Volume{
vol,
}, nil).
AnyTimes(),
)
// Make a call
c := csi.NewNodeClient(s.Conn())
// Get VolumeStats - all OK
resp, err := c.NodeGetVolumeStats(
context.Background(),
&csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"})
assert.NoError(t, err)
assert.Equal(t, 1, len(resp.Usage))
assert.Equal(t, size, resp.Usage[0].Total)
assert.Equal(t, used, resp.Usage[0].Used)
assert.Equal(t, available, resp.Usage[0].Available)
assert.Equal(t, false, resp.VolumeCondition.Abnormal)
assert.Equal(t, "Volume status is up", resp.VolumeCondition.Message)
// Get VolumeStats - down
vol.Status = api.VolumeStatus_VOLUME_STATUS_DOWN
resp, err = c.NodeGetVolumeStats(
context.Background(),
&csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"})
assert.NoError(t, err)
assert.Equal(t, true, resp.VolumeCondition.Abnormal)
assert.Equal(t, "Volume status is down", resp.VolumeCondition.Message)
// Get VolumeStats - degraded
vol.Status = api.VolumeStatus_VOLUME_STATUS_DEGRADED
resp, err = c.NodeGetVolumeStats(
context.Background(),
&csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"})
assert.NoError(t, err)
assert.Equal(t, true, resp.VolumeCondition.Abnormal)
assert.Equal(t, "Volume status is degraded", resp.VolumeCondition.Message)
// Get VolumeStats - none
vol.Status = api.VolumeStatus_VOLUME_STATUS_NONE
resp, err = c.NodeGetVolumeStats(
context.Background(),
&csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"})
assert.NoError(t, err)
assert.Equal(t, true, resp.VolumeCondition.Abnormal)
assert.Equal(t, "Volume status is unknown", resp.VolumeCondition.Message)
// Get VolumeStats - not present
vol.Status = api.VolumeStatus_VOLUME_STATUS_NOT_PRESENT
resp, err = c.NodeGetVolumeStats(
context.Background(),
&csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"})
assert.NoError(t, err)
assert.Equal(t, true, resp.VolumeCondition.Abnormal)
assert.Equal(t, "Volume status is not present", resp.VolumeCondition.Message)
} | explode_data.jsonl/51455 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 979
} | [
2830,
3393,
1955,
1949,
18902,
16635,
1155,
353,
8840,
836,
8,
341,
197,
322,
4230,
3538,
323,
2943,
3633,
198,
1903,
1669,
501,
2271,
5475,
1155,
340,
16867,
274,
30213,
2822,
13832,
1669,
526,
21,
19,
7,
19,
353,
220,
16,
15,
17,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestRewriteWithUnknownLazyNoScriptImage(t *testing.T) {
description := `<img src="" data-non-candidate="https://example.org/image.jpg" alt="Image"><noscript><img src="https://example.org/fallback.jpg" alt="Fallback"></noscript>`
output := Rewriter("https://example.org/article", description, "add_dynamic_image")
expected := `<img src="" data-non-candidate="https://example.org/image.jpg" alt="Image"/><img src="https://example.org/fallback.jpg" alt="Fallback"/>`
if expected != output {
t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected)
}
} | explode_data.jsonl/21482 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 191
} | [
2830,
3393,
58465,
1247,
2354,
13790,
39766,
2753,
5910,
1906,
1155,
353,
8840,
836,
8,
341,
42407,
1669,
30586,
1892,
2286,
8407,
821,
98700,
1786,
17050,
428,
2428,
1110,
8687,
2659,
23349,
4819,
1,
4797,
428,
1906,
3088,
36391,
1228,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestIPv4Conversion(t *testing.T) {
ipWant := net.ParseIP("192.168.100.100")
uWant := uint32(3232261220)
if uGot := IPv4ToUInt(ipWant); uGot != uWant {
t.Errorf("Failed to convert IPv4 %s to correct uint32 - got %d, want %d", ipWant, uGot, uWant)
}
if ipGot := UintToIPv4(uWant); !ipGot.Equal(ipWant) {
t.Errorf("Failed to convert uint32 %d to IPv4 - got %s, want %s", uWant, ipGot, ipWant)
}
} | explode_data.jsonl/49339 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 178
} | [
2830,
3393,
58056,
19,
48237,
1155,
353,
8840,
836,
8,
341,
46531,
28823,
1669,
4179,
8937,
3298,
445,
16,
24,
17,
13,
16,
21,
23,
13,
16,
15,
15,
13,
16,
15,
15,
1138,
10676,
28823,
1669,
2622,
18,
17,
7,
18,
17,
18,
17,
17,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestMarkdownHandler(t *testing.T) {
for _, test := range tests {
req := httptest.NewRequest("POST", "/", strings.NewReader(test.input))
rr := httptest.NewRecorder()
markdownHandler(rr, req)
if got := rr.Body.String(); got != test.want {
t.Errorf("%s: got %q, want %q", test.label, got, test.want)
}
}
} | explode_data.jsonl/40656 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 130
} | [
2830,
3393,
68005,
3050,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1669,
2088,
7032,
341,
197,
24395,
1669,
54320,
70334,
75274,
445,
2946,
497,
64657,
9069,
68587,
8623,
10046,
4390,
197,
197,
634,
1669,
54320,
70334,
7121,
47023,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
// TestInitializeClientFromSecretProvider runs SecretProvider.Initialize
// against a mock token server and checks, per case, whether
// initialization errors and whether the shared/exclusive secret
// clients end up populated. Cases cover valid tokens, expired tokens,
// missing token files, and empty store configurations.
func TestInitializeClientFromSecretProvider(t *testing.T) {
	// setup
	tokenPeriod := 6
	tokenDataMap := initTokenData(tokenPeriod)
	server := mock.GetMockTokenServer(tokenDataMap)
	defer server.Close()
	serverURL, err := url.Parse(server.URL)
	require.NoErrorf(t, err, "error on parsing server url %s: %s", server.URL, err)
	host, port, _ := net.SplitHostPort(serverURL.Host)
	portNum, _ := strconv.Atoi(port)
	ctx, cancelFunc := context.WithCancel(context.Background())
	defer cancelFunc()
	lc := logger.NewMockClient()
	// Base store config pointing at the mock token server.
	testSecretStoreInfo := config.SecretStoreInfo{
		Host:                    host,
		Port:                    portNum,
		Protocol:                "http",
		ServerName:              "mockVaultServer",
		AdditionalRetryAttempts: 2,
		RetryWaitPeriod:         "100ms",
	}
	emptySecretStoreInfo := config.SecretStoreInfo{}
	tests := []struct {
		name                             string
		tokenFileForShared               string
		tokenFileForExclusive            string
		sharedSecretStore                config.SecretStoreInfo
		exclusiveSecretStore             config.SecretStoreInfo
		expectError                      bool
		expectSharedSecretClientEmpty    bool
		expectExclusiveSecretClientEmpty bool
	}{
		{
			name:                             "Create client with test-token",
			tokenFileForShared:               "client/testdata/testToken.json",
			tokenFileForExclusive:            "client/testdata/testToken.json",
			sharedSecretStore:                testSecretStoreInfo,
			exclusiveSecretStore:             testSecretStoreInfo,
			expectError:                      false,
			expectSharedSecretClientEmpty:    false,
			expectExclusiveSecretClientEmpty: false,
		},
		{
			name:                             "Create client with expired token, no TTL remaining",
			tokenFileForShared:               "client/testdata/expiredToken.json",
			tokenFileForExclusive:            "client/testdata/expiredToken.json",
			sharedSecretStore:                testSecretStoreInfo,
			exclusiveSecretStore:             testSecretStoreInfo,
			expectError:                      true,
			expectSharedSecretClientEmpty:    true,
			expectExclusiveSecretClientEmpty: true,
		},
		{
			name:                             "Create client with non-existing TokenFile path",
			tokenFileForShared:               "client/testdata/non-existing.json",
			tokenFileForExclusive:            "client/testdata/non-existing.json",
			sharedSecretStore:                testSecretStoreInfo,
			exclusiveSecretStore:             testSecretStoreInfo,
			expectError:                      true,
			expectSharedSecretClientEmpty:    true,
			expectExclusiveSecretClientEmpty: true,
		},
		{
			name:                             "New secret client with no TokenFile",
			sharedSecretStore:                testSecretStoreInfo,
			exclusiveSecretStore:             testSecretStoreInfo,
			expectError:                      true,
			expectSharedSecretClientEmpty:    true,
			expectExclusiveSecretClientEmpty: true,
		},
		{
			name:                             "empty shared secret store",
			tokenFileForExclusive:            "client/testdata/testToken.json",
			sharedSecretStore:                emptySecretStoreInfo,
			exclusiveSecretStore:             testSecretStoreInfo,
			expectError:                      false,
			expectSharedSecretClientEmpty:    true,
			expectExclusiveSecretClientEmpty: false,
		},
		{
			name:                             "empty exclusive secret store",
			tokenFileForShared:               "client/testdata/testToken.json",
			sharedSecretStore:                testSecretStoreInfo,
			exclusiveSecretStore:             emptySecretStoreInfo,
			expectError:                      false,
			expectSharedSecretClientEmpty:    false,
			expectExclusiveSecretClientEmpty: true,
		},
		{
			name:                             "both empty secret stores",
			sharedSecretStore:                emptySecretStoreInfo,
			exclusiveSecretStore:             emptySecretStoreInfo,
			expectError:                      false,
			expectSharedSecretClientEmpty:    true,
			expectExclusiveSecretClientEmpty: true,
		},
	}
	for _, test := range tests {
		// pinned local test variables to avoid scopelint warnings
		currentTest := test
		t.Run(test.name, func(t *testing.T) {
			currentTest.sharedSecretStore.TokenFile = currentTest.tokenFileForShared
			currentTest.exclusiveSecretStore.TokenFile = currentTest.tokenFileForExclusive
			config := &common.ConfigurationStruct{
				SecretStore:          currentTest.sharedSecretStore,
				SecretStoreExclusive: currentTest.exclusiveSecretStore,
			}
			secretProvider := NewSecretProvider(lc, config)
			ok := secretProvider.Initialize(ctx)
			if currentTest.expectError {
				assert.False(t, ok, "Expect error but none was received")
			} else {
				assert.True(t, ok, "Expect no error but got not ok")
			}
			// A nil client means that store was skipped or failed init.
			if currentTest.expectSharedSecretClientEmpty {
				assert.Nil(t, secretProvider.SharedSecretClient, "shared secret client should be empty")
			} else {
				assert.NotNil(t, secretProvider.SharedSecretClient, "shared secret client should NOT be empty")
			}
			if currentTest.expectExclusiveSecretClientEmpty {
				assert.Nil(t, secretProvider.ExclusiveSecretClient, "exclusive secret client should be empty")
			} else {
				assert.NotNil(t, secretProvider.ExclusiveSecretClient, "exclusive secret client should NOT be empty")
			}
		})
	}
	// wait for some time to allow renewToken to be run if any
	time.Sleep(7 * time.Second)
}
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 2427
} | [
2830,
3393,
9928,
2959,
3830,
19773,
5179,
1155,
353,
8840,
836,
8,
341,
197,
322,
6505,
198,
43947,
23750,
1669,
220,
21,
198,
43947,
1043,
2227,
1669,
2930,
3323,
1043,
13274,
23750,
692,
41057,
1669,
7860,
2234,
11571,
3323,
5475,
13... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestSession(t *testing.T) {
cfg := DefaultConfig()
gd, err := startDispatcher(cfg)
assert.NoError(t, err)
defer gd.Close()
stream, err := gd.Clients[0].Session(context.Background(), &api.SessionRequest{})
assert.NoError(t, err)
stream.CloseSend()
resp, err := stream.Recv()
assert.NoError(t, err)
assert.NotEmpty(t, resp.SessionID)
assert.Equal(t, 1, len(resp.Managers))
} | explode_data.jsonl/13857 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 152
} | [
2830,
3393,
5283,
1155,
353,
8840,
836,
8,
341,
50286,
1669,
7899,
2648,
741,
3174,
67,
11,
1848,
1669,
1191,
21839,
28272,
340,
6948,
35699,
1155,
11,
1848,
340,
16867,
32630,
10421,
2822,
44440,
11,
1848,
1669,
32630,
727,
7041,
58,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestPacketDot11DataARP(t *testing.T) {
p := gopacket.NewPacket(testPacketDot11DataARP, LinkTypeIEEE80211Radio, gopacket.Default)
if p.ErrorLayer() != nil {
t.Error("Failed to decode packet:", p.ErrorLayer().Error())
}
checkLayers(p, []gopacket.LayerType{LayerTypeRadioTap, LayerTypeDot11, LayerTypeDot11Data, LayerTypeLLC, LayerTypeSNAP, LayerTypeARP}, t)
if got, ok := p.Layer(LayerTypeARP).(*ARP); ok {
want := &ARP{
BaseLayer: BaseLayer{
Contents: []uint8{0x0, 0x1, 0x8, 0x0, 0x6, 0x4, 0x0, 0x1, 0x0, 0x19, 0xe3, 0xd3, 0x53, 0x52, 0xa9, 0xfe, 0xf7, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x43, 0x8, 0xe, 0x36},
Payload: []uint8{},
},
AddrType: 0x1,
Protocol: 0x800,
HwAddressSize: 0x6,
ProtAddressSize: 0x4,
Operation: 0x1,
SourceHwAddress: []uint8{0x0, 0x19, 0xe3, 0xd3, 0x53, 0x52},
SourceProtAddress: []uint8{0xa9, 0xfe, 0xf7, 0x0},
DstHwAddress: []uint8{0x0, 0x0, 0x0, 0x0, 0x0, 0x0},
DstProtAddress: []uint8{0x43, 0x8, 0xe, 0x36},
}
if !reflect.DeepEqual(got, want) {
t.Errorf("ARP packet processing failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", got, want)
}
}
} | explode_data.jsonl/7565 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 619
} | [
2830,
3393,
16679,
34207,
16,
16,
1043,
42793,
1155,
353,
8840,
836,
8,
341,
3223,
1669,
342,
453,
5709,
7121,
16679,
8623,
16679,
34207,
16,
16,
1043,
42793,
11,
5948,
929,
76705,
23,
15,
17,
16,
16,
28203,
11,
342,
453,
5709,
1327... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestTerragruntHookRunAllApply(t *testing.T) {
t.Parallel()
cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_ALL_PATH)
tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_ALL_PATH)
rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_ALL_PATH)
beforeOnlyPath := util.JoinPath(rootPath, "before-only")
afterOnlyPath := util.JoinPath(rootPath, "after-only")
runTerragrunt(t, fmt.Sprintf("terragrunt run-all apply -auto-approve --terragrunt-log-level debug --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath))
_, beforeErr := ioutil.ReadFile(beforeOnlyPath + "/file.out")
assert.NoError(t, beforeErr)
_, afterErr := ioutil.ReadFile(afterOnlyPath + "/file.out")
assert.NoError(t, afterErr)
} | explode_data.jsonl/10062 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 285
} | [
2830,
3393,
51402,
68305,
3850,
31679,
6727,
2403,
28497,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
1444,
60639,
51,
13886,
627,
13682,
1155,
11,
13602,
42635,
41486,
82251,
50,
16269,
7944,
340,
20082,
14359,
1820,
1669,
297... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNotEnoughQueryArgs(t *testing.T) {
session := createSession(t)
defer session.Close()
if session.cfg.ProtoVersion == 1 {
t.Skip("atomic batches not supported. Please use Cassandra >= 2.0")
}
if err := createTable(session, `CREATE TABLE gocql_test.not_enough_query_args (id int, cluster int, value int, primary key (id, cluster))`); err != nil {
t.Fatal("create table:", err)
}
_, err := session.Query(`SELECT * FROM not_enough_query_args WHERE id = ? and cluster = ?`, 1).Iter().SliceMap()
if err == nil {
t.Fatal("'`SELECT * FROM not_enough_query_args WHERE id = ? and cluster = ?`, 1' should return an error")
}
batch := session.NewBatch(UnloggedBatch)
batch.Query("INSERT INTO not_enough_query_args (id, cluster, value) VALUES (?, ?, ?)", 1, 2)
err = session.ExecuteBatch(batch)
if err == nil {
t.Fatal("'`INSERT INTO not_enough_query_args (id, cluster, value) VALUES (?, ?, ?)`, 1, 2' should return an error")
}
} | explode_data.jsonl/11144 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 336
} | [
2830,
3393,
2623,
95801,
2859,
4117,
1155,
353,
8840,
836,
8,
341,
25054,
1669,
1855,
5283,
1155,
340,
16867,
3797,
10421,
2822,
743,
3797,
30481,
7763,
983,
5637,
621,
220,
16,
341,
197,
3244,
57776,
445,
6618,
44792,
537,
7248,
13,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestRedfishRemoteDirectGetSystemNetworkError(t *testing.T) {
m := &redfishMocks.RedfishAPI{}
defer m.AssertExpectations(t)
systemID := computerSystemID
realErr := fmt.Errorf("server request timeout")
httpResp := &http.Response{StatusCode: 408}
m.On("GetSystem", context.Background(), systemID).
Times(1).
Return(redfishClient.ComputerSystem{}, httpResp, realErr)
rDCfg := getDefaultRedfishRemoteDirectObj(t, m)
err := rDCfg.DoRemoteDirect()
_, ok := err.(ErrRedfishClient)
assert.True(t, ok)
} | explode_data.jsonl/12278 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 184
} | [
2830,
3393,
6033,
18170,
24703,
16027,
1949,
2320,
12320,
1454,
1155,
353,
8840,
836,
8,
341,
2109,
1669,
609,
1151,
18170,
72577,
20943,
18170,
7082,
16094,
16867,
296,
11711,
17536,
804,
1155,
692,
40293,
915,
1669,
6366,
2320,
915,
198... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestGlobalList(t *testing.T) {
foo := DriverDef{Name: "foo"}
globalRegistry = newRegistry()
if err := Register(foo); err != nil {
t.Errorf("register returned error: %v", err)
}
if diff := cmp.Diff(List(), []DriverDef{foo}); diff != "" {
t.Errorf("list mismatch (-want +got):\n%s", diff)
}
} | explode_data.jsonl/15481 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 116
} | [
2830,
3393,
11646,
852,
1155,
353,
8840,
836,
8,
341,
197,
7975,
1669,
14577,
2620,
63121,
25,
330,
7975,
16707,
18842,
15603,
284,
501,
15603,
741,
743,
1848,
1669,
8451,
71880,
1215,
1848,
961,
2092,
341,
197,
3244,
13080,
445,
6343,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestParseVolumeWithReadOnly(t *testing.T) {
for _, path := range []string{"./foo", "/home/user"} {
volume, err := parseVolume(path + ":/target:ro")
expected := types.ServiceVolumeConfig{
Type: "bind",
Source: path,
Target: "/target",
ReadOnly: true,
}
assert.NoError(t, err)
assert.Equal(t, expected, volume)
}
} | explode_data.jsonl/70103 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 144
} | [
2830,
3393,
14463,
18902,
2354,
20914,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1815,
1669,
2088,
3056,
917,
4913,
1725,
7975,
497,
3521,
5117,
11739,
9207,
341,
197,
5195,
4661,
11,
1848,
1669,
4715,
18902,
5581,
488,
13022,
14,
5657,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestPrintReplicaSet(t *testing.T) {
tests := []struct {
replicaSet apps.ReplicaSet
options printers.GenerateOptions
expected []metav1.TableRow
}{
// Generate options empty
{
replicaSet: apps.ReplicaSet{
ObjectMeta: metav1.ObjectMeta{
Name: "test1",
CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
},
Spec: apps.ReplicaSetSpec{
Replicas: 5,
Template: api.PodTemplateSpec{
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: "fake-container1",
Image: "fake-image1",
},
{
Name: "fake-container2",
Image: "fake-image2",
},
},
},
},
Selector: &metav1.LabelSelector{MatchLabels: map[string]string{"foo": "bar"}},
},
Status: apps.ReplicaSetStatus{
Replicas: 5,
ReadyReplicas: 2,
},
},
options: printers.GenerateOptions{},
// Columns: Name, Desired, Current, Ready, Age
expected: []metav1.TableRow{{Cells: []interface{}{"test1", int64(5), int64(5), int64(2), "0s"}}},
},
// Generate options "Wide"
{
replicaSet: apps.ReplicaSet{
ObjectMeta: metav1.ObjectMeta{
Name: "test1",
CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
},
Spec: apps.ReplicaSetSpec{
Replicas: 5,
Template: api.PodTemplateSpec{
Spec: api.PodSpec{
Containers: []api.Container{
{
Name: "fake-container1",
Image: "fake-image1",
},
{
Name: "fake-container2",
Image: "fake-image2",
},
},
},
},
Selector: &metav1.LabelSelector{MatchLabels: map[string]string{"foo": "bar"}},
},
Status: apps.ReplicaSetStatus{
Replicas: 5,
ReadyReplicas: 2,
},
},
options: printers.GenerateOptions{Wide: true},
// Columns: Name, Desired, Current, Ready, Age, Containers, Images, Selector
expected: []metav1.TableRow{{Cells: []interface{}{"test1", int64(5), int64(5), int64(2), "0s", "fake-container1,fake-container2", "fake-image1,fake-image2", "foo=bar"}}},
},
}
for i, test := range tests {
rows, err := printReplicaSet(&test.replicaSet, test.options)
if err != nil {
t.Fatal(err)
}
for i := range rows {
rows[i].Object.Object = nil
}
if !reflect.DeepEqual(test.expected, rows) {
t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows))
}
}
} | explode_data.jsonl/21623 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 1212
} | [
2830,
3393,
8994,
18327,
15317,
1649,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
73731,
15317,
1649,
10500,
2817,
79,
15317,
1649,
198,
197,
35500,
262,
55953,
57582,
3798,
198,
197,
42400,
256,
3056,
4059,
402,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestGet(t *testing.T) {
storage, _, server := newStorage(t)
defer server.Terminate(t)
defer storage.Store.DestroyFunc()
test := genericregistrytest.New(t, storage.Store)
test.TestGet(validIngress())
} | explode_data.jsonl/47444 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 77
} | [
2830,
3393,
1949,
1155,
353,
8840,
836,
8,
341,
197,
16172,
11,
8358,
3538,
1669,
501,
5793,
1155,
340,
16867,
3538,
836,
261,
34016,
1155,
340,
16867,
5819,
38047,
57011,
9626,
741,
18185,
1669,
13954,
29172,
1944,
7121,
1155,
11,
5819... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 1 |
func TestDirectoryResolver_FilesByPath_absoluteRoot(t *testing.T) {
cases := []struct {
name string
relativeRoot string
input string
expected []string
}{
{
name: "should find a file from an absolute input",
relativeRoot: "./test-fixtures/",
input: "/image-symlinks/file-1.txt",
expected: []string{
"image-symlinks/file-1.txt",
},
},
{
name: "should find a file from a relative path",
relativeRoot: "./test-fixtures/",
input: "image-symlinks/file-1.txt",
expected: []string{
"image-symlinks/file-1.txt",
},
},
{
name: "should find a file from a relative path (root above cwd)",
relativeRoot: "../",
input: "sbom/sbom.go",
expected: []string{
"sbom/sbom.go",
},
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
// note: this test is all about asserting correct functionality when the given analysis path
// is an absolute path
absRoot, err := filepath.Abs(c.relativeRoot)
require.NoError(t, err)
resolver, err := newDirectoryResolver(absRoot)
assert.NoError(t, err)
refs, err := resolver.FilesByPath(c.input)
require.NoError(t, err)
assert.Len(t, refs, len(c.expected))
s := strset.New()
for _, actual := range refs {
s.Add(actual.RealPath)
}
assert.ElementsMatch(t, c.expected, s.List())
})
}
} | explode_data.jsonl/50111 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 622
} | [
2830,
3393,
9310,
18190,
1400,
3658,
1359,
1820,
50874,
8439,
1155,
353,
8840,
836,
8,
341,
1444,
2264,
1669,
3056,
1235,
341,
197,
11609,
260,
914,
198,
197,
197,
20432,
8439,
914,
198,
197,
22427,
286,
914,
198,
197,
42400,
257,
305... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestListJobs(t *testing.T) {
ctx := context.Background()
s, err := standard.New(ctx, standard.WithLogLevel(zerolog.Disabled), standard.WithMonitor(&nullmetrics.Service{}))
require.NoError(t, err)
require.NotNil(t, s)
run := 0
runFunc := func(ctx context.Context, data interface{}) {
run++
}
jobs := s.ListJobs(ctx)
require.Len(t, jobs, 0)
require.NoError(t, s.ScheduleJob(ctx, "Test", "Test job 1", time.Now().Add(time.Second), runFunc, nil))
jobs = s.ListJobs(ctx)
require.Len(t, jobs, 1)
require.Contains(t, jobs, "Test job 1")
require.NoError(t, s.ScheduleJob(ctx, "Test", "Test job 2", time.Now().Add(time.Second), runFunc, nil))
jobs = s.ListJobs(ctx)
require.Len(t, jobs, 2)
require.Contains(t, jobs, "Test job 1")
require.Contains(t, jobs, "Test job 2")
require.NoError(t, s.CancelJob(ctx, "Test job 1"))
jobs = s.ListJobs(ctx)
require.Len(t, jobs, 1)
require.Contains(t, jobs, "Test job 2")
} | explode_data.jsonl/44224 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 378
} | [
2830,
3393,
852,
40667,
1155,
353,
8840,
836,
8,
341,
20985,
1669,
2266,
19047,
741,
1903,
11,
1848,
1669,
5297,
7121,
7502,
11,
5297,
26124,
72676,
7,
7070,
1609,
89576,
701,
5297,
26124,
30098,
2099,
2921,
43262,
13860,
6257,
1171,
17... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestClient_GetCurrentWeather_404(t *testing.T) {
mux := http.NewServeMux()
mux.HandleFunc("/notfoundcity", func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNotFound)
_, err := io.WriteString(w, "404 Page Not Found")
if err != nil {
t.Fatalf("ParseWeather: unexpected error %s", err)
}
})
s := httptest.NewServer(mux)
defer s.Close()
c := getClient(t, s)
_, err := c.GetCurrentWeather("fakecity")
assert.NotNil(t, err, "Client_GetCurrentWeather: expected 404 error did not occur")
} | explode_data.jsonl/63364 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 204
} | [
2830,
3393,
2959,
13614,
5405,
28981,
62,
19,
15,
19,
1155,
353,
8840,
836,
8,
341,
2109,
2200,
1669,
1758,
7121,
60421,
44,
2200,
741,
2109,
2200,
63623,
4283,
1921,
15105,
8926,
497,
2915,
3622,
1758,
37508,
11,
435,
353,
1254,
9659... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestCaptureRestarts(t *testing.T) {
t.Parallel()
ctx := cdcContext.NewBackendContext4Test(false)
communicator := NewMockScheduleDispatcherCommunicator()
dispatcher := NewBaseScheduleDispatcher("cf-1", communicator, 1000)
dispatcher.captureStatus = map[model.CaptureID]*captureStatus{
"capture-1": {
SyncStatus: captureSyncFinished,
CheckpointTs: 1500,
ResolvedTs: 1500,
},
"capture-2": {
SyncStatus: captureSyncFinished,
CheckpointTs: 1500,
ResolvedTs: 1500,
},
}
dispatcher.tables.AddTableRecord(&util.TableRecord{
TableID: 1,
CaptureID: "capture-1",
Status: util.RunningTable,
})
dispatcher.tables.AddTableRecord(&util.TableRecord{
TableID: 2,
CaptureID: "capture-2",
Status: util.RunningTable,
})
dispatcher.tables.AddTableRecord(&util.TableRecord{
TableID: 3,
CaptureID: "capture-1",
Status: util.RunningTable,
})
dispatcher.OnAgentSyncTaskStatuses("capture-2", []model.TableID{}, []model.TableID{}, []model.TableID{})
communicator.On("DispatchTable", mock.Anything, "cf-1", model.TableID(2), "capture-2", false).
Return(true, nil)
checkpointTs, resolvedTs, err := dispatcher.Tick(ctx, 1500, []model.TableID{1, 2, 3}, defaultMockCaptureInfos)
require.NoError(t, err)
require.Equal(t, CheckpointCannotProceed, checkpointTs)
require.Equal(t, CheckpointCannotProceed, resolvedTs)
communicator.AssertExpectations(t)
} | explode_data.jsonl/28504 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 554
} | [
2830,
3393,
27429,
12416,
7038,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
2822,
20985,
1669,
272,
7628,
1972,
7121,
29699,
1972,
19,
2271,
3576,
340,
197,
25579,
850,
1669,
1532,
11571,
32210,
21839,
80923,
850,
741,
60072,
261,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNetworkNumberAndMask(t *testing.T) {
for _, tt := range networkNumberAndMaskTests {
ip, m := networkNumberAndMask(&tt.in)
out := &IPNet{IP: ip, Mask: m}
if !reflect.DeepEqual(&tt.out, out) {
t.Errorf("networkNumberAndMask(%v) = %v, want %v", tt.in, out, &tt.out)
}
}
} | explode_data.jsonl/14226 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 127
} | [
2830,
3393,
12320,
2833,
3036,
12686,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
17853,
1669,
2088,
3922,
2833,
3036,
12686,
18200,
341,
197,
46531,
11,
296,
1669,
3922,
2833,
3036,
12686,
2099,
5566,
1858,
340,
197,
13967,
1669,
609,
32... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestStoryServiceAddStory(t *testing.T) {
testCases := map[string]struct {
store func() store.StoriesStore
expectedError error
}{
"test add story success": {
store: func() store.StoriesStore {
mst := &store.MockStoriesStore{}
mst.On("AddStory", mock.AnythingOfType("*model.Story")).Return("a45c9dac-56dc-4771-a3f4-f10ad30a20a5", nil)
return mst
},
expectedError: nil,
},
"test add story failure when dependency fails": {
store: func() store.StoriesStore {
mst := &store.MockStoriesStore{}
mst.On("AddStory", mock.AnythingOfType("*model.Story")).Return("", liberr.WithArgs(liberr.SeverityError, errors.New("failed to insert story")))
return mst
},
expectedError: errors.New("failed to insert story"),
},
}
for name, testCase := range testCases {
t.Run(name, func(t *testing.T) {
svc := service.NewStoriesService(testCase.store())
str, err := model.NewStoryBuilder().
SetTitle(100, "title").
SetBody(100, "test body").
Build()
require.NoError(t, err)
err = svc.AddStory(str)
if testCase.expectedError != nil {
assert.Equal(t, testCase.expectedError.Error(), err.Error())
} else {
assert.Nil(t, err)
}
})
}
} | explode_data.jsonl/44019 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 508
} | [
2830,
3393,
17938,
1860,
2212,
17938,
1155,
353,
8840,
836,
8,
341,
18185,
37302,
1669,
2415,
14032,
60,
1235,
341,
197,
57279,
260,
2915,
368,
3553,
7758,
2433,
6093,
198,
197,
42400,
1454,
1465,
198,
197,
59403,
197,
197,
1,
1944,
9... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestReadyOverride(t *testing.T) {
ch := make(chan bool, 1)
handler := readyHandler(ch)
verifyState(t, handler, "/ready/true", http.StatusOK, http.MethodPost)
select {
case <-ch:
assert.Fail(t, "Same state override should not happen")
default:
}
verifyState(t, handler, "/ready/false", http.StatusOK, http.MethodPost)
change := <-ch
assert.False(t, change)
verifyState(t, handler, "/ready/false", http.StatusOK, http.MethodPost)
select {
case <-ch:
assert.Fail(t, "Same state override should not happen")
default:
}
} | explode_data.jsonl/19082 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 193
} | [
2830,
3393,
19202,
2177,
1155,
353,
8840,
836,
8,
341,
23049,
1669,
1281,
35190,
1807,
11,
220,
16,
340,
53326,
1669,
5527,
3050,
7520,
692,
93587,
1397,
1155,
11,
7013,
11,
3521,
2307,
14,
1866,
497,
1758,
52989,
11,
1758,
20798,
413... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func Test_HextoRGB(t *testing.T) {
tests := []struct {
name string
hex string
arg string
expectedOutput string
expectedError bool
cs *ColorScheme
}{
{
name: "Colored red enabled color",
hex: "fc0303",
arg: "red",
expectedOutput: "\033[38;2;252;3;3mred\033[0m",
cs: NewColorScheme(true, true),
},
{
name: "Failed colored red enabled color",
hex: "fc0303",
arg: "red",
expectedOutput: "\033[38;2;252;2;3mred\033[0m",
expectedError: true,
cs: NewColorScheme(true, true),
},
{
name: "Colored red disabled color",
hex: "fc0303",
arg: "red",
expectedOutput: "red",
cs: NewColorScheme(false, false),
},
{
name: "Failed colored red disabled color",
hex: "fc0303",
arg: "red",
expectedOutput: "\033[38;2;252;3;3mred\033[0m",
expectedError: true,
cs: NewColorScheme(false, false),
},
}
for _, tt := range tests {
output := tt.cs.HexToRGB(tt.hex, tt.arg)
if tt.expectedError {
assert.NotEqual(t, tt.expectedOutput, output)
} else {
assert.Equal(t, tt.expectedOutput, output)
}
}
} | explode_data.jsonl/42288 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 720
} | [
2830,
3393,
2039,
427,
78,
18184,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
1843,
914,
198,
197,
9598,
327,
310,
914,
198,
197,
47903,
310,
914,
198,
197,
42400,
5097,
914,
198,
197,
42400,
1454,
220,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestReducible(t *testing.T) {
var count = []int{1, 2, 3, 6, 9, 18, 30, 56, 99, 186} // oeis.org/A1037
for i, want := range count {
n := 0
for p := 1 << uint(i+2); p < 1<<uint(i+3); p++ {
if !reducible(p) {
n++
}
}
if n != want {
t.Errorf("#reducible(%d-bit) = %d, want %d", i+2, n, want)
}
}
} | explode_data.jsonl/18054 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 175
} | [
2830,
3393,
16609,
1238,
1155,
353,
8840,
836,
8,
341,
2405,
1760,
284,
3056,
396,
90,
16,
11,
220,
17,
11,
220,
18,
11,
220,
21,
11,
220,
24,
11,
220,
16,
23,
11,
220,
18,
15,
11,
220,
20,
21,
11,
220,
24,
24,
11,
220,
16... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestIntegration_BucketInCopyAttrs(t *testing.T) {
// Confirm that if bucket is included in the object attributes of a rewrite
// call, but object name and content-type aren't, then we get an error. See
// the comment in Copier.Run.
ctx := context.Background()
client, bucket := testConfig(ctx, t)
defer client.Close()
bkt := client.Bucket(bucket)
obj := bkt.Object("bucketInCopyAttrs")
if err := writeObject(ctx, obj, "", []byte("foo")); err != nil {
t.Fatal(err)
}
copier := obj.CopierFrom(obj)
rawObject := copier.ObjectAttrs.toRawObject(bucket)
_, err := copier.callRewrite(ctx, rawObject)
if err == nil {
t.Errorf("got nil, want error")
}
} | explode_data.jsonl/8908 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 240
} | [
2830,
3393,
52464,
1668,
11152,
641,
12106,
53671,
1155,
353,
8840,
836,
8,
341,
197,
322,
33563,
429,
421,
15621,
374,
5230,
304,
279,
1633,
8201,
315,
264,
18130,
198,
197,
322,
1618,
11,
714,
1633,
829,
323,
2213,
10604,
7629,
944,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestTermsAggregationWithMultipleSubAggregation(t *testing.T) {
subAgg1 := NewAvgAggregation().Field("height")
subAgg2 := NewAvgAggregation().Field("width")
agg := NewTermsAggregation().Field("gender").Size(10).
OrderByAggregation("avg_height", false)
agg = agg.SubAggregation("avg_height", subAgg1)
agg = agg.SubAggregation("avg_width", subAgg2)
data, err := json.Marshal(agg.Source())
if err != nil {
t.Fatalf("marshaling to JSON failed: %v", err)
}
got := string(data)
expected := `{"aggregations":{"avg_height":{"avg":{"field":"height"}},"avg_width":{"avg":{"field":"width"}}},"terms":{"field":"gender","order":{"avg_height":"desc"},"size":10}}`
if got != expected {
t.Errorf("expected\n%s\n,got:\n%s", expected, got)
}
} | explode_data.jsonl/4226 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 268
} | [
2830,
3393,
43128,
9042,
34442,
2354,
32089,
3136,
9042,
34442,
1155,
353,
8840,
836,
8,
341,
28624,
9042,
70,
16,
1669,
1532,
39447,
9042,
34442,
1005,
1877,
445,
2563,
1138,
28624,
9042,
70,
17,
1669,
1532,
39447,
9042,
34442,
1005,
1... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestParseAlterStatement(t *testing.T) {
statement := "add column t int, engine=innodb"
parser := NewAlterTableParser()
err := parser.ParseAlterStatement(statement)
test.S(t).ExpectNil(err)
test.S(t).ExpectEquals(parser.alterStatementOptions, statement)
test.S(t).ExpectFalse(parser.HasNonTrivialRenames())
} | explode_data.jsonl/71259 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 115
} | [
2830,
3393,
14463,
74290,
8636,
1155,
353,
8840,
836,
8,
341,
89566,
1669,
330,
718,
3250,
259,
526,
11,
4712,
28,
6130,
16853,
698,
55804,
1669,
1532,
74290,
2556,
6570,
741,
9859,
1669,
6729,
8937,
74290,
8636,
60971,
340,
18185,
808,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestNewMetricsExporter_ProcessMetricsError(t *testing.T) {
want := errors.New("my_error")
me, err := NewMetricsProcessor(testCfg, exportertest.NewNopMetricsExporter(), newTestMProcessor(want))
require.NoError(t, err)
assert.Equal(t, want, me.ConsumeMetrics(context.Background(), testdata.GenerateMetricsEmpty()))
} | explode_data.jsonl/4016 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 113
} | [
2830,
3393,
3564,
27328,
88025,
70241,
27328,
1454,
1155,
353,
8840,
836,
8,
341,
50780,
1669,
5975,
7121,
445,
2408,
4096,
1138,
49294,
11,
1848,
1669,
1532,
27328,
22946,
8623,
42467,
11,
7485,
83386,
7121,
45,
453,
27328,
88025,
1507,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFeatureProjectionField(t *testing.T) {
t.Run("should generate featureProjection argument properly", func(t *testing.T) {
// given
classname := "Class"
// when
featureProjection := additionalFeatureProjectionField(classname)
// then
// the built graphQL field needs to support this structure:
// Args: {
// algorithm: "a",
// dimensions: 1,
// learningRate: 2,
// iterations: 3,
// perplexity: 4
// }
// Type: {
// vector: [0, 1]
// }
assert.NotNil(t, featureProjection)
assert.Equal(t, "ClassAdditionalFeatureProjection", featureProjection.Type.Name())
assert.NotNil(t, featureProjection.Args)
assert.Equal(t, 5, len(featureProjection.Args))
assert.NotNil(t, featureProjection.Args["algorithm"])
assert.NotNil(t, featureProjection.Args["dimensions"])
assert.NotNil(t, featureProjection.Args["learningRate"])
assert.NotNil(t, featureProjection.Args["iterations"])
assert.NotNil(t, featureProjection.Args["perplexity"])
featureProjectionObject, featureProjectionObjectOK := featureProjection.Type.(*graphql.Object)
assert.True(t, featureProjectionObjectOK)
assert.Equal(t, 1, len(featureProjectionObject.Fields()))
assert.NotNil(t, featureProjectionObject.Fields()["vector"])
})
} | explode_data.jsonl/40505 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 457
} | [
2830,
3393,
13859,
46321,
1877,
1155,
353,
8840,
836,
8,
341,
3244,
16708,
445,
5445,
6923,
4565,
46321,
5693,
10277,
497,
2915,
1155,
353,
8840,
836,
8,
341,
197,
197,
322,
2661,
198,
197,
15487,
606,
1669,
330,
1957,
1837,
197,
197,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestFetchOpenOrders(t *testing.T) {
t.Parallel()
if !areTestAPIKeysSet() {
t.Skip("API keys required but not set, skipping test")
}
_, err := c.FetchOpenSpotOrders(spotTestPair)
if err != nil {
t.Error(err)
}
} | explode_data.jsonl/42928 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 94
} | [
2830,
3393,
20714,
5002,
24898,
1155,
353,
8840,
836,
8,
341,
3244,
41288,
7957,
741,
743,
753,
546,
2271,
7082,
8850,
1649,
368,
341,
197,
3244,
57776,
445,
7082,
6894,
2567,
714,
537,
738,
11,
42659,
1273,
1138,
197,
532,
197,
6878,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestCBALogin(t *testing.T) {
th := Setup().InitBasic()
defer th.TearDown()
Client := th.Client
Client.Logout()
th.App.SetLicense(model.NewTestLicense("saml"))
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ExperimentalSettings.ClientSideCertEnable = true
*cfg.ExperimentalSettings.ClientSideCertCheck = model.CLIENT_SIDE_CERT_CHECK_PRIMARY_AUTH
})
user, resp := Client.Login(th.BasicUser.Email, th.BasicUser.Password)
if resp.Error.StatusCode != 400 && user == nil {
t.Fatal("Should have failed because it's missing the cert header")
}
Client.HttpHeader["X-SSL-Client-Cert"] = "valid_cert_fake"
user, resp = Client.Login(th.BasicUser.Email, th.BasicUser.Password)
if resp.Error.StatusCode != 400 && user == nil {
t.Fatal("Should have failed because it's missing the cert subject")
}
Client.HttpHeader["X-SSL-Client-Cert-Subject-DN"] = "C=US, ST=Maryland, L=Pasadena, O=Brent Baccala, OU=FreeSoft, CN=www.freesoft.org/emailAddress=mis_match" + th.BasicUser.Email
user, resp = Client.Login(th.BasicUser.Email, "")
if resp.Error.StatusCode != 400 && user == nil {
t.Fatal("Should have failed because the emails mismatch")
}
Client.HttpHeader["X-SSL-Client-Cert-Subject-DN"] = "C=US, ST=Maryland, L=Pasadena, O=Brent Baccala, OU=FreeSoft, CN=www.freesoft.org/emailAddress=" + th.BasicUser.Email
user, _ = Client.Login(th.BasicUser.Email, "")
if !(user != nil && user.Email == th.BasicUser.Email) {
t.Fatal("Should have been able to login")
}
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ExperimentalSettings.ClientSideCertEnable = true
*cfg.ExperimentalSettings.ClientSideCertCheck = model.CLIENT_SIDE_CERT_CHECK_SECONDARY_AUTH
})
Client.HttpHeader["X-SSL-Client-Cert-Subject-DN"] = "C=US, ST=Maryland, L=Pasadena, O=Brent Baccala, OU=FreeSoft, CN=www.freesoft.org/emailAddress=" + th.BasicUser.Email
user, _ = Client.Login(th.BasicUser.Email, "")
if resp.Error.StatusCode != 400 && user == nil {
t.Fatal("Should have failed because password is required")
}
Client.HttpHeader["X-SSL-Client-Cert-Subject-DN"] = "C=US, ST=Maryland, L=Pasadena, O=Brent Baccala, OU=FreeSoft, CN=www.freesoft.org/emailAddress=" + th.BasicUser.Email
user, _ = Client.Login(th.BasicUser.Email, th.BasicUser.Password)
if !(user != nil && user.Email == th.BasicUser.Email) {
t.Fatal("Should have been able to login")
}
} | explode_data.jsonl/21556 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 865
} | [
2830,
3393,
12979,
969,
538,
258,
1155,
353,
8840,
836,
8,
341,
70479,
1669,
18626,
1005,
3803,
15944,
741,
16867,
270,
836,
682,
4454,
741,
71724,
1669,
270,
11716,
198,
71724,
5247,
411,
2822,
70479,
5105,
4202,
9827,
7635,
7121,
2271... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestDeliverServiceDisconnectReconnect(t *testing.T) {
// Scenario: Launch an ordering service node and let the client pull some blocks.
// Stop ordering service, wait for while - simulate disconnect and restart it back.
// Wait for some time, without sending blocks - simulate recv wait on empty channel.
// Repeat stop/start sequence multiple times, to make sure total retry time will pass
// value returned by getReConnectTotalTimeThreshold - in test it set to 2 seconds
// (0.5s + 1s + 2s + 4s) > 2s.
// Send new block and check that delivery client got it.
// So, we can see that waiting on recv in empty channel do reset total time spend in reconnection.
viper.Set("peer.deliveryclient.reconnectTotalTimeThreshold", time.Second*2)
defer viper.Reset()
defer ensureNoGoroutineLeak(t)()
osn := mocks.NewOrderer(5614, t)
time.Sleep(time.Second)
gossipServiceAdapter := &mocks.MockGossipServiceAdapter{GossipBlockDisseminations: make(chan uint64)}
service, err := NewDeliverService(&Config{
Endpoints: []string{"localhost:5614"},
Gossip: gossipServiceAdapter,
CryptoSvc: &mockMCS{},
ABCFactory: DefaultABCFactory,
ConnFactory: DefaultConnectionFactory,
})
assert.NoError(t, err)
li := &mocks.MockLedgerInfo{Height: uint64(100)}
osn.SetNextExpectedSeek(uint64(100))
err = service.StartDeliverForChannel("TEST_CHAINID", li, func() {})
assert.NoError(t, err, "can't start delivery")
// Check that delivery service requests blocks in order
go osn.SendBlock(uint64(100))
assertBlockDissemination(100, gossipServiceAdapter.GossipBlockDisseminations, t)
go osn.SendBlock(uint64(101))
assertBlockDissemination(101, gossipServiceAdapter.GossipBlockDisseminations, t)
atomic.StoreUint64(&li.Height, uint64(102))
for i := 0; i < 5; i += 1 {
// Shutdown orderer, simulate network disconnect
osn.Shutdown()
// Now wait for a disconnect to be discovered
assert.True(t, waitForConnectionCount(osn, 0), "deliverService can't disconnect from orderer")
// Recreate orderer, simulating network is back
osn = mocks.NewOrderer(5614, t)
osn.SetNextExpectedSeek(atomic.LoadUint64(&li.Height))
// Now wait for a while, to client connect back and simulate empty channel
assert.True(t, waitForConnectionCount(osn, 1), "deliverService can't reconnect to orderer")
}
// Send a block from orderer
go osn.SendBlock(uint64(102))
// Ensure it is received
assertBlockDissemination(102, gossipServiceAdapter.GossipBlockDisseminations, t)
service.Stop()
osn.Shutdown()
} | explode_data.jsonl/10584 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 821
} | [
2830,
3393,
16532,
1524,
1860,
60651,
693,
6459,
1155,
353,
8840,
836,
8,
341,
197,
322,
58663,
25,
23205,
458,
21391,
2473,
2436,
323,
1077,
279,
2943,
6815,
1045,
10010,
624,
197,
322,
14215,
21391,
2473,
11,
3783,
369,
1393,
481,
3... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestKpt_Cleanup(t *testing.T) {
tests := []struct {
description string
applyDir string
globalFlags []string
commands util.Command
shouldErr bool
}{
{
description: "invalid user specified applyDir",
applyDir: "invalid_path",
shouldErr: true,
},
{
description: "valid user specified applyDir w/o template resource",
applyDir: "valid_path",
commands: testutil.CmdRunErr("kpt live destroy valid_path --context kubecontext --namespace testNamespace", errors.New("BUG")),
shouldErr: true,
},
{
description: "valid user specified applyDir w/ template resource (emulated)",
applyDir: "valid_path",
commands: testutil.CmdRun("kpt live destroy valid_path --context kubecontext --namespace testNamespace"),
},
{
description: "unspecified applyDir",
commands: testutil.
CmdRunOut("kpt live init .kpt-hydrated --context kubecontext --namespace testNamespace", "").
AndRun("kpt live destroy .kpt-hydrated --context kubecontext --namespace testNamespace"),
},
}
for _, test := range tests {
testutil.Run(t, test.description, func(t *testutil.T) {
t.Override(&util.DefaultExecCommand, test.commands)
t.NewTempDir().Chdir()
if test.applyDir == "valid_path" {
// 0755 is a permission setting where the owner can read, write, and execute.
// Others can read and execute but not modify the directory.
t.CheckNoError(os.Mkdir(test.applyDir, 0755))
}
k := NewDeployer(&kptConfig{
workingDir: ".",
}, nil, deploy.NoopComponentProvider, &latestV1.KptDeploy{
Live: latestV1.KptLive{
Apply: latestV1.KptApplyInventory{
Dir: test.applyDir,
},
},
})
err := k.Cleanup(context.Background(), ioutil.Discard)
t.CheckError(test.shouldErr, err)
})
}
} | explode_data.jsonl/9524 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 707
} | [
2830,
3393,
42,
417,
920,
60639,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
42407,
914,
198,
197,
197,
10280,
6184,
262,
914,
198,
197,
18842,
9195,
3056,
917,
198,
197,
197,
24270,
262,
4094,
12714,
198,
197,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestHotFix001_GetKey(t *testing.T) {
hf001 := NewHotFix001("", "")
if hf001.GetKey() != "001" {
t.Fatal("GetKey() != 001")
}
} | explode_data.jsonl/65378 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 65
} | [
2830,
3393,
20170,
25958,
15,
15,
16,
13614,
1592,
1155,
353,
8840,
836,
8,
341,
9598,
69,
15,
15,
16,
1669,
1532,
20170,
25958,
15,
15,
16,
19814,
14676,
743,
43242,
15,
15,
16,
51723,
368,
961,
330,
15,
15,
16,
1,
341,
197,
32... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
] | 2 |
func TestHistogramDataPoint_Count(t *testing.T) {
ms := NewHistogramDataPoint()
assert.EqualValues(t, uint64(0), ms.Count())
testValCount := uint64(17)
ms.SetCount(testValCount)
assert.EqualValues(t, testValCount, ms.Count())
} | explode_data.jsonl/32737 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 87
} | [
2830,
3393,
77210,
1043,
2609,
50775,
1155,
353,
8840,
836,
8,
341,
47691,
1669,
1532,
77210,
1043,
2609,
741,
6948,
12808,
6227,
1155,
11,
2622,
21,
19,
7,
15,
701,
9829,
6134,
2398,
18185,
2208,
2507,
1669,
2622,
21,
19,
7,
16,
22... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestMilestone_APIFormat(t *testing.T) {
milestone := &Milestone{
ID: 3,
RepoID: 4,
Name: "milestoneName",
Content: "milestoneContent",
IsClosed: false,
NumOpenIssues: 5,
NumClosedIssues: 6,
DeadlineUnix: util.TimeStamp(time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC).Unix()),
}
assert.Equal(t, api.Milestone{
ID: milestone.ID,
State: api.StateOpen,
Title: milestone.Name,
Description: milestone.Content,
OpenIssues: milestone.NumOpenIssues,
ClosedIssues: milestone.NumClosedIssues,
Deadline: milestone.DeadlineUnix.AsTimePtr(),
}, *milestone.APIFormat())
} | explode_data.jsonl/21819 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 330
} | [
2830,
3393,
44,
93028,
11415,
4061,
1155,
353,
8840,
836,
8,
341,
2109,
93028,
1669,
609,
44,
93028,
515,
197,
29580,
25,
1060,
220,
18,
345,
197,
197,
25243,
915,
25,
688,
220,
19,
345,
197,
21297,
25,
310,
330,
66597,
10812,
675,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestSubmitBlock(t *testing.T) {
db, err := newTestChainDb()
if err != nil {
t.Errorf("%v", err)
return
}
defer db.Close()
blks, err := loadTopNBlk(8)
assert.Nil(t, err)
for i := 1; i < 8; i++ {
err = db.SubmitBlock(blks[i])
assert.Nil(t, err)
blkHash := blks[i].Hash()
preCommit(db, blkHash)
err = db.Commit(*blkHash)
assert.Nil(t, err)
_, height, err := db.NewestSha()
assert.Nil(t, err)
assert.Equal(t, blks[i].Height(), height)
}
} | explode_data.jsonl/12272 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 230
} | [
2830,
3393,
8890,
4713,
1155,
353,
8840,
836,
8,
341,
20939,
11,
1848,
1669,
501,
2271,
18837,
7994,
741,
743,
1848,
961,
2092,
341,
197,
3244,
13080,
4430,
85,
497,
1848,
340,
197,
853,
198,
197,
532,
16867,
2927,
10421,
2822,
96421,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestString_Equal(t *testing.T) {
type args struct {
x String
}
tests := []struct {
name string
e String
args args
want bool
}{
{name: "True", e: String{"abc"}, args: args{x: String{"abc"}}, want: true},
{name: "False", e: String{"abc"}, args: args{x: String{"def"}}, want: false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := tt.e.Equal(tt.args.x); got != tt.want {
t.Errorf("String.Equal() = %v, want %v", got, tt.want)
}
})
}
} | explode_data.jsonl/34774 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 227
} | [
2830,
3393,
703,
2089,
1751,
1155,
353,
8840,
836,
8,
341,
13158,
2827,
2036,
341,
197,
10225,
923,
198,
197,
532,
78216,
1669,
3056,
1235,
341,
197,
11609,
914,
198,
197,
7727,
262,
923,
198,
197,
31215,
2827,
198,
197,
50780,
1807,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestLex(t *testing.T) {
tests := []struct {
name string
text string
tokens string
wantedErr string
}{
{
name: "empty",
text: "",
tokens: "",
},
{
name: "only spaces",
text: " \n\t\r\n ",
tokens: "",
},
{
name: "nil",
text: "",
tokens: "",
},
{
name: "single line, mixed tokens, ends with comment",
text: "bla foo 7+ 6 ;some comment",
tokens: "1,1,bla,identifier~1,5,foo,identifier~1,9,7,literal~1,10,+,operator~1,12,6,literal~1,14,;some comment,comment",
},
{
name: "line starts with spaces, mixed tokens, ends with spaces",
text: " blabla 4 \t[:foo 56 \n",
tokens: "1,3,blabla,identifier~1,10,4,literal~1,13,[,separator~1,14,:foo,identifier~1,19,56,literal",
},
{
name: "multiline",
text: ";foo proc\ndo foo :a\n fd 50\nend",
tokens: "1,1,;foo proc,comment~2,1,do,identifier~2,4,foo,identifier~2,8,:a,identifier~3,3,fd,identifier~3,6,50,literal~4,1,end,identifier",
},
{
name: "arithmetic expression",
text: " 1+12 /5 - :x * :Y ",
tokens: "1,3,1,literal~1,4,+,operator~1,5,12,literal~1,8,/,operator~1,9,5,literal~1,11,-,operator~1,13,:x,identifier~1,16,*,operator~1,18,:Y,identifier",
},
{
name: "error1",
text: " 9a",
wantedErr: "Lexer failed at line 1, column 3: 9a",
},
{
name: "error2",
text: " :foo:bar",
wantedErr: "Lexer failed at line 1, column 2: :foo:bar",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
tokens, err := lexer.Lex(strings.NewReader(test.text))
if test.wantedErr == "" {
assert.NoError(t, err, "unexpected lexer error")
assert.Equal(t, test.tokens, tokensAsString(tokens), "unexpected tokens result")
} else if err == nil {
assert.Fail(t, fmt.Sprintf("expected an error: %s", test.wantedErr))
} else {
assert.Equal(t, test.wantedErr, err.Error(), "unexpected error message")
}
})
}
} | explode_data.jsonl/22492 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 997
} | [
2830,
3393,
47778,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
1235,
341,
197,
11609,
414,
914,
198,
197,
15425,
414,
914,
198,
197,
3244,
9713,
262,
914,
198,
197,
6692,
7566,
7747,
914,
198,
197,
59403,
197,
197,
515,
298,
11... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 3 |
func TestGrpcClient(t *testing.T) {
conn, err := grpc.Dial("localhost:5656", grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
t.Error(err)
}
defer conn.Close()
file, err := os.OpenFile("render.png", os.O_CREATE|os.O_TRUNC|os.O_WRONLY, os.ModePerm)
if err != nil {
t.Error(err)
}
defer file.Close()
c := pb.NewRendererClient(conn)
render, err := c.RequestRender(context.Background(), &pb.RenderRequest{
Objects: []*pb.Object{
{
Shape: "cube",
Position: &pb.Vector{X: 0, Y: 0, Z: 0},
Scale: &pb.Vector{X: 1, Y: 1, Z: 1},
}, {
Shape: "plane",
Position: &pb.Vector{X: 0, Y: 0, Z: -0.6},
Scale: &pb.Vector{X: 2, Y: 2, Z: 1},
},
},
})
if err != nil {
t.Error(err)
}
_, err = file.Write(render.ImageBytes)
if err != nil {
t.Error(err)
}
} | explode_data.jsonl/21709 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 404
} | [
2830,
3393,
6464,
3992,
2959,
1155,
353,
8840,
836,
8,
341,
32917,
11,
1848,
1669,
47900,
98462,
445,
8301,
25,
20,
21,
20,
21,
497,
47900,
26124,
27560,
27025,
5900,
25132,
7121,
27025,
12145,
743,
1848,
961,
2092,
341,
197,
3244,
61... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 5 |
func TestWalkTree(t *testing.T) {
mockFs, countFiles := setupPopulatedFilesystem()
var discoveredFiles int
WalkTree(mockFs, func(path string, info os.FileInfo, err error) error {
if !info.IsDir() {
discoveredFiles++
}
return nil
})
if discoveredFiles != countFiles {
t.Errorf("discovered files count %d does not equal expected files count %d", discoveredFiles, countFiles)
}
} | explode_data.jsonl/3274 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 132
} | [
2830,
3393,
48849,
6533,
1155,
353,
8840,
836,
8,
341,
77333,
48300,
11,
1760,
10809,
1669,
6505,
11598,
7757,
1703,
8948,
741,
2405,
11105,
10809,
526,
198,
17300,
1692,
6533,
30389,
48300,
11,
2915,
5581,
914,
11,
3546,
2643,
8576,
17... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestParseUnrepresentableTimesFail(t *testing.T) {
for _, s := range []string{
"25:12",
"3:87",
"00:00pm",
"13:00am",
"13:00pm",
} {
tm, err := NewTimeFromString(s)
require.Nil(t, tm, s)
assert.EqualError(t, err, "INVALID_TIME", s)
}
} | explode_data.jsonl/4307 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 131
} | [
2830,
3393,
14463,
1806,
35269,
480,
18889,
19524,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
274,
1669,
2088,
3056,
917,
515,
197,
197,
1,
17,
20,
25,
16,
17,
756,
197,
197,
1,
18,
25,
23,
22,
756,
197,
197,
1,
15,
15,
25,
15,... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestParseInvalidMessages(t *testing.T) {
tests := []string{
``,
`89`,
`89 <45>`,
`89 <45>1`,
`89 <45>1 2016-10-15T08:59:08.723822+00:00`,
`89 <45>1 2016-10-15T08:59:08.723822+00:00 host`,
`89 <45>1 2016-10-15T08:59:08.723822+00:00 host heroku`,
`89 <45>1 2016-10-15T08:59:08.723822+00:00 host heroku web.1`,
`89 <45>1 2016-10-15T08:59:08.723822+00:00 host heroku web.1 -`,
`<45>1 2016-10-15T08:59:08.723822+00:00 host heroku web.1 - - State changed from up to down`,
}
for _, test := range tests {
entry, err := Parse([]byte(test))
assert.Error(t, err)
assert.Nil(t, entry)
}
} | explode_data.jsonl/34025 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 312
} | [
2830,
3393,
14463,
7928,
15820,
1155,
353,
8840,
836,
8,
341,
78216,
1669,
3056,
917,
515,
197,
197,
63,
12892,
197,
197,
63,
23,
24,
12892,
197,
197,
63,
23,
24,
366,
19,
20,
29,
12892,
197,
197,
63,
23,
24,
366,
19,
20,
29,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 2 |
func TestLiveUpdateRunTriggerExec(t *testing.T) {
f := newBDFixture(t, k8s.EnvGKE, container.RuntimeDocker)
defer f.TearDown()
runs := []model.LiveUpdateRunStep{
model.LiveUpdateRunStep{Command: model.ToUnixCmd("echo hello")},
model.LiveUpdateRunStep{Command: model.ToUnixCmd("echo a"), Triggers: f.NewPathSet("a.txt")}, // matches changed file
model.LiveUpdateRunStep{Command: model.ToUnixCmd("echo b"), Triggers: f.NewPathSet("b.txt")}, // does NOT match changed file
}
lu := assembleLiveUpdate(SanchoSyncSteps(f), runs, false, nil, f)
tCase := testCase{
manifest: manifestbuilder.New(f, "sancho").
WithK8sYAML(SanchoYAML).
WithImageTarget(NewSanchoDockerBuildImageTarget(f)).
WithLiveUpdate(lu).
Build(),
changedFiles: []string{"a.txt"},
expectDockerBuildCount: 0,
expectDockerPushCount: 0,
expectSyncletUpdateContainerCount: 0,
expectDockerCopyCount: 0,
expectDockerExecCount: 0, // one run's triggers don't match -- should only exec the other two.
expectDockerRestartCount: 0,
expectK8sExecCount: 3, // one copy, two runs (third run's triggers don't match so don't exec it)
}
runTestCase(t, f, tCase)
} | explode_data.jsonl/35170 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 522
} | [
2830,
3393,
20324,
4289,
6727,
17939,
10216,
1155,
353,
8840,
836,
8,
341,
1166,
1669,
501,
33,
5262,
12735,
1155,
11,
595,
23,
82,
81214,
38,
3390,
11,
5476,
16706,
35,
13659,
340,
16867,
282,
836,
682,
4454,
2822,
197,
53310,
1669,
... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestHasTemplateFields(t *testing.T) {
emptyNodes := []*client.Node{}
node0 := createTestNode("foo")
node1 := createTestNode("check_names")
node2 := createTestNode("init_configs")
node3 := createTestNode("instances")
res := hasTemplateFields(emptyNodes)
assert.False(t, res)
tooFewNodes := []*client.Node{node0, node1}
res = hasTemplateFields(tooFewNodes)
assert.False(t, res)
invalidNodes := []*client.Node{node0, node1, node2}
res = hasTemplateFields(invalidNodes)
assert.False(t, res)
validNodes := []*client.Node{node1, node2, node3}
res = hasTemplateFields(validNodes)
assert.True(t, res)
} | explode_data.jsonl/53942 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 235
} | [
2830,
3393,
10281,
7275,
8941,
1155,
353,
8840,
836,
8,
341,
197,
3194,
12288,
1669,
29838,
2972,
21714,
16094,
20831,
15,
1669,
1855,
2271,
1955,
445,
7975,
1138,
20831,
16,
1669,
1855,
2271,
1955,
445,
2028,
9187,
1138,
20831,
17,
166... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestParseDialTarget(t *testing.T) {
for _, test := range []struct {
target, wantNet, wantAddr string
}{
{"unix:etcd:0", "unix", "etcd:0"},
{"unix:///tmp/unix-3", "unix", "/tmp/unix-3"},
{"unix://domain", "unix", "domain"},
{"unix://etcd:0", "unix", "etcd:0"},
{"unix:///etcd:0", "unix", "/etcd:0"},
{"passthrough://unix://domain", "tcp", "passthrough://unix://domain"},
{"https://google.com:443", "tcp", "https://google.com:443"},
{"dns:///google.com", "tcp", "dns:///google.com"},
{"/unix/socket/address", "tcp", "/unix/socket/address"},
} {
gotNet, gotAddr := parseDialTarget(test.target)
if gotNet != test.wantNet || gotAddr != test.wantAddr {
t.Errorf("parseDialTarget(%q) = %s, %s want %s, %s", test.target, gotNet, gotAddr, test.wantNet, test.wantAddr)
}
}
} | explode_data.jsonl/1901 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 355
} | [
2830,
3393,
14463,
35,
530,
6397,
1155,
353,
8840,
836,
8,
341,
2023,
8358,
1273,
1669,
2088,
3056,
1235,
341,
197,
28861,
11,
1366,
6954,
11,
1366,
13986,
914,
198,
197,
59403,
197,
197,
4913,
56646,
25,
295,
4385,
25,
15,
497,
330... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 4 |
func TestDevicePluginReRegistrationProbeMode(t *testing.T) {
socketDir, socketName, pluginSocketName, err := tmpSocketDir()
require.NoError(t, err)
defer os.RemoveAll(socketDir)
devs := []*pluginapi.Device{
{ID: "Dev1", Health: pluginapi.Healthy},
{ID: "Dev2", Health: pluginapi.Healthy},
}
devsForRegistration := []*pluginapi.Device{
{ID: "Dev3", Health: pluginapi.Healthy},
}
m, ch, p1, _ := setupInProbeMode(t, devs, nil, socketName, pluginSocketName)
// Wait for the first callback to be issued.
select {
case <-ch:
case <-time.After(5 * time.Second):
t.FailNow()
}
capacity, allocatable, _ := m.GetCapacity()
resourceCapacity := capacity[v1.ResourceName(testResourceName)]
resourceAllocatable := allocatable[v1.ResourceName(testResourceName)]
require.Equal(t, resourceCapacity.Value(), resourceAllocatable.Value(), "capacity should equal to allocatable")
require.Equal(t, int64(2), resourceAllocatable.Value(), "Devices are not updated.")
p2 := NewDevicePluginStub(devs, pluginSocketName+".new", testResourceName, false, false)
err = p2.Start()
require.NoError(t, err)
// Wait for the second callback to be issued.
select {
case <-ch:
case <-time.After(5 * time.Second):
t.FailNow()
}
capacity, allocatable, _ = m.GetCapacity()
resourceCapacity = capacity[v1.ResourceName(testResourceName)]
resourceAllocatable = allocatable[v1.ResourceName(testResourceName)]
require.Equal(t, resourceCapacity.Value(), resourceAllocatable.Value(), "capacity should equal to allocatable")
require.Equal(t, int64(2), resourceAllocatable.Value(), "Devices are not updated.")
// Test the scenario that a plugin re-registers with different devices.
p3 := NewDevicePluginStub(devsForRegistration, pluginSocketName+".third", testResourceName, false, false)
err = p3.Start()
require.NoError(t, err)
// Wait for the third callback to be issued.
select {
case <-ch:
case <-time.After(5 * time.Second):
t.FailNow()
}
capacity, allocatable, _ = m.GetCapacity()
resourceCapacity = capacity[v1.ResourceName(testResourceName)]
resourceAllocatable = allocatable[v1.ResourceName(testResourceName)]
require.Equal(t, resourceCapacity.Value(), resourceAllocatable.Value(), "capacity should equal to allocatable")
require.Equal(t, int64(1), resourceAllocatable.Value(), "Devices of previous registered should be removed")
p2.Stop()
p3.Stop()
cleanup(t, m, p1)
} | explode_data.jsonl/82142 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 796
} | [
2830,
3393,
6985,
11546,
693,
23365,
81426,
3636,
1155,
353,
8840,
836,
8,
341,
58279,
6184,
11,
7575,
675,
11,
9006,
10286,
675,
11,
1848,
1669,
4174,
10286,
6184,
741,
17957,
35699,
1155,
11,
1848,
340,
16867,
2643,
84427,
27050,
6184... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 7 |
func TestConcurrent(t *testing.T) {
is := is.New(t)
m, err := NewMemtable(10000, "/tmp/memtable.log")
is.NoErr(err)
var wg sync.WaitGroup
wg.Add(1)
go checkAfterOneSecond(&wg, is, m, "key1", "value1")
go m.Put("key1", "value1")
m.Put("key2", "value2")
wg.Add(1)
go checkAfterOneSecond(&wg, is, m, "key2", "")
go m.Delete("key2")
wg.Wait()
m.Clear()
} | explode_data.jsonl/56391 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 175
} | [
2830,
3393,
1109,
3231,
1155,
353,
8840,
836,
8,
341,
19907,
1669,
374,
7121,
1155,
340,
2109,
11,
1848,
1669,
1532,
18816,
2005,
7,
16,
15,
15,
15,
15,
11,
3521,
5173,
3183,
336,
2005,
1665,
1138,
19907,
16766,
7747,
3964,
340,
240... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestEC2CredentialsBuildCanonicalQueryStringV2(t *testing.T) {
params := map[string]string{
"Action": "foo",
"Value": "bar",
}
expected := "Action=foo&Value=bar"
testhelper.CheckEquals(t, expected, ec2tokens.EC2CredentialsBuildCanonicalQueryStringV2(params))
} | explode_data.jsonl/68584 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 106
} | [
2830,
3393,
7498,
17,
27025,
11066,
70914,
67001,
53,
17,
1155,
353,
8840,
836,
8,
341,
25856,
1669,
2415,
14032,
30953,
515,
197,
197,
1,
2512,
788,
330,
7975,
756,
197,
197,
1,
1130,
788,
220,
330,
2257,
756,
197,
532,
42400,
1669... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
func TestCreateDaisyInflater_File_NotUEFI(t *testing.T) {
source := fileSource{gcsPath: "gs://bucket/vmdk"}
inflater := createDaisyInflaterSafe(t, ImageImportRequest{
Source: source,
OS: "ubuntu-1804",
UefiCompatible: false,
}, imagefile.Metadata{})
daisyutils.CheckWorkflow(inflater.worker, func(wf *daisy.Workflow, err error) {
inflatedDisk := getDisk(wf, 1)
assert.NotContains(t, inflatedDisk.GuestOsFeatures, &compute.GuestOsFeature{
Type: "UEFI_COMPATIBLE",
})
})
} | explode_data.jsonl/75638 | {
"file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl",
"token_count": 219
} | [
2830,
3393,
4021,
35,
49056,
12342,
34061,
60816,
2230,
19426,
1155,
353,
8840,
836,
8,
341,
47418,
1669,
1034,
3608,
90,
70,
4837,
1820,
25,
330,
5857,
1110,
30410,
5457,
2277,
74,
16707,
17430,
11729,
1669,
1855,
35,
49056,
12342,
256... | [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1... | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.