language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
components/camel-ibm/camel-ibm-cos/src/main/java/org/apache/camel/component/ibm/cos/IBMCOSComponent.java
|
{
"start": 1162,
"end": 2543
}
|
class ____ extends HealthCheckComponent {
@Metadata
private IBMCOSConfiguration configuration = new IBMCOSConfiguration();
public IBMCOSComponent() {
this(null);
}
public IBMCOSComponent(CamelContext context) {
super(context);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
if (remaining == null || remaining.isBlank()) {
throw new IllegalArgumentException("Bucket name must be specified.");
}
final IBMCOSConfiguration configuration
= this.configuration != null ? this.configuration.copy() : new IBMCOSConfiguration();
configuration.setBucketName(remaining);
IBMCOSEndpoint endpoint = new IBMCOSEndpoint(uri, this, configuration);
setProperties(endpoint, parameters);
if (configuration.getCosClient() == null && configuration.getApiKey() == null) {
throw new IllegalArgumentException("Either cosClient or apiKey must be specified");
}
return endpoint;
}
public IBMCOSConfiguration getConfiguration() {
return configuration;
}
/**
* The component configuration
*/
public void setConfiguration(IBMCOSConfiguration configuration) {
this.configuration = configuration;
}
}
|
IBMCOSComponent
|
java
|
google__auto
|
service/processor/src/main/java/com/google/auto/service/processor/ServicesFiles.java
|
{
"start": 1052,
"end": 1235
}
|
class ____ {
public static final String SERVICES_PATH = "META-INF/services";
private ServicesFiles() {}
/**
* Returns an absolute path to a service file given the
|
ServicesFiles
|
java
|
apache__flink
|
flink-python/src/test/java/org/apache/flink/python/PythonOptionsTest.java
|
{
"start": 1077,
"end": 8800
}
|
class ____ {
@Test
void testBundleSize() {
final Configuration configuration = new Configuration();
final int defaultBundleSize = configuration.get(PythonOptions.MAX_BUNDLE_SIZE);
assertThat(defaultBundleSize).isEqualTo(PythonOptions.MAX_BUNDLE_SIZE.defaultValue());
final int expectedBundleSize = 100;
configuration.set(PythonOptions.MAX_BUNDLE_SIZE, expectedBundleSize);
final int actualBundleSize = configuration.get(PythonOptions.MAX_BUNDLE_SIZE);
assertThat(actualBundleSize).isEqualTo(expectedBundleSize);
}
@Test
void testBundleTime() {
final Configuration configuration = new Configuration();
final long defaultBundleTime = configuration.get(PythonOptions.MAX_BUNDLE_TIME_MILLS);
assertThat(defaultBundleTime).isEqualTo(PythonOptions.MAX_BUNDLE_TIME_MILLS.defaultValue());
final long expectedBundleTime = 100;
configuration.set(PythonOptions.MAX_BUNDLE_TIME_MILLS, expectedBundleTime);
final long actualBundleSize = configuration.get(PythonOptions.MAX_BUNDLE_TIME_MILLS);
assertThat(actualBundleSize).isEqualTo(expectedBundleTime);
}
@Test
void testArrowBatchSize() {
final Configuration configuration = new Configuration();
final int defaultArrowBatchSize = configuration.get(PythonOptions.MAX_ARROW_BATCH_SIZE);
assertThat(defaultArrowBatchSize)
.isEqualTo(PythonOptions.MAX_ARROW_BATCH_SIZE.defaultValue());
final int expectedArrowBatchSize = 100;
configuration.set(PythonOptions.MAX_ARROW_BATCH_SIZE, expectedArrowBatchSize);
final int actualArrowBatchSize = configuration.get(PythonOptions.MAX_ARROW_BATCH_SIZE);
assertThat(actualArrowBatchSize).isEqualTo(expectedArrowBatchSize);
}
@Test
void testPythonMetricEnabled() {
final Configuration configuration = new Configuration();
final boolean isMetricEnabled = configuration.get(PythonOptions.PYTHON_METRIC_ENABLED);
assertThat(isMetricEnabled).isEqualTo(PythonOptions.PYTHON_METRIC_ENABLED.defaultValue());
final boolean expectedIsMetricEnabled = false;
configuration.set(PythonOptions.PYTHON_METRIC_ENABLED, false);
final boolean actualIsMetricEnabled =
configuration.get(PythonOptions.PYTHON_METRIC_ENABLED);
assertThat(actualIsMetricEnabled).isEqualTo(expectedIsMetricEnabled);
}
@Test
void testPythonProfileEnabled() {
final Configuration configuration = new Configuration();
final boolean isProfileEnabled = configuration.get(PythonOptions.PYTHON_PROFILE_ENABLED);
assertThat(isProfileEnabled).isEqualTo(PythonOptions.PYTHON_PROFILE_ENABLED.defaultValue());
final boolean expectedIsProfileEnabled = true;
configuration.set(PythonOptions.PYTHON_PROFILE_ENABLED, true);
final boolean actualIsProfileEnabled =
configuration.get(PythonOptions.PYTHON_PROFILE_ENABLED);
assertThat(actualIsProfileEnabled).isEqualTo(expectedIsProfileEnabled);
}
@Test
void testPythonFiles() {
final Configuration configuration = new Configuration();
final Optional<String> defaultPythonFiles =
configuration.getOptional(PythonOptions.PYTHON_FILES);
assertThat(defaultPythonFiles).isEmpty();
final String expectedPythonFiles = "tmp_dir/test1.py,tmp_dir/test2.py";
configuration.set(PythonOptions.PYTHON_FILES, expectedPythonFiles);
final String actualPythonFiles = configuration.get(PythonOptions.PYTHON_FILES);
assertThat(actualPythonFiles).isEqualTo(expectedPythonFiles);
}
@Test
void testPythonRequirements() {
final Configuration configuration = new Configuration();
final Optional<String> defaultPythonRequirements =
configuration.getOptional(PythonOptions.PYTHON_REQUIREMENTS);
assertThat(defaultPythonRequirements).isEmpty();
final String expectedPythonRequirements = "tmp_dir/requirements.txt#tmp_dir/cache";
configuration.set(PythonOptions.PYTHON_REQUIREMENTS, expectedPythonRequirements);
final String actualPythonRequirements =
configuration.get(PythonOptions.PYTHON_REQUIREMENTS);
assertThat(actualPythonRequirements).isEqualTo(expectedPythonRequirements);
}
@Test
void testPythonArchives() {
final Configuration configuration = new Configuration();
final Optional<String> defaultPythonArchives =
configuration.getOptional(PythonOptions.PYTHON_ARCHIVES);
assertThat(defaultPythonArchives).isEmpty();
final String expectedPythonArchives = "tmp_dir/py37.zip#venv,tmp_dir/data.zip";
configuration.set(PythonOptions.PYTHON_ARCHIVES, expectedPythonArchives);
final String actualPythonArchives = configuration.get(PythonOptions.PYTHON_ARCHIVES);
assertThat(actualPythonArchives).isEqualTo(expectedPythonArchives);
}
@Test
void testPythonExecutable() {
final Configuration configuration = new Configuration();
final Optional<String> defaultPythonExecutable =
configuration.getOptional(PythonOptions.PYTHON_EXECUTABLE);
assertThat(defaultPythonExecutable).isEmpty();
final String expectedPythonExecutable = "venv/py37/bin/python";
configuration.set(PythonOptions.PYTHON_EXECUTABLE, expectedPythonExecutable);
final String actualPythonExecutable = configuration.get(PythonOptions.PYTHON_EXECUTABLE);
assertThat(actualPythonExecutable).isEqualTo(expectedPythonExecutable);
}
@Test
void testPythonClientExecutable() {
final Configuration configuration = new Configuration();
final Optional<String> defaultPythonClientExecutable =
configuration.getOptional(PythonOptions.PYTHON_CLIENT_EXECUTABLE);
assertThat(defaultPythonClientExecutable).isEmpty();
final String expectedPythonClientExecutable = "tmp_dir/test1.py,tmp_dir/test2.py";
configuration.set(PythonOptions.PYTHON_CLIENT_EXECUTABLE, expectedPythonClientExecutable);
final String actualPythonClientExecutable =
configuration.get(PythonOptions.PYTHON_CLIENT_EXECUTABLE);
assertThat(actualPythonClientExecutable).isEqualTo(expectedPythonClientExecutable);
}
@Test
void testPythonSystemEnvEnabled() {
final Configuration configuration = new Configuration();
final boolean isSystemEnvEnabled =
configuration.get(PythonOptions.PYTHON_SYSTEMENV_ENABLED);
assertThat(isSystemEnvEnabled)
.isEqualTo(PythonOptions.PYTHON_SYSTEMENV_ENABLED.defaultValue());
final boolean expectedIsSystemEnvEnabled = false;
configuration.set(PythonOptions.PYTHON_SYSTEMENV_ENABLED, false);
final boolean actualIsSystemEnvEnabled =
configuration.get(PythonOptions.PYTHON_SYSTEMENV_ENABLED);
assertThat(actualIsSystemEnvEnabled).isEqualTo(expectedIsSystemEnvEnabled);
}
@Test
void testPythonPath() {
final Configuration configuration = new Configuration();
final Optional<String> defaultPythonPath =
configuration.getOptional(PythonOptions.PYTHON_PATH);
assertThat(defaultPythonPath).isEmpty();
final String expectedPythonPath = "venv/py37/bin/python";
configuration.set(PythonOptions.PYTHON_PATH, expectedPythonPath);
final String actualPythonPath = configuration.get(PythonOptions.PYTHON_PATH);
assertThat(actualPythonPath).isEqualTo(expectedPythonPath);
}
}
|
PythonOptionsTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security-oauth2-client/src/main/java/org/springframework/boot/security/oauth2/client/autoconfigure/OAuth2ClientProperties.java
|
{
"start": 1249,
"end": 2144
}
|
class ____ implements InitializingBean {
/**
* OAuth provider details.
*/
private final Map<String, Provider> provider = new HashMap<>();
/**
* OAuth client registrations.
*/
private final Map<String, Registration> registration = new HashMap<>();
public Map<String, Provider> getProvider() {
return this.provider;
}
public Map<String, Registration> getRegistration() {
return this.registration;
}
@Override
public void afterPropertiesSet() {
validate();
}
public void validate() {
getRegistration().forEach(this::validateRegistration);
}
private void validateRegistration(String id, Registration registration) {
if (!StringUtils.hasText(registration.getClientId())) {
throw new IllegalStateException("Client id of registration '%s' must not be empty.".formatted(id));
}
}
/**
* A single client registration.
*/
public static
|
OAuth2ClientProperties
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java
|
{
"start": 19745,
"end": 21114
}
|
class ____ extends AllocationService {
private volatile long nanoTimeOverride = -1L;
public final GatewayAllocator gatewayAllocator;
public final ShardsAllocator shardsAllocator;
public MockAllocationService(
AllocationDeciders allocationDeciders,
GatewayAllocator gatewayAllocator,
ShardsAllocator shardsAllocator,
ClusterInfoService clusterInfoService,
SnapshotsInfoService snapshotsInfoService
) {
super(
allocationDeciders,
gatewayAllocator,
shardsAllocator,
clusterInfoService,
snapshotsInfoService,
TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY
);
this.gatewayAllocator = gatewayAllocator;
this.shardsAllocator = shardsAllocator;
}
public void setNanoTimeOverride(long nanoTime) {
this.nanoTimeOverride = nanoTime;
}
@Override
protected long currentNanoTime() {
return nanoTimeOverride == -1L ? super.currentNanoTime() : nanoTimeOverride;
}
}
/**
* Mocks behavior in ReplicaShardAllocator to remove delayed shards from list of unassigned shards so they don't get reassigned yet.
*/
protected static
|
MockAllocationService
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdateTests.java
|
{
"start": 1048,
"end": 6889
}
|
class ____ extends AbstractNlpConfigUpdateTestCase<TextSimilarityConfigUpdate> {
public static TextSimilarityConfigUpdate randomUpdate() {
return new TextSimilarityConfigUpdate(
randomAlphaOfLength(10),
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : new BertTokenizationUpdate(randomFrom(Tokenization.Truncate.values()), null),
randomBoolean()
? null
: randomFrom(
Arrays.stream(TextSimilarityConfig.SpanScoreFunction.values())
.map(TextSimilarityConfig.SpanScoreFunction::toString)
.toArray(String[]::new)
)
);
}
public static TextSimilarityConfigUpdate mutateForVersion(TextSimilarityConfigUpdate instance, TransportVersion version) {
if (version.before(TransportVersions.V_8_1_0)) {
return new TextSimilarityConfigUpdate(instance.getText(), instance.getResultsField(), null, null);
}
return instance;
}
@Override
protected TextSimilarityConfigUpdate doParseInstance(XContentParser parser) throws IOException {
return TextSimilarityConfigUpdate.fromXContentStrict(parser);
}
@Override
protected Writeable.Reader<TextSimilarityConfigUpdate> instanceReader() {
return TextSimilarityConfigUpdate::new;
}
@Override
protected TextSimilarityConfigUpdate createTestInstance() {
return createRandom();
}
@Override
protected TextSimilarityConfigUpdate mutateInstance(TextSimilarityConfigUpdate instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected TextSimilarityConfigUpdate mutateInstanceForVersion(TextSimilarityConfigUpdate instance, TransportVersion version) {
return mutateForVersion(instance, version);
}
@Override
Tuple<Map<String, Object>, TextSimilarityConfigUpdate> fromMapTestInstances(TokenizationUpdate expectedTokenization) {
String func = randomFrom(
Arrays.stream(TextSimilarityConfig.SpanScoreFunction.values())
.map(TextSimilarityConfig.SpanScoreFunction::toString)
.toArray(String[]::new)
);
TextSimilarityConfigUpdate expected = new TextSimilarityConfigUpdate(
"What is the meaning of life?",
"ml-results",
expectedTokenization,
func
);
Map<String, Object> config = new HashMap<>() {
{
put(TEXT.getPreferredName(), "What is the meaning of life?");
put(TextSimilarityConfig.RESULTS_FIELD.getPreferredName(), "ml-results");
put(TextSimilarityConfig.SPAN_SCORE_COMBINATION_FUNCTION.getPreferredName(), func);
}
};
return Tuple.tuple(config, expected);
}
@Override
TextSimilarityConfigUpdate fromMap(Map<String, Object> map) {
return TextSimilarityConfigUpdate.fromMap(map);
}
public void testApply() {
Tokenization tokenizationConfig = randomFrom(
BertTokenizationTests.createRandom(),
MPNetTokenizationTests.createRandom(),
RobertaTokenizationTests.createRandom()
);
TextSimilarityConfig originalConfig = new TextSimilarityConfig(
randomBoolean() ? null : VocabularyConfigTests.createRandom(),
tokenizationConfig,
randomBoolean() ? null : randomAlphaOfLength(8),
randomBoolean()
? null
: randomFrom(
Arrays.stream(TextSimilarityConfig.SpanScoreFunction.values())
.map(TextSimilarityConfig.SpanScoreFunction::toString)
.toArray(String[]::new)
)
);
assertThat(
new TextSimilarityConfig(
"Are you my mother?",
originalConfig.getVocabularyConfig(),
originalConfig.getTokenization(),
originalConfig.getResultsField(),
originalConfig.getSpanScoreFunction()
),
equalTo(originalConfig.apply(new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").build()))
);
assertThat(
new TextSimilarityConfig(
"Are you my mother?",
originalConfig.getVocabularyConfig(),
originalConfig.getTokenization(),
"updated-field",
originalConfig.getSpanScoreFunction()
),
equalTo(
originalConfig.apply(
new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?").setResultsField("updated-field").build()
)
)
);
Tokenization.Truncate truncate = randomFrom(Tokenization.Truncate.values());
Tokenization tokenization = cloneWithNewTruncation(originalConfig.getTokenization(), truncate);
assertThat(
new TextSimilarityConfig(
"Are you my mother?",
originalConfig.getVocabularyConfig(),
tokenization,
originalConfig.getResultsField(),
originalConfig.getSpanScoreFunction()
),
equalTo(
originalConfig.apply(
new TextSimilarityConfigUpdate.Builder().setText("Are you my mother?")
.setTokenizationUpdate(createTokenizationUpdate(originalConfig.getTokenization(), truncate, null))
.build()
)
)
);
}
public static TextSimilarityConfigUpdate createRandom() {
return randomUpdate();
}
}
|
TextSimilarityConfigUpdateTests
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4633DualCompilerExecutionsWeaveModeTest.java
|
{
"start": 1902,
"end": 2678
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Submodule2 depends on compiler output from submodule1, but dependency is in generate-resources phase in
* submodule2. This effectively tests the module-locking of the project artifact.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4633");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.addCliArgument("-T");
verifier.addCliArgument("2W");
verifier.addCliArgument("install");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
|
MavenITmng4633DualCompilerExecutionsWeaveModeTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2245/TestMapper.java
|
{
"start": 593,
"end": 788
}
|
class ____ {
private final String id;
public Inner(String id) {
this.id = id;
}
public String getId() {
return id;
}
}
|
Inner
|
java
|
spring-projects__spring-boot
|
buildpack/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/docker/ssl/PemFileWriter.java
|
{
"start": 1031,
"end": 10662
}
|
class ____ {
private static final String EXAMPLE_SECRET_QUALIFIER = "example";
public static final String CA_CERTIFICATE = """
-----BEGIN TRUSTED CERTIFICATE-----
MIIClzCCAgACCQCPbjkRoMVEQDANBgkqhkiG9w0BAQUFADCBjzELMAkGA1UEBhMC
VVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28x
DTALBgNVBAoMBFRlc3QxDTALBgNVBAsMBFRlc3QxFDASBgNVBAMMC2V4YW1wbGUu
Y29tMR8wHQYJKoZIhvcNAQkBFhB0ZXN0QGV4YW1wbGUuY29tMB4XDTIwMDMyNzIx
NTgwNFoXDTIxMDMyNzIxNTgwNFowgY8xCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApD
YWxpZm9ybmlhMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQ0wCwYDVQQKDARUZXN0
MQ0wCwYDVQQLDARUZXN0MRQwEgYDVQQDDAtleGFtcGxlLmNvbTEfMB0GCSqGSIb3
DQEJARYQdGVzdEBleGFtcGxlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkC
gYEA1YzixWEoyzrd20C2R1gjyPCoPfFLlG6UYTyT0tueNy6yjv6qbJ8lcZg7616O
3I9LuOHhZh9U+fCDCgPfiDdyJfDEW/P+dsOMFyMUXPrJPze2yPpOnvV8iJ5DM93u
fEVhCCyzLdYu0P2P3hU2W+T3/Im9DA7FOPA2vF1SrIJ2qtUCAwEAATANBgkqhkiG
9w0BAQUFAAOBgQBdShkwUv78vkn1jAdtfbB+7mpV9tufVdo29j7pmotTCz3ny5fc
zLEfeu6JPugAR71JYbc2CqGrMneSk1zT91EH6ohIz8OR5VNvzB7N7q65Ci7OFMPl
ly6k3rHpMCBtHoyNFhNVfPLxGJ9VlWFKLgIAbCmL4OIQm1l6Fr1MSM38Zw==
-----END TRUSTED CERTIFICATE-----
""";
public static final String CERTIFICATE = """
-----BEGIN CERTIFICATE-----
MIICjzCCAfgCAQEwDQYJKoZIhvcNAQEFBQAwgY8xCzAJBgNVBAYTAlVTMRMwEQYD
VQQIDApDYWxpZm9ybmlhMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQ0wCwYDVQQK
DARUZXN0MQ0wCwYDVQQLDARUZXN0MRQwEgYDVQQDDAtleGFtcGxlLmNvbTEfMB0G
CSqGSIb3DQEJARYQdGVzdEBleGFtcGxlLmNvbTAeFw0yMDAzMjcyMjAxNDZaFw0y
MTAzMjcyMjAxNDZaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5p
YTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwEVGVzdDENMAsGA1UE
CwwEVGVzdDEUMBIGA1UEAwwLZXhhbXBsZS5jb20xHzAdBgkqhkiG9w0BCQEWEHRl
c3RAZXhhbXBsZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAM7kd2cj
F49wm1+OQ7Q5GE96cXueWNPr/Nwei71tf6G4BmE0B+suXHEvnLpHTj9pdX/ZzBIK
8jIZ/x8RnSduK/Ky+zm1QMYUWZtWCAgCW8WzgB69Cn/hQG8KSX3S9bqODuQAvP54
GQJD7+4kVuNBGjFb4DaD4nvMmPtALSZf8ZCZAgMBAAEwDQYJKoZIhvcNAQEFBQAD
gYEAOn6X8+0VVlDjF+TvTgI0KIasA6nDm+KXe7LVtfvqWqQZH4qyd2uiwcDM3Aux
a/OsPdOw0j+NqFDBd3mSMhSVgfvXdK6j9WaxY1VGXyaidLARgvn63wfzgr857sQW
c8eSxbwEQxwlMvVxW6Os4VhCfUQr8VrBrvPa2zs+6IlK+Ug=
-----END CERTIFICATE-----
""";
public static final String PRIVATE_RSA_KEY = """
%s-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQDO5HdnIxePcJtfjkO0ORhPenF7nljT6/zcHou9bX+huAZhNAfr
LlxxL5y6R04/aXV/2cwSCvIyGf8fEZ0nbivysvs5tUDGFFmbVggIAlvFs4AevQp/
4UBvCkl90vW6jg7kALz+eBkCQ+/uJFbjQRoxW+A2g+J7zJj7QC0mX/GQmQIDAQAB
AoGAIWPsBWA7gDHrUYuzT5XbX5BiWlIfAezXPWtMoEDY1W/Oz8dG8+TilH3brJCv
hzps9TpgXhUYK4/Yhdog4+k6/EEY80RvcObOnflazTCVS041B0Ipm27uZjIq2+1F
ZfbWP+B3crpzh8wvIYA+6BCcZV9zi8Od32NEs39CtrOrFPUCQQDxnt9+JlWjtteR
VttRSKjtzKIF08BzNuZlRP9HNWveLhphIvdwBfjASwqgtuslqziEnGG8kniWzyYB
a/ZZVoT3AkEA2zSBMpvGPDkGbOMqbnR8UL3uijkOj+blQe1gsyu3dUa9T42O1u9h
Iz5SdCYlSFHbDNRFrwuW2QnhippqIQqC7wJAbVeyWEpM0yu5XiJqWdyB5iuG3xA2
tW0Q0p9ozvbT+9XtRiwmweFR8uOCybw9qexURV7ntAis3cKctmP/Neq7fQJBAKGa
59UjutYTRIVqRJICFtR/8ii9P9sfYs1j7/KnvC0d5duMhU44VOjivW8b4Eic8F1Y
8bbHWILSIhFJHg0V7skCQDa8/YkRWF/3pwIZNWQr4ce4OzvYsFMkRvGRdX8B2a0p
wSKcVTdEdO2DhBlYddN0zG0rjq4vDMtdmldEl4BdldQ=
-----END RSA PRIVATE KEY-----
""".formatted(EXAMPLE_SECRET_QUALIFIER);
public static final String PRIVATE_EC_KEY = EXAMPLE_SECRET_QUALIFIER + "-----BEGIN EC PRIVATE KEY-----\n"
+ "MIGkAgEBBDB21WGGOb1DokKW0MUHO7RQ6jZSUYXfO2iyfCbjmSJhyK8fSuq1V0N2\n"
+ "Bj7X+XYhS6ygBwYFK4EEACKhZANiAATsRaYri/tDMvrrB2NJlxWFOZ4YBLYdSM+a\n"
+ "FlGh1FuLjOHW9cx8w0iRHd1Hxn4sxqsa62KzGoCj63lGoaJgi67YNCF0lBa/zCLy\n"
+ "ktaMsQePDOR8UR0Cfi2J9bh+IjxXd+o=\n" + "-----END EC PRIVATE KEY-----";
public static final String PRIVATE_EC_KEY_PRIME_256_V1 = EXAMPLE_SECRET_QUALIFIER
+ "-----BEGIN EC PRIVATE KEY-----\n" + "MHcCAQEEIIwZkO8Zjbggzi8wwrk5rzSPzUX31gqTRhBYw4AL6w44oAoGCCqGSM49\n"
+ "AwEHoUQDQgAE8y28khug747bA68M90IAMCPHAYyen+RsN6i84LORpNDUhv00QZWd\n"
+ "hOhjWFCQjnewR98Y8pEb1fnORll4LhHPlQ==\n" + "-----END EC PRIVATE KEY-----";
public static final String PRIVATE_DSA_KEY = EXAMPLE_SECRET_QUALIFIER + "-----BEGIN PRIVATE KEY-----\n"
+ "MIICXAIBADCCAjUGByqGSM44BAEwggIoAoIBAQCPeTXZuarpv6vtiHrPSVG28y7F\n"
+ "njuvNxjo6sSWHz79NgbnQ1GpxBgzObgJ58KuHFObp0dbhdARrbi0eYd1SYRpXKwO\n"
+ "jxSzNggooi/6JxEKPWKpk0U0CaD+aWxGWPhL3SCBnDcJoBBXsZWtzQAjPbpUhLYp\n"
+ "H51kjviDRIZ3l5zsBLQ0pqwudemYXeI9sCkvwRGMn/qdgYHnM423krcw17njSVkv\n"
+ "aAmYchU5Feo9a4tGU8YzRY+AOzKkwuDycpAlbk4/ijsIOKHEUOThjBopo33fXqFD\n"
+ "3ktm/wSQPtXPFiPhWNSHxgjpfyEc2B3KI8tuOAdl+CLjQr5ITAV2OTlgHNZnAh0A\n"
+ "uvaWpoV499/e5/pnyXfHhe8ysjO65YDAvNVpXQKCAQAWplxYIEhQcE51AqOXVwQN\n"
+ "NNo6NHjBVNTkpcAtJC7gT5bmHkvQkEq9rI837rHgnzGC0jyQQ8tkL4gAQWDt+coJ\n"
+ "syB2p5wypifyRz6Rh5uixOdEvSCBVEy1W4AsNo0fqD7UielOD6BojjJCilx4xHjG\n"
+ "jQUntxyaOrsLC+EsRGiWOefTznTbEBplqiuH9kxoJts+xy9LVZmDS7TtsC98kOmk\n"
+ "ltOlXVNb6/xF1PYZ9j897buHOSXC8iTgdzEpbaiH7B5HSPh++1/et1SEMWsiMt7l\n"
+ "U92vAhErDR8C2jCXMiT+J67ai51LKSLZuovjntnhA6Y8UoELxoi34u1DFuHvF9ve\n"
+ "BB4CHHBQgJ3ST6U8rIxoTqGe42TiVckPf1PoSiJy8GY=\n" + "-----END PRIVATE KEY-----\n";
public static final String PKCS8_PRIVATE_EC_NIST_P256_KEY = EXAMPLE_SECRET_QUALIFIER
+ "-----BEGIN PRIVATE KEY-----\n" + "MIGTAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBHkwdwIBAQQgd6SePFfpaTKFd1Gm\n"
+ "+WeHZNkORkot5hx6X9elPdICL9ygCgYIKoZIzj0DAQehRANCAASnMAMgeFBv9ks0\n"
+ "d0jP+utQ3mohwmxY93xljfaBofdg1IeHgDd4I4pBzPxEnvXrU3kcz+SgPZyH1ybl\n" + "P6mSXDXu\n"
+ "-----END PRIVATE KEY-----\n";
public static final String PKCS8_PRIVATE_EC_NIST_P384_KEY = EXAMPLE_SECRET_QUALIFIER
+ "-----BEGIN PRIVATE KEY-----\n" + "MIG/AgEAMBAGByqGSM49AgEGBSuBBAAiBIGnMIGkAgEBBDCexXiWKrtrqV1+d1Tv\n"
+ "t1n5huuw2A+204mQHRuPL9UC8l0XniJjx/PVELCciyJM/7+gBwYFK4EEACKhZANi\n"
+ "AASHEELZSdrHiSXqU1B+/jrOCr6yjxCMqQsetTb0q5WZdCXOhggGXfbzlRynqphQ\n"
+ "i4G7azBUklgLaXfxN5eFk6C+E38SYOR7iippcQsSR2ZsCiTk7rnur4b40gQ7IgLA\n" + "/sU=\n"
+ "-----END PRIVATE KEY-----\n";
public static final String PKCS8_PRIVATE_EC_PRIME256V1_KEY = EXAMPLE_SECRET_QUALIFIER
+ "-----BEGIN PRIVATE KEY-----\n" + "MIGTAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBHkwdwIBAQQg4dVuddgQ6enDvPPw\n"
+ "Dd1mmS6FMm/kzTJjDVsltrNmRuSgCgYIKoZIzj0DAQehRANCAAR1WMrRADEaVj9m\n"
+ "uoUfPhUefJK+lS89NHikQ0ZdkHkybyVKLFMLe1hCynhzpKQmnpgud3E10F0P2PZQ\n" + "L9RCEpGf\n"
+ "-----END PRIVATE KEY-----\n";
public static final String PKCS8_PRIVATE_EC_SECP256R1_KEY = EXAMPLE_SECRET_QUALIFIER
+ "-----BEGIN PRIVATE KEY-----\n" + "MIGTAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBHkwdwIBAQQgU9+v5hUNnTKix8fe\n"
+ "Pfz+NfXFlGxQZMReSCT2Id9PfKagCgYIKoZIzj0DAQehRANCAATeJg+YS4BrJ35A\n"
+ "KgRlZ59yKLDpmENCMoaYUuWbQ9hqHzdybQGzQsrNJqgH0nzWghPwP4nFaLPN+pgB\n" + "bqiRgbjG\n"
+ "-----END PRIVATE KEY-----\n";
public static final String PKCS8_PRIVATE_RSA_KEY = EXAMPLE_SECRET_QUALIFIER + "-----BEGIN PRIVATE KEY-----\n"
+ "MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDR0KfxUw7MF/8R\n"
+ "B5/YXOM7yLnoHYb/M/6dyoulMbtEdKKhQhU28o5FiDkHcEG9PJQLgqrRgAjl3VmC\n"
+ "C9omtfZJQ2EpfkTttkJjnKOOroXhYE51/CYSckapBYCVh8GkjUEJuEfnp07cTfYZ\n"
+ "FqViIgIWPZyjkzl3w4girS7kCuzNdDntVJVx5F/EsFwMA8n3C0QazHQoM5s00Fer\n"
+ "6aTwd6AW0JD5QkADavpfzZ554e4HrVGwHlM28WKQQkFzzGu44FFXyVuEF3HeyVPu\n"
+ "g8GRHAc8UU7ijVgJB5TmbvRGYowIErD5i4VvGLuOv9mgR3aVyN0SdJ1N7aJnXpeS\n"
+ "QjAgf03jAgMBAAECggEBAIhQyzwj3WJGWOZkkLqOpufJotcmj/Wwf0VfOdkq9WMl\n"
+ "cB/bAlN/xWVxerPVgDCFch4EWBzi1WUaqbOvJZ2u7QNubmr56aiTmJCFTVI/GyZx\n"
+ "XqiTGN01N6lKtN7xo6LYTyAUhUsBTWAemrx0FSErvTVb9C/mUBj6hbEZ2XQ5kN5t\n"
+ "7qYX4Lu0zyn7s1kX5SLtm5I+YRq7HSwB6wLy+DSroO71izZ/VPwME3SwT5SN+c87\n"
+ "3dkklR7fumNd9dOpSWKrLPnq4aMko00rvIGc63xD1HrEpXUkB5v24YEn7HwCLEH7\n"
+ "b8jrp79j2nCvvR47inpf+BR8FIWAHEOUUqCEzjQkdiECgYEA6ifjMM0f02KPeIs7\n"
+ "zXd1lI7CUmJmzkcklCIpEbKWf/t/PHv3QgqIkJzERzRaJ8b+GhQ4zrSwAhrGUmI8\n"
+ "kDkXIqe2/2ONgIOX2UOHYHyTDQZHnlXyDecvHUTqs2JQZCGBZkXyZ9i0j3BnTymC\n"
+ "iZ8DvEa0nxsbP+U3rgzPQmXiQVMCgYEA5WN2Y/RndbriNsNrsHYRldbPO5nfV9rp\n"
+ "cDzcQU66HRdK5VIdbXT9tlMYCJIZsSqE0tkOwTgEB/sFvF/tIHSCY5iO6hpIyk6g\n"
+ "kkUzPcld4eM0dEPAge7SYUbakB9CMvA7MkDQSXQNFyZ0mH83+UikwT6uYHFh7+ox\n"
+ "N1P+psDhXzECgYEA1gXLVQnIcy/9LxMkgDMWV8j8uMyUZysDthpbK3/uq+A2dhRg\n"
+ "9g4msPd5OBQT65OpIjElk1n4HpRWfWqpLLHiAZ0GWPynk7W0D7P3gyuaRSdeQs0P\n"
+ "x8FtgPVDCN9t13gAjHiWjnC26Py2kNbCKAQeJ/MAmQTvrUFX2VCACJKTcV0CgYAj\n"
+ "xJWSUmrLfb+GQISLOG3Xim434e9keJsLyEGj4U29+YLRLTOvfJ2PD3fg5j8hU/rw\n"
+ "Ea5uTHi8cdTcIa0M8X3fX8txD3YoLYh2JlouGTcNYOst8d6TpBSj3HN6I5Wj8beZ\n"
+ "R2fy/CiKYpGtsbCdq0kdZNO18BgQW9kewncjs1GxEQKBgQCf8q34h6KuHpHSDh9h\n"
+ "YkDTypk0FReWBAVJCzDNDUMhVLFivjcwtaMd2LiC3FMKZYodr52iKg60cj43vbYI\n"
+ "frmFFxoL37rTmUocCTBKc0LhWj6MicI+rcvQYe1uwTrpWdFf1aZJMYRLRczeKtev\n" + "OWaE/9hVZ5+9pild1NukGpOydw==\n"
+ "-----END PRIVATE KEY-----\n";
public static final String PKCS8_PRIVATE_EC_ED25519_KEY = EXAMPLE_SECRET_QUALIFIER + "-----BEGIN PRIVATE KEY-----\n"
+ "MC4CAQAwBQYDK2VwBCIEIJOKNTaIJQTVuEqZ+yvclnjnlWJG6F+K+VsNCOlWRda+\n" + "-----END PRIVATE KEY-----";
private final Path tempDir;
public PemFileWriter() throws IOException {
this.tempDir = Files.createTempDirectory("buildpack-platform-docker-ssl-tests");
}
Path writeFile(String name, String... contents) throws IOException {
Path path = Paths.get(this.tempDir.toString(), name);
for (String content : contents) {
Files.write(path, content.replaceAll(EXAMPLE_SECRET_QUALIFIER, "").getBytes(), StandardOpenOption.CREATE,
StandardOpenOption.APPEND);
}
return path;
}
public Path getTempDir() {
return this.tempDir;
}
void cleanup() throws IOException {
FileSystemUtils.deleteRecursively(this.tempDir);
}
}
|
PemFileWriter
|
java
|
quarkusio__quarkus
|
extensions/grpc/codegen/src/main/java/io/quarkus/grpc/codegen/GrpcCodeGen.java
|
{
"start": 1620,
"end": 25955
}
|
class ____ implements CodeGenProvider {
private static final Logger log = Logger.getLogger(GrpcCodeGen.class);
private static final String quarkusProtocPluginMain = "io.quarkus.grpc.protoc.plugin.MutinyGrpcGenerator";
private static final String EXE = "exe";
private static final String PROTO = ".proto";
private static final String PROTOC = "protoc";
private static final String PROTOC_GROUPID = "com.google.protobuf";
private static final String SCAN_DEPENDENCIES_FOR_PROTO = "quarkus.generate-code.grpc.scan-for-proto";
private static final String SCAN_DEPENDENCIES_FOR_PROTO_INCLUDE_PATTERN = "quarkus.generate-code.grpc.scan-for-proto-include.\"%s\"";
private static final String SCAN_DEPENDENCIES_FOR_PROTO_EXCLUDE_PATTERN = "quarkus.generate-code.grpc.scan-for-proto-exclude.\"%s\"";
private static final String SCAN_FOR_IMPORTS = "quarkus.generate-code.grpc.scan-for-imports";
private static final String POST_PROCESS_SKIP = "quarkus.generate.code.grpc-post-processing.skip";
private static final String GENERATE_DESCRIPTOR_SET = "quarkus.generate-code.grpc.descriptor-set.generate";
private static final String DESCRIPTOR_SET_OUTPUT_DIR = "quarkus.generate-code.grpc.descriptor-set.output-dir";
private static final String DESCRIPTOR_SET_FILENAME = "quarkus.generate-code.grpc.descriptor-set.name";
private static final String USE_ARG_FILE = "quarkus.generate-code.grpc.use-arg-file";
private static final String GENERATE_KOTLIN = "quarkus.generate-code.grpc.kotlin.generate";
private Executables executables;
private String input;
private boolean hasQuarkusKotlinDependency;
@Override
public String providerId() {
return "grpc";
}
@Override
public String[] inputExtensions() {
return new String[] { "proto" };
}
@Override
public String inputDirectory() {
return "proto";
}
@Override
public Path getInputDirectory() {
if (input != null) {
return Path.of(input);
}
return null;
}
@Override
public void init(ApplicationModel model, Map<String, String> properties) {
this.input = properties.get("quarkus.grpc.codegen.proto-directory");
this.hasQuarkusKotlinDependency = containsQuarkusKotlin(model.getDependencies());
}
@Override
public boolean trigger(CodeGenContext context) throws CodeGenException {
if (TRUE.toString().equalsIgnoreCase(System.getProperties().getProperty("grpc.codegen.skip", "false"))
|| context.config().getOptionalValue("quarkus.grpc.codegen.skip", Boolean.class).orElse(false)) {
log.info("Skipping gRPC code generation on user's request");
return false;
}
Path outDir = context.outDir();
Path workDir = context.workDir();
Path inputDir = CodeGenProvider.resolve(context.inputDir());
Set<String> protoDirs = new LinkedHashSet<>();
boolean useArgFile = context.config().getOptionalValue(USE_ARG_FILE, Boolean.class).orElse(false);
try {
List<String> protoFiles = new ArrayList<>();
if (Files.isDirectory(inputDir)) {
try (Stream<Path> protoFilesPaths = Files.walk(inputDir)) {
protoFilesPaths
.filter(Files::isRegularFile)
.filter(s -> s.toString().endsWith(PROTO))
.map(Path::normalize)
.map(Path::toAbsolutePath)
.map(Path::toString)
.forEach(protoFiles::add);
protoDirs.add(inputDir.normalize().toAbsolutePath().toString());
}
}
Path dirWithProtosFromDependencies = workDir.resolve("protoc-protos-from-dependencies");
Collection<Path> protoFilesFromDependencies = gatherProtosFromDependencies(dirWithProtosFromDependencies, protoDirs,
context);
if (!protoFilesFromDependencies.isEmpty()) {
for (Path files : protoFilesFromDependencies) {
var pathToProtoFile = files.normalize().toAbsolutePath();
var pathToParentDir = files.getParent();
// Add the proto file to the list of proto to compile, but also add the directory containing the
// proto file to the list of directories to include (it's a set, so no duplicate).
protoFiles.add(pathToProtoFile.toString());
protoDirs.add(pathToParentDir.toString());
}
}
if (!protoFiles.isEmpty()) {
initExecutables(workDir, context.applicationModel());
Collection<String> protosToImport = gatherDirectoriesWithImports(workDir.resolve("protoc-dependencies"),
context);
List<String> command = new ArrayList<>();
command.add(executables.protoc.toString());
for (String protoDir : protoDirs) {
command.add(String.format("-I=%s", escapeWhitespace(protoDir)));
}
for (String protoImportDir : protosToImport) {
command.add(String.format("-I=%s", escapeWhitespace(protoImportDir)));
}
command.addAll(asList("--plugin=protoc-gen-grpc=" + executables.grpc,
"--plugin=protoc-gen-q-grpc=" + executables.quarkusGrpc,
"--q-grpc_out=" + outDir,
"--grpc_out=" + outDir,
"--java_out=" + outDir));
if (shouldGenerateKotlin(context.config())) {
command.add("--kotlin_out=" + outDir);
}
if (shouldGenerateDescriptorSet(context.config())) {
command.add(String.format("--descriptor_set_out=%s", getDescriptorSetOutputFile(context)));
}
command.addAll(protoFiles);
// Estimate command length to avoid command line too long error
int commandLength = command.stream().mapToInt(String::length).sum();
// 8191 is the maximum command line length for Windows
if (useArgFile || (commandLength > 8190 && OS.current() == OS.WINDOWS)) {
File argFile = File.createTempFile("grpc-protoc-params", ".txt");
argFile.deleteOnExit();
try (PrintWriter writer = new PrintWriter(argFile, StandardCharsets.UTF_8)) {
for (int i = 1; i < command.size(); i++) {
writer.println(command.get(i));
}
}
command = new ArrayList<>(List.of(command.get(0), "@" + argFile.getAbsolutePath()));
}
log.debugf("Executing command: %s", String.join(" ", command));
try {
ProcessBuilder.exec(command.get(0), command.subList(1, command.size()));
} catch (Exception e) {
throw new CodeGenException("Failed to generate Java classes from proto files: %s to %s with command %s"
.formatted(protoFiles, outDir.toAbsolutePath(), String.join(" ", command)), e);
}
postprocessing(context, outDir);
log.info("Successfully finished generating and post-processing sources from proto files");
return true;
}
} catch (IOException e) {
throw new CodeGenException(
"Failed to generate java files from proto file in " + inputDir.toAbsolutePath(), e);
}
return false;
}
private static void copySanitizedProtoFile(ResolvedDependency artifact, Path protoPath, Path outProtoPath)
throws IOException {
boolean genericServicesFound = false;
try (var reader = Files.newBufferedReader(protoPath);
var writer = Files.newBufferedWriter(outProtoPath)) {
String line = reader.readLine();
while (line != null) {
// filter java_generic_services to avoid "Tried to write the same file twice"
// when set to true. Generic services are deprecated and replaced by classes generated by
// this plugin
if (!line.contains("java_generic_services")) {
writer.write(line);
writer.newLine();
} else {
genericServicesFound = true;
}
line = reader.readLine();
}
}
if (genericServicesFound) {
log.infof("Ignoring option java_generic_services in %s:%s%s.", artifact.getGroupId(), artifact.getArtifactId(),
protoPath);
}
}
private void postprocessing(CodeGenContext context, Path outDir) {
if (TRUE.toString().equalsIgnoreCase(System.getProperties().getProperty(POST_PROCESS_SKIP, "false"))
|| context.config().getOptionalValue(POST_PROCESS_SKIP, Boolean.class).orElse(false)) {
log.info("Skipping gRPC Post-Processing on user's request");
return;
}
new GrpcPostProcessing(context, outDir).postprocess();
}
private Collection<Path> gatherProtosFromDependencies(Path workDir, Set<String> protoDirectories,
CodeGenContext context) throws CodeGenException {
if (context.test()) {
return Collections.emptyList();
}
Config properties = context.config();
String scanDependencies = properties.getOptionalValue(SCAN_DEPENDENCIES_FOR_PROTO, String.class)
.orElse("none");
if ("none".equalsIgnoreCase(scanDependencies)) {
return Collections.emptyList();
}
boolean scanAll = "all".equalsIgnoreCase(scanDependencies);
List<String> dependenciesToScan = Arrays.stream(scanDependencies.split(",")).map(String::trim)
.collect(Collectors.toList());
ApplicationModel appModel = context.applicationModel();
List<Path> protoFilesFromDependencies = new ArrayList<>();
for (ResolvedDependency artifact : appModel.getRuntimeDependencies()) {
String packageId = String.format("%s:%s", artifact.getGroupId(), artifact.getArtifactId());
Collection<String> includes = properties
.getOptionalValue(String.format(SCAN_DEPENDENCIES_FOR_PROTO_INCLUDE_PATTERN, packageId), String.class)
.map(s -> Arrays.stream(s.split(",")).map(String::trim).collect(Collectors.toList()))
.orElse(List.of());
Collection<String> excludes = properties
.getOptionalValue(String.format(SCAN_DEPENDENCIES_FOR_PROTO_EXCLUDE_PATTERN, packageId), String.class)
.map(s -> Arrays.stream(s.split(",")).map(String::trim).collect(Collectors.toList()))
.orElse(List.of());
if (scanAll
|| dependenciesToScan.contains(packageId)) {
extractProtosFromArtifact(workDir, protoFilesFromDependencies, protoDirectories, artifact, includes, excludes,
true);
}
}
return protoFilesFromDependencies;
}
@Override
public boolean shouldRun(Path sourceDir, Config config) {
return CodeGenProvider.super.shouldRun(sourceDir, config)
|| isGeneratingFromAppDependenciesEnabled(config);
}
private boolean isGeneratingFromAppDependenciesEnabled(Config config) {
return config.getOptionalValue(SCAN_DEPENDENCIES_FOR_PROTO, String.class)
.filter(value -> !"none".equals(value)).isPresent();
}
private boolean shouldGenerateKotlin(Config config) {
return config.getOptionalValue(GENERATE_KOTLIN, Boolean.class).orElse(
hasQuarkusKotlinDependency);
}
private boolean shouldGenerateDescriptorSet(Config config) {
return config.getOptionalValue(GENERATE_DESCRIPTOR_SET, Boolean.class).orElse(FALSE);
}
private Path getDescriptorSetOutputFile(CodeGenContext context) throws IOException {
var dscOutputDir = context.config().getOptionalValue(DESCRIPTOR_SET_OUTPUT_DIR, String.class)
.map(context.workDir()::resolve)
.orElseGet(context::outDir);
if (Files.notExists(dscOutputDir)) {
Files.createDirectories(dscOutputDir);
}
var dscFilename = context.config().getOptionalValue(DESCRIPTOR_SET_FILENAME, String.class)
.orElse("descriptor_set.dsc");
return dscOutputDir.resolve(dscFilename).normalize();
}
private Collection<String> gatherDirectoriesWithImports(Path workDir, CodeGenContext context) throws CodeGenException {
Config properties = context.config();
String scanForImports = properties.getOptionalValue(SCAN_FOR_IMPORTS, String.class)
.orElse("com.google.protobuf:protobuf-java");
if ("none".equals(scanForImports.toLowerCase(Locale.getDefault()))) {
return Collections.emptyList();
}
boolean scanAll = "all".equals(scanForImports.toLowerCase(Locale.getDefault()));
List<String> dependenciesToScan = Arrays.stream(scanForImports.split(",")).map(String::trim)
.collect(Collectors.toList());
Set<String> importDirectories = new HashSet<>();
ApplicationModel appModel = context.applicationModel();
for (ResolvedDependency artifact : appModel.getRuntimeDependencies()) {
if (scanAll
|| dependenciesToScan.contains(
String.format("%s:%s", artifact.getGroupId(), artifact.getArtifactId()))) {
extractProtosFromArtifact(workDir, new ArrayList<>(), importDirectories, artifact, List.of(),
List.of(), false);
}
}
return importDirectories;
}
private void extractProtosFromArtifact(Path workDir, Collection<Path> protoFiles,
Set<String> protoDirectories, ResolvedDependency artifact, Collection<String> filesToInclude,
Collection<String> filesToExclude, boolean isDependency) throws CodeGenException {
try {
artifact.getContentTree(new PathFilter(filesToInclude, filesToExclude)).walk(
pathVisit -> {
Path path = pathVisit.getPath();
if (Files.isRegularFile(path) && path.getFileName().toString().endsWith(PROTO)) {
Path root = pathVisit.getRoot();
if (Files.isDirectory(root)) {
protoFiles.add(path);
protoDirectories.add(path.getParent().normalize().toAbsolutePath().toString());
} else { // archive
Path relativePath = path.getRoot().relativize(path);
String uniqueName = artifact.getGroupId() + ":" + artifact.getArtifactId();
if (artifact.getVersion() != null) {
uniqueName += ":" + artifact.getVersion();
}
if (artifact.getClassifier() != null) {
uniqueName += "-" + artifact.getClassifier();
}
Path protoUnzipDir = workDir
.resolve(HashUtil.sha1(uniqueName))
.normalize().toAbsolutePath();
try {
Files.createDirectories(protoUnzipDir);
protoDirectories.add(protoUnzipDir.toString());
} catch (IOException e) {
throw new GrpcCodeGenException("Failed to create directory: " + protoUnzipDir, e);
}
Path outPath = protoUnzipDir;
for (Path part : relativePath) {
outPath = outPath.resolve(part.toString());
}
try {
Files.createDirectories(outPath.getParent());
if (isDependency) {
copySanitizedProtoFile(artifact, path, outPath);
} else {
Files.copy(path, outPath, StandardCopyOption.REPLACE_EXISTING);
}
protoFiles.add(outPath);
} catch (IOException e) {
throw new GrpcCodeGenException("Failed to extract proto file" + path + " to target: "
+ outPath, e);
}
}
}
});
} catch (GrpcCodeGenException e) {
throw new CodeGenException(e.getMessage(), e);
}
}
private String escapeWhitespace(String path) {
if (OS.current() == OS.LINUX) {
return path.replace(" ", "\\ ");
} else {
return path;
}
}
private void initExecutables(Path workDir, ApplicationModel model) throws CodeGenException {
if (executables == null) {
Path protocPath;
String protocPathProperty = System.getProperty("quarkus.grpc.protoc-path");
String classifier = System.getProperty("quarkus.grpc.protoc-os-classifier", osClassifier());
Path protocExe;
if (protocPathProperty == null) {
protocPath = findArtifactPath(model, PROTOC_GROUPID, PROTOC, classifier, EXE);
protocExe = makeExecutableFromPath(workDir, PROTOC_GROUPID, PROTOC, classifier, "exe", protocPath);
} else {
log.debugf("Using protoc from %s", protocPathProperty);
protocPath = Paths.get(protocPathProperty);
protocExe = protocPath;
}
Path protocGrpcPluginExe = prepareExecutable(workDir, model,
"io.grpc", "protoc-gen-grpc-java", classifier, "exe");
Path quarkusGrpcPluginExe = prepareQuarkusGrpcExecutable(model, workDir);
executables = new Executables(protocExe, protocGrpcPluginExe, quarkusGrpcPluginExe);
}
}
private Path prepareExecutable(Path buildDir, ApplicationModel model,
String groupId, String artifactId, String classifier, String packaging) throws CodeGenException {
Path artifactPath = findArtifactPath(model, groupId, artifactId, classifier, packaging);
return makeExecutableFromPath(buildDir, groupId, artifactId, classifier, packaging, artifactPath);
}
private Path makeExecutableFromPath(Path buildDir, String groupId, String artifactId, String classifier, String packaging,
Path artifactPath) throws CodeGenException {
Path exe = buildDir.resolve(String.format("%s-%s-%s-%s", groupId, artifactId, classifier, packaging));
if (Files.exists(exe)) {
return exe;
}
if (artifactPath == null) {
String location = String.format("%s:%s:%s:%s", groupId, artifactId, classifier, packaging);
throw new CodeGenException("Failed to find " + location + " among dependencies");
}
try {
Files.copy(artifactPath, exe, StandardCopyOption.REPLACE_EXISTING);
} catch (IOException e) {
throw new CodeGenException("Failed to copy file: " + artifactPath + " to " + exe, e);
}
if (!exe.toFile().setExecutable(true)) {
throw new CodeGenException("Failed to make the file executable: " + exe);
}
return exe;
}
private static Path findArtifactPath(ApplicationModel model, String groupId, String artifactId, String classifier,
String packaging) {
Path artifactPath = null;
for (ResolvedDependency artifact : model.getDependencies()) {
if (groupId.equals(artifact.getGroupId())
&& artifactId.equals(artifact.getArtifactId())
&& classifier.equals(artifact.getClassifier())
&& packaging.equals(artifact.getType())) {
artifactPath = artifact.getResolvedPaths().getSinglePath();
}
}
return artifactPath;
}
private String osClassifier() throws CodeGenException {
String architecture = getArchitecture();
return switch (OS.current()) {
case LINUX -> "linux-" + architecture;
case WINDOWS -> "windows-" + architecture;
case MAC -> "osx-" + architecture;
default -> throw new CodeGenException(
"Unsupported OS, please use maven plugin instead to generate Java classes from proto files");
};
}
/**
* {@return the bespoke architecture string, or {@code null} if unknown}
*/
private static String getArchitecture() {
return switch (CPU.host()) {
case x64 -> "x86_64";
case x86 -> "x86_32";
case arm -> "arm_32";
case aarch64 -> "aarch_64";
case mips -> "mips_32";
case mipsel -> "mipsel_32";
case mips64 -> "mips_64";
case mips64el -> "mipsel_64";
case ppc32 -> "ppc_32";
case ppc32le -> "ppcle_32";
case ppc -> "ppc_64";
case ppcle -> "ppcle_64";
default -> null;
};
}
private static Path prepareQuarkusGrpcExecutable(ApplicationModel appModel, Path buildDir) throws CodeGenException {
Path pluginPath = findArtifactPath(appModel, "io.quarkus", "quarkus-grpc-protoc-plugin", "shaded", "jar");
if (pluginPath == null) {
throw new CodeGenException("Failed to find Quarkus gRPC protoc plugin among dependencies");
}
if (OS.current() != OS.WINDOWS) {
return writeScript(buildDir, pluginPath, "#!/bin/sh\n", ".sh");
} else {
return writeScript(buildDir, pluginPath, "@echo off\r\n", ".cmd");
}
}
private static Path writeScript(Path buildDir, Path pluginPath, String shebang, String suffix) throws CodeGenException {
Path script;
try {
script = Files.createTempFile(buildDir, "quarkus-grpc", suffix);
try (BufferedWriter writer = Files.newBufferedWriter(script)) {
writer.write(shebang);
writePluginExeCmd(pluginPath, writer);
}
} catch (IOException e) {
throw new CodeGenException("Failed to create a wrapper script for quarkus-grpc plugin", e);
}
if (!script.toFile().setExecutable(true)) {
throw new CodeGenFailureException("failed to set file: " + script + " executable. Protoc invocation may fail");
}
return script;
}
private static void writePluginExeCmd(Path pluginPath, BufferedWriter writer) throws IOException {
writer.write("\"" + io.smallrye.common.process.ProcessUtil.pathOfJava().toString() + "\" -cp \"" +
pluginPath.toAbsolutePath() + "\" " + quarkusProtocPluginMain);
writer.newLine();
}
private static boolean containsQuarkusKotlin(Collection<ResolvedDependency> dependencies) {
return dependencies.stream().anyMatch(new Predicate<ResolvedDependency>() {
@Override
public boolean test(ResolvedDependency rd) {
return rd.getGroupId().equalsIgnoreCase("io.quarkus")
&& rd.getArtifactId().equalsIgnoreCase("quarkus-kotlin");
}
});
}
private static
|
GrpcCodeGen
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/RBloomFilter.java
|
{
"start": 823,
"end": 3271
}
|
interface ____<T> extends RExpirable, RBloomFilterAsync<T> {
/**
* Adds element
*
* @param object - element to add
* @return <code>true</code> if element has been added successfully
* <code>false</code> if element is already present
*/
boolean add(T object);
/**
* Adds elements
*
* @param elements elements to add
* @return number of added elements
*/
long add(Collection<T> elements);
/**
* Checks for element presence
*
* @param object element
* @return <code>true</code> if element is present
* <code>false</code> if element is not present
*/
boolean contains(T object);
/**
* Checks for elements presence
*
* @param elements elements to check presence
* @return number of elements present
*/
long contains(Collection<T> elements);
/**
* Initializes Bloom filter params (size and hashIterations)
* calculated from <code>expectedInsertions</code> and <code>falseProbability</code>
* Stores config to Redis server.
*
* @param expectedInsertions - expected amount of insertions per element
* @param falseProbability - expected false probability
* @return <code>true</code> if Bloom filter initialized
* <code>false</code> if Bloom filter already has been initialized
*/
boolean tryInit(long expectedInsertions, double falseProbability);
/**
* Returns expected amount of insertions per element.
* Calculated during bloom filter initialization.
*
* @return expected amount of insertions per element
*/
long getExpectedInsertions();
/**
* Returns false probability of element presence.
* Calculated during bloom filter initialization.
*
* @return false probability of element presence
*/
double getFalseProbability();
/**
* Returns number of bits in Redis memory required by this instance
*
* @return number of bits
*/
long getSize();
/**
* Returns hash iterations amount used per element.
* Calculated during bloom filter initialization.
*
* @return hash iterations amount
*/
int getHashIterations();
/**
* Calculates probabilistic number of elements already added to Bloom filter.
*
* @return probabilistic number of elements
*/
long count();
}
|
RBloomFilter
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/activemq/ActiveMQOriginalDestinationIT.java
|
{
"start": 2188,
"end": 5051
}
|
class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected String componentName = "activemq";
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testActiveMQOriginalDestination() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBody("activemq:queue:ActiveMQOriginalDestinationTest", "Hello World");
MockEndpoint.assertIsSatisfied(context);
// consume from bar
Exchange out = consumer.receive("activemq:queue:ActiveMQOriginalDestinationTest.dest", 5000);
assertNotNull(out);
// and we should have ActiveMQOriginalDestinationTest as the original destination
JmsMessage msg = out.getIn(JmsMessage.class);
Message jms = msg.getJmsMessage();
ActiveMQMessage amq = assertIsInstanceOf(ActiveMQMessage.class, jms);
ActiveMQDestination original = (ActiveMQDestination) amq.getJMSDestination();
assertNotNull(original);
assertEquals("ActiveMQOriginalDestinationTest.dest", original.getName());
assertEquals("QUEUE", original.getType().name());
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected JmsComponent setupComponent(CamelContext camelContext, ArtemisService service, String componentName) {
JmsComponent component = super.setupComponent(camelContext, service, componentName);
component.setMessageCreatedStrategy(new OriginalDestinationPropagateStrategy());
return component;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:queue:ActiveMQOriginalDestinationTest")
.to("activemq:queue:ActiveMQOriginalDestinationTest.dest")
.to("mock:result");
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
/**
* A strategy to enrich JMS message with their original destination if the Camel route originates from a JMS
* destination.
* <p/>
* This implementation uses ActiveMQ specific code which can be moved to activemq-camel when it supports Camel 2.16
*/
private static
|
ActiveMQOriginalDestinationIT
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/FormLoginConfigurerTests.java
|
{
"start": 30778,
"end": 31574
}
|
class ____<T> {
private final AuthorizationManager<T> authorities;
AuthorizationManagerFactory(String... authorities) {
this.authorities = AllAuthoritiesAuthorizationManager.hasAllAuthorities(authorities);
}
public AuthorizationManager<T> authenticated() {
AuthenticatedAuthorizationManager<T> authenticated = AuthenticatedAuthorizationManager.authenticated();
return AuthorizationManagers.allOf(new AuthorizationDecision(false), this.authorities, authenticated);
}
public AuthorizationManager<T> hasAuthority(String authority) {
AuthorityAuthorizationManager<T> authorized = AuthorityAuthorizationManager.hasAuthority(authority);
return AuthorizationManagers.allOf(new AuthorizationDecision(false), this.authorities, authorized);
}
}
}
|
AuthorizationManagerFactory
|
java
|
apache__camel
|
components/camel-caffeine/src/generated/java/org/apache/camel/component/caffeine/load/CaffeineLoadCacheEndpointUriFactory.java
|
{
"start": 523,
"end": 2621
}
|
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":cacheName";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(15);
props.add("action");
props.add("cacheLoader");
props.add("cacheName");
props.add("createCacheIfNotExist");
props.add("evictionType");
props.add("expireAfterAccessTime");
props.add("expireAfterWriteTime");
props.add("initialCapacity");
props.add("key");
props.add("lazyStartProducer");
props.add("maximumSize");
props.add("removalListener");
props.add("statsCounter");
props.add("statsEnabled");
props.add("valueType");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "caffeine-loadcache".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "cacheName", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
|
CaffeineLoadCacheEndpointUriFactory
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/EntryPointAssertions_anyOf_Test.java
|
{
"start": 1262,
"end": 3025
}
|
class ____ extends EntryPointAssertionsBaseTest {
@ParameterizedTest
@MethodSource("anyOfWithArrayFactories")
<T> void should_create_anyOf_condition_from_condition_array(Function<Condition<T>[], Condition<T>> anyOfFactory) {
// GIVEN
Condition<T> condition1 = new TestCondition<>("condition 1");
Condition<T> condition2 = new TestCondition<>("condition 2");
// WHEN
Condition<T> anyOfCondition = anyOfFactory.apply(array(condition1, condition2));
// THEN
then(anyOfCondition).isInstanceOf(AnyOf.class)
.extracting("conditions", as(ITERABLE))
.containsExactly(condition1, condition2);
}
@SuppressWarnings("unchecked")
private static <T> Stream<Function<Condition<T>[], Condition<T>>> anyOfWithArrayFactories() {
return Stream.of(Assertions::anyOf, BDDAssertions::anyOf, withAssertions::anyOf);
}
@ParameterizedTest
@MethodSource("anyOfWithCollectionFactories")
<T> void should_create_anyOf_condition_from_condition_collection(Function<Collection<Condition<T>>, Condition<T>> anyOfFactory) {
// GIVEN
Condition<T> condition1 = new TestCondition<>("condition 1");
Condition<T> condition2 = new TestCondition<>("condition 2");
// WHEN
Condition<T> anyOfCondition = anyOfFactory.apply(list(condition1, condition2));
// THEN
then(anyOfCondition).isInstanceOf(AnyOf.class)
.extracting("conditions", as(ITERABLE))
.containsExactly(condition1, condition2);
}
private static <T> Stream<Function<Collection<Condition<T>>, Condition<T>>> anyOfWithCollectionFactories() {
return Stream.of(Assertions::anyOf, BDDAssertions::anyOf, withAssertions::anyOf);
}
}
|
EntryPointAssertions_anyOf_Test
|
java
|
micronaut-projects__micronaut-core
|
benchmarks/src/jmh/java/io/micronaut/http/server/stack/NettyUtil.java
|
{
"start": 218,
"end": 1014
}
|
class ____ {
static ByteBuf readAllOutboundContiguous(EmbeddedChannel clientChannel) {
ByteBuf requestBytes = PooledByteBufAllocator.DEFAULT.buffer();
while (true) {
ByteBuf part = clientChannel.readOutbound();
if (part == null) {
break;
}
requestBytes.writeBytes(part);
}
return requestBytes;
}
static ByteBuf readAllOutboundComposite(EmbeddedChannel channel) {
CompositeByteBuf response = PooledByteBufAllocator.DEFAULT.compositeBuffer();
while (true) {
ByteBuf part = channel.readOutbound();
if (part == null) {
break;
}
response.addComponent(true, part);
}
return response;
}
}
|
NettyUtil
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerFormatStringTest.java
|
{
"start": 4336,
"end": 4596
}
|
class ____ {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
public void f() {
logger.atSevere().log("hello %s");
}
}
""")
.doTest();
}
}
|
Test
|
java
|
bumptech__glide
|
instrumentation/src/androidTest/java/com/bumptech/glide/load/resource/bitmap/DownsamplerEmulatorTest.java
|
{
"start": 28402,
"end": 28920
}
|
class ____ when
// running on SDK levels < 34. Also, do not extract methods with Gainmap in the method
// signature for the same reason.
bitmap.setGainmap(
new android.graphics.Gainmap(Bitmap.createBitmap(width / 2, height / 2, Config.ALPHA_8)));
}
ByteArrayOutputStream os = new ByteArrayOutputStream();
bitmap.compress(format, 100 /*quality*/, os);
bitmap.recycle();
byte[] data = os.toByteArray();
return new ByteArrayInputStream(data);
}
static final
|
resolution
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/java/JdbcTimestampJavaType.java
|
{
"start": 1410,
"end": 8247
}
|
class ____ extends AbstractTemporalJavaType<Date> implements VersionJavaType<Date> {
public static final JdbcTimestampJavaType INSTANCE = new JdbcTimestampJavaType();
public static final String TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:ss.SSSSSSSSS";
/**
* Intended for use in reading HQL literals and writing SQL literals
*
* @see #TIMESTAMP_FORMAT
*/
public static final DateTimeFormatter LITERAL_FORMATTER = DateTimeFormatter.ofPattern( TIMESTAMP_FORMAT )
.withZone( ZoneId.from( ZoneOffset.UTC ) );
private static final DateTimeFormatter ENCODED_FORMATTER = DateTimeFormatter.ISO_LOCAL_DATE_TIME
.withZone( ZoneId.from( ZoneOffset.UTC ) );
public JdbcTimestampJavaType() {
super( Date.class, TimestampMutabilityPlan.INSTANCE );
}
@Override
public Class<Date> getJavaType() {
// wrong, but needed for backward compatibility
//noinspection unchecked, rawtypes
return (Class) java.sql.Timestamp.class;
}
@Override
public TemporalType getPrecision() {
return TemporalType.TIMESTAMP;
}
@Override
public boolean isInstance(Object value) {
// this check holds true for java.sql.Timestamp as well
return value instanceof Date;
}
@Override
public boolean areEqual(Date one, Date another) {
if ( one == another ) {
return true;
}
if ( one == null || another == null) {
return false;
}
long t1 = one.getTime();
long t2 = another.getTime();
boolean oneIsTimestamp = one instanceof Timestamp;
boolean anotherIsTimestamp = another instanceof Timestamp;
int n1 = oneIsTimestamp ? ( (Timestamp) one ).getNanos() : 0;
int n2 = anotherIsTimestamp ? ( (Timestamp) another ).getNanos() : 0;
if ( t1 != t2 ) {
return false;
}
if ( oneIsTimestamp && anotherIsTimestamp ) {
// both are Timestamps
int nn1 = n1 % 1000000;
int nn2 = n2 % 1000000;
return nn1 == nn2;
}
else {
// at least one is a plain old Date
return true;
}
}
@Override
public int extractHashCode(Date value) {
return Long.hashCode( value.getTime() / 1000 );
}
@Override
public Date coerce(Object value, CoercionContext coercionContext) {
return wrap( value, null );
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public Object unwrap(Date value, Class type, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( Timestamp.class.isAssignableFrom( type ) ) {
return value instanceof Timestamp
? (Timestamp) value
: new Timestamp( value.getTime() );
}
if ( Date.class.isAssignableFrom( type ) ) {
return value;
}
if ( LocalDateTime.class.isAssignableFrom( type ) ) {
final Instant instant = value.toInstant();
return LocalDateTime.ofInstant( instant, ZoneId.systemDefault() );
}
if ( Calendar.class.isAssignableFrom( type ) ) {
final var gregorianCalendar = new GregorianCalendar();
gregorianCalendar.setTimeInMillis( value.getTime() );
return gregorianCalendar;
}
if ( Long.class.isAssignableFrom( type ) ) {
return value.getTime();
}
if ( java.sql.Date.class.isAssignableFrom( type ) ) {
return value instanceof java.sql.Date
? (java.sql.Date) value
: new java.sql.Date( value.getTime() );
}
if ( java.sql.Time.class.isAssignableFrom( type ) ) {
return value instanceof java.sql.Time
? (java.sql.Time) value
: new java.sql.Time( value.getTime() % 86_400_000 );
}
throw unknownUnwrap( type );
}
@Override
public <X> Date wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( value instanceof Timestamp timestamp ) {
return timestamp;
}
if ( value instanceof Date date ) {
return new Timestamp( date.getTime() );
}
if ( value instanceof LocalDateTime localDateTime ) {
return Timestamp.valueOf( localDateTime );
}
if ( value instanceof Long longValue ) {
return new Timestamp( longValue );
}
if ( value instanceof Calendar calendar ) {
return new Timestamp( calendar.getTimeInMillis() );
}
throw unknownWrap( value.getClass() );
}
@Override
public boolean isWider(JavaType<?> javaType) {
return switch ( javaType.getTypeName() ) {
case "java.sql.Date", "java.sql.Timestamp", "java.util.Date", "java.util.Calendar" -> true;
default -> false;
};
}
@Override
public String toString(Date value) {
return LITERAL_FORMATTER.format( value.toInstant() );
}
@Override
public Date fromString(CharSequence string) {
try {
final var temporalAccessor = LITERAL_FORMATTER.parse( string );
final var timestamp = new Timestamp( temporalAccessor.getLong( ChronoField.INSTANT_SECONDS ) * 1000L );
timestamp.setNanos( temporalAccessor.get( ChronoField.NANO_OF_SECOND ) );
return timestamp;
}
catch ( DateTimeParseException pe) {
throw new HibernateException( "could not parse timestamp string " + string, pe );
}
}
@Override
public void appendEncodedString(SqlAppender sb, Date value) {
ENCODED_FORMATTER.formatTo( value.toInstant(), sb );
}
@Override
public Date fromEncodedString(CharSequence charSequence, int start, int end) {
try {
final var temporalAccessor = ENCODED_FORMATTER.parse( subSequence( charSequence, start, end ) );
final Timestamp timestamp;
if ( temporalAccessor.isSupported( ChronoField.INSTANT_SECONDS ) ) {
timestamp = new Timestamp( temporalAccessor.getLong( ChronoField.INSTANT_SECONDS ) * 1000L );
timestamp.setNanos( temporalAccessor.get( ChronoField.NANO_OF_SECOND ) );
}
else {
timestamp = Timestamp.valueOf( LocalDateTime.from( temporalAccessor ) );
}
return timestamp;
}
catch ( DateTimeParseException pe) {
throw new HibernateException( "could not parse timestamp string " + subSequence( charSequence, start, end ), pe );
}
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators context) {
return context.getJdbcType( Types.TIMESTAMP );
}
@Override @SuppressWarnings("unchecked")
protected <X> TemporalJavaType<X> forTimestampPrecision(TypeConfiguration typeConfiguration) {
return (TemporalJavaType<X>) this;
}
@Override @SuppressWarnings("unchecked")
protected <X> TemporalJavaType<X> forDatePrecision(TypeConfiguration typeConfiguration) {
return (TemporalJavaType<X>) JdbcDateJavaType.INSTANCE;
}
@Override
public int getDefaultSqlPrecision(Dialect dialect, JdbcType jdbcType) {
return dialect.getDefaultTimestampPrecision();
}
@Override
public Date next(
Date current,
Long length,
Integer precision,
Integer scale,
SharedSessionContractImplementor session) {
return seed( length, precision, scale, session );
}
@Override
public Date seed(
Long length,
Integer precision,
Integer scale,
SharedSessionContractImplementor session) {
return Timestamp.from( ClockHelper.forPrecision( precision, session ).instant() );
}
public static
|
JdbcTimestampJavaType
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/bytecode/enhancement/extension/engine/BytecodeEnhancedTestEngine.java
|
{
"start": 4444,
"end": 4816
}
|
class ____ annotated with @BytecodeEnhanced
// we replace the descriptor with the new one that will point to an enhanced test class,
// this also means that we need to add all the child descriptors back as well...
// Then on the extension side we set the classloader that contains the enhanced test class
// and set it back to the original once the test
|
is
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/specification/SimpleProjectionSpecification.java
|
{
"start": 2172,
"end": 3397
}
|
interface ____<T,X> extends QuerySpecification<T> {
/**
* Create a new {@code ProjectionSpecification} which augments the given
* {@link SelectionSpecification}.
*/
static <T,X> SimpleProjectionSpecification<T,X> create(
SelectionSpecification<T> selectionSpecification,
Path<T,X> projectedPath) {
return new SimpleProjectionSpecificationImpl<>( selectionSpecification, projectedPath );
}
/**
* Create a new {@code ProjectionSpecification} which augments the given
* {@link SelectionSpecification}.
*/
static <T,X> SimpleProjectionSpecification<T,X> create(
SelectionSpecification<T> selectionSpecification,
SingularAttribute<? super T,X> projectedAttribute) {
return new SimpleProjectionSpecificationImpl<>( selectionSpecification, projectedAttribute );
}
@Override
SelectionQuery<X> createQuery(Session session);
@Override
SelectionQuery<X> createQuery(StatelessSession session);
@Override
SelectionQuery<X> createQuery(EntityManager entityManager);
@Override
CriteriaQuery<X> buildCriteria(CriteriaBuilder builder);
@Override
TypedQueryReference<X> reference();
@Override
SimpleProjectionSpecification<T,X> validate(CriteriaBuilder builder);
}
|
SimpleProjectionSpecification
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInputTests.java
|
{
"start": 1402,
"end": 5718
}
|
class ____ extends ESTestCase {
public void testMapperSimple() {
var mapper = new PlanStreamInput.NameIdMapper();
NameId first = mapper.apply(1L);
NameId second = mapper.apply(1L);
assertThat(second, equalTo(first));
NameId third = mapper.apply(2L);
NameId fourth = mapper.apply(2L);
assertThat(third, not(equalTo(second)));
assertThat(fourth, equalTo(third));
assertThat(mapper.seen.size(), is(2));
}
public void testMapper() {
List<Long> longs = randomLongsListOfSize(100);
List<Long> nameIds = new ArrayList<>();
for (long l : longs) {
nameIds.add(l);
if (randomBoolean()) { // randomly insert additional values from the known list
int idx = randomIntBetween(0, longs.size() - 1);
nameIds.add(longs.get(idx));
}
}
var mapper = new PlanStreamInput.NameIdMapper();
List<NameId> mappedIds = nameIds.stream().map(mapper::apply).toList();
assertThat(mappedIds.size(), is(nameIds.size()));
// there must be exactly 100 distinct elements
assertThat(mapper.seen.size(), is(100));
assertThat(mappedIds.stream().distinct().count(), is(100L));
// The pre-mapped name id pattern must match that of the mapped one
Map<Long, List<Long>> nameIdsSeen = new LinkedHashMap<>(); // insertion order
for (int i = 0; i < nameIds.size(); i++) {
long value = nameIds.get(i);
nameIdsSeen.computeIfAbsent(value, k -> new ArrayList<>());
nameIdsSeen.get(value).add((long) i);
}
assert nameIdsSeen.size() == 100;
Map<NameId, List<Long>> mappedSeen = new LinkedHashMap<>(); // insertion order
for (int i = 0; i < mappedIds.size(); i++) {
NameId nameId = mappedIds.get(i);
mappedSeen.computeIfAbsent(nameId, k -> new ArrayList<>());
mappedSeen.get(nameId).add((long) i);
}
assert mappedSeen.size() == 100;
var mappedSeenItr = mappedSeen.values().iterator();
for (List<Long> indexes : nameIdsSeen.values()) {
assertThat(indexes, equalTo(mappedSeenItr.next()));
}
}
List<Long> randomLongsListOfSize(int size) {
Set<Long> longs = new HashSet<>();
while (longs.size() < size) {
longs.add(randomLong());
}
return longs.stream().toList();
}
public void testSourceSerialization() {
Function<String, String> queryFn = delimiter -> delimiter
+ "FROM "
+ delimiter
+ " test "
+ delimiter
+ "| EVAL "
+ delimiter
+ " x = CONCAT(first_name, \"baz\")"
+ delimiter
+ "| EVAL last_name IN (\"foo\", "
+ delimiter
+ " \"bar\")"
+ delimiter
+ "| "
+ delimiter
+ "WHERE emp_no == abs("
+ delimiter
+ "emp_no)"
+ delimiter;
Function<LogicalPlan, List<Source>> sources = plan -> {
List<Expression> exp = new ArrayList<>();
plan.forEachDown(p -> {
if (p instanceof Eval e) {
e.fields().forEach(a -> exp.add(a.child()));
} else if (p instanceof Filter f) {
exp.add(f.condition());
}
});
return exp.stream().map(Expression::source).toList();
};
for (var delim : new String[] { "", "\r", "\n", "\r\n" }) {
String query = queryFn.apply(delim);
Configuration config = configuration(query);
LogicalPlan planIn = analyze(query);
LogicalPlan planOut = serializeDeserialize(
planIn,
PlanStreamOutput::writeNamedWriteable,
in -> in.readNamedWriteable(LogicalPlan.class),
config
);
assertThat(planIn, equalTo(planOut));
assertThat(sources.apply(planIn), equalTo(sources.apply(planOut)));
}
}
@Override
protected List<String> filteredWarnings() {
return withDefaultLimitWarning(super.filteredWarnings());
}
}
|
PlanStreamInputTests
|
java
|
junit-team__junit5
|
junit-jupiter-migrationsupport/src/main/java/org/junit/jupiter/migrationsupport/rules/adapter/GenericBeforeAndAfterAdvice.java
|
{
"start": 531,
"end": 714
}
|
interface ____ {
default void before() {
}
default void handleTestExecutionException(Throwable cause) throws Throwable {
}
default void after() {
}
}
|
GenericBeforeAndAfterAdvice
|
java
|
apache__spark
|
common/unsafe/src/main/java/org/apache/spark/sql/catalyst/expressions/HiveHasher.java
|
{
"start": 1046,
"end": 1634
}
|
class ____ {
@Override
public String toString() {
return HiveHasher.class.getSimpleName();
}
public static int hashInt(int input) {
return input;
}
public static int hashLong(long input) {
return (int) ((input >>> 32) ^ input);
}
public static int hashUnsafeBytes(Object base, long offset, int lengthInBytes) {
assert (lengthInBytes >= 0): "lengthInBytes cannot be negative";
int result = 0;
for (int i = 0; i < lengthInBytes; i++) {
result = (result * 31) + (int) Platform.getByte(base, offset + i);
}
return result;
}
}
|
HiveHasher
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/discovery/DiscoverySelectorResolverTests.java
|
{
"start": 13255,
"end": 37270
}
|
class ____ name: " + className);
}
@Test
void methodResolutionForNonexistentMethod() {
MethodSelector selector = selectMethod(MyTestClass.class, "bogus", "");
resolve(request().selectors(selector));
assertTrue(requireNonNull(engineDescriptor).getDescendants().isEmpty());
var result = verifySelectorProcessed(selector);
assertThat(result.getStatus()).isEqualTo(FAILED);
assertThat(result.getThrowable().orElseThrow()).hasMessageContaining("Could not find method");
}
@Test
void classResolutionByUniqueId() {
UniqueIdSelector selector = selectUniqueId(uniqueIdForClass(MyTestClass.class).toString());
resolve(request().selectors(selector));
assertEquals(4, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertUniqueIdsForMyTestClass(uniqueIds);
}
@Test
void staticNestedClassResolutionByUniqueId() {
UniqueIdSelector selector = selectUniqueId(uniqueIdForClass(OtherTestClass.NestedTestClass.class).toString());
resolve(request().selectors(selector));
assertEquals(3, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(OtherTestClass.NestedTestClass.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(OtherTestClass.NestedTestClass.class, "test5()"));
assertThat(uniqueIds).contains(uniqueIdForMethod(OtherTestClass.NestedTestClass.class, "test6()"));
}
@Test
void methodOfInnerClassByUniqueId() {
UniqueIdSelector selector = selectUniqueId(
uniqueIdForMethod(OtherTestClass.NestedTestClass.class, "test5()").toString());
resolve(request().selectors(selector));
assertEquals(2, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(OtherTestClass.NestedTestClass.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(OtherTestClass.NestedTestClass.class, "test5()"));
}
@Test
void resolvingUniqueIdWithUnknownSegmentTypeResolvesNothing() {
UniqueId uniqueId = engineId().append("bogus", "enigma");
UniqueIdSelector selector = selectUniqueId(uniqueId);
resolve(request().selectors(selector));
assertTrue(requireNonNull(engineDescriptor).getDescendants().isEmpty());
assertUnresolved(selector);
}
@Test
void resolvingUniqueIdOfNonTestMethodResolvesNothing() {
UniqueIdSelector selector = selectUniqueId(uniqueIdForMethod(MyTestClass.class, "notATest()"));
resolve(request().selectors(selector));
assertThat(requireNonNull(engineDescriptor).getDescendants()).isEmpty();
assertUnresolved(selector);
}
@Test
void methodResolutionByUniqueIdWithMissingMethodName() {
UniqueId uniqueId = uniqueIdForMethod(getClass(), "()");
resolve(request().selectors(selectUniqueId(uniqueId)));
assertTrue(requireNonNull(engineDescriptor).getDescendants().isEmpty());
var result = verifySelectorProcessed(selectUniqueId(uniqueId));
assertThat(result.getStatus()).isEqualTo(FAILED);
assertThat(result.getThrowable().orElseThrow())//
.isInstanceOf(PreconditionViolationException.class)//
.hasMessageStartingWith("Method [()] does not match pattern");
}
@Test
void methodResolutionByUniqueIdWithMissingParameters() {
UniqueId uniqueId = uniqueIdForMethod(getClass(), "methodName");
resolve(request().selectors(selectUniqueId(uniqueId)));
assertThat(requireNonNull(engineDescriptor).getDescendants()).isEmpty();
var result = verifySelectorProcessed(selectUniqueId(uniqueId));
assertThat(result.getStatus()).isEqualTo(FAILED);
assertThat(result.getThrowable().orElseThrow())//
.isInstanceOf(PreconditionViolationException.class)//
.hasMessageStartingWith("Method [methodName] does not match pattern");
}
@Test
void methodResolutionByUniqueIdWithBogusParameters() {
UniqueId uniqueId = uniqueIdForMethod(getClass(), "methodName(java.lang.String, junit.foo.Enigma)");
resolve(request().selectors(selectUniqueId(uniqueId)));
assertTrue(requireNonNull(engineDescriptor).getDescendants().isEmpty());
var result = verifySelectorProcessed(selectUniqueId(uniqueId));
assertThat(result.getStatus()).isEqualTo(FAILED);
assertThat(result.getThrowable().orElseThrow())//
.isInstanceOf(JUnitException.class)//
.hasMessage("Failed to load parameter type [%s] for method [%s] in class [%s].", "junit.foo.Enigma",
"methodName", getClass().getName());
}
@Test
void methodResolutionByUniqueId() {
UniqueIdSelector selector = selectUniqueId(uniqueIdForMethod(MyTestClass.class, "test1()").toString());
resolve(request().selectors(selector));
assertEquals(2, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(MyTestClass.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(MyTestClass.class, "test1()"));
}
@Test
void methodResolutionByUniqueIdFromInheritedClass() {
UniqueIdSelector selector = selectUniqueId(uniqueIdForMethod(HerTestClass.class, "test1()").toString());
resolve(request().selectors(selector));
assertEquals(2, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(HerTestClass.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(HerTestClass.class, "test1()"));
}
@Test
void methodResolutionByUniqueIdWithParams() {
UniqueIdSelector selector = selectUniqueId(
uniqueIdForMethod(HerTestClass.class, "test7(java.lang.String)").toString());
resolve(request().selectors(selector));
assertEquals(2, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(HerTestClass.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(HerTestClass.class, "test7(java.lang.String)"));
}
@Test
void resolvingUniqueIdWithWrongParamsResolvesNothing() {
UniqueId uniqueId = uniqueIdForMethod(HerTestClass.class, "test7(java.math.BigDecimal)");
resolve(request().selectors(selectUniqueId(uniqueId)));
assertTrue(requireNonNull(engineDescriptor).getDescendants().isEmpty());
assertUnresolved(selectUniqueId(uniqueId));
}
@Test
void twoMethodResolutionsByUniqueId() {
UniqueIdSelector selector1 = selectUniqueId(uniqueIdForMethod(MyTestClass.class, "test1()").toString());
UniqueIdSelector selector2 = selectUniqueId(uniqueIdForMethod(MyTestClass.class, "test2()").toString());
// adding same selector twice should have no effect
resolve(request().selectors(selector1, selector2, selector2));
assertEquals(3, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(MyTestClass.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(MyTestClass.class, "test1()"));
assertThat(uniqueIds).contains(uniqueIdForMethod(MyTestClass.class, "test2()"));
TestDescriptor classFromMethod1 = descriptorByUniqueId(
uniqueIdForMethod(MyTestClass.class, "test1()")).getParent().orElseThrow();
TestDescriptor classFromMethod2 = descriptorByUniqueId(
uniqueIdForMethod(MyTestClass.class, "test2()")).getParent().orElseThrow();
assertEquals(classFromMethod1, classFromMethod2);
assertSame(classFromMethod1, classFromMethod2);
}
@Test
void packageResolutionUsingExplicitBasePackage() {
PackageSelector selector = selectPackage("org.junit.jupiter.engine.descriptor.subpackage");
resolve(request().selectors(selector));
assertEquals(6, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(Class1WithTestCases.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(Class1WithTestCases.class, "test1()"));
assertThat(uniqueIds).contains(uniqueIdForClass(Class2WithTestCases.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(Class2WithTestCases.class, "test2()"));
assertThat(uniqueIds).contains(
uniqueIdForMethod(ClassWithStaticInnerTestCases.ShouldBeDiscovered.class, "test1()"));
}
@Test
void packageResolutionUsingDefaultPackage() throws Exception {
resolve(request().selectors(selectPackage("")));
// 150 is completely arbitrary. The actual number is likely much higher.
assertThat(requireNonNull(engineDescriptor).getDescendants())//
.describedAs("Too few test descriptors in classpath")//
.hasSizeGreaterThan(150);
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds)//
.describedAs("Failed to pick up DefaultPackageTestCase via classpath scanning")//
.contains(uniqueIdForClass(ReflectionSupport.tryToLoadClass("DefaultPackageTestCase").get()));
assertThat(uniqueIds).contains(uniqueIdForClass(Class1WithTestCases.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(Class1WithTestCases.class, "test1()"));
assertThat(uniqueIds).contains(uniqueIdForClass(Class2WithTestCases.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(Class2WithTestCases.class, "test2()"));
}
@Test
void classpathResolution() throws Exception {
Path classpath = Path.of(
DiscoverySelectorResolverTests.class.getProtectionDomain().getCodeSource().getLocation().toURI());
List<ClasspathRootSelector> selectors = selectClasspathRoots(Set.of(classpath));
resolve(request().selectors(selectors));
// 150 is completely arbitrary. The actual number is likely much higher.
assertThat(requireNonNull(engineDescriptor).getDescendants())//
.describedAs("Too few test descriptors in classpath")//
.hasSizeGreaterThan(150);
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds)//
.describedAs("Failed to pick up DefaultPackageTestCase via classpath scanning")//
.contains(uniqueIdForClass(ReflectionSupport.tryToLoadClass("DefaultPackageTestCase").getNonNull()));
assertThat(uniqueIds).contains(uniqueIdForClass(Class1WithTestCases.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(Class1WithTestCases.class, "test1()"));
assertThat(uniqueIds).contains(uniqueIdForClass(Class2WithTestCases.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(Class2WithTestCases.class, "test2()"));
assertThat(uniqueIds).contains(
uniqueIdForMethod(ClassWithStaticInnerTestCases.ShouldBeDiscovered.class, "test1()"));
}
@Test
void classpathResolutionForJarFiles() throws Exception {
URL jarUrl = requireNonNull(getClass().getResource("/jupiter-testjar.jar"));
Path path = Path.of(jarUrl.toURI());
List<ClasspathRootSelector> selectors = selectClasspathRoots(Set.of(path));
ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
try (URLClassLoader classLoader = new URLClassLoader(new URL[] { jarUrl })) {
Thread.currentThread().setContextClassLoader(classLoader);
resolve(request().selectors(selectors));
assertThat(uniqueIds()) //
.contains(uniqueIdForStaticClass("com.example.project.FirstTest")) //
.contains(uniqueIdForStaticClass("com.example.project.SecondTest"));
}
finally {
Thread.currentThread().setContextClassLoader(originalClassLoader);
}
}
@Test
void nestedTestResolutionFromBaseClass() {
ClassSelector selector = selectClass(TestCaseWithNesting.class);
resolve(request().selectors(selector));
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).hasSize(6);
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(TestCaseWithNesting.class, "testA()"));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.class, "testB()"));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class));
assertThat(uniqueIds).contains(
uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class, "testC()"));
}
@Test
void nestedTestResolutionFromNestedTestClass() {
ClassSelector selector = selectClass(TestCaseWithNesting.NestedTestCase.class);
resolve(request().selectors(selector));
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).hasSize(5);
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.class, "testB()"));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class));
assertThat(uniqueIds).contains(
uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class, "testC()"));
}
@Test
void nestedTestResolutionFromUniqueId() {
UniqueIdSelector selector = selectUniqueId(
uniqueIdForClass(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class).toString());
resolve(request().selectors(selector));
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).hasSize(4);
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class));
assertThat(uniqueIds).contains(
uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class, "testC()"));
}
@Test
void doubleNestedTestResolutionFromClass() {
ClassSelector selector = selectClass(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class);
resolve(request().selectors(selector));
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).hasSize(4);
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class));
assertThat(uniqueIds).contains(
uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class, "testC()"));
}
@Test
void methodResolutionInDoubleNestedTestClass() throws NoSuchMethodException {
MethodSelector selector = selectMethod(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class,
TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class.getDeclaredMethod("testC"));
resolve(request().selectors(selector));
assertEquals(4, requireNonNull(engineDescriptor).getDescendants().size());
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class));
assertThat(uniqueIds).contains(
uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.DoubleNestedTestCase.class, "testC()"));
}
@Test
void nestedTestResolutionFromUniqueIdToMethod() {
UniqueIdSelector selector = selectUniqueId(
uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.class, "testB()").toString());
resolve(request().selectors(selector));
List<UniqueId> uniqueIds = uniqueIds();
assertThat(uniqueIds).hasSize(3);
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.class));
assertThat(uniqueIds).contains(uniqueIdForClass(TestCaseWithNesting.NestedTestCase.class));
assertThat(uniqueIds).contains(uniqueIdForMethod(TestCaseWithNesting.NestedTestCase.class, "testB()"));
}
@Test
void testFactoryMethodResolutionByUniqueId() {
Class<?> clazz = MyTestClass.class;
UniqueId factoryUid = uniqueIdForTestFactoryMethod(clazz, "dynamicTest()");
resolve(request().selectors(selectUniqueId(factoryUid)));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(2);
assertThat(uniqueIds()).containsSequence(uniqueIdForClass(clazz), factoryUid);
}
@Test
void testTemplateMethodResolutionByUniqueId() {
Class<?> clazz = TestClassWithTemplate.class;
UniqueId templateUid = uniqueIdForTestTemplateMethod(clazz, "testTemplate()");
resolve(request().selectors(selectUniqueId(templateUid)));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(2);
assertThat(uniqueIds()).containsSequence(uniqueIdForClass(clazz), templateUid);
}
@Test
void resolvingDynamicTestByUniqueIdResolvesUpToParentTestFactory() {
Class<?> clazz = MyTestClass.class;
UniqueId factoryUid = uniqueIdForTestFactoryMethod(clazz, "dynamicTest()");
UniqueId dynamicTestUid = factoryUid.append(DYNAMIC_TEST_SEGMENT_TYPE, "#1");
UniqueId differentDynamicTestUid = factoryUid.append(DYNAMIC_TEST_SEGMENT_TYPE, "#2");
resolve(request().selectors(selectUniqueId(dynamicTestUid)));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(2);
assertThat(uniqueIds()).containsSequence(uniqueIdForClass(clazz), factoryUid);
TestDescriptor testClassDescriptor = getOnlyElement(requireNonNull(engineDescriptor).getChildren());
TestDescriptor testFactoryDescriptor = getOnlyElement(testClassDescriptor.getChildren());
DynamicDescendantFilter dynamicDescendantFilter = getDynamicDescendantFilter(testFactoryDescriptor);
assertThat(dynamicDescendantFilter.test(dynamicTestUid, 42)).isTrue();
assertThat(dynamicDescendantFilter.test(differentDynamicTestUid, 42)).isFalse();
assertAllSelectorsResolved();
}
@Test
void resolvingDynamicContainerByUniqueIdResolvesUpToParentTestFactory() {
Class<?> clazz = MyTestClass.class;
UniqueId factoryUid = uniqueIdForTestFactoryMethod(clazz, "dynamicTest()");
UniqueId dynamicContainerUid = factoryUid.append(DYNAMIC_CONTAINER_SEGMENT_TYPE, "#1");
UniqueId differentDynamicContainerUid = factoryUid.append(DYNAMIC_CONTAINER_SEGMENT_TYPE, "#2");
UniqueId dynamicTestUid = dynamicContainerUid.append(DYNAMIC_TEST_SEGMENT_TYPE, "#1");
UniqueId differentDynamicTestUid = dynamicContainerUid.append(DYNAMIC_TEST_SEGMENT_TYPE, "#2");
resolve(request().selectors(selectUniqueId(dynamicTestUid)));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(2);
assertThat(uniqueIds()).containsSequence(uniqueIdForClass(clazz), factoryUid);
TestDescriptor testClassDescriptor = getOnlyElement(requireNonNull(engineDescriptor).getChildren());
TestDescriptor testFactoryDescriptor = getOnlyElement(testClassDescriptor.getChildren());
DynamicDescendantFilter dynamicDescendantFilter = getDynamicDescendantFilter(testFactoryDescriptor);
assertThat(dynamicDescendantFilter.test(dynamicTestUid, 42)).isTrue();
assertThat(dynamicDescendantFilter.test(differentDynamicContainerUid, 42)).isFalse();
assertThat(dynamicDescendantFilter.test(differentDynamicTestUid, 42)).isFalse();
assertAllSelectorsResolved();
}
@Test
void resolvingDynamicTestByUniqueIdAndTestFactoryByMethodSelectorResolvesTestFactory() {
Class<?> clazz = MyTestClass.class;
UniqueId factoryUid = uniqueIdForTestFactoryMethod(clazz, "dynamicTest()");
UniqueId dynamicTestUid = factoryUid.append(DYNAMIC_TEST_SEGMENT_TYPE, "#1");
resolve(request().selectors(selectUniqueId(dynamicTestUid), selectMethod(clazz, "dynamicTest")));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(2);
assertThat(uniqueIds()).containsSequence(uniqueIdForClass(clazz), factoryUid);
TestDescriptor testClassDescriptor = getOnlyElement(requireNonNull(engineDescriptor).getChildren());
TestDescriptor testFactoryDescriptor = getOnlyElement(testClassDescriptor.getChildren());
DynamicDescendantFilter dynamicDescendantFilter = getDynamicDescendantFilter(testFactoryDescriptor);
assertThat(dynamicDescendantFilter.test(UniqueId.root("foo", "bar"), 42)).isTrue();
}
private DynamicDescendantFilter getDynamicDescendantFilter(TestDescriptor testDescriptor) {
assertThat(testDescriptor).isInstanceOf(JupiterTestDescriptor.class);
return ((Filterable) testDescriptor).getDynamicDescendantFilter();
}
@Test
void resolvingTestTemplateInvocationByUniqueIdResolvesOnlyUpToParentTestTemplate() {
Class<?> clazz = TestClassWithTemplate.class;
UniqueId templateUid = uniqueIdForTestTemplateMethod(clazz, "testTemplate()");
UniqueId invocationUid = templateUid.append(TestTemplateInvocationTestDescriptor.SEGMENT_TYPE, "#1");
resolve(request().selectors(selectUniqueId(invocationUid)));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(2);
assertThat(uniqueIds()).containsSequence(uniqueIdForClass(clazz), templateUid);
}
@Test
void includingPackageNameFilterExcludesClassesInNonMatchingPackages() {
resolve(request().selectors(selectClass(MatchingClass.class)).filters(
includePackageNames("org.junit.jupiter.engine.unknown")));
assertThat(requireNonNull(engineDescriptor).getDescendants()).isEmpty();
}
@Test
void includingPackageNameFilterIncludesClassesInMatchingPackages() {
resolve(request().selectors(selectClass(MatchingClass.class)).filters(
includePackageNames("org.junit.jupiter.engine")));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(3);
}
@Test
void excludingPackageNameFilterExcludesClassesInMatchingPackages() {
resolve(request().selectors(selectClass(MatchingClass.class)).filters(
excludePackageNames("org.junit.jupiter.engine")));
assertThat(requireNonNull(engineDescriptor).getDescendants()).isEmpty();
}
@Test
void excludingPackageNameFilterIncludesClassesInNonMatchingPackages() {
resolve(request().selectors(selectClass(MatchingClass.class)).filters(
excludePackageNames("org.junit.jupiter.engine.unknown")));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(3);
}
@Test
void classNamePatternFilterExcludesNonMatchingClasses() {
resolve(request().selectors(selectClass(MatchingClass.class), selectClass(OtherClass.class)).filters(
includeClassNamePatterns(".*MatchingClass")));
assertThat(requireNonNull(engineDescriptor).getDescendants()).hasSize(3);
}
private void resolve(LauncherDiscoveryRequestBuilder builder) {
engineDescriptor = discoverTests(builder.build()).getEngineDescriptor();
}
private TestDescriptor descriptorByUniqueId(UniqueId uniqueId) {
return requireNonNull(engineDescriptor).getDescendants().stream().filter(
d -> d.getUniqueId().equals(uniqueId)).findFirst().orElseThrow();
}
private List<UniqueId> uniqueIds() {
return requireNonNull(engineDescriptor).getDescendants().stream().map(TestDescriptor::getUniqueId).toList();
}
private LauncherDiscoveryRequestBuilder request() {
return defaultRequest() //
.configurationParameter(DEFAULT_DISCOVERY_LISTENER_CONFIGURATION_PROPERTY_NAME, "logging") //
.listeners(discoveryListener);
}
private void assertAllSelectorsResolved() {
ArgumentCaptor<SelectorResolutionResult> resultCaptor = ArgumentCaptor.forClass(SelectorResolutionResult.class);
verify(discoveryListener).selectorProcessed(eq(UniqueId.forEngine("junit-jupiter")), any(),
resultCaptor.capture());
assertThat(resultCaptor.getAllValues()) //
.flatExtracting(SelectorResolutionResult::getStatus) //
.allMatch(Predicate.isEqual(RESOLVED));
}
private void assertUnresolved(DiscoverySelector selector) {
var result = verifySelectorProcessed(selector);
assertThat(result.getStatus()).isEqualTo(UNRESOLVED);
}
private SelectorResolutionResult verifySelectorProcessed(DiscoverySelector selector) {
ArgumentCaptor<SelectorResolutionResult> resultCaptor = ArgumentCaptor.forClass(SelectorResolutionResult.class);
verify(discoveryListener).selectorProcessed(eq(UniqueId.forEngine("junit-jupiter")), eq(selector),
resultCaptor.capture());
return resultCaptor.getValue();
}
}
// -----------------------------------------------------------------------------
|
with
|
java
|
apache__camel
|
core/camel-util/src/main/java/org/apache/camel/util/OrderedProperties.java
|
{
"start": 1283,
"end": 1342
}
|
class ____ extends BaseOrderedProperties {
}
|
OrderedProperties
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/config/WebFluxViewResolutionIntegrationTests.java
|
{
"start": 5131,
"end": 5633
}
|
class ____ extends AbstractWebFluxConfig {
@Override
public void configureViewResolvers(ViewResolverRegistry registry) {
registry.freeMarker();
}
@Bean
public FreeMarkerConfigurer freeMarkerConfigurer() {
FreeMarkerConfigurer configurer = new FreeMarkerConfigurer();
configurer.setPreTemplateLoaders(classTemplateLoader);
configurer.setDefaultCharset(UTF_8);
return configurer;
}
}
@Configuration(proxyBeanMethods = false)
static
|
ExplicitDefaultEncodingConfig
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java
|
{
"start": 111403,
"end": 111630
}
|
class ____ extends AsyncScalarFunction {
public void eval(CompletableFuture<Integer> f, int i, Double d) {}
public void eval(CompletableFuture<Long> f, String s) {}
}
private static
|
OverloadedFunctionAsync
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/scripting/xmltags/DynamicSqlSource.java
|
{
"start": 978,
"end": 2051
}
|
class ____ implements SqlSource {
private final Configuration configuration;
private final SqlNode rootSqlNode;
private final ParamNameResolver paramNameResolver;
public DynamicSqlSource(Configuration configuration, SqlNode rootSqlNode) {
this(configuration, rootSqlNode, null);
}
public DynamicSqlSource(Configuration configuration, SqlNode rootSqlNode, ParamNameResolver paramNameResolver) {
this.configuration = configuration;
this.rootSqlNode = rootSqlNode;
this.paramNameResolver = paramNameResolver;
}
@Override
public BoundSql getBoundSql(Object parameterObject) {
DynamicContext context = new DynamicContext(configuration, parameterObject, null, paramNameResolver, true);
rootSqlNode.apply(context);
String sql = context.getSql();
SqlSource sqlSource = SqlSourceBuilder.buildSqlSource(configuration, sql, context.getParameterMappings());
BoundSql boundSql = sqlSource.getBoundSql(parameterObject);
context.getBindings().forEach(boundSql::setAdditionalParameter);
return boundSql;
}
}
|
DynamicSqlSource
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ThriftEndpointBuilderFactory.java
|
{
"start": 9428,
"end": 16255
}
|
interface ____
extends
EndpointConsumerBuilder {
default ThriftEndpointConsumerBuilder basic() {
return (ThriftEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets whether synchronous processing should be strictly used.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param synchronous the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder synchronous(boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param synchronous the value to set
* @return the dsl builder
*/
default AdvancedThriftEndpointConsumerBuilder synchronous(String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Builder for endpoint producers for the Thrift component.
*/
public
|
AdvancedThriftEndpointConsumerBuilder
|
java
|
spring-projects__spring-framework
|
integration-tests/src/test/java/org/springframework/core/env/EnvironmentSystemIntegrationTests.java
|
{
"start": 4928,
"end": 25437
}
|
class ____ {
private final ConfigurableEnvironment prodEnv = new StandardEnvironment();
private final ConfigurableEnvironment devEnv = new StandardEnvironment();
private final ConfigurableEnvironment prodWebEnv = new StandardServletEnvironment();
@BeforeEach
void setUp() {
prodEnv.setActiveProfiles(PROD_ENV_NAME);
devEnv.setActiveProfiles(DEV_ENV_NAME);
prodWebEnv.setActiveProfiles(PROD_ENV_NAME);
}
@Test
void genericApplicationContext_standardEnv() {
ConfigurableApplicationContext ctx = new GenericApplicationContext(newBeanFactoryWithEnvironmentAwareBean());
ctx.refresh();
assertHasStandardEnvironment(ctx);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, ctx.getEnvironment());
}
@Test
void genericApplicationContext_customEnv() {
GenericApplicationContext ctx = new GenericApplicationContext(newBeanFactoryWithEnvironmentAwareBean());
ctx.setEnvironment(prodEnv);
ctx.refresh();
assertHasEnvironment(ctx, prodEnv);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, prodEnv);
}
@Test
void xmlBeanDefinitionReader_inheritsEnvironmentFromEnvironmentCapableBDR() {
GenericApplicationContext ctx = new GenericApplicationContext();
ctx.setEnvironment(prodEnv);
new XmlBeanDefinitionReader(ctx).loadBeanDefinitions(XML_PATH);
ctx.refresh();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void annotatedBeanDefinitionReader_inheritsEnvironmentFromEnvironmentCapableBDR() {
GenericApplicationContext ctx = new GenericApplicationContext();
ctx.setEnvironment(prodEnv);
new AnnotatedBeanDefinitionReader(ctx).register(Config.class);
ctx.refresh();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void classPathBeanDefinitionScanner_inheritsEnvironmentFromEnvironmentCapableBDR_scanProfileAnnotatedConfigClasses() {
// it's actually ConfigurationClassPostProcessor's Environment that gets the job done here.
GenericApplicationContext ctx = new GenericApplicationContext();
ctx.setEnvironment(prodEnv);
ClassPathBeanDefinitionScanner scanner = new ClassPathBeanDefinitionScanner(ctx);
scanner.scan("org.springframework.core.env.scan1");
ctx.refresh();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void classPathBeanDefinitionScanner_inheritsEnvironmentFromEnvironmentCapableBDR_scanProfileAnnotatedComponents() {
GenericApplicationContext ctx = new GenericApplicationContext();
ctx.setEnvironment(prodEnv);
ClassPathBeanDefinitionScanner scanner = new ClassPathBeanDefinitionScanner(ctx);
scanner.scan("org.springframework.core.env.scan2");
ctx.refresh();
assertThat(scanner.getEnvironment()).isEqualTo(ctx.getEnvironment());
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void genericXmlApplicationContext() {
GenericXmlApplicationContext ctx = new GenericXmlApplicationContext();
assertHasStandardEnvironment(ctx);
ctx.setEnvironment(prodEnv);
ctx.load(XML_PATH);
ctx.refresh();
assertHasEnvironment(ctx, prodEnv);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, prodEnv);
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void classPathXmlApplicationContext() {
ConfigurableApplicationContext ctx = new ClassPathXmlApplicationContext(XML_PATH);
ctx.setEnvironment(prodEnv);
ctx.refresh();
assertEnvironmentBeanRegistered(ctx);
assertHasEnvironment(ctx, prodEnv);
assertEnvironmentAwareInvoked(ctx, ctx.getEnvironment());
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void fileSystemXmlApplicationContext() throws IOException {
ClassPathResource xml = new ClassPathResource(XML_PATH);
File tmpFile = File.createTempFile("test", "xml");
FileCopyUtils.copy(xml.getFile(), tmpFile);
// strange - FSXAC strips leading '/' unless prefixed with 'file:'
ConfigurableApplicationContext ctx =
new FileSystemXmlApplicationContext(new String[] {"file:" + tmpFile.getPath()}, false);
ctx.setEnvironment(prodEnv);
ctx.refresh();
assertEnvironmentBeanRegistered(ctx);
assertHasEnvironment(ctx, prodEnv);
assertEnvironmentAwareInvoked(ctx, ctx.getEnvironment());
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void annotationConfigApplicationContext_withPojos() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
assertHasStandardEnvironment(ctx);
ctx.setEnvironment(prodEnv);
ctx.register(EnvironmentAwareBean.class);
ctx.refresh();
assertEnvironmentAwareInvoked(ctx, prodEnv);
}
@Test
void annotationConfigApplicationContext_withProdEnvAndProdConfigClass() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
assertHasStandardEnvironment(ctx);
ctx.setEnvironment(prodEnv);
ctx.register(ProdConfig.class);
ctx.refresh();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void annotationConfigApplicationContext_withProdEnvAndDevConfigClass() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
assertHasStandardEnvironment(ctx);
ctx.setEnvironment(prodEnv);
ctx.register(DevConfig.class);
ctx.refresh();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(TRANSITIVE_BEAN_NAME)).isFalse();
}
@Test
void annotationConfigApplicationContext_withDevEnvAndDevConfigClass() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
assertHasStandardEnvironment(ctx);
ctx.setEnvironment(devEnv);
ctx.register(DevConfig.class);
ctx.refresh();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isTrue();
assertThat(ctx.containsBean(TRANSITIVE_BEAN_NAME)).isTrue();
}
@Test
void annotationConfigApplicationContext_withImportedConfigClasses() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
assertHasStandardEnvironment(ctx);
ctx.setEnvironment(prodEnv);
ctx.register(Config.class);
ctx.refresh();
assertEnvironmentAwareInvoked(ctx, prodEnv);
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(TRANSITIVE_BEAN_NAME)).isFalse();
}
@Test
void mostSpecificDerivedClassDrivesEnvironment_withDerivedDevEnvAndDerivedDevConfigClass() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
StandardEnvironment derivedDevEnv = new StandardEnvironment();
derivedDevEnv.setActiveProfiles(DERIVED_DEV_ENV_NAME);
ctx.setEnvironment(derivedDevEnv);
ctx.register(DerivedDevConfig.class);
ctx.refresh();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isTrue();
assertThat(ctx.containsBean(DERIVED_DEV_BEAN_NAME)).isTrue();
assertThat(ctx.containsBean(TRANSITIVE_BEAN_NAME)).isTrue();
}
@Test
void mostSpecificDerivedClassDrivesEnvironment_withDevEnvAndDerivedDevConfigClass() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.setEnvironment(devEnv);
ctx.register(DerivedDevConfig.class);
ctx.refresh();
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(DERIVED_DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(TRANSITIVE_BEAN_NAME)).isFalse();
}
@Test
void annotationConfigApplicationContext_withProfileExpressionMatchOr() {
testProfileExpression(true, "p3");
}
@Test
void annotationConfigApplicationContext_withProfileExpressionMatchAnd() {
testProfileExpression(true, "p1", "p2");
}
@Test
void annotationConfigApplicationContext_withProfileExpressionNoMatchAnd() {
testProfileExpression(false, "p1");
}
@Test
void annotationConfigApplicationContext_withProfileExpressionNoMatchNone() {
testProfileExpression(false, "p4");
}
private void testProfileExpression(boolean expected, String... activeProfiles) {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
StandardEnvironment environment = new StandardEnvironment();
environment.setActiveProfiles(activeProfiles);
ctx.setEnvironment(environment);
ctx.register(ProfileExpressionConfig.class);
ctx.refresh();
assertThat(ctx.containsBean("expressionBean")).isEqualTo(expected);
}
@Test
void webApplicationContext() {
GenericWebApplicationContext ctx = new GenericWebApplicationContext(newBeanFactoryWithEnvironmentAwareBean());
assertHasStandardServletEnvironment(ctx);
ctx.setEnvironment(prodWebEnv);
ctx.refresh();
assertHasEnvironment(ctx, prodWebEnv);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, prodWebEnv);
}
@Test
void xmlWebApplicationContext() {
AbstractRefreshableWebApplicationContext ctx = new XmlWebApplicationContext();
ctx.setConfigLocation("classpath:" + XML_PATH);
ctx.setEnvironment(prodWebEnv);
ctx.refresh();
assertHasEnvironment(ctx, prodWebEnv);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, prodWebEnv);
assertThat(ctx.containsBean(DEV_BEAN_NAME)).isFalse();
assertThat(ctx.containsBean(PROD_BEAN_NAME)).isTrue();
}
@Test
void staticApplicationContext() {
StaticApplicationContext ctx = new StaticApplicationContext();
assertHasStandardEnvironment(ctx);
registerEnvironmentBeanDefinition(ctx);
ctx.setEnvironment(prodEnv);
ctx.refresh();
assertHasEnvironment(ctx, prodEnv);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, prodEnv);
}
@Test
void staticWebApplicationContext() {
StaticWebApplicationContext ctx = new StaticWebApplicationContext();
assertHasStandardServletEnvironment(ctx);
registerEnvironmentBeanDefinition(ctx);
ctx.setEnvironment(prodWebEnv);
ctx.refresh();
assertHasEnvironment(ctx, prodWebEnv);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, prodWebEnv);
}
@Test
void annotationConfigWebApplicationContext() {
AnnotationConfigWebApplicationContext ctx = new AnnotationConfigWebApplicationContext();
ctx.setEnvironment(prodWebEnv);
ctx.setConfigLocation(EnvironmentAwareBean.class.getName());
ctx.refresh();
assertHasEnvironment(ctx, prodWebEnv);
assertEnvironmentBeanRegistered(ctx);
assertEnvironmentAwareInvoked(ctx, prodWebEnv);
}
@Test
void registerServletParamPropertySources_AbstractRefreshableWebApplicationContext() {
MockServletContext servletContext = new MockServletContext();
servletContext.addInitParameter("pCommon", "pCommonContextValue");
servletContext.addInitParameter("pContext1", "pContext1Value");
MockServletConfig servletConfig = new MockServletConfig(servletContext);
servletConfig.addInitParameter("pCommon", "pCommonConfigValue");
servletConfig.addInitParameter("pConfig1", "pConfig1Value");
AbstractRefreshableWebApplicationContext ctx = new AnnotationConfigWebApplicationContext();
ctx.setConfigLocation(EnvironmentAwareBean.class.getName());
ctx.setServletConfig(servletConfig);
ctx.refresh();
ConfigurableEnvironment environment = ctx.getEnvironment();
assertThat(environment).isInstanceOf(StandardServletEnvironment.class);
MutablePropertySources propertySources = environment.getPropertySources();
assertThat(propertySources.contains(StandardServletEnvironment.SERVLET_CONTEXT_PROPERTY_SOURCE_NAME)).isTrue();
assertThat(propertySources.contains(StandardServletEnvironment.SERVLET_CONFIG_PROPERTY_SOURCE_NAME)).isTrue();
// ServletConfig gets precedence
assertThat(environment.getProperty("pCommon")).isEqualTo("pCommonConfigValue");
assertThat(propertySources.precedenceOf(PropertySource.named(StandardServletEnvironment.SERVLET_CONFIG_PROPERTY_SOURCE_NAME)))
.isLessThan(propertySources.precedenceOf(PropertySource.named(StandardServletEnvironment.SERVLET_CONTEXT_PROPERTY_SOURCE_NAME)));
// but all params are available
assertThat(environment.getProperty("pContext1")).isEqualTo("pContext1Value");
assertThat(environment.getProperty("pConfig1")).isEqualTo("pConfig1Value");
// Servlet* PropertySources have precedence over System* PropertySources
assertThat(propertySources.precedenceOf(PropertySource.named(StandardServletEnvironment.SERVLET_CONFIG_PROPERTY_SOURCE_NAME)))
.isLessThan(propertySources.precedenceOf(PropertySource.named(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME)));
// Replace system properties with a mock property source for convenience
MockPropertySource mockSystemProperties = new MockPropertySource(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME);
mockSystemProperties.setProperty("pCommon", "pCommonSysPropsValue");
mockSystemProperties.setProperty("pSysProps1", "pSysProps1Value");
propertySources.replace(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME, mockSystemProperties);
// assert that servletconfig params resolve with higher precedence than sysprops
assertThat(environment.getProperty("pCommon")).isEqualTo("pCommonConfigValue");
assertThat(environment.getProperty("pSysProps1")).isEqualTo("pSysProps1Value");
}
@Test
void registerServletParamPropertySources_GenericWebApplicationContext() {
MockServletContext servletContext = new MockServletContext();
servletContext.addInitParameter("pCommon", "pCommonContextValue");
servletContext.addInitParameter("pContext1", "pContext1Value");
GenericWebApplicationContext ctx = new GenericWebApplicationContext();
ctx.setServletContext(servletContext);
ctx.refresh();
ConfigurableEnvironment environment = ctx.getEnvironment();
assertThat(environment).isInstanceOf(StandardServletEnvironment.class);
MutablePropertySources propertySources = environment.getPropertySources();
assertThat(propertySources.contains(StandardServletEnvironment.SERVLET_CONTEXT_PROPERTY_SOURCE_NAME)).isTrue();
// ServletContext params are available
assertThat(environment.getProperty("pCommon")).isEqualTo("pCommonContextValue");
assertThat(environment.getProperty("pContext1")).isEqualTo("pContext1Value");
// Servlet* PropertySources have precedence over System* PropertySources
assertThat(propertySources.precedenceOf(PropertySource.named(StandardServletEnvironment.SERVLET_CONTEXT_PROPERTY_SOURCE_NAME)))
.isLessThan(propertySources.precedenceOf(PropertySource.named(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME)));
// Replace system properties with a mock property source for convenience
MockPropertySource mockSystemProperties = new MockPropertySource(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME);
mockSystemProperties.setProperty("pCommon", "pCommonSysPropsValue");
mockSystemProperties.setProperty("pSysProps1", "pSysProps1Value");
propertySources.replace(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME, mockSystemProperties);
// assert that servletcontext init params resolve with higher precedence than sysprops
assertThat(environment.getProperty("pCommon")).isEqualTo("pCommonContextValue");
assertThat(environment.getProperty("pSysProps1")).isEqualTo("pSysProps1Value");
}
@Test
void registerServletParamPropertySources_StaticWebApplicationContext() {
MockServletContext servletContext = new MockServletContext();
servletContext.addInitParameter("pCommon", "pCommonContextValue");
servletContext.addInitParameter("pContext1", "pContext1Value");
MockServletConfig servletConfig = new MockServletConfig(servletContext);
servletConfig.addInitParameter("pCommon", "pCommonConfigValue");
servletConfig.addInitParameter("pConfig1", "pConfig1Value");
StaticWebApplicationContext ctx = new StaticWebApplicationContext();
ctx.setServletConfig(servletConfig);
ctx.refresh();
ConfigurableEnvironment environment = ctx.getEnvironment();
MutablePropertySources propertySources = environment.getPropertySources();
assertThat(propertySources.contains(StandardServletEnvironment.SERVLET_CONTEXT_PROPERTY_SOURCE_NAME)).isTrue();
assertThat(propertySources.contains(StandardServletEnvironment.SERVLET_CONFIG_PROPERTY_SOURCE_NAME)).isTrue();
// ServletConfig gets precedence
assertThat(environment.getProperty("pCommon")).isEqualTo("pCommonConfigValue");
assertThat(propertySources.precedenceOf(PropertySource.named(StandardServletEnvironment.SERVLET_CONFIG_PROPERTY_SOURCE_NAME)))
.isLessThan(propertySources.precedenceOf(PropertySource.named(StandardServletEnvironment.SERVLET_CONTEXT_PROPERTY_SOURCE_NAME)));
// but all params are available
assertThat(environment.getProperty("pContext1")).isEqualTo("pContext1Value");
assertThat(environment.getProperty("pConfig1")).isEqualTo("pConfig1Value");
// Servlet* PropertySources have precedence over System* PropertySources
assertThat(propertySources.precedenceOf(PropertySource.named(StandardServletEnvironment.SERVLET_CONFIG_PROPERTY_SOURCE_NAME)))
.isLessThan(propertySources.precedenceOf(PropertySource.named(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME)));
// Replace system properties with a mock property source for convenience
MockPropertySource mockSystemProperties = new MockPropertySource(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME);
mockSystemProperties.setProperty("pCommon", "pCommonSysPropsValue");
mockSystemProperties.setProperty("pSysProps1", "pSysProps1Value");
propertySources.replace(StandardEnvironment.SYSTEM_PROPERTIES_PROPERTY_SOURCE_NAME, mockSystemProperties);
// assert that servletconfig params resolve with higher precedence than sysprops
assertThat(environment.getProperty("pCommon")).isEqualTo("pCommonConfigValue");
assertThat(environment.getProperty("pSysProps1")).isEqualTo("pSysProps1Value");
}
@Test
void abstractApplicationContextValidatesRequiredPropertiesOnRefresh() {
{
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.refresh();
}
{
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.getEnvironment().setRequiredProperties("foo", "bar");
assertThatExceptionOfType(MissingRequiredPropertiesException.class).isThrownBy(ctx::refresh);
}
{
ConfigurableApplicationContext ctx = new AnnotationConfigApplicationContext();
ctx.getEnvironment().setRequiredProperties("foo");
ctx.setEnvironment(new MockEnvironment().withProperty("foo", "fooValue"));
ctx.refresh(); // should succeed
}
}
private DefaultListableBeanFactory newBeanFactoryWithEnvironmentAwareBean() {
DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
registerEnvironmentBeanDefinition(bf);
return bf;
}
private void registerEnvironmentBeanDefinition(BeanDefinitionRegistry registry) {
registry.registerBeanDefinition(ENVIRONMENT_AWARE_BEAN_NAME,
rootBeanDefinition(EnvironmentAwareBean.class).getBeanDefinition());
}
private void assertEnvironmentBeanRegistered(
ConfigurableApplicationContext ctx) {
// ensure environment is registered as a bean
assertThat(ctx.containsBean(ENVIRONMENT_BEAN_NAME)).isTrue();
}
private void assertHasStandardEnvironment(ApplicationContext ctx) {
Environment defaultEnv = ctx.getEnvironment();
assertThat(defaultEnv).isNotNull();
assertThat(defaultEnv).isInstanceOf(StandardEnvironment.class);
}
private void assertHasStandardServletEnvironment(WebApplicationContext ctx) {
// ensure a default servlet environment exists
Environment defaultEnv = ctx.getEnvironment();
assertThat(defaultEnv).isNotNull();
assertThat(defaultEnv).isInstanceOf(StandardServletEnvironment.class);
}
private void assertHasEnvironment(ApplicationContext ctx, Environment expectedEnv) {
// ensure the custom environment took
Environment actualEnv = ctx.getEnvironment();
assertThat(actualEnv).isNotNull();
assertThat(actualEnv).isEqualTo(expectedEnv);
// ensure environment is registered as a bean
assertThat(ctx.containsBean(ENVIRONMENT_BEAN_NAME)).isTrue();
}
private void assertEnvironmentAwareInvoked(ConfigurableApplicationContext ctx, Environment expectedEnv) {
assertThat(ctx.getBean(EnvironmentAwareBean.class).environment).isEqualTo(expectedEnv);
}
private static
|
EnvironmentSystemIntegrationTests
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-native/src/main/java/org/apache/dubbo/aot/api/JdkProxyDescriber.java
|
{
"start": 1000,
"end": 2041
}
|
class ____ implements ConditionalDescriber {
private final List<String> proxiedInterfaces;
private final String reachableType;
public JdkProxyDescriber(List<String> proxiedInterfaces, String reachableType) {
this.proxiedInterfaces = proxiedInterfaces;
this.reachableType = reachableType;
}
public List<String> getProxiedInterfaces() {
return proxiedInterfaces;
}
@Override
public String getReachableType() {
return reachableType;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
JdkProxyDescriber that = (JdkProxyDescriber) o;
return Objects.equals(proxiedInterfaces, that.proxiedInterfaces)
&& Objects.equals(reachableType, that.reachableType);
}
@Override
public int hashCode() {
return Objects.hash(proxiedInterfaces, reachableType);
}
}
|
JdkProxyDescriber
|
java
|
quarkusio__quarkus
|
extensions/smallrye-graphql-client/deployment/src/main/java/io/quarkus/smallrye/graphql/client/deployment/SmallRyeGraphQLClientProcessor.java
|
{
"start": 10865,
"end": 15283
}
|
class ____ in GraphQL client configuration rather than
* fully qualified names. This method computes a mapping between short names and qualified names,
* and the configuration merger bean will take it into account when merging Quarkus configuration
* with SmallRye-side configuration.
*/
@BuildStep
@Record(RUNTIME_INIT)
@Consume(SyntheticBeansRuntimeInitBuildItem.class)
GraphQLClientConfigInitializedBuildItem mergeClientConfigurations(SmallRyeGraphQLClientRecorder recorder,
BeanArchiveIndexBuildItem index) {
// to store config keys of all clients found in the application code
List<String> knownConfigKeys = new ArrayList<>();
Map<String, String> shortNamesToQualifiedNames = new HashMap<>();
for (AnnotationInstance annotation : index.getIndex().getAnnotations(GRAPHQL_CLIENT_API)) {
ClassInfo clazz = annotation.target().asClass();
shortNamesToQualifiedNames.put(clazz.name().withoutPackagePrefix(), clazz.name().toString());
AnnotationValue configKeyValue = annotation.value("configKey");
String configKey = configKeyValue != null ? configKeyValue.asString() : null;
String actualConfigKey = (configKey != null && !configKey.equals("")) ? configKey : clazz.name().toString();
knownConfigKeys.add(actualConfigKey);
}
for (AnnotationInstance annotation : index.getIndex().getAnnotations(GRAPHQL_CLIENT)) {
String configKey = annotation.value().asString();
if (configKey == null) {
configKey = "default";
}
knownConfigKeys.add(configKey);
}
GraphQLClientSupport support = new GraphQLClientSupport();
support.setShortNamesToQualifiedNamesMapping(shortNamesToQualifiedNames);
support.setKnownConfigKeys(knownConfigKeys);
recorder.mergeClientConfigurations(support);
return new GraphQLClientConfigInitializedBuildItem();
}
@BuildStep
@Record(RUNTIME_INIT)
void buildClientModel(CombinedIndexBuildItem index, SmallRyeGraphQLClientRecorder recorder,
BuildProducer<SyntheticBeanBuildItem> syntheticBeans, GraphQLClientBuildConfig quarkusConfig) {
if (!index.getIndex().getAnnotations(GRAPHQL_CLIENT_API).isEmpty()) {
ClientModels clientModels = (quarkusConfig.enableBuildTimeScanning()) ? ClientModelBuilder.build(index.getIndex())
: new ClientModels(); // empty Client Model(s)
RuntimeValue<ClientModels> modelRuntimeClientModel = recorder.getRuntimeClientModel(clientModels);
DotName supportClassName = DotName.createSimple(ClientModels.class.getName());
SyntheticBeanBuildItem bean = SyntheticBeanBuildItem
.configure(supportClassName)
.addType(supportClassName)
.scope(Singleton.class)
.runtimeValue(modelRuntimeClientModel)
.setRuntimeInit()
.unremovable()
.done();
syntheticBeans.produce(bean);
}
}
@BuildStep
ServiceProviderBuildItem overrideErrorMessageProvider() {
return ServiceProviderBuildItem.allProvidersFromClassPath("io.smallrye.graphql.client.impl.ErrorMessageProvider");
}
@BuildStep
@Record(RUNTIME_INIT)
void setGlobalVertxInstance(CoreVertxBuildItem vertxBuildItem,
SmallRyeGraphQLClientRecorder recorder) {
recorder.setGlobalVertxInstance(vertxBuildItem.getVertx());
}
@BuildStep
void setAdditionalClassesToIndex(BuildProducer<AdditionalIndexedClassesBuildItem> additionalClassesToIndex,
GraphQLClientBuildConfig quarkusConfig) {
if (quarkusConfig.enableBuildTimeScanning()) {
additionalClassesToIndex.produce(new AdditionalIndexedClassesBuildItem(Closeable.class.getName()));
additionalClassesToIndex.produce(new AdditionalIndexedClassesBuildItem(AutoCloseable.class.getName()));
additionalClassesToIndex.produce(new AdditionalIndexedClassesBuildItem(Input.class.getName()));
}
}
@BuildStep
void registerCertificateUpdateEventListener(BuildProducer<AdditionalBeanBuildItem> additionalBeans) {
additionalBeans.produce(new AdditionalBeanBuildItem(CERTIFICATE_UPDATE_EVENT_LISTENER));
}
}
|
names
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/CrossClusterAccessTransportInterceptorTests.java
|
{
"start": 5018,
"end": 43905
}
|
class ____ extends AbstractServerTransportInterceptorTests {
private Settings settings;
private ThreadPool threadPool;
private ThreadContext threadContext;
private SecurityContext securityContext;
private ClusterService clusterService;
private MockLicenseState mockLicenseState;
private DestructiveOperations destructiveOperations;
private CrossClusterApiKeySignatureManager crossClusterApiKeySignatureManager;
@Override
public void setUp() throws Exception {
super.setUp();
settings = Settings.builder().put("path.home", createTempDir()).build();
threadPool = new TestThreadPool(getTestName());
clusterService = ClusterServiceUtils.createClusterService(threadPool);
threadContext = threadPool.getThreadContext();
securityContext = spy(new SecurityContext(settings, threadPool.getThreadContext()));
mockLicenseState = MockLicenseState.createMock();
Mockito.when(mockLicenseState.isAllowed(Security.ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE)).thenReturn(true);
destructiveOperations = new DestructiveOperations(
Settings.EMPTY,
new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING))
);
crossClusterApiKeySignatureManager = mock(CrossClusterApiKeySignatureManager.class);
}
@After
public void stopThreadPool() throws Exception {
clusterService.close();
terminate(threadPool);
}
public void testSendWithCrossClusterAccessHeadersForSystemUserRegularAction() throws Exception {
final String action;
final TransportRequest request;
if (randomBoolean()) {
action = randomAlphaOfLengthBetween(5, 30);
request = mock(TransportRequest.class);
} else {
action = ClusterStateAction.NAME;
request = mock(ClusterStateRequest.class);
}
doTestSendWithCrossClusterAccessHeaders(
true,
action,
request,
AuthenticationTestHelper.builder().internal(InternalUsers.SYSTEM_USER).build()
);
}
public void testSendWithCrossClusterAccessHeadersForSystemUserCcrInternalAction() throws Exception {
final String action = randomFrom(
"internal:admin/ccr/restore/session/put",
"internal:admin/ccr/restore/session/clear",
"internal:admin/ccr/restore/file_chunk/get"
);
final TransportRequest request = mock(TransportRequest.class);
doTestSendWithCrossClusterAccessHeaders(
true,
action,
request,
AuthenticationTestHelper.builder().internal(InternalUsers.SYSTEM_USER).build()
);
}
public void testSendWithCrossClusterAccessHeadersForRegularUserRegularAction() throws Exception {
final Authentication authentication = randomValueOtherThanMany(
authc -> authc.getAuthenticationType() == Authentication.AuthenticationType.INTERNAL,
() -> AuthenticationTestHelper.builder().build()
);
final String action = randomAlphaOfLengthBetween(5, 30);
final TransportRequest request = mock(TransportRequest.class);
doTestSendWithCrossClusterAccessHeaders(false, action, request, authentication);
}
public void testSendWithCrossClusterAccessHeadersForRegularUserClusterStateAction() throws Exception {
final Authentication authentication = randomValueOtherThanMany(
authc -> authc.getAuthenticationType() == Authentication.AuthenticationType.INTERNAL,
() -> AuthenticationTestHelper.builder().build()
);
final String action = ClusterStateAction.NAME;
final TransportRequest request = mock(ClusterStateRequest.class);
doTestSendWithCrossClusterAccessHeaders(true, action, request, authentication);
}
private void doTestSendWithCrossClusterAccessHeaders(
boolean shouldAssertForSystemUser,
String action,
TransportRequest request,
Authentication authentication
) throws IOException {
doTestSendWithCrossClusterAccessHeaders(shouldAssertForSystemUser, action, request, authentication, TransportVersion.current());
}
private void doTestSendWithCrossClusterAccessHeaders(
boolean shouldAssertForSystemUser,
String action,
TransportRequest request,
Authentication authentication,
TransportVersion transportVersion
) throws IOException {
authentication.writeToContext(threadContext);
final String expectedRequestId = AuditUtil.getOrGenerateRequestId(threadContext);
if (randomBoolean()) {
threadContext.putHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER, randomProjectIdOrDefault().id());
}
final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10);
final String encodedApiKey = randomAlphaOfLengthBetween(10, 42);
final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(encodedApiKey);
final AuthorizationService authzService = mock(AuthorizationService.class);
// We capture the listener so that we can complete the full flow, by calling onResponse further down
@SuppressWarnings("unchecked")
final ArgumentCaptor<ActionListener<RoleDescriptorsIntersection>> listenerCaptor = ArgumentCaptor.forClass(ActionListener.class);
doAnswer(i -> null).when(authzService)
.getRoleDescriptorsIntersectionForRemoteCluster(any(), any(), any(), listenerCaptor.capture());
final SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
authzService,
mockSslService(),
securityContext,
destructiveOperations,
new CrossClusterAccessTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
authzService,
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
mockLicenseState,
ignored -> Optional.of(
new RemoteConnectionManager.RemoteClusterAliasWithCredentials(
remoteClusterAlias,
new SecureString(encodedApiKey.toCharArray())
)
)
)
);
final AtomicBoolean calledWrappedSender = new AtomicBoolean(false);
final AtomicReference<String> sentAction = new AtomicReference<>();
final AtomicReference<String> sentCredential = new AtomicReference<>();
final AtomicReference<CrossClusterAccessSubjectInfo> sentCrossClusterAccessSubjectInfo = new AtomicReference<>();
final TransportInterceptor.AsyncSender sender = interceptor.interceptSender(new TransportInterceptor.AsyncSender() {
@Override
public <T extends TransportResponse> void sendRequest(
Transport.Connection connection,
String action,
TransportRequest request,
TransportRequestOptions options,
TransportResponseHandler<T> handler
) {
if (calledWrappedSender.compareAndSet(false, true) == false) {
fail("sender called more than once");
}
assertThat(securityContext.getAuthentication(), nullValue());
assertThat(AuditUtil.extractRequestId(securityContext.getThreadContext()), equalTo(expectedRequestId));
assertThat(threadContext.getHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER), nullValue());
sentAction.set(action);
sentCredential.set(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY));
try {
sentCrossClusterAccessSubjectInfo.set(
CrossClusterAccessSubjectInfo.decode(
securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY)
)
);
} catch (IOException e) {
fail("no exceptions expected but got " + e);
}
handler.handleResponse(null);
}
});
final Transport.Connection connection = mock(Transport.Connection.class);
when(connection.getTransportVersion()).thenReturn(transportVersion);
sender.sendRequest(connection, action, request, null, new TransportResponseHandler<>() {
@Override
public Executor executor() {
return TransportResponseHandler.TRANSPORT_WORKER;
}
@Override
public void handleResponse(TransportResponse response) {
// Headers should get restored before handle response is called
assertThat(securityContext.getAuthentication(), equalTo(authentication));
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue());
}
@Override
public void handleException(TransportException exp) {
fail("no exceptions expected but got " + exp);
}
@Override
public TransportResponse read(StreamInput in) {
return null;
}
});
if (shouldAssertForSystemUser) {
assertThat(
sentCrossClusterAccessSubjectInfo.get(),
equalTo(
SystemUser.crossClusterAccessSubjectInfo(
authentication.getEffectiveSubject().getTransportVersion(),
authentication.getEffectiveSubject().getRealm().getNodeName()
)
)
);
verify(authzService, never()).getRoleDescriptorsIntersectionForRemoteCluster(
eq(remoteClusterAlias),
eq(TransportVersion.current()),
eq(authentication.getEffectiveSubject()),
anyActionListener()
);
} else {
final RoleDescriptorsIntersection expectedRoleDescriptorsIntersection = new RoleDescriptorsIntersection(
randomList(1, 3, () -> Set.copyOf(randomUniquelyNamedRoleDescriptors(0, 1)))
);
// Call listener to complete flow
listenerCaptor.getValue().onResponse(expectedRoleDescriptorsIntersection);
verify(authzService, times(1)).getRoleDescriptorsIntersectionForRemoteCluster(
eq(remoteClusterAlias),
eq(TransportVersion.current()),
eq(authentication.getEffectiveSubject()),
anyActionListener()
);
assertThat(
sentCrossClusterAccessSubjectInfo.get(),
equalTo(new CrossClusterAccessSubjectInfo(authentication, expectedRoleDescriptorsIntersection))
);
}
assertTrue(calledWrappedSender.get());
if (action.startsWith("internal:")) {
assertThat(sentAction.get(), equalTo("indices:internal/" + action.substring("internal:".length())));
} else {
assertThat(sentAction.get(), equalTo(action));
}
assertThat(sentCredential.get(), equalTo(remoteClusterCredential));
verify(securityContext, never()).executeAsInternalUser(any(), any(), anyConsumer());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue());
assertThat(AuditUtil.extractRequestId(securityContext.getThreadContext()), equalTo(expectedRequestId));
}
public void testSendWithUserIfCrossClusterAccessHeadersConditionNotMet() throws Exception {
boolean noCredential = randomBoolean();
final boolean notRemoteConnection = randomBoolean();
// Ensure at least one condition fails
if (false == (notRemoteConnection || noCredential)) {
noCredential = true;
}
final boolean finalNoCredential = noCredential;
final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10);
final String encodedApiKey = randomAlphaOfLengthBetween(10, 42);
final AuthenticationTestHelper.AuthenticationTestBuilder builder = AuthenticationTestHelper.builder();
final Authentication authentication = randomFrom(
builder.apiKey().build(),
builder.serviceAccount().build(),
builder.user(new User(randomAlphaOfLengthBetween(3, 10), randomRoles())).realm().build()
);
authentication.writeToContext(threadContext);
final AuthorizationService authzService = mock(AuthorizationService.class);
final SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
authzService,
mockSslService(),
securityContext,
destructiveOperations,
new CrossClusterAccessTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
authzService,
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
mockLicenseState,
ignored -> notRemoteConnection
? Optional.empty()
: (finalNoCredential
? Optional.of(new RemoteConnectionManager.RemoteClusterAliasWithCredentials(remoteClusterAlias, null))
: Optional.of(
new RemoteConnectionManager.RemoteClusterAliasWithCredentials(
remoteClusterAlias,
new SecureString(encodedApiKey.toCharArray())
)
))
)
);
final AtomicBoolean calledWrappedSender = new AtomicBoolean(false);
final AtomicReference<Authentication> sentAuthentication = new AtomicReference<>();
final TransportInterceptor.AsyncSender sender = interceptor.interceptSender(new TransportInterceptor.AsyncSender() {
@Override
public <T extends TransportResponse> void sendRequest(
Transport.Connection connection,
String action,
TransportRequest request,
TransportRequestOptions options,
TransportResponseHandler<T> handler
) {
if (calledWrappedSender.compareAndSet(false, true) == false) {
fail("sender called more than once");
}
sentAuthentication.set(securityContext.getAuthentication());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue());
}
});
final Transport.Connection connection = mock(Transport.Connection.class);
when(connection.getTransportVersion()).thenReturn(TransportVersion.current());
sender.sendRequest(connection, "action", mock(TransportRequest.class), null, null);
assertTrue(calledWrappedSender.get());
assertThat(sentAuthentication.get(), equalTo(authentication));
verify(authzService, never()).getRoleDescriptorsIntersectionForRemoteCluster(any(), any(), any(), anyActionListener());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue());
}
public void testSendRemoteRequestFailsIfUserHasNoRemoteIndicesPrivileges() throws Exception {
final Authentication authentication = AuthenticationTestHelper.builder()
.user(new User(randomAlphaOfLengthBetween(3, 10), randomRoles()))
.realm()
.build();
authentication.writeToContext(threadContext);
final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10);
final String encodedApiKey = randomAlphaOfLengthBetween(10, 42);
final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(encodedApiKey);
final AuthorizationService authzService = mock(AuthorizationService.class);
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
final var listener = (ActionListener<RoleDescriptorsIntersection>) invocation.getArgument(3);
listener.onResponse(RoleDescriptorsIntersection.EMPTY);
return null;
}).when(authzService).getRoleDescriptorsIntersectionForRemoteCluster(any(), any(), any(), anyActionListener());
final SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
authzService,
mockSslService(),
securityContext,
destructiveOperations,
new CrossClusterAccessTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
authzService,
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
mockLicenseState,
ignored -> Optional.of(
new RemoteConnectionManager.RemoteClusterAliasWithCredentials(
remoteClusterAlias,
new SecureString(encodedApiKey.toCharArray())
)
)
)
);
final TransportInterceptor.AsyncSender sender = interceptor.interceptSender(new TransportInterceptor.AsyncSender() {
@Override
public <T extends TransportResponse> void sendRequest(
Transport.Connection connection,
String action,
TransportRequest request,
TransportRequestOptions options,
TransportResponseHandler<T> handler
) {
fail("request should have failed");
}
});
final Transport.Connection connection = mock(Transport.Connection.class);
when(connection.getTransportVersion()).thenReturn(TransportVersion.current());
final ElasticsearchSecurityException expectedException = new ElasticsearchSecurityException("remote action denied");
when(authzService.remoteActionDenied(authentication, "action", remoteClusterAlias)).thenReturn(expectedException);
final var actualException = new AtomicReference<Throwable>();
sender.sendRequest(connection, "action", mock(TransportRequest.class), null, new TransportResponseHandler<>() {
@Override
public Executor executor() {
return TransportResponseHandler.TRANSPORT_WORKER;
}
@Override
public void handleResponse(TransportResponse response) {
fail("should not success");
}
@Override
public void handleException(TransportException exp) {
actualException.set(exp.getCause());
}
@Override
public TransportResponse read(StreamInput in) {
return null;
}
});
assertThat(actualException.get(), is(expectedException));
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue());
}
public void testSendWithCrossClusterAccessHeadersWithUnsupportedLicense() throws Exception {
final MockLicenseState unsupportedLicenseState = MockLicenseState.createMock();
Mockito.when(unsupportedLicenseState.isAllowed(Security.ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE)).thenReturn(false);
AuthenticationTestHelper.builder().build().writeToContext(threadContext);
final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10);
final SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
mock(AuthorizationService.class),
mockSslService(),
securityContext,
destructiveOperations,
new CrossClusterAccessTransportInterceptor(
settings,
threadPool,
mock(AuthenticationService.class),
mock(AuthorizationService.class),
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
unsupportedLicenseState,
mockRemoteClusterCredentialsResolver(remoteClusterAlias)
)
);
final TransportInterceptor.AsyncSender sender = interceptor.interceptSender(
mock(TransportInterceptor.AsyncSender.class, ignored -> {
throw new AssertionError("sender should not be called");
})
);
final Transport.Connection connection = mock(Transport.Connection.class);
when(connection.getTransportVersion()).thenReturn(TransportVersion.current());
final AtomicBoolean calledHandleException = new AtomicBoolean(false);
final AtomicReference<TransportException> actualException = new AtomicReference<>();
sender.sendRequest(connection, "action", mock(TransportRequest.class), null, new TransportResponseHandler<>() {
@Override
public Executor executor() {
return TransportResponseHandler.TRANSPORT_WORKER;
}
@Override
public void handleResponse(TransportResponse response) {
fail("should not receive a response");
}
@Override
public void handleException(TransportException exp) {
if (calledHandleException.compareAndSet(false, true) == false) {
fail("handle exception called more than once");
}
actualException.set(exp);
}
@Override
public TransportResponse read(StreamInput in) {
fail("should not receive a response");
return null;
}
});
assertThat(actualException.get(), instanceOf(SendRequestTransportException.class));
assertThat(actualException.get().getCause(), instanceOf(ElasticsearchSecurityException.class));
assertThat(
actualException.get().getCause().getMessage(),
equalTo("current license is non-compliant for [" + Security.ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE.getName() + "]")
);
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue());
assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue());
}
public void testProfileFiltersCreatedDifferentlyForDifferentTransportAndRemoteClusterSslSettings() {
// filters are created irrespective of ssl enabled
final boolean transportSslEnabled = randomBoolean();
final boolean remoteClusterSslEnabled = randomBoolean();
final Settings.Builder builder = Settings.builder()
.put(this.settings)
.put("xpack.security.transport.ssl.enabled", transportSslEnabled)
.put("remote_cluster_server.enabled", true)
.put("xpack.security.remote_cluster_server.ssl.enabled", remoteClusterSslEnabled);
if (randomBoolean()) {
builder.put("xpack.security.remote_cluster_client.ssl.enabled", randomBoolean()); // client SSL won't be processed
}
final SslProfile defaultProfile = mock(SslProfile.class);
when(defaultProfile.configuration()).thenReturn(
new SslConfiguration(
"xpack.security.transport.ssl",
randomBoolean(),
mock(SslTrustConfig.class),
mock(SslKeyConfig.class),
randomFrom(SslVerificationMode.values()),
SslClientAuthenticationMode.REQUIRED,
List.of("TLS_AES_256_GCM_SHA384"),
List.of("TLSv1.3"),
randomLongBetween(1, 100000)
)
);
final SslProfile remoteProfile = mock(SslProfile.class);
when(remoteProfile.configuration()).thenReturn(
new SslConfiguration(
"xpack.security.remote_cluster_server.ssl",
randomBoolean(),
mock(SslTrustConfig.class),
mock(SslKeyConfig.class),
randomFrom(SslVerificationMode.values()),
SslClientAuthenticationMode.NONE,
List.of(Runtime.version().feature() < 24 ? "TLS_RSA_WITH_AES_256_GCM_SHA384" : "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"),
List.of("TLSv1.2"),
randomLongBetween(1, 100000)
)
);
final SSLService sslService = mock(SSLService.class);
when(sslService.profile("xpack.security.transport.ssl.")).thenReturn(defaultProfile);
when(sslService.profile("xpack.security.remote_cluster_server.ssl.")).thenReturn(remoteProfile);
doThrow(new AssertionError("profile filters should not be configured for remote cluster client")).when(sslService)
.profile("xpack.security.remote_cluster_client.ssl.");
final AuthenticationService authcService = mock(AuthenticationService.class);
final AuthorizationService authzService = mock(AuthorizationService.class);
final var securityServerTransportInterceptor = new SecurityServerTransportInterceptor(
builder.build(),
threadPool,
authcService,
authzService,
sslService,
securityContext,
destructiveOperations,
new CrossClusterAccessTransportInterceptor(
builder.build(),
threadPool,
authcService,
authzService,
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
mockLicenseState
)
);
final Map<String, ServerTransportFilter> profileFilters = securityServerTransportInterceptor.getProfileFilters();
assertThat(profileFilters.keySet(), containsInAnyOrder("default", "_remote_cluster"));
assertThat(profileFilters.get("default").isExtractClientCert(), is(transportSslEnabled));
assertThat(profileFilters.get("default"), not(instanceOf(CrossClusterAccessServerTransportFilter.class)));
assertThat(profileFilters.get("_remote_cluster").isExtractClientCert(), is(false));
assertThat(profileFilters.get("_remote_cluster"), instanceOf(CrossClusterAccessServerTransportFilter.class));
}
public void testNoProfileFilterForRemoteClusterWhenTheFeatureIsDisabled() {
final boolean transportSslEnabled = randomBoolean();
final Settings.Builder builder = Settings.builder()
.put(this.settings)
.put("xpack.security.transport.ssl.enabled", transportSslEnabled)
.put("remote_cluster_server.enabled", false)
.put("xpack.security.remote_cluster_server.ssl.enabled", randomBoolean());
if (randomBoolean()) {
builder.put("xpack.security.remote_cluster_client.ssl.enabled", randomBoolean()); // client SSL won't be processed
}
final SslProfile profile = mock(SslProfile.class);
when(profile.configuration()).thenReturn(
new SslConfiguration(
"xpack.security.transport.ssl",
randomBoolean(),
mock(SslTrustConfig.class),
mock(SslKeyConfig.class),
randomFrom(SslVerificationMode.values()),
SslClientAuthenticationMode.REQUIRED,
List.of("TLS_AES_256_GCM_SHA384"),
List.of("TLSv1.3"),
randomLongBetween(1, 100000)
)
);
final SSLService sslService = mock(SSLService.class);
when(sslService.profile("xpack.security.transport.ssl.")).thenReturn(profile);
doThrow(new AssertionError("profile filters should not be configured for remote cluster server when the port is disabled")).when(
sslService
).profile("xpack.security.remote_cluster_server.ssl.");
doThrow(new AssertionError("profile filters should not be configured for remote cluster client")).when(sslService)
.profile("xpack.security.remote_cluster_client.ssl.");
final var securityServerTransportInterceptor = new SecurityServerTransportInterceptor(
builder.build(),
threadPool,
mock(AuthenticationService.class),
mock(AuthorizationService.class),
sslService,
securityContext,
destructiveOperations,
new CrossClusterAccessTransportInterceptor(
builder.build(),
threadPool,
mock(AuthenticationService.class),
mock(AuthorizationService.class),
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
mockLicenseState
)
);
final Map<String, ServerTransportFilter> profileFilters = securityServerTransportInterceptor.getProfileFilters();
assertThat(profileFilters.keySet(), contains("default"));
assertThat(profileFilters.get("default").isExtractClientCert(), is(transportSslEnabled));
}
public void testGetRemoteProfileTransportFilter() {
final boolean remoteClusterSslEnabled = randomBoolean();
final Settings.Builder builder = Settings.builder()
.put(this.settings)
.put("remote_cluster_server.enabled", true)
.put("xpack.security.remote_cluster_server.ssl.enabled", remoteClusterSslEnabled);
if (randomBoolean()) {
builder.put("xpack.security.remote_cluster_client.ssl.enabled", randomBoolean()); // client SSL won't be processed
}
final SslProfile remoteProfile = mock(SslProfile.class);
when(remoteProfile.configuration()).thenReturn(
new SslConfiguration(
"xpack.security.remote_cluster_server.ssl",
randomBoolean(),
mock(SslTrustConfig.class),
mock(SslKeyConfig.class),
randomFrom(SslVerificationMode.values()),
SslClientAuthenticationMode.NONE,
List.of(Runtime.version().feature() < 24 ? "TLS_RSA_WITH_AES_256_GCM_SHA384" : "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"),
List.of("TLSv1.2"),
randomLongBetween(1, 100000)
)
);
final SSLService sslService = mock(SSLService.class);
when(sslService.profile("xpack.security.remote_cluster_server.ssl.")).thenReturn(remoteProfile);
doThrow(new AssertionError("profile filters should not be configured for remote cluster client")).when(sslService)
.profile("xpack.security.remote_cluster_client.ssl.");
final AuthenticationService authcService = mock(AuthenticationService.class);
final AuthorizationService authzService = mock(AuthorizationService.class);
CrossClusterAccessTransportInterceptor interceptor = new CrossClusterAccessTransportInterceptor(
builder.build(),
threadPool,
authcService,
authzService,
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
mockLicenseState
);
final Optional<ServerTransportFilter> remoteProfileTransportFilter = interceptor.getRemoteProfileTransportFilter(
remoteProfile,
destructiveOperations
);
assertThat(remoteProfileTransportFilter.isPresent(), is(true));
assertThat(remoteProfileTransportFilter.get(), instanceOf(CrossClusterAccessServerTransportFilter.class));
}
public void testGetRemoteProfileTransportFilterWhenRemoteClusterServerIsDisabled() {
final boolean remoteClusterSslEnabled = randomBoolean();
final Settings.Builder builder = Settings.builder()
.put(this.settings)
.put("remote_cluster_server.enabled", false)
.put("xpack.security.remote_cluster_server.ssl.enabled", remoteClusterSslEnabled);
if (randomBoolean()) {
builder.put("xpack.security.remote_cluster_client.ssl.enabled", randomBoolean()); // client SSL won't be processed
}
final SslProfile remoteProfile = mock(SslProfile.class);
when(remoteProfile.configuration()).thenReturn(
new SslConfiguration(
"xpack.security.remote_cluster_server.ssl",
randomBoolean(),
mock(SslTrustConfig.class),
mock(SslKeyConfig.class),
randomFrom(SslVerificationMode.values()),
SslClientAuthenticationMode.NONE,
List.of(Runtime.version().feature() < 24 ? "TLS_RSA_WITH_AES_256_GCM_SHA384" : "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"),
List.of("TLSv1.2"),
randomLongBetween(1, 100000)
)
);
final SSLService sslService = mock(SSLService.class);
when(sslService.profile("xpack.security.remote_cluster_server.ssl.")).thenReturn(remoteProfile);
doThrow(new AssertionError("profile filters should not be configured for remote cluster client")).when(sslService)
.profile("xpack.security.remote_cluster_client.ssl.");
final AuthenticationService authcService = mock(AuthenticationService.class);
final AuthorizationService authzService = mock(AuthorizationService.class);
CrossClusterAccessTransportInterceptor interceptor = new CrossClusterAccessTransportInterceptor(
builder.build(),
threadPool,
authcService,
authzService,
securityContext,
mock(CrossClusterAccessAuthenticationService.class),
crossClusterApiKeySignatureManager,
mockLicenseState
);
final Optional<ServerTransportFilter> remoteProfileTransportFilter = interceptor.getRemoteProfileTransportFilter(
remoteProfile,
destructiveOperations
);
assertThat(remoteProfileTransportFilter.isPresent(), is(false));
}
public void testSendWithCrossClusterApiKeySignatureSkippedOnUnsupportedConnection() throws Exception {
final String action;
final TransportRequest request;
if (randomBoolean()) {
action = randomAlphaOfLengthBetween(5, 30);
request = mock(TransportRequest.class);
} else {
action = ClusterStateAction.NAME;
request = mock(ClusterStateRequest.class);
}
var signer = mock(CrossClusterApiKeySignatureManager.Signer.class);
when(crossClusterApiKeySignatureManager.signerForClusterAlias(anyString())).thenReturn(signer);
var transportVersion = TransportVersionUtils.getPreviousVersion(
CrossClusterAccessTransportInterceptor.ADD_CROSS_CLUSTER_API_KEY_SIGNATURE
);
doTestSendWithCrossClusterAccessHeaders(
true,
action,
request,
AuthenticationTestHelper.builder().internal(InternalUsers.SYSTEM_USER).transportVersion(transportVersion).build(),
transportVersion
);
verifyNoInteractions(signer);
}
public void testSendWithCrossClusterApiKeySignatureSentOnSupportedConnection() throws Exception {
final String action;
final TransportRequest request;
if (randomBoolean()) {
action = randomAlphaOfLengthBetween(5, 30);
request = mock(TransportRequest.class);
} else {
action = ClusterStateAction.NAME;
request = mock(ClusterStateRequest.class);
}
var testSignature = getTestSignature();
var signer = mock(CrossClusterApiKeySignatureManager.Signer.class);
when(signer.sign(anyString(), anyString())).thenReturn(testSignature);
when(crossClusterApiKeySignatureManager.signerForClusterAlias(anyString())).thenReturn(signer);
var transportVersion = CrossClusterAccessTransportInterceptor.ADD_CROSS_CLUSTER_API_KEY_SIGNATURE;
doTestSendWithCrossClusterAccessHeaders(
true,
action,
request,
AuthenticationTestHelper.builder().internal(InternalUsers.SYSTEM_USER).transportVersion(transportVersion).build(),
transportVersion
);
verify(signer, times(1)).sign(anyString(), anyString());
}
private X509CertificateSignature getTestSignature() throws CertificateException, IOException {
return new X509CertificateSignature(getTestCertificates(), "SHA256withRSA", new BytesArray(new byte[] { 1, 2, 3, 4 }));
}
private X509Certificate[] getTestCertificates() throws CertificateException, IOException {
return PemUtils.readCertificates(List.of(getDataPath("/org/elasticsearch/xpack/security/signature/signing_rsa.crt")))
.stream()
.map(cert -> (X509Certificate) cert)
.toArray(X509Certificate[]::new);
}
}
|
CrossClusterAccessTransportInterceptorTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisLimitsTests.java
|
{
"start": 1094,
"end": 9720
}
|
class ____ extends AbstractXContentSerializingTestCase<AnalysisLimits> {
@Override
protected AnalysisLimits createTestInstance() {
return createRandomized();
}
public static AnalysisLimits createRandomized() {
return new AnalysisLimits(
randomBoolean() ? (long) randomIntBetween(1, 1000000) : null,
randomBoolean() ? randomNonNegativeLong() : null
);
}
@Override
protected Writeable.Reader<AnalysisLimits> instanceReader() {
return AnalysisLimits::new;
}
@Override
protected AnalysisLimits doParseInstance(XContentParser parser) {
return AnalysisLimits.STRICT_PARSER.apply(parser, null);
}
public void testParseModelMemoryLimitGivenNegativeNumber() throws IOException {
String json = "{\"model_memory_limit\": -1}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null));
assertThat(e.getCause(), notNullValue());
assertThat(e.getCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = -1"));
}
public void testParseModelMemoryLimitGivenZero() throws IOException {
String json = "{\"model_memory_limit\": 0}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null));
assertThat(e.getCause(), notNullValue());
assertThat(e.getCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0"));
}
public void testParseModelMemoryLimitGivenPositiveNumber() throws IOException {
String json = "{\"model_memory_limit\": 2048}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
AnalysisLimits limits = AnalysisLimits.STRICT_PARSER.apply(parser, null);
assertThat(limits.getModelMemoryLimit(), equalTo(2048L));
}
public void testParseModelMemoryLimitGivenNegativeString() throws IOException {
String json = "{\"model_memory_limit\":\"-4MB\"}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null));
// the root cause is wrapped in an intermediate ElasticsearchParseException
assertThat(e.getCause(), instanceOf(ElasticsearchParseException.class));
assertThat(e.getCause().getCause(), instanceOf(IllegalArgumentException.class));
assertThat(e.getCause().getCause().getMessage(), containsString("Values less than -1 bytes are not supported: -4mb"));
}
public void testParseModelMemoryLimitGivenZeroString() throws IOException {
String json = "{\"model_memory_limit\":\"0MB\"}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null));
assertThat(e.getCause(), notNullValue());
assertThat(e.getCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0"));
}
public void testParseModelMemoryLimitGivenLessThanOneMBString() throws IOException {
String json = "{\"model_memory_limit\":\"1000Kb\"}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
XContentParseException e = expectThrows(XContentParseException.class, () -> AnalysisLimits.STRICT_PARSER.apply(parser, null));
assertThat(e.getCause(), notNullValue());
assertThat(e.getCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0"));
}
public void testParseModelMemoryLimitGivenStringMultipleOfMBs() throws IOException {
String json = "{\"model_memory_limit\":\"4g\"}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
AnalysisLimits limits = AnalysisLimits.STRICT_PARSER.apply(parser, null);
assertThat(limits.getModelMemoryLimit(), equalTo(4096L));
}
public void testParseModelMemoryLimitGivenStringNonMultipleOfMBs() throws IOException {
String json = "{\"model_memory_limit\":\"1300kb\"}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, json);
AnalysisLimits limits = AnalysisLimits.STRICT_PARSER.apply(parser, null);
assertThat(limits.getModelMemoryLimit(), equalTo(1L));
}
public void testModelMemoryDefault() {
AnalysisLimits limits = new AnalysisLimits(randomNonNegativeLong());
assertThat(limits.getModelMemoryLimit(), equalTo(AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB));
}
public void testEquals_GivenEqual() {
AnalysisLimits analysisLimits1 = new AnalysisLimits(10L, 20L);
AnalysisLimits analysisLimits2 = new AnalysisLimits(10L, 20L);
assertTrue(analysisLimits1.equals(analysisLimits1));
assertTrue(analysisLimits1.equals(analysisLimits2));
assertTrue(analysisLimits2.equals(analysisLimits1));
}
public void testEquals_GivenDifferentModelMemoryLimit() {
AnalysisLimits analysisLimits1 = new AnalysisLimits(10L, 20L);
AnalysisLimits analysisLimits2 = new AnalysisLimits(11L, 20L);
assertFalse(analysisLimits1.equals(analysisLimits2));
assertFalse(analysisLimits2.equals(analysisLimits1));
}
public void testEquals_GivenDifferentCategorizationExamplesLimit() {
AnalysisLimits analysisLimits1 = new AnalysisLimits(10L, 20L);
AnalysisLimits analysisLimits2 = new AnalysisLimits(10L, 21L);
assertFalse(analysisLimits1.equals(analysisLimits2));
assertFalse(analysisLimits2.equals(analysisLimits1));
}
public void testHashCode_GivenEqual() {
AnalysisLimits analysisLimits1 = new AnalysisLimits(5555L, 3L);
AnalysisLimits analysisLimits2 = new AnalysisLimits(5555L, 3L);
assertEquals(analysisLimits1.hashCode(), analysisLimits2.hashCode());
}
public void testVerify_GivenNegativeCategorizationExamplesLimit() {
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> new AnalysisLimits(1L, -1L));
String errorMessage = Messages.getMessage(
Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW,
AnalysisLimits.CATEGORIZATION_EXAMPLES_LIMIT,
0,
-1L
);
assertEquals(errorMessage, e.getMessage());
}
public void testVerify_GivenValid() {
new AnalysisLimits(null, 1L);
new AnalysisLimits(1L, null);
new AnalysisLimits(1L, 1L);
}
@Override
protected AnalysisLimits mutateInstance(AnalysisLimits instance) {
Long memoryModelLimit = instance.getModelMemoryLimit();
Long categorizationExamplesLimit = instance.getCategorizationExamplesLimit();
switch (between(0, 1)) {
case 0:
if (memoryModelLimit == null) {
memoryModelLimit = randomNonNegativeLong();
} else {
if (randomBoolean()) {
memoryModelLimit = null;
} else {
memoryModelLimit += between(1, 10000);
}
}
break;
case 1:
if (categorizationExamplesLimit == null) {
categorizationExamplesLimit = randomNonNegativeLong();
} else {
if (randomBoolean()) {
categorizationExamplesLimit = null;
} else {
categorizationExamplesLimit += between(1, 10000);
}
}
break;
default:
throw new AssertionError("Illegal randomisation branch");
}
return new AnalysisLimits(memoryModelLimit, categorizationExamplesLimit);
}
}
|
AnalysisLimitsTests
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/TaskEventDispatcherTest.java
|
{
"start": 7935,
"end": 8133
}
|
class ____ implements EventListener<TaskEvent> {
public void onEvent(TaskEvent actual) {
throw new IllegalStateException("Should never fire");
}
}
}
|
ZeroShotEventListener
|
java
|
apache__kafka
|
raft/src/main/java/org/apache/kafka/raft/FollowerState.java
|
{
"start": 1224,
"end": 9798
}
|
class ____ implements EpochState {
private final Logger log;
private final int fetchTimeoutMs;
private final int epoch;
private final int leaderId;
private final Endpoints leaderEndpoints;
private final Optional<ReplicaKey> votedKey;
private final Set<Integer> voters;
// Used for tracking the expiration of both the Fetch and FetchSnapshot requests
private final Timer fetchTimer;
// Used to track when to send another add, remove, or update voter request
private final Timer updateVoterSetPeriodTimer;
/* Used to track if the replica has fetched successfully from the leader at least once since
* the transition to follower in this epoch. If the replica has not yet fetched successfully,
* it may be able to grant PreVotes.
*/
private boolean hasFetchedFromLeader = false;
private Optional<LogOffsetMetadata> highWatermark;
/* For kraft.version 0, track if the leader has received updated voter information from this
* follower.
*/
private boolean hasUpdatedLeader = false;
/* Used to track the currently fetching snapshot. When fetching snapshot regular Fetch request
* are paused
*/
private Optional<RawSnapshotWriter> fetchingSnapshot = Optional.empty();
public FollowerState(
Time time,
int epoch,
int leaderId,
Endpoints leaderEndpoints,
Optional<ReplicaKey> votedKey,
Set<Integer> voters,
Optional<LogOffsetMetadata> highWatermark,
int fetchTimeoutMs,
LogContext logContext
) {
this.fetchTimeoutMs = fetchTimeoutMs;
this.epoch = epoch;
this.leaderId = leaderId;
this.leaderEndpoints = leaderEndpoints;
this.votedKey = votedKey;
this.voters = voters;
this.fetchTimer = time.timer(fetchTimeoutMs);
this.updateVoterSetPeriodTimer = time.timer(updateVoterPeriodMs());
this.highWatermark = highWatermark;
this.log = logContext.logger(FollowerState.class);
}
@Override
public ElectionState election() {
return ElectionState.withElectedLeader(epoch, leaderId, votedKey, voters);
}
@Override
public int epoch() {
return epoch;
}
@Override
public Endpoints leaderEndpoints() {
return leaderEndpoints;
}
@Override
public String name() {
return "Follower";
}
public long remainingFetchTimeMs(long currentTimeMs) {
fetchTimer.update(currentTimeMs);
return fetchTimer.remainingMs();
}
public int leaderId() {
return leaderId;
}
public Node leaderNode(ListenerName listener) {
return leaderEndpoints
.address(listener)
.map(address -> new Node(leaderId, address.getHostString(), address.getPort()))
.orElseThrow(() ->
new IllegalArgumentException(
String.format(
"Unknown endpoint for leader %d and listener %s, known endpoints are %s",
leaderId,
listener,
leaderEndpoints
)
)
);
}
public boolean hasFetchTimeoutExpired(long currentTimeMs) {
fetchTimer.update(currentTimeMs);
return fetchTimer.isExpired();
}
/**
* Reset the fetch timeout after successful fetch from leader.
*/
public void resetFetchTimeoutForSuccessfulFetch(long currentTimeMs) {
overrideFetchTimeout(currentTimeMs, fetchTimeoutMs);
hasFetchedFromLeader = true;
}
/**
* Override the fetch timeout to a specific value. This is useful for short-circuiting followers' timeouts after
* they receive end quorum requests
*/
public void overrideFetchTimeout(long currentTimeMs, long timeoutMs) {
fetchTimer.update(currentTimeMs);
fetchTimer.reset(timeoutMs);
}
private long updateVoterPeriodMs() {
// Allow for a few rounds of fetch request before attempting to update
// the voter state
return fetchTimeoutMs;
}
public boolean hasUpdateVoterSetPeriodExpired(long currentTimeMs) {
updateVoterSetPeriodTimer.update(currentTimeMs);
return updateVoterSetPeriodTimer.isExpired();
}
public void resetUpdateVoterSetPeriod(long currentTimeMs) {
updateVoterSetPeriodTimer.update(currentTimeMs);
updateVoterSetPeriodTimer.reset(updateVoterPeriodMs());
}
public boolean hasUpdatedLeader() {
return hasUpdatedLeader;
}
public void setHasUpdatedLeader() {
this.hasUpdatedLeader = true;
}
public boolean updateHighWatermark(OptionalLong newHighWatermark) {
if (newHighWatermark.isEmpty() && highWatermark.isPresent()) {
throw new IllegalArgumentException(
String.format("Attempt to overwrite current high watermark %s with unknown value", highWatermark)
);
}
if (highWatermark.isPresent()) {
long previousHighWatermark = highWatermark.get().offset();
long updatedHighWatermark = newHighWatermark.getAsLong();
if (updatedHighWatermark < 0) {
throw new IllegalArgumentException(
String.format("Illegal negative (%d) high watermark update", updatedHighWatermark)
);
} else if (previousHighWatermark > updatedHighWatermark) {
throw new IllegalArgumentException(
String.format(
"Non-monotonic update of high watermark from %d to %d",
previousHighWatermark,
updatedHighWatermark
)
);
} else if (previousHighWatermark == updatedHighWatermark) {
return false;
}
}
Optional<LogOffsetMetadata> oldHighWatermark = highWatermark;
highWatermark = newHighWatermark.isPresent() ?
Optional.of(new LogOffsetMetadata(newHighWatermark.getAsLong())) :
Optional.empty();
logHighWatermarkUpdate(oldHighWatermark, highWatermark);
return true;
}
@Override
public Optional<LogOffsetMetadata> highWatermark() {
return highWatermark;
}
public Optional<RawSnapshotWriter> fetchingSnapshot() {
return fetchingSnapshot;
}
public void setFetchingSnapshot(Optional<RawSnapshotWriter> newSnapshot) {
fetchingSnapshot.ifPresent(RawSnapshotWriter::close);
fetchingSnapshot = newSnapshot;
}
@Override
public boolean canGrantVote(ReplicaKey replicaKey, boolean isLogUpToDate, boolean isPreVote) {
if (isPreVote && !hasFetchedFromLeader && isLogUpToDate) {
return true;
}
log.debug(
"Rejecting Vote request (preVote={}) from replica ({}) since we are in FollowerState with leader {} in " +
"epoch {}, hasFetchedFromLeader={}, replica's log is up-to-date={}",
isPreVote,
replicaKey,
leaderId,
epoch,
hasFetchedFromLeader,
isLogUpToDate
);
return false;
}
@Override
public String toString() {
return String.format(
"FollowerState(fetchTimeoutMs=%d, epoch=%d, leader=%d, leaderEndpoints=%s, votedKey=%s, " +
"voters=%s, highWatermark=%s, fetchingSnapshot=%s)",
fetchTimeoutMs,
epoch,
leaderId,
leaderEndpoints,
votedKey,
voters,
highWatermark,
fetchingSnapshot
);
}
@Override
public void close() {
fetchingSnapshot.ifPresent(RawSnapshotWriter::close);
}
private void logHighWatermarkUpdate(
Optional<LogOffsetMetadata> oldHighWatermark,
Optional<LogOffsetMetadata> newHighWatermark
) {
if (!oldHighWatermark.equals(newHighWatermark)) {
if (oldHighWatermark.isPresent()) {
log.trace(
"High watermark set to {} from {} for epoch {}",
newHighWatermark,
oldHighWatermark.get(),
epoch
);
} else {
log.info(
"High watermark set to {} for the first time for epoch {}",
newHighWatermark,
epoch
);
}
}
}
}
|
FollowerState
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest48.java
|
{
"start": 992,
"end": 3266
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "CREATE TABLE AO_E8B6CC_REPO_TO_CHANGESET ("
+ " CHANGESET_ID INTEGER,"//
+ " ID INTEGER AUTO_INCREMENT NOT NULL,"//
+ " REPOSITORY_ID INTEGER,"//
+ " CONSTRAINT fk_ao_e8b6cc_repo_to_changeset_repository_id FOREIGN KEY (REPOSITORY_ID) REFERENCES AO_E8B6CC_REPOSITORY_MAPPING(ID),"//
+ " CONSTRAINT fk_ao_e8b6cc_repo_to_changeset_changeset_id FOREIGN KEY (CHANGESET_ID) REFERENCES AO_E8B6CC_CHANGESET_MAPPING(ID),"//
+ " PRIMARY KEY(ID)"//
+ ") ENGINE=InnoDB";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseCreateTable();
System.out.println(stmt);
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(3, visitor.getTables().size());
assertEquals(5, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("AO_E8B6CC_REPO_TO_CHANGESET")));
String output = SQLUtils.toMySqlString(stmt);
assertEquals("CREATE TABLE AO_E8B6CC_REPO_TO_CHANGESET ("//
+ "\n\tCHANGESET_ID INTEGER,"//
+ "\n\tID INTEGER NOT NULL AUTO_INCREMENT,"//
+ "\n\tREPOSITORY_ID INTEGER,"//
+ "\n\tCONSTRAINT fk_ao_e8b6cc_repo_to_changeset_repository_id FOREIGN KEY (REPOSITORY_ID) REFERENCES AO_E8B6CC_REPOSITORY_MAPPING (ID),"//
+ "\n\tCONSTRAINT fk_ao_e8b6cc_repo_to_changeset_changeset_id FOREIGN KEY (CHANGESET_ID) REFERENCES AO_E8B6CC_CHANGESET_MAPPING (ID),"//
+ "\n\tPRIMARY KEY (ID)"//
+ "\n) ENGINE = InnoDB",
output);
}
}
|
MySqlCreateTableTest48
|
java
|
google__guava
|
android/guava/src/com/google/common/base/FinalizableReferenceQueue.java
|
{
"start": 13185,
"end": 13495
}
|
class ____.
String finalizerPath = FINALIZER_CLASS_NAME.replace('.', '/') + ".class";
URL finalizerUrl = getClass().getClassLoader().getResource(finalizerPath);
if (finalizerUrl == null) {
throw new FileNotFoundException(finalizerPath);
}
// Find URL pointing to base of
|
file
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/interceptor/TransactionalClientDataSourceWithOnExceptionTest.java
|
{
"start": 1393,
"end": 3744
}
|
class ____ extends TransactionalClientDataSourceTest {
@Override
@Test
public void testTransactionRollback() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:error");
mock.expectedMessageCount(1);
try {
template.sendBody("direct:fail", "Hello World");
fail("Should have thrown exception");
} catch (RuntimeCamelException e) {
// expected as we fail
assertIsInstanceOf(RuntimeCamelException.class, e.getCause());
assertTrue(e.getCause().getCause() instanceof IllegalArgumentException);
assertEquals("We don't have Donkeys, only Camels", e.getCause().getCause().getMessage());
}
assertMockEndpointsSatisfied();
int count = jdbc.queryForObject("select count(*) from books", Integer.class);
assertEquals(1, count, "Number of books");
}
@Override
// The API is deprecated, we can remove warnings safely as the tests will disappear when removing this component.
@SuppressWarnings("deprecation")
protected RouteBuilder createRouteBuilder() throws Exception {
return new SpringRouteBuilder() {
public void configure() throws Exception {
// use required as transaction policy
SpringTransactionPolicy required = lookup("PROPAGATION_REQUIRED", SpringTransactionPolicy.class);
// configure to use transaction error handler and pass on the required as it will fetch
// the transaction manager from it that it needs
errorHandler(transactionErrorHandler(required));
// on exception is also supported
onException(IllegalArgumentException.class).handled(false).to("mock:error");
from("direct:okay")
.policy(required)
.setBody(constant("Tiger in Action")).bean("bookService")
.setBody(constant("Elephant in Action")).bean("bookService");
from("direct:fail")
.policy(required)
.setBody(constant("Tiger in Action")).bean("bookService")
.setBody(constant("Donkey in Action")).bean("bookService");
}
};
}
}
|
TransactionalClientDataSourceWithOnExceptionTest
|
java
|
google__gson
|
gson/src/main/java/com/google/gson/InstanceCreator.java
|
{
"start": 1430,
"end": 2231
}
|
class ____<T> {
* private final Class<T> clazz;
* private final long value;
* public Id(Class<T> clazz, long value) {
* this.clazz = clazz;
* this.value = value;
* }
* }
* </pre>
*
* <p>If Gson encounters an object of type {@code Id} during deserialization, it will throw an
* exception. The easiest way to solve this problem will be to add a (public or private) no-args
* constructor as follows:
*
* <pre>
* private Id() {
* this(Object.class, 0L);
* }
* </pre>
*
* <p>However, let us assume that the developer does not have access to the source-code of the
* {@code Id} class, or does not want to define a no-args constructor for it. The developer can
* solve this problem by defining an {@code InstanceCreator} for {@code Id}:
*
* <pre>
*
|
Id
|
java
|
quarkusio__quarkus
|
independent-projects/arc/runtime/src/main/java/io/quarkus/arc/WithCaching.java
|
{
"start": 2482,
"end": 2691
}
|
class ____ extends AnnotationLiteral<WithCaching> implements WithCaching {
private static final long serialVersionUID = 1L;
public static final Literal INSTANCE = new Literal();
}
}
|
Literal
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/web/client/samples/MockMvcClientHttpRequestFactoryTests.java
|
{
"start": 2151,
"end": 3164
}
|
class ____ {
private final RestTemplate template;
MockMvcClientHttpRequestFactoryTests(WebApplicationContext wac) {
MockMvc mockMvc = MockMvcBuilders.webAppContextSetup(wac).build();
this.template = new RestTemplate(new MockMvcClientHttpRequestFactory(mockMvc));
}
@Test
void withResult() {
assertThat(template.getForObject("/foo", String.class)).isEqualTo("bar");
}
@Test
void withError() {
assertThatExceptionOfType(HttpClientErrorException.class)
.isThrownBy(() -> template.getForEntity("/error", String.class))
.withMessageContaining("400")
.withMessageContaining("some bad request");
}
@Test
void withErrorAndBody() {
assertThatExceptionOfType(HttpClientErrorException.class)
.isThrownBy(() -> template.getForEntity("/errorbody", String.class))
.withMessageContaining("400")
.withMessageContaining("some really bad request");
}
@EnableWebMvc
@Configuration(proxyBeanMethods = false)
@Import(MyController.class)
static
|
MockMvcClientHttpRequestFactoryTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhance/internal/bytebuddy/MyNonVisibleGenericMappedSuperclass.java
|
{
"start": 428,
"end": 630
}
|
class ____<C> {
@Embedded
private C embedded;
public C getEmbedded() {
return embedded;
}
public void setEmbedded(C embedded) {
this.embedded = embedded;
}
}
|
MyNonVisibleGenericMappedSuperclass
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/support/GenericApplicationContextTests.java
|
{
"start": 31112,
"end": 31486
}
|
class ____<T> extends AbstractFactoryBean<T> {
TestAotFactoryBean() {
throw new IllegalStateException("FactoryBean should not be instantied early");
}
@Override
public Class<?> getObjectType() {
return Object.class;
}
@SuppressWarnings("unchecked")
@Override
protected T createInstance() {
return (T) new Object();
}
}
static
|
TestAotFactoryBean
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestResponseBodyMethodProcessorTests.java
|
{
"start": 57073,
"end": 57249
}
|
class ____ extends MyControllerImplementingInterface {
@Override
public String handle(String arg) {
return arg;
}
}
abstract static
|
SubControllerImplementingInterface
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/collections/mappedsuperclasselement/AbstractCode.java
|
{
"start": 412,
"end": 1227
}
|
class ____ {
/**
* Initial Value
*/
protected static final int UNDEFINED = -1;
private int code = UNDEFINED;
protected AbstractCode() {
this( UNDEFINED );
}
/**
* Constructor with code
*/
public AbstractCode(int code) {
this.code = code;
}
public int getCode() {
return code;
}
public void setCode(int code) {
this.code = code;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + code;
return result;
}
@Override
public boolean equals(Object obj) {
if ( this == obj ) {
return true;
}
if ( obj == null ) {
return false;
}
if ( getClass() != obj.getClass() ) {
return false;
}
AbstractCode other = (AbstractCode) obj;
if ( code != other.code ) {
return false;
}
return true;
}
}
|
AbstractCode
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java
|
{
"start": 1179,
"end": 2648
}
|
class ____ implements SchemaValidationStrategy {
/**
* Validate that the schemas provided can mutually read data written by each
* other according to the default Avro schema resolution rules.
*
* @throws SchemaValidationException if the schemas are not mutually compatible.
*/
@Override
public void validate(Schema toValidate, Schema existing) throws SchemaValidationException {
canRead(toValidate, existing);
canRead(existing, toValidate);
}
/**
* Validates that data written with one schema can be read using another, based
* on the default Avro schema resolution rules.
*
* @param writtenWith The "writer's" schema, representing data to be read.
* @param readUsing The "reader's" schema, representing how the reader will
* interpret data.
* @throws SchemaValidationException if the schema <b>readUsing<b/> cannot be
* used to read data written with
* <b>writtenWith<b/>
*/
static void canRead(Schema writtenWith, Schema readUsing) throws SchemaValidationException {
boolean error;
try {
error = Symbol.hasErrors(new ResolvingGrammarGenerator().generate(writtenWith, readUsing));
} catch (IOException e) {
throw new SchemaValidationException(readUsing, writtenWith, e);
}
if (error) {
throw new SchemaValidationException(readUsing, writtenWith);
}
}
}
|
ValidateMutualRead
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java
|
{
"start": 34628,
"end": 34810
}
|
class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return definesFunction( dialect, "array_includes" );
}
}
public static
|
SupportsArrayIncludes
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/handler/PathPatternMatchableHandlerMapping.java
|
{
"start": 1582,
"end": 3024
}
|
class ____ implements MatchableHandlerMapping {
private final MatchableHandlerMapping delegate;
private final PathPatternParser parser;
private final Map<String, PathPattern> pathPatternCache = new ConcurrentHashMap<>();
private final int cacheLimit;
public PathPatternMatchableHandlerMapping(MatchableHandlerMapping delegate, int cacheLimit) {
Assert.notNull(delegate, "HandlerMapping to delegate to is required.");
Assert.notNull(delegate.getPatternParser(), "Expected HandlerMapping configured to use PatternParser.");
this.delegate = delegate;
this.parser = delegate.getPatternParser();
this.cacheLimit = cacheLimit;
}
@SuppressWarnings("removal")
@Deprecated(since = "7.0", forRemoval = true)
@Override
public @Nullable RequestMatchResult match(HttpServletRequest request, String pattern) {
PathPattern pathPattern = this.pathPatternCache.computeIfAbsent(pattern, value -> {
Assert.state(this.pathPatternCache.size() < this.cacheLimit, "Max size for pattern cache exceeded.");
return this.parser.parse(pattern);
});
PathContainer path = ServletRequestPathUtils.getParsedRequestPath(request).pathWithinApplication();
return (pathPattern.matches(path) ? new RequestMatchResult(pathPattern, path) : null);
}
@Override
public @Nullable HandlerExecutionChain getHandler(HttpServletRequest request) throws Exception {
return this.delegate.getHandler(request);
}
}
|
PathPatternMatchableHandlerMapping
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shorts/Shorts_assertIsCloseTo_Test.java
|
{
"start": 1495,
"end": 4077
}
|
class ____ extends ShortsBaseTest {
private static final Short ZERO = 0;
private static final Short ONE = 1;
@ParameterizedTest
@CsvSource({
"1, 1, 1",
"1, 2, 10",
"-2, 0, 3",
"-1, 1, 3",
"0, 2, 5"
})
void should_pass_if_difference_is_less_than_given_offset(short actual, short expected, short offset) {
shorts.assertIsCloseTo(someInfo(), actual, expected, within(offset));
shorts.assertIsCloseTo(someInfo(), actual, expected, byLessThan(offset));
}
@ParameterizedTest
@CsvSource({
"1, 1, 0",
"1, 0, 1",
"-1, 0, 1",
"-1, -1, 0",
"-1, 1, 2",
"0, 32767, 32767",
"32767, 32767, 0",
"-32768, -32768, 0"
})
void should_pass_if_difference_is_equal_to_given_offset(short actual, short expected, short offset) {
shorts.assertIsCloseTo(someInfo(), actual, expected, within(offset));
}
@ParameterizedTest
@CsvSource({
"1, 3, 1",
"3, 1, 1",
"-2, 0, 1",
"-1, 1, 1",
"0, 2, 1"
})
void should_fail_if_actual_is_not_close_enough_to_expected(short actual, short expected, short offset) {
// GIVEN
AssertionInfo info = someInfo();
// WHEN
expectAssertionError(() -> shorts.assertIsCloseTo(info, actual, expected, within(offset)));
// THEN
verify(failures).failure(info, shouldBeEqual(actual, expected, within(offset), (short) abs(actual - expected)));
}
@ParameterizedTest
@CsvSource({
"1, 2, 1",
"3, 2, 1",
"-2, -1, 1",
"-1, 1, 2",
"0, 2, 2"
})
void should_fail_if_difference_is_equal_to_the_given_strict_offset(short actual, short expected, short offset) {
// GIVEN
AssertionInfo info = someInfo();
// WHEN
expectAssertionError(() -> shorts.assertIsCloseTo(info, actual, expected, byLessThan(offset)));
// THEN
verify(failures).failure(info, shouldBeEqual(actual, expected, byLessThan(offset), (short) abs(actual - expected)));
}
@Test
void should_fail_if_actual_is_null() {
assertThatAssertionErrorIsThrownBy(() -> shorts.assertIsCloseTo(someInfo(), null, ONE,
within(ONE))).withMessage(actualIsNull());
}
@Test
void should_fail_if_expected_value_is_null() {
assertThatNullPointerException().isThrownBy(() -> shorts.assertIsCloseTo(someInfo(), ONE, null, within(ONE)));
}
@Test
void should_fail_if_offset_is_null() {
assertThatNullPointerException().isThrownBy(() -> shorts.assertIsCloseTo(someInfo(), ONE, ZERO, null));
}
}
|
Shorts_assertIsCloseTo_Test
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/changelog/ChangelogStateBackendLocalHandle.java
|
{
"start": 1422,
"end": 3968
}
|
class ____ implements ChangelogStateBackendHandle {
private static final long serialVersionUID = 1L;
private final List<KeyedStateHandle> localMaterialized;
private final List<ChangelogStateHandle> localNonMaterialized;
private final ChangelogStateBackendHandleImpl remoteHandle;
public ChangelogStateBackendLocalHandle(
List<KeyedStateHandle> localMaterialized,
List<ChangelogStateHandle> localNonMaterialized,
ChangelogStateBackendHandleImpl remoteHandle) {
this.localMaterialized = localMaterialized;
this.localNonMaterialized = localNonMaterialized;
this.remoteHandle = remoteHandle;
}
@Override
public List<KeyedStateHandle> getMaterializedStateHandles() {
return localMaterialized;
}
@Override
public List<ChangelogStateHandle> getNonMaterializedStateHandles() {
return localNonMaterialized;
}
@Override
public long getMaterializationID() {
return remoteHandle.getMaterializationID();
}
@Override
public ChangelogStateBackendHandle rebound(long checkpointId) {
throw new UnsupportedOperationException("Should not call here.");
}
public List<KeyedStateHandle> getRemoteMaterializedStateHandles() {
return remoteHandle.getMaterializedStateHandles();
}
public List<ChangelogStateHandle> getRemoteNonMaterializedStateHandles() {
return remoteHandle.getNonMaterializedStateHandles();
}
@Override
public long getCheckpointId() {
return remoteHandle.getCheckpointId();
}
@Override
public void registerSharedStates(SharedStateRegistry stateRegistry, long checkpointID) {
remoteHandle.registerSharedStates(stateRegistry, checkpointID);
}
@Override
public long getCheckpointedSize() {
return remoteHandle.getCheckpointedSize();
}
@Override
public KeyGroupRange getKeyGroupRange() {
return remoteHandle.getKeyGroupRange();
}
@Nullable
@Override
public KeyedStateHandle getIntersection(KeyGroupRange keyGroupRange) {
throw new UnsupportedOperationException(
"This is a local state handle for the TM side only.");
}
@Override
public StateHandleID getStateHandleId() {
return remoteHandle.getStateHandleId();
}
@Override
public void discardState() throws Exception {}
@Override
public long getStateSize() {
return remoteHandle.getStateSize();
}
}
|
ChangelogStateBackendLocalHandle
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/http2/file/Http2StaticFileServerHandler.java
|
{
"start": 4254,
"end": 14839
}
|
class ____ extends ChannelDuplexHandler {
public static final String HTTP_DATE_FORMAT = "EEE, dd MMM yyyy HH:mm:ss zzz";
public static final String HTTP_DATE_GMT_TIMEZONE = "GMT";
public static final int HTTP_CACHE_SECONDS = 60;
private Http2FrameStream stream;
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (msg instanceof Http2HeadersFrame) {
Http2HeadersFrame headersFrame = (Http2HeadersFrame) msg;
this.stream = headersFrame.stream();
if (!GET.toString().equals(headersFrame.headers().method().toString())) {
sendError(ctx, METHOD_NOT_ALLOWED);
return;
}
final String uri = headersFrame.headers().path().toString();
final String path = sanitizeUri(uri);
if (path == null) {
sendError(ctx, FORBIDDEN);
return;
}
File file = new File(path);
if (file.isHidden() || !file.exists()) {
sendError(ctx, NOT_FOUND);
return;
}
if (file.isDirectory()) {
if (uri.endsWith("/")) {
sendListing(ctx, file, uri);
} else {
sendRedirect(ctx, uri + '/');
}
return;
}
if (!file.isFile()) {
sendError(ctx, FORBIDDEN);
return;
}
// Cache Validation
CharSequence ifModifiedSince = headersFrame.headers().get(HttpHeaderNames.IF_MODIFIED_SINCE);
if (ifModifiedSince != null && !ifModifiedSince.toString().isEmpty()) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
Date ifModifiedSinceDate = dateFormatter.parse(ifModifiedSince.toString());
// Only compare up to the second because the datetime format we send to the client
// does not have milliseconds
long ifModifiedSinceDateSeconds = ifModifiedSinceDate.getTime() / 1000;
long fileLastModifiedSeconds = file.lastModified() / 1000;
if (ifModifiedSinceDateSeconds == fileLastModifiedSeconds) {
sendNotModified(ctx);
return;
}
}
RandomAccessFile raf;
try {
raf = new RandomAccessFile(file, "r");
} catch (FileNotFoundException ignore) {
sendError(ctx, NOT_FOUND);
return;
}
long fileLength = raf.length();
Http2Headers headers = new DefaultHttp2Headers();
headers.status("200");
headers.setLong(HttpHeaderNames.CONTENT_LENGTH, fileLength);
setContentTypeHeader(headers, file);
setDateAndCacheHeaders(headers, file);
// Write the initial line and the header.
ctx.writeAndFlush(new DefaultHttp2HeadersFrame(headers).stream(stream));
// Write the content.
ChannelFuture sendFileFuture;
sendFileFuture = ctx.writeAndFlush(new Http2DataChunkedInput(
new ChunkedFile(raf, 0, fileLength, 8192), stream), ctx.newProgressivePromise());
sendFileFuture.addListener(new ChannelProgressiveFutureListener() {
@Override
public void operationProgressed(ChannelProgressiveFuture future, long progress, long total) {
if (total < 0) { // total unknown
System.err.println(future.channel() + " Transfer progress: " + progress);
} else {
System.err.println(future.channel() + " Transfer progress: " + progress + " / " + total);
}
}
@Override
public void operationComplete(ChannelProgressiveFuture future) {
System.err.println(future.channel() + " Transfer complete.");
}
});
} else {
// Unsupported message type
System.out.println("Unsupported message type: " + msg);
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
cause.printStackTrace();
if (ctx.channel().isActive()) {
sendError(ctx, INTERNAL_SERVER_ERROR);
}
}
private static final Pattern INSECURE_URI = Pattern.compile(".*[<>&\"].*");
private static String sanitizeUri(String uri) throws UnsupportedEncodingException {
// Decode the path.
uri = URLDecoder.decode(uri, "UTF-8");
if (uri.isEmpty() || uri.charAt(0) != '/') {
return null;
}
// Convert file separators.
uri = uri.replace('/', File.separatorChar);
// Simplistic dumb security check.
// You will have to do something serious in the production environment.
if (uri.contains(File.separator + '.') ||
uri.contains('.' + File.separator) ||
uri.charAt(0) == '.' || uri.charAt(uri.length() - 1) == '.' ||
INSECURE_URI.matcher(uri).matches()) {
return null;
}
// Convert to absolute path.
return SystemPropertyUtil.get("user.dir") + File.separator + uri;
}
private static final Pattern ALLOWED_FILE_NAME = Pattern.compile("[^-\\._]?[^<>&\\\"]*");
private void sendListing(ChannelHandlerContext ctx, File dir, String dirPath) {
StringBuilder buf = new StringBuilder()
.append("<!DOCTYPE html>\r\n")
.append("<html><head><meta charset='utf-8' /><title>")
.append("Listing of: ")
.append(dirPath)
.append("</title></head><body>\r\n")
.append("<h3>Listing of: ")
.append(dirPath)
.append("</h3>\r\n")
.append("<ul>")
.append("<li><a href=\"../\">..</a></li>\r\n");
File[] files = dir.listFiles();
if (files != null) {
for (File f : files) {
if (f.isHidden() || !f.canRead()) {
continue;
}
String name = f.getName();
if (!ALLOWED_FILE_NAME.matcher(name).matches()) {
continue;
}
buf.append("<li><a href=\"")
.append(name)
.append("\">")
.append(name)
.append("</a></li>\r\n");
}
}
buf.append("</ul></body></html>\r\n");
ByteBuf buffer = ctx.alloc().buffer(buf.length());
buffer.writeCharSequence(buf.toString(), CharsetUtil.UTF_8);
Http2Headers headers = new DefaultHttp2Headers();
headers.status(OK.toString());
headers.add(HttpHeaderNames.CONTENT_TYPE, "text/html; charset=UTF-8");
ctx.write(new DefaultHttp2HeadersFrame(headers).stream(stream));
ctx.writeAndFlush(new DefaultHttp2DataFrame(buffer, true).stream(stream));
}
private void sendRedirect(ChannelHandlerContext ctx, String newUri) {
Http2Headers headers = new DefaultHttp2Headers();
headers.status(FOUND.toString());
headers.add(HttpHeaderNames.LOCATION, newUri);
ctx.writeAndFlush(new DefaultHttp2HeadersFrame(headers, true).stream(stream));
}
private void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
Http2Headers headers = new DefaultHttp2Headers();
headers.status(status.toString());
headers.add(HttpHeaderNames.CONTENT_TYPE, "text/plain; charset=UTF-8");
Http2HeadersFrame headersFrame = new DefaultHttp2HeadersFrame(headers);
headersFrame.stream(stream);
Http2DataFrame dataFrame = new DefaultHttp2DataFrame(
Unpooled.copiedBuffer("Failure: " + status + "\r\n", CharsetUtil.UTF_8), true);
dataFrame.stream(stream);
ctx.write(headersFrame);
ctx.writeAndFlush(dataFrame);
}
/**
* When file timestamp is the same as what the browser is sending up, send a "304 Not Modified"
*
* @param ctx Context
*/
private void sendNotModified(ChannelHandlerContext ctx) {
Http2Headers headers = new DefaultHttp2Headers();
headers.status(NOT_MODIFIED.toString());
setDateHeader(headers);
ctx.writeAndFlush(new DefaultHttp2HeadersFrame(headers, true).stream(stream));
}
/**
* Sets the Date header for the HTTP response
*
* @param headers Http2 Headers
*/
private static void setDateHeader(Http2Headers headers) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
dateFormatter.setTimeZone(TimeZone.getTimeZone(HTTP_DATE_GMT_TIMEZONE));
Calendar time = new GregorianCalendar();
headers.set(HttpHeaderNames.DATE, dateFormatter.format(time.getTime()));
}
/**
* Sets the Date and Cache headers for the HTTP Response
*
* @param headers Http2 Headers
* @param fileToCache file to extract content type
*/
private static void setDateAndCacheHeaders(Http2Headers headers, File fileToCache) {
SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
dateFormatter.setTimeZone(TimeZone.getTimeZone(HTTP_DATE_GMT_TIMEZONE));
// Date header
Calendar time = new GregorianCalendar();
headers.set(HttpHeaderNames.DATE, dateFormatter.format(time.getTime()));
// Add cache headers
time.add(Calendar.SECOND, HTTP_CACHE_SECONDS);
headers.set(HttpHeaderNames.EXPIRES, dateFormatter.format(time.getTime()));
headers.set(HttpHeaderNames.CACHE_CONTROL, "private, max-age=" + HTTP_CACHE_SECONDS);
headers.set(HttpHeaderNames.LAST_MODIFIED, dateFormatter.format(new Date(fileToCache.lastModified())));
}
/**
* Sets the content type header for the HTTP Response
*
* @param headers Http2 Headers
* @param file file to extract content type
*/
private static void setContentTypeHeader(Http2Headers headers, File file) {
MimetypesFileTypeMap mimeTypesMap = new MimetypesFileTypeMap();
headers.set(HttpHeaderNames.CONTENT_TYPE, mimeTypesMap.getContentType(file.getPath()));
}
}
|
Http2StaticFileServerHandler
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/method/GlobalMethodSecurityBeanDefinitionParser.java
|
{
"start": 22127,
"end": 23479
}
|
class ____ implements AuthenticationManager, BeanFactoryAware {
private AuthenticationManager delegate;
private final Object delegateMonitor = new Object();
private BeanFactory beanFactory;
private final String authMgrBean;
AuthenticationManagerDelegator(String authMgrBean) {
this.authMgrBean = StringUtils.hasText(authMgrBean) ? authMgrBean : BeanIds.AUTHENTICATION_MANAGER;
}
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
synchronized (this.delegateMonitor) {
if (this.delegate == null) {
Assert.state(this.beanFactory != null,
() -> "BeanFactory must be set to resolve " + this.authMgrBean);
try {
this.delegate = this.beanFactory.getBean(this.authMgrBean, AuthenticationManager.class);
}
catch (NoSuchBeanDefinitionException ex) {
if (BeanIds.AUTHENTICATION_MANAGER.equals(ex.getBeanName())) {
throw new NoSuchBeanDefinitionException(BeanIds.AUTHENTICATION_MANAGER,
AuthenticationManagerFactoryBean.MISSING_BEAN_ERROR_MESSAGE);
}
throw ex;
}
}
}
return this.delegate.authenticate(authentication);
}
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
this.beanFactory = beanFactory;
}
}
static
|
AuthenticationManagerDelegator
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng8561SourceRootTest.java
|
{
"start": 1040,
"end": 1482
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify project is buildable.
*/
@Test
void testIt() throws Exception {
Path basedir = extractResources("/mng-8561").getAbsoluteFile().toPath();
Verifier verifier = newVerifier(basedir.toString());
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
|
MavenITmng8561SourceRootTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/LazyAutowiredAnnotationBeanPostProcessorTests.java
|
{
"start": 7993,
"end": 8322
}
|
class ____ implements TestBeanHolder {
@Autowired @Lazy
private TestBean testBean;
@Autowired @Lazy
private List<TestBean> testBeans;
@Override
public TestBean getTestBean() {
return this.testBean;
}
public List<TestBean> getTestBeans() {
return testBeans;
}
}
public static
|
FieldResourceInjectionBean
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/rpc/RpcClientUtils.java
|
{
"start": 1010,
"end": 2408
}
|
class ____ {
public static RemotingCommand createCommandForRpcRequest(RpcRequest rpcRequest) {
RemotingCommand cmd = RemotingCommand.createRequestCommand(rpcRequest.getCode(), rpcRequest.getHeader());
cmd.setBody(encodeBody(rpcRequest.getBody()));
return cmd;
}
public static RemotingCommand createCommandForRpcResponse(RpcResponse rpcResponse) {
RemotingCommand cmd = RemotingCommand.createResponseCommandWithHeader(rpcResponse.getCode(), rpcResponse.getHeader());
cmd.setRemark(rpcResponse.getException() == null ? "" : rpcResponse.getException().getMessage());
cmd.setBody(encodeBody(rpcResponse.getBody()));
return cmd;
}
public static byte[] encodeBody(Object body) {
if (body == null) {
return null;
}
if (body instanceof byte[]) {
return (byte[])body;
} else if (body instanceof RemotingSerializable) {
return ((RemotingSerializable) body).encode();
} else if (body instanceof ByteBuffer) {
ByteBuffer buffer = (ByteBuffer)body;
buffer.mark();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
buffer.reset();
return data;
} else {
throw new RuntimeException("Unsupported body type " + body.getClass());
}
}
}
|
RpcClientUtils
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/loading/multiLoad/FindMultipleEntityGraphTest.java
|
{
"start": 2265,
"end": 2369
}
|
class ____ {
@Id String name;
Owner(String name) {
this.name = name;
}
Owner() {
}
}
}
|
Owner
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/embeddable/EmbeddableInheritanceRecursiveTest.java
|
{
"start": 5065,
"end": 5154
}
|
class ____ extends Root3 {
String child3Prop;
}
@Entity(name = "Entity3")
static
|
Child3
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerLongRunningTasks.java
|
{
"start": 4083,
"end": 38204
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestBalancerLongRunningTasks.class);
static {
GenericTestUtils.setLogLevel(Balancer.LOG, Level.TRACE);
GenericTestUtils.setLogLevel(Dispatcher.LOG, Level.DEBUG);
}
private final static long CAPACITY = 5000L;
private final static String RACK0 = "/rack0";
private final static String RACK1 = "/rack1";
private final static String RACK2 = "/rack2";
private final static String FILE_NAME = "/tmp.txt";
private final static Path FILE_PATH = new Path(FILE_NAME);
private MiniDFSCluster cluster;
@AfterEach
public void shutdown() throws Exception {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
private ClientProtocol client;
static final int DEFAULT_BLOCK_SIZE = 100;
static final int DEFAULT_RAM_DISK_BLOCK_SIZE = 5 * 1024 * 1024;
static {
initTestSetup();
}
public static void initTestSetup() {
// do not create id file since it occupies the disk space
NameNodeConnector.setWrite2IdFile(false);
}
static void initConf(Configuration conf) {
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_BLOCK_SIZE);
conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, DEFAULT_BLOCK_SIZE);
conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
conf.setLong(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY,
1L);
SimulatedFSDataset.setFactory(conf);
conf.setLong(DFSConfigKeys.DFS_BALANCER_MOVEDWINWIDTH_KEY, 2000L);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
conf.setInt(DFSConfigKeys.DFS_BALANCER_MAX_NO_MOVE_INTERVAL_KEY, 5 * 1000);
}
static void initConfWithRamDisk(Configuration conf,
long ramDiskCapacity) {
conf.setLong(DFS_BLOCK_SIZE_KEY, DEFAULT_RAM_DISK_BLOCK_SIZE);
conf.setLong(DFS_DATANODE_MAX_LOCKED_MEMORY_KEY, ramDiskCapacity);
conf.setInt(DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC, 3);
conf.setLong(DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 500);
conf.setInt(DFS_DATANODE_LAZY_WRITER_INTERVAL_SEC, 1);
conf.setInt(DFSConfigKeys.DFS_BALANCER_MAX_NO_MOVE_INTERVAL_KEY, 5 * 1000);
LazyPersistTestCase.initCacheManipulator();
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
}
/**
* Test special case. Two replicas belong to same block should not in same
* node.
* We have 2 nodes.
* We have a block in (DN0,SSD) and (DN1,DISK).
* Replica in (DN0,SSD) should not be moved to (DN1,SSD).
* Otherwise DN1 has 2 replicas.
*/
@Test
@Timeout(value = 100)
public void testTwoReplicaShouldNotInSameDN() throws Exception {
final Configuration conf = new HdfsConfiguration();
int blockSize = 5 * 1024 * 1024;
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);
conf.setLong(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY,
1L);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1L);
int numOfDatanodes = 2;
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(2)
.racks(new String[]{"/default/rack0", "/default/rack0"})
.storagesPerDatanode(2)
.storageTypes(new StorageType[][]{
{StorageType.SSD, StorageType.DISK},
{StorageType.SSD, StorageType.DISK}})
.storageCapacities(new long[][]{
{100 * blockSize, 20 * blockSize},
{20 * blockSize, 100 * blockSize}})
.build();
cluster.waitActive();
//set "/bar" directory with ONE_SSD storage policy.
DistributedFileSystem fs = cluster.getFileSystem();
Path barDir = new Path("/bar");
fs.mkdir(barDir, new FsPermission((short) 777));
fs.setStoragePolicy(barDir, HdfsConstants.ONESSD_STORAGE_POLICY_NAME);
    // Insert 30 blocks. So (DN0,SSD) and (DN1,DISK) are about half full,
    // and (DN0,DISK) and (DN1,SSD) are about 15% full.
long fileLen = 30 * blockSize;
// fooFile has ONE_SSD policy. So
// (DN0,SSD) and (DN1,DISK) have 2 replicas belong to same block.
// (DN0,DISK) and (DN1,SSD) have 2 replicas belong to same block.
Path fooFile = new Path(barDir, "foo");
TestBalancer.createFile(cluster, fooFile, fileLen, (short) numOfDatanodes,
0);
// update space info
cluster.triggerHeartbeats();
BalancerParameters p = BalancerParameters.DEFAULT;
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
final int r = Balancer.run(namenodes, p, conf);
// Replica in (DN0,SSD) was not moved to (DN1,SSD), because (DN1,DISK)
// already has one. Otherwise DN1 will have 2 replicas.
// For same reason, no replicas were moved.
assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
/*
* Test Balancer with Ram_Disk configured
* One DN has two files on RAM_DISK, other DN has no files on RAM_DISK.
* Then verify that the balancer does not migrate files on RAM_DISK across DN.
*/
@Test
@Timeout(value = 300)
public void testBalancerWithRamDisk() throws Exception {
final int seed = 0xFADED;
final short replicationFactor = 1;
Configuration conf = new Configuration();
final int defaultRamDiskCapacity = 10;
final long ramDiskStorageLimit =
((long) defaultRamDiskCapacity * DEFAULT_RAM_DISK_BLOCK_SIZE) +
(DEFAULT_RAM_DISK_BLOCK_SIZE - 1);
final long diskStorageLimit =
((long) defaultRamDiskCapacity * DEFAULT_RAM_DISK_BLOCK_SIZE) +
(DEFAULT_RAM_DISK_BLOCK_SIZE - 1);
initConfWithRamDisk(conf, ramDiskStorageLimit);
cluster = new MiniDFSCluster
.Builder(conf)
.numDataNodes(1)
.storageCapacities(new long[]{ramDiskStorageLimit, diskStorageLimit})
.storageTypes(new StorageType[]{RAM_DISK, DEFAULT})
.build();
cluster.waitActive();
// Create few files on RAM_DISK
final String methodName = GenericTestUtils.getMethodName();
final Path path1 = new Path("/" + methodName + ".01.dat");
final Path path2 = new Path("/" + methodName + ".02.dat");
DistributedFileSystem fs = cluster.getFileSystem();
DFSClient dfsClient = fs.getClient();
DFSTestUtil.createFile(fs, path1, true,
DEFAULT_RAM_DISK_BLOCK_SIZE, 4 * DEFAULT_RAM_DISK_BLOCK_SIZE,
DEFAULT_RAM_DISK_BLOCK_SIZE, replicationFactor, seed, true);
DFSTestUtil.createFile(fs, path2, true,
DEFAULT_RAM_DISK_BLOCK_SIZE, 1 * DEFAULT_RAM_DISK_BLOCK_SIZE,
DEFAULT_RAM_DISK_BLOCK_SIZE, replicationFactor, seed, true);
// Sleep for a short time to allow the lazy writer thread to do its job
Thread.sleep(6 * 1000);
// Add another fresh DN with the same type/capacity without files on
// RAM_DISK
StorageType[][] storageTypes = new StorageType[][]{{RAM_DISK, DEFAULT}};
long[][] storageCapacities = new long[][]{{ramDiskStorageLimit,
diskStorageLimit}};
cluster.startDataNodes(conf, replicationFactor, storageTypes, true, null,
null, null, storageCapacities, null, false, false, false, null, null, null);
cluster.triggerHeartbeats();
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
// Run Balancer
final BalancerParameters p = BalancerParameters.DEFAULT;
final int r = Balancer.run(namenodes, p, conf);
// Validate no RAM_DISK block should be moved
assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
// Verify files are still on RAM_DISK
DFSTestUtil.verifyFileReplicasOnStorageType(fs, dfsClient, path1, RAM_DISK);
DFSTestUtil.verifyFileReplicasOnStorageType(fs, dfsClient, path2, RAM_DISK);
}
/**
* Balancer should not move blocks with size < minBlockSize.
*/
@Test
@Timeout(value = 60)
public void testMinBlockSizeAndSourceNodes() throws Exception {
final Configuration conf = new HdfsConfiguration();
initConf(conf);
final short replication = 3;
final long[] lengths = {10, 10, 10, 10};
final long[] capacities = new long[replication];
final long totalUsed = capacities.length * TestBalancer.sum(lengths);
Arrays.fill(capacities, 1000);
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(capacities.length)
.simulatedCapacities(capacities)
.build();
final DistributedFileSystem dfs = cluster.getFileSystem();
cluster.waitActive();
client = NameNodeProxies.createProxy(conf, dfs.getUri(),
ClientProtocol.class).getProxy();
// fill up the cluster to be 80% full
for (int i = 0; i < lengths.length; i++) {
final long size = lengths[i];
final Path p = new Path("/file" + i + "_size" + size);
try (OutputStream out = dfs.create(p)) {
for (int j = 0; j < size; j++) {
out.write(j);
}
}
}
// start up an empty node with the same capacity
cluster.startDataNodes(conf, capacities.length, true, null, null, capacities);
LOG.info("capacities = " + Arrays.toString(capacities));
LOG.info("totalUsedSpace= " + totalUsed);
LOG.info("lengths = " + Arrays.toString(lengths) + ", #=" + lengths.length);
TestBalancer.waitForHeartBeat(totalUsed,
2 * capacities[0] * capacities.length, client, cluster);
final Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
{ // run Balancer with min-block-size=50
final BalancerParameters p = Balancer.Cli.parse(new String[]{
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1"
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 50);
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
{ // run Balancer with empty nodes as source nodes
final Set<String> sourceNodes = new HashSet<>();
final List<DataNode> datanodes = cluster.getDataNodes();
for (int i = capacities.length; i < datanodes.size(); i++) {
sourceNodes.add(datanodes.get(i).getDisplayName());
}
final BalancerParameters p = Balancer.Cli.parse(new String[]{
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-source", StringUtils.join(sourceNodes, ',')
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
assertEquals(p.getSourceNodes(), sourceNodes);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 50);
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.NO_MOVE_BLOCK.getExitCode(), r);
}
{ // run Balancer with a filled node as a source node
final Set<String> sourceNodes = new HashSet<>();
final List<DataNode> datanodes = cluster.getDataNodes();
sourceNodes.add(datanodes.get(0).getDisplayName());
final BalancerParameters p = Balancer.Cli.parse(new String[]{
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-source", StringUtils.join(sourceNodes, ',')
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
assertEquals(p.getSourceNodes(), sourceNodes);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.NO_MOVE_BLOCK.getExitCode(), r);
}
{ // run Balancer with all filled node as source nodes
final Set<String> sourceNodes = new HashSet<>();
final List<DataNode> datanodes = cluster.getDataNodes();
for (int i = 0; i < capacities.length; i++) {
sourceNodes.add(datanodes.get(i).getDisplayName());
}
final BalancerParameters p = Balancer.Cli.parse(new String[]{
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-source", StringUtils.join(sourceNodes, ',')
});
assertEquals(p.getBalancingPolicy(), BalancingPolicy.Node.INSTANCE);
assertEquals(p.getThreshold(), 1.0, 0.001);
assertEquals(p.getSourceNodes(), sourceNodes);
conf.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY, 1);
final int r = Balancer.run(namenodes, p, conf);
assertEquals(ExitStatus.SUCCESS.getExitCode(), r);
}
}
/**
* Verify balancer won't violate upgrade domain block placement policy.
*
* @throws Exception
*/
@Test
@Timeout(value = 100)
public void testUpgradeDomainPolicyAfterBalance() throws Exception {
final Configuration conf = new HdfsConfiguration();
initConf(conf);
conf.setClass(DFSConfigKeys.DFS_BLOCK_REPLICATOR_CLASSNAME_KEY,
BlockPlacementPolicyWithUpgradeDomain.class,
BlockPlacementPolicy.class);
long[] capacities = new long[]{CAPACITY, CAPACITY, CAPACITY};
String[] hosts = {"host0", "host1", "host2"};
String[] racks = {RACK0, RACK1, RACK1};
String[] uds = {"ud0", "ud1", "ud2"};
runBalancerAndVerifyBlockPlacmentPolicy(conf, capacities, hosts, racks,
uds, CAPACITY, "host3", RACK2, "ud2");
}
/**
* Verify balancer won't violate the default block placement policy.
*
* @throws Exception
*/
@Test
@Timeout(value = 100)
public void testRackPolicyAfterBalance() throws Exception {
final Configuration conf = new HdfsConfiguration();
initConf(conf);
long[] capacities = new long[]{CAPACITY, CAPACITY};
String[] hosts = {"host0", "host1"};
String[] racks = {RACK0, RACK1};
runBalancerAndVerifyBlockPlacmentPolicy(conf, capacities, hosts, racks,
null, CAPACITY, "host2", RACK1, null);
}
private void runBalancerAndVerifyBlockPlacmentPolicy(Configuration conf,
long[] capacities, String[] hosts, String[] racks, String[] UDs,
long newCapacity, String newHost, String newRack, String newUD)
throws Exception {
int numOfDatanodes = capacities.length;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
.hosts(hosts).racks(racks).simulatedCapacities(capacities).build();
DatanodeManager dm = cluster.getNamesystem().getBlockManager().
getDatanodeManager();
if (UDs != null) {
for (int i = 0; i < UDs.length; i++) {
DatanodeID datanodeId = cluster.getDataNodes().get(i).getDatanodeId();
dm.getDatanode(datanodeId).setUpgradeDomain(UDs[i]);
}
}
try {
cluster.waitActive();
client = NameNodeProxies.createProxy(conf,
cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
// fill up the cluster to be 80% full
long totalCapacity = TestBalancer.sum(capacities);
long totalUsedSpace = totalCapacity * 8 / 10;
final long fileSize = totalUsedSpace / numOfDatanodes;
DFSTestUtil.createFile(cluster.getFileSystem(0), FILE_PATH, false, 1024,
fileSize, DEFAULT_BLOCK_SIZE, (short) numOfDatanodes, 0, false);
// start up an empty node with the same capacity on the same rack as the
// pinned host.
cluster.startDataNodes(conf, 1, true, null, new String[]{newRack},
new String[]{newHost}, new long[]{newCapacity});
if (newUD != null) {
DatanodeID newId = cluster.getDataNodes().get(
numOfDatanodes).getDatanodeId();
dm.getDatanode(newId).setUpgradeDomain(newUD);
}
totalCapacity += newCapacity;
// run balancer and validate results
TestBalancer.waitForHeartBeat(totalUsedSpace,
totalCapacity, client, cluster);
// start rebalancing
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
Balancer.run(namenodes, BalancerParameters.DEFAULT, conf);
BlockPlacementPolicy placementPolicy =
cluster.getNamesystem().getBlockManager().getBlockPlacementPolicy();
List<LocatedBlock> locatedBlocks = client.
getBlockLocations(FILE_NAME, 0, fileSize).getLocatedBlocks();
for (LocatedBlock locatedBlock : locatedBlocks) {
BlockPlacementStatus status = placementPolicy.verifyBlockPlacement(
locatedBlock.getLocations(), numOfDatanodes);
assertTrue(status.isPlacementPolicySatisfied());
}
} finally {
cluster.shutdown();
}
}
/**
* Make sure that balancer can't move pinned blocks.
* If specified favoredNodes when create file, blocks will be pinned use
* sticky bit.
*
* @throws Exception
*/
@Test
@Timeout(value = 100)
public void testBalancerWithPinnedBlocks() throws Exception {
// This test assumes stick-bit based block pin mechanism available only
// in Linux/Unix. It can be unblocked on Windows when HDFS-7759 is ready to
// provide a different mechanism for Windows.
assumeNotWindows();
final Configuration conf = new HdfsConfiguration();
initConf(conf);
conf.setBoolean(DFS_DATANODE_BLOCK_PINNING_ENABLED, true);
long[] capacities = new long[]{CAPACITY, CAPACITY};
String[] hosts = {"host0", "host1"};
String[] racks = {RACK0, RACK1};
int numOfDatanodes = capacities.length;
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(capacities.length)
.hosts(hosts).racks(racks).simulatedCapacities(capacities).build();
cluster.waitActive();
client = NameNodeProxies.createProxy(conf,
cluster.getFileSystem(0).getUri(), ClientProtocol.class).getProxy();
// fill up the cluster to be 80% full
long totalCapacity = TestBalancer.sum(capacities);
long totalUsedSpace = totalCapacity * 8 / 10;
InetSocketAddress[] favoredNodes = new InetSocketAddress[numOfDatanodes];
for (int i = 0; i < favoredNodes.length; i++) {
// DFSClient will attempt reverse lookup. In case it resolves
// "127.0.0.1" to "localhost", we manually specify the hostname.
int port = cluster.getDataNodes().get(i).getXferAddress().getPort();
favoredNodes[i] = new InetSocketAddress(hosts[i], port);
}
DFSTestUtil.createFile(cluster.getFileSystem(0), FILE_PATH, false, 1024,
totalUsedSpace / numOfDatanodes, DEFAULT_BLOCK_SIZE,
(short) numOfDatanodes, 0, false, favoredNodes);
// start up an empty node with the same capacity
cluster.startDataNodes(conf, 1, true, null, new String[]{RACK2},
new long[]{CAPACITY});
totalCapacity += CAPACITY;
// run balancer and validate results
TestBalancer.waitForHeartBeat(totalUsedSpace, totalCapacity, client,
cluster);
// start rebalancing
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
int r = Balancer.run(namenodes, BalancerParameters.DEFAULT, conf);
assertEquals(ExitStatus.NO_MOVE_PROGRESS.getExitCode(), r);
}
@Test
@Timeout(value = 60)
public void testBalancerWithSortTopNodes() throws Exception {
final Configuration conf = new HdfsConfiguration();
initConf(conf);
conf.setInt(DFS_HEARTBEAT_INTERVAL_KEY, 30000);
final long capacity = 1000L;
final int diffBetweenNodes = 50;
// Set up the datanodes with two groups:
// 5 over-utilized nodes with 80%, 85%, 90%, 95%, 100% usage
    // 2 under-utilized nodes with 0%, 5% usage
// With sortTopNodes option, 100% and 95% used ones will be chosen.
final int numOfOverUtilizedDn = 5;
final int numOfUnderUtilizedDn = 2;
final int totalNumOfDn = numOfOverUtilizedDn + numOfUnderUtilizedDn;
final long[] capacityArray = new long[totalNumOfDn];
Arrays.fill(capacityArray, capacity);
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(totalNumOfDn)
.simulatedCapacities(capacityArray)
.build();
cluster.setDataNodesDead();
List<DataNode> dataNodes = cluster.getDataNodes();
// Create top used nodes
for (int i = 0; i < numOfOverUtilizedDn; i++) {
// Bring one node alive
DataNodeTestUtils.triggerHeartbeat(dataNodes.get(i));
DataNodeTestUtils.triggerBlockReport(dataNodes.get(i));
// Create nodes with: 80%, 85%, 90%, 95%, 100%.
int capacityForThisDatanode = (int) capacity
- diffBetweenNodes * (numOfOverUtilizedDn - i - 1);
TestBalancer.createFile(cluster, new Path("test_big" + i),
capacityForThisDatanode, (short) 1, 0);
cluster.setDataNodesDead();
}
// Create under utilized nodes
for (int i = numOfUnderUtilizedDn - 1; i >= 0; i--) {
int index = i + numOfOverUtilizedDn;
// Bring one node alive
DataNodeTestUtils.triggerHeartbeat(dataNodes.get(index));
DataNodeTestUtils.triggerBlockReport(dataNodes.get(index));
// Create nodes with: 5%, 0%
int capacityForThisDatanode = diffBetweenNodes * i;
TestBalancer.createFile(cluster,
new Path("test_small" + i),
capacityForThisDatanode, (short) 1, 0);
cluster.setDataNodesDead();
}
// Bring all nodes alive
cluster.triggerHeartbeats();
cluster.triggerBlockReports();
cluster.waitFirstBRCompleted(0, 6000);
final BalancerParameters p = Balancer.Cli.parse(new String[]{
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-sortTopNodes"
});
client = NameNodeProxies.createProxy(conf,
cluster.getFileSystem(0).getUri(),
ClientProtocol.class).getProxy();
// Set max-size-to-move to small number
// so only top two nodes will be chosen in one iteration.
conf.setLong(DFS_BALANCER_MAX_SIZE_TO_MOVE_KEY, 99L);
final Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
List<NameNodeConnector> connectors = NameNodeConnector
.newNameNodeConnectors(namenodes,
Balancer.class.getSimpleName(), Balancer.BALANCER_ID_PATH, conf,
BalancerParameters.DEFAULT.getMaxIdleIteration());
final Balancer b = new Balancer(connectors.get(0), p, conf);
Balancer.Result balancerResult = b.runOneIteration();
cluster.triggerDeletionReports();
cluster.triggerBlockReports();
cluster.triggerHeartbeats();
DatanodeInfo[] datanodeReport = client
.getDatanodeReport(HdfsConstants.DatanodeReportType.ALL);
long maxUsage = 0;
for (int i = 0; i < totalNumOfDn; i++) {
maxUsage = Math.max(maxUsage, datanodeReport[i].getDfsUsed());
}
// The 95% usage DN will have 9 blocks of 100B and 1 block of 50B - all for the same file.
// The HDFS balancer will choose a block to move from this node randomly. More likely it will
// be 100B block. Since 100B is greater than DFS_BALANCER_MAX_SIZE_TO_MOVE_KEY which is 99L,
// it will stop here. Total bytes moved from this 95% DN will be 1 block of size 100B.
// However, chances are the first block selected to be moved from this 95% DN is the 50B block.
// After this block is moved, the total moved size so far would be 50B which is smaller than
// DFS_BALANCER_MAX_SIZE_TO_MOVE_KEY (99L), hence it will try to move another block.
// The second block will always be of size 100B. So total bytes moved from this 95% DN will be
// 2 blocks of size (100B + 50B) 150B.
// Hence, overall total blocks moved by HDFS balancer would be either of these 2 options:
// a) 2 blocks of total size (100B + 100B)
// b) 3 blocks of total size (50B + 100B + 100B)
assertTrue((balancerResult.getBytesAlreadyMoved() == 200
&& balancerResult.getBlocksMoved() == 2)
|| (balancerResult.getBytesAlreadyMoved() == 250
&& balancerResult.getBlocksMoved() == 3),
"BalancerResult is not as expected. " + balancerResult);
// 100% and 95% used nodes will be balanced, so top used will be 900
assertEquals(900, maxUsage);
}
@Test
@Timeout(value = 60)
public void testBalancerWithLimitOverUtilizedNum() throws Exception {
final Configuration conf = new HdfsConfiguration();
// Init the config (block size to 100)
initConf(conf);
conf.setInt(DFS_HEARTBEAT_INTERVAL_KEY, 30000);
final long totalCapacity = 1000L;
final int diffBetweenNodes = 50;
// Set up the nodes with two groups:
// 5 over-utilized nodes with 80%, 85%, 90%, 95%, 100% usage
// 2 under-utilized nodes with 0%, 5% usage
// With sortTopNodes and limitOverUtilizedNum option, 100% used ones will be chosen
final int numOfOverUtilizedDn = 5;
final int numOfUnderUtilizedDn = 2;
final int totalNumOfDn = numOfOverUtilizedDn + numOfUnderUtilizedDn;
final long[] capacityArray = new long[totalNumOfDn];
Arrays.fill(capacityArray, totalCapacity);
try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(totalNumOfDn)
.simulatedCapacities(capacityArray)
.build()) {
cluster.setDataNodesDead();
List<DataNode> dataNodes = cluster.getDataNodes();
// Create top used nodes
for (int i = 0; i < numOfOverUtilizedDn; i++) {
// Bring one node alive
DataNodeTestUtils.triggerHeartbeat(dataNodes.get(i));
DataNodeTestUtils.triggerBlockReport(dataNodes.get(i));
// Create nodes with: 80%, 85%, 90%, 95%, 100%
int nodeCapacity = (int) totalCapacity - diffBetweenNodes * (numOfOverUtilizedDn - i - 1);
TestBalancer.createFile(cluster, new Path("test_big" + i), nodeCapacity, (short) 1, 0);
cluster.setDataNodesDead();
}
// Create under utilized nodes
for (int i = numOfUnderUtilizedDn - 1; i >= 0; i--) {
int index = i + numOfOverUtilizedDn;
// Bring one node alive
DataNodeTestUtils.triggerHeartbeat(dataNodes.get(index));
DataNodeTestUtils.triggerBlockReport(dataNodes.get(index));
// Create nodes with: 5%, 0%
int nodeCapacity = diffBetweenNodes * i;
TestBalancer.createFile(cluster, new Path("test_small" + i), nodeCapacity, (short) 1, 0);
cluster.setDataNodesDead();
}
// Bring all nodes alive
cluster.triggerHeartbeats();
cluster.triggerBlockReports();
cluster.waitFirstBRCompleted(0, 6000);
final BalancerParameters balancerParameters = Balancer.Cli.parse(new String[] {
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "1",
"-sortTopNodes",
"-limitOverUtilizedNum", "1"
});
client = NameNodeProxies.createProxy(conf, cluster.getFileSystem(0)
.getUri(), ClientProtocol.class)
.getProxy();
// Set max-size-to-move to small number
// so only top two nodes will be chosen in one iteration
conf.setLong(DFS_BALANCER_MAX_SIZE_TO_MOVE_KEY, 99L);
final Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
List<NameNodeConnector> connectors =
NameNodeConnector.newNameNodeConnectors(namenodes, Balancer.class.getSimpleName(),
Balancer.BALANCER_ID_PATH, conf, BalancerParameters.DEFAULT.getMaxIdleIteration());
final Balancer balancer = new Balancer(connectors.get(0), balancerParameters, conf);
Balancer.Result balancerResult = balancer.runOneIteration();
cluster.triggerDeletionReports();
cluster.triggerBlockReports();
cluster.triggerHeartbeats();
DatanodeInfo[] datanodeReport =
client.getDatanodeReport(HdfsConstants.DatanodeReportType.ALL);
long maxUsage = 0;
for (int i = 0; i < totalNumOfDn; i++) {
maxUsage = Math.max(maxUsage, datanodeReport[i].getDfsUsed());
}
// The maxUsage value is 950, only 100% of the nodes will be balanced
assertEquals(950, maxUsage);
assertTrue(
(balancerResult.getBytesAlreadyMoved() == 100 && balancerResult.getBlocksMoved() == 1),
"BalancerResult is not as expected. " + balancerResult);
}
}
@Test
@Timeout(value = 60)
public void testBalancerMetricsDuplicate() throws Exception {
final Configuration conf = new HdfsConfiguration();
// Init the config (block size to 100)
initConf(conf);
final long totalCapacity = 1000L;
final int numOfOverUtilizedDn = 1;
final int numOfUnderUtilizedDn = 2;
final int totalNumOfDn = numOfOverUtilizedDn + numOfUnderUtilizedDn;
final long[] capacityArray = new long[totalNumOfDn];
Arrays.fill(capacityArray, totalCapacity);
try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(totalNumOfDn)
.simulatedCapacities(capacityArray)
.build()) {
cluster.setDataNodesDead();
List<DataNode> dataNodes = cluster.getDataNodes();
DataNodeTestUtils.triggerHeartbeat(dataNodes.get(0));
DataNodeTestUtils.triggerBlockReport(dataNodes.get(0));
// Create nodes with: 100%
TestBalancer.createFile(cluster, new Path("test_big" + 0), 1000, (short) 1, 0);
cluster.setDataNodesDead();
// Two UnderUtilized in the cluster, execute at least twice: b.runOneIteration()
for (int i = 1; i <= numOfUnderUtilizedDn; i++) {
DataNodeTestUtils.triggerHeartbeat(dataNodes.get(i));
DataNodeTestUtils.triggerBlockReport(dataNodes.get(i));
// Create nodes with: 0%
TestBalancer.createFile(cluster, new Path("test_small" + i), 0, (short) 1, 0);
cluster.setDataNodesDead();
}
cluster.triggerDeletionReports();
cluster.triggerBlockReports();
cluster.triggerHeartbeats();
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
Collection<String> nsIds = DFSUtilClient.getNameServiceIds(conf);
assertEquals(1, namenodes.size());
// Throw an error when we double-initialize BalancerMetrics
DefaultMetricsSystem.setMiniClusterMode(false);
MetricsSystem instance = DefaultMetricsSystem.instance();
// Avoid the impact of cluster metric, remove cluster JvmMetrics
instance.unregisterSource("JvmMetrics");
final BalancerParameters balancerParameters = Balancer.Cli.parse(new String[] {
"-policy", BalancingPolicy.Node.INSTANCE.getName(),
"-threshold", "10",
});
int r = Balancer.run(namenodes, nsIds, balancerParameters, conf);
assertEquals(ExitStatus.SUCCESS.getExitCode(), r);
DefaultMetricsSystem.setMiniClusterMode(true);
}
}
@Test
@Timeout(value = 100)
public void testMaxIterationTime() throws Exception {
final Configuration conf = new HdfsConfiguration();
initConf(conf);
int blockSize = 10 * 1024 * 1024; // 10MB block size
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, blockSize);
// limit the worker thread count of Balancer to have only 1 queue per DN
conf.setInt(DFSConfigKeys.DFS_BALANCER_MOVERTHREADS_KEY, 1);
// limit the bandwidth to 4MB per sec to emulate slow block moves
conf.setLong(DFSConfigKeys.DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY,
4 * 1024 * 1024);
// set client socket timeout to have an IN_PROGRESS notification back from
// the DataNode about the copy in every second.
conf.setLong(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, 2000L);
// set max iteration time to 500 ms to timeout before moving any block
conf.setLong(DFSConfigKeys.DFS_BALANCER_MAX_ITERATION_TIME_KEY, 500L);
// setup the cluster
final long capacity = 10L * blockSize;
final long[] dnCapacities = new long[]{capacity, capacity};
final short rep = 1;
final long seed = 0xFAFAFA;
cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(0)
.build();
try {
cluster.getConfiguration(0).setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
cluster.startDataNodes(conf, 1, true, null, null, dnCapacities);
cluster.waitClusterUp();
cluster.waitActive();
final Path path = new Path("/testMaxIterationTime.dat");
DistributedFileSystem fs = cluster.getFileSystem();
// fill the DN to 40%
DFSTestUtil.createFile(fs, path, 4L * blockSize, rep, seed);
// start a new DN
cluster.startDataNodes(conf, 1, true, null, null, dnCapacities);
cluster.triggerHeartbeats();
// setup Balancer and run one iteration
List<NameNodeConnector> connectors = Collections.emptyList();
try {
BalancerParameters bParams = BalancerParameters.DEFAULT;
// set maxIdleIterations to 1 for NO_MOVE_PROGRESS to be
// reported when there is no block move
connectors = NameNodeConnector.newNameNodeConnectors(
DFSUtil.getInternalNsRpcUris(conf), Balancer.class.getSimpleName(),
Balancer.BALANCER_ID_PATH, conf, 1);
for (NameNodeConnector nnc : connectors) {
LOG.info("NNC to work on: " + nnc);
Balancer b = new Balancer(nnc, bParams, conf);
Balancer.Result r = b.runOneIteration();
// Since no block can be moved in 500 milli-seconds (i.e.,
// 4MB/s * 0.5s = 2MB < 10MB), NO_MOVE_PROGRESS will be reported.
// When a block move is not canceled in 500 ms properly
// (highly unlikely) and then a block is moved unexpectedly,
// IN_PROGRESS will be reported. This is highly unlikely unexpected
// case. See HDFS-15989.
assertEquals(ExitStatus.NO_MOVE_PROGRESS, r.getExitStatus(),
"We expect ExitStatus.NO_MOVE_PROGRESS to be reported.");
assertEquals(0, r.getBlocksMoved());
}
} finally {
for (NameNodeConnector nnc : connectors) {
IOUtils.cleanupWithLogger(null, nnc);
}
}
} finally {
cluster.shutdown(true, true);
}
}
}
|
TestBalancerLongRunningTasks
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java
|
{
"start": 846,
"end": 5807
}
|
class ____ extends BaseAggregationTestCase<PercentilesAggregationBuilder> {
@Override
protected PercentilesAggregationBuilder createTestAggregatorBuilder() {
PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(randomAlphaOfLengthBetween(1, 20));
if (randomBoolean()) {
factory.keyed(randomBoolean());
}
if (randomBoolean()) {
int percentsSize = randomIntBetween(1, 20);
double[] percents = new double[percentsSize];
for (int i = 0; i < percentsSize; i++) {
percents[i] = randomDouble() * 100;
}
factory.percentiles(percents);
}
if (randomBoolean()) {
factory.numberOfSignificantValueDigits(randomIntBetween(0, 5));
} else if (randomBoolean()) {
factory.compression(randomIntBetween(1, 50000));
}
String field = randomNumericField();
randomFieldOrScript(factory, field);
if (randomBoolean()) {
factory.missing("MISSING");
}
if (randomBoolean()) {
factory.format("###.00");
}
return factory;
}
public void testNullOrEmptyPercentilesThrows() throws IOException {
PercentilesAggregationBuilder builder = new PercentilesAggregationBuilder("testAgg");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> builder.percentiles(null));
assertEquals("[percents] must not be null: [testAgg]", ex.getMessage());
ex = expectThrows(IllegalArgumentException.class, () -> builder.percentiles(new double[0]));
assertEquals("[percents] must not be empty: [testAgg]", ex.getMessage());
}
public void testOutOfRangePercentilesThrows() throws IOException {
PercentilesAggregationBuilder builder = new PercentilesAggregationBuilder("testAgg");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> builder.percentiles(-0.4));
assertEquals("percent must be in [0,100], got [-0.4]: [testAgg]", ex.getMessage());
ex = expectThrows(IllegalArgumentException.class, () -> builder.percentiles(104));
assertEquals("percent must be in [0,100], got [104.0]: [testAgg]", ex.getMessage());
}
public void testDuplicatePercentilesThrows() throws IOException {
PercentilesAggregationBuilder builder = new PercentilesAggregationBuilder("testAgg");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> builder.percentiles(5, 42, 10, 99, 42, 87));
assertEquals("percent [42.0] has been specified twice: [testAgg]", ex.getMessage());
}
public void testExceptionMultipleMethods() throws IOException {
final String illegalAgg = """
{
"percentiles": {
"field": "load_time",
"percents": [99],
"tdigest": {
"compression": 200
},
"hdr": {
"number_of_significant_value_digits": 3
}
}
}""";
XContentParser parser = createParser(JsonXContent.jsonXContent, illegalAgg);
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
XContentParseException e = expectThrows(
XContentParseException.class,
() -> PercentilesAggregationBuilder.PARSER.parse(parser, "myPercentiles")
);
assertThat(e.getMessage(), containsString("[percentiles] failed to parse field [hdr]"));
}
public void testParseTDigestWithParams() throws IOException {
final String percentileConfig = """
{
"percentiles": {
"field": "load_time",
"percents": [1, 99],
"tdigest": {
"compression": 200,
"execution_hint": "high_accuracy"
}
}
}""";
XContentParser parser = createParser(JsonXContent.jsonXContent, percentileConfig);
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
PercentilesAggregationBuilder parsed = PercentilesAggregationBuilder.PARSER.parse(parser, "myPercentiles");
assertArrayEquals(parsed.percentiles(), new double[] { 1.0, 99.0 }, 0.0);
assertEquals(PercentilesMethod.TDIGEST, parsed.percentilesConfig().getMethod());
var tdigestConfig = (PercentilesConfig.TDigest) parsed.percentilesConfig();
assertEquals(200.0, tdigestConfig.getCompression(), 0);
assertEquals(TDigestExecutionHint.HIGH_ACCURACY, tdigestConfig.getExecutionHint(null));
}
}
|
PercentilesTests
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/RecipientListParallelStopOnExceptionWithOnExceptionTest.java
|
{
"start": 1035,
"end": 2451
}
|
class ____ extends ContextTestSupport {
@Test
public void testRecipientListStopOnException() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:b").expectedMessageCount(1);
// we run parallel so the tasks could haven been processed so we get 0
// or more messages
getMockEndpoint("mock:a").expectedMinimumMessageCount(0);
getMockEndpoint("mock:c").expectedMinimumMessageCount(0);
String out = template.requestBodyAndHeader("direct:start", "Hello World", "foo", "direct:a,direct:b,direct:c",
String.class);
assertEquals("Damn Forced", out);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class).handled(true).to("mock:handled").transform(simple("Damn ${exception.message}"));
from("direct:start").recipientList(header("foo")).stopOnException().parallelProcessing().to("mock:result");
from("direct:a").to("mock:a");
from("direct:b").to("mock:b").throwException(new IllegalArgumentException("Forced"));
from("direct:c").to("mock:c");
}
};
}
}
|
RecipientListParallelStopOnExceptionWithOnExceptionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/embedded/LegalStructure.java
|
{
"start": 423,
"end": 1450
}
|
class ____ {
private String name;
private String country;
private CorpType corporationType;
private Nationality origin;
private Set<Manager> topManagement = new HashSet<Manager>();
@ManyToOne
@JoinColumn(name = "CORP_ID")
public CorpType getCorporationType() {
return corporationType;
}
public void setCorporationType(CorpType corporationType) {
this.corporationType = corporationType;
}
public String getCountry() {
return country;
}
public void setCountry(String country) {
this.country = country;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@ManyToOne
@JoinColumn(name = "origin_fk")
public Nationality getOrigin() {
return origin;
}
public void setOrigin(Nationality origin) {
this.origin = origin;
}
@OneToMany(mappedBy = "employer")
public Set<Manager> getTopManagement() {
return topManagement;
}
public void setTopManagement(Set<Manager> topManagement) {
this.topManagement = topManagement;
}
}
|
LegalStructure
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/registry/nacos/nacos/NacosServiceNameTest.java
|
{
"start": 1397,
"end": 3867
}
|
class ____ {
private static final String category = DEFAULT_CATEGORY;
private static final String serviceInterface = "org.apache.dubbo.registry.nacos.NacosServiceName";
private static final String version = "1.0.0";
private static final String group = "default";
private final NacosServiceName name = new NacosServiceName();
@BeforeEach
public void init() {
name.setCategory(category);
name.setServiceInterface(serviceInterface);
name.setVersion(version);
name.setGroup(group);
}
@Test
void testGetter() {
assertEquals(category, name.getCategory());
assertEquals(serviceInterface, name.getServiceInterface());
assertEquals(version, name.getVersion());
assertEquals(group, name.getGroup());
assertEquals("providers:org.apache.dubbo.registry.nacos.NacosServiceName:1.0.0:default", name.getValue());
}
@Test
void testToString() {
assertEquals("providers:org.apache.dubbo.registry.nacos.NacosServiceName:1.0.0:default", name.toString());
}
@Test
void testIsConcrete() {
assertTrue(name.isConcrete());
name.setGroup(WILDCARD);
assertFalse(name.isConcrete());
init();
name.setVersion(WILDCARD);
assertFalse(name.isConcrete());
init();
name.setGroup(null);
name.setVersion(null);
assertTrue(name.isConcrete());
}
@Test
void testIsCompatible() {
NacosServiceName concrete = new NacosServiceName();
assertFalse(name.isCompatible(concrete));
// set category
concrete.setCategory(category);
assertFalse(name.isCompatible(concrete));
concrete.setServiceInterface(serviceInterface);
assertFalse(name.isCompatible(concrete));
concrete.setVersion(version);
assertFalse(name.isCompatible(concrete));
concrete.setGroup(group);
assertTrue(name.isCompatible(concrete));
// wildcard cases
name.setGroup(WILDCARD);
assertTrue(name.isCompatible(concrete));
init();
name.setVersion(WILDCARD);
assertTrue(name.isCompatible(concrete));
// range cases
init();
name.setGroup(group + ",2.0.0");
assertTrue(name.isCompatible(concrete));
init();
name.setVersion(version + ",2.0.0");
assertTrue(name.isCompatible(concrete));
}
}
|
NacosServiceNameTest
|
java
|
dropwizard__dropwizard
|
dropwizard-example/src/main/java/com/example/helloworld/core/User.java
|
{
"start": 125,
"end": 673
}
|
class ____ implements Principal {
private static final Random rng = new Random();
private final String name;
private final Set<String> roles;
public User(String name) {
this.name = name;
this.roles = null;
}
public User(String name, Set<String> roles) {
this.name = name;
this.roles = roles;
}
public String getName() {
return name;
}
public int getId() {
return rng.nextInt(100);
}
public Set<String> getRoles() {
return roles;
}
}
|
User
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/utils/TestSharedBuffer.java
|
{
"start": 3626,
"end": 10370
}
|
class ____ implements KeyedStateStore {
private long stateWrites = 0;
private long stateReads = 0;
@Override
public <T> ValueState<T> getState(ValueStateDescriptor<T> stateProperties) {
return new ValueState<T>() {
private T value;
@Override
public T value() throws IOException {
stateReads++;
return value;
}
@Override
public void update(T value) throws IOException {
stateWrites++;
this.value = value;
}
@Override
public void clear() {
this.value = null;
}
};
}
@Override
public <T> ListState<T> getListState(ListStateDescriptor<T> stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public <T> ReducingState<T> getReducingState(ReducingStateDescriptor<T> stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public <IN, ACC, OUT> AggregatingState<IN, OUT> getAggregatingState(
AggregatingStateDescriptor<IN, ACC, OUT> stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public <UK, UV> MapState<UK, UV> getMapState(MapStateDescriptor<UK, UV> stateProperties) {
return new MapState<UK, UV>() {
private Map<UK, UV> values;
private Map<UK, UV> getOrSetMap() {
if (values == null) {
this.values = new HashMap<>();
}
return values;
}
@Override
public UV get(UK key) throws Exception {
stateReads++;
if (values == null) {
return null;
}
return values.get(key);
}
@Override
public void put(UK key, UV value) throws Exception {
stateWrites++;
getOrSetMap().put(key, value);
}
@Override
public void putAll(Map<UK, UV> map) throws Exception {
stateWrites++;
getOrSetMap().putAll(map);
}
@Override
public void remove(UK key) throws Exception {
if (values == null) {
return;
}
stateWrites++;
values.remove(key);
}
@Override
public boolean contains(UK key) throws Exception {
if (values == null) {
return false;
}
stateReads++;
return values.containsKey(key);
}
@Override
public Iterable<Map.Entry<UK, UV>> entries() throws Exception {
if (values == null) {
return Collections.emptyList();
}
return () -> new CountingIterator<>(values.entrySet().iterator());
}
@Override
public Iterable<UK> keys() throws Exception {
if (values == null) {
return Collections.emptyList();
}
return () -> new CountingIterator<>(values.keySet().iterator());
}
@Override
public Iterable<UV> values() throws Exception {
if (values == null) {
return Collections.emptyList();
}
return () -> new CountingIterator<>(values.values().iterator());
}
@Override
public Iterator<Map.Entry<UK, UV>> iterator() throws Exception {
if (values == null) {
return Collections.emptyIterator();
}
return new CountingIterator<>(values.entrySet().iterator());
}
@Override
public boolean isEmpty() throws Exception {
if (values == null) {
return true;
}
return values.isEmpty();
}
@Override
public void clear() {
stateWrites++;
this.values = null;
}
};
}
@Override
public <T> org.apache.flink.api.common.state.v2.ValueState<T> getValueState(
@Nonnull
org.apache.flink.api.common.state.v2.ValueStateDescriptor<T>
stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public <T> org.apache.flink.api.common.state.v2.ListState<T> getListState(
@Nonnull
org.apache.flink.api.common.state.v2.ListStateDescriptor<T>
stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public <UK, UV> org.apache.flink.api.common.state.v2.MapState<UK, UV> getMapState(
@Nonnull
org.apache.flink.api.common.state.v2.MapStateDescriptor<UK, UV>
stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public <T> org.apache.flink.api.common.state.v2.ReducingState<T> getReducingState(
@Nonnull
org.apache.flink.api.common.state.v2.ReducingStateDescriptor<T>
stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public <IN, ACC, OUT>
org.apache.flink.api.common.state.v2.AggregatingState<IN, OUT> getAggregatingState(
@Nonnull
org.apache.flink.api.common.state.v2.AggregatingStateDescriptor<
IN, ACC, OUT>
stateProperties) {
throw new UnsupportedOperationException();
}
@Override
public String getBackendTypeIdentifier() {
return "mock";
}
private
|
MockKeyedStateStore
|
java
|
alibaba__nacos
|
auth/src/main/java/com/alibaba/nacos/auth/serveridentity/ServerIdentityResult.java
|
{
"start": 743,
"end": 1614
}
|
class ____ {
private final ResultStatus status;
private final String message;
private ServerIdentityResult(ResultStatus status, String message) {
this.status = status;
this.message = message;
}
public ResultStatus getStatus() {
return status;
}
public String getMessage() {
return message;
}
public static ServerIdentityResult success() {
return new ServerIdentityResult(ResultStatus.MATCHED, "Server identity matched.");
}
public static ServerIdentityResult noMatched() {
return new ServerIdentityResult(ResultStatus.NOT_MATCHED, "Server identity not matched.");
}
public static ServerIdentityResult fail(String message) {
return new ServerIdentityResult(ResultStatus.FAIL, message);
}
public
|
ServerIdentityResult
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/arithmetic/BinaryArithmeticPipe.java
|
{
"start": 723,
"end": 1935
}
|
class ____ extends BinaryPipe {
private final BinaryArithmeticOperation operation;
public BinaryArithmeticPipe(Source source, Expression expression, Pipe left, Pipe right, BinaryArithmeticOperation operation) {
super(source, expression, left, right);
this.operation = operation;
}
@Override
protected NodeInfo<BinaryArithmeticPipe> info() {
return NodeInfo.create(this, BinaryArithmeticPipe::new, expression(), left(), right(), operation);
}
@Override
protected BinaryPipe replaceChildren(Pipe left, Pipe right) {
return new BinaryArithmeticPipe(source(), expression(), left, right, operation);
}
@Override
public Processor asProcessor() {
return new BinaryArithmeticProcessor(left().asProcessor(), right().asProcessor(), operation);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), operation);
}
@Override
public boolean equals(Object obj) {
if (super.equals(obj)) {
BinaryArithmeticPipe other = (BinaryArithmeticPipe) obj;
return Objects.equals(operation, other.operation);
}
return false;
}
}
|
BinaryArithmeticPipe
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/LPosArgs.java
|
{
"start": 712,
"end": 2586
}
|
class ____ {
/**
* Utility constructor.
*/
private Builder() {
}
/**
* Creates new empty {@link LPosArgs}.
*
* @return new {@link LPosArgs}.
* @see LPosArgs#maxlen(long)
*/
public static LPosArgs empty() {
return new LPosArgs();
}
/**
* Creates new {@link LPosArgs} and setting {@literal MAXLEN}.
*
* @return new {@link LPosArgs} with {@literal MAXLEN} set.
* @see LPosArgs#maxlen(long)
*/
public static LPosArgs maxlen(long count) {
return new LPosArgs().maxlen(count);
}
/**
* Creates new {@link LPosArgs} and setting {@literal RANK}.
*
* @return new {@link LPosArgs} with {@literal RANK} set.
* @see LPosArgs#rank(long)
*/
public static LPosArgs rank(long rank) {
return new LPosArgs().rank(rank);
}
}
/**
* Limit list scanning to {@code maxlen} entries.
*
* @param maxlen number greater 0.
* @return {@code this}
*/
public LPosArgs maxlen(long maxlen) {
LettuceAssert.isTrue(maxlen > 0, "Maxlen must be greater 0");
this.maxlen = maxlen;
return this;
}
/**
* Specify the rank of the first element to return, in case there are multiple matches.
*
* @param rank number.
* @return {@code this}
*/
public LPosArgs rank(long rank) {
this.rank = rank;
return this;
}
@Override
public <K, V> void build(CommandArgs<K, V> args) {
if (maxlen != null) {
args.add(CommandKeyword.MAXLEN);
args.add(maxlen);
}
if (rank != null) {
args.add("RANK");
args.add(rank);
}
}
}
|
Builder
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/log/InjectedLoggerTest.java
|
{
"start": 1164,
"end": 1501
}
|
class ____ {
@Inject
Logger log;
@LoggerName("shared")
Logger sharedLog;
public Logger getLog() {
log.info("Someone is here!");
return log;
}
public Logger getSharedLog() {
return sharedLog;
}
}
@Dependent
static
|
SimpleBean
|
java
|
elastic__elasticsearch
|
plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/network/GceNameResolver.java
|
{
"start": 3266,
"end": 4799
}
|
interface ____
gceMetadataPath = Strings.replace(GceAddressResolverType.PRIVATE_IP.gceName, "{{network}}", network);
} else {
throw new IllegalArgumentException(
"["
+ value
+ "] is not one of the supported GCE network.host setting. "
+ "Expecting _gce_, _gce:privateIp:X_, _gce:hostname_"
);
}
try {
String metadataResult = gceMetadataService.metadata(gceMetadataPath);
if (metadataResult == null || metadataResult.length() == 0) {
throw new IOException("no gce metadata returned from [" + gceMetadataPath + "] for [" + value + "]");
}
// only one address: because we explicitly ask for only one via the GceHostnameType
return new InetAddress[] { InetAddress.getByName(metadataResult) };
} catch (URISyntaxException | IOException e) {
throw new IOException("IOException caught when fetching InetAddress from [" + gceMetadataPath + "]", e);
}
}
@Override
public InetAddress[] resolveDefault() {
return null; // using this, one has to explicitly specify _gce_ in network setting
}
@Override
public InetAddress[] resolveIfPossible(String value) throws IOException {
// We only try to resolve network.host setting when it starts with _gce
if (value.startsWith("gce")) {
return resolve(value);
}
return null;
}
}
|
value
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/support/ActiveProfilesUtilsTests.java
|
{
"start": 6884,
"end": 6986
}
|
class ____ {
}
@ActiveProfiles({ "foo", "bar", " foo", "bar ", "baz" })
private static
|
EmptyProfiles
|
java
|
apache__camel
|
components/camel-file-watch/src/main/java/org/apache/camel/component/file/watch/FileWatchConstants.java
|
{
"start": 937,
"end": 3492
}
|
class ____ {
@Metadata(description = "Type of event. Possible values: CREATE, DELETE, MODIFY.", javaType = "String")
public static final String EVENT_TYPE_HEADER = "CamelFileEventType";
@Metadata(description = "Only the file name (the name with no leading paths).", javaType = "String")
public static final String FILE_NAME_ONLY = Exchange.FILE_NAME_ONLY;
@Metadata(description = "A `boolean` option specifying whether the consumed file denotes an\n" +
"absolute path or not. Should normally be `false` for relative paths.\n" +
"Absolute paths should normally not be used but we added to the move\n" +
"option to allow moving files to absolute paths. But can be used\n" +
"elsewhere as well.",
javaType = "Boolean")
public static final String FILE_ABSOLUTE = "CamelFileAbsolute";
@Metadata(description = "The absolute path to the file. For relative files this path holds the\n" +
"relative path instead.",
javaType = "String")
public static final String FILE_ABSOLUTE_PATH = "CamelFileAbsolutePath";
@Metadata(description = "The file path. For relative files this is the starting directory + the\n" +
"relative filename. For absolute files this is the absolute path.",
javaType = "String")
public static final String FILE_PATH = Exchange.FILE_PATH;
@Metadata(description = "Name of the consumed file as a relative file path with offset from the\n" +
"starting directory configured on the endpoint.",
javaType = "String", important = true)
public static final String FILE_NAME = Exchange.FILE_NAME;
@Metadata(description = "The relative path.", javaType = "String")
public static final String FILE_RELATIVE_PATH = "CamelFileRelativePath";
@Metadata(description = "The name of the file that has been consumed", javaType = "String")
public static final String FILE_NAME_CONSUMED = Exchange.FILE_NAME_CONSUMED;
@Metadata(description = "The parent path.", javaType = "String")
public static final String FILE_PARENT = Exchange.FILE_PARENT;
@Metadata(description = "A `Long` value containing the last modified timestamp of the file.",
javaType = "long")
public static final String FILE_LAST_MODIFIED = Exchange.FILE_LAST_MODIFIED;
private FileWatchConstants() {
// Utility class
}
}
|
FileWatchConstants
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/BasicJpaAnnotation.java
|
{
"start": 524,
"end": 1858
}
|
class ____
implements Basic, AttributeMarker, AttributeMarker.Fetchable, AttributeMarker.Optionalable {
private jakarta.persistence.FetchType fetch;
private boolean optional;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public BasicJpaAnnotation(ModelsContext modelContext) {
this.fetch = jakarta.persistence.FetchType.EAGER;
this.optional = true;
}
/**
* Used in creating annotation instances from JDK variant
*/
public BasicJpaAnnotation(Basic annotation, ModelsContext modelContext) {
this.fetch = annotation.fetch();
this.optional = annotation.optional();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public BasicJpaAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.fetch = (jakarta.persistence.FetchType) attributeValues.get( "fetch" );
this.optional = (boolean) attributeValues.get( "optional" );
}
@Override
public Class<? extends Annotation> annotationType() {
return Basic.class;
}
@Override
public jakarta.persistence.FetchType fetch() {
return fetch;
}
public void fetch(jakarta.persistence.FetchType value) {
this.fetch = value;
}
@Override
public boolean optional() {
return optional;
}
public void optional(boolean value) {
this.optional = value;
}
}
|
BasicJpaAnnotation
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/util/concurrent/KeyedLock.java
|
{
"start": 4587,
"end": 5801
}
|
class ____ extends ReentrantLock {
private static final VarHandle VH_COUNT_FIELD;
static {
try {
VH_COUNT_FIELD = MethodHandles.lookup().in(KeyLock.class).findVarHandle(KeyLock.class, "count", int.class);
} catch (NoSuchFieldException | IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("FieldMayBeFinal") // updated via VH_COUNT_FIELD (and _only_ via VH_COUNT_FIELD)
private volatile int count = 1;
KeyLock() {
super();
}
int decCountAndGet() {
do {
int i = count;
int newCount = i - 1;
if (VH_COUNT_FIELD.weakCompareAndSet(this, i, newCount)) {
return newCount;
}
} while (true);
}
boolean tryIncCount(int expectedCount) {
return VH_COUNT_FIELD.compareAndSet(this, expectedCount, expectedCount + 1);
}
}
/**
* Returns <code>true</code> if this lock has at least one locked key.
*/
public boolean hasLockedKeys() {
return map.isEmpty() == false;
}
}
|
KeyLock
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/Queue.java
|
{
"start": 1508,
"end": 4597
}
|
interface ____ {
  /**
   * Get the queue name
   * @return queue name
   */
  String getQueueName();
  /**
   * Get the queue metrics
   * @return the queue metrics
   */
  QueueMetrics getMetrics();
  /**
   * Get queue information
   * @param includeChildQueues include child queues?
   * @param recursive recursively get child queue information?
   * @return queue information
   */
  QueueInfo getQueueInfo(boolean includeChildQueues, boolean recursive);
  /**
   * Get queue ACLs for given <code>user</code>.
   * @param user username
   * @return queue ACLs for user
   */
  List<QueueUserACLInfo> getQueueUserAclInfo(UserGroupInformation user);
  /**
   * Check whether the given user holds the given ACL on this queue.
   * @param acl the queue ACL being checked
   * @param user the user to check
   * @return true if the user has the ACL on this queue
   */
  boolean hasAccess(QueueACL acl, UserGroupInformation user);
  /**
   * Get the users manager that tracks the users of this queue.
   * @return this queue's users manager
   */
  public AbstractUsersManager getAbstractUsersManager();
  /**
   * Recover the state of the queue for a given container.
   * @param clusterResource the resource of the cluster
   * @param schedulerAttempt the application for which the container was allocated
   * @param rmContainer the container that was recovered.
   */
  public void recoverContainer(Resource clusterResource,
      SchedulerApplicationAttempt schedulerAttempt, RMContainer rmContainer);
  /**
   * Get labels can be accessed of this queue
   * labels={*}, means this queue can access any label
   * labels={ }, means this queue cannot access any label except node without label
   * labels={a, b, c} means this queue can access a or b or c
   * @return labels
   */
  public Set<String> getAccessibleNodeLabels();
  /**
   * Get default label expression of this queue. If label expression of
   * ApplicationSubmissionContext and label expression of Resource Request not
   * set, this will be used.
   *
   * @return default label expression
   */
  public String getDefaultNodeLabelExpression();
  /**
   * When new outstanding resource is asked, calling this will increase pending
   * resource in a queue.
   *
   * @param nodeLabel asked by application
   * @param resourceToInc new resource asked
   */
  public void incPendingResource(String nodeLabel, Resource resourceToInc);
  /**
   * When an outstanding resource is fulfilled or canceled, calling this will
   * decrease pending resource in a queue.
   *
   * @param nodeLabel
   *          asked by application
   * @param resourceToDec
   *          new resource asked
   */
  public void decPendingResource(String nodeLabel, Resource resourceToDec);
  /**
   * Get the Default Application Priority for this queue
   *
   * @return default application priority
   */
  public Priority getDefaultApplicationPriority();
  /**
   * Increment Reserved Capacity
   *
   * @param partition
   *          asked by application
   * @param reservedRes
   *          reserved resource asked
   */
  public void incReservedResource(String partition, Resource reservedRes);
  /**
   * Decrement Reserved Capacity
   *
   * @param partition
   *          asked by application
   * @param reservedRes
   *          reserved resource asked
   */
  public void decReservedResource(String partition, Resource reservedRes);
}
|
Queue
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
|
{
"start": 9019,
"end": 46406
}
|
class ____ extends AuthenticationFilter {
    /**
     * Builds on the parent's filter configuration but forces the "simple"
     * (pseudo) authentication handler and disallows anonymous access, so
     * every request must carry an explicit user name.
     */
    @Override
    protected Properties getConfiguration(String configPrefix,
        FilterConfig filterConfig) throws ServletException {
      final Properties props = super.getConfiguration(configPrefix, filterConfig);
      props.put(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
      props.put(AuthenticationFilter.AUTH_TYPE, "simple");
      return props;
    }
  }
@MethodSource("rounds")
@ParameterizedTest
void testInvalidApp(int round) {
ApplicationId appId = ApplicationId.newInstance(0, MAX_APPS + 1);
WebTarget r = target();
Response response =
r.path("ws").path("v1").path("applicationhistory").path("apps")
.path(appId.toString())
.queryParam("user.name", USERS[round])
.request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertResponseStatusCode("404 not found expected",
Response.Status.NOT_FOUND, response.getStatusInfo());
}
@MethodSource("rounds")
@ParameterizedTest
void testInvalidAttempt(int round) {
ApplicationId appId = ApplicationId.newInstance(0, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, MAX_APPS + 1);
WebTarget r = target();
when(request.getRemoteUser()).thenReturn(USERS[round]);
Response response =
r.path("ws").path("v1").path("applicationhistory").path("apps")
.path(appId.toString()).path("appattempts")
.path(appAttemptId.toString())
.queryParam("user.name", USERS[round])
.request(MediaType.APPLICATION_JSON)
.get(Response.class);
if (round == 1) {
assertResponseStatusCode(Response.Status.FORBIDDEN, response.getStatusInfo());
return;
}
assertResponseStatusCode("404 not found expected",
Response.Status.NOT_FOUND, response.getStatusInfo());
}
@MethodSource("rounds")
@ParameterizedTest
void testInvalidContainer(int round) throws Exception {
ApplicationId appId = ApplicationId.newInstance(0, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId containerId = ContainerId.newContainerId(appAttemptId,
MAX_APPS + 1);
WebTarget r = target();
when(request.getRemoteUser()).thenReturn(USERS[round]);
Response response =
r.path("ws").path("v1").path("applicationhistory").path("apps")
.path(appId.toString()).path("appattempts")
.path(appAttemptId.toString()).path("containers")
.path(containerId.toString())
.queryParam("user.name", USERS[round])
.request(MediaType.APPLICATION_JSON)
.get(Response.class);
if (round == 1) {
assertResponseStatusCode(Response.Status.FORBIDDEN, response.getStatusInfo());
return;
}
assertResponseStatusCode("404 not found expected",
Response.Status.NOT_FOUND, response.getStatusInfo());
}
  /**
   * A bogus resource path under the web service must produce 404 Not Found
   * with an empty error body.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  void testInvalidUri(int round) throws JSONException, Exception {
    WebTarget r = target();
    String responseStr = "";
    try {
      responseStr =
          r.path("ws").path("v1").path("applicationhistory").path("bogus")
              .queryParam("user.name", USERS[round])
              .request(MediaType.APPLICATION_JSON).get(String.class);
      fail("should have thrown exception on invalid uri");
    } catch (NotFoundException ue) {
      Response response = ue.getResponse();
      assertResponseStatusCode(NOT_FOUND, response.getStatusInfo());
      // responseStr is only assigned when the GET succeeds, so on the
      // expected failure path it must still be the empty string.
      WebServicesTestUtils.checkStringMatch(
          "error string exists and shouldn't", "", responseStr);
    }
  }
@MethodSource("rounds")
@ParameterizedTest
public void testInvalidUri2(int round) throws JSONException, Exception {
WebTarget r = target();
String responseStr = "";
try {
responseStr = r.queryParam("user.name", USERS[round])
.request(MediaType.APPLICATION_JSON).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (NotFoundException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(NOT_FOUND, response.getStatusInfo());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
  /**
   * Requesting an unsupported media type (text/plain on a JSON resource)
   * must produce 406 Not Acceptable with an empty error body.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  public void testInvalidAccept(int round) throws JSONException, Exception {
    WebTarget r = target();
    String responseStr = "";
    try {
      responseStr =
          r.path("ws").path("v1").path("applicationhistory")
              .queryParam("user.name", USERS[round])
              .request(MediaType.TEXT_PLAIN).get(String.class);
      fail("should have thrown exception on invalid uri");
    } catch (NotAcceptableException ue) {
      Response response = ue.getResponse();
      assertResponseStatusCode(NOT_ACCEPTABLE,
          response.getStatusInfo());
      // responseStr is only assigned on success, so it must still be empty.
      WebServicesTestUtils.checkStringMatch(
          "error string exists and shouldn't", "", responseStr);
    }
  }
  /**
   * The /about endpoint must report the same version/build metadata as a
   * freshly created {@code TimelineAbout} for the Generic History Service.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  public void testAbout(int round) throws Exception {
    WebTarget r = target().register(TimelineAboutReader.class);
    Response response = r
        .path("ws").path("v1").path("applicationhistory").path("about")
        .queryParam("user.name", USERS[round])
        .request(MediaType.APPLICATION_JSON).get(Response.class);
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    TimelineAbout actualAbout = response.readEntity(TimelineAbout.class);
    // Expected values come from the same factory the service uses.
    TimelineAbout expectedAbout =
        TimelineUtils.createTimelineAbout("Generic History Service API");
    assertNotNull(
        actualAbout, "Timeline service about response is null");
    assertEquals(expectedAbout.getAbout(), actualAbout.getAbout());
    assertEquals(expectedAbout.getTimelineServiceVersion(),
        actualAbout.getTimelineServiceVersion());
    assertEquals(expectedAbout.getTimelineServiceBuildVersion(),
        actualAbout.getTimelineServiceBuildVersion());
    assertEquals(expectedAbout.getTimelineServiceVersionBuiltOn(),
        actualAbout.getTimelineServiceVersionBuiltOn());
    assertEquals(expectedAbout.getHadoopVersion(),
        actualAbout.getHadoopVersion());
    assertEquals(expectedAbout.getHadoopBuildVersion(),
        actualAbout.getHadoopBuildVersion());
    assertEquals(expectedAbout.getHadoopVersionBuiltOn(),
        actualAbout.getHadoopVersionBuiltOn());
  }
  /**
   * Filtering /apps by state=FINISHED must return all MAX_APPS generated
   * applications (the fixture apparently creates every app in FINISHED
   * state — confirm against setup code outside this view).
   */
  @MethodSource("rounds")
  @ParameterizedTest
  void testAppsQuery(int round) throws Exception {
    WebTarget r = targetWithJsonObject();
    when(request.getRemoteUser()).thenReturn(USERS[round]);
    Response response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
            .queryParam("state", YarnApplicationState.FINISHED.toString())
            .queryParam("user.name", USERS[round])
            .request(MediaType.APPLICATION_JSON).get(Response.class);
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject apps = json.getJSONObject("apps");
    assertEquals(1, apps.length(), "incorrect number of elements");
    JSONArray array = apps.getJSONArray("app");
    assertEquals(MAX_APPS, array.length(), "incorrect number of elements");
  }
  /**
   * Filtering /apps by queue="test queue" must return MAX_APPS - 1 apps —
   * presumably one generated app lives in a different queue (confirm
   * against the fixture setup outside this view).
   */
  @MethodSource("rounds")
  @ParameterizedTest
  public void testQueueQuery(int round) throws Exception {
    WebTarget r = target();
    Response response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
            .queryParam("queue", "test queue")
            .queryParam("user.name", USERS[round])
            .request(MediaType.APPLICATION_JSON).get(Response.class);
    assertResponseStatusCode(OK, response.getStatusInfo());
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    String entity = response.readEntity(String.class);
    JSONObject json = new JSONObject(entity);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject apps = json.getJSONObject("apps");
    assertEquals(1, apps.length(), "incorrect number of elements");
    JSONArray array = apps.getJSONArray("app");
    assertEquals(MAX_APPS - 1,
        array.length(),
        "incorrect number of elements");
  }
  /**
   * Fetching a single application returns its full report; the diagnostics
   * field is only visible to the authorized user (round 0) and is redacted
   * to the empty string for the other user.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  void testSingleApp(int round) throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    WebTarget r = targetWithJsonObject();
    when(request.getRemoteUser()).thenReturn(USERS[round]);
    Response response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
            .path(appId.toString())
            .queryParam("user.name", USERS[round])
            .request(MediaType.APPLICATION_JSON)
            .get(Response.class);
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject app = json.getJSONObject("app");
    assertEquals(appId.toString(), app.getString("appId"));
    assertEquals("test app", app.get("name"));
    // Diagnostics are access-controlled: empty for the unauthorized user.
    assertEquals(round == 0 ? "test diagnostics info" : "",
        app.get("diagnosticsInfo"));
    // The fixture seeds submittedTime as Integer.MAX_VALUE + 1L (a long).
    assertEquals(Integer.MAX_VALUE + 1L, app.get("submittedTime"));
    assertEquals("test queue", app.get("queue"));
    assertEquals("user1", app.get("user"));
    assertEquals("test app type", app.get("type"));
    assertEquals(FinalApplicationStatus.UNDEFINED.toString(),
        app.get("finalAppStatus"));
    assertEquals(YarnApplicationState.FINISHED.toString(), app.get("appState"));
    assertNotNull(app.get("aggregateResourceAllocation"),
        "Aggregate resource allocation is null");
    assertNotNull(app.get("aggregatePreemptedResourceAllocation"),
        "Aggregate Preempted Resource Allocation is null");
  }
  /**
   * Listing attempts of an app: the unauthorized user (round 1) gets 403,
   * the authorized user gets all MAX_APPS generated attempts.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  void testMultipleAttempts(int round) throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    WebTarget r = targetWithJsonObject();
    when(request.getRemoteUser()).thenReturn(USERS[round]);
    Response response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
            .path(appId.toString()).path("appattempts")
            .queryParam("user.name", USERS[round])
            .request(MediaType.APPLICATION_JSON).get(Response.class);
    if (round == 1) {
      assertResponseStatusCode(FORBIDDEN, response.getStatusInfo());
      return;
    }
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject appAttempts = json.getJSONObject("appAttempts");
    assertEquals(1, appAttempts.length(), "incorrect number of elements");
    JSONArray array = appAttempts.getJSONArray("appAttempt");
    assertEquals(MAX_APPS, array.length(), "incorrect number of elements");
  }
  /**
   * Fetching a single attempt: the unauthorized user (round 1) gets 403;
   * otherwise the attempt report fields match the seeded fixture values.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  public void testSingleAttempt(int round) throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    WebTarget r = targetWithJsonObject();
    when(request.getRemoteUser()).thenReturn(USERS[round]);
    Response response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
            .path(appId.toString()).path("appattempts")
            .path(appAttemptId.toString())
            .queryParam("user.name", USERS[round])
            .request(MediaType.APPLICATION_JSON)
            .get(Response.class);
    if (round == 1) {
      assertResponseStatusCode(FORBIDDEN, response.getStatusInfo());
      return;
    }
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject appAttempt = json.getJSONObject("appAttempt");
    assertEquals(appAttemptId.toString(), appAttempt.getString("appAttemptId"));
    assertEquals("test host", appAttempt.getString("host"));
    assertEquals("test diagnostics info",
        appAttempt.getString("diagnosticsInfo"));
    assertEquals("test tracking url", appAttempt.getString("trackingUrl"));
    assertEquals(YarnApplicationAttemptState.FINISHED.toString(),
        appAttempt.get("appAttemptState"));
  }
  /**
   * Listing containers of an attempt: the unauthorized user (round 1) gets
   * 403, the authorized user gets all MAX_APPS generated containers.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  public void testMultipleContainers(int round) throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    WebTarget r = targetWithJsonObject();
    when(request.getRemoteUser()).thenReturn(USERS[round]);
    Response response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
            .path(appId.toString()).path("appattempts")
            .path(appAttemptId.toString()).path("containers")
            .queryParam("user.name", USERS[round])
            .request(MediaType.APPLICATION_JSON).get(Response.class);
    if (round == 1) {
      assertResponseStatusCode(FORBIDDEN, response.getStatusInfo());
      return;
    }
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject containers = json.getJSONObject("containers");
    assertEquals(1, containers.length(), "incorrect number of elements");
    JSONArray array = containers.getJSONArray("container");
    assertEquals(MAX_APPS, array.length(), "incorrect number of elements");
  }
  /**
   * Fetching a single container: the unauthorized user (round 1) gets 403;
   * otherwise the container report fields (including the AHS-relative
   * log URL) match the seeded fixture values.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  void testSingleContainer(int round) throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
    WebTarget r = targetWithJsonObject();
    when(request.getRemoteUser()).thenReturn(USERS[round]);
    Response response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
            .path(appId.toString()).path("appattempts")
            .path(appAttemptId.toString()).path("containers")
            .path(containerId.toString())
            .queryParam("user.name", USERS[round])
            .request(MediaType.APPLICATION_JSON)
            .get(Response.class);
    if (round == 1) {
      assertResponseStatusCode(FORBIDDEN, response.getStatusInfo());
      return;
    }
    assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject container = json.getJSONObject("container");
    assertEquals(containerId.toString(), container.getString("containerId"));
    assertEquals("test diagnostics info", container.getString("diagnosticsInfo"));
    // -1 values indicate the fixture did not set real resource allocations.
    assertEquals("-1", container.getString("allocatedMB"));
    assertEquals("-1", container.getString("allocatedVCores"));
    assertEquals(NodeId.newInstance("test host", 100).toString(),
        container.getString("assignedNodeId"));
    assertEquals("-1", container.getString("priority"));
    Configuration conf = new YarnConfiguration();
    // Log URL is built from the AHS web address plus node/container/user.
    assertEquals(WebAppUtils.getHttpSchemePrefix(conf) +
        WebAppUtils.getAHSWebAppURLWithoutScheme(conf) +
        "/applicationhistory/logs/test host:100/container_0_0001_01_000001/" +
        "container_0_0001_01_000001/user1", container.getString("logUrl"));
    assertEquals(ContainerState.COMPLETE.toString(),
        container.getString("containerState"));
  }
  /**
   * End-to-end check of container log retrieval for finished apps, served
   * from the aggregated logs in the remote file system. Covers: containers
   * the AHS knows about, containers it does not, apps it does not, both the
   * old (/containerlogs) and new (/containers/.../logs) APIs, and the
   * "size" query parameter (positive = head bytes, negative = tail bytes,
   * oversized = full log).
   */
  @MethodSource("rounds")
  @ParameterizedTest
  @Timeout(10000)
  void testContainerLogsForFinishedApps(int round) throws Exception {
    String fileName = "syslog";
    String user = "user1";
    NodeId nodeId = NodeId.newInstance("test host", 100);
    NodeId nodeId2 = NodeId.newInstance("host2", 1234);
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
        appId, 1);
    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 1);
    ContainerId containerId100 = ContainerId.newContainerId(appAttemptId, 100);
    // containerId1 is known to AHS; containerId100 (on a second node) is not.
    TestContainerLogsUtils.createContainerLogFileInRemoteFS(conf, fs,
        rootLogDir, appId, Collections.singletonMap(containerId1,
        "Hello." + containerId1),
        nodeId, fileName, user, true);
    TestContainerLogsUtils.createContainerLogFileInRemoteFS(conf, fs,
        rootLogDir, appId, Collections.singletonMap(containerId100,
        "Hello." + containerId100),
        nodeId2, fileName, user, false);
    // test whether we can find container log from remote diretory if
    // the containerInfo for this container could be fetched from AHS.
    WebTarget r = target();
    Response response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1.toString()).path(fileName)
        .queryParam("user.name", user)
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    String responseText = response.readEntity(String.class);
    assertTrue(responseText.contains("Hello." + containerId1));
    // Do the same test with new API
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1.toString()).path("logs").path(fileName)
        .queryParam("user.name", user)
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertTrue(responseText.contains("Hello." + containerId1));
    // test whether we can find container log from remote directory if
    // the containerInfo for this container could not be fetched from AHS.
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId100.toString()).path(fileName)
        .queryParam("user.name", user)
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertTrue(responseText.contains("Hello." + containerId100));
    // Do the same test with new API
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId100.toString()).path("logs").path(fileName)
        .queryParam("user.name", user)
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertTrue(responseText.contains("Hello." + containerId100));
    // create an application which can not be found from AHS
    ApplicationId appId100 = ApplicationId.newInstance(0, 100);
    ApplicationAttemptId appAttemptId100 = ApplicationAttemptId.newInstance(
        appId100, 1);
    ContainerId containerId1ForApp100 = ContainerId.newContainerId(
        appAttemptId100, 1);
    TestContainerLogsUtils.createContainerLogFileInRemoteFS(conf, fs,
        rootLogDir, appId100,
        Collections.singletonMap(containerId1ForApp100,
        "Hello." + containerId1ForApp100),
        nodeId, fileName, user, true);
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1ForApp100.toString()).path(fileName)
        .queryParam("user.name", user)
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertTrue(responseText.contains("Hello." + containerId1ForApp100));
    // The full response = header + log content + tail separator; the size
    // computations below isolate the log-content portion of the response.
    int fullTextSize = responseText.getBytes().length;
    String tailEndSeparator = StringUtils.repeat("*",
        "End of LogType:syslog".length() + 50) + "\n\n";
    int tailTextSize = "\nEnd of LogType:syslog\n".getBytes().length
        + tailEndSeparator.getBytes().length;
    String logMessage = "Hello." + containerId1ForApp100;
    int fileContentSize = logMessage.getBytes().length;
    // specify how many bytes we should get from logs
    // if we specify a position number, it would get the first n bytes from
    // container log
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1ForApp100.toString()).path(fileName)
        .queryParam("user.name", user)
        .queryParam("size", "5")
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertEquals(responseText.getBytes().length,
        (fullTextSize - fileContentSize) + 5);
    assertTrue(fullTextSize >= responseText.getBytes().length);
    assertEquals(new String(responseText.getBytes(),
        (fullTextSize - fileContentSize - tailTextSize), 5),
        new String(logMessage.getBytes(), 0, 5));
    // specify how many bytes we should get from logs
    // if we specify a negative number, it would get the last n bytes from
    // container log
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1ForApp100.toString()).path(fileName)
        .queryParam("user.name", user)
        .queryParam("size", "-5")
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertEquals(responseText.getBytes().length,
        (fullTextSize - fileContentSize) + 5);
    assertTrue(fullTextSize >= responseText.getBytes().length);
    assertEquals(new String(responseText.getBytes(),
        (fullTextSize - fileContentSize - tailTextSize), 5),
        new String(logMessage.getBytes(), fileContentSize - 5, 5));
    // specify the bytes which is larger than the actual file size,
    // we would get the full logs
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1ForApp100.toString()).path(fileName)
        .queryParam("user.name", user)
        .queryParam("size", "10000")
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertThat(responseText.getBytes()).hasSize(fullTextSize);
    r = target();
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1ForApp100.toString()).path(fileName)
        .queryParam("user.name", user)
        .queryParam("size", "-10000")
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertThat(responseText.getBytes()).hasSize(fullTextSize);
  }
  /**
   * Log retrieval for running apps: requests for a live container are
   * redirected to the node manager serving it (to the tracked NM address,
   * or to the NM named via the NM_ID query parameter). When the container
   * is unknown to ATS, or the NM address cannot be resolved, or the request
   * is itself a redirect, the service falls back to the aggregated logs in
   * the remote file system.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  @Timeout(10000)
  public void testContainerLogsForRunningApps(int round) throws Exception {
    String fileName = "syslog";
    String user = "user1";
    ApplicationId appId = ApplicationId.newInstance(
        1234, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 1);
    WebTarget r = target();
    when(request.getRemoteUser()).thenReturn(user);
    URI requestURI = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1.toString()).path(fileName)
        .queryParam("user.name", user).getUri();
    String redirectURL = getRedirectURL(requestURI.toString());
    assertTrue(redirectURL != null);
    assertTrue(redirectURL.contains("test:1234"));
    assertTrue(redirectURL.contains("ws/v1/node/containers"));
    assertTrue(redirectURL.contains(containerId1.toString()));
    assertTrue(redirectURL.contains("/logs/" + fileName));
    assertTrue(redirectURL.contains("user.name=" + user));
    // If we specify NM id, we would re-direct the request
    // to this NM's Web Address.
    requestURI = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1.toString()).path(fileName)
        .queryParam("user.name", user)
        .queryParam(YarnWebServiceParams.NM_ID, NM_ID)
        .getUri();
    redirectURL = getRedirectURL(requestURI.toString());
    assertTrue(redirectURL != null);
    assertTrue(redirectURL.contains(NM_WEBADDRESS));
    assertTrue(redirectURL.contains("ws/v1/node/containers"));
    assertTrue(redirectURL.contains(containerId1.toString()));
    assertTrue(redirectURL.contains("/logs/" + fileName));
    assertTrue(redirectURL.contains("user.name=" + user));
    // Test with new API
    requestURI = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1.toString()).path("logs").path(fileName)
        .queryParam("user.name", user).getUri();
    redirectURL = getRedirectURL(requestURI.toString());
    assertTrue(redirectURL != null);
    assertTrue(redirectURL.contains("test:1234"));
    assertTrue(redirectURL.contains("ws/v1/node/containers"));
    assertTrue(redirectURL.contains(containerId1.toString()));
    assertTrue(redirectURL.contains("/logs/" + fileName));
    assertTrue(redirectURL.contains("user.name=" + user));
    requestURI = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1.toString()).path("logs").path(fileName)
        .queryParam("user.name", user)
        .queryParam(YarnWebServiceParams.NM_ID, NM_ID)
        .getUri();
    redirectURL = getRedirectURL(requestURI.toString());
    assertTrue(redirectURL != null);
    assertTrue(redirectURL.contains(NM_WEBADDRESS));
    assertTrue(redirectURL.contains("ws/v1/node/containers"));
    assertTrue(redirectURL.contains(containerId1.toString()));
    assertTrue(redirectURL.contains("/logs/" + fileName));
    assertTrue(redirectURL.contains("user.name=" + user));
    // If we can not container information from ATS, we would try to
    // get aggregated log from remote FileSystem.
    ContainerId containerId1000 = ContainerId.newContainerId(
        appAttemptId, 1000);
    String content = "Hello." + containerId1000;
    NodeId nodeId = NodeId.newInstance("test host", 100);
    TestContainerLogsUtils.createContainerLogFileInRemoteFS(conf, fs,
        rootLogDir, appId, Collections.singletonMap(containerId1000, content),
        nodeId, fileName, user, true);
    r = target();
    Response response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1000.toString()).path(fileName)
        .queryParam("user.name", user)
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    String responseText = response.readEntity(String.class);
    assertTrue(responseText.contains(content));
    // Also test whether we output the empty local container log, and give
    // the warning message.
    assertTrue(responseText.contains("LogAggregationType: "
        + ContainerLogAggregationType.LOCAL));
    assertTrue(
        responseText.contains(LogWebServiceUtils.getNoRedirectWarning()));
    // If we can not container information from ATS, and we specify the NM id,
    // but we can not get nm web address, we would still try to
    // get aggregated log from remote FileSystem.
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containerlogs")
        .path(containerId1000.toString()).path(fileName)
        .queryParam(YarnWebServiceParams.NM_ID, "invalid-nm:1234")
        .queryParam("user.name", user)
        .request(MediaType.TEXT_PLAIN)
        .get(Response.class);
    responseText = response.readEntity(String.class);
    assertTrue(responseText.contains(content));
    assertTrue(responseText.contains("LogAggregationType: "
        + ContainerLogAggregationType.LOCAL));
    assertTrue(
        responseText.contains(LogWebServiceUtils.getNoRedirectWarning()));
    // If this is the redirect request, we would not re-direct the request
    // back and get the aggregated logs.
    String content1 = "Hello." + containerId1;
    NodeId nodeId1 = NodeId.fromString(NM_ID);
    TestContainerLogsUtils.createContainerLogFileInRemoteFS(conf, fs,
        rootLogDir, appId, Collections.singletonMap(containerId1, content1),
        nodeId1, fileName, user, true);
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1.toString()).path("logs").path(fileName)
        .queryParam("user.name", user)
        .queryParam(YarnWebServiceParams.REDIRECTED_FROM_NODE, "true")
        .request(MediaType.TEXT_PLAIN).get(Response.class);
    responseText = response.readEntity(String.class);
    assertTrue(responseText.contains(content1));
    assertTrue(responseText.contains("LogAggregationType: "
        + ContainerLogAggregationType.AGGREGATED));
  }
  /**
   * Log *metadata* retrieval for running apps: requests for a known live
   * container are redirected to its node manager (explicit NM_ID, or the
   * NM tracked by ATS); for a container unknown to ATS — with or without a
   * resolvable NM id — the service answers directly with the aggregated-log
   * metadata from the remote file system plus a LOCAL placeholder entry.
   */
  @MethodSource("rounds")
  @ParameterizedTest
  @Timeout(10000)
  public void testContainerLogsMetaForRunningApps(int round) throws Exception {
    String user = "user1";
    ApplicationId appId = ApplicationId.newInstance(
        1234, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 1);
    WebTarget r = target().register(ContainerLogsInfoListReader.class);
    // If we specify the NMID, we re-direct the request by using
    // the NM's web address
    URI requestURI = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1.toString()).path("logs")
        .queryParam("user.name", user)
        .queryParam(YarnWebServiceParams.NM_ID, NM_ID)
        .getUri();
    String redirectURL = getRedirectURL(requestURI.toString());
    assertTrue(redirectURL != null);
    assertTrue(redirectURL.contains(NM_WEBADDRESS));
    assertTrue(redirectURL.contains("ws/v1/node/containers"));
    assertTrue(redirectURL.contains(containerId1.toString()));
    assertTrue(redirectURL.contains("/logs"));
    // If we do not specify the NodeId but can get Container information
    // from ATS, we re-direct the request to the node manager
    // who runs the container.
    requestURI = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1.toString()).path("logs")
        .queryParam("user.name", user).getUri();
    redirectURL = getRedirectURL(requestURI.toString());
    assertTrue(redirectURL != null);
    assertTrue(redirectURL.contains("test:1234"));
    assertTrue(redirectURL.contains("ws/v1/node/containers"));
    assertTrue(redirectURL.contains(containerId1.toString()));
    assertTrue(redirectURL.contains("/logs"));
    // If we can not container information from ATS,
    // and not specify nodeId,
    // we would try to get aggregated log meta from remote FileSystem.
    ContainerId containerId1000 = ContainerId.newContainerId(
        appAttemptId, 1000);
    String fileName = "syslog";
    String content = "Hello." + containerId1000;
    NodeId nodeId = NodeId.newInstance("test host", 100);
    TestContainerLogsUtils.createContainerLogFileInRemoteFS(conf, fs,
        rootLogDir, appId, Collections.singletonMap(containerId1000, content),
        nodeId, fileName, user, true);
    when(request.getRemoteUser()).thenReturn(user);
    Response response = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1000.toString()).path("logs")
        .queryParam("user.name", user)
        .request(MediaType.APPLICATION_JSON)
        .get(Response.class);
    List<ContainerLogsInfo> responseText = response.readEntity(new GenericType<
        List<ContainerLogsInfo>>(){
    });
    // Expect two entries: one AGGREGATED (with real file meta) and one LOCAL.
    assertTrue(responseText.size() == 2);
    for (ContainerLogsInfo logInfo : responseText) {
      if (logInfo.getLogType().equals(
          ContainerLogAggregationType.AGGREGATED.toString())) {
        List<ContainerLogFileInfo> logMeta = logInfo
            .getContainerLogsInfo();
        assertTrue(logMeta.size() == 1);
        assertThat(logMeta.get(0).getFileName()).isEqualTo(fileName);
        assertThat(logMeta.get(0).getFileSize()).isEqualTo(String.valueOf(
            content.length()));
      } else {
        assertEquals(logInfo.getLogType(),
            ContainerLogAggregationType.LOCAL.toString());
      }
    }
    // If we can not container information from ATS,
    // and we specify NM id, but can not find NM WebAddress for this nodeId,
    // we would still try to get aggregated log meta from remote FileSystem.
    response = r.path("ws").path("v1")
        .path("applicationhistory").path("containers")
        .path(containerId1000.toString()).path("logs")
        .queryParam(YarnWebServiceParams.NM_ID, "invalid-nm:1234")
        .queryParam("user.name", user)
        .request(MediaType.APPLICATION_JSON)
        .get(Response.class);
    responseText = response.readEntity(new GenericType<
        List<ContainerLogsInfo>>(){
    });
    assertTrue(responseText.size() == 2);
    for (ContainerLogsInfo logInfo : responseText) {
      if (logInfo.getLogType().equals(
          ContainerLogAggregationType.AGGREGATED.toString())) {
        List<ContainerLogFileInfo> logMeta = logInfo
            .getContainerLogsInfo();
        assertTrue(logMeta.size() == 1);
        assertThat(logMeta.get(0).getFileName()).isEqualTo(fileName);
        assertThat(logMeta.get(0).getFileSize()).isEqualTo(String.valueOf(
            content.length()));
      } else {
        assertThat(logInfo.getLogType()).isEqualTo(
            ContainerLogAggregationType.LOCAL.toString());
      }
    }
  }
@MethodSource("rounds")
@ParameterizedTest
@Timeout(10000)
void testContainerLogsMetaForFinishedApps(int round) throws Exception {
ApplicationId appId = ApplicationId.newInstance(0, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 1);
String fileName = "syslog";
String user = "user1";
String content = "Hello." + containerId1;
NodeId nodeId = NodeId.newInstance("test host", 100);
TestContainerLogsUtils.createContainerLogFileInRemoteFS(conf, fs,
rootLogDir, appId, Collections.singletonMap(containerId1, content),
nodeId, fileName, user, true);
WebTarget r = target().register(ContainerLogsInfoListReader.class);
Response response = r.path("ws").path("v1")
.path("applicationhistory").path("containers")
.path(containerId1.toString()).path("logs")
.queryParam("user.name", user)
.request(MediaType.APPLICATION_JSON)
.get(Response.class);
List<ContainerLogsInfo> responseText = response.readEntity(new GenericType<
List<ContainerLogsInfo>>(){
});
assertTrue(responseText.size() == 1);
assertEquals(responseText.get(0).getLogType(),
ContainerLogAggregationType.AGGREGATED.toString());
List<ContainerLogFileInfo> logMeta = responseText.get(0)
.getContainerLogsInfo();
assertTrue(logMeta.size() == 1);
assertThat(logMeta.get(0).getFileName()).isEqualTo(fileName);
assertThat(logMeta.get(0).getFileSize()).isEqualTo(
String.valueOf(content.length()));
}
private static String getRedirectURL(String url) {
String redirectUrl = null;
try {
HttpURLConnection conn = (HttpURLConnection) new URL(url)
.openConnection();
// do not automatically follow the redirection
// otherwise we get too many redirections exception
conn.setInstanceFollowRedirects(false);
if (conn.getResponseCode() == HttpServletResponse.SC_TEMPORARY_REDIRECT) {
redirectUrl = conn.getHeaderField("Location");
String queryParams = getQueryParams(url);
if (queryParams != null && !queryParams.isEmpty()) {
redirectUrl = appendQueryParams(redirectUrl, queryParams);
}
}
} catch (Exception e) {
// throw new RuntimeException(e);
}
return redirectUrl;
}
private static String getQueryParams(String url) {
try {
URL u = new URL(url);
String query = u.getQuery();
return query != null ? query : "";
} catch (Exception e) {
e.printStackTrace();
return "";
}
}
private static String appendQueryParams(String url, String queryParams) {
if (url == null || queryParams == null || queryParams.isEmpty()) {
return url;
}
return url + (url.contains("?") ? "&" : "?") + queryParams;
}
}
|
TestSimpleAuthFilter
|
java
|
apache__camel
|
core/camel-console/src/main/java/org/apache/camel/impl/console/ServiceDevConsole.java
|
{
"start": 1303,
"end": 3856
}
|
class ____ extends AbstractDevConsole {
public ServiceDevConsole() {
super("camel", "service", "Services", "Services used for network communication with clients");
}
@Override
protected String doCallText(Map<String, Object> options) {
StringBuilder sb = new StringBuilder();
EndpointServiceRegistry esr = getCamelContext().getCamelContextExtension().getEndpointServiceRegistry();
for (EndpointServiceRegistry.EndpointService es : esr.listAllEndpointServices()) {
if (!sb.isEmpty()) {
sb.append("\n");
}
sb.append(String.format("\n Component: %s", es.getComponent()));
sb.append(String.format("\n Direction: %s", es.getDirection()));
sb.append(String.format("\n Hosted: %b", es.isHostedService()));
sb.append(String.format("\n Protocol: %s", es.getServiceProtocol()));
sb.append(String.format("\n Service: %s", es.getServiceUrl()));
sb.append(String.format("\n Endpoint: %s", URISupport.sanitizeUri(es.getServiceUrl())));
if (es.getRouteId() != null) {
sb.append(String.format("\n Route Id: %s", es.getRouteId()));
}
sb.append(String.format("\n Total Messages: %d", es.getHits()));
}
sb.append("\n");
return sb.toString();
}
@Override
protected Map<String, Object> doCallJson(Map<String, Object> options) {
JsonObject root = new JsonObject();
List<JsonObject> list = new ArrayList<>();
root.put("services", list);
EndpointServiceRegistry esr = getCamelContext().getCamelContextExtension().getEndpointServiceRegistry();
for (EndpointServiceRegistry.EndpointService es : esr.listAllEndpointServices()) {
JsonObject jo = new JsonObject();
jo.put("component", es.getComponent());
jo.put("direction", es.getDirection());
jo.put("hosted", es.isHostedService());
jo.put("protocol", es.getServiceProtocol());
jo.put("serviceUrl", es.getServiceUrl());
jo.put("endpointUri", es.getEndpointUri());
if (es.getRouteId() != null) {
jo.put("routeId", es.getRouteId());
}
jo.put("hits", es.getHits());
var map = es.getServiceMetadata();
if (map != null) {
jo.put("metadata", map);
}
list.add(jo);
}
return root;
}
}
|
ServiceDevConsole
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/security/UserSystemExitException.java
|
{
"start": 901,
"end": 1319
}
|
class ____ extends SecurityException {
private static final long serialVersionUID = -5732392967744534811L;
public UserSystemExitException() {
this("Flink user code attempted to exit JVM.");
}
public UserSystemExitException(String msg) {
super(msg);
}
public UserSystemExitException(String message, Throwable cause) {
super(message, cause);
}
}
|
UserSystemExitException
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/util/LinkedArrayList.java
|
{
"start": 828,
"end": 927
}
|
class ____ non final to allow embedding it directly and thus saving on object allocation.
*/
public
|
is
|
java
|
apache__flink
|
flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/AddBoolBeforeReturnRewriter.java
|
{
"start": 5011,
"end": 5410
}
|
class ____ extends JavaParserBaseVisitor<Void> {
private final String key;
private final String boolVarName;
private InnerVisitor(String key, String boolVarName) {
this.key = key;
this.boolVarName = boolVarName;
}
@Override
public Void visitClassBody(JavaParser.ClassBodyContext ctx) {
// skip anonymous
|
InnerVisitor
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/engine/CamelPostProcessorHelperTest.java
|
{
"start": 27636,
"end": 27929
}
|
class ____ extends SynchronizationAdapter {
private boolean onDone;
@Override
public void onDone(Exchange exchange) {
onDone = true;
}
public boolean isOnDone() {
return onDone;
}
}
public static
|
MySynchronization
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedSession.java
|
{
"start": 44097,
"end": 45727
}
|
class ____ implements AutoCloseable {
final Session session;
final boolean closeOnEnd;
final boolean allowModification;
SessionResult(Session session, boolean closeOnEnd, boolean allowModification) {
this.session = session;
this.closeOnEnd = closeOnEnd;
this.allowModification = allowModification;
}
@Override
public void close() {
if (closeOnEnd) {
session.close();
}
}
}
@Override
public <T> RootGraph<T> createEntityGraph(Class<T> rootType, String graphName) {
try (SessionResult emr = acquireSession()) {
return emr.session.createEntityGraph(rootType, graphName);
}
}
@Override
public SessionFactory getFactory() {
return sessionFactory;
}
@Override
public int getFetchBatchSize() {
try (SessionResult emr = acquireSession()) {
return emr.session.getFetchBatchSize();
}
}
@Override
public void setFetchBatchSize(int batchSize) {
try (SessionResult emr = acquireSession()) {
emr.session.setFetchBatchSize(batchSize);
}
}
@Override
public boolean isSubselectFetchingEnabled() {
try (SessionResult emr = acquireSession()) {
return emr.session.isSubselectFetchingEnabled();
}
}
@Override
public void setSubselectFetchingEnabled(boolean enabled) {
try (SessionResult emr = acquireSession()) {
emr.session.setSubselectFetchingEnabled(enabled);
}
}
}
|
SessionResult
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/TruthIncompatibleTypeTest.java
|
{
"start": 15177,
"end": 15763
}
|
class ____ {
public void f(Multimap<String, Long> xs) {
// BUG: Diagnostic contains:
assertThat(xs).containsExactly("", 1L, "foo", 2L, "bar", 3);
}
}
""")
.doTest();
}
@Test
public void streamContainsExactly() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.Multimap;
import java.util.stream.Stream;
public
|
Test
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/io/disk/iomanager/BufferFileWriterReaderTest.java
|
{
"start": 1667,
"end": 7295
}
|
class ____ {
private static final int BUFFER_SIZE = 32 * 1024;
private static final BufferRecycler BUFFER_RECYCLER = FreeingBufferRecycler.INSTANCE;
private static final Random random = new Random();
private static final IOManager ioManager = new IOManagerAsync();
private BufferFileWriter writer;
private BufferFileReader reader;
private LinkedBlockingQueue<Buffer> returnedBuffers = new LinkedBlockingQueue<>();
@AfterAll
static void shutdown() throws Exception {
ioManager.close();
}
@BeforeEach
void setUpWriterAndReader() {
final FileIOChannel.ID channel = ioManager.createChannel();
try {
writer = ioManager.createBufferFileWriter(channel);
reader =
ioManager.createBufferFileReader(
channel, new QueuingCallback<>(returnedBuffers));
} catch (IOException e) {
if (writer != null) {
writer.deleteChannel();
}
if (reader != null) {
reader.deleteChannel();
}
fail("Failed to setup writer and reader.");
}
}
@AfterEach
void tearDownWriterAndReader() {
if (writer != null) {
writer.deleteChannel();
}
if (reader != null) {
reader.deleteChannel();
}
returnedBuffers.clear();
}
@Test
void testWriteRead() throws IOException {
int numBuffers = 1024;
int currentNumber = 0;
final int minBufferSize = BUFFER_SIZE / 4;
// Write buffers filled with ascending numbers...
for (int i = 0; i < numBuffers; i++) {
final Buffer buffer = createBuffer();
int size = getNextMultipleOf(getRandomNumberInRange(minBufferSize, BUFFER_SIZE), 4);
currentNumber = fillBufferWithAscendingNumbers(buffer, currentNumber, size);
writer.writeBlock(buffer);
}
// Make sure that the writes are finished
writer.close();
// Read buffers back in...
for (int i = 0; i < numBuffers; i++) {
assertThat(reader.hasReachedEndOfFile()).isFalse();
reader.readInto(createBuffer());
}
reader.close();
assertThat(reader.hasReachedEndOfFile()).isTrue();
// Verify that the content is the same
assertThat(returnedBuffers)
.withFailMessage("Read less buffers than written.")
.hasSize(numBuffers);
currentNumber = 0;
Buffer buffer;
while ((buffer = returnedBuffers.poll()) != null) {
currentNumber = verifyBufferFilledWithAscendingNumbers(buffer, currentNumber);
}
}
@Test
void testWriteSkipRead() throws IOException {
int numBuffers = 1024;
int currentNumber = 0;
// Write buffers filled with ascending numbers...
for (int i = 0; i < numBuffers; i++) {
final Buffer buffer = createBuffer();
currentNumber =
fillBufferWithAscendingNumbers(buffer, currentNumber, buffer.getMaxCapacity());
writer.writeBlock(buffer);
}
// Make sure that the writes are finished
writer.close();
final int toSkip = 32;
// Skip first buffers...
reader.seekToPosition((8 + BUFFER_SIZE) * toSkip);
numBuffers -= toSkip;
// Read buffers back in...
for (int i = 0; i < numBuffers; i++) {
assertThat(reader.hasReachedEndOfFile()).isFalse();
reader.readInto(createBuffer());
}
reader.close();
assertThat(reader.hasReachedEndOfFile()).isTrue();
// Verify that the content is the same
assertThat(returnedBuffers)
.withFailMessage("Read less buffers than written.")
.hasSize(numBuffers);
// Start number after skipped buffers...
currentNumber = (BUFFER_SIZE / 4) * toSkip;
Buffer buffer;
while ((buffer = returnedBuffers.poll()) != null) {
currentNumber = verifyBufferFilledWithAscendingNumbers(buffer, currentNumber);
}
}
// ------------------------------------------------------------------------
private int getRandomNumberInRange(int min, int max) {
return random.nextInt((max - min) + 1) + min;
}
private int getNextMultipleOf(int number, int multiple) {
final int mod = number % multiple;
if (mod == 0) {
return number;
}
return number + multiple - mod;
}
private Buffer createBuffer() {
return new NetworkBuffer(
MemorySegmentFactory.allocateUnpooledSegment(BUFFER_SIZE), BUFFER_RECYCLER);
}
static int fillBufferWithAscendingNumbers(Buffer buffer, int currentNumber, int size) {
checkArgument(size % 4 == 0);
MemorySegment segment = buffer.getMemorySegment();
for (int i = 0; i < size; i += 4) {
segment.putInt(i, currentNumber++);
}
buffer.setSize(size);
return currentNumber;
}
static int verifyBufferFilledWithAscendingNumbers(Buffer buffer, int currentNumber) {
MemorySegment segment = buffer.getMemorySegment();
int size = buffer.getSize();
for (int i = 0; i < size; i += 4) {
if (segment.getInt(i) != currentNumber++) {
throw new IllegalStateException("Read unexpected number from buffer.");
}
}
return currentNumber;
}
}
|
BufferFileWriterReaderTest
|
java
|
netty__netty
|
codec-native-quic/src/test/java/io/netty/handler/codec/quic/InsecureQuicTokenHandlerTest.java
|
{
"start": 1117,
"end": 3305
}
|
class ____ extends AbstractQuicTest {
@Test
public void testMaxTokenLength() {
assertEquals(InsecureQuicTokenHandler.MAX_TOKEN_LEN, InsecureQuicTokenHandler.INSTANCE.maxTokenLength());
}
@Test
public void testTokenProcessingIpv4() throws UnknownHostException {
testTokenProcessing(true);
}
@Test
public void testTokenProcessingIpv6() throws UnknownHostException {
testTokenProcessing(false);
}
private static void testTokenProcessing(boolean ipv4) throws UnknownHostException {
byte[] bytes = new byte[Quiche.QUICHE_MAX_CONN_ID_LEN];
ThreadLocalRandom.current().nextBytes(bytes);
ByteBuf dcid = Unpooled.wrappedBuffer(bytes);
ByteBuf out = Unpooled.buffer();
try {
final InetSocketAddress validAddress;
final InetSocketAddress invalidAddress;
if (ipv4) {
validAddress = new InetSocketAddress(
InetAddress.getByAddress(new byte[] { 10, 10, 10, 1}), 9999);
invalidAddress = new InetSocketAddress(
InetAddress.getByAddress(new byte[] { 10, 10, 10, 10}), 9999);
} else {
validAddress = new InetSocketAddress(InetAddress.getByAddress(
new byte[] { 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 1}), 9999);
invalidAddress = new InetSocketAddress(InetAddress.getByAddress(
new byte[] { 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10}), 9999);
}
InsecureQuicTokenHandler.INSTANCE.writeToken(out, dcid, validAddress);
assertThat(out.readableBytes()).isLessThanOrEqualTo(InsecureQuicTokenHandler.INSTANCE.maxTokenLength());
assertNotEquals(-1, InsecureQuicTokenHandler.INSTANCE.validateToken(out, validAddress));
// Use another address and check that the validate fails.
assertEquals(-1, InsecureQuicTokenHandler.INSTANCE.validateToken(out, invalidAddress));
} finally {
dcid.release();
out.release();
}
}
}
|
InsecureQuicTokenHandlerTest
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/legacy/operations/ddl/AlterTableOptionsOperation.java
|
{
"start": 1541,
"end": 2899
}
|
class ____ extends AlterTableOperation {
private final CatalogTable catalogTable;
public AlterTableOptionsOperation(
ObjectIdentifier tableIdentifier,
CatalogTable catalogTable,
boolean ignoreIfNotExists) {
super(tableIdentifier, ignoreIfNotExists);
this.catalogTable = catalogTable;
}
public CatalogTable getCatalogTable() {
return catalogTable;
}
@Override
public String asSummaryString() {
String description =
catalogTable.getOptions().entrySet().stream()
.map(
entry ->
OperationUtils.formatParameter(
entry.getKey(), entry.getValue()))
.collect(Collectors.joining(", "));
return String.format(
"ALTER %sTABLE %s SET (%s)",
ignoreIfTableNotExists ? "IF EXISTS " : "",
tableIdentifier.asSummaryString(),
description);
}
@Override
public TableResultInternal execute(Context ctx) {
ctx.getCatalogManager()
.alterTable(getCatalogTable(), getTableIdentifier(), ignoreIfTableNotExists());
return TableResultImpl.TABLE_RESULT_OK;
}
}
|
AlterTableOptionsOperation
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/entityNames/singleAssociatedAudited/SingleDomainObjectToMultipleTablesTest.java
|
{
"start": 921,
"end": 2288
}
|
class ____ {
private long carId;
private long ownerId;
private long driverId;
@Test
public void testSingleDomainObjectToMultipleTablesMapping(SessionFactoryScope scope) {
scope.inSession( session -> {
//REV 1
session.getTransaction().begin();
Person owner = new Person( "Lukasz", 25 );
Person driver = new Person( "Kinga", 24 );
Car car = new Car( 1, owner, driver );
session.persist( "Personaje", owner );
session.persist( "Driveraje", driver );
session.persist( car );
session.getTransaction().commit();
carId = car.getId();
ownerId = owner.getId();
driverId = driver.getId();
final var auditReader = AuditReaderFactory.get( session );
doAssertions( auditReader );
} );
}
public void doAssertions(AuditReader auditReader) {
Car carVer1 = auditReader.find( Car.class, carId, 1 );
Person ownerVer1 = auditReader.find( Person.class, "Personaje", ownerId, 1 );
Person driverVer1 = auditReader.find( Person.class, "Driveraje", driverId, 1 );
/* Check ids. */
assertEquals( ownerVer1.getId(), carVer1.getOwner().getId() );
assertEquals( driverVer1.getId(), carVer1.getDriver().getId() );
/* Check object properties. */
assertEquals( "Lukasz", ownerVer1.getName() );
assertEquals( "Kinga", driverVer1.getName() );
assertEquals( 1, carVer1.getNumber() );
}
}
|
SingleDomainObjectToMultipleTablesTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-integration/src/test/java/org/springframework/boot/integration/endpoint/IntegrationGraphEndpointTests.java
|
{
"start": 1554,
"end": 2633
}
|
class ____ {
private final IntegrationGraphServer server = mock(IntegrationGraphServer.class);
private final IntegrationGraphEndpoint endpoint = new IntegrationGraphEndpoint(this.server);
@Test
void readOperationShouldReturnGraph() {
Graph graph = mock(Graph.class);
Map<String, Object> contentDescriptor = new LinkedHashMap<>();
Collection<IntegrationNode> nodes = new ArrayList<>();
Collection<LinkNode> links = new ArrayList<>();
given(graph.contentDescriptor()).willReturn(contentDescriptor);
given(graph.nodes()).willReturn(nodes);
given(graph.links()).willReturn(links);
given(this.server.getGraph()).willReturn(graph);
GraphDescriptor descriptor = this.endpoint.graph();
then(this.server).should().getGraph();
assertThat(descriptor.getContentDescriptor()).isSameAs(contentDescriptor);
assertThat(descriptor.getNodes()).isSameAs(nodes);
assertThat(descriptor.getLinks()).isSameAs(links);
}
@Test
void writeOperationShouldRebuildGraph() {
this.endpoint.rebuild();
then(this.server).should().rebuild();
}
}
|
IntegrationGraphEndpointTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/AnnotationTest3.java
|
{
"start": 182,
"end": 487
}
|
class ____ extends TestCase {
public void test_supperField() throws Exception {
C c = new C();
c.setId(123);
c.setName("jobs");
String str = JSON.toJSONString(c);
Assert.assertEquals("{\"ID\":123,\"name\":\"jobs\"}", str);
}
public static
|
AnnotationTest3
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/response/IbmWatsonxEmbeddingsResponseEntity.java
|
{
"start": 1440,
"end": 3203
}
|
class ____ {
private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in IBM watsonx embeddings response";
public static DenseEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
moveToFirstToken(jsonParser);
XContentParser.Token token = jsonParser.currentToken();
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser);
positionParserAtTokenAfterField(jsonParser, "results", FAILED_TO_FIND_FIELD_TEMPLATE);
List<DenseEmbeddingFloatResults.Embedding> embeddingList = parseList(
jsonParser,
IbmWatsonxEmbeddingsResponseEntity::parseEmbeddingObject
);
return new DenseEmbeddingFloatResults(embeddingList);
}
}
private static DenseEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE);
List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
// parse and discard the rest of the object
consumeUntilObjectEnd(parser);
return DenseEmbeddingFloatResults.Embedding.of(embeddingValuesList);
}
private IbmWatsonxEmbeddingsResponseEntity() {}
}
|
IbmWatsonxEmbeddingsResponseEntity
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/sample/DummyRepository.java
|
{
"start": 971,
"end": 2230
}
|
interface ____ extends CrudRepository<Dummy, Integer> {
@Procedure("procedure_in1_out1")
Integer adHocProcedureWith1InputAnd1OutputParameter(Integer in);
@Procedure("procedure_in1_out0")
void adHocProcedureWith1InputAndNoOutputParameter(Integer in);
@Procedure("procedure_in0_out1")
Integer adHocProcedureWithNoInputAnd1OutputParameter();
@Procedure("procedure_in1_out0_return_rs_no_update")
List<Dummy> adHocProcedureWith1InputAnd1OutputParameterWithResultSet(String in);
@Procedure("procedure_in1_out0_return_rs_with_update")
List<Dummy> adHocProcedureWith1InputAnd1OutputParameterWithResultSetWithUpdate(String in);
@Procedure("procedure_in1_out0_no_return_with_update")
void adHocProcedureWith1InputAndNoOutputParameterWithUpdate(String in);
@Procedure
Integer procedureWith1InputAnd1OutputParameter(Integer in);
@Procedure
void procedureWith1InputAndNoOutputParameter(Integer in);
@Procedure
Integer procedureWithNoInputAnd1OutputParameter();
@Procedure
List<Dummy> procedureWith1InputAnd1OutputParameterWithResultSet(String in);
@Procedure
List<Dummy> procedureWith1InputAnd1OutputParameterWithResultSetWithUpdate(String in);
@Procedure
void procedureWith1InputAndNoOutputParameterWithUpdate(String in);
}
|
DummyRepository
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java
|
{
"start": 2040,
"end": 7960
}
|
class ____ extends HandledTransportAction<ChangePasswordRequest, ActionResponse.Empty> {
public static final ActionType<ActionResponse.Empty> TYPE = new ActionType<>("cluster:admin/xpack/security/user/change_password");
private final Settings settings;
private final NativeUsersStore nativeUsersStore;
private final Realms realms;
@Inject
public TransportChangePasswordAction(
Settings settings,
TransportService transportService,
ActionFilters actionFilters,
NativeUsersStore nativeUsersStore,
Realms realms
) {
super(TYPE.name(), transportService, actionFilters, ChangePasswordRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
this.settings = settings;
this.nativeUsersStore = nativeUsersStore;
this.realms = realms;
}
@Override
protected void doExecute(Task task, ChangePasswordRequest request, ActionListener<ActionResponse.Empty> listener) {
final String username = request.username();
if (AnonymousUser.isAnonymousUsername(username, settings)) {
listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API"));
return;
}
final Hasher requestPwdHashAlgo = Hasher.resolveFromHash(request.passwordHash());
final Hasher configPwdHashAlgo = Hasher.resolve(XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings));
if (requestPwdHashAlgo.equals(configPwdHashAlgo) == false
&& Hasher.getAvailableAlgoStoredPasswordHash().contains(requestPwdHashAlgo.name().toLowerCase(Locale.ROOT)) == false) {
listener.onFailure(
new IllegalArgumentException(
"The provided password hash is not a hash or it could not be resolved to a supported hash algorithm. "
+ "The supported password hash algorithms are "
+ Hasher.getAvailableAlgoStoredPasswordHash().toString()
)
);
return;
}
if (ClientReservedRealm.isReservedUsername(username) && XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings) == false) {
// when on cloud and resetting the elastic operator user by mistake
ValidationException validationException = new ValidationException();
validationException.addValidationError(
"user ["
+ username
+ "] belongs to the "
+ ReservedRealm.NAME
+ " realm which is disabled."
+ (ELASTIC_NAME.equalsIgnoreCase(username)
? " In a cloud deployment, the password can be changed through the cloud console."
: "")
);
listener.onFailure(validationException);
return;
}
// check if user exists in the native realm
nativeUsersStore.getUser(username, new ActionListener<>() {
@Override
public void onResponse(User user) {
// nativeUsersStore.changePassword can create a missing reserved user, so enter only if not reserved
if (ClientReservedRealm.isReserved(username, settings) == false && user == null) {
List<Realm> nonNativeRealms = realms.getActiveRealms()
.stream()
.filter(t -> Set.of(NativeRealmSettings.TYPE, ReservedRealm.TYPE).contains(t.type()) == false) // Reserved realm is
// implemented in the
// native store
.toList();
if (nonNativeRealms.isEmpty()) {
listener.onFailure(createUserNotFoundException());
return;
}
GroupedActionListener<User> gal = new GroupedActionListener<>(nonNativeRealms.size(), ActionListener.wrap(users -> {
final Optional<User> nonNativeUser = users.stream().filter(Objects::nonNull).findAny();
if (nonNativeUser.isPresent()) {
listener.onFailure(
new ValidationException().addValidationError(
"user [" + username + "] does not belong to the native realm and cannot be managed via this API."
)
);
} else {
// user wasn't found in any other realm, display standard not-found message
listener.onFailure(createUserNotFoundException());
}
}, listener::onFailure));
for (Realm realm : nonNativeRealms) {
EsExecutors.DIRECT_EXECUTOR_SERVICE.execute(
ActionRunnable.wrap(gal, userActionListener -> realm.lookupUser(username, userActionListener))
);
}
} else {
// safe to proceed
nativeUsersStore.changePassword(request, listener.safeMap(v -> ActionResponse.Empty.INSTANCE));
}
}
@Override
public void onFailure(Exception e) {
listener.onFailure(e);
}
});
}
private static ValidationException createUserNotFoundException() {
ValidationException validationException = new ValidationException();
validationException.addValidationError(USER_NOT_FOUND_MESSAGE);
return validationException;
}
}
|
TransportChangePasswordAction
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-triple-servlet/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/servlet/DummyServletContext.java
|
{
"start": 1768,
"end": 9702
}
|
class ____ implements ServletContext {
private final FrameworkModel frameworkModel;
private final Map<String, Object> attributes = new HashMap<>();
private final Map<String, String> initParameters = new HashMap<>();
public DummyServletContext(FrameworkModel frameworkModel) {
this.frameworkModel = frameworkModel;
}
@Override
public String getContextPath() {
return "/";
}
@Override
public ServletContext getContext(String uripath) {
return this;
}
@Override
public int getMajorVersion() {
return 3;
}
@Override
public int getMinorVersion() {
return 1;
}
@Override
public int getEffectiveMajorVersion() {
return 3;
}
@Override
public int getEffectiveMinorVersion() {
return 1;
}
@Override
public String getMimeType(String file) {
return null;
}
@Override
public Set<String> getResourcePaths(String path) {
return null;
}
@Override
public URL getResource(String path) {
return null;
}
@Override
public InputStream getResourceAsStream(String path) {
return null;
}
@Override
public RequestDispatcher getRequestDispatcher(String path) {
return null;
}
@Override
public RequestDispatcher getNamedDispatcher(String name) {
return null;
}
public Servlet getServlet(String name) {
throw new UnsupportedOperationException();
}
public Enumeration<Servlet> getServlets() {
throw new UnsupportedOperationException();
}
public Enumeration<String> getServletNames() {
throw new UnsupportedOperationException();
}
@Override
public void log(String msg) {
LoggerFactory.getLogger(DummyServletContext.class).info(msg);
}
public void log(Exception exception, String msg) {
LoggerFactory.getLogger(DummyServletContext.class).info(msg, exception);
}
@Override
public void log(String message, Throwable throwable) {
LoggerFactory.getLogger(DummyServletContext.class).info(message, throwable);
}
@Override
public String getRealPath(String path) {
throw new UnsupportedOperationException();
}
@Override
public String getServerInfo() {
return "Dubbo Rest Server/1.0";
}
@Override
public String getInitParameter(String name) {
String value = initParameters.get(name);
if (value != null) {
return value;
}
Configuration conf = ConfigurationUtils.getGlobalConfiguration(frameworkModel.defaultApplication());
return conf.getString(RestConstants.CONFIG_PREFIX + "servlet-context." + name);
}
@Override
public Enumeration<String> getInitParameterNames() {
return Collections.enumeration(initParameters.keySet());
}
@Override
public boolean setInitParameter(String name, String value) {
return initParameters.putIfAbsent(name, value) == null;
}
@Override
public Object getAttribute(String name) {
return attributes.get(name);
}
@Override
public Enumeration<String> getAttributeNames() {
return Collections.enumeration(attributes.keySet());
}
@Override
public void setAttribute(String name, Object object) {
attributes.put(name, object);
}
@Override
public void removeAttribute(String name) {
attributes.remove(name);
}
@Override
public String getServletContextName() {
return "";
}
@Override
public ServletRegistration.Dynamic addServlet(String servletName, String className) {
throw new UnsupportedOperationException();
}
@Override
public ServletRegistration.Dynamic addServlet(String servletName, Servlet servlet) {
throw new UnsupportedOperationException();
}
@Override
public ServletRegistration.Dynamic addServlet(String servletName, Class<? extends Servlet> servletClass) {
throw new UnsupportedOperationException();
}
@Override
public Dynamic addJspFile(String servletName, String jspFile) {
return null;
}
@Override
public <T extends Servlet> T createServlet(Class<T> clazz) {
throw new UnsupportedOperationException();
}
@Override
public ServletRegistration getServletRegistration(String servletName) {
throw new UnsupportedOperationException();
}
@Override
public Map<String, ? extends ServletRegistration> getServletRegistrations() {
throw new UnsupportedOperationException();
}
@Override
public FilterRegistration.Dynamic addFilter(String filterName, String className) {
throw new UnsupportedOperationException();
}
@Override
public FilterRegistration.Dynamic addFilter(String filterName, Filter filter) {
throw new UnsupportedOperationException();
}
@Override
public FilterRegistration.Dynamic addFilter(String filterName, Class<? extends Filter> filterClass) {
throw new UnsupportedOperationException();
}
@Override
public <T extends Filter> T createFilter(Class<T> clazz) {
throw new UnsupportedOperationException();
}
@Override
public FilterRegistration getFilterRegistration(String filterName) {
throw new UnsupportedOperationException();
}
@Override
public Map<String, ? extends FilterRegistration> getFilterRegistrations() {
throw new UnsupportedOperationException();
}
@Override
public SessionCookieConfig getSessionCookieConfig() {
throw new UnsupportedOperationException();
}
@Override
public void setSessionTrackingModes(Set<SessionTrackingMode> sessionTrackingModes) {
throw new UnsupportedOperationException();
}
@Override
public Set<SessionTrackingMode> getDefaultSessionTrackingModes() {
throw new UnsupportedOperationException();
}
@Override
public Set<SessionTrackingMode> getEffectiveSessionTrackingModes() {
throw new UnsupportedOperationException();
}
@Override
public void addListener(String className) {
throw new UnsupportedOperationException();
}
@Override
public <T extends EventListener> void addListener(T t) {
throw new UnsupportedOperationException();
}
@Override
public void addListener(Class<? extends EventListener> listenerClass) {
throw new UnsupportedOperationException();
}
@Override
public <T extends EventListener> T createListener(Class<T> clazz) {
throw new UnsupportedOperationException();
}
@Override
public JspConfigDescriptor getJspConfigDescriptor() {
throw new UnsupportedOperationException();
}
@Override
public ClassLoader getClassLoader() {
return getClass().getClassLoader();
}
@Override
public void declareRoles(String... roleNames) {
throw new UnsupportedOperationException();
}
@Override
public String getVirtualServerName() {
throw new UnsupportedOperationException();
}
@Override
public int getSessionTimeout() {
throw new UnsupportedOperationException();
}
@Override
public void setSessionTimeout(int i) {
throw new UnsupportedOperationException();
}
@Override
public String getRequestCharacterEncoding() {
throw new UnsupportedOperationException();
}
@Override
public void setRequestCharacterEncoding(String encoding) {
throw new UnsupportedOperationException();
}
@Override
public String getResponseCharacterEncoding() {
throw new UnsupportedOperationException();
}
@Override
public void setResponseCharacterEncoding(String encoding) {
throw new UnsupportedOperationException();
}
}
|
DummyServletContext
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java
|
{
"start": 1248,
"end": 1336
}
|
class ____ the mutable configuration items for a data frame transform
*/
public final
|
holds
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/dagger/PrivateConstructorForNoninstantiableModuleTest.java
|
{
"start": 2132,
"end": 2744
}
|
class ____ {
@Provides
static String provideString() {
return "";
}
@Provides
static Integer provideInteger() {
return 1;
}
private TestModule() {}
}
""")
.doTest();
}
@Test
public void onlyStaticMethods_withConstructorGetsLeftAlone() {
testHelper
.addInputLines(
"in/TestModule.java",
"""
import dagger.Module;
import dagger.Provides;
@Module
final
|
TestModule
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-api/src/main/java/org/apache/dubbo/rpc/HeaderFilter.java
|
{
"start": 983,
"end": 1103
}
|
interface ____ {
RpcInvocation invoke(Invoker<?> invoker, RpcInvocation invocation) throws RpcException;
}
|
HeaderFilter
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-resourceestimator/src/test/java/org/apache/hadoop/resourceestimator/solver/impl/TestSolver.java
|
{
"start": 1845,
"end": 2826
}
|
class ____ {
private Solver solver;
protected abstract Solver createSolver() throws ResourceEstimatorException;
@BeforeEach
public void setup()
throws SolverException, IOException, SkylineStoreException,
ResourceEstimatorException {
solver = createSolver();
}
@Test
public void testNullJobHistory()
throws SolverException, SkylineStoreException {
assertThrows(InvalidInputException.class, () -> {
solver.solve(null);
});
// try to solve with null jobHistory
}
@Test
public void testEmptyJobHistory() throws SolverException, SkylineStoreException {
// try to solve with empty jobHistory
assertThrows(InvalidInputException.class, () -> {
Map<RecurrenceId, List<ResourceSkyline>> jobHistoryInvalid = new HashMap<RecurrenceId, List<ResourceSkyline>>();
solver.solve(jobHistoryInvalid);
});
}
@AfterEach
public final void cleanUp() {
solver.close();
solver = null;
}
}
|
TestSolver
|
java
|
apache__logging-log4j2
|
log4j-1.2-api/src/test/java/org/apache/log4j/builders/filter/LevelRangeFilterBuilderTest.java
|
{
"start": 7492,
"end": 7663
}
|
interface ____ {
LevelRangeFilter build(Level levelMin, Level levelMax, Boolean acceptOnMatch) throws Exception;
}
private static
|
TestLevelRangeFilterBuilder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/SelectTestCase.java
|
{
"start": 446,
"end": 1792
}
|
class ____ extends CliIntegrationTestCase {
public void testSelect() throws IOException {
index("test", body -> body.field("test_field", "test_value"));
assertThat(command("SELECT * FROM test"), containsString("test_field"));
assertThat(readLine(), containsString("----------"));
assertThat(readLine(), containsString("test_value"));
assertEquals("", readLine());
}
public void testMultiLineSelect() throws IOException {
index("test", body -> body.field("test_field", "test_value"));
assertThat(command("SELECT *\nFROM\ntest"), containsString("test_field"));
assertThat(readLine(), containsString("----------"));
assertThat(readLine(), containsString("test_value"));
assertEquals("", readLine());
}
public void testSelectWithWhere() throws IOException {
index("test", body -> body.field("test_field", "test_value1").field("i", 1));
index("test", body -> body.field("test_field", "test_value2").field("i", 2));
assertThat(command("SELECT * FROM test WHERE i = 2"), matchesRegex(".*\\s*i\\s*\\|\\s*test_field\\s*.*"));
assertThat(readLine(), containsString("----------"));
assertThat(readLine(), matchesRegex(".*\\s*2\\s*\\|\\s*test_value2\\s*.*"));
assertEquals("", readLine());
}
}
|
SelectTestCase
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.