language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-datastream-api/src/main/java/org/apache/flink/datastream/api/stream/GlobalStream.java | {
"start": 3296,
"end": 3379
} | interface ____ a configurable {@link GlobalStream}. */
@Experimental
| represents |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/refresh/RefreshEntityWithLazyPropertyTest.java | {
"start": 9150,
"end": 9646
} | class ____ {
@Id
private Long id;
@Basic(fetch = FetchType.LAZY)
private String description;
public Position() {
}
public Position(Long id, String description) {
this.id = id;
this.description = description;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
}
| Position |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleInternalHelper.java | {
"start": 2505,
"end": 3122
} | class ____<T> implements Iterable<Flowable<T>> {
private final Iterable<? extends SingleSource<? extends T>> sources;
ToFlowableIterable(Iterable<? extends SingleSource<? extends T>> sources) {
this.sources = sources;
}
@Override
public Iterator<Flowable<T>> iterator() {
return new ToFlowableIterator<>(sources.iterator());
}
}
public static <T> Iterable<? extends Flowable<T>> iterableToFlowable(final Iterable<? extends SingleSource<? extends T>> sources) {
return new ToFlowableIterable<>(sources);
}
}
| ToFlowableIterable |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/benchmarks/JsonDecodeBenchmark.java | {
"start": 1001,
"end": 4157
} | class ____ extends BenchmarkBase {
private Buffer small;
private Buffer wide;
private Buffer deep;
private String smallString;
private String wideString;
private String deepString;
private JsonCodec jacksonCodec;
private JsonCodec databindCodec;
@Setup
public void setup() {
small = loadJsonAsBuffer("small_bench.json");
wide = loadJsonAsBuffer("wide_bench.json");
deep = loadJsonAsBuffer("deep_bench.json");
smallString = small.toString();
wideString = wide.toString();
deepString = deep.toString();
jacksonCodec = new JacksonCodec();
databindCodec = new DatabindCodec();
}
private Buffer loadJsonAsBuffer(String filename) {
ClassLoader classLoader = getClass().getClassLoader();
try (InputStream file = classLoader.getResourceAsStream(filename)) {
String str = new BufferedReader(new InputStreamReader(file, StandardCharsets.UTF_8))
.lines()
.collect(Collectors.joining());
Buffer encoded = Buffer.buffer(str);
return Buffer.buffer().appendBuffer(encoded);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Benchmark
public void smallStringJackson(Blackhole blackhole) {
stringJackson(smallString, blackhole);
}
@Benchmark
public void smallStringDatabind(Blackhole blackhole) throws Exception {
stringDatabind(smallString, blackhole);
}
@Benchmark
public void wideStringJackson(Blackhole blackhole) {
stringJackson(wideString, blackhole);
}
@Benchmark
public void wideStringDatabind(Blackhole blackhole) throws Exception {
stringDatabind(wideString, blackhole);
}
@Benchmark
public void deepStringJackson(Blackhole blackhole) {
stringJackson(deepString, blackhole);
}
@Benchmark
public void deepStringDatabind(Blackhole blackhole) throws Exception {
stringDatabind(deepString, blackhole);
}
private void stringJackson(String str, Blackhole blackhole) {
blackhole.consume(new JsonObject(str));
}
private void stringDatabind(String str, Blackhole blackhole) {
blackhole.consume(databindCodec.fromString(str, JsonObject.class));
}
@Benchmark
public void smallBufferJackson(Blackhole blackhole) {
bufferJackson(small, blackhole);
}
@Benchmark
public void smallBufferDatabind(Blackhole blackhole) throws Exception {
bufferDatabind(small, blackhole);
}
@Benchmark
public void wideBufferJackson(Blackhole blackhole) {
bufferJackson(wide, blackhole);
}
@Benchmark
public void wideBufferDatabind(Blackhole blackhole) throws Exception {
bufferDatabind(wide, blackhole);
}
@Benchmark
public void deepBufferJackson(Blackhole blackhole) {
bufferJackson(deep, blackhole);
}
@Benchmark
public void deepBufferDatabind(Blackhole blackhole) throws Exception {
bufferDatabind(deep, blackhole);
}
private void bufferJackson(Buffer buffer, Blackhole blackhole) {
blackhole.consume(new JsonObject(buffer));
}
private void bufferDatabind(Buffer buffer, Blackhole blackhole) throws Exception {
blackhole.consume(jacksonCodec.fromBuffer(buffer, JsonObject.class));
}
}
| JsonDecodeBenchmark |
java | elastic__elasticsearch | distribution/tools/keystore-cli/src/test/java/org/elasticsearch/cli/keystore/CreateKeyStoreCommandTests.java | {
"start": 963,
"end": 4473
} | class ____ extends KeyStoreCommandTestCase {
@Override
protected Command newCommand() {
return new CreateKeyStoreCommand() {
@Override
protected Environment createEnv(OptionSet options, ProcessInfo processInfo) throws UserException {
return env;
}
};
}
public void testNotMatchingPasswords() throws Exception {
String password = getPossibleKeystorePassword();
terminal.addSecretInput(password);
terminal.addSecretInput("notthekeystorepasswordyouarelookingfor");
UserException e = expectThrows(UserException.class, () -> execute(randomFrom("-p", "--password")));
assertEquals(e.getMessage(), ExitCodes.DATA_ERROR, e.exitCode);
assertThat(e.getMessage(), containsString("Passwords are not equal, exiting"));
}
public void testDefaultNotPromptForPassword() throws Exception {
assumeFalse("Cannot open unprotected keystore on FIPS JVM", inFipsJvm());
execute();
Path configDir = env.configDir();
assertNotNull(KeyStoreWrapper.load(configDir));
}
public void testPosix() throws Exception {
final String password = getPossibleKeystorePassword();
// Sometimes (rarely) test with explicit empty password
final boolean withPassword = password.length() > 0 || rarely();
if (withPassword) {
terminal.addSecretInput(password);
terminal.addSecretInput(password);
execute(randomFrom("-p", "--password"));
} else {
execute();
}
Path configDir = env.configDir();
assertNotNull(KeyStoreWrapper.load(configDir));
}
public void testNotPosix() throws Exception {
env = setupEnv(false, fileSystems);
final String password = getPossibleKeystorePassword();
// Sometimes (rarely) test with explicit empty password
final boolean withPassword = password.length() > 0 || rarely();
if (withPassword) {
terminal.addSecretInput(password);
terminal.addSecretInput(password);
execute(randomFrom("-p", "--password"));
} else {
execute();
}
Path configDir = env.configDir();
assertNotNull(KeyStoreWrapper.load(configDir));
}
public void testOverwrite() throws Exception {
String password = getPossibleKeystorePassword();
Path keystoreFile = KeyStoreWrapper.keystorePath(env.configDir());
byte[] content = "not a keystore".getBytes(StandardCharsets.UTF_8);
Files.write(keystoreFile, content);
terminal.addTextInput(""); // default is no (don't overwrite)
execute();
assertArrayEquals(content, Files.readAllBytes(keystoreFile));
terminal.addTextInput("n"); // explicit no (don't overwrite)
execute();
assertArrayEquals(content, Files.readAllBytes(keystoreFile));
terminal.reset();
// Sometimes (rarely) test with explicit empty password
final boolean withPassword = password.length() > 0 || rarely();
if (withPassword) {
terminal.addSecretInput(password);
terminal.addSecretInput(password);
}
terminal.addTextInput("y"); // overwrite
if (withPassword) {
execute(randomFrom("-p", "--password"));
} else {
execute();
}
assertNotNull(KeyStoreWrapper.load(env.configDir()));
}
}
| CreateKeyStoreCommandTests |
java | micronaut-projects__micronaut-core | inject-java/src/main/java/io/micronaut/annotation/processing/PublicAbstractMethodVisitor.java | {
"start": 1855,
"end": 3955
} | class ____
* @param visitorContext The visitor context
*/
PublicAbstractMethodVisitor(TypeElement classElement,
JavaVisitorContext visitorContext) {
super(visitorContext);
this.classElement = classElement;
this.modelUtils = visitorContext.getModelUtils();
this.elementUtils = visitorContext.getElements();
}
@Override
protected boolean isAcceptable(Element element) {
if (element.getKind() == ElementKind.METHOD) {
var executableElement = (ExecutableElement) element;
Set<Modifier> modifiers = executableElement.getModifiers();
String methodName = executableElement.getSimpleName().toString();
boolean acceptable = isAcceptableMethod(executableElement) && !modifiers.contains(Modifier.FINAL) && !modifiers.contains(Modifier.STATIC);
boolean isDeclared = executableElement.getEnclosingElement().equals(classElement);
if (acceptable && !isDeclared && declaredMethods.containsKey(methodName)) {
// check method is not overridden already
for (ExecutableElement ex : declaredMethods.get(methodName)) {
if (elementUtils.overrides(ex, executableElement, classElement)) {
return false;
}
}
} else if (!acceptable && !modelUtils.isStatic(executableElement)) {
List<ExecutableElement> declaredMethodList = declaredMethods.computeIfAbsent(methodName, s -> new ArrayList<>());
declaredMethodList.add(executableElement);
}
return acceptable;
} else {
return false;
}
}
/**
* Return whether the given executable element is acceptable. By default, just checks if the method is abstract.
*
* @param executableElement The method
* @return True if it is
*/
protected boolean isAcceptableMethod(ExecutableElement executableElement) {
return modelUtils.isAbstract(executableElement);
}
}
| element |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/PropertySourceAnnotationTests.java | {
"start": 17931,
"end": 18129
} | class ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@PropertySource("classpath:org/springframework/context/annotation/p1.properties")
@ | ConfigWithRepeatedPropertySourceAnnotationsOnComposedAnnotation |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/stream/JSONReaderTest_error2.java | {
"start": 259,
"end": 1126
} | class ____ extends TestCase {
private static Object context;
private static Field stateField;
public void test_read() throws Exception {
Field field = JSONReader.class.getDeclaredField("context");
field.setAccessible(true);
;
JSONReader reader = new JSONReader(new StringReader("[{}]"));
reader.config(Feature.AllowArbitraryCommas, true);
reader.startArray();
context = field.get(reader);
stateField = context.getClass().getDeclaredField("state");
stateField.setAccessible(true);
{
Exception error = null;
try {
reader.readObject(VO.class);
} catch (Exception ex) {
error = ex;
}
Assert.assertNotNull(error);
}
}
public static | JSONReaderTest_error2 |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestContinuousScheduling.java | {
"start": 3445,
"end": 16060
} | class ____ extends FairSchedulerTestBase {
private ControlledClock mockClock;
private static int delayThresholdTimeMs = 1000;
@SuppressWarnings("deprecation")
@Override
public Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
conf.setBoolean(
FairSchedulerConfiguration.CONTINUOUS_SCHEDULING_ENABLED, true);
conf.setInt(FairSchedulerConfiguration.LOCALITY_DELAY_NODE_MS,
delayThresholdTimeMs);
conf.setInt(FairSchedulerConfiguration.LOCALITY_DELAY_RACK_MS,
delayThresholdTimeMs);
return conf;
}
@SuppressWarnings("deprecation")
@BeforeEach
public void setup() {
QueueMetrics.clearQueueMetrics();
DefaultMetricsSystem.setMiniClusterMode(true);
mockClock = new ControlledClock();
conf = createConfiguration();
resourceManager = new MockRM(conf);
resourceManager.start();
scheduler = (FairScheduler) resourceManager.getResourceScheduler();
scheduler.setClock(mockClock);
assertTrue(scheduler.isContinuousSchedulingEnabled());
assertEquals(
FairSchedulerConfiguration.DEFAULT_CONTINUOUS_SCHEDULING_SLEEP_MS,
scheduler.getContinuousSchedulingSleepMs());
assertEquals(mockClock, scheduler.getClock());
}
@AfterEach
public void teardown() {
if (resourceManager != null) {
resourceManager.stop();
resourceManager = null;
}
}
@Test
@Timeout(value = 60)
public void testBasic() throws InterruptedException {
// Add one node
String host = "127.0.0.1";
RMNode node1 = MockNodes.newNodeInfo(
1, Resources.createResource(4096, 4), 1, host);
NodeAddedSchedulerEvent nodeEvent1 = new NodeAddedSchedulerEvent(node1);
scheduler.handle(nodeEvent1);
NodeUpdateSchedulerEvent nodeUpdateEvent =
new NodeUpdateSchedulerEvent(node1);
scheduler.handle(nodeUpdateEvent);
ApplicationAttemptId appAttemptId =
createAppAttemptId(this.APP_ID++, this.ATTEMPT_ID++);
createMockRMApp(appAttemptId);
ApplicationPlacementContext placementCtx =
new ApplicationPlacementContext("queue11");
scheduler.addApplication(appAttemptId.getApplicationId(), "queue11",
"user11", false, placementCtx);
scheduler.addApplicationAttempt(appAttemptId, false, false);
List<ResourceRequest> ask = new ArrayList<>();
ask.add(createResourceRequest(1024, 1, ResourceRequest.ANY, 1, 1, true));
scheduler.allocate(
appAttemptId, ask, null, new ArrayList<ContainerId>(),
null, null, NULL_UPDATE_REQUESTS);
FSAppAttempt app = scheduler.getSchedulerApp(appAttemptId);
triggerSchedulingAttempt();
checkAppConsumption(app, Resources.createResource(1024, 1));
}
@Test
@Timeout(value = 10)
public void testSortedNodes() throws Exception {
// Add two nodes
RMNode node1 =
MockNodes.newNodeInfo(1, Resources.createResource(8 * 1024, 8), 1,
"127.0.0.1");
NodeAddedSchedulerEvent nodeEvent1 = new NodeAddedSchedulerEvent(node1);
scheduler.handle(nodeEvent1);
RMNode node2 =
MockNodes.newNodeInfo(1, Resources.createResource(8 * 1024, 8), 2,
"127.0.0.2");
NodeAddedSchedulerEvent nodeEvent2 = new NodeAddedSchedulerEvent(node2);
scheduler.handle(nodeEvent2);
// available resource
assertThat(scheduler.getClusterResource().getMemorySize()).
isEqualTo(16 * 1024);
assertThat(scheduler.getClusterResource().getVirtualCores()).
isEqualTo(16);
// send application request
ApplicationAttemptId appAttemptId =
createAppAttemptId(this.APP_ID++, this.ATTEMPT_ID++);
createMockRMApp(appAttemptId);
ApplicationPlacementContext placementCtx =
new ApplicationPlacementContext("queue11");
scheduler.addApplication(appAttemptId.getApplicationId(), "queue11",
"user11", false, placementCtx);
scheduler.addApplicationAttempt(appAttemptId, false, false);
List<ResourceRequest> ask = new ArrayList<>();
ResourceRequest request =
createResourceRequest(1024, 1, ResourceRequest.ANY, 1, 1, true);
ask.add(request);
scheduler.allocate(appAttemptId, ask, null, new ArrayList<>(), null, null,
NULL_UPDATE_REQUESTS);
triggerSchedulingAttempt();
FSAppAttempt app = scheduler.getSchedulerApp(appAttemptId);
checkAppConsumption(app, Resources.createResource(1024, 1));
// another request
request =
createResourceRequest(1024, 1, ResourceRequest.ANY, 2, 1, true);
ask.clear();
ask.add(request);
scheduler.allocate(appAttemptId, ask, null, new ArrayList<>(), null, null,
NULL_UPDATE_REQUESTS);
triggerSchedulingAttempt();
checkAppConsumption(app, Resources.createResource(2048, 2));
// 2 containers should be assigned to 2 nodes
Set<NodeId> nodes = new HashSet<NodeId>();
Iterator<RMContainer> it = app.getLiveContainers().iterator();
while (it.hasNext()) {
nodes.add(it.next().getContainer().getNodeId());
}
assertEquals(2, nodes.size());
}
@SuppressWarnings("deprecation")
@Test
public void testWithNodeRemoved() throws Exception {
// Disable continuous scheduling, will invoke continuous
// scheduling once manually
scheduler = new FairScheduler();
conf = super.createConfiguration();
resourceManager = new MockRM(conf);
// TODO: This test should really be using MockRM. For now starting stuff
// that is needed at a bare minimum.
((AsyncDispatcher)resourceManager.getRMContext().getDispatcher()).start();
resourceManager.getRMContext().getStateStore().start();
// to initialize the master key
resourceManager.getRMContext().getContainerTokenSecretManager()
.rollMasterKey();
scheduler.setRMContext(resourceManager.getRMContext());
assertTrue(!scheduler.isContinuousSchedulingEnabled(),
"Continuous scheduling should be disabled.");
scheduler.init(conf);
scheduler.start();
// Add two nodes
RMNode node1 =
MockNodes.newNodeInfo(1, Resources.createResource(8 * 1024, 8), 1,
"127.0.0.1");
NodeAddedSchedulerEvent nodeEvent1 = new NodeAddedSchedulerEvent(node1);
scheduler.handle(nodeEvent1);
RMNode node2 =
MockNodes.newNodeInfo(1, Resources.createResource(8 * 1024, 8), 2,
"127.0.0.2");
NodeAddedSchedulerEvent nodeEvent2 = new NodeAddedSchedulerEvent(node2);
scheduler.handle(nodeEvent2);
assertEquals(2, scheduler.getNumClusterNodes(),
"We should have two alive nodes.");
// Remove one node
NodeRemovedSchedulerEvent removeNode1
= new NodeRemovedSchedulerEvent(node1);
scheduler.handle(removeNode1);
assertEquals(1, scheduler.getNumClusterNodes(),
"We should only have one alive node.");
// Invoke the continuous scheduling once
try {
scheduler.continuousSchedulingAttempt();
} catch (Exception e) {
fail("Exception happened when doing continuous scheduling. " +
e.toString());
}
}
@SuppressWarnings("deprecation")
@Test
public void testInterruptedException()
throws Exception {
// Disable continuous scheduling, will invoke continuous
// scheduling once manually
scheduler = new FairScheduler();
conf = super.createConfiguration();
resourceManager = new MockRM(conf);
// TODO: This test should really be using MockRM. For now starting stuff
// that is needed at a bare minimum.
((AsyncDispatcher)resourceManager.getRMContext().getDispatcher()).start();
resourceManager.getRMContext().getStateStore().start();
// to initialize the master key
resourceManager.getRMContext().getContainerTokenSecretManager()
.rollMasterKey();
scheduler.setRMContext(resourceManager.getRMContext());
scheduler.init(conf);
scheduler.start();
FairScheduler spyScheduler = spy(scheduler);
assertTrue(!spyScheduler.isContinuousSchedulingEnabled(),
"Continuous scheduling should be disabled.");
// Add one node
RMNode node1 =
MockNodes.newNodeInfo(1, Resources.createResource(8 * 1024, 8), 1,
"127.0.0.1");
NodeAddedSchedulerEvent nodeEvent1 = new NodeAddedSchedulerEvent(node1);
spyScheduler.handle(nodeEvent1);
assertEquals(1, spyScheduler.getNumClusterNodes(),
"We should have one alive node.");
InterruptedException ie = new InterruptedException();
doThrow(new YarnRuntimeException(ie)).when(spyScheduler).
attemptScheduling(isA(FSSchedulerNode.class));
// Invoke the continuous scheduling once
try {
spyScheduler.continuousSchedulingAttempt();
fail("Expected InterruptedException to stop schedulingThread");
} catch (InterruptedException e) {
assertEquals(ie, e);
}
}
@SuppressWarnings("deprecation")
@Test
public void testSchedulerThreadLifeCycle() throws InterruptedException {
scheduler.start();
Thread schedulingThread = scheduler.schedulingThread;
assertTrue(schedulingThread.isAlive());
scheduler.stop();
int numRetries = 100;
while (numRetries-- > 0 && schedulingThread.isAlive()) {
Thread.sleep(50);
}
assertNotEquals(0, numRetries, "The Scheduling thread is still alive");
}
@SuppressWarnings("deprecation")
@Test
public void TestNodeAvailableResourceComparatorTransitivity() {
ClusterNodeTracker<FSSchedulerNode> clusterNodeTracker =
scheduler.getNodeTracker();
List<RMNode> rmNodes =
MockNodes.newNodes(2, 4000, Resource.newInstance(4096, 4));
for (RMNode rmNode : rmNodes) {
clusterNodeTracker.addNode(new FSSchedulerNode(rmNode, false));
}
// To simulate unallocated resource changes
new SubjectInheritingThread() {
@Override
public void work() {
for (int j = 0; j < 100; j++) {
for (FSSchedulerNode node : clusterNodeTracker.getAllNodes()) {
int i = ThreadLocalRandom.current().nextInt(-30, 30);
node.deductUnallocatedResource(Resource.newInstance(i * 1024, i));
}
}
}
}.start();
try {
scheduler.continuousSchedulingAttempt();
} catch (Exception e) {
fail(e.getMessage());
}
}
@Test
public void testFairSchedulerContinuousSchedulingInitTime() throws Exception {
scheduler.start();
int priorityValue;
Priority priority;
FSAppAttempt fsAppAttempt;
ResourceRequest request1;
ResourceRequest request2;
ApplicationAttemptId id11;
priorityValue = 1;
id11 = createAppAttemptId(1, 1);
createMockRMApp(id11);
priority = Priority.newInstance(priorityValue);
ApplicationPlacementContext placementCtx =
new ApplicationPlacementContext("root.queue1");
scheduler.addApplication(id11.getApplicationId(), "root.queue1", "user1",
false, placementCtx);
scheduler.addApplicationAttempt(id11, false, false);
fsAppAttempt = scheduler.getApplicationAttempt(id11);
String hostName = "127.0.0.1";
RMNode node1 =
MockNodes.newNodeInfo(1, Resources.createResource(16 * 1024, 16), 1,
hostName);
List<ResourceRequest> ask1 = new ArrayList<>();
request1 =
createResourceRequest(1024, 8, node1.getRackName(), priorityValue, 1,
true);
request2 =
createResourceRequest(1024, 8, ResourceRequest.ANY, priorityValue, 1,
true);
ask1.add(request1);
ask1.add(request2);
scheduler.allocate(id11, ask1, null, new ArrayList<ContainerId>(), null,
null, NULL_UPDATE_REQUESTS);
NodeAddedSchedulerEvent nodeEvent1 = new NodeAddedSchedulerEvent(node1);
scheduler.handle(nodeEvent1);
FSSchedulerNode node = scheduler.getSchedulerNode(node1.getNodeID());
// Tick the time and let the fsApp startTime different from initScheduler
// time
mockClock.tickSec(delayThresholdTimeMs / 1000);
scheduler.attemptScheduling(node);
GenericTestUtils.waitFor(new Supplier<Boolean>() {
public Boolean get() {
return fsAppAttempt.getLastScheduledContainer().size() != 0;
}
}, 10, 4000);
Map<SchedulerRequestKey, Long> lastScheduledContainer =
fsAppAttempt.getLastScheduledContainer();
long initSchedulerTime =
lastScheduledContainer.get(TestUtils.toSchedulerKey(priority));
assertEquals(delayThresholdTimeMs, initSchedulerTime);
}
@SuppressWarnings("deprecation")
private void triggerSchedulingAttempt() throws InterruptedException {
Thread.sleep(
2 * scheduler.getConf().getContinuousSchedulingSleepMs());
}
}
| TestContinuousScheduling |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/support/SpringFactoriesLoader.java | {
"start": 6630,
"end": 6837
} | class ____
* with custom failure handling provided by the given failure handler.
* <p>The returned factories are sorted using {@link AnnotationAwareOrderComparator}.
* <p>If duplicate implementation | loader |
java | apache__flink | flink-formats/flink-avro/src/test/java/org/apache/flink/formats/avro/AvroInputFormatTypeExtractionTest.java | {
"start": 2557,
"end": 3045
} | class ____ {
public String theString;
public MyAvroType recursive;
private double aDouble;
public double getaDouble() {
return aDouble;
}
public void setaDouble(double aDouble) {
this.aDouble = aDouble;
}
public void setTheString(String theString) {
this.theString = theString;
}
public String getTheString() {
return theString;
}
}
}
| MyAvroType |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java | {
"start": 33826,
"end": 134874
} | class ____ extends Plugin
implements
SystemIndexPlugin,
IngestPlugin,
NetworkPlugin,
ClusterPlugin,
ClusterCoordinationPlugin,
MapperPlugin,
ExtensiblePlugin,
SearchPlugin,
RestServerActionPlugin,
ReloadablePlugin,
PersistentTaskPlugin {
public static final String SECURITY_CRYPTO_THREAD_POOL_NAME = XPackField.SECURITY + "-crypto";
// TODO: ip filtering does not actually track license usage yet
public static final LicensedFeature.Momentary IP_FILTERING_FEATURE = LicensedFeature.momentaryLenient(
null,
"security-ip-filtering",
License.OperationMode.GOLD
);
public static final LicensedFeature.Momentary AUDITING_FEATURE = LicensedFeature.momentary(
null,
"security-auditing",
License.OperationMode.GOLD
);
public static final LicensedFeature.Momentary TOKEN_SERVICE_FEATURE = LicensedFeature.momentary(
null,
"security-token-service",
License.OperationMode.STANDARD
);
private static final String REALMS_FEATURE_FAMILY = "security-realms";
// Builtin realms (file/native) realms are Basic licensed, so don't need to be checked or tracked
// Some realms (LDAP, AD, PKI) are Gold+
public static final LicensedFeature.Persistent LDAP_REALM_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"ldap",
License.OperationMode.GOLD
);
public static final LicensedFeature.Persistent AD_REALM_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"active-directory",
License.OperationMode.GOLD
);
public static final LicensedFeature.Persistent PKI_REALM_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"pki",
License.OperationMode.GOLD
);
// SSO realms are Platinum+
public static final LicensedFeature.Persistent SAML_REALM_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"saml",
License.OperationMode.PLATINUM
);
public static final LicensedFeature.Persistent OIDC_REALM_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"oidc",
License.OperationMode.PLATINUM
);
public static final LicensedFeature.Persistent JWT_REALM_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"jwt",
License.OperationMode.PLATINUM
);
public static final LicensedFeature.Persistent KERBEROS_REALM_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"kerberos",
License.OperationMode.PLATINUM
);
// Custom realms are Platinum+
public static final LicensedFeature.Persistent CUSTOM_REALMS_FEATURE = LicensedFeature.persistent(
REALMS_FEATURE_FAMILY,
"custom",
License.OperationMode.PLATINUM
);
public static final LicensedFeature.Momentary DELEGATED_AUTHORIZATION_FEATURE = LicensedFeature.momentary(
null,
"security-delegated-authorization",
License.OperationMode.PLATINUM
);
public static final LicensedFeature.Momentary AUTHORIZATION_ENGINE_FEATURE = LicensedFeature.momentary(
null,
"security-authorization-engine",
License.OperationMode.PLATINUM
);
// Custom role providers are Platinum+
public static final LicensedFeature.Persistent CUSTOM_ROLE_PROVIDERS_FEATURE = LicensedFeature.persistent(
null,
"security-roles-provider",
License.OperationMode.PLATINUM
);
public static final LicensedFeature.Momentary OPERATOR_PRIVILEGES_FEATURE = LicensedFeature.momentary(
null,
"operator-privileges",
License.OperationMode.ENTERPRISE
);
public static final LicensedFeature.Momentary USER_PROFILE_COLLABORATION_FEATURE = LicensedFeature.momentary(
null,
"user-profile-collaboration",
License.OperationMode.STANDARD
);
/**
* Configurable cross cluster access is Enterprise feature.
*/
public static final LicensedFeature.Momentary ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE = LicensedFeature.momentary(
null,
"advanced-remote-cluster-security",
License.OperationMode.ENTERPRISE
);
private static final Logger logger = LogManager.getLogger(Security.class);
private Settings settings;
private final boolean enabled;
private final SetOnce<Boolean> dlsFlsEnabled = new SetOnce<>();
private final SecuritySystemIndices systemIndices;
private final ListenableFuture<Void> nodeStartedListenable;
/* what a PITA that we need an extra indirection to initialize this. Yet, once we got rid of guice we can thing about how
* to fix this or make it simpler. Today we need several service that are created in createComponents but we need to register
* an instance of TransportInterceptor way earlier before createComponents is called. */
private final SetOnce<TransportInterceptor> securityInterceptor = new SetOnce<>();
private final SetOnce<IPFilter> ipFilter = new SetOnce<>();
private final SetOnce<AuthenticationService> authcService = new SetOnce<>();
private final SetOnce<SecondaryAuthenticator> secondayAuthc = new SetOnce<>();
private final SetOnce<AuditTrailService> auditTrailService = new SetOnce<>();
private final SetOnce<SecurityContext> securityContext = new SetOnce<>();
private final SetOnce<ThreadContext> threadContext = new SetOnce<>();
private final SetOnce<TokenService> tokenService = new SetOnce<>();
private final SetOnce<SecurityActionFilter> securityActionFilter = new SetOnce<>();
private final SetOnce<SharedGroupFactory> sharedGroupFactory = new SetOnce<>();
private final SetOnce<DocumentSubsetBitsetCache> dlsBitsetCache = new SetOnce<>();
private final SetOnce<List<BootstrapCheck>> bootstrapChecks = new SetOnce<>();
private final List<SecurityExtension> securityExtensions = new ArrayList<>();
private final SetOnce<Transport> transportReference = new SetOnce<>();
private final SetOnce<ScriptService> scriptServiceReference = new SetOnce<>();
private final SetOnce<OperatorOnlyRegistry> operatorOnlyRegistry = new SetOnce<>();
private final SetOnce<PutRoleRequestBuilderFactory> putRoleRequestBuilderFactory = new SetOnce<>();
private final SetOnce<BulkPutRoleRequestBuilderFactory> bulkPutRoleRequestBuilderFactory = new SetOnce<>();
private final SetOnce<CreateApiKeyRequestBuilderFactory> createApiKeyRequestBuilderFactory = new SetOnce<>();
private final SetOnce<UpdateApiKeyRequestTranslator> updateApiKeyRequestTranslator = new SetOnce<>();
private final SetOnce<BulkUpdateApiKeyRequestTranslator> bulkUpdateApiKeyRequestTranslator = new SetOnce<>();
private final SetOnce<RestGrantApiKeyAction.RequestTranslator> grantApiKeyRequestTranslator = new SetOnce<>();
private final SetOnce<GetBuiltinPrivilegesResponseTranslator> getBuiltinPrivilegesResponseTranslator = new SetOnce<>();
private final SetOnce<HasPrivilegesRequestBuilderFactory> hasPrivilegesRequestBuilderFactory = new SetOnce<>();
private final SetOnce<FileRolesStore> fileRolesStore = new SetOnce<>();
private final SetOnce<OperatorPrivileges.OperatorPrivilegesService> operatorPrivilegesService = new SetOnce<>();
private final SetOnce<ReservedRoleMappingAction> reservedRoleMappingAction = new SetOnce<>();
private final SetOnce<Realms> realms = new SetOnce<>();
private final SetOnce<Client> client = new SetOnce<>();
private final SetOnce<List<ReloadableSecurityComponent>> reloadableComponents = new SetOnce<>();
private final SetOnce<AuthorizationDenialMessages> authorizationDenialMessages = new SetOnce<>();
private final SetOnce<ReservedRoleNameChecker.Factory> reservedRoleNameCheckerFactory = new SetOnce<>();
private final SetOnce<FileRoleValidator> fileRoleValidator = new SetOnce<>();
private final SetOnce<SecondaryAuthActions> secondaryAuthActions = new SetOnce<>();
private final SetOnce<QueryableBuiltInRolesProviderFactory> queryableRolesProviderFactory = new SetOnce<>();
private final SetOnce<SamlAuthenticateResponseHandler.Factory> samlAuthenticateResponseHandlerFactory = new SetOnce<>();
private final SetOnce<RemoteClusterSecurityExtension.Provider> remoteClusterSecurityExtensionProvider = new SetOnce<>();
private final SetOnce<RemoteClusterSecurityExtension> remoteClusterSecurityExtension = new SetOnce<>();
private final SetOnce<RemoteClusterAuthenticationService> remoteClusterAuthenticationService = new SetOnce<>();
private final SetOnce<SecurityMigrations.Manager> migrationManager = new SetOnce<>();
private final SetOnce<List<Closeable>> closableComponents = new SetOnce<>();
public Security(Settings settings) {
this(settings, Collections.emptyList());
}
/**
 * Package-private constructor used by tests to inject a fixed set of extensions.
 *
 * @param settings   the (possibly non-final) node settings; see note below
 * @param extensions additional security extensions to register
 */
Security(Settings settings, List<SecurityExtension> extensions) {
    // Note: The settings that are passed in here might not be the final values - things like Plugin.additionalSettings()
    // will be called after the plugins are constructed, and may introduce new setting values.
    // Accordingly we should avoid using this settings object for very much and mostly rely on Environment.setting() as provided
    // to createComponents.
    this.settings = settings;
    // TODO this is wrong, we should only use the environment that is provided to createComponents
    this.enabled = XPackSettings.SECURITY_ENABLED.get(settings);
    this.systemIndices = new SecuritySystemIndices(settings);
    this.nodeStartedListenable = new ListenableFuture<>();
    if (enabled) {
        // Fail fast on invalid realm/FIPS configuration before any components are built.
        runStartupChecks(settings);
        Automatons.updateConfiguration(settings);
    } else {
        // Security is off: reject leftover remote-cluster credentials and skip bootstrap checks.
        ensureNoRemoteClusterCredentialsOnDisabledSecurity(settings);
        this.bootstrapChecks.set(Collections.emptyList());
    }
    this.securityExtensions.addAll(extensions);
}
/**
 * Fails startup when security is disabled but the keystore still contains remote cluster
 * credentials: those clusters could never be connected to without security enabled, so the
 * misconfiguration is rejected eagerly with an actionable message.
 */
private void ensureNoRemoteClusterCredentialsOnDisabledSecurity(Settings settings) {
    assert false == enabled;
    // Sorted so the error message is deterministic regardless of keystore iteration order.
    final List<String> credentialSettingKeys = RemoteClusterSettings.REMOTE_CLUSTER_CREDENTIALS.getAllConcreteSettings(settings)
        .map(Setting::getKey)
        .sorted()
        .toList();
    if (credentialSettingKeys.isEmpty()) {
        return;
    }
    throw new IllegalArgumentException(
        format(
            "Found [%s] remote clusters with credentials [%s]. Security [%s] must be enabled to connect to them. "
                + "Please either enable security or remove these settings from the keystore.",
            credentialSettingKeys.size(),
            Strings.collectionToCommaDelimitedString(credentialSettingKeys),
            XPackSettings.SECURITY_ENABLED.getKey()
        )
    );
}
/**
 * Validates settings that must be correct before the node may start: realm configuration
 * always, plus the additional FIPS 140 constraints when FIPS mode is enabled.
 */
private static void runStartupChecks(Settings settings) {
    validateRealmSettings(settings);
    if (XPackSettings.FIPS_MODE_ENABLED.get(settings) == false) {
        return;
    }
    validateForFips(settings);
}
// overridable by tests
/** Clock used for token/key expiry; tests override to control time. */
protected Clock getClock() {
    return Clock.systemUTC();
}
/** SSL service shared across the x-pack plugins; overridable by tests. */
protected SSLService getSslService() {
    return XPackPlugin.getSharedSslService();
}
/** License service shared across the x-pack plugins; overridable by tests. */
protected LicenseService getLicenseService() {
    return XPackPlugin.getSharedLicenseService();
}
/** Mutable license state shared across the x-pack plugins; overridable by tests. */
protected XPackLicenseState getLicenseState() {
    return XPackPlugin.getSharedLicenseState();
}
/** Node client captured in {@code createComponents}; null before then. */
protected Client getClient() {
    return client.get();
}
/** Components supporting live settings reload, collected in {@code createComponents}. */
protected List<ReloadableSecurityComponent> getReloadableSecurityComponents() {
    return this.reloadableComponents.get();
}
/*
 * Mirrors XPackPlugin.resolveConfigFile, duplicated here so the lookup runs in this
 * codesource and therefore passes the secured file permission check on the users file.
 * When a secured permission is granted on this file (which there should be), ES has
 * already verified that the file actually lives inside the config directory.
 */
public static Path resolveSecuredConfigFile(Environment env, String file) {
    final Path config = env.configDir().resolve(file);
    if (doPrivileged((PrivilegedAction<Boolean>) () -> Files.exists(config))) {
        return config;
    }
    // Fall back to the deprecated legacy "x-pack" subdirectory, warning if the file is found there.
    final Path legacyConfig = env.configDir().resolve("x-pack").resolve(file);
    if (doPrivileged((PrivilegedAction<Boolean>) () -> Files.exists(legacyConfig))) {
        DeprecationLogger.getLogger(XPackPlugin.class)
            .warn(
                DeprecationCategory.OTHER,
                "config_file_path",
                "Config file [" + file + "] is in a deprecated location. Move from " + legacyConfig + " to " + config
            );
        return legacyConfig;
    }
    return config;
}
@Override
public Collection<?> createComponents(PluginServices services) {
    // Thin adapter: unpack the service container and delegate to the testable overload.
    // Any failure is wrapped so the node reports a single clear initialization error.
    try {
        return createComponents(
            services.client(),
            services.threadPool(),
            services.clusterService(),
            services.featureService(),
            services.resourceWatcherService(),
            services.scriptService(),
            services.xContentRegistry(),
            services.environment(),
            services.indexNameExpressionResolver(),
            services.telemetryProvider(),
            new PersistentTasksService(services.clusterService(), services.threadPool(), services.client()),
            services.linkedProjectConfigService(),
            services.projectResolver(),
            services.projectRoutingResolver()
        );
    } catch (final Exception e) {
        throw new IllegalStateException("security initialization failed", e);
    }
}
// pkg private for testing - tests want to pass in their set of extensions hence we are not using the extension service directly
/**
 * Builds every security component: audit trail, token/API-key services, realms, role stores,
 * the authentication and authorization services, transport/REST interception and the remote
 * cluster security integration. Ordering below matters — many components consume ones created
 * earlier, and several SetOnce fields are populated for use by other plugin hooks.
 *
 * @return the components to expose for dependency injection (a single stub usage-service list
 *         when security is disabled)
 */
Collection<Object> createComponents(
    Client client,
    ThreadPool threadPool,
    ClusterService clusterService,
    FeatureService featureService,
    ResourceWatcherService resourceWatcherService,
    ScriptService scriptService,
    NamedXContentRegistry xContentRegistry,
    Environment environment,
    IndexNameExpressionResolver expressionResolver,
    TelemetryProvider telemetryProvider,
    PersistentTasksService persistentTasksService,
    LinkedProjectConfigService linkedProjectConfigService,
    ProjectResolver projectResolver,
    ProjectRoutingResolver projectRoutingResolver
) throws Exception {
    logger.info("Security is {}", enabled ? "enabled" : "disabled");
    if (enabled == false) {
        // Disabled: expose only an empty usage-service holder so usage reporting still binds.
        return Collections.singletonList(new SecurityUsageServices(null, null, null, null, null, null));
    }
    this.client.set(client);
    // The settings in `environment` may have additional values over what was provided during construction
    // See Plugin#additionalSettings()
    this.settings = environment.settings();
    systemIndices.init(client, featureService, clusterService, projectResolver);
    this.migrationManager.set(new SecurityMigrations.Manager(clusterService, persistentTasksService, systemIndices));
    scriptServiceReference.set(scriptService);
    // We need to construct the checks here while the secure settings are still available.
    // If we wait until #getBoostrapChecks the secure settings will have been cleared/closed.
    final List<BootstrapCheck> checks = new ArrayList<>();
    Collections.addAll(
        checks,
        new TokenSSLBootstrapCheck(),
        new PkiRealmBootstrapCheck(getSslService()),
        new TransportTLSBootstrapCheck()
    );
    checks.addAll(InternalRealms.getBootstrapChecks(settings, environment));
    this.bootstrapChecks.set(Collections.unmodifiableList(checks));
    threadContext.set(threadPool.getThreadContext());
    List<Object> components = new ArrayList<>();
    securityContext.set(new SecurityContext(settings, threadPool.getThreadContext()));
    components.add(securityContext.get());
    final RestrictedIndices restrictedIndices = new RestrictedIndices(expressionResolver);
    // audit trail service construction
    final AuditTrail auditTrail = XPackSettings.AUDIT_ENABLED.get(settings)
        ? new LoggingAuditTrail(settings, clusterService, threadPool)
        : null;
    final AuditTrailService auditTrailService = new AuditTrailService(auditTrail, getLicenseState());
    components.add(auditTrailService);
    this.auditTrailService.set(auditTrailService);
    // token service (OAuth2-style access/refresh tokens backed by the security indices)
    final TokenService tokenService = new TokenService(
        settings,
        Clock.systemUTC(),
        client,
        getLicenseState(),
        securityContext.get(),
        systemIndices.getMainIndexManager(),
        systemIndices.getTokenIndexManager(),
        clusterService
    );
    this.tokenService.set(tokenService);
    components.add(tokenService);
    // realms construction
    final NativeUsersStore nativeUsersStore = new NativeUsersStore(settings, client, systemIndices.getMainIndexManager());
    final NativeRoleMappingStore nativeRoleMappingStore = new NativeRoleMappingStore(
        settings,
        client,
        systemIndices.getMainIndexManager(),
        scriptService
    );
    final ProjectStateRoleMapper projectStateRoleMapper = new ProjectStateRoleMapper(
        settings,
        scriptService,
        clusterService,
        projectResolver
    );
    final UserRoleMapper userRoleMapper = new CompositeRoleMapper(nativeRoleMappingStore, projectStateRoleMapper);
    final AnonymousUser anonymousUser = new AnonymousUser(settings);
    components.add(anonymousUser);
    final ReservedRealm reservedRealm = new ReservedRealm(environment, settings, nativeUsersStore, anonymousUser, threadPool);
    // Bundle of services handed to every SecurityExtension callback.
    final SecurityExtension.SecurityComponents extensionComponents = new ExtensionComponents(
        environment,
        client,
        clusterService,
        resourceWatcherService,
        userRoleMapper,
        projectResolver,
        telemetryProvider
    );
    Map<String, Realm.Factory> realmFactories = new HashMap<>(
        InternalRealms.getFactories(
            threadPool,
            settings,
            resourceWatcherService,
            getSslService(),
            nativeUsersStore,
            userRoleMapper,
            systemIndices.getMainIndexManager()
        )
    );
    // Extensions may contribute realm types, but must not shadow existing ones.
    for (SecurityExtension extension : securityExtensions) {
        Map<String, Realm.Factory> newRealms = extension.getRealms(extensionComponents);
        for (Map.Entry<String, Realm.Factory> entry : newRealms.entrySet()) {
            if (realmFactories.put(entry.getKey(), entry.getValue()) != null) {
                throw new IllegalArgumentException("Realm type [" + entry.getKey() + "] is already registered");
            }
        }
    }
    final Realms realms = new Realms(
        settings,
        environment,
        realmFactories,
        getLicenseState(),
        threadPool.getThreadContext(),
        reservedRealm
    );
    components.add(nativeUsersStore);
    components.add(new PluginComponentBinding<>(NativeRoleMappingStore.class, nativeRoleMappingStore));
    components.add(new PluginComponentBinding<>(UserRoleMapper.class, userRoleMapper));
    components.add(projectStateRoleMapper);
    components.add(reservedRealm);
    components.add(realms);
    this.realms.set(realms);
    systemIndices.getMainIndexManager().addStateListener(nativeRoleMappingStore::onSecurityIndexStateChange);
    // cache invalidation registry + service accounts
    final CacheInvalidatorRegistry cacheInvalidatorRegistry = new CacheInvalidatorRegistry();
    components.add(cacheInvalidatorRegistry);
    ServiceAccountService serviceAccountService = createServiceAccountService(
        components,
        cacheInvalidatorRegistry,
        extensionComponents,
        () -> new IndexServiceAccountTokenStore(
            settings,
            threadPool,
            getClock(),
            client,
            systemIndices.getMainIndexManager(),
            clusterService,
            cacheInvalidatorRegistry
        ),
        () -> new FileServiceAccountTokenStore(
            environment,
            resourceWatcherService,
            threadPool,
            clusterService,
            cacheInvalidatorRegistry
        )
    );
    components.add(serviceAccountService);
    systemIndices.getMainIndexManager().addStateListener(cacheInvalidatorRegistry::onSecurityIndexStateChange);
    final NativePrivilegeStore privilegeStore = new NativePrivilegeStore(
        settings,
        client,
        systemIndices.getMainIndexManager(),
        cacheInvalidatorRegistry,
        clusterService
    );
    components.add(privilegeStore);
    // role stores and caches
    final ReservedRolesStore reservedRolesStore = new ReservedRolesStore(Set.copyOf(INCLUDED_RESERVED_ROLES_SETTING.get(settings)));
    dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings));
    final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(settings);
    RoleDescriptor.setFieldPermissionsCache(fieldPermissionsCache);
    // Need to set to default if it wasn't set by an extension
    if (putRoleRequestBuilderFactory.get() == null) {
        putRoleRequestBuilderFactory.set(new PutRoleRequestBuilderFactory.Default());
    }
    if (bulkPutRoleRequestBuilderFactory.get() == null) {
        bulkPutRoleRequestBuilderFactory.set(new BulkPutRoleRequestBuilderFactory.Default());
    }
    if (createApiKeyRequestBuilderFactory.get() == null) {
        createApiKeyRequestBuilderFactory.set(new CreateApiKeyRequestBuilderFactory.Default());
    }
    if (getBuiltinPrivilegesResponseTranslator.get() == null) {
        getBuiltinPrivilegesResponseTranslator.set(new GetBuiltinPrivilegesResponseTranslator.Default());
    }
    if (updateApiKeyRequestTranslator.get() == null) {
        updateApiKeyRequestTranslator.set(new UpdateApiKeyRequestTranslator.Default());
    }
    if (bulkUpdateApiKeyRequestTranslator.get() == null) {
        bulkUpdateApiKeyRequestTranslator.set(new BulkUpdateApiKeyRequestTranslator.Default());
    }
    if (grantApiKeyRequestTranslator.get() == null) {
        grantApiKeyRequestTranslator.set(new RestGrantApiKeyAction.RequestTranslator.Default());
    }
    if (hasPrivilegesRequestBuilderFactory.get() == null) {
        // NOTE(review): sibling defaults above use set(); trySet() here silently tolerates a
        // concurrent/duplicate set — confirm the asymmetry is intentional.
        hasPrivilegesRequestBuilderFactory.trySet(new HasPrivilegesRequestBuilderFactory.Default());
    }
    if (reservedRoleNameCheckerFactory.get() == null) {
        reservedRoleNameCheckerFactory.set(new ReservedRoleNameChecker.Factory.Default());
    }
    if (fileRoleValidator.get() == null) {
        fileRoleValidator.set(new FileRoleValidator.Default());
    }
    if (samlAuthenticateResponseHandlerFactory.get() == null) {
        samlAuthenticateResponseHandlerFactory.set(new SamlAuthenticateResponseHandler.DefaultFactory());
    }
    components.add(
        new PluginComponentBinding<>(
            SamlAuthenticateResponseHandler.class,
            samlAuthenticateResponseHandlerFactory.get().create(settings, tokenService, getClock())
        )
    );
    this.fileRolesStore.set(
        new FileRolesStore(settings, environment, resourceWatcherService, getLicenseState(), xContentRegistry, fileRoleValidator.get())
    );
    ReservedRoleNameChecker reservedRoleNameChecker = reservedRoleNameCheckerFactory.get()
        .create(clusterService, projectResolver, fileRolesStore.get()::exists);
    components.add(new PluginComponentBinding<>(ReservedRoleNameChecker.class, reservedRoleNameChecker));
    // Extensions may contribute additional role providers, keyed by extension name
    // (LinkedHashMap preserves extension order for provider precedence).
    final Map<String, List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>>> customRoleProviders = new LinkedHashMap<>();
    for (SecurityExtension extension : securityExtensions) {
        final List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> providers = extension.getRolesProviders(
            extensionComponents
        );
        if (providers != null && providers.isEmpty() == false) {
            customRoleProviders.put(extension.extensionName(), providers);
        }
    }
    final NativeRolesStore nativeRolesStore = new NativeRolesStore(
        settings,
        client,
        getLicenseState(),
        systemIndices.getMainIndexManager(),
        clusterService,
        reservedRoleNameChecker,
        xContentRegistry
    );
    final ApiKeyService apiKeyService = new ApiKeyService(
        settings,
        Clock.systemUTC(),
        client,
        systemIndices.getMainIndexManager(),
        clusterService,
        cacheInvalidatorRegistry,
        threadPool,
        telemetryProvider.getMeterRegistry(),
        featureService
    );
    components.add(apiKeyService);
    final RoleProviders roleProviders = new RoleProviders(
        reservedRolesStore,
        fileRolesStore.get(),
        nativeRolesStore,
        customRoleProviders,
        getLicenseState()
    );
    // CompositeRolesStore is the single role-resolution entry point for authorization.
    final CompositeRolesStore allRolesStore = new CompositeRolesStore(
        settings,
        clusterService,
        roleProviders,
        privilegeStore,
        threadPool.getThreadContext(),
        getLicenseState(),
        fieldPermissionsCache,
        apiKeyService,
        serviceAccountService,
        projectResolver,
        dlsBitsetCache.get(),
        restrictedIndices,
        buildRoleBuildingExecutor(threadPool, settings),
        new DeprecationRoleDescriptorConsumer(clusterService, projectResolver, threadPool)
    );
    systemIndices.getMainIndexManager().addStateListener(allRolesStore::onSecurityIndexStateChange);
    final ProfileService profileService = new ProfileService(
        settings,
        getClock(),
        client,
        systemIndices.getProfileIndexManager(),
        realms
    );
    components.add(profileService);
    // We use the value of the {@code ENROLLMENT_ENABLED} setting to determine if the node is starting up with auto-generated
    // certificates (which have been generated by pre-startup scripts). In this case, and further if the node forms a new cluster by
    // itself, rather than joining an existing one, we complete the auto-configuration by generating and printing credentials and
    // enrollment tokens (when the .security index becomes available).
    // The generated information is output on the node's standard out (if attached).
    InitialNodeSecurityAutoConfiguration.maybeGenerateEnrollmentTokensAndElasticCredentialsOnNodeStartup(
        nativeUsersStore,
        systemIndices.getMainIndexManager(),
        getSslService(),
        client,
        environment,
        (runnable -> nodeStartedListenable.addListener(ActionListener.running(runnable))),
        threadPool
    );
    // to keep things simple, just invalidate all cached entries on license change. this happens so rarely that the impact should be
    // minimal
    getLicenseState().addListener(allRolesStore::invalidateAll);
    // authentication failure handling + operator privileges
    final AuthenticationFailureHandler failureHandler = createAuthenticationFailureHandler(realms, extensionComponents);
    final boolean operatorPrivilegesEnabled = OPERATOR_PRIVILEGES_ENABLED.get(settings);
    if (operatorPrivilegesEnabled) {
        logger.info("operator privileges are enabled");
        if (operatorOnlyRegistry.get() == null) {
            operatorOnlyRegistry.set(new DefaultOperatorOnlyRegistry(clusterService.getClusterSettings()));
        }
        operatorPrivilegesService.set(
            new OperatorPrivileges.DefaultOperatorPrivilegesService(
                getLicenseState(),
                new FileOperatorUsersStore(environment, resourceWatcherService),
                operatorOnlyRegistry.get()
            )
        );
    } else {
        operatorPrivilegesService.set(OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE);
    }
    // authentication service, optionally extended with internal custom authenticators
    final List<CustomAuthenticator> customAuthenticators = getCustomAuthenticatorFromExtensions(extensionComponents);
    PluggableAuthenticatorChain pluggableAuthenticatorChain = new PluggableAuthenticatorChain(customAuthenticators);
    components.add(pluggableAuthenticatorChain);
    components.addAll(customAuthenticators);
    authcService.set(
        new AuthenticationService(
            settings,
            realms,
            auditTrailService,
            failureHandler,
            threadPool,
            anonymousUser,
            tokenService,
            apiKeyService,
            serviceAccountService,
            operatorPrivilegesService.get(),
            pluggableAuthenticatorChain,
            telemetryProvider.getMeterRegistry()
        )
    );
    components.add(authcService.get());
    systemIndices.getMainIndexManager().addStateListener(authcService.get()::onSecurityIndexStateChange);
    // request interceptors; DLS/FLS-specific ones are only installed when the feature is on
    dlsFlsEnabled.set(XPackSettings.DLS_FLS_ENABLED.get(settings));
    Set<RequestInterceptor> requestInterceptors = Sets.newHashSet(
        new ResizeRequestInterceptor(threadPool, getLicenseState(), auditTrailService, dlsFlsEnabled.get()),
        new IndicesAliasesRequestInterceptor(threadPool.getThreadContext(), getLicenseState(), auditTrailService, dlsFlsEnabled.get())
    );
    if (dlsFlsEnabled.get()) {
        requestInterceptors.addAll(
            Arrays.asList(
                new SearchRequestInterceptor(threadPool, getLicenseState()),
                new ShardSearchRequestInterceptor(threadPool, getLicenseState()),
                new UpdateRequestInterceptor(threadPool, getLicenseState()),
                new BulkShardRequestInterceptor(threadPool, getLicenseState()),
                new DlsFlsLicenseRequestInterceptor(threadPool.getThreadContext(), getLicenseState()),
                new SearchRequestCacheDisablingInterceptor(threadPool, getLicenseState()),
                new ValidateRequestInterceptor(threadPool, getLicenseState())
            )
        );
    }
    requestInterceptors = Collections.unmodifiableSet(requestInterceptors);
    if (authorizationDenialMessages.get() == null) {
        authorizationDenialMessages.set(new AuthorizationDenialMessages.Default());
    }
    // authorization service
    final var authorizedProjectsResolver = getCustomAuthorizedProjectsResolverOrDefault(extensionComponents);
    final AuthorizationService authzService = new AuthorizationService(
        settings,
        allRolesStore,
        fieldPermissionsCache,
        clusterService,
        auditTrailService,
        failureHandler,
        threadPool,
        anonymousUser,
        getAuthorizationEngine(),
        requestInterceptors,
        getLicenseState(),
        expressionResolver,
        operatorPrivilegesService.get(),
        restrictedIndices,
        authorizationDenialMessages.get(),
        linkedProjectConfigService,
        projectResolver,
        authorizedProjectsResolver,
        new CrossProjectModeDecider(settings),
        projectRoutingResolver
    );
    components.add(nativeRolesStore); // used by roles actions
    components.add(reservedRolesStore); // used by roles actions
    components.add(allRolesStore); // for SecurityInfoTransportAction and clear roles cache
    components.add(authzService);
    components.add(new PluginComponentBinding<>(AuthorizedProjectsResolver.class, authorizedProjectsResolver));
    final SecondaryAuthenticator secondaryAuthenticator = new SecondaryAuthenticator(
        securityContext.get(),
        authcService.get(),
        auditTrailService
    );
    this.secondayAuthc.set(secondaryAuthenticator);
    components.add(secondaryAuthenticator);
    // IP filtering and transport-layer security interception
    ipFilter.set(new IPFilter(settings, auditTrailService, clusterService.getClusterSettings(), getLicenseState()));
    components.add(ipFilter.get());
    DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings());
    // remote cluster security (RCS) extension wiring
    RemoteClusterSecurityExtension.Components rcsComponents = new RemoteClusterSecurityComponents(
        authcService.get(),
        authzService,
        securityContext.get(),
        apiKeyService,
        resourceWatcherService,
        projectResolver,
        getLicenseState(),
        clusterService,
        environment,
        threadPool,
        settings,
        client
    );
    remoteClusterSecurityExtension.set(this.getRemoteClusterSecurityExtension(rcsComponents));
    remoteClusterAuthenticationService.set(remoteClusterSecurityExtension.get().getAuthenticationService());
    components.add(new PluginComponentBinding<>(RemoteClusterAuthenticationService.class, remoteClusterAuthenticationService.get()));
    var remoteClusterTransportInterceptor = remoteClusterSecurityExtension.get().getTransportInterceptor();
    components.add(new PluginComponentBinding<>(RemoteClusterTransportInterceptor.class, remoteClusterTransportInterceptor));
    securityInterceptor.set(
        new SecurityServerTransportInterceptor(
            settings,
            threadPool,
            authcService.get(),
            authzService,
            getSslService(),
            securityContext.get(),
            destructiveOperations,
            remoteClusterTransportInterceptor
        )
    );
    securityActionFilter.set(
        new SecurityActionFilter(
            authcService.get(),
            authzService,
            auditTrailService,
            getLicenseState(),
            threadPool,
            securityContext.get(),
            destructiveOperations,
            secondaryAuthActions.get() == null ? Set::of : secondaryAuthActions.get()
        )
    );
    components.add(
        new SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, apiKeyService)
    );
    reservedRoleMappingAction.set(new ReservedRoleMappingAction());
    // optional synchronizer that materializes built-in roles into the security index
    if (QUERYABLE_BUILT_IN_ROLES_ENABLED) {
        if (queryableRolesProviderFactory.get() == null) {
            queryableRolesProviderFactory.set(new QueryableBuiltInRolesProviderFactory.Default());
        }
        components.add(
            new QueryableBuiltInRolesSynchronizer(
                clusterService,
                featureService,
                queryableRolesProviderFactory.get(),
                nativeRolesStore,
                reservedRolesStore,
                fileRolesStore.get(),
                threadPool
            )
        );
    }
    cacheInvalidatorRegistry.validate();
    setClosableAndReloadableComponents(components);
    return components;
}
/**
 * Scans all created components — plus the remote cluster security extension, which is not
 * exposed externally — and records those that support settings reloading and those that must
 * be closed when the security plugin shuts down.
 */
private void setClosableAndReloadableComponents(List<Object> components) {
    final List<ReloadableSecurityComponent> reloadable = new ArrayList<>();
    final List<Closeable> closable = new ArrayList<>();
    final Iterable<Object> allComponents = Iterables.flatten(List.of(components, List.of(remoteClusterSecurityExtension.get())));
    for (Object component : allComponents) {
        final Object unwrapped = unwrapComponentObject(component);
        if (unwrapped instanceof ReloadableSecurityComponent reloadableComponent) {
            reloadable.add(reloadableComponent);
        }
        if (unwrapped instanceof Closeable closeableComponent) {
            closable.add(closeableComponent);
        }
    }
    this.reloadableComponents.set(List.copyOf(reloadable));
    this.closableComponents.set(List.copyOf(closable));
}
/** Returns the implementation wrapped by a {@link PluginComponentBinding}, or the component itself otherwise. */
private static Object unwrapComponentObject(Object component) {
    return component instanceof PluginComponentBinding<?, ?> pcb ? pcb.impl() : component;
}
/**
 * Obtains the remote cluster security extension from the loaded provider, rejecting any
 * implementation that does not come from an Elastic-internal package.
 */
private RemoteClusterSecurityExtension getRemoteClusterSecurityExtension(RemoteClusterSecurityExtension.Components components) {
    assert this.remoteClusterSecurityExtensionProvider.get() != null : "security plugin extensions should have been loaded first";
    final RemoteClusterSecurityExtension rcsExtension = this.remoteClusterSecurityExtensionProvider.get().getExtension(components);
    assert rcsExtension != null;
    if (isInternalRemoteClusterSecurityExtension(rcsExtension)) {
        return rcsExtension;
    }
    throw new IllegalStateException(
        "The ["
            + rcsExtension.getClass().getCanonicalName()
            + "] extension tried to install a "
            + RemoteClusterSecurityExtension.class.getSimpleName()
            + ". This functionality is not available to external extensions."
    );
}
/**
 * Collects custom authenticators contributed by security extensions. Only internal (Elastic)
 * extensions may install them, and at most one extension may contribute; violations fail
 * startup with {@link IllegalStateException}.
 *
 * @return the custom authenticators from the single contributing extension, or an empty list
 */
private List<CustomAuthenticator> getCustomAuthenticatorFromExtensions(SecurityExtension.SecurityComponents extensionComponents) {
    final Map<String, List<CustomAuthenticator>> byExtension = new HashMap<>();
    for (final SecurityExtension extension : securityExtensions) {
        final List<CustomAuthenticator> contributed = extension.getCustomAuthenticators(extensionComponents);
        if (contributed == null) {
            continue;
        }
        if (isInternalExtension(extension) == false) {
            throw new IllegalStateException(
                "The ["
                    + extension.extensionName()
                    + "] extension tried to install a "
                    + CustomAuthenticator.class.getSimpleName()
                    + ". This functionality is not available to external extensions."
            );
        }
        byExtension.put(extension.extensionName(), contributed);
    }
    if (byExtension.isEmpty()) {
        logger.debug(
            "No custom implementations for [{}] provided by security extensions.",
            CustomAuthenticator.class.getCanonicalName()
        );
        return List.of();
    }
    if (byExtension.size() > 1) {
        throw new IllegalStateException("Multiple extensions tried to install custom authenticators: " + byExtension.keySet());
    }
    final var onlyEntry = byExtension.entrySet().iterator().next();
    final String extensionName = onlyEntry.getKey();
    final List<CustomAuthenticator> customAuthenticators = onlyEntry.getValue();
    for (CustomAuthenticator authenticator : customAuthenticators) {
        logger.debug(
            "{} implementation [{}] provided by extension [{}]",
            CustomAuthenticator.class.getSimpleName(),
            authenticator.getClass().getCanonicalName(),
            extensionName
        );
    }
    return customAuthenticators;
}
/**
 * Returns the authorized-projects resolver supplied by an (internal-only) extension, falling
 * back to {@link AuthorizedProjectsResolver.Default} when no extension provides one.
 */
private AuthorizedProjectsResolver getCustomAuthorizedProjectsResolverOrDefault(
    SecurityExtension.SecurityComponents extensionComponents
) {
    final AuthorizedProjectsResolver custom = findValueFromExtensions("authorized projects resolver", extension -> {
        final AuthorizedProjectsResolver resolver = extension.getAuthorizedProjectsResolver(extensionComponents);
        if (resolver != null && isInternalExtension(extension) == false) {
            throw new IllegalStateException(
                "The ["
                    + extension.getClass().getName()
                    + "] extension tried to install a custom AuthorizedProjectsResolver. This functionality is not available to "
                    + "external extensions."
            );
        }
        return resolver;
    });
    return custom != null ? custom : new AuthorizedProjectsResolver.Default();
}
/**
 * Builds the {@link ServiceAccountService}. By default it is backed by the file and index
 * token stores; an internal extension may instead take over token management completely, in
 * which case the file/index stores are never instantiated.
 *
 * Bug fix: the extension-provided store is now registered as {@code extensionStore.get()}
 * (the actual {@code ServiceAccountTokenStore}) rather than the {@code Optional} wrapper
 * itself, matching the binding added just above and every other {@code components.add} call.
 *
 * @param components              mutable component list; the chosen stores are appended to it
 * @param cacheInvalidatorRegistry registry the default stores use for cache invalidation
 * @param extensionComponents     services handed to each {@link SecurityExtension}
 * @param indexServiceAccountTokenStoreSupplier lazily builds the index-backed store (default path only)
 * @param fileServiceAccountTokenStoreSupplier  lazily builds the file-backed store (default path only)
 */
private ServiceAccountService createServiceAccountService(
    List<Object> components,
    CacheInvalidatorRegistry cacheInvalidatorRegistry,
    SecurityExtension.SecurityComponents extensionComponents,
    Supplier<IndexServiceAccountTokenStore> indexServiceAccountTokenStoreSupplier,
    Supplier<FileServiceAccountTokenStore> fileServiceAccountTokenStoreSupplier
) {
    // Only internal extensions may override the token store, and at most one of them.
    Map<String, ServiceAccountTokenStore> accountTokenStoreByExtension = new HashMap<>();
    for (var extension : securityExtensions) {
        var serviceAccountTokenStore = extension.getServiceAccountTokenStore(extensionComponents);
        if (serviceAccountTokenStore != null) {
            if (isInternalExtension(extension) == false) {
                throw new IllegalStateException(
                    "The ["
                        + extension.getClass().getName()
                        + "] extension tried to install a custom ServiceAccountTokenStore. This functionality is not available to "
                        + "external extensions."
                );
            }
            accountTokenStoreByExtension.put(extension.extensionName(), serviceAccountTokenStore);
        }
    }
    if (accountTokenStoreByExtension.size() > 1) {
        throw new IllegalStateException(
            "More than one extension provided a ServiceAccountTokenStore override: " + accountTokenStoreByExtension.keySet()
        );
    }
    if (accountTokenStoreByExtension.isEmpty()) {
        // Default path: composite of file-backed and index-backed token stores.
        var fileServiceAccountTokenStore = fileServiceAccountTokenStoreSupplier.get();
        var indexServiceAccountTokenStore = indexServiceAccountTokenStoreSupplier.get();
        components.add(new PluginComponentBinding<>(NodeLocalServiceAccountTokenStore.class, fileServiceAccountTokenStore));
        components.add(fileServiceAccountTokenStore);
        components.add(indexServiceAccountTokenStore);
        cacheInvalidatorRegistry.registerAlias("service", Set.of("file_service_account_token", "index_service_account_token"));
        return new ServiceAccountService(
            client.get(),
            new CompositeServiceAccountTokenStore(
                List.of(fileServiceAccountTokenStore, indexServiceAccountTokenStore),
                client.get().threadPool().getThreadContext()
            ),
            indexServiceAccountTokenStore
        );
    }
    // Completely handover service account token management to the extension if provided;
    // this disables the index-managed service account tokens normally administered through
    // the service account token API.
    var extensionStore = accountTokenStoreByExtension.values().stream().findFirst();
    components.add(new PluginComponentBinding<>(NodeLocalServiceAccountTokenStore.class, (token, listener) -> {
        throw new IllegalStateException("Node local config not supported by [" + extensionStore.get().getClass() + "]");
    }));
    // Register the unwrapped store, not the Optional wrapper (previous code added the Optional).
    components.add(extensionStore.get());
    logger.debug("Service account authentication handled by extension, disabling file and index token stores");
    return new ServiceAccountService(client.get(), extensionStore.get());
}
/** True when the given security extension class belongs to an Elastic-internal package. */
private static boolean isInternalExtension(SecurityExtension extension) {
    return isInternalExtension(extension.getClass());
}
/** True when the RCS extension class belongs to an Elastic-internal package. */
private static boolean isInternalRemoteClusterSecurityExtension(RemoteClusterSecurityExtension extension) {
    return isInternalExtension(extension.getClass());
}
/**
 * A class is considered internal when its canonical name lives under an Elastic-owned
 * package. Classes without a canonical name (anonymous/local) are treated as external.
 */
private static boolean isInternalExtension(Class<?> extensionClass) {
    final String canonicalName = extensionClass.getCanonicalName();
    return canonicalName != null
        && (canonicalName.startsWith("org.elasticsearch.xpack.") || canonicalName.startsWith("co.elastic.elasticsearch."));
}
/**
 * Wraps the GENERIC pool in a {@link ThrottledTaskRunner} so that at most
 * {@code allocatedProcessors} role-building tasks run concurrently.
 */
private static Executor buildRoleBuildingExecutor(ThreadPool threadPool, Settings settings) {
    final int allocatedProcessors = EsExecutors.allocatedProcessors(settings);
    final ThrottledTaskRunner throttledTaskRunner = new ThrottledTaskRunner("build_roles", allocatedProcessors, threadPool.generic());
    return r -> throttledTaskRunner.enqueueTask(new ActionListener<>() {
        @Override
        public void onResponse(Releasable releasable) {
            // Release the throttle slot when the task completes, even if it throws.
            try (releasable) {
                r.run();
            }
        }

        @Override
        public void onFailure(Exception e) {
            if (r instanceof AbstractRunnable abstractRunnable) {
                abstractRunnable.onFailure(e);
            }
            // should be impossible, GENERIC pool doesn't reject anything
            // (deliberately logs and trips the assertion even after delegating above)
            logger.error("unexpected failure running " + r, e);
            assert false : new AssertionError("unexpected failure running " + r, e);
        }
    });
}
/** Authorization engine from extensions, or null to use the built-in RBAC engine. */
private AuthorizationEngine getAuthorizationEngine() {
    return findValueFromExtensions("authorization engine", extension -> extension.getAuthorizationEngine(settings));
}
/**
 * Returns the authentication failure handler from extensions, or builds the default one whose
 * WWW-Authenticate (and other) failure headers are merged from all active realms plus the
 * token and API-key schemes; the headers are recomputed on every license change.
 */
private AuthenticationFailureHandler createAuthenticationFailureHandler(
    final Realms realms,
    final SecurityExtension.SecurityComponents components
) {
    AuthenticationFailureHandler failureHandler = findValueFromExtensions(
        "authentication failure handler",
        extension -> extension.getAuthenticationFailureHandler(components)
    );
    if (failureHandler == null) {
        logger.debug("Using default authentication failure handler");
        Supplier<Map<String, List<String>>> headersSupplier = () -> {
            final Map<String, List<String>> defaultFailureResponseHeaders = new HashMap<>();
            // Union of every active realm's failure headers, de-duplicated per header key.
            realms.getActiveRealms().forEach((realm) -> {
                Map<String, List<String>> realmFailureHeaders = realm.getAuthenticationFailureHeaders();
                realmFailureHeaders.forEach(
                    (key, value) -> value.stream()
                        .filter(v -> defaultFailureResponseHeaders.computeIfAbsent(key, x -> new ArrayList<>()).contains(v) == false)
                        .forEach(v -> defaultFailureResponseHeaders.get(key).add(v))
                );
            });
            // Advertise the Bearer scheme when the token service is enabled.
            if (TokenService.isTokenServiceEnabled(settings)) {
                String bearerScheme = "Bearer realm=\"" + XPackField.SECURITY + "\"";
                if (defaultFailureResponseHeaders.computeIfAbsent("WWW-Authenticate", x -> new ArrayList<>())
                    .contains(bearerScheme) == false) {
                    defaultFailureResponseHeaders.get("WWW-Authenticate").add(bearerScheme);
                }
            }
            // Advertise the ApiKey scheme when the API key service is enabled.
            if (API_KEY_SERVICE_ENABLED_SETTING.get(settings)) {
                final String apiKeyScheme = "ApiKey";
                if (defaultFailureResponseHeaders.computeIfAbsent("WWW-Authenticate", x -> new ArrayList<>())
                    .contains(apiKeyScheme) == false) {
                    defaultFailureResponseHeaders.get("WWW-Authenticate").add(apiKeyScheme);
                }
            }
            return defaultFailureResponseHeaders;
        };
        DefaultAuthenticationFailureHandler finalDefaultFailureHandler = new DefaultAuthenticationFailureHandler(headersSupplier.get());
        failureHandler = finalDefaultFailureHandler;
        // License changes can enable/disable realms and services, so refresh the headers.
        getLicenseState().addListener(() -> { finalDefaultFailureHandler.setHeaders(headersSupplier.get()); });
    }
    return failureHandler;
}
/**
* Calls the provided function for each configured extension and return the value that was generated by the extensions.
* If multiple extensions provide a value, throws {@link IllegalStateException}.
* If no extensions provide a value (or if there are no extensions) returns {@code null}.
*/
@Nullable
private <T> T findValueFromExtensions(String valueType, Function<SecurityExtension, T> method) {
    T foundValue = null;
    String fromExtension = null;
    // Ask every extension; remember the first non-null value and reject a second one.
    for (SecurityExtension extension : securityExtensions) {
        final T candidate = method.apply(extension);
        if (candidate == null) {
            continue;
        }
        if (foundValue != null) {
            throw new IllegalStateException(
                "Extensions ["
                    + fromExtension
                    + "] and ["
                    + extension.extensionName()
                    + "] "
                    + " both attempted to provide a value for ["
                    + valueType
                    + "]"
            );
        }
        foundValue = candidate;
        fromExtension = extension.extensionName();
    }
    if (foundValue == null) {
        return null;
    }
    logger.debug("Using [{}] [{}] from extension [{}]", valueType, foundValue, fromExtension);
    return foundValue;
}
@Override
public Settings additionalSettings() {
    // Delegates to the static variant so the computation can be exercised without a plugin instance.
    return additionalSettings(settings, enabled);
}
// visible for tests
/**
 * Computes the extra settings security contributes to the node configuration.
 * When security is disabled this contributes nothing. When enabled, it adds the
 * security transport settings, forces (or validates) the security4 HTTP transport
 * type, and adds the user settings.
 */
static Settings additionalSettings(final Settings settings, final boolean enabled) {
    if (enabled == false) {
        return Settings.EMPTY;
    }
    final Settings.Builder builder = Settings.builder();
    builder.put(SecuritySettings.addTransportSettings(settings));
    if (NetworkModule.HTTP_TYPE_SETTING.exists(settings) == false) {
        // No explicit HTTP transport configured: default to security4.
        builder.put(NetworkModule.HTTP_TYPE_KEY, SecurityField.NAME4);
        SecurityHttpSettings.overrideSettings(builder, settings);
    } else {
        final String httpType = NetworkModule.HTTP_TYPE_SETTING.get(settings);
        if (SecurityField.NAME4.equals(httpType) == false) {
            // Security requires its own HTTP transport implementation; anything else is a config error.
            throw new IllegalArgumentException(
                String.format(
                    Locale.ROOT,
                    "http type setting [%s] must be [%s] but is [%s]",
                    NetworkModule.HTTP_TYPE_KEY,
                    SecurityField.NAME4,
                    httpType
                )
            );
        }
        SecurityHttpSettings.overrideSettings(builder, settings);
    }
    builder.put(SecuritySettings.addUserSettings(settings));
    return builder.build();
}
@Override
public List<Setting<?>> getSettings() {
    // Delegates to the static overload, passing this node's extensions and remote-cluster-security provider.
    return getSettings(securityExtensions, remoteClusterSecurityExtensionProvider.get());
}
/**
 * Get the {@link Setting setting configuration} for all security components, including those defined in extensions.
 * The returned list aggregates settings from SSL, IP filtering, auditing, authentication/authorization,
 * caches, and the remote-cluster-security extension provider.
 */
public static List<Setting<?>> getSettings(
    List<SecurityExtension> securityExtensions,
    RemoteClusterSecurityExtension.Provider remoteClusterSecurityExtensionProvider
) {
    List<Setting<?>> settingsList = new ArrayList<>();
    // The following just apply in node mode
    settingsList.add(XPackSettings.FIPS_MODE_ENABLED);
    settingsList.add(XPackSettings.FIPS_REQUIRED_PROVIDERS);
    // TLS settings
    SSLService.registerSettings(settingsList);
    // IP Filter settings
    IPFilter.addSettings(settingsList);
    // audit settings
    LoggingAuditTrail.registerSettings(settingsList);
    // authentication and authorization settings
    AnonymousUser.addSettings(settingsList);
    settingsList.addAll(InternalRealmsSettings.getSettings());
    ReservedRealm.addSettings(settingsList);
    AuthenticationService.addSettings(settingsList);
    AuthorizationService.addSettings(settingsList);
    Automatons.addSettings(settingsList);
    settingsList.addAll(CompositeRolesStore.getSettings());
    settingsList.addAll(DocumentSubsetBitsetCache.getSettings());
    settingsList.add(FieldPermissionsCache.CACHE_SIZE_SETTING);
    // token service lifecycle settings
    settingsList.add(TokenService.TOKEN_EXPIRATION);
    settingsList.add(TokenService.DELETE_INTERVAL);
    settingsList.add(TokenService.DELETE_TIMEOUT);
    settingsList.add(ProfileService.MAX_SIZE_SETTING);
    settingsList.addAll(SSLConfigurationSettings.getProfileSettings());
    // API key service settings (hashing, deletion, caches)
    settingsList.add(ApiKeyService.STORED_HASH_ALGO_SETTING);
    settingsList.add(ApiKeyService.DELETE_TIMEOUT);
    settingsList.add(ApiKeyService.DELETE_INTERVAL);
    settingsList.add(ApiKeyService.DELETE_RETENTION_PERIOD);
    settingsList.add(ApiKeyService.CACHE_HASH_ALGO_SETTING);
    settingsList.add(ApiKeyService.CACHE_MAX_KEYS_SETTING);
    settingsList.add(ApiKeyService.CACHE_TTL_SETTING);
    settingsList.add(ApiKeyService.DOC_CACHE_TTL_SETTING);
    settingsList.add(ApiKeyService.CERTIFICATE_IDENTITY_PATTERN_CACHE_TTL_SETTING);
    settingsList.add(ApiKeyService.CERTIFICATE_IDENTITY_PATTERN_CACHE_MAX_KEYS_SETTING);
    settingsList.add(NativePrivilegeStore.CACHE_MAX_APPLICATIONS_SETTING);
    settingsList.add(NativePrivilegeStore.CACHE_TTL_SETTING);
    settingsList.add(OPERATOR_PRIVILEGES_ENABLED);
    settingsList.add(CachingServiceAccountTokenStore.CACHE_TTL_SETTING);
    settingsList.add(CachingServiceAccountTokenStore.CACHE_HASH_ALGO_SETTING);
    settingsList.add(CachingServiceAccountTokenStore.CACHE_MAX_TOKENS_SETTING);
    settingsList.add(SimpleRole.CACHE_SIZE_SETTING);
    settingsList.add(NativeRoleMappingStore.LAST_LOAD_CACHE_ENABLED_SETTING);
    settingsList.addAll(remoteClusterSecurityExtensionProvider.getSettings());
    // hide settings
    settingsList.add(Setting.stringListSetting(SecurityField.setting("hide_settings"), Property.NodeScope, Property.Filtered));
    return settingsList;
}
/**
 * Declares the HTTP request headers that the REST layer should copy into the thread
 * context: basic-auth and secondary-auth credentials, the JWT client-authentication
 * header, and — conditionally — audit forwarding and run-as headers.
 */
@Override
public Collection<RestHeaderDefinition> getRestHeaders() {
    final Set<RestHeaderDefinition> restHeaders = new HashSet<>();
    // Always-on authentication headers.
    restHeaders.add(new RestHeaderDefinition(UsernamePasswordToken.BASIC_AUTH_HEADER, false));
    restHeaders.add(new RestHeaderDefinition(SecondaryAuthenticator.SECONDARY_AUTH_HEADER_NAME, false));
    restHeaders.add(new RestHeaderDefinition(JwtRealm.HEADER_CLIENT_AUTHENTICATION, false));
    // X-Forwarded-For is only relevant when auditing is enabled (multi-valued header).
    if (XPackSettings.AUDIT_ENABLED.get(settings)) {
        restHeaders.add(new RestHeaderDefinition(AuditTrail.X_FORWARDED_FOR_HEADER, true));
    }
    // Run-as header is only honored when the feature is enabled.
    if (AuthenticationServiceField.RUN_AS_ENABLED.get(settings)) {
        restHeaders.add(new RestHeaderDefinition(AuthenticationServiceField.RUN_AS_USER_HEADER, false));
    }
    return restHeaders;
}
/**
 * Returns the setting keys that must be filtered out of APIs that expose node settings:
 * whatever the operator listed under {@code hide_settings}, plus the security settings
 * nested inside transport profiles (which are part of a group and not declared directly).
 */
@Override
public List<String> getSettingsFilter() {
    final ArrayList<String> filtered = new ArrayList<>(settings.getAsList(SecurityField.setting("hide_settings")));
    // hide settings where we don't define them - they are part of a group...
    filtered.add("transport.profiles.*." + SecurityField.setting("*"));
    return filtered;
}
@Override
public List<BootstrapCheck> getBootstrapChecks() {
    // Checks are assembled elsewhere (in the bootstrapChecks holder) and returned as-is.
    return bootstrapChecks.get();
}
/**
 * Hooks security into each index as it is created: installs the DLS/FLS reader wrapper
 * and opt-out query cache (when DLS/FLS is enabled), and registers the search operation
 * listener that ties scroll contexts to the user that created them.
 */
@Override
public void onIndexModule(IndexModule module) {
    if (enabled) {
        assert getLicenseState() != null;
        if (XPackSettings.DLS_FLS_ENABLED.get(settings)) {
            assert dlsBitsetCache.get() != null;
            // Wrap index readers so document- and field-level security filters are applied on read.
            module.setReaderWrapper(
                indexService -> new SecurityIndexReaderWrapper(
                    shardId -> indexService.newSearchExecutionContext(
                        shardId.id(),
                        0,
                        // we pass a null index reader, which is legal and will disable rewrite optimizations
                        // based on index statistics, which is probably safer...
                        null,
                        () -> {
                            throw new IllegalArgumentException("permission filters are not allowed to use the current timestamp");
                        },
                        null,
                        // Don't use runtime mappings in the security query
                        emptyMap()
                    ),
                    dlsBitsetCache.get(),
                    securityContext.get(),
                    getLicenseState(),
                    indexService.getScriptService()
                )
            );
            /*
             * We need to forcefully overwrite the query cache implementation to use security's opt-out query cache implementation. This
             * implementation disables the query cache if field level security is used for a particular request. We have to forcefully
             * overwrite the query cache implementation to prevent data leakage to unauthorized users.
             */
            module.forceQueryCacheProvider(
                (indexSettings, cache) -> new OptOutQueryCache(indexSettings.getIndex(), cache, threadContext.get())
            );
        }
        // in order to prevent scroll ids from being maliciously crafted and/or guessed, a listener is added that
        // attaches information to the scroll context so that we can validate the user that created the scroll against
        // the user that is executing a scroll operation
        module.addSearchOperationListener(new SecuritySearchOperationListener(securityContext.get(), auditTrailService.get()));
    }
}
/**
 * Registers all transport actions provided by the security plugin. When security is
 * disabled only the usage/info actions are registered so that monitoring still works.
 */
@Override
public List<ActionHandler> getActions() {
    var usageAction = new ActionHandler(XPackUsageFeatureAction.SECURITY, SecurityUsageTransportAction.class);
    var infoAction = new ActionHandler(XPackInfoFeatureAction.SECURITY, SecurityInfoTransportAction.class);
    if (enabled == false) {
        return Arrays.asList(usageAction, infoAction);
    }
    return Stream.of(
        // cache management
        new ActionHandler(ClearRealmCacheAction.INSTANCE, TransportClearRealmCacheAction.class),
        new ActionHandler(ClearRolesCacheAction.INSTANCE, TransportClearRolesCacheAction.class),
        new ActionHandler(ClearPrivilegesCacheAction.INSTANCE, TransportClearPrivilegesCacheAction.class),
        new ActionHandler(ClearSecurityCacheAction.INSTANCE, TransportClearSecurityCacheAction.class),
        // user management
        new ActionHandler(GetUsersAction.INSTANCE, TransportGetUsersAction.class),
        new ActionHandler(ActionTypes.QUERY_USER_ACTION, TransportQueryUserAction.class),
        new ActionHandler(PutUserAction.INSTANCE, TransportPutUserAction.class),
        new ActionHandler(DeleteUserAction.INSTANCE, TransportDeleteUserAction.class),
        // role management
        new ActionHandler(GetRolesAction.INSTANCE, TransportGetRolesAction.class),
        new ActionHandler(ActionTypes.QUERY_ROLE_ACTION, TransportQueryRoleAction.class),
        new ActionHandler(PutRoleAction.INSTANCE, TransportPutRoleAction.class),
        new ActionHandler(ActionTypes.BULK_PUT_ROLES, TransportBulkPutRolesAction.class),
        new ActionHandler(ActionTypes.BULK_DELETE_ROLES, TransportBulkDeleteRolesAction.class),
        new ActionHandler(DeleteRoleAction.INSTANCE, TransportDeleteRoleAction.class),
        new ActionHandler(TransportChangePasswordAction.TYPE, TransportChangePasswordAction.class),
        new ActionHandler(AuthenticateAction.INSTANCE, TransportAuthenticateAction.class),
        new ActionHandler(TransportSetEnabledAction.TYPE, TransportSetEnabledAction.class),
        new ActionHandler(HasPrivilegesAction.INSTANCE, TransportHasPrivilegesAction.class),
        new ActionHandler(GetUserPrivilegesAction.INSTANCE, TransportGetUserPrivilegesAction.class),
        // role mappings
        new ActionHandler(GetRoleMappingsAction.INSTANCE, TransportGetRoleMappingsAction.class),
        new ActionHandler(PutRoleMappingAction.INSTANCE, TransportPutRoleMappingAction.class),
        new ActionHandler(DeleteRoleMappingAction.INSTANCE, TransportDeleteRoleMappingAction.class),
        // tokens
        new ActionHandler(CreateTokenAction.INSTANCE, TransportCreateTokenAction.class),
        new ActionHandler(InvalidateTokenAction.INSTANCE, TransportInvalidateTokenAction.class),
        new ActionHandler(GetCertificateInfoAction.INSTANCE, TransportGetCertificateInfoAction.class),
        new ActionHandler(RefreshTokenAction.INSTANCE, TransportRefreshTokenAction.class),
        // SAML
        new ActionHandler(SamlPrepareAuthenticationAction.INSTANCE, TransportSamlPrepareAuthenticationAction.class),
        new ActionHandler(SamlAuthenticateAction.INSTANCE, TransportSamlAuthenticateAction.class),
        new ActionHandler(SamlLogoutAction.INSTANCE, TransportSamlLogoutAction.class),
        new ActionHandler(SamlInvalidateSessionAction.INSTANCE, TransportSamlInvalidateSessionAction.class),
        new ActionHandler(TransportSamlCompleteLogoutAction.TYPE, TransportSamlCompleteLogoutAction.class),
        new ActionHandler(SamlSpMetadataAction.INSTANCE, TransportSamlSpMetadataAction.class),
        // OpenID Connect
        new ActionHandler(OpenIdConnectPrepareAuthenticationAction.INSTANCE, TransportOpenIdConnectPrepareAuthenticationAction.class),
        new ActionHandler(OpenIdConnectAuthenticateAction.INSTANCE, TransportOpenIdConnectAuthenticateAction.class),
        new ActionHandler(OpenIdConnectLogoutAction.INSTANCE, TransportOpenIdConnectLogoutAction.class),
        // privileges
        new ActionHandler(GetBuiltinPrivilegesAction.INSTANCE, TransportGetBuiltinPrivilegesAction.class),
        new ActionHandler(GetPrivilegesAction.INSTANCE, TransportGetPrivilegesAction.class),
        new ActionHandler(PutPrivilegesAction.INSTANCE, TransportPutPrivilegesAction.class),
        new ActionHandler(DeletePrivilegesAction.INSTANCE, TransportDeletePrivilegesAction.class),
        // API keys
        new ActionHandler(CreateApiKeyAction.INSTANCE, TransportCreateApiKeyAction.class),
        new ActionHandler(CreateCrossClusterApiKeyAction.INSTANCE, TransportCreateCrossClusterApiKeyAction.class),
        new ActionHandler(GrantApiKeyAction.INSTANCE, TransportGrantApiKeyAction.class),
        new ActionHandler(InvalidateApiKeyAction.INSTANCE, TransportInvalidateApiKeyAction.class),
        new ActionHandler(GetApiKeyAction.INSTANCE, TransportGetApiKeyAction.class),
        new ActionHandler(QueryApiKeyAction.INSTANCE, TransportQueryApiKeyAction.class),
        new ActionHandler(UpdateApiKeyAction.INSTANCE, TransportUpdateApiKeyAction.class),
        new ActionHandler(BulkUpdateApiKeyAction.INSTANCE, TransportBulkUpdateApiKeyAction.class),
        new ActionHandler(UpdateCrossClusterApiKeyAction.INSTANCE, TransportUpdateCrossClusterApiKeyAction.class),
        new ActionHandler(DelegatePkiAuthenticationAction.INSTANCE, TransportDelegatePkiAuthenticationAction.class),
        // service accounts
        new ActionHandler(CreateServiceAccountTokenAction.INSTANCE, TransportCreateServiceAccountTokenAction.class),
        new ActionHandler(DeleteServiceAccountTokenAction.INSTANCE, TransportDeleteServiceAccountTokenAction.class),
        new ActionHandler(GetServiceAccountCredentialsAction.INSTANCE, TransportGetServiceAccountCredentialsAction.class),
        new ActionHandler(GetServiceAccountNodesCredentialsAction.INSTANCE, TransportGetServiceAccountNodesCredentialsAction.class),
        new ActionHandler(GetServiceAccountAction.INSTANCE, TransportGetServiceAccountAction.class),
        // enrollment
        new ActionHandler(KibanaEnrollmentAction.INSTANCE, TransportKibanaEnrollmentAction.class),
        new ActionHandler(NodeEnrollmentAction.INSTANCE, TransportNodeEnrollmentAction.class),
        // user profiles
        new ActionHandler(ProfileHasPrivilegesAction.INSTANCE, TransportProfileHasPrivilegesAction.class),
        new ActionHandler(GetProfilesAction.INSTANCE, TransportGetProfilesAction.class),
        new ActionHandler(ActivateProfileAction.INSTANCE, TransportActivateProfileAction.class),
        new ActionHandler(UpdateProfileDataAction.INSTANCE, TransportUpdateProfileDataAction.class),
        new ActionHandler(SuggestProfilesAction.INSTANCE, TransportSuggestProfilesAction.class),
        new ActionHandler(SetProfileEnabledAction.INSTANCE, TransportSetProfileEnabledAction.class),
        // security settings & misc
        new ActionHandler(GetSecuritySettingsAction.INSTANCE, TransportGetSecuritySettingsAction.class),
        new ActionHandler(UpdateSecuritySettingsAction.INSTANCE, TransportUpdateSecuritySettingsAction.class),
        new ActionHandler(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION, TransportReloadRemoteClusterCredentialsAction.class),
        new ActionHandler(UpdateIndexMigrationVersionAction.INSTANCE, UpdateIndexMigrationVersionAction.TransportAction.class),
        new ActionHandler(GetSecurityStatsAction.INSTANCE, TransportSecurityStatsAction.class),
        usageAction,
        infoAction
    ).filter(Objects::nonNull).toList();
}
/**
 * Supplies the security action filter that intercepts every transport action,
 * or nothing at all when security is disabled.
 */
@Override
public List<ActionFilter> getActionFilters() {
    if (enabled) {
        return singletonList(securityActionFilter.get());
    }
    return emptyList();
}
/**
 * Registers all REST endpoints provided by the security plugin, or none when security
 * is disabled. Handlers receive the license state so they can reject requests when the
 * license does not permit the feature.
 */
@Override
public List<RestHandler> getRestHandlers(
    Settings settings,
    NamedWriteableRegistry namedWriteableRegistry,
    RestController restController,
    ClusterSettings clusterSettings,
    IndexScopedSettings indexScopedSettings,
    SettingsFilter settingsFilter,
    IndexNameExpressionResolver indexNameExpressionResolver,
    Supplier<DiscoveryNodes> nodesInCluster,
    Predicate<NodeFeature> clusterSupportsFeature
) {
    if (enabled == false) {
        return emptyList();
    }
    return Stream.<RestHandler>of(
        new RestAuthenticateAction(settings, securityContext.get(), getLicenseState()),
        // cache management
        new RestClearRealmCacheAction(settings, getLicenseState()),
        new RestClearRolesCacheAction(settings, getLicenseState()),
        new RestClearPrivilegesCacheAction(settings, getLicenseState()),
        new RestClearApiKeyCacheAction(settings, getLicenseState()),
        new RestClearServiceAccountTokenStoreCacheAction(settings, getLicenseState()),
        // users
        new RestGetUsersAction(settings, getLicenseState()),
        new RestQueryUserAction(settings, getLicenseState()),
        new RestPutUserAction(settings, getLicenseState()),
        new RestDeleteUserAction(settings, getLicenseState()),
        // roles
        new RestGetRolesAction(settings, getLicenseState()),
        new RestQueryRoleAction(settings, getLicenseState()),
        new RestBulkPutRolesAction(settings, getLicenseState(), bulkPutRoleRequestBuilderFactory.get()),
        new RestBulkDeleteRolesAction(settings, getLicenseState()),
        new RestPutRoleAction(settings, getLicenseState(), putRoleRequestBuilderFactory.get()),
        new RestDeleteRoleAction(settings, getLicenseState()),
        new RestChangePasswordAction(settings, securityContext.get(), getLicenseState()),
        new RestSetEnabledAction(settings, getLicenseState()),
        new RestHasPrivilegesAction(settings, securityContext.get(), getLicenseState(), hasPrivilegesRequestBuilderFactory.get()),
        new RestGetUserPrivilegesAction(settings, securityContext.get(), getLicenseState()),
        // role mappings
        new RestGetRoleMappingsAction(settings, getLicenseState()),
        new RestPutRoleMappingAction(settings, getLicenseState()),
        new RestDeleteRoleMappingAction(settings, getLicenseState()),
        // tokens
        new RestGetTokenAction(settings, getLicenseState()),
        new RestInvalidateTokenAction(settings, getLicenseState()),
        new RestGetCertificateInfoAction(),
        // SAML
        new RestSamlPrepareAuthenticationAction(settings, getLicenseState()),
        new RestSamlAuthenticateAction(settings, getLicenseState()),
        new RestSamlLogoutAction(settings, getLicenseState()),
        new RestSamlInvalidateSessionAction(settings, getLicenseState()),
        new RestSamlCompleteLogoutAction(settings, getLicenseState()),
        new RestSamlSpMetadataAction(settings, getLicenseState()),
        // OpenID Connect
        new RestOpenIdConnectPrepareAuthenticationAction(settings, getLicenseState()),
        new RestOpenIdConnectAuthenticateAction(settings, getLicenseState()),
        new RestOpenIdConnectLogoutAction(settings, getLicenseState()),
        // privileges
        new RestGetBuiltinPrivilegesAction(settings, getLicenseState(), getBuiltinPrivilegesResponseTranslator.get()),
        new RestGetPrivilegesAction(settings, getLicenseState()),
        new RestPutPrivilegesAction(settings, getLicenseState()),
        new RestDeletePrivilegesAction(settings, getLicenseState()),
        // API keys
        new RestCreateApiKeyAction(settings, getLicenseState(), createApiKeyRequestBuilderFactory.get()),
        new RestCreateCrossClusterApiKeyAction(settings, getLicenseState()),
        new RestUpdateApiKeyAction(settings, getLicenseState(), updateApiKeyRequestTranslator.get()),
        new RestBulkUpdateApiKeyAction(settings, getLicenseState(), bulkUpdateApiKeyRequestTranslator.get()),
        new RestUpdateCrossClusterApiKeyAction(settings, getLicenseState()),
        new RestGrantApiKeyAction(settings, getLicenseState(), grantApiKeyRequestTranslator.get()),
        new RestInvalidateApiKeyAction(settings, getLicenseState()),
        new RestGetApiKeyAction(settings, getLicenseState()),
        new RestQueryApiKeyAction(settings, getLicenseState()),
        new RestDelegatePkiAuthenticationAction(settings, getLicenseState()),
        // service accounts
        new RestCreateServiceAccountTokenAction(settings, getLicenseState()),
        new RestDeleteServiceAccountTokenAction(settings, getLicenseState()),
        new RestGetServiceAccountCredentialsAction(settings, getLicenseState()),
        new RestGetServiceAccountAction(settings, getLicenseState()),
        // enrollment
        new RestKibanaEnrollAction(settings, getLicenseState()),
        new RestNodeEnrollmentAction(settings, getLicenseState()),
        // user profiles
        new RestProfileHasPrivilegesAction(settings, getLicenseState()),
        new RestGetProfilesAction(settings, getLicenseState()),
        new RestActivateProfileAction(settings, getLicenseState()),
        new RestUpdateProfileDataAction(settings, getLicenseState()),
        new RestSuggestProfilesAction(settings, getLicenseState()),
        new RestEnableProfileAction(settings, getLicenseState()),
        new RestDisableProfileAction(settings, getLicenseState()),
        // security settings & stats
        new RestGetSecuritySettingsAction(settings, getLicenseState()),
        new RestUpdateSecuritySettingsAction(settings, getLicenseState()),
        new RestSecurityStatsAction(settings, getLicenseState(), clusterSupportsFeature)
    ).filter(Objects::nonNull).toList();
}
/**
 * Registers the single ingest processor contributed by security: the
 * {@code set_security_user} processor backed by the current security context.
 */
@Override
public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
    final SetSecurityUserProcessor.Factory factory = new SetSecurityUserProcessor.Factory(securityContext::get, settings);
    return Map.of(SetSecurityUserProcessor.TYPE, factory);
}
/**
 * Signals the node-started listenable so work deferred until node start can run.
 */
@Override
public void onNodeStarted() {
    nodeStartedListenable.onResponse(null);
}
/**
 * Realm settings were changed in 7.0. This method validates that the settings in use on this node match the new style of setting.
 * In 6.x a realm config would be
 * <pre>
 * xpack.security.authc.realms.file1.type: file
 * xpack.security.authc.realms.file1.order: 0
 * </pre>
 * In 7.x this realm should be
 * <pre>
 * xpack.security.authc.realms.file.file1.order: 0
 * </pre>
 * If confronted with an old style config, the ES Settings validation would simply fail with an error such as
 * <em>unknown setting [xpack.security.authc.realms.file1.order]</em>. This validation method provides an error that is easier to
 * understand and take action on.
 */
static void validateRealmSettings(Settings settings) {
    final Set<String> legacyStyleKeys = settings.keySet()
        .stream()
        .filter(key -> key.startsWith(RealmSettings.PREFIX))
        .filter(key -> {
            final String afterPrefix = key.substring(RealmSettings.PREFIX.length());
            // A 7.x-style key has two '.' separators after the prefix (type.name.setting);
            // at most one '.' indicates the old 6.x "name.setting" form.
            return afterPrefix.indexOf('.') == afterPrefix.lastIndexOf('.');
        })
        .collect(Collectors.toSet());
    if (legacyStyleKeys.isEmpty()) {
        return;
    }
    String sampleRealmSetting = RealmSettings.realmSettingPrefix(new RealmConfig.RealmIdentifier("file", "my_file")) + "order";
    throw new IllegalArgumentException(
        "Incorrect realm settings found. "
            + "Realm settings have been changed to include the type as part of the setting key.\n"
            + "For example '"
            + sampleRealmSetting
            + "'\n"
            + "Found invalid config: "
            + Strings.collectionToDelimitedString(legacyStyleKeys, ", ")
            + "\n"
            + "Please see the breaking changes documentation."
    );
}
/**
 * Validates that the node configuration is usable in a FIPS 140 compliant JVM.
 * Hard failures (collected into one {@link IllegalArgumentException}): JKS keystores,
 * non-PBKDF2 password hashing, and missing required security providers. Softer issues
 * (service-token / API-key / cache hash algorithms) are only logged for backwards
 * compatibility.
 */
static void validateForFips(Settings settings) {
    final List<String> validationErrors = new ArrayList<>();
    // JKS keystores declared explicitly by type are not FIPS compliant.
    Settings keystoreTypeSettings = settings.filter(k -> k.endsWith("keystore.type"))
        .filter(k -> settings.get(k).equalsIgnoreCase("jks"));
    if (keystoreTypeSettings.isEmpty() == false) {
        validationErrors.add(
            "JKS Keystores cannot be used in a FIPS 140 compliant JVM. Please "
                + "revisit ["
                + keystoreTypeSettings.toDelimitedString(',')
                + "] settings"
        );
    }
    // Also catch keystores whose type is not declared but is inferred as JKS from the path.
    Settings keystorePathSettings = settings.filter(k -> k.endsWith("keystore.path"))
        .filter(k -> settings.hasValue(k.replace(".path", ".type")) == false)
        .filter(k -> KeyStoreUtil.inferKeyStoreType(settings.get(k)).equals("jks"));
    if (keystorePathSettings.isEmpty() == false) {
        validationErrors.add(
            "JKS Keystores cannot be used in a FIPS 140 compliant JVM. Please "
                + "revisit ["
                + keystorePathSettings.toDelimitedString(',')
                + "] settings"
        );
    }
    // Stored password hashing must be PBKDF2-based in FIPS mode.
    final String selectedAlgorithm = XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings);
    if (selectedAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) {
        validationErrors.add(
            "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. Please set the "
                + "appropriate value for [ "
                + XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey()
                + " ] setting."
        );
    }
    final var serviceTokenStoredHashSettings = XPackSettings.SERVICE_TOKEN_HASHING_ALGORITHM;
    final var serviceTokenStoredHashAlgo = serviceTokenStoredHashSettings.get(settings);
    if (serviceTokenStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) {
        // log instead of validation error for backwards compatibility
        logger.warn(
            "Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM. "
                + "Please set the appropriate value for [{}] setting.",
            serviceTokenStoredHashSettings.getKey()
        );
    }
    final var apiKeyStoredHashSettings = ApiKeyService.STORED_HASH_ALGO_SETTING;
    final var apiKeyStoredHashAlgo = apiKeyStoredHashSettings.get(settings);
    if (apiKeyStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("ssha256") == false
        && apiKeyStoredHashAlgo.toLowerCase(Locale.ROOT).startsWith("pbkdf2") == false) {
        // log instead of validation error for backwards compatibility
        logger.warn(
            "[{}] is not recommended for stored API key hashing in a FIPS 140 JVM. The recommended hasher for [{}] is SSHA256.",
            apiKeyStoredHashSettings,
            apiKeyStoredHashSettings.getKey()
        );
    }
    // In-memory cache hashers: warn (not fail) on anything other than ssha256/pbkdf2*.
    final var cacheHashAlgoSettings = settings.filter(k -> k.endsWith(".cache.hash_algo"));
    cacheHashAlgoSettings.keySet().forEach((key) -> {
        final var setting = cacheHashAlgoSettings.get(key);
        assert setting != null;
        final var hashAlgoName = setting.toLowerCase(Locale.ROOT);
        if (hashAlgoName.equals("ssha256") == false && hashAlgoName.startsWith("pbkdf2") == false) {
            logger.warn(
                "[{}] is not recommended for in-memory credential hashing in a FIPS 140 JVM. "
                    + "The recommended hasher for [{}] is SSHA256.",
                setting,
                key
            );
        }
    });
    // Verify every required security provider (case-insensitive by name) is installed in this JVM.
    Set<String> foundProviders = new HashSet<>();
    for (Provider provider : java.security.Security.getProviders()) {
        foundProviders.add(provider.getName().toLowerCase(Locale.ROOT));
        if (logger.isTraceEnabled()) {
            logger.trace("Security Provider: " + provider.getName() + ", Version: " + provider.getVersionStr());
            provider.entrySet().forEach(entry -> { logger.trace("\t" + entry.getKey()); });
        }
    }
    final List<String> requiredProviders = XPackSettings.FIPS_REQUIRED_PROVIDERS.get(settings);
    logger.info("JVM Security Providers: " + foundProviders);
    if (requiredProviders != null && requiredProviders.isEmpty() == false) {
        List<String> unsatisfiedProviders = requiredProviders.stream()
            .map(s -> s.toLowerCase(Locale.ROOT))
            .filter(element -> foundProviders.contains(element) == false)
            .toList();
        if (unsatisfiedProviders.isEmpty() == false) {
            String errorMessage = "Could not find required FIPS security provider: " + unsatisfiedProviders;
            logger.error(errorMessage);
            validationErrors.add(errorMessage);
        }
    }
    // Report all collected failures at once so the operator can fix them in a single pass.
    if (validationErrors.isEmpty() == false) {
        final StringBuilder sb = new StringBuilder();
        sb.append("Validation for FIPS 140 mode failed: \n");
        int index = 0;
        for (String error : validationErrors) {
            sb.append(++index).append(": ").append(error).append(";\n");
        }
        throw new IllegalArgumentException(sb.toString());
    }
}
/**
 * Installs the security transport interceptor, which wraps both inbound handlers and
 * outbound senders so that authentication/authorization state travels with transport
 * requests. Returns nothing when security is disabled.
 */
@Override
public List<TransportInterceptor> getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) {
    if (enabled == false) { // don't register anything if we are not enabled
        return Collections.emptyList();
    }
    // Delegate lazily through securityInterceptor: it is populated during component creation,
    // which happens before any transport traffic flows (hence the asserts).
    return Collections.singletonList(new TransportInterceptor() {
        @Override
        public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler(
            String action,
            Executor executor,
            boolean forceExecution,
            TransportRequestHandler<T> actualHandler
        ) {
            assert securityInterceptor.get() != null;
            return securityInterceptor.get().interceptHandler(action, executor, forceExecution, actualHandler);
        }
        @Override
        public AsyncSender interceptSender(AsyncSender sender) {
            assert securityInterceptor.get() != null;
            return securityInterceptor.get().interceptSender(sender);
        }
    });
}
/**
 * Supplies the security-enabled Netty 4 transport (registered under {@code security4}).
 * Returns an empty map when security is disabled so the default transport is used.
 * The created transport is also stashed in {@code transportReference} for later access.
 */
@Override
public Map<String, Supplier<Transport>> getTransports(
    Settings settings,
    ThreadPool threadPool,
    PageCacheRecycler pageCacheRecycler,
    CircuitBreakerService circuitBreakerService,
    NamedWriteableRegistry namedWriteableRegistry,
    NetworkService networkService
) {
    if (enabled == false) { // don't register anything if we are not enabled
        return Collections.emptyMap();
    }
    IPFilter ipFilter = this.ipFilter.get();
    return Map.of(
        // security based on Netty 4
        SecurityField.NAME4,
        () -> {
            // Capture the constructed transport so other components can reach it later.
            transportReference.set(
                new SecurityNetty4ServerTransport(
                    settings,
                    TransportVersion.current(),
                    threadPool,
                    networkService,
                    pageCacheRecycler,
                    namedWriteableRegistry,
                    circuitBreakerService,
                    ipFilter,
                    getSslService(),
                    getNettySharedGroupFactory(settings),
                    remoteClusterAuthenticationService.get()
                )
            );
            return transportReference.get();
        }
    );
}
/**
 * Supplies the security-enabled HTTP transport (registered under {@code security4}).
 * Wires together IP filtering, optional TLS (with client-certificate extraction when
 * client auth is enabled), and header-level authentication that runs before the request
 * body is consumed. Returns an empty map when security is disabled.
 */
@Override
public Map<String, Supplier<HttpServerTransport>> getHttpTransports(
    Settings settings,
    ThreadPool threadPool,
    BigArrays bigArrays,
    PageCacheRecycler pageCacheRecycler,
    CircuitBreakerService circuitBreakerService,
    NamedXContentRegistry xContentRegistry,
    NetworkService networkService,
    HttpServerTransport.Dispatcher dispatcher,
    BiConsumer<HttpPreRequest, ThreadContext> perRequestThreadContext,
    ClusterSettings clusterSettings,
    TelemetryProvider telemetryProvider
) {
    if (enabled == false) { // don't register anything if we are not enabled
        return Collections.emptyMap();
    }
    final IPFilter ipFilter = this.ipFilter.get();
    // Adapts the IP filter to the channel-accept hook: connections are screened per profile/peer.
    final AcceptChannelHandler.AcceptPredicate acceptPredicate = new AcceptChannelHandler.AcceptPredicate() {
        @Override
        public void setBoundAddress(BoundTransportAddress boundHttpTransportAddress) {
            ipFilter.setBoundHttpTransportAddress(boundHttpTransportAddress);
        }
        @Override
        public boolean test(String profile, InetSocketAddress peerAddress) {
            return ipFilter.accept(profile, peerAddress);
        }
    };
    Map<String, Supplier<HttpServerTransport>> httpTransports = new HashMap<>();
    httpTransports.put(SecurityField.NAME4, () -> {
        final boolean ssl = HTTP_SSL_ENABLED.get(settings);
        final SSLService sslService = getSslService();
        final BiConsumer<Channel, ThreadContext> populateClientCertificate;
        final TLSConfig tlsConfig;
        if (ssl) {
            final SslProfile sslProfile = sslService.profile(XPackSettings.HTTP_SSL_PREFIX);
            final SslConfiguration sslConfiguration = sslProfile.configuration();
            // A server-side key is mandatory; fail early with an actionable message.
            if (SSLService.isConfigurationValidForServerUsage(sslConfiguration) == false) {
                throw new IllegalArgumentException(
                    "a key must be provided to run as a server. the key should be configured using the "
                        + "[xpack.security.http.ssl.key] or [xpack.security.http.ssl.keystore.path] setting"
                );
            }
            // Only extract client certificates when client auth is actually enabled.
            if (SSLService.isSSLClientAuthEnabled(sslConfiguration)) {
                populateClientCertificate = (channel, threadContext) -> extractClientCertificates(logger, threadContext, channel);
            } else {
                populateClientCertificate = (channel, threadContext) -> {};
            }
            tlsConfig = new TLSConfig(sslProfile::engine);
        } else {
            tlsConfig = TLSConfig.noTLS();
            populateClientCertificate = (channel, threadContext) -> {};
        }
        final AuthenticationService authenticationService = this.authcService.get();
        final ThreadContext threadContext = this.threadContext.get();
        return getHttpServerTransportWithHeadersValidator(
            settings,
            networkService,
            threadPool,
            xContentRegistry,
            dispatcher,
            clusterSettings,
            getNettySharedGroupFactory(settings),
            telemetryProvider,
            tlsConfig,
            acceptPredicate,
            (httpRequest, channel, listener) -> {
                HttpPreRequest httpPreRequest = HttpHeadersAuthenticatorUtils.asHttpPreRequest(httpRequest);
                // step 1: Populate the thread context with credentials and any other HTTP request header values (eg run-as) that the
                // authentication process looks for while doing its duty.
                perRequestThreadContext.accept(httpPreRequest, threadContext);
                populateClientCertificate.accept(channel, threadContext);
                RemoteHostHeader.process(channel, threadContext);
                // step 2: Run authentication on the now properly prepared thread-context.
                // This inspects and modifies the thread context.
                authenticationService.authenticate(httpPreRequest, listener.delegateFailureAndWrap((l, ignored) -> l.onResponse(null)));
            },
            (httpRequest, channel, listener) -> {
                // allow unauthenticated OPTIONS request through
                // this includes CORS preflight, and regular OPTIONS that return permitted methods for a given path
                // But still populate the thread context with the usual request headers (as for any other request that is dispatched)
                HttpPreRequest httpPreRequest = HttpHeadersAuthenticatorUtils.asHttpPreRequest(httpRequest);
                perRequestThreadContext.accept(httpPreRequest, threadContext);
                populateClientCertificate.accept(channel, threadContext);
                RemoteHostHeader.process(channel, threadContext);
                listener.onResponse(null);
            }
        );
    });
    return httpTransports;
}
// "public" so it can be used in tests
/**
 * Builds the security HTTP transport with separate header validators for OPTIONS and
 * non-OPTIONS requests. OPTIONS requests (e.g. CORS preflight) must not carry a payload
 * body: any declared body (Content-Length greater than zero) or chunked transfer
 * encoding is rejected with 400 before validation runs.
 */
public static Netty4HttpServerTransport getHttpServerTransportWithHeadersValidator(
    Settings settings,
    NetworkService networkService,
    ThreadPool threadPool,
    NamedXContentRegistry xContentRegistry,
    HttpServerTransport.Dispatcher dispatcher,
    ClusterSettings clusterSettings,
    SharedGroupFactory sharedGroupFactory,
    TelemetryProvider telemetryProvider,
    TLSConfig tlsConfig,
    @Nullable AcceptChannelHandler.AcceptPredicate acceptPredicate,
    HttpValidator httpValidator,
    HttpValidator httpOptionsValidator
) {
    return getHttpServerTransportWithHeadersValidator(
        settings,
        networkService,
        threadPool,
        xContentRegistry,
        dispatcher,
        clusterSettings,
        sharedGroupFactory,
        telemetryProvider,
        tlsConfig,
        acceptPredicate,
        (httpRequest, channel, listener) -> {
            if (httpRequest.method() == HttpMethod.OPTIONS) {
                // Reject ANY payload body: Content-Length > 0 or a chunked body.
                // (Fixed off-by-one: the previous "> 1" comparison let a 1-byte body through.)
                if (HttpUtil.getContentLength(httpRequest, -1L) > 0 || HttpUtil.isTransferEncodingChunked(httpRequest)) {
                    // OPTIONS requests with a body are not supported
                    listener.onFailure(
                        new ElasticsearchStatusException(
                            "OPTIONS requests with a payload body are not supported",
                            RestStatus.BAD_REQUEST
                        )
                    );
                } else {
                    httpOptionsValidator.validate(httpRequest, channel, listener);
                }
            } else {
                httpValidator.validate(httpRequest, channel, listener);
            }
        }
    );
}
// "public" so it can be used in tests
public static Netty4HttpServerTransport getHttpServerTransportWithHeadersValidator(
Settings settings,
NetworkService networkService,
ThreadPool threadPool,
NamedXContentRegistry xContentRegistry,
HttpServerTransport.Dispatcher dispatcher,
ClusterSettings clusterSettings,
SharedGroupFactory sharedGroupFactory,
TelemetryProvider telemetryProvider,
TLSConfig tlsConfig,
@Nullable AcceptChannelHandler.AcceptPredicate acceptPredicate,
HttpValidator httpValidator
) {
return new Netty4HttpServerTransport(
settings,
networkService,
threadPool,
xContentRegistry,
dispatcher,
clusterSettings,
sharedGroupFactory,
telemetryProvider,
tlsConfig,
acceptPredicate,
Objects.requireNonNull(httpValidator)
) {
@Override
protected void populatePerRequestThreadContext(RestRequest restRequest, ThreadContext threadContext) {
ThreadContext.StoredContext authenticationThreadContext = HttpHeadersAuthenticatorUtils.extractAuthenticationContext(
restRequest.getHttpRequest()
);
if (authenticationThreadContext != null) {
authenticationThreadContext.restore();
} else {
// this is an unexpected internal error condition where {@code Netty4HttpHeaderValidator} does not work correctly
throw new ElasticsearchSecurityException("Request is not authenticated");
}
}
};
}
@Override
public RestInterceptor getRestHandlerInterceptor(ThreadContext threadContext) {
return new SecurityRestFilter(
enabled,
threadContext,
secondayAuthc.get(),
auditTrailService.get(),
operatorPrivilegesService.get()
);
}
@Override
public List<ExecutorBuilder<?>> getExecutorBuilders(final Settings settings) {
if (enabled) {
final int allocatedProcessors = EsExecutors.allocatedProcessors(settings);
return List.of(
new FixedExecutorBuilder(
settings,
TokenService.THREAD_POOL_NAME,
1,
1000,
"xpack.security.authc.token.thread_pool",
EsExecutors.TaskTrackingConfig.DO_NOT_TRACK
),
new FixedExecutorBuilder(
settings,
SECURITY_CRYPTO_THREAD_POOL_NAME,
(allocatedProcessors + 1) / 2,
1000,
"xpack.security.crypto.thread_pool",
EsExecutors.TaskTrackingConfig.DO_NOT_TRACK
)
);
}
return Collections.emptyList();
}
@Override
public UnaryOperator<Map<String, IndexTemplateMetadata>> getIndexTemplateMetadataUpgrader() {
return templates -> {
// .security index is not managed by using templates anymore
templates.remove("security_audit_log");
// .security is a system index now. deleting another legacy template that's not used anymore
templates.remove("security-index-template");
return templates;
};
}
@Override
public Function<String, FieldPredicate> getFieldFilter() {
if (enabled) {
return index -> {
XPackLicenseState licenseState = getLicenseState();
IndicesAccessControl indicesAccessControl = INDICES_PERMISSIONS_VALUE.get(threadContext.get());
if (dlsFlsEnabled.get() == false) {
return FieldPredicate.ACCEPT_ALL;
}
if (indicesAccessControl == null) {
return FieldPredicate.ACCEPT_ALL;
}
assert indicesAccessControl.isGranted();
IndexNameExpressionResolver.assertExpressionHasNullOrDataSelector(index);
IndicesAccessControl.IndexAccessControl indexPermissions = indicesAccessControl.getIndexPermissions(index);
if (indexPermissions == null) {
return FieldPredicate.ACCEPT_ALL;
}
FieldPermissions fieldPermissions = indexPermissions.getFieldPermissions();
if (fieldPermissions.hasFieldLevelSecurity() == false) {
return FieldPredicate.ACCEPT_ALL;
}
if (FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState) == false) {
// check license last, once we know FLS is actually used
return FieldPredicate.ACCEPT_ALL;
}
return fieldPermissions.fieldPredicate();
};
}
return MapperPlugin.super.getFieldFilter();
}
@Override
public BiConsumer<DiscoveryNode, ClusterState> getJoinValidator() {
if (enabled) {
return new ValidateLicenseForFIPS(XPackSettings.FIPS_MODE_ENABLED.get(settings), getLicenseService());
}
return null;
}
@Override
public void reload(Settings settings) throws Exception {
if (enabled) {
final List<Exception> reloadExceptions = new ArrayList<>();
try {
reloadRemoteClusterCredentials(settings);
} catch (Exception ex) {
reloadExceptions.add(ex);
}
this.getReloadableSecurityComponents().forEach(component -> {
try {
component.reload(settings);
} catch (Exception ex) {
reloadExceptions.add(ex);
}
});
if (false == reloadExceptions.isEmpty()) {
final var combinedException = new ElasticsearchException(
"secure settings reload failed for one or more security components"
);
reloadExceptions.forEach(combinedException::addSuppressed);
throw combinedException;
}
} else {
ensureNoRemoteClusterCredentialsOnDisabledSecurity(settings);
}
}
/**
* This method uses a transport action internally to access classes that are injectable but not part of the plugin contract.
* See {@link TransportReloadRemoteClusterCredentialsAction} for more context.
*/
private void reloadRemoteClusterCredentials(Settings settingsWithKeystore) {
// Using `settings` instead of `settingsWithKeystore` is deliberate: we are not interested in secure settings here
if (DiscoveryNode.isStateless(settings)) {
// Stateless does not support remote cluster operations. Skip.
return;
}
final PlainActionFuture<ActionResponse.Empty> future = new UnsafePlainActionFuture<>(ThreadPool.Names.GENERIC);
getClient().execute(
ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION,
new TransportReloadRemoteClusterCredentialsAction.Request(settingsWithKeystore),
future
);
future.actionGet();
}
public Map<String, String> getAuthContextForSlowLog() {
if (this.securityContext.get() != null && this.securityContext.get().getAuthentication() != null) {
Authentication authentication = this.securityContext.get().getAuthentication();
Map<String, String> authContext;
if (authentication.isCrossClusterAccess()) {
Authentication originalAuthentication = Authentication.getAuthenticationFromCrossClusterAccessMetadata(authentication);
// For RCS 2.0, we log the user from the querying cluster
authContext = createAuthContextMap(originalAuthentication);
} else {
authContext = createAuthContextMap(authentication);
}
return authContext;
}
return Map.of();
}
private Map<String, String> createAuthContextMap(Authentication auth) {
Map<String, String> authContext = new HashMap<>();
Subject authenticatingSubject = auth.getAuthenticatingSubject();
Subject effectiveSubject = auth.getEffectiveSubject();
// The primary user.name and user.realm fields should reflect the AUTHENTICATING user
if (authenticatingSubject.getUser() != null) {
authContext.put("user.name", authenticatingSubject.getUser().principal());
authContext.put("user.realm", authenticatingSubject.getRealm().getName());
if (authenticatingSubject.getUser().fullName() != null) {
authContext.put("user.full_name", authenticatingSubject.getUser().fullName());
}
}
// Only include effective user if different from authenticating user (run-as)
if (auth.isRunAs()) {
if (effectiveSubject.getUser() != null) {
authContext.put("user.effective.name", effectiveSubject.getUser().principal());
authContext.put("user.effective.realm", effectiveSubject.getRealm().getName());
if (effectiveSubject.getUser().fullName() != null) {
authContext.put("user.effective.full_name", effectiveSubject.getUser().fullName());
}
}
}
authContext.put("auth.type", auth.getAuthenticationType().name());
if (auth.isApiKey()) {
authContext.put("apikey.id", Objects.toString(authenticatingSubject.getMetadata().get(AuthenticationField.API_KEY_ID_KEY)));
Object apiKeyName = authenticatingSubject.getMetadata().get(AuthenticationField.API_KEY_NAME_KEY);
if (apiKeyName != null) {
authContext.put("apikey.name", apiKeyName.toString());
}
}
return authContext;
}
static final | Security |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/util/introspection/FieldUtils.java | {
"start": 3649,
"end": 4539
} | interface ____. This must be manually searched for
// incase there is a public supersuperclass field hidden by a private/package
// superclass field.
Field match = null;
for (Class<?> class1 : ClassUtils.getAllInterfaces(cls)) {
try {
Field test = class1.getField(fieldName);
checkArgument(match == null, "Reference to field " + fieldName + " is ambiguous relative to " + cls
+ "; a matching field exists on two or more implemented interfaces.");
match = test;
} catch (NoSuchFieldException ex) { // NOPMD
// ignore
}
}
return match;
}
/**
* Returns the {@link Field} corresponding to the given fieldName for the specified class.
* <p>
* Caches the field after getting it for efficiency.
*
* @param fieldName the name of the field to get
* @param acls the | case |
java | dropwizard__dropwizard | docs/source/examples/core/src/main/java/io/dropwizard/documentation/AssetsApp.java | {
"start": 256,
"end": 1042
} | class ____ extends Application<Configuration> {
@Override
public void initialize(Bootstrap<Configuration> bootstrap) {
// core: AssetsApp#initialize->AssetsBundle
bootstrap.addBundle(new AssetsBundle("/assets/", "/"));
// core: AssetsApp#initialize->AssetsBundle
// core: AssetsApp#initialize->AssetsBundle->subfolders
bootstrap.addBundle(new AssetsBundle("/assets/css", "/css", null, "css"));
bootstrap.addBundle(new AssetsBundle("/assets/js", "/js", null, "js"));
bootstrap.addBundle(new AssetsBundle("/assets/fonts", "/fonts", null, "fonts"));
// core: AssetsApp#initialize->AssetsBundle->subfolders
}
@Override
public void run(Configuration configuration, Environment environment) {
}
}
| AssetsApp |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/MapWithGenericValuesDeserTest.java | {
"start": 1308,
"end": 1430
} | interface ____<K,V> extends java.io.Serializable {
public abstract Map<K,V> getEntries();
}
static | MapWrapper |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/EnumJavaType.java | {
"start": 5444,
"end": 5682
} | enum ____
*/
public T fromByte(Byte relationalForm) {
if ( relationalForm == null ) {
return null;
}
return getJavaTypeClass().getEnumConstants()[ relationalForm ];
}
/**
* Interpret a numeric value as the ordinal of the | type |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CheckReturnValueTest.java | {
"start": 39679,
"end": 40206
} | class ____ {
void foo(Lib lib) {
var unused = lib.a();
var unused2 = lib.b();
}
}
""")
.doTest();
}
@Test
public void inheritsCanIgnoreReturnValue() {
refactoringHelper
.addInputLines(
"Super.java",
"""
import com.google.errorprone.annotations.CheckReturnValue;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
@CheckReturnValue
| Test |
java | junit-team__junit5 | documentation/src/test/java/example/exception/FailedAssertionDemo.java | {
"start": 507,
"end": 913
} | class ____ {
// tag::user_guide[]
private final Calculator calculator = new Calculator();
// end::user_guide[]
@ExpectToFail
// tag::user_guide[]
@Test
void failsDueToUncaughtAssertionError() {
// The following incorrect assertion will cause a test failure.
// The expected value should be 2 instead of 99.
assertEquals(99, calculator.add(1, 1));
}
// end::user_guide[]
}
| FailedAssertionDemo |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/commands/CreateExtension.java | {
"start": 26146,
"end": 27561
} | class ____ extends HashMap<String, Object> {
public Optional<String> getStringValue(QuarkusExtensionData key) {
final Object o = get(key.key());
return Optional.ofNullable((o instanceof String) ? (String) o : null);
}
public void putIfAbsent(QuarkusExtensionData dataKey, Object value) {
this.putIfAbsent(dataKey.key(), value);
}
public void put(QuarkusExtensionData dataKey, Object value) {
this.put(dataKey.key(), value);
}
public void putIfNonEmptyString(QuarkusExtensionData dataKey, String value) {
if (!StringUtils.isEmpty(value)) {
this.put(dataKey.key(), value);
}
}
public void putIfNonNull(QuarkusExtensionData dataKey, String value) {
if (value != null) {
this.put(dataKey.key(), value);
}
}
public String getRequiredStringValue(QuarkusExtensionData key) {
return requireNonNull(getStringValue(key).orElse(null), key.key() + " is required");
}
public boolean containsNonEmptyStringForKey(QuarkusExtensionData dataKey) {
return !StringUtils.isEmpty(getStringValue(dataKey).orElse(null));
}
public boolean containsKey(QuarkusExtensionData dataKey) {
return containsKey(dataKey.key());
}
}
}
| EnhancedDataMap |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/MoshiTest.java | {
"start": 15744,
"end": 17051
} | class
____(adapter.fromJson("null")).isEqualTo(null);
assertThat(adapter.toJson(null)).isEqualTo("null");
}
@Test
public void longAdapter() throws Exception {
Moshi moshi = new Moshi.Builder().build();
JsonAdapter<Long> adapter = moshi.adapter(long.class).lenient();
assertThat(adapter.fromJson("1")).isEqualTo(1L);
assertThat(adapter.toJson(-2L)).isEqualTo("-2");
// Test min/max values
assertThat(adapter.fromJson("-9223372036854775808")).isEqualTo(Long.MIN_VALUE);
assertThat(adapter.toJson(Long.MIN_VALUE)).isEqualTo("-9223372036854775808");
assertThat(adapter.fromJson("9223372036854775807")).isEqualTo(Long.MAX_VALUE);
assertThat(adapter.toJson(Long.MAX_VALUE)).isEqualTo("9223372036854775807");
try {
adapter.fromJson("9223372036854775808");
fail();
} catch (JsonDataException expected) {
assertThat(expected)
.hasMessageThat()
.isEqualTo("Expected a long but was 9223372036854775808 at path $");
}
try {
adapter.fromJson("-9223372036854775809");
fail();
} catch (JsonDataException expected) {
assertThat(expected)
.hasMessageThat()
.isEqualTo("Expected a long but was -9223372036854775809 at path $");
}
// Nulls not allowed for long. | assertThat |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/ingest/IngestSettings.java | {
"start": 609,
"end": 1141
} | class ____ {
private IngestSettings() {
// utility class
}
public static final Setting<TimeValue> GROK_WATCHDOG_INTERVAL = Setting.timeSetting(
"ingest.grok.watchdog.interval",
TimeValue.timeValueSeconds(1),
Setting.Property.NodeScope
);
public static final Setting<TimeValue> GROK_WATCHDOG_MAX_EXECUTION_TIME = Setting.timeSetting(
"ingest.grok.watchdog.max_execution_time",
TimeValue.timeValueSeconds(1),
Setting.Property.NodeScope
);
}
| IngestSettings |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/sentinel/SentinelConnectionIntegrationTests.java | {
"start": 1087,
"end": 6567
} | class ____ extends TestSupport {
private final RedisClient redisClient;
private StatefulRedisSentinelConnection<String, String> connection;
private RedisSentinelCommands<String, String> sentinel;
private RedisSentinelAsyncCommands<String, String> sentinelAsync;
@Inject
public SentinelConnectionIntegrationTests(RedisClient redisClient) {
this.redisClient = redisClient;
}
@BeforeEach
void before() {
this.connection = this.redisClient.connectSentinel(SentinelTestSettings.SENTINEL_URI);
this.sentinel = getSyncConnection(this.connection);
this.sentinelAsync = this.connection.async();
}
protected RedisSentinelCommands<String, String> getSyncConnection(
StatefulRedisSentinelConnection<String, String> connection) {
return connection.sync();
}
@AfterEach
void after() {
this.connection.close();
}
@Test
void testAsync() {
RedisFuture<List<Map<String, String>>> future = sentinelAsync.masters();
assertThat(TestFutures.getOrTimeout(future)).isNotNull();
assertThat(future.isDone()).isTrue();
assertThat(future.isCancelled()).isFalse();
}
@Test
void testFuture() throws Exception {
RedisFuture<Map<String, String>> future = sentinelAsync.master("unknown master");
AtomicBoolean state = new AtomicBoolean();
future.exceptionally(throwable -> {
state.set(true);
return null;
});
assertThat(future.await(5, TimeUnit.SECONDS)).isTrue();
assertThat(state.get()).isTrue();
}
@Test
void testStatefulConnection() {
StatefulRedisSentinelConnection<String, String> statefulConnection = sentinel.getStatefulConnection();
assertThat(statefulConnection).isSameAs(statefulConnection.async().getStatefulConnection());
}
@Test
void testSyncConnection() {
StatefulRedisSentinelConnection<String, String> statefulConnection = sentinel.getStatefulConnection();
RedisSentinelCommands<String, String> sync = statefulConnection.sync();
assertThat(sync.ping()).isEqualTo("PONG");
}
@Test
void testSyncAsyncConversion() {
StatefulRedisSentinelConnection<String, String> statefulConnection = sentinel.getStatefulConnection();
assertThat(statefulConnection.sync().getStatefulConnection()).isSameAs(statefulConnection);
assertThat(statefulConnection.sync().getStatefulConnection().sync()).isSameAs(statefulConnection.sync());
}
@Test
void connectToOneNode() {
RedisSentinelCommands<String, String> connection = redisClient.connectSentinel(SentinelTestSettings.SENTINEL_URI)
.sync();
assertThat(connection.ping()).isEqualTo("PONG");
connection.getStatefulConnection().close();
}
@Test
void connectWithByteCodec() {
RedisSentinelCommands<byte[], byte[]> connection = redisClient
.connectSentinel(new ByteArrayCodec(), SentinelTestSettings.SENTINEL_URI).sync();
assertThat(connection.master(SentinelTestSettings.MASTER_ID.getBytes())).isNotNull();
connection.getStatefulConnection().close();
}
@Test
void sentinelConnectionShouldDiscardPassword() {
RedisURI redisURI = RedisURI.Builder.sentinel(TestSettings.host(), SentinelTestSettings.MASTER_ID)
.withPassword("hello-world").build();
redisClient.setOptions(ClientOptions.builder().build());
StatefulRedisSentinelConnection<String, String> connection = redisClient.connectSentinel(redisURI);
assertThat(connection.sync().ping()).isEqualTo("PONG");
connection.close();
redisClient.setOptions(ClientOptions.create());
}
@Test
void sentinelConnectionShouldSetClientName() {
RedisURI redisURI = RedisURI.Builder.sentinel(TestSettings.host(), SentinelTestSettings.MASTER_ID)
.withClientName("my-client").build();
StatefulRedisSentinelConnection<String, String> connection = redisClient.connectSentinel(redisURI);
assertThat(connection.sync().clientGetname()).isEqualTo(redisURI.getClientName());
connection.close();
}
@Test
void sentinelManagedConnectionShouldSetClientName() {
RedisURI redisURI = RedisURI.Builder.sentinel(TestSettings.host(), SentinelTestSettings.MASTER_ID)
.withClientName("my-client").build();
StatefulRedisConnection<String, String> connection = redisClient.connect(redisURI);
assertThat(connection.sync().clientGetname()).isEqualTo(redisURI.getClientName());
connection.sync().quit();
assertThat(connection.sync().clientGetname()).isEqualTo(redisURI.getClientName());
connection.close();
}
@Test
void sentinelWithAuthentication() {
RedisURI redisURI = RedisURI.Builder.sentinel(TestSettings.host(), 26381, SentinelTestSettings.MASTER_ID)
.withPassword("foobared".toCharArray()).withClientName("my-client").build();
redisClient.setOptions(ClientOptions.builder().pingBeforeActivateConnection(true).build());
StatefulRedisConnection<String, String> connection = redisClient.connect(redisURI);
connection.sync().quit();
assertThat(connection.sync().clientGetname()).isEqualTo(redisURI.getClientName());
connection.close();
}
}
| SentinelConnectionIntegrationTests |
java | apache__camel | test-infra/camel-test-infra-arangodb/src/test/java/org/apache/camel/test/infra/arangodb/services/ArangoDBServiceFactory.java | {
"start": 2933,
"end": 3027
} | class ____ extends ArangoDBRemoteInfraService implements ArangoDBService {
}
| ArangoDBRemoteService |
java | dropwizard__dropwizard | dropwizard-json-logging/src/test/java/io/dropwizard/logging/json/layout/MapBuilderTest.java | {
"start": 248,
"end": 5484
} | class ____ {
private int size = 4;
private TimestampFormatter timestampFormatter = new TimestampFormatter("yyyy-MM-dd'T'HH:mm:ss.SSSZ", ZoneId.of("UTC"));
private MapBuilder mapBuilder = new MapBuilder(timestampFormatter, Collections.emptyMap(), Collections.emptyMap(), size);
private String message = "Since the dawn of time...";
@Test
void testIncludeStringValue() {
assertThat(mapBuilder.add("message", true, message).build())
.containsOnly(entry("message", message));
}
@Test
void testDoNotIncludeStringValue() {
assertThat(mapBuilder.add("message", false, message).build()).isEmpty();
}
@Test
void testDoNotIncludeNullStringValue() {
String value = null;
assertThat(mapBuilder.add("message", true, value).build()).isEmpty();
}
@Test
void testIncludeNumberValue() {
assertThat(mapBuilder.addNumber("status", true, 200)
.build()).containsOnly(entry("status", 200));
}
@Test
void testIncludeMapValue() {
assertThat(mapBuilder.add("headers", true, Collections.singletonMap("userAgent", "Lynx/2.8.7"))
.build()).containsOnly(entry("headers", Collections.singletonMap("userAgent", "Lynx/2.8.7")));
}
@Test
void testDoNotIncludeEmptyMapValue() {
assertThat(mapBuilder.add("headers", true, Collections.emptyMap()).build()).isEmpty();
}
@Test
void testDoNotIncludeNullNumberValue() {
Double value = null;
assertThat(mapBuilder.addNumber("status", true, value).build()).isEmpty();
}
@Test
void testIncludeFormattedTimestamp() {
assertThat(mapBuilder.addTimestamp("timestamp", true, 1514906361000L).build())
.containsOnly(entry("timestamp", "2018-01-02T15:19:21.000+0000"));
}
@Test
void testIncludeNotFormattedTimestamp() {
assertThat(new MapBuilder(new TimestampFormatter(null, ZoneId.of("UTC")), Collections.emptyMap(),
Collections.emptyMap(), size)
.addTimestamp("timestamp", true, 1514906361000L)
.build()).containsOnly(entry("timestamp", 1514906361000L));
}
@Test
void testReplaceStringFieldName() {
assertThat(new MapBuilder(timestampFormatter, Collections.singletonMap("message", "@message"), Collections.emptyMap(), size)
.add("message", true, message)
.build()).containsOnly(entry("@message", message));
}
@Test
void testReplaceNumberFieldName() {
assertThat(new MapBuilder(timestampFormatter, Collections.singletonMap("status", "@status"), Collections.emptyMap(), size)
.addNumber("status", true, 200)
.build()).containsOnly(entry("@status", 200));
}
@Test
void testAddAdditionalField() {
assertThat(new MapBuilder(timestampFormatter, Collections.emptyMap(), Collections.singletonMap("version", "1.8.3"), size)
.add("message", true, message).build())
.containsOnly(entry("message", message), entry("version", "1.8.3"));
}
@Test
void testAddSupplier() {
assertThat(mapBuilder.add("message", true, () -> message).build())
.containsOnly(entry("message", message));
}
@Test
void testAddNumberSupplier() {
assertThat(mapBuilder.addNumber("status", true, () -> 200)
.build()).containsOnly(entry("status", 200));
}
@Test
void testAddMapSupplier() {
assertThat(mapBuilder.addMap("headers", true, () -> Collections.singletonMap("userAgent", "Lynx/2.8.7"))
.build()).containsOnly(entry("headers", Collections.singletonMap("userAgent", "Lynx/2.8.7")));
}
@Test
void testAddSupplierNotInvoked() {
assertThat(mapBuilder.add("status", false, () -> {throw new RuntimeException();}).build()).isEmpty();
}
@Test
void testAddNumberSupplierNotInvoked() {
assertThat(mapBuilder.addNumber("status", false, () -> {throw new RuntimeException();}).build()).isEmpty();
}
@Test
void testAddMapSupplierNotInvoked() {
assertThat(mapBuilder.addMap("status", false, () -> {throw new RuntimeException();}).build()).isEmpty();
}
@Test
void testTimestampIsAlwaysFirst() {
mapBuilder.add("status", true, "200");
mapBuilder.addTimestamp("timestamp", true, 1514906361000L);
mapBuilder.addNumber("code", true, 123);
mapBuilder.addTimestamp("timestamp2", true, 1514906361000L);
assertThat(mapBuilder.build().keySet())
.containsExactly("timestamp", "status", "code", "timestamp2");
}
@Test
void testTimestampIsAlwaysFirstWhenRenamed() {
final MapBuilder mapBuilder = new MapBuilder(timestampFormatter,
Collections.singletonMap("timestamp", "renamed-timestamp"), Collections.emptyMap(), size);
mapBuilder.add("status", true, "200");
mapBuilder.addNumber("code", true, 123);
mapBuilder.addTimestamp("timestamp2", true, 1514906361000L);
mapBuilder.addTimestamp("timestamp", true, 1514906361000L);
assertThat(mapBuilder.build().keySet())
.containsExactly("renamed-timestamp", "status", "code", "timestamp2");
}
}
| MapBuilderTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/orphan/EagerOneToManyPersistAndLoadTest.java | {
"start": 1642,
"end": 6603
} | class ____ {
public static final String CHILD_NAME = "Luigi";
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testEmptyCollectionPersistLoad(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent p = new Parent( 1l );
session.persist( p );
List<Child> children = p.getChildren();
List<Parent> parents = session.createQuery( "from Parent", Parent.class ).getResultList();
assertThat( parents.size() ).isEqualTo( 1 );
Parent parent = parents.get( 0 );
assertThat( parent ).isEqualTo( p );
assertThat( parent.getChildren() ).isEqualTo( children );
assertThat( children ).isNull();
}
);
}
@Test
public void testEmptyCollectionPersistQueryJoinFetch(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent p = new Parent( 1l );
session.persist( p );
}
);
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, 1l );
List<Child> children = p.getChildren();
assertTrue( Hibernate.isInitialized( children ) );
List<Parent> parents = session.createQuery(
"select p from Parent p left join fetch p.children",
Parent.class
).getResultList();
assertThat( parents.size() ).isEqualTo( 1 );
assertThat( parents.get( 0 ) ).isEqualTo( p );
assertTrue( Hibernate.isInitialized( children ) );
assertThat( children ).isEqualTo( parents.get( 0 ).getChildren() );
assertThat( children.size() ).isEqualTo( 0 );
}
);
}
@Test
public void testEmptyCollectionPersistQuery(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent p = new Parent( 1l );
session.persist( p );
}
);
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, 1l );
List<Child> children = p.getChildren();
assertTrue( Hibernate.isInitialized( children ) );
List<Parent> parents = session.createQuery(
"select p from Parent p ",
Parent.class
).getResultList();
assertThat( parents.size() ).isEqualTo( 1 );
assertThat( parents.get( 0 ) ).isEqualTo( p );
assertThat( parents.get( 0 ).getChildren() ).isEqualTo( children );
assertTrue( Hibernate.isInitialized( children ) );
assertThat( children.size() ).isEqualTo( 0 );
}
);
}
@Test
public void testCollectionPersistLoad(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent p = new Parent( 1l );
Child c = new Child( CHILD_NAME );
p.addChild( c );
session.persist( c );
session.persist( p );
List<Parent> parents = session.createQuery( "from Parent", Parent.class ).getResultList();
assertThat( parents.size() ).isEqualTo( 1 );
List<Child> children = parents.get( 0 ).getChildren();
assertThat( children.size() ).isEqualTo( 1 );
}
);
}
@Test
public void testCollectionPersistQueryJoinFetch(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent p = new Parent( 1l );
Child c = new Child( CHILD_NAME );
p.addChild( c );
session.persist( c );
session.persist( p );
}
);
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, 1l );
List<Child> children = p.getChildren();
assertTrue( Hibernate.isInitialized( p.getChildren() ) );
List<Parent> parents = session.createQuery(
"select p from Parent p join fetch p.children",
Parent.class
).getResultList();
assertThat( parents.size() ).isEqualTo( 1 );
assertThat( parents.get( 0 ) ).isEqualTo( p );
assertThat( parents.get( 0 ).getChildren() ).isEqualTo( children );
assertTrue( Hibernate.isInitialized( children ) );
assertThat( children.size() ).isEqualTo( 1 );
}
);
}
@Test
public void testCollectionPersistQuery(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent p = new Parent( 1l );
Child c = new Child( CHILD_NAME );
p.addChild( c );
session.persist( c );
session.persist( p );
}
);
scope.inTransaction(
session -> {
Parent p = session.get( Parent.class, 1l );
List<Child> children = p.getChildren();
assertTrue( Hibernate.isInitialized( children ) );
List<Parent> parents = session.createQuery(
"select p from Parent p",
Parent.class
).getResultList();
assertThat( parents.size() ).isEqualTo( 1 );
assertThat( parents.get( 0 ) ).isEqualTo( p );
assertTrue( Hibernate.isInitialized( children ) );
assertThat( children.size() ).isEqualTo( 1 );
assertThat( parents.get( 0 ).getChildren() ).isEqualTo( children );
Child child = children.get( 0 );
assertThat( child.getName() ).isEqualTo( CHILD_NAME );
}
);
}
@Entity(name = "Child")
public static | EagerOneToManyPersistAndLoadTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java | {
"start": 1062,
"end": 3335
} | class ____ extends ESTestCase {
public void testCompareParsingTokens() throws IOException {
BytesStreamOutput xsonOs = new BytesStreamOutput();
XContentGenerator xsonGen = XContentFactory.xContent(XContentType.SMILE).createGenerator(xsonOs);
BytesStreamOutput jsonOs = new BytesStreamOutput();
XContentGenerator jsonGen = XContentFactory.xContent(XContentType.JSON).createGenerator(jsonOs);
xsonGen.writeStartObject();
jsonGen.writeStartObject();
xsonGen.writeStringField("test", "value");
jsonGen.writeStringField("test", "value");
xsonGen.writeFieldName("arr");
xsonGen.writeStartArray();
jsonGen.writeFieldName("arr");
jsonGen.writeStartArray();
xsonGen.writeNumber(1);
jsonGen.writeNumber(1);
xsonGen.writeNull();
jsonGen.writeNull();
xsonGen.writeEndArray();
jsonGen.writeEndArray();
xsonGen.writeEndObject();
jsonGen.writeEndObject();
xsonGen.close();
jsonGen.close();
try (
XContentParser jsonParser = createParser(JsonXContent.jsonXContent, jsonOs.bytes());
XContentParser smileParser = createParser(SmileXContent.smileXContent, xsonOs.bytes())
) {
verifySameTokens(jsonParser, smileParser);
}
}
private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException {
while (true) {
XContentParser.Token token1 = parser1.nextToken();
XContentParser.Token token2 = parser2.nextToken();
if (token1 == null) {
assertThat(token2, nullValue());
return;
}
assertThat(token1, equalTo(token2));
switch (token1) {
case FIELD_NAME -> assertThat(parser1.currentName(), equalTo(parser2.currentName()));
case VALUE_STRING -> assertThat(parser1.text(), equalTo(parser2.text()));
case VALUE_NUMBER -> {
assertThat(parser1.numberType(), equalTo(parser2.numberType()));
assertThat(parser1.numberValue(), equalTo(parser2.numberValue()));
}
}
}
}
}
| JsonVsSmileTests |
java | quarkusio__quarkus | integration-tests/redis-cache/src/main/java/io/quarkus/it/cache/redis/ExpensiveResource.java | {
"start": 385,
"end": 1342
} | class ____ {
private final AtomicInteger invocations = new AtomicInteger(0);
@GET
@Path("/{keyElement1}/{keyElement2}/{keyElement3}")
@CacheResult(cacheName = "expensiveResourceCache")
public ExpensiveResponse getExpensiveResponse(@PathParam("keyElement1") @CacheKey String keyElement1,
@PathParam("keyElement2") @CacheKey String keyElement2, @PathParam("keyElement3") @CacheKey String keyElement3,
@QueryParam("foo") String foo) {
invocations.incrementAndGet();
ExpensiveResponse response = new ExpensiveResponse();
response.setResult(keyElement1 + " " + keyElement2 + " " + keyElement3 + " too!");
return response;
}
@POST
@CacheInvalidateAll(cacheName = "expensiveResourceCache")
public void invalidateAll() {
}
@GET
@Path("/invocations")
public int getInvocations() {
return invocations.get();
}
public static | ExpensiveResource |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/various/Antenna.java | {
"start": 387,
"end": 641
} | class ____ {
@Id public Integer id;
@Generated(event = { EventType.INSERT, EventType.UPDATE})
@Column()
public String longitude;
@Generated(event = EventType.INSERT)
@Column(insertable = false)
public String latitude;
public Double power;
}
| Antenna |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AutoValueSubclassLeakedTest.java | {
"start": 2224,
"end": 2340
} | class ____ {
abstract int foo();
@AutoValue.Builder
abstract static | Foo |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/descriptor/java/LocalTimeJavaTypeDescriptorTest.java | {
"start": 552,
"end": 1056
} | class ____ {
@Test
@JiraKey("HHH-17229")
public void testWrap() {
final LocalTimeJavaType javaType = LocalTimeJavaType.INSTANCE;
final Time sqlTime = new Time(
LocalDate.EPOCH.atTime( LocalTime.of( 0, 1, 2, 0 ) )
.toInstant( ZoneOffset.ofHours( 4 ) )
.plusMillis( 123 )
.toEpochMilli()
);
final LocalTime wrappedSqlTime = javaType.wrap( sqlTime, null );
assertThat( wrappedSqlTime ).isEqualTo( LocalTime.of( 20, 1, 2, 123_000_000 ) );
}
}
| LocalTimeJavaTypeDescriptorTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/aggregation/SumFunction.java | {
"start": 3033,
"end": 3284
} | class ____ extends SumFunction {
private static final long serialVersionUID = 1L;
@Override
public Object add(Object value1, Object value2) {
return (Float) value1 + (Float) value2;
}
}
static | FloatSum |
java | spring-projects__spring-boot | module/spring-boot-amqp/src/main/java/org/springframework/boot/amqp/autoconfigure/RabbitProperties.java | {
"start": 28458,
"end": 28766
} | class ____ extends Retry {
/**
* Whether retries are stateless or stateful.
*/
private boolean stateless = true;
public boolean isStateless() {
return this.stateless;
}
public void setStateless(boolean stateless) {
this.stateless = stateless;
}
}
private static final | ListenerRetry |
java | micronaut-projects__micronaut-core | management/src/main/java/io/micronaut/management/health/indicator/discovery/DiscoveryClientHealthIndicator.java | {
"start": 1754,
"end": 4238
} | class ____ implements HealthIndicator {
private final DiscoveryClient discoveryClient;
/**
* @param discoveryClient The Discovery client
*/
public DiscoveryClientHealthIndicator(DiscoveryClient discoveryClient) {
this.discoveryClient = discoveryClient;
}
@Override
public Publisher<HealthResult> getResult() {
return Flux.from(discoveryClient.getServiceIds())
.flatMap((Function<List<String>, Publisher<HealthResult>>) ids -> {
List<Flux<Map<String, List<ServiceInstance>>>> serviceMap = ids.stream()
.map(id -> {
Flux<List<ServiceInstance>> serviceList = Flux.from(discoveryClient.getInstances(id));
return serviceList
.map(serviceInstances -> Collections.singletonMap(id, serviceInstances));
})
.collect(Collectors.toList());
Flux<Map<String, List<ServiceInstance>>> mergedServiceMap = Flux.merge(serviceMap);
return mergedServiceMap.reduce(new LinkedHashMap<String, List<ServiceInstance>>(), (allServiceMap, service) -> {
allServiceMap.putAll(service);
return allServiceMap;
}).map(details -> {
HealthResult.Builder builder = HealthResult.builder(discoveryClient.getDescription(), HealthStatus.UP);
Stream<Map.Entry<String, List<ServiceInstance>>> entryStream = details.entrySet().stream();
Map<String, Object> value = entryStream.collect(
Collectors.toMap(Map.Entry::getKey, entry ->
entry
.getValue()
.stream()
.map(ServiceInstance::getURI)
.collect(Collectors.toList())
)
);
builder.details(Collections.singletonMap(
"services", value
));
return builder.build();
}).flux();
}).onErrorResume(throwable -> {
HealthResult.Builder builder = HealthResult.builder(discoveryClient.getDescription(), HealthStatus.DOWN);
builder.exception(throwable);
return Flux.just(builder.build());
});
}
}
| DiscoveryClientHealthIndicator |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10SerializationTests.java | {
"start": 537,
"end": 747
} | class ____ extends AbstractUnaryScalarSerializationTests<Log10> {
@Override
protected Log10 create(Source source, Expression child) {
return new Log10(source, child);
}
}
| Log10SerializationTests |
java | apache__camel | components/camel-beanio/src/test/java/org/apache/camel/dataformat/beanio/B1Record.java | {
"start": 855,
"end": 1889
} | class ____ extends Record {
String securityName;
public B1Record() {
}
public B1Record(String sedol, String source, String securityName) {
super(sedol, source);
this.securityName = securityName;
}
public String getSecurityName() {
return securityName;
}
public void setSecurityName(String securityName) {
this.securityName = securityName;
}
@Override
public int hashCode() {
return securityName != null ? securityName.hashCode() : 0;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
} else if (obj == this) {
return true;
} else {
B1Record record = (B1Record) obj;
return super.equals(record) && this.securityName.equals(record.getSecurityName());
}
}
@Override
public String toString() {
return "SEDOL[" + this.sedol + "], SOURCE[" + this.source + "], NAME[" + this.securityName + "]";
}
}
| B1Record |
java | apache__flink | flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/FailingTableApiTestStep.java | {
"start": 1931,
"end": 5729
} | class ____ implements TestStep {
private final Function<TableEnvAccessor, Table> tableQuery;
private final String sinkName;
public final Class<? extends Exception> expectedException;
public final String expectedErrorMessage;
FailingTableApiTestStep(
Function<TableEnvAccessor, Table> tableQuery,
String sinkName,
Class<? extends Exception> expectedException,
String expectedErrorMessage) {
Preconditions.checkArgument(
expectedException == ValidationException.class
|| expectedException == TableRuntimeException.class,
"Usually a Table API query should fail with either validation or runtime exception. "
+ "Otherwise this might require an update to the exception design.");
this.tableQuery = tableQuery;
this.sinkName = sinkName;
this.expectedException = expectedException;
this.expectedErrorMessage = expectedErrorMessage;
}
@Override
public TestKind getKind() {
return TestKind.FAILING_TABLE_API;
}
public Table toTable(TableEnvironment env) {
return tableQuery.apply(
new TableEnvAccessor() {
@Override
public Table from(String path) {
return env.from(path);
}
@Override
public Table fromCall(String path, Object... arguments) {
return env.fromCall(path, arguments);
}
@Override
public Table fromCall(
Class<? extends UserDefinedFunction> function, Object... arguments) {
return env.fromCall(function, arguments);
}
@Override
public Table fromValues(Object... values) {
return env.fromValues(values);
}
@Override
public Table fromValues(AbstractDataType<?> dataType, Object... values) {
return env.fromValues(dataType, values);
}
@Override
public Table sqlQuery(String query) {
return env.sqlQuery(query);
}
@Override
public Model fromModel(String modelPath) {
return env.fromModel(modelPath);
}
@Override
public Model from(ModelDescriptor modelDescriptor) {
return env.fromModel(modelDescriptor);
}
});
}
public void apply(TableEnvironment env) {
assertThatThrownBy(
() -> {
final Table table = toTable(env);
table.executeInsert(sinkName).await();
})
.satisfies(anyCauseMatches(expectedException, expectedErrorMessage));
}
public void applyAsSql(TableEnvironment env) {
assertThatThrownBy(
() -> {
final Table table = toTable(env);
final String query =
table.getQueryOperation()
.asSerializableString(DefaultSqlFactory.INSTANCE);
env.executeSql(String.format("INSERT INTO %s %s", sinkName, query))
.await();
})
.satisfies(anyCauseMatches(expectedException, expectedErrorMessage));
}
}
| FailingTableApiTestStep |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/CompositeTypeSerializerSnapshot.java | {
"start": 1621,
"end": 2356
} | class ____ can be
* used by simple serializers which 1) delegates its serialization to multiple nested serializers,
* and 2) may contain some extra static information that needs to be persisted as part of its
* snapshot.
*
* <p>Examples for this would be the {@link ListSerializer}, {@link MapSerializer}, {@link
* EitherSerializer}, etc., in which case the serializer, called the "outer" serializer in this
* context, has only some nested serializers that needs to be persisted as its snapshot, and nothing
* else that needs to be persisted as the "outer" snapshot. An example which has non-empty outer
* snapshots would be the {@link GenericArraySerializer}, which beyond the nested component
* serializer, also contains a | that |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-jersey/src/test/java/smoketest/jersey/JerseyApplicationPathAndManagementPortTests.java | {
"start": 1586,
"end": 2126
} | class ____ {
@LocalServerPort
private int port;
@LocalManagementPort
private int managementPort;
@Autowired
private TestRestTemplate testRestTemplate;
@Test
void applicationPathShouldNotAffectActuators() {
ResponseEntity<String> entity = this.testRestTemplate
.getForEntity("http://localhost:" + this.managementPort + "/actuator/health", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(entity.getBody()).contains("\"status\":\"UP\"");
}
}
| JerseyApplicationPathAndManagementPortTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/graph/internal/AttributeNodeImpl.java | {
"start": 5906,
"end": 12859
} | class ____<J,K,V> extends AttributeNodeImpl<J, V, K> {
private MapAttributeNodeImpl(
PluralPersistentAttribute<?,J,V> pluralAttribute,
@SuppressWarnings("unused") // a "witness" that this is really a Map
MapPersistentAttribute<?,K,?> attribute,
boolean mutable,
DomainType<V> valueGraphType, SimpleDomainType<K> keyGraphType) {
super( pluralAttribute, mutable, valueGraphType, keyGraphType );
}
private MapAttributeNodeImpl(AttributeNodeImpl<J, V, K> that, boolean mutable) {
super( that, mutable );
}
@Override
public SubGraphImplementor<K> addKeySubgraph() {
verifyMutability();
if ( keySubgraph == null ) {
keySubgraph = new SubGraphImpl<>( asManagedType( keyGraphType ), true );
}
return keySubgraph;
}
@Override
public SubGraphImplementor<V> addElementSubgraph() {
checkToMany();
verifyMutability();
if ( valueSubgraph == null ) {
valueSubgraph = new SubGraphImpl<>( asManagedType( valueGraphType ), true );
}
return valueSubgraph;
}
@Override
public AttributeNodeImplementor<J, V, K> makeCopy(boolean mutable) {
return !mutable && !isMutable() ? this : new MapAttributeNodeImpl<>( this, mutable );
}
}
@Override
public String getAttributeName() {
return getAttributeDescriptor().getName();
}
@Override
public PersistentAttribute<?, J> getAttributeDescriptor() {
return attribute;
}
@Override
public SubGraphImplementor<E> addValueSubgraph() {
verifyMutability();
// this one is intentionally lenient and disfavored
if ( valueSubgraph == null ) {
valueSubgraph = new SubGraphImpl<>( asManagedType( valueGraphType ), true );
}
return valueSubgraph;
}
@Override
public SubGraphImplementor<J> addSingularSubgraph() {
throw new UnsupportedOperationException("Not a singular attribute node");
}
@Override
public SubGraphImplementor<E> addElementSubgraph() {
throw new UnsupportedOperationException( "Not a collection-valued attribute node" );
}
@Override
public SubGraphImplementor<K> addKeySubgraph() {
throw new UnsupportedOperationException( "Not a Map-valued attribute node" );
}
protected void checkToOne() {
final Attribute.PersistentAttributeType attributeType = attribute.getPersistentAttributeType();
if ( attributeType != MANY_TO_ONE && attributeType != ONE_TO_ONE && attributeType != EMBEDDED ) {
throw new CannotContainSubGraphException( "Attribute '" + attribute.getName() + "' is not a to-one association" );
}
}
protected void checkToMany() {
final Attribute.PersistentAttributeType attributeType = attribute.getPersistentAttributeType();
if ( attributeType != MANY_TO_MANY && attributeType != ONE_TO_MANY ) {
throw new CannotContainSubGraphException( "Attribute '" + attribute.getName() + "' is not a to-many association" );
}
}
@Override @Deprecated
public SubGraphImplementor<E> makeSubGraph() {
verifyMutability();
if ( valueSubgraph == null ) {
valueSubgraph = new SubGraphImpl<>( asManagedType( valueGraphType ), true );
}
return valueSubgraph;
}
@Override @Deprecated
public <S> SubGraphImplementor<S> makeSubGraph(Class<S> subtype) {
final ManagedDomainType<E> managedType = asManagedType( valueGraphType );
if ( !managedType.getJavaType().isAssignableFrom( subtype ) ) {
throw new IllegalArgumentException( "Not a subtype: " + subtype.getName() );
}
@SuppressWarnings("unchecked")
final Class<? extends E> castSuptype = (Class<? extends E>) subtype;
final SubGraphImplementor<? extends E> result = makeSubGraph().addTreatedSubgraph( castSuptype );
//noinspection unchecked
return (SubGraphImplementor<S>) result;
}
@Override @Deprecated
public SubGraphImplementor<K> makeKeySubGraph() {
verifyMutability();
checkMap();
if ( keySubgraph == null ) {
keySubgraph = new SubGraphImpl<>( asManagedType( keyGraphType ), true );
}
return keySubgraph;
}
@Override @Deprecated
public <S> SubGraphImplementor<S> makeKeySubGraph(Class<S> subtype) {
checkMap();
final ManagedDomainType<K> type = asManagedType( keyGraphType );
if ( !type.getJavaType().isAssignableFrom( subtype ) ) {
throw new IllegalArgumentException( "Not a key subtype: " + subtype.getName() );
}
@SuppressWarnings("unchecked")
final Class<? extends K> castType = (Class<? extends K>) subtype;
final SubGraphImplementor<? extends K> result = makeKeySubGraph().addTreatedSubgraph( castType );
//noinspection unchecked
return (SubGraphImplementor<S>) result;
}
private void checkMap() {
if ( keyGraphType == null ) {
throw new CannotContainSubGraphException( "Attribute '" + description() + "' is not a Map" );
}
}
protected <T> ManagedDomainType<T> asManagedType(DomainType<T> domainType) {
if ( domainType instanceof ManagedDomainType<T> managedDomainType ) {
return managedDomainType;
}
else {
throw new CannotContainSubGraphException( "Attribute '" + description()
+ "' is of type '" + domainType.getTypeName()
+ "' which is not a managed type" );
}
}
private String description() {
return attribute.getDeclaringType().getTypeName() + "." + attribute.getName();
}
@Override
public String toString() {
return "AttributeNode[" + description() + "]";
}
@Override
public void merge(AttributeNodeImplementor<J, E, K> that) {
assert that.isMutable() == isMutable();
assert that.getAttributeDescriptor() == attribute;
final SubGraphImplementor<E> otherValueSubgraph = that.getValueSubgraph();
if ( otherValueSubgraph != null ) {
if ( valueSubgraph == null ) {
valueSubgraph = otherValueSubgraph.makeCopy( isMutable() );
}
else {
// even if immutable, we need to merge here
valueSubgraph.mergeInternal( otherValueSubgraph );
}
}
final SubGraphImplementor<K> otherKeySubgraph = that.getKeySubgraph();
if ( otherKeySubgraph != null ) {
if ( keySubgraph == null ) {
keySubgraph = otherKeySubgraph.makeCopy( isMutable() );
}
else {
// even if immutable, we need to merge here
keySubgraph.mergeInternal( otherKeySubgraph );
}
}
}
@Override
public Map<Class<?>, SubGraphImplementor<?>> getSubGraphs() {
if ( valueSubgraph == null ) {
return emptyMap();
}
else {
final HashMap<Class<?>, SubGraphImplementor<?>> map = new HashMap<>( valueSubgraph.getTreatedSubgraphs() );
map.put( attribute.getValueGraphType().getJavaType(), valueSubgraph );
return map;
}
}
@Override
public Map<Class<?>, SubGraphImplementor<?>> getKeySubGraphs() {
if ( keySubgraph == null ) {
return emptyMap();
}
else {
final HashMap<Class<?>, SubGraphImplementor<?>> map = new HashMap<>( keySubgraph.getTreatedSubgraphs() );
map.put( attribute.getKeyGraphType().getJavaType(), keySubgraph );
return map;
}
}
@Override
public SubGraphImplementor<E> getValueSubgraph() {
return valueSubgraph;
}
@Override
public SubGraphImplementor<K> getKeySubgraph() {
return keySubgraph;
}
}
| MapAttributeNodeImpl |
java | apache__camel | components/camel-geocoder/src/main/java/org/apache/camel/component/geocoder/GeoCoderNominatimProducer.java | {
"start": 1630,
"end": 7217
} | class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(GeoCoderNominatimProducer.class);
private final GeoCoderEndpoint endpoint;
private final CloseableHttpClient httpClient = HttpClients.createDefault();
public GeoCoderNominatimProducer(GeoCoderEndpoint endpoint) {
super(endpoint);
this.endpoint = endpoint;
}
@Override
protected void doStop() throws Exception {
httpClient.close();
}
@Override
public void process(Exchange exchange) throws Exception {
// headers take precedence
String address = exchange.getIn().getHeader(GeoCoderConstants.ADDRESS, String.class);
if (address == null) {
address = endpoint.getAddress();
}
String latlng = exchange.getIn().getHeader(GeoCoderConstants.LATLNG, String.class);
if (latlng == null) {
latlng = endpoint.getLatlng();
}
String geocoded = null;
if (latlng != null) {
String lat = StringHelper.before(latlng, ",");
String lng = StringHelper.after(latlng, ",");
LOG.debug("Geocode for lat/lng {}", latlng);
geocoded = query(lat, lng);
} else if (address != null) {
LOG.debug("Geocode for address {}", address);
geocoded = query(address);
}
LOG.debug("Geocode response {}", geocoded);
extractResult(geocoded, exchange);
}
private String query(String dlat, String dlon) throws IOException {
Map<String, String> params = new HashMap<>();
params.put("format", "jsonv2");
params.put("lat", dlat);
params.put("lon", dlon);
return queryForString("reverse", params);
}
private String query(String address) throws IOException {
Map<String, String> params = new HashMap<>();
params.put("format", "jsonv2");
params.put("addressdetails", "1");
params.put("q", address);
params.put("limit", "1");
return queryForString("search", params);
}
private String queryForString(String operation, Map<String, String> params) throws IOException {
String url = endpoint.getServerUrl();
if (!url.endsWith("/")) {
url += "/";
}
url += operation;
final ClassicRequestBuilder builder = ClassicRequestBuilder.get().setUri(url);
for (Map.Entry<String, String> entry : params.entrySet()) {
builder.addParameter(entry.getKey(), entry.getValue());
}
return httpClient.execute(builder.build(), resp -> EntityUtils.toString(resp.getEntity()));
}
protected void extractResult(String place, Exchange exchange) {
if (!endpoint.isHeadersOnly()) {
exchange.getIn().setBody(place);
}
if (place == null || place.isEmpty()) {
exchange.getIn().setHeader(GeoCoderConstants.STATUS, GeocoderStatus.ZERO_RESULTS);
return;
}
exchange.getIn().setHeader(GeoCoderConstants.STATUS, GeocoderStatus.OK);
if (place.startsWith("[") && place.endsWith("]")) {
place = place.substring(1, place.length() - 1);
}
// additional details
final Configuration conf = Configuration.defaultConfiguration().addOptions(Option.SUPPRESS_EXCEPTIONS);
final DocumentContext doc = JsonPath.using(conf).parse(place);
exchange.getIn().setHeader(GeoCoderConstants.ADDRESS, doc.read("$['display_name']"));
// just grab the first element and its lat and lon
setLatLngToExchangeHeader(doc.read("$['lat']"), doc.read("$['lon']"), exchange);
extractCountry(doc, exchange.getIn());
extractCity(doc, exchange.getIn());
extractPostalCode(doc, exchange.getIn());
extractRegion(doc, exchange.getIn());
}
private void setLatLngToExchangeHeader(String resLat, String resLng, Exchange exchange) {
if (resLat != null && resLng != null) {
exchange.getIn().setHeader(GeoCoderConstants.LAT, formatLatOrLon(resLat));
exchange.getIn().setHeader(GeoCoderConstants.LNG, formatLatOrLon(resLng));
String resLatlng = formatLatOrLon(resLat) + ", " + formatLatOrLon(resLng);
exchange.getIn().setHeader(GeoCoderConstants.LATLNG, resLatlng);
}
}
private void extractCountry(DocumentContext doc, Message in) {
String code = doc.read("$['address']['country_code']");
if (code != null) {
code = code.toUpperCase();
}
in.setHeader(GeoCoderConstants.COUNTRY_SHORT, code);
in.setHeader(GeoCoderConstants.COUNTRY_LONG, doc.read("$['address']['country']"));
}
private void extractCity(DocumentContext doc, Message in) {
in.setHeader(GeoCoderConstants.CITY, doc.read("$['address']['city']"));
}
private void extractPostalCode(DocumentContext doc, Message in) {
in.setHeader(GeoCoderConstants.POSTAL_CODE, doc.read("$['address']['postcode']"));
}
private void extractRegion(DocumentContext doc, Message in) {
String code = doc.read("$['address']['state_code']");
if (code != null) {
code = code.toUpperCase();
}
in.setHeader(GeoCoderConstants.REGION_CODE, code);
in.setHeader(GeoCoderConstants.REGION_NAME, doc.read("$['address']['state']"));
}
private String formatLatOrLon(String value) {
return String.format(Locale.ENGLISH, "%.8f", Double.parseDouble(value));
}
}
| GeoCoderNominatimProducer |
java | mapstruct__mapstruct | integrationtest/src/test/resources/recordsCrossModuleTest/mapper/src/test/java/org/mapstruct/itest/records/mapper/RecordsTest.java | {
"start": 451,
"end": 1274
} | class ____ {
@Test
public void shouldMapRecord() {
CustomerEntity customer = CustomerMapper.INSTANCE.fromRecord( new CustomerDto( "Kermit", "kermit@test.com" ) );
assertThat( customer ).isNotNull();
assertThat( customer.getName() ).isEqualTo( "Kermit" );
assertThat( customer.getMail() ).isEqualTo( "kermit@test.com" );
}
@Test
public void shouldMapIntoRecord() {
CustomerEntity entity = new CustomerEntity();
entity.setName( "Kermit" );
entity.setMail( "kermit@test.com" );
CustomerDto customer = CustomerMapper.INSTANCE.toRecord( entity );
assertThat( customer ).isNotNull();
assertThat( customer.name() ).isEqualTo( "Kermit" );
assertThat( customer.email() ).isEqualTo( "kermit@test.com" );
}
}
| RecordsTest |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/model/assignment/GetterWrapperForCollectionsAndMaps.java | {
"start": 1168,
"end": 2923
} | class ____ extends WrapperForCollectionsAndMaps {
private final boolean ignoreMapNull;
public GetterWrapperForCollectionsAndMaps(Assignment decoratedAssignment,
List<Type> thrownTypesToExclude,
Type targetType,
boolean fieldAssignment) {
this( decoratedAssignment, thrownTypesToExclude, targetType, null, fieldAssignment );
}
/**
* @param decoratedAssignment source RHS
* @param thrownTypesToExclude set of types to exclude from re-throwing
* @param targetType the target type
* @param fieldAssignment true when this the assignment is to a field rather than via accessors
*/
public GetterWrapperForCollectionsAndMaps(Assignment decoratedAssignment,
List<Type> thrownTypesToExclude,
Type targetType,
NullValuePropertyMappingStrategyGem nvpms,
boolean fieldAssignment) {
super(
decoratedAssignment,
thrownTypesToExclude,
targetType,
fieldAssignment
);
this.ignoreMapNull = nvpms == IGNORE;
}
@Override
public Set<Type> getImportTypes() {
Set<Type> imported = new HashSet<>( super.getImportTypes() );
if ( getSourcePresenceCheckerReference() == null ) {
imported.addAll( getNullCheckLocalVarType().getImportTypes() );
}
return imported;
}
public boolean isIgnoreMapNull() {
return ignoreMapNull;
}
}
| GetterWrapperForCollectionsAndMaps |
java | quarkusio__quarkus | extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/valueextractor/SingletonCustomValueExtractorTest.java | {
"start": 628,
"end": 1161
} | class ____ {
@Inject
ValidatorFactory validatorFactory;
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap
.create(JavaArchive.class)
.addClasses(TestBean.class, Container.class, SingletonContainerValueExtractor.class));
@Test
public void testSingletonCustomValueExtractor() {
assertThat(validatorFactory.getValidator().validate(new TestBean())).hasSize(1);
}
public static | SingletonCustomValueExtractorTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationUtilsTests.java | {
"start": 58034,
"end": 58269
} | interface ____ {
@AliasFor(attribute = "bar")
String[] foo() default "";
@AliasFor(attribute = "quux")
String[] bar() default "";
}
@AliasForWithMirroredAliasForWrongAttribute
static | AliasForWithMirroredAliasForWrongAttribute |
java | grpc__grpc-java | api/src/test/java/io/grpc/MetadataTest.java | {
"start": 2098,
"end": 2787
} | class ____ implements Metadata.BinaryStreamMarshaller<Fish> {
@Override
public InputStream toStream(Fish fish) {
return new ByteArrayInputStream(FISH_MARSHALLER.toBytes(fish));
}
@Override
public Fish parseStream(InputStream stream) {
try {
return FISH_MARSHALLER.parseBytes(ByteStreams.toByteArray(stream));
} catch (IOException ioe) {
throw new AssertionError();
}
}
}
private static final Metadata.BinaryStreamMarshaller<Fish> FISH_STREAM_MARSHALLER =
new FishStreamMarshaller();
/** A pattern commonly used to avoid unnecessary serialization of immutable objects. */
private static final | FishStreamMarshaller |
java | quarkusio__quarkus | extensions/narayana-jta/runtime/src/test/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorderTest.java | {
"start": 337,
"end": 2242
} | class ____ {
//this string has been chosen as when hashed and Base64 encoded the resulted byte array will have a length > 28, so it will be trimmed too.
public static final String NODE_NAME_TO_SHORTEN = "dfe2420d-b12e-4ec3-92c0-ee7c4";
@Test
void testByteLengthWithLongerString() throws NoSuchAlgorithmException {
// create nodeNames larger than 28 bytes
assertTrue(NODE_NAME_TO_SHORTEN.getBytes(StandardCharsets.UTF_8).length > 28);
NarayanaJtaRecorder recorder = new NarayanaJtaRecorder(new RuntimeValue<>());
String shorterNodeName = recorder.shortenNodeName(NODE_NAME_TO_SHORTEN);
int numberOfBytes = shorterNodeName.getBytes(StandardCharsets.UTF_8).length;
assertEquals(28, numberOfBytes,
"node name bytes was not 28 bytes limit, number of bytes is " + numberOfBytes);
}
@Test
void testPredictableConversion() throws NoSuchAlgorithmException {
assertTrue(NODE_NAME_TO_SHORTEN.getBytes(StandardCharsets.UTF_8).length > 28);
NarayanaJtaRecorder recorder = new NarayanaJtaRecorder(new RuntimeValue<>());
String firstConversion = recorder.shortenNodeName(NODE_NAME_TO_SHORTEN);
int numberOfBytes = firstConversion.getBytes(StandardCharsets.UTF_8).length;
assertEquals(28, numberOfBytes,
"node name bytes was not 28 bytes limit, number of bytes is " + numberOfBytes);
String secondConversion = recorder.shortenNodeName(NODE_NAME_TO_SHORTEN);
numberOfBytes = secondConversion.getBytes(StandardCharsets.UTF_8).length;
assertEquals(28, numberOfBytes,
"node name bytes was not 28 bytes limit, number of bytes is " + numberOfBytes);
assertEquals(firstConversion, secondConversion,
"Node names were shortened differently: " + firstConversion + " " + secondConversion);
}
}
| NarayanaJtaRecorderTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ObjectUtils.java | {
"start": 31569,
"end": 32692
} | class ____ not override toString itself. {@code null} will return {@code null}.
*
* <pre>
* ObjectUtils.identityToString(null) = null
* ObjectUtils.identityToString("") = "java.lang.String@1e23"
* ObjectUtils.identityToString(Boolean.TRUE) = "java.lang.Boolean@7fa"
* </pre>
*
* @param object the object to create a toString for, may be {@code null}.
* @return the default toString text, or {@code null} if {@code null} passed in.
*/
public static String identityToString(final Object object) {
if (object == null) {
return null;
}
final String name = object.getClass().getName();
final String hexString = identityHashCodeHex(object);
final StringBuilder builder = new StringBuilder(name.length() + 1 + hexString.length());
// @formatter:off
builder.append(name)
.append(AT_SIGN)
.append(hexString);
// @formatter:on
return builder.toString();
}
/**
* Appends the toString that would be produced by {@link Object}
* if a | did |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/SpringLogDebugBodyMaxCharsTest.java | {
"start": 1079,
"end": 1351
} | class ____ extends LogDebugBodyMaxCharsTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/SpringLogDebugBodyMaxCharsTest.xml");
}
}
| SpringLogDebugBodyMaxCharsTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/state/StatefulOperatorChainedTaskTest.java | {
"start": 3026,
"end": 9005
} | class ____ {
private static final Set<OperatorID> RESTORED_OPERATORS = ConcurrentHashMap.newKeySet();
private TemporaryFolder temporaryFolder;
@Before
public void setup() throws IOException {
RESTORED_OPERATORS.clear();
temporaryFolder = new TemporaryFolder();
temporaryFolder.create();
}
@Test
public void testMultipleStatefulOperatorChainedSnapshotAndRestore() throws Exception {
OperatorID headOperatorID = new OperatorID(42L, 42L);
OperatorID tailOperatorID = new OperatorID(44L, 44L);
JobManagerTaskRestore restore =
createRunAndCheckpointOperatorChain(
headOperatorID,
new CounterOperator("head"),
tailOperatorID,
new CounterOperator("tail"),
Optional.empty());
TaskStateSnapshot stateHandles = restore.getTaskStateSnapshot();
assertEquals(2, stateHandles.getSubtaskStateMappings().size());
createRunAndCheckpointOperatorChain(
headOperatorID,
new CounterOperator("head"),
tailOperatorID,
new CounterOperator("tail"),
Optional.of(restore));
assertEquals(
new HashSet<>(Arrays.asList(headOperatorID, tailOperatorID)), RESTORED_OPERATORS);
}
private JobManagerTaskRestore createRunAndCheckpointOperatorChain(
OperatorID headId,
OneInputStreamOperator<String, String> headOperator,
OperatorID tailId,
OneInputStreamOperator<String, String> tailOperator,
Optional<JobManagerTaskRestore> restore)
throws Exception {
File localRootDir = temporaryFolder.newFolder();
final OneInputStreamTaskTestHarness<String, String> testHarness =
new OneInputStreamTaskTestHarness<>(
OneInputStreamTask::new,
1,
1,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
localRootDir);
testHarness
.setupOperatorChain(headId, headOperator)
.chain(tailId, tailOperator, StringSerializer.INSTANCE, true)
.finish();
if (restore.isPresent()) {
JobManagerTaskRestore taskRestore = restore.get();
testHarness.setTaskStateSnapshot(
taskRestore.getRestoreCheckpointId(), taskRestore.getTaskStateSnapshot());
}
StreamMockEnvironment environment =
new StreamMockEnvironment(
testHarness.jobConfig,
testHarness.taskConfig,
testHarness.getExecutionConfig(),
testHarness.memorySize,
new MockInputSplitProvider(),
testHarness.bufferSize,
testHarness.getTaskStateManager());
Configuration configuration = new Configuration();
configuration.setString(STATE_BACKEND.key(), "rocksdb");
File file = temporaryFolder.newFolder();
configuration.setString(CHECKPOINTS_DIRECTORY.key(), file.toURI().toString());
configuration.setString(INCREMENTAL_CHECKPOINTS.key(), "true");
environment.setTaskManagerInfo(
new TestingTaskManagerRuntimeInfo(
configuration,
System.getProperty("java.io.tmpdir").split(",|" + File.pathSeparator)));
testHarness.invoke(environment);
testHarness.waitForTaskRunning();
OneInputStreamTask<String, String> streamTask = testHarness.getTask();
processRecords(testHarness);
triggerCheckpoint(testHarness, streamTask);
TestTaskStateManager taskStateManager = testHarness.getTaskStateManager();
JobManagerTaskRestore jobManagerTaskRestore =
new JobManagerTaskRestore(
taskStateManager.getReportedCheckpointId(),
taskStateManager.getLastJobManagerTaskStateSnapshot());
testHarness.endInput();
testHarness.waitForTaskCompletion();
return jobManagerTaskRestore;
}
private void triggerCheckpoint(
OneInputStreamTaskTestHarness<String, String> testHarness,
OneInputStreamTask<String, String> streamTask)
throws Exception {
long checkpointId = 1L;
CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointId, 1L);
testHarness.getTaskStateManager().getWaitForReportLatch().reset();
while (!streamTask
.triggerCheckpointAsync(
checkpointMetaData, CheckpointOptions.forCheckpointWithDefaultLocation())
.get()) {}
testHarness.getTaskStateManager().getWaitForReportLatch().await();
long reportedCheckpointId = testHarness.getTaskStateManager().getReportedCheckpointId();
assertEquals(checkpointId, reportedCheckpointId);
}
private void processRecords(OneInputStreamTaskTestHarness<String, String> testHarness)
throws Exception {
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.processElement(new StreamRecord<>("10"), 0, 0);
testHarness.processElement(new StreamRecord<>("20"), 0, 0);
testHarness.processElement(new StreamRecord<>("30"), 0, 0);
testHarness.waitForInputProcessing();
expectedOutput.add(new StreamRecord<>("10"));
expectedOutput.add(new StreamRecord<>("20"));
expectedOutput.add(new StreamRecord<>("30"));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
}
private abstract static | StatefulOperatorChainedTaskTest |
java | jhy__jsoup | src/main/java/org/jsoup/select/Evaluator.java | {
"start": 17807,
"end": 17858
} | class ____-of-type
*
*/
public static | nth |
java | google__dagger | javatests/dagger/android/support/functional/ComponentStructureFollowsControllerStructureApplication.java | {
"start": 2687,
"end": 4102
} | class ____ {
@Provides
@IntoSet
static Class<?> addToComponentHierarchy() {
return ApplicationComponent.class;
}
@Binds
@IntoMap
@ClassKey(TestActivity.class)
abstract AndroidInjector.Factory<?> bindFactoryForTestActivity(
ActivitySubcomponent.Builder builder);
@Binds
@IntoMap
@ClassKey(OuterClass.TestInnerClassActivity.class)
abstract AndroidInjector.Factory<?> bindFactoryForInnerActivity(
InnerActivitySubcomponent.Builder builder);
@Binds
@IntoMap
@ClassKey(TestService.class)
abstract AndroidInjector.Factory<?> bindFactoryForService(
ServiceSubcomponent.Builder builder);
@Binds
@IntoMap
@ClassKey(TestIntentService.class)
abstract AndroidInjector.Factory<?> bindFactoryForIntentService(
IntentServiceSubcomponent.Builder builder);
@Binds
@IntoMap
@ClassKey(TestBroadcastReceiver.class)
abstract AndroidInjector.Factory<?> bindFactoryForBroadcastReceiver(
BroadcastReceiverSubcomponent.Builder builder);
@Binds
@IntoMap
@ClassKey(TestContentProvider.class)
abstract AndroidInjector.Factory<?> bindFactoryForContentProvider(
ContentProviderSubcomponent.Builder builder);
}
@Subcomponent(modules = ActivitySubcomponent.ActivityModule.class)
| ApplicationModule |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/ParameterBinder.java | {
"start": 1315,
"end": 4120
} | class ____ {
static final String PARAMETER_NEEDS_TO_BE_NAMED = "For queries with named parameters you need to provide names for method parameters; Use @Param for query method parameters, or when on Java 8+ use the javac flag -parameters";
private final JpaParameters parameters;
private final Iterable<QueryParameterSetter> parameterSetters;
private final boolean useJpaForPaging;
/**
* Creates a new {@link ParameterBinder} for the given {@link JpaParameters} and {@link QueryParameterSetter}s.
* Defaults to use JPA API to apply pagination offsets.
*
* @param parameters must not be {@literal null}.
* @param parameterSetters must not be {@literal null}.
* @since 2.0.6
*/
ParameterBinder(JpaParameters parameters, Iterable<QueryParameterSetter> parameterSetters) {
this(parameters, parameterSetters, true);
}
/**
* Creates a new {@link ParameterBinder} for the given {@link JpaParameters} and {@link QueryParameterSetter}s.
*
* @param parameters must not be {@literal null}.
* @param parameterSetters must not be {@literal null}.
* @param useJpaForPaging determines whether {@link Query#setFirstResult(int)} and {@link Query#setMaxResults(int)}
* shall be used for paging.
*/
public ParameterBinder(JpaParameters parameters, Iterable<QueryParameterSetter> parameterSetters,
boolean useJpaForPaging) {
Assert.notNull(parameters, "JpaParameters must not be null");
Assert.notNull(parameterSetters, "Parameter setters must not be null");
this.parameters = parameters;
this.parameterSetters = parameterSetters;
this.useJpaForPaging = useJpaForPaging;
}
public <T extends Query> T bind(T jpaQuery,
JpaParametersParameterAccessor accessor) {
bind(new QueryParameterSetter.BindableQuery(jpaQuery), accessor, ErrorHandling.STRICT);
return jpaQuery;
}
public void bind(QueryParameterSetter.BindableQuery query, JpaParametersParameterAccessor accessor,
ErrorHandler errorHandler) {
for (QueryParameterSetter setter : parameterSetters) {
setter.setParameter(query, accessor, errorHandler);
}
}
/**
* Binds the parameters to the given query and applies special parameter types (e.g. pagination).
*
* @param query must not be {@literal null}.
* @param accessor must not be {@literal null}.
*/
Query bindAndPrepare(Query query,
JpaParametersParameterAccessor accessor) {
bind(query, accessor);
Pageable pageable = accessor.getPageable();
if (!useJpaForPaging || !parameters.hasLimitingParameters() || pageable.isUnpaged()) {
return query;
}
// Apply offset only if it is not 0 (the default).
int offset = PageableUtils.getOffsetAsInteger(pageable);
if (offset != 0) {
query.setFirstResult(offset);
}
query.setMaxResults(pageable.getPageSize());
return query;
}
}
| ParameterBinder |
java | elastic__elasticsearch | x-pack/license-tools/src/main/java/org/elasticsearch/license/licensor/tools/KeyPairGeneratorToolProvider.java | {
"start": 397,
"end": 641
} | class ____ implements CliToolProvider {
@Override
public String name() {
return "key-pair-generator";
}
@Override
public Command create() {
return new KeyPairGeneratorTool();
}
}
| KeyPairGeneratorToolProvider |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigDataResourceNotFoundExceptionTests.java | {
"start": 1269,
"end": 5573
} | class ____ {
private final ConfigDataResource resource = new TestConfigDataResource();
private final ConfigDataLocation location = ConfigDataLocation.of("optional:test");
private final Throwable cause = new RuntimeException();
private File exists;
private File missing;
@TempDir
@SuppressWarnings("NullAway.Init")
File temp;
@BeforeEach
void setup() throws IOException {
this.exists = new File(this.temp, "exists");
this.missing = new File(this.temp, "missing");
try (OutputStream out = new FileOutputStream(this.exists)) {
out.write("test".getBytes());
}
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenResourceIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(() -> new ConfigDataResourceNotFoundException(null))
.withMessage("'resource' must not be null");
}
@Test
void createWithResourceCreatesInstance() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource);
assertThat(exception.getResource()).isSameAs(this.resource);
}
@Test
void createWithResourceAndCauseCreatesInstance() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource,
this.cause);
assertThat(exception.getResource()).isSameAs(this.resource);
assertThat(exception.getCause()).isSameAs(this.cause);
}
@Test
void getResourceReturnsResource() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource);
assertThat(exception.getResource()).isSameAs(this.resource);
}
@Test
void getLocationWhenHasNoLocationReturnsNull() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource);
assertThat(exception.getLocation()).isNull();
}
@Test
void getLocationWhenHasLocationReturnsLocation() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource)
.withLocation(this.location);
assertThat(exception.getLocation()).isSameAs(this.location);
}
@Test
void getReferenceDescriptionWhenHasNoLocationReturnsDescription() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource);
assertThat(exception.getReferenceDescription()).isEqualTo("resource 'mytestresource'");
}
@Test
void getReferenceDescriptionWhenHasLocationReturnsDescription() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource)
.withLocation(this.location);
assertThat(exception.getReferenceDescription())
.isEqualTo("resource 'mytestresource' via location 'optional:test'");
}
@Test
void withLocationReturnsNewInstanceWithLocation() {
ConfigDataResourceNotFoundException exception = new ConfigDataResourceNotFoundException(this.resource)
.withLocation(this.location);
assertThat(exception.getLocation()).isSameAs(this.location);
}
@Test
void throwIfDoesNotExistWhenPathExistsDoesNothing() {
ConfigDataResourceNotFoundException.throwIfDoesNotExist(this.resource, this.exists.toPath());
}
@Test
void throwIfDoesNotExistWhenPathDoesNotExistThrowsException() {
assertThatExceptionOfType(ConfigDataResourceNotFoundException.class).isThrownBy(
() -> ConfigDataResourceNotFoundException.throwIfDoesNotExist(this.resource, this.missing.toPath()));
}
@Test
void throwIfDoesNotExistWhenFileExistsDoesNothing() {
ConfigDataResourceNotFoundException.throwIfDoesNotExist(this.resource, this.exists);
}
@Test
void throwIfDoesNotExistWhenFileDoesNotExistThrowsException() {
assertThatExceptionOfType(ConfigDataResourceNotFoundException.class)
.isThrownBy(() -> ConfigDataResourceNotFoundException.throwIfDoesNotExist(this.resource, this.missing));
}
@Test
void throwIfDoesNotExistWhenResourceExistsDoesNothing() {
ConfigDataResourceNotFoundException.throwIfDoesNotExist(this.resource, new FileSystemResource(this.exists));
}
@Test
void throwIfDoesNotExistWhenResourceDoesNotExistThrowsException() {
assertThatExceptionOfType(ConfigDataResourceNotFoundException.class)
.isThrownBy(() -> ConfigDataResourceNotFoundException.throwIfDoesNotExist(this.resource,
new FileSystemResource(this.missing)));
}
static | ConfigDataResourceNotFoundExceptionTests |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/MapRequestRepresentation.java | {
"start": 2914,
"end": 11513
} | class ____ extends RequestRepresentation {
/** Maximum number of key-value pairs that can be passed to ImmutableMap.of(K, V, K, V, ...). */
private static final int MAX_IMMUTABLE_MAP_OF_KEY_VALUE_PAIRS = 5;
private final XProcessingEnv processingEnv;
private final MultiboundMapBinding binding;
private final ImmutableMap<DependencyRequest, ContributionBinding> dependencies;
private final ComponentRequestRepresentations componentRequestRepresentations;
private final CompilerOptions compilerOptions;
private final boolean useLazyClassKey;
private final ShardImplementation shard;
@AssistedInject
MapRequestRepresentation(
@Assisted MultiboundMapBinding binding,
XProcessingEnv processingEnv,
BindingGraph graph,
ComponentImplementation componentImplementation,
ComponentRequestRepresentations componentRequestRepresentations,
CompilerOptions compilerOptions) {
this.binding = binding;
this.processingEnv = processingEnv;
BindingKind bindingKind = this.binding.kind();
checkArgument(bindingKind.equals(MULTIBOUND_MAP), bindingKind);
this.componentRequestRepresentations = componentRequestRepresentations;
this.compilerOptions = compilerOptions;
this.dependencies =
Maps.toMap(binding.dependencies(), dep -> graph.contributionBinding(dep.key()));
this.useLazyClassKey = MapKeys.useLazyClassKey(binding, graph);
this.shard = componentImplementation.shardImplementation(binding);
}
@Override
XExpression getDependencyExpression(XClassName requestingClass) {
XExpression dependencyExpression = getUnderlyingMapExpression(requestingClass);
// LazyClassKey is backed with a string map, therefore needs to be wrapped.
if (useLazyClassKey) {
return XExpression.create(
dependencyExpression.type(),
XCodeBlock.of(
"%T.<%T>of(%L)",
XTypeNames.LAZY_CLASS_KEY_MAP,
valueTypeName(),
dependencyExpression.codeBlock()));
}
return dependencyExpression;
}
private XExpression getUnderlyingMapExpression(XClassName requestingClass) {
// TODO(b/460400653): This might cause a double wrap when we call to getDependencyExpression
// on a single element map. It would be good to get rid of that double wrapping, but solving
// this properly may require rethinking our current API.
// TODO(ronshapiro): We should also make an ImmutableMap version of MapFactory
boolean isImmutableMapAvailable = isImmutableMapAvailable();
// TODO(ronshapiro, gak): Use Maps.immutableEnumMap() if it's available?
if (isImmutableMapAvailable && dependencies.size() <= MAX_IMMUTABLE_MAP_OF_KEY_VALUE_PAIRS) {
return XExpression.create(
immutableMapType(),
XCodeBlock.builder()
.add("%T.", XTypeNames.IMMUTABLE_MAP)
.add(maybeTypeParameters(requestingClass))
.add(
"of(%L)",
dependencies.keySet().stream()
.map(dependency -> keyAndValueExpression(dependency, requestingClass))
.collect(toParametersCodeBlock()))
.build());
}
switch (dependencies.size()) {
case 0:
return collectionsStaticFactoryInvocation(requestingClass, XCodeBlock.of("emptyMap()"));
case 1:
return collectionsStaticFactoryInvocation(
requestingClass,
XCodeBlock.of(
"singletonMap(%L)",
keyAndValueExpression(getOnlyElement(dependencies.keySet()), requestingClass)));
default:
String builderName = "mapBuilder";
XCodeBlock.Builder builderMethodCalls = XCodeBlock.builder();
for (DependencyRequest dependency : dependencies.keySet()) {
builderMethodCalls.addStatement(
"%N.put(%L)", builderName, keyAndValueExpression(dependency, requestingClass));
}
String methodName =
shard.getUniqueMethodName(KeyVariableNamer.name(binding.key()) + "Builder");
XTypeName returnType =
isImmutableMapAvailable ? XTypeNames.IMMUTABLE_MAP : XTypeNames.JAVA_UTIL_MAP;
XTypeName builderType =
isImmutableMapAvailable ? XTypeNames.IMMUTABLE_MAP_BUILDER : XTypeNames.MAP_BUILDER;
XFunSpec methodSpec =
methodBuilder(methodName)
.addModifiers(
!shard.isShardClassPrivate()
? ImmutableSet.of(Modifier.PRIVATE)
: ImmutableSet.of())
.returns(returnType)
.addCode(
XCodeBlock.builder()
.addStatement(
"%T %N = %L",
builderType, builderName, mapBuilderInvocation(requestingClass))
.add(builderMethodCalls.build())
.addStatement("return %N.build()", builderName)
.build())
.build();
shard.addMethod(MethodSpecKind.PRIVATE_METHOD, methodSpec);
XType expressionType =
isImmutableMapAvailable ? immutableMapType() : binding.key().type().xprocessing();
boolean isSameClass = requestingClass.equals(shard.name());
XCodeBlock codeBlock =
isSameClass
? XCodeBlock.of("%N()", methodName) // Call method directly
: XCodeBlock.of(
"%L.%N()",
shard.shardFieldReference(), methodName); // Call method on shard field
return XExpression.create(expressionType, codeBlock);
}
}
private XCodeBlock mapBuilderInvocation(XClassName requestingClass) {
XCodeBlock.Builder builder = XCodeBlock.builder();
XCodeBlock typeParam = maybeTypeParameters(requestingClass);
if (isImmutableMapAvailable()) {
builder.add("%T.", XTypeNames.IMMUTABLE_MAP).add(typeParam);
if (isImmutableMapBuilderWithExpectedSizeAvailable()) {
builder.add("builderWithExpectedSize(%L)", dependencies.size());
} else {
builder.add("builder()");
}
} else {
builder.add("%T.", XTypeNames.MAP_BUILDER).add(typeParam);
builder.add("newMapBuilder(%L)", dependencies.size());
}
return builder.build();
}
private XType immutableMapType() {
MapType mapType = MapType.from(binding.key());
return processingEnv.getDeclaredType(
processingEnv.requireTypeElement(XTypeNames.IMMUTABLE_MAP),
mapType.keyType(),
mapType.valueType());
}
private XCodeBlock keyAndValueExpression(
DependencyRequest dependency, XClassName requestingClass) {
return XCodeBlock.of(
"%L, %L",
useLazyClassKey
? getLazyClassMapKeyExpression(dependencies.get(dependency))
: getMapKeyExpression(dependencies.get(dependency), requestingClass, processingEnv),
componentRequestRepresentations
.getDependencyExpression(bindingRequest(dependency), requestingClass)
.codeBlock());
}
private XExpression collectionsStaticFactoryInvocation(
XClassName requestingClass, XCodeBlock methodInvocation) {
return XExpression.create(
binding.key().type().xprocessing(),
toJavaPoet(
XCodeBlock.builder()
.add("%T.", XTypeNames.JAVA_UTIL_COLLECTIONS)
.add(maybeTypeParameters(requestingClass))
.add(methodInvocation)
.build()));
}
private XCodeBlock maybeTypeParameters(XClassName requestingClass) {
XType bindingKeyType = binding.key().type().xprocessing();
return isTypeAccessibleFrom(bindingKeyType, requestingClass.getPackageName())
? XCodeBlock.of("<%T, %T>", keyTypeName(), valueTypeName())
: XCodeBlock.of("");
}
private XTypeName keyTypeName() {
if (useLazyClassKey) {
return XTypeName.STRING;
}
return MapType.from(binding.key()).keyType().asTypeName();
}
private XTypeName valueTypeName() {
return MapType.from(binding.key()).valueType().asTypeName();
}
private boolean isImmutableMapBuilderWithExpectedSizeAvailable() {
return isImmutableMapAvailable()
&& processingEnv.requireTypeElement(XTypeNames.IMMUTABLE_MAP).getDeclaredMethods().stream()
.anyMatch(method -> getSimpleName(method).contentEquals("builderWithExpectedSize"));
}
private boolean isImmutableMapAvailable() {
return processingEnv.findTypeElement(XTypeNames.IMMUTABLE_MAP) != null;
}
@AssistedFactory
static | MapRequestRepresentation |
java | elastic__elasticsearch | x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/CliRepl.java | {
"start": 447,
"end": 2501
} | class ____ {
private CliTerminal cliTerminal;
private CliCommand cliCommand;
private CliSession cliSession;
public CliRepl(CliTerminal cliTerminal, CliSession cliSession, CliCommand cliCommand) {
this.cliTerminal = cliTerminal;
this.cliCommand = cliCommand;
this.cliSession = cliSession;
}
public void execute() {
String DEFAULT_PROMPT = "sql> ";
String MULTI_LINE_PROMPT = " | ";
StringBuilder multiLine = new StringBuilder();
String prompt = DEFAULT_PROMPT;
cliTerminal.flush();
cliCommand.handle(cliTerminal, cliSession, "logo");
while (true) {
String line = cliTerminal.readLine(prompt);
if (line == null) {
return;
}
line = line.trim();
if (line.endsWith(";") == false) {
multiLine.append(line);
multiLine.append(" ");
prompt = MULTI_LINE_PROMPT;
continue;
}
line = line.substring(0, line.length() - 1);
prompt = DEFAULT_PROMPT;
if (multiLine.length() > 0) {
// append the line without trailing ;
multiLine.append(line);
line = multiLine.toString().trim();
multiLine.setLength(0);
}
// Skip empty commands
if (line.isEmpty()) {
continue;
}
// special case to handle exit
if (isExit(line)) {
cliTerminal.line().em("Bye!").ln();
cliTerminal.flush();
return;
}
if (cliCommand.handle(cliTerminal, cliSession, line) == false) {
cliTerminal.error("Unrecognized command", line);
}
cliTerminal.println();
}
}
private static boolean isExit(String line) {
line = line.toLowerCase(Locale.ROOT);
return line.equals("exit") || line.equals("quit");
}
}
| CliRepl |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AssertThrowsMultipleStatementsTest.java | {
"start": 885,
"end": 1308
} | class ____ {
private final BugCheckerRefactoringTestHelper compilationHelper =
BugCheckerRefactoringTestHelper.newInstance(AssertThrowsMultipleStatements.class, getClass());
@Test
public void ignoreInThrowingRunnables() {
compilationHelper
.addInputLines(
"Test.java",
"""
import static org.junit.Assert.assertThrows;
| AssertThrowsMultipleStatementsTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnitAssertSameCheckTest.java | {
"start": 944,
"end": 1442
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(JUnitAssertSameCheck.class, getClass());
@Test
public void positiveCase() {
compilationHelper
.addSourceLines(
"JUnitAssertSameCheckPositiveCase.java",
"""
package com.google.errorprone.bugpatterns.testdata;
/**
* Positive test cases for {@link JUnitAssertSameCheck} check.
*
* @author bhagwani@google.com (Sumit Bhagwani)
*/
public | JUnitAssertSameCheckTest |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/Registry.java | {
"start": 3654,
"end": 4656
} | class ____ any subtypes. For
* example, registering an {@link Encoder} for {@link java.io.InputStream} will result in the
* {@link Encoder} being used for {@link
* android.content.res.AssetFileDescriptor.AutoCloseInputStream}, {@link java.io.FileInputStream}
* and any other subclass.
*
* <p>If multiple {@link Encoder}s are registered for the same type or super type, the {@link
* Encoder} that is registered first will be used.
*
* @deprecated Use the equivalent {@link #append(Class, Class, ModelLoaderFactory)} method
* instead.
*/
@NonNull
@Deprecated
public <Data> Registry register(@NonNull Class<Data> dataClass, @NonNull Encoder<Data> encoder) {
return append(dataClass, encoder);
}
/**
* Appends the given {@link Encoder} onto the list of available {@link Encoder}s so that it is
* attempted after all earlier and default {@link Encoder}s for the given data class.
*
* <p>The {@link Encoder} will be used both for the exact data | and |
java | playframework__playframework | core/play/src/main/java/play/http/DefaultHttpErrorHandler.java | {
"start": 796,
"end": 1046
} | class ____ intended to be extended to allow reusing Play's default error handling
* functionality.
*
* <p>The "play.editor" configuration setting is used here to give a link back to the source code
* when set and development mode is on.
*/
public | is |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/JdbcTemplate.java | {
"start": 2648,
"end": 2819
} | class ____ the use of JDBC and helps to avoid common errors.
* It executes core JDBC workflow, leaving application code to provide SQL
* and extract results. This | simplifies |
java | redisson__redisson | redisson/src/main/java/org/redisson/cache/LocalCachedMapEnable.java | {
"start": 718,
"end": 1202
} | class ____ implements Serializable {
private byte[][] keyHashes;
private String requestId;
public LocalCachedMapEnable() {
}
public LocalCachedMapEnable(String requestId, byte[][] keyHashes) {
super();
this.requestId = requestId;
this.keyHashes = keyHashes;
}
public String getRequestId() {
return requestId;
}
public byte[][] getKeyHashes() {
return keyHashes;
}
}
| LocalCachedMapEnable |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alter/MySqlAlterTableGroupTest_0.java | {
"start": 906,
"end": 1743
} | class ____ extends TestCase {
public void test_alter_first() throws Exception {
String sql = "alter tablegroup new_ddl.ddl_test_1 k1=v1";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLEGROUP new_ddl.ddl_test_1 k1 = v1", SQLUtils.toMySqlString(stmt));
}
public void test_alter_2() throws Exception {
String sql = "alter tablegroup new_ddl.ddl_test_1 k1=123";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLEGROUP new_ddl.ddl_test_1 k1 = 123", SQLUtils.toMySqlString(stmt));
}
}
| MySqlAlterTableGroupTest_0 |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4312TypeAwarePluginParameterExpressionInjectionTest.java | {
"start": 1131,
"end": 2235
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that plugins that use magic parameter expressions like ${plugin} for ordinary system properties
* get properly configured and don't crash due to Maven trying to inject a type-incompatible magic value
* into the String-type mojo parameter.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4312");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
Properties props = verifier.loadProperties("target/config.properties");
assertEquals("", props.getProperty("stringParam", ""));
assertEquals("", props.getProperty("aliasParam", ""));
assertEquals("maven-core-it", props.getProperty("defaultParam"));
}
}
| MavenITmng4312TypeAwarePluginParameterExpressionInjectionTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java | {
"start": 973,
"end": 2918
} | class ____ extends AbstractBucketMetricsTestCase<MaxBucketPipelineAggregationBuilder> {
@Override
protected MaxBucketPipelineAggregationBuilder doCreateTestAggregatorFactory(String name, String bucketsPath) {
return new MaxBucketPipelineAggregationBuilder(name, bucketsPath);
}
public void testValidate() {
AggregationBuilder singleBucketAgg = new GlobalAggregationBuilder("global");
AggregationBuilder multiBucketAgg = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING);
final Set<AggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(singleBucketAgg);
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
assertThat(
validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "invalid_agg>metric")),
equalTo(
"Validation Failed: 1: "
+ PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"
)
);
// Now try to point to a single bucket agg
assertThat(
validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "global>metric")),
equalTo(
"Validation Failed: 1: Unable to find unqualified multi-bucket aggregation in "
+ PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ ". Path must include a multi-bucket aggregation for aggregation [name] found :"
+ GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric;"
)
);
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}
| MaxBucketTests |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/datavalidation/groups/EmailController.java | {
"start": 1141,
"end": 1572
} | class ____ {
@Post("/createDraft")
public HttpResponse createDraft(@Body @Valid Email email) { // <2>
return HttpResponse.ok(Collections.singletonMap("msg", "OK"));
}
@Post("/send")
@Validated(groups = FinalValidation.class) // <3>
public HttpResponse send(@Body @Valid Email email) { // <4>
return HttpResponse.ok(Collections.singletonMap("msg", "OK"));
}
}
//end::clazz[]
| EmailController |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositorySnapshotStatsIT.java | {
"start": 1442,
"end": 5189
} | class ____ extends AbstractSnapshotIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
// Make upload time more accurate
.put(ESTIMATED_TIME_INTERVAL_SETTING.getKey(), "0s")
.build();
}
public void testRepositorySnapshotStats() {
logger.info("--> starting a node");
internalCluster().startNode();
logger.info("--> create index");
final int numberOfShards = randomIntBetween(2, 6);
createIndex("test-idx", numberOfShards, 0);
ensureGreen();
indexRandomDocs("test-idx", 100);
IndicesStatsResponse indicesStats = indicesAdmin().prepareStats("test-idx").get();
IndexStats indexStats = indicesStats.getIndex("test-idx");
long totalSizeInBytes = 0;
for (ShardStats shard : indexStats.getShards()) {
totalSizeInBytes += shard.getStats().getStore().sizeInBytes();
}
logger.info("--> total shards size: {} bytes", totalSizeInBytes);
logger.info("--> create repository with really low snapshot/restore rate-limits");
createRepository(
"test-repo",
"fs",
Settings.builder()
.put("location", randomRepoPath())
.put("compress", false)
// set rate limits at ~25% of total size
.put("max_snapshot_bytes_per_sec", ByteSizeValue.ofBytes(totalSizeInBytes / 4))
.put("max_restore_bytes_per_sec", ByteSizeValue.ofBytes(totalSizeInBytes / 4))
);
logger.info("--> create snapshot");
createSnapshot("test-repo", "test-snap", Collections.singletonList("test-idx"));
logger.info("--> restore from snapshot");
RestoreSnapshotResponse restoreSnapshotResponse = clusterAdmin().prepareRestoreSnapshot(
TEST_REQUEST_TIMEOUT,
"test-repo",
"test-snap"
).setRenamePattern("test-").setRenameReplacement("test2-").setWaitForCompletion(true).get();
assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
assertDocCount("test-idx", 100);
logger.info("--> access repository throttling stats via _nodes/stats api");
NodesStatsResponse response = clusterAdmin().prepareNodesStats().setRepositoryStats(true).get();
RepositoriesStats stats = response.getNodes().get(0).getRepositoriesStats();
// These are just broad sanity checks on the values. There are more detailed checks in SnapshotMetricsIT
assertTrue(stats.getRepositorySnapshotStats().containsKey("test-repo"));
RepositoriesStats.SnapshotStats snapshotStats = stats.getRepositorySnapshotStats().get("test-repo");
assertThat(snapshotStats.totalWriteThrottledNanos(), greaterThan(0L));
assertThat(snapshotStats.totalReadThrottledNanos(), greaterThan(0L));
assertThat(snapshotStats.shardSnapshotsStarted(), equalTo((long) numberOfShards));
assertThat(snapshotStats.shardSnapshotsCompleted(), equalTo((long) numberOfShards));
assertThat(snapshotStats.shardSnapshotsInProgress(), equalTo(0L));
assertThat(snapshotStats.numberOfBlobsUploaded(), greaterThan(0L));
assertThat(snapshotStats.numberOfBytesUploaded(), greaterThan(0L));
assertThat(snapshotStats.totalUploadTimeInMillis(), greaterThan(0L));
assertThat(snapshotStats.totalUploadReadTimeInMillis(), greaterThan(0L));
assertThat(snapshotStats.totalUploadReadTimeInMillis(), lessThanOrEqualTo(snapshotStats.totalUploadTimeInMillis()));
}
}
| RepositorySnapshotStatsIT |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/onetoone/unidirectional/UnidirectionalWithNulls.java | {
"start": 772,
"end": 3674
} | class ____ {
private Integer ed1_id;
private Integer ed2_id;
private Integer ing1_id;
private Integer ing2_id;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1
scope.inTransaction( em -> {
UniRefEdEntity ed1 = new UniRefEdEntity( 1, "data_ed_1" );
UniRefEdEntity ed2 = new UniRefEdEntity( 2, "data_ed_2" );
UniRefIngEntity ing1 = new UniRefIngEntity( 3, "data_ing_1", ed1 );
UniRefIngEntity ing2 = new UniRefIngEntity( 4, "data_ing_2", null );
em.persist( ed1 );
em.persist( ed2 );
em.persist( ing1 );
em.persist( ing2 );
ed1_id = ed1.getId();
ed2_id = ed2.getId();
ing1_id = ing1.getId();
ing2_id = ing2.getId();
} );
// Revision 2
scope.inTransaction( em -> {
UniRefIngEntity ing1 = em.find( UniRefIngEntity.class, ing1_id );
ing1.setReference( null );
} );
// Revision 3
scope.inTransaction( em -> {
UniRefIngEntity ing2 = em.find( UniRefIngEntity.class, ing2_id );
UniRefEdEntity ed2 = em.find( UniRefEdEntity.class, ed2_id );
ing2.setReference( ed2 );
} );
}
@Test
public void testRevisionsCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( UniRefEdEntity.class, ed1_id ) );
assertEquals( Arrays.asList( 1 ), auditReader.getRevisions( UniRefEdEntity.class, ed2_id ) );
assertEquals( Arrays.asList( 1, 2 ), auditReader.getRevisions( UniRefIngEntity.class, ing1_id ) );
assertEquals( Arrays.asList( 1, 3 ), auditReader.getRevisions( UniRefIngEntity.class, ing2_id ) );
} );
}
@Test
public void testHistoryOfIngId1(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
UniRefEdEntity ed1 = em.find( UniRefEdEntity.class, ed1_id );
UniRefIngEntity rev1 = auditReader.find( UniRefIngEntity.class, ing1_id, 1 );
UniRefIngEntity rev2 = auditReader.find( UniRefIngEntity.class, ing1_id, 2 );
UniRefIngEntity rev3 = auditReader.find( UniRefIngEntity.class, ing1_id, 3 );
assertEquals( ed1, rev1.getReference() );
assertNull( rev2.getReference() );
assertNull( rev3.getReference() );
} );
}
@Test
public void testHistoryOfIngId2(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
UniRefEdEntity ed2 = em.find( UniRefEdEntity.class, ed2_id );
UniRefIngEntity rev1 = auditReader.find( UniRefIngEntity.class, ing2_id, 1 );
UniRefIngEntity rev2 = auditReader.find( UniRefIngEntity.class, ing2_id, 2 );
UniRefIngEntity rev3 = auditReader.find( UniRefIngEntity.class, ing2_id, 3 );
assertNull( rev1.getReference() );
assertNull( rev2.getReference() );
assertEquals( ed2, rev3.getReference() );
} );
}
}
| UnidirectionalWithNulls |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/JavaSandboxLinuxContainerRuntime.java | {
"start": 15731,
"end": 15869
} | class ____ String constants and static methods for the
* use of the {@link JavaSandboxLinuxContainerRuntime}.
*/
static final | defining |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/function/BodyInsertersTests.java | {
"start": 3321,
"end": 16406
} | class ____ {
private BodyInserter.Context context;
private Map<String, Object> hints;
@BeforeEach
void createContext() {
final List<HttpMessageWriter<?>> messageWriters = new ArrayList<>();
messageWriters.add(new EncoderHttpMessageWriter<>(new ByteBufferEncoder()));
messageWriters.add(new EncoderHttpMessageWriter<>(CharSequenceEncoder.textPlainOnly()));
messageWriters.add(new ResourceHttpMessageWriter());
messageWriters.add(new EncoderHttpMessageWriter<>(new Jaxb2XmlEncoder()));
JacksonJsonEncoder jsonEncoder = new JacksonJsonEncoder();
messageWriters.add(new EncoderHttpMessageWriter<>(jsonEncoder));
messageWriters.add(new ServerSentEventHttpMessageWriter(jsonEncoder));
messageWriters.add(new FormHttpMessageWriter());
messageWriters.add(new EncoderHttpMessageWriter<>(CharSequenceEncoder.allMimeTypes()));
messageWriters.add(new MultipartHttpMessageWriter(messageWriters));
this.context = new BodyInserter.Context() {
@Override
public List<HttpMessageWriter<?>> messageWriters() {
return messageWriters;
}
@Override
public Optional<ServerHttpRequest> serverRequest() {
return Optional.empty();
}
@Override
public Map<String, Object> hints() {
return hints;
}
};
this.hints = new HashMap<>();
}
@Test
void ofString() {
String body = "foo";
BodyInserter<String, ReactiveHttpOutputMessage> inserter = BodyInserters.fromValue(body);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBody())
.consumeNextWith(buf -> {
String actual = buf.toString(UTF_8);
assertThat(actual).isEqualTo("foo");
})
.expectComplete()
.verify();
}
@Test
void ofObject() {
User body = new User("foo", "bar");
BodyInserter<User, ReactiveHttpOutputMessage> inserter = BodyInserters.fromValue(body);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBodyAsString())
.expectNext("{\"username\":\"foo\",\"password\":\"bar\"}")
.expectComplete()
.verify();
}
@Test
void ofObjectWithHints() {
User body = new User("foo", "bar");
BodyInserter<User, ReactiveHttpOutputMessage> inserter = BodyInserters.fromValue(body);
this.hints.put(JSON_VIEW_HINT, SafeToSerialize.class);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBodyAsString())
.expectNext("{\"username\":\"foo\"}")
.expectComplete()
.verify();
}
@Test
void ofProducerWithMono() {
Mono<User> body = Mono.just(new User("foo", "bar"));
BodyInserter<?, ReactiveHttpOutputMessage> inserter = BodyInserters.fromProducer(body, User.class);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBodyAsString())
.expectNext("{\"username\":\"foo\",\"password\":\"bar\"}")
.expectComplete()
.verify();
}
@Test
void ofProducerWithFlux() {
Flux<String> body = Flux.just("foo");
BodyInserter<?, ReactiveHttpOutputMessage> inserter = BodyInserters.fromProducer(body, String.class);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBody())
.consumeNextWith(buf -> {
String actual = buf.toString(UTF_8);
assertThat(actual).isEqualTo("foo");
})
.expectComplete()
.verify();
}
@Test
void ofProducerWithSingle() {
Single<User> body = Single.just(new User("foo", "bar"));
BodyInserter<?, ReactiveHttpOutputMessage> inserter = BodyInserters.fromProducer(body, User.class);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBodyAsString())
.expectNext("{\"username\":\"foo\",\"password\":\"bar\"}")
.expectComplete()
.verify();
}
@Test
void ofPublisher() {
Flux<String> body = Flux.just("foo");
BodyInserter<Flux<String>, ReactiveHttpOutputMessage> inserter = BodyInserters.fromPublisher(body, String.class);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBody())
.consumeNextWith(buf -> {
String actual = buf.toString(UTF_8);
assertThat(actual).isEqualTo("foo");
})
.expectComplete()
.verify();
}
@Test
void ofResource() throws IOException {
Resource resource = new ClassPathResource("response.txt", getClass());
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = BodyInserters.fromResource(resource).insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
byte[] expectedBytes = Files.readAllBytes(resource.getFile().toPath());
StepVerifier.create(response.getBody())
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
DataBufferUtils.release(dataBuffer);
assertThat(resultBytes).isEqualTo(expectedBytes);
})
.expectComplete()
.verify();
}
@Test // gh-24366
public void ofResourceWithExplicitMediaType() throws IOException {
Resource resource = new ClassPathResource("response.txt", getClass());
MockClientHttpRequest request = new MockClientHttpRequest(HttpMethod.POST, "/");
request.getHeaders().setContentType(MediaType.TEXT_MARKDOWN);
Mono<Void> result = BodyInserters.fromResource(resource).insert(request, this.context);
StepVerifier.create(result).expectComplete().verify();
byte[] expectedBytes = Files.readAllBytes(resource.getFile().toPath());
assertThat(request.getHeaders().getContentType()).isEqualTo(MediaType.TEXT_MARKDOWN);
StepVerifier.create(request.getBody())
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
DataBufferUtils.release(dataBuffer);
assertThat(resultBytes).isEqualTo(expectedBytes);
})
.expectComplete()
.verify();
}
@Test
void ofResourceRange() throws IOException {
final int rangeStart = 10;
Resource body = new ClassPathResource("response.txt", getClass());
BodyInserter<Resource, ReactiveHttpOutputMessage> inserter = BodyInserters.fromResource(body);
MockServerHttpRequest request = MockServerHttpRequest.get("/foo")
.range(HttpRange.createByteRange(rangeStart))
.build();
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, new BodyInserter.Context() {
@Override
public List<HttpMessageWriter<?>> messageWriters() {
return Collections.singletonList(new ResourceHttpMessageWriter());
}
@Override
public Optional<ServerHttpRequest> serverRequest() {
return Optional.of(request);
}
@Override
public Map<String, Object> hints() {
return hints;
}
});
StepVerifier.create(result).expectComplete().verify();
byte[] allBytes = Files.readAllBytes(body.getFile().toPath());
byte[] expectedBytes = new byte[allBytes.length - rangeStart];
System.arraycopy(allBytes, rangeStart, expectedBytes, 0, expectedBytes.length);
StepVerifier.create(response.getBody())
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
DataBufferUtils.release(dataBuffer);
assertThat(resultBytes).isEqualTo(expectedBytes);
})
.expectComplete()
.verify();
}
@Test
void ofServerSentEventFlux() {
ServerSentEvent<String> event = ServerSentEvent.builder("foo").build();
Flux<ServerSentEvent<String>> body = Flux.just(event);
BodyInserter<Flux<ServerSentEvent<String>>, ServerHttpResponse> inserter =
BodyInserters.fromServerSentEvents(body);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectNextCount(0).expectComplete().verify();
}
@Test
void fromFormDataMap() {
MultiValueMap<String, String> body = new LinkedMultiValueMap<>();
body.set("name 1", "value 1");
body.add("name 2", "value 2+1");
body.add("name 2", "value 2+2");
body.add("name 3", null);
BodyInserter<MultiValueMap<String, String>, ClientHttpRequest>
inserter = BodyInserters.fromFormData(body);
MockClientHttpRequest request = new MockClientHttpRequest(HttpMethod.GET, URI.create("https://example.com"));
Mono<Void> result = inserter.insert(request, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(request.getBody())
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
DataBufferUtils.release(dataBuffer);
assertThat(resultBytes).isEqualTo("name+1=value+1&name+2=value+2%2B1&name+2=value+2%2B2&name+3".getBytes(StandardCharsets.UTF_8));
})
.expectComplete()
.verify();
}
@Test
void fromFormDataWith() {
BodyInserter<MultiValueMap<String, String>, ClientHttpRequest>
inserter = BodyInserters.fromFormData("name 1", "value 1")
.with("name 2", "value 2+1")
.with("name 2", "value 2+2")
.with("name 3", null);
MockClientHttpRequest request = new MockClientHttpRequest(HttpMethod.GET, URI.create("https://example.com"));
Mono<Void> result = inserter.insert(request, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(request.getBody())
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
DataBufferUtils.release(dataBuffer);
assertThat(resultBytes).isEqualTo("name+1=value+1&name+2=value+2%2B1&name+2=value+2%2B2&name+3".getBytes(StandardCharsets.UTF_8));
})
.expectComplete()
.verify();
}
@Test
void fromMultipartData() {
MultiValueMap<String, Object> map = new LinkedMultiValueMap<>();
map.set("name 3", "value 3");
BodyInserters.FormInserter<Object> inserter =
BodyInserters.fromMultipartData("name 1", "value 1")
.withPublisher("name 2", Flux.just("foo", "bar", "baz"), String.class)
.with(map);
MockClientHttpRequest request = new MockClientHttpRequest(HttpMethod.GET, URI.create("https://example.com"));
Mono<Void> result = inserter.insert(request, this.context);
StepVerifier.create(result).expectComplete().verify();
}
@Test // SPR-16350
public void fromMultipartDataWithMultipleValues() {
MultiValueMap<String, Object> map = new LinkedMultiValueMap<>();
map.put("name", Arrays.asList("value1", "value2"));
BodyInserters.FormInserter<Object> inserter = BodyInserters.fromMultipartData(map);
MockClientHttpRequest request = new MockClientHttpRequest(HttpMethod.GET, URI.create("https://example.com"));
Mono<Void> result = inserter.insert(request, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(DataBufferUtils.join(request.getBody()))
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
DataBufferUtils.release(dataBuffer);
String content = new String(resultBytes, StandardCharsets.UTF_8);
assertThat(content).contains("""
Content-Disposition: form-data; name="name"\r
Content-Type: text/plain;charset=UTF-8\r
Content-Length: 6\r
\r
value1""");
assertThat(content).contains("""
Content-Disposition: form-data; name="name"\r
Content-Type: text/plain;charset=UTF-8\r
Content-Length: 6\r
\r
value2""");
})
.expectComplete()
.verify();
}
@Test
void ofDataBuffers() {
byte[] bytes = "foo".getBytes(UTF_8);
DefaultDataBuffer dataBuffer = DefaultDataBufferFactory.sharedInstance.wrap(ByteBuffer.wrap(bytes));
Flux<DataBuffer> body = Flux.just(dataBuffer);
BodyInserter<Flux<DataBuffer>, ReactiveHttpOutputMessage> inserter = BodyInserters.fromDataBuffers(body);
MockServerHttpResponse response = new MockServerHttpResponse();
Mono<Void> result = inserter.insert(response, this.context);
StepVerifier.create(result).expectComplete().verify();
StepVerifier.create(response.getBody())
.expectNext(dataBuffer)
.expectComplete()
.verify();
}
| BodyInsertersTests |
java | apache__camel | components/camel-stream/src/test/java/org/apache/camel/component/stream/StreamSystemOutNewLineOffTest.java | {
"start": 1155,
"end": 2769
} | class ____ extends CamelTestSupport {
String message = "Hello World";
PrintStream stdOut = System.out;
ByteArrayOutputStream mockOut = new ByteArrayOutputStream();
// START SNIPPET: e1
@Test
public void testStringContent() {
try {
// Given
System.setOut(new PrintStream(mockOut));
// When
template.sendBody("direct:in", message);
// Then
assertEquals(message, new String(mockOut.toByteArray()));
} finally {
System.setOut(stdOut);
}
}
@Test
public void testBinaryContent() {
try {
// Given
System.setOut(new PrintStream(mockOut));
// When
template.sendBody("direct:in", message.getBytes());
// Then
assertEquals(message, new String(mockOut.toByteArray()));
} finally {
System.setOut(stdOut);
}
}
@Test
public void shouldSkipNullBody() {
try {
// Given
System.setOut(new PrintStream(mockOut));
// When
template.sendBody("direct:in", null);
// Then
assertEquals(0, mockOut.toByteArray().length);
} finally {
System.setOut(stdOut);
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:in").to("stream:out?appendNewLine=false");
}
};
}
// END SNIPPET: e1
}
| StreamSystemOutNewLineOffTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/runtime/src/main/java/io/quarkus/resteasy/reactive/server/runtime/ResteasyReactiveInitialiser.java | {
"start": 121,
"end": 296
} | interface ____ {
/**
* This is where we stuff all generated static init calls we need to make.
*/
void init(Deployment deployment);
}
| ResteasyReactiveInitialiser |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/subscribers/SafeSubscriber.java | {
"start": 1187,
"end": 7508
} | class ____<@NonNull T> implements FlowableSubscriber<T>, Subscription {
/** The actual Subscriber. */
final Subscriber<? super T> downstream;
/** The subscription. */
Subscription upstream;
/** Indicates a terminal state. */
boolean done;
/**
* Constructs a {@code SafeSubscriber} by wrapping the given actual {@link Subscriber}.
* @param downstream the actual {@code Subscriber} to wrap, not {@code null} (not validated)
*/
public SafeSubscriber(@NonNull Subscriber<? super T> downstream) {
this.downstream = downstream;
}
@Override
public void onSubscribe(@NonNull Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
try {
downstream.onSubscribe(this);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
done = true;
// can't call onError because the actual's state may be corrupt at this point
try {
s.cancel();
} catch (Throwable e1) {
Exceptions.throwIfFatal(e1);
RxJavaPlugins.onError(new CompositeException(e, e1));
return;
}
RxJavaPlugins.onError(e);
}
}
}
@Override
public void onNext(@NonNull T t) {
if (done) {
return;
}
if (upstream == null) {
onNextNoSubscription();
return;
}
if (t == null) {
Throwable ex = ExceptionHelper.createNullPointerException("onNext called with a null Throwable.");
try {
upstream.cancel();
} catch (Throwable e1) {
Exceptions.throwIfFatal(e1);
onError(new CompositeException(ex, e1));
return;
}
onError(ex);
return;
}
try {
downstream.onNext(t);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
try {
upstream.cancel();
} catch (Throwable e1) {
Exceptions.throwIfFatal(e1);
onError(new CompositeException(e, e1));
return;
}
onError(e);
}
}
void onNextNoSubscription() {
done = true;
Throwable ex = new NullPointerException("Subscription not set!");
try {
downstream.onSubscribe(EmptySubscription.INSTANCE);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
// can't call onError because the actual's state may be corrupt at this point
RxJavaPlugins.onError(new CompositeException(ex, e));
return;
}
try {
downstream.onError(ex);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
// if onError failed, all that's left is to report the error to plugins
RxJavaPlugins.onError(new CompositeException(ex, e));
}
}
@Override
public void onError(@NonNull Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
done = true;
if (upstream == null) {
Throwable npe = new NullPointerException("Subscription not set!");
try {
downstream.onSubscribe(EmptySubscription.INSTANCE);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
// can't call onError because the actual's state may be corrupt at this point
RxJavaPlugins.onError(new CompositeException(t, npe, e));
return;
}
try {
downstream.onError(new CompositeException(t, npe));
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
// if onError failed, all that's left is to report the error to plugins
RxJavaPlugins.onError(new CompositeException(t, npe, e));
}
return;
}
if (t == null) {
t = ExceptionHelper.createNullPointerException("onError called with a null Throwable.");
}
try {
downstream.onError(t);
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
RxJavaPlugins.onError(new CompositeException(t, ex));
}
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
if (upstream == null) {
onCompleteNoSubscription();
return;
}
try {
downstream.onComplete();
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
RxJavaPlugins.onError(e);
}
}
void onCompleteNoSubscription() {
Throwable ex = new NullPointerException("Subscription not set!");
try {
downstream.onSubscribe(EmptySubscription.INSTANCE);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
// can't call onError because the actual's state may be corrupt at this point
RxJavaPlugins.onError(new CompositeException(ex, e));
return;
}
try {
downstream.onError(ex);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
// if onError failed, all that's left is to report the error to plugins
RxJavaPlugins.onError(new CompositeException(ex, e));
}
}
@Override
public void request(long n) {
try {
upstream.request(n);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
try {
upstream.cancel();
} catch (Throwable e1) {
Exceptions.throwIfFatal(e1);
RxJavaPlugins.onError(new CompositeException(e, e1));
return;
}
RxJavaPlugins.onError(e);
}
}
@Override
public void cancel() {
try {
upstream.cancel();
} catch (Throwable e1) {
Exceptions.throwIfFatal(e1);
RxJavaPlugins.onError(e1);
}
}
}
| SafeSubscriber |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/request/target/CustomViewTarget.java | {
"start": 1378,
"end": 10051
} | class ____<T extends View, Z> implements Target<Z> {
private static final String TAG = "CustomViewTarget";
@IdRes private static final int VIEW_TAG_ID = R.id.glide_custom_view_target_tag;
private final SizeDeterminer sizeDeterminer;
protected final T view;
@Nullable private OnAttachStateChangeListener attachStateListener;
private boolean isClearedByUs;
private boolean isAttachStateListenerAdded;
/** Constructor that defaults {@code waitForLayout} to {@code false}. */
public CustomViewTarget(@NonNull T view) {
this.view = Preconditions.checkNotNull(view);
sizeDeterminer = new SizeDeterminer(view);
}
/**
* A required callback invoked when the resource is no longer valid and must be freed.
*
* <p>You must ensure that any current Drawable received in {@link #onResourceReady(Object,
* Transition)} is no longer used before redrawing the container (usually a View) or changing its
* visibility. <b>Not doing so will result in crashes in your app.</b>
*
* @param placeholder The placeholder drawable to optionally show, or null.
*/
protected abstract void onResourceCleared(@Nullable Drawable placeholder);
/**
* An optional callback invoked when a resource load is started.
*
* @see Target#onLoadStarted(Drawable)
* @param placeholder The placeholder drawable to optionally show, or null.
*/
protected void onResourceLoading(@Nullable Drawable placeholder) {
// Default empty.
}
@Override
public void onStart() {
// Default empty.
}
@Override
public void onStop() {
// Default empty.
}
@Override
public void onDestroy() {
// Default empty.
}
/**
* Indicates that Glide should always wait for any pending layout pass before checking for the
* size an {@link View}.
*
* <p>By default, Glide will only wait for a pending layout pass if it's unable to resolve the
* size from the {@link LayoutParams} or valid non-zero values for {@link View#getWidth()} and
* {@link View#getHeight()}.
*
* <p>Because calling this method forces Glide to wait for the layout pass to occur before
* starting loads, setting this parameter to {@code true} can cause Glide to asynchronous load an
* image even if it's in the memory cache. The load will happen asynchronously because Glide has
* to wait for a layout pass to occur, which won't necessarily happen in the same frame as when
* the image is requested. As a result, using this method can resulting in flashing in some cases
* and should be used sparingly.
*
* <p>If the {@link LayoutParams} of the wrapped {@link View} are set to fixed sizes, they will
* still be used instead of the {@link View}'s dimensions even if this method is called. This
* parameter is a fallback only.
*/
@SuppressWarnings("WeakerAccess") // Public API
@NonNull
public final CustomViewTarget<T, Z> waitForLayout() {
sizeDeterminer.waitForLayout = true;
return this;
}
/**
* Clears the {@link View}'s {@link Request} when the {@link View} is detached from its {@link
* android.view.Window} and restarts the {@link Request} when the {@link View} is re-attached from
* its {@link android.view.Window}.
*
* <p>This is an experimental API that may be removed in a future version.
*
* <p>Using this method can save memory by allowing Glide to more eagerly clear resources when
* transitioning screens or swapping adapters in scrolling views. However it also substantially
* increases the odds that images will not be in memory if users subsequently return to a screen
* where images were previously loaded. Whether or not this happens will depend on the number of
* images loaded in the new screen and the size of the memory cache. Increasing the size of the
* memory cache can improve this behavior but it largely negates the memory benefits of using this
* method.
*
* <p>Use this method with caution and measure your memory usage to ensure that it's actually
* improving your memory usage in the cases you care about.
*/
// Public API.
@NonNull
@SuppressWarnings({"UnusedReturnValue", "WeakerAccess"})
public final CustomViewTarget<T, Z> clearOnDetach() {
if (attachStateListener != null) {
return this;
}
attachStateListener =
new OnAttachStateChangeListener() {
@Override
public void onViewAttachedToWindow(View v) {
resumeMyRequest();
}
@Override
public void onViewDetachedFromWindow(View v) {
pauseMyRequest();
}
};
maybeAddAttachStateListener();
return this;
}
/**
* Override the android resource id to store temporary state allowing loads to be automatically
* cancelled and resources re-used in scrolling lists.
*
* <p>Unlike {@link ViewTarget}, it is <b>not</b> necessary to set a custom tag id if your app
* uses {@link View#setTag(Object)}. It is only necessary if loading several Glide resources into
* the same view, for example one foreground and one background view.
*
* @param tagId The android resource id to use.
* @deprecated Using this method prevents clearing the target from working properly. Glide uses
* its own internal tag id so this method should not be necessary. This method is currently a
* no-op.
*/
// Public API.
@SuppressWarnings({"UnusedReturnValue", "WeakerAccess"})
@Deprecated
public final CustomViewTarget<T, Z> useTagId(@IdRes int tagId) {
return this;
}
/** Returns the wrapped {@link android.view.View}. */
@NonNull
public final T getView() {
return view;
}
/**
* Determines the size of the view by first checking {@link android.view.View#getWidth()} and
* {@link android.view.View#getHeight()}. If one or both are zero, it then checks the view's
* {@link LayoutParams}. If one or both of the params width and height are less than or equal to
* zero, it then adds an {@link android.view.ViewTreeObserver.OnPreDrawListener} which waits until
* the view has been measured before calling the callback with the view's drawn width and height.
*
* @param cb {@inheritDoc}
*/
@Override
public final void getSize(@NonNull SizeReadyCallback cb) {
sizeDeterminer.getSize(cb);
}
@Override
public final void removeCallback(@NonNull SizeReadyCallback cb) {
sizeDeterminer.removeCallback(cb);
}
@Override
public final void onLoadStarted(@Nullable Drawable placeholder) {
maybeAddAttachStateListener();
onResourceLoading(placeholder);
}
@Override
public final void onLoadCleared(@Nullable Drawable placeholder) {
sizeDeterminer.clearCallbacksAndListener();
onResourceCleared(placeholder);
if (!isClearedByUs) {
maybeRemoveAttachStateListener();
}
}
/**
* Stores the request using {@link View#setTag(Object)}.
*
* @param request {@inheritDoc}
*/
@Override
public final void setRequest(@Nullable Request request) {
setTag(request);
}
/** Returns any stored request using {@link android.view.View#getTag()}. */
@Override
@Nullable
public final Request getRequest() {
Object tag = getTag();
if (tag != null) {
if (tag instanceof Request) {
return (Request) tag;
} else {
throw new IllegalArgumentException("You must not pass non-R.id ids to setTag(id)");
}
}
return null;
}
@Override
public String toString() {
return "Target for: " + view;
}
@SuppressWarnings("WeakerAccess")
@Synthetic
final void resumeMyRequest() {
Request request = getRequest();
if (request != null && request.isCleared()) {
request.begin();
}
}
@SuppressWarnings("WeakerAccess")
@Synthetic
final void pauseMyRequest() {
Request request = getRequest();
if (request != null) {
isClearedByUs = true;
request.clear();
isClearedByUs = false;
}
}
private void setTag(@Nullable Object tag) {
view.setTag(VIEW_TAG_ID, tag);
}
@Nullable
private Object getTag() {
return view.getTag(VIEW_TAG_ID);
}
private void maybeAddAttachStateListener() {
if (attachStateListener == null || isAttachStateListenerAdded) {
return;
}
view.addOnAttachStateChangeListener(attachStateListener);
isAttachStateListenerAdded = true;
}
private void maybeRemoveAttachStateListener() {
if (attachStateListener == null || !isAttachStateListenerAdded) {
return;
}
view.removeOnAttachStateChangeListener(attachStateListener);
isAttachStateListenerAdded = false;
}
@VisibleForTesting
static final | CustomViewTarget |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/FrameworkFieldInitializer.java | {
"start": 2303,
"end": 2517
} | interface ____ {
/** Returns the expression to use to assign to the component field for the binding. */
XCodeBlock creationExpression();
/**
* Returns the framework | FrameworkInstanceCreationExpression |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/InvalidAllocationTagsQueryException.java | {
"start": 1066,
"end": 1361
} | class ____ extends YarnException {
private static final long serialVersionUID = 12312831974894L;
public InvalidAllocationTagsQueryException(String msg) {
super(msg);
}
public InvalidAllocationTagsQueryException(YarnException e) {
super(e);
}
}
| InvalidAllocationTagsQueryException |
java | elastic__elasticsearch | x-pack/plugin/security/qa/saml-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlServiceProviderMetadataIT.java | {
"start": 1929,
"end": 11901
} | class ____ extends ESRestTestCase {
private static HttpsServer httpsServer;
private static Map<Integer, Boolean> metadataAvailable = new HashMap<>();
public static ElasticsearchCluster cluster = initTestCluster();
private static Path caPath;
@ClassRule
public static TestRule ruleChain = RuleChain.outerRule(new RunnableTestRuleAdapter(SamlServiceProviderMetadataIT::initWebserver))
.around(cluster);
private static void initWebserver() {
try {
final InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0);
final Path cert = getDataResource("/ssl/http.crt");
final Path key = getDataResource("/ssl/http.key");
httpsServer = MockHttpServer.createHttps(address, 0);
httpsServer.setHttpsConfigurator(new PemHttpsConfigurator(cert, key, new char[0]));
httpsServer.start();
List.of(1, 2, 3).forEach(realmNumber -> {
try {
configureMetadataResource(realmNumber);
} catch (CertificateException | IOException | URISyntaxException e) {
throw new RuntimeException("Cannot configure metadata for realm " + realmNumber, e);
}
});
} catch (URISyntaxException | IOException | GeneralSecurityException e) {
throw new RuntimeException("Failed to initialise mock web server", e);
}
}
@AfterClass
public static void shutdownWebserver() {
httpsServer.stop(0);
httpsServer = null;
}
private static ElasticsearchCluster initTestCluster() {
return ElasticsearchCluster.local()
.nodes(1)
.module("analysis-common")
.setting("xpack.license.self_generated.type", "trial")
.setting("xpack.security.enabled", "true")
.setting("xpack.security.authc.token.enabled", "true")
.setting("xpack.security.authc.api_key.enabled", "true")
.setting("xpack.security.http.ssl.enabled", "true")
.setting("xpack.security.http.ssl.certificate", "node.crt")
.setting("xpack.security.http.ssl.key", "node.key")
.setting("xpack.security.http.ssl.certificate_authorities", "ca.crt")
.setting("xpack.security.transport.ssl.enabled", "true")
.setting("xpack.security.transport.ssl.certificate", "node.crt")
.setting("xpack.security.transport.ssl.key", "node.key")
.setting("xpack.security.transport.ssl.certificate_authorities", "ca.crt")
.setting("xpack.security.transport.ssl.verification_mode", "certificate")
.keystore("bootstrap.password", "x-pack-test-password")
.user("test_admin", "x-pack-test-password", User.ROOT_USER_ROLE, true)
.user("rest_test", "rest_password", User.ROOT_USER_ROLE, false)
.configFile("node.key", Resource.fromClasspath("ssl/node.key"))
.configFile("node.crt", Resource.fromClasspath("ssl/node.crt"))
.configFile("ca.crt", Resource.fromClasspath("ssl/ca.crt"))
.settings(node -> {
var samlWebServerAddress = httpsServer.getAddress();
var https = "https://" + samlWebServerAddress.getHostName() + ":" + samlWebServerAddress.getPort() + "/";
var settings = new HashMap<String, String>();
for (int realmNumber : List.of(1, 2, 3)) {
var prefix = "xpack.security.authc.realms.saml.saml" + realmNumber;
var idpEntityId = getIdpEntityId(realmNumber);
settings.put(prefix + ".order", String.valueOf(realmNumber));
settings.put(prefix + ".idp.entity_id", idpEntityId);
settings.put(prefix + ".idp.metadata.path", https + "metadata/" + realmNumber + ".xml");
settings.put(prefix + ".sp.entity_id", "https://sp" + realmNumber + ".example.org/");
settings.put(prefix + ".sp.acs", https + "acs/" + realmNumber);
settings.put(prefix + ".attributes.principal", "urn:oid:2.5.4.3");
settings.put(prefix + ".ssl.certificate_authorities", "ca.crt");
}
return settings;
})
.build();
}
private static void configureMetadataResource(int realmNumber) throws CertificateException, IOException, URISyntaxException {
metadataAvailable.putIfAbsent(realmNumber, false);
var signingCert = getDataResource("/saml/signing.crt");
var metadataBody = new SamlIdpMetadataBuilder().entityId(getIdpEntityId(realmNumber)).sign(signingCert).asString();
httpsServer.createContext("/metadata/" + realmNumber + ".xml", http -> {
if (metadataAvailable.get(realmNumber)) {
sendXmlContent(metadataBody, http);
} else {
if (randomBoolean()) {
http.sendResponseHeaders(randomFrom(404, 401, 403, 500), 0);
http.getResponseBody().close();
} else {
sendXmlContent("not valid xml", http);
}
}
});
}
private static void sendXmlContent(String bodyContent, HttpExchange http) throws IOException {
http.getResponseHeaders().add("Content-Type", "text/xml");
http.sendResponseHeaders(200, bodyContent.length());
try (var out = http.getResponseBody()) {
out.write(bodyContent.getBytes(StandardCharsets.UTF_8));
}
}
@BeforeClass
public static void loadCertificateAuthority() throws Exception {
URL resource = SamlServiceProviderMetadataIT.class.getResource("/ssl/ca.crt");
if (resource == null) {
throw new FileNotFoundException("Cannot find classpath resource /ssl/ca.crt");
}
caPath = PathUtils.get(resource.toURI());
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
@Override
protected String getProtocol() {
return "https";
}
@Override
protected Settings restAdminSettings() {
final String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).put(CERTIFICATE_AUTHORITIES, caPath).build();
}
@Override
protected Settings restClientSettings() {
final String token = basicAuthHeaderValue("rest_test", new SecureString("rest_password".toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).put(CERTIFICATE_AUTHORITIES, caPath).build();
}
public void testAuthenticationWhenMetadataIsUnreliable() throws Exception {
// Start with no metadata available
assertAllMetadataUnavailable();
final String username = randomAlphaOfLengthBetween(4, 12);
for (int realmNumber : shuffledList(List.of(1, 2, 3))) {
// Authc fails because metadata has never been loaded.
var ex = expectThrows(ResponseException.class, () -> samlAuthUser(realmNumber, username));
assertThat(ex.getResponse().getStatusLine().getStatusCode(), is(401));
// Authc works once metadata is available.
makeMetadataAvailable(realmNumber);
samlAuthUser(realmNumber, username);
}
// Switch off all metadata
makeAllMetadataUnavailable();
for (int realmNumber : List.of(1, 2, 3)) {
// Authc still works because metadata is cached.
samlAuthUser(realmNumber, username);
}
}
private void samlAuthUser(int realmNumber, String username) throws Exception {
var httpsAddress = httpsServer.getAddress();
var message = new SamlResponseBuilder().spEntityId("https://sp" + realmNumber + ".example.org/")
.idpEntityId(getIdpEntityId(realmNumber))
.acs(new URL("https://" + httpsAddress.getHostName() + ":" + httpsAddress.getPort() + "/acs/" + realmNumber))
.attribute("urn:oid:2.5.4.3", username)
.sign(getDataPath("/saml/signing.crt"), getDataPath("/saml/signing.key"), new char[0])
.asString();
final Map<String, Object> body = new HashMap<>();
body.put("content", Base64.getEncoder().encodeToString(message.getBytes(StandardCharsets.UTF_8)));
if (randomBoolean()) {
// If realm is not specified the action will infer it based on the ACS in the saml auth message
body.put("realm", "saml" + realmNumber);
}
var req = new Request("POST", "_security/saml/authenticate");
req.setJsonEntity(Strings.toString(JsonXContent.contentBuilder().map(body)));
var resp = entityAsMap(client().performRequest(req));
assertThat(resp.get("username"), equalTo(username));
assertThat(ObjectPath.evaluate(resp, "authentication.authentication_realm.name"), equalTo("saml" + realmNumber));
}
private static Path getDataResource(String relativePath) throws URISyntaxException {
return PathUtils.get(SamlServiceProviderMetadataIT.class.getResource(relativePath).toURI());
}
private static String getIdpEntityId(int realmNumber) {
return "https://idp" + realmNumber + ".example.org/";
}
private void makeMetadataAvailable(int... realms) {
for (int r : realms) {
metadataAvailable.put(Integer.valueOf(r), true);
}
}
private void assertAllMetadataUnavailable() {
metadataAvailable.forEach((realm, available) -> assertThat("For realm #" + realm, available, is(false)));
}
private void makeAllMetadataUnavailable() {
metadataAvailable.keySet().forEach(k -> metadataAvailable.put(k, false));
}
}
| SamlServiceProviderMetadataIT |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/creator/JSONCreatorTest11.java | {
"start": 997,
"end": 2934
} | class ____ {
private String image;
private String link;
private String title;
private String desc;
private String labels;
private String labelNot;
@JSONCreator
public RecommendDTO(@JSONField(name = "image") String image, @JSONField(name = "link") String link,
@JSONField(name = "title") String title, @JSONField(name = "desc") String desc,
@JSONField(name = "labels") String labels, @JSONField(name = "labelNot") String labelNot) {
final String PREFIX = "//";
this.desc = desc;
this.title = title;
this.labelNot = labelNot;
this.labels = labels;
if (image.startsWith(PREFIX)) {
this.image = image.substring(2);
}
if (link.startsWith(PREFIX)) {
this.link = link.substring(2);
}
}
public String getImage() {
return image;
}
public void setImage(String image) {
this.image = image;
}
public String getLink() {
return link;
}
public void setLink(String link) {
this.link = link;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDesc() {
return desc;
}
public void setDesc(String desc) {
this.desc = desc;
}
public String getLabels() {
return labels;
}
public void setLabels(String labels) {
this.labels = labels;
}
public String getLabelNot() {
return labelNot;
}
public void setLabelNot(String labelNot) {
this.labelNot = labelNot;
}
}
}
| RecommendDTO |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/LobUnfetchedPropertyTest.java | {
"start": 6544,
"end": 6932
} | class ____ {
private int id;
private String clob;
@Id
@GeneratedValue
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
@Column(name = "filedata", length = 1024 * 1024)
@Lob
@Basic(fetch = FetchType.LAZY)
public String getClob() {
return clob;
}
public void setClob(String clob) {
this.clob = clob;
}
}
}
| FileNClob2 |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/junit4/Helper.java | {
"start": 666,
"end": 2093
} | class ____ {
public static final String VALIDATE_FAILURE_EXPECTED = FailureExpected.VALIDATE_FAILURE_EXPECTED;
private Helper() {
}
/**
* Standard string content checking.
*
* @param string The string to check
*
* @return Are its content empty or the reference null?
*/
public static boolean isNotEmpty(String string) {
return string != null && string.length() > 0;
}
/**
* Extract a nice test name representation for display
*
* @param frameworkMethod The test method.
*
* @return The display representation
*/
public static String extractTestName(FrameworkMethod frameworkMethod) {
return frameworkMethod.getMethod().getDeclaringClass().getName() + '#' + frameworkMethod.getName();
}
/**
* Extract a nice method name representation for display
*
* @param method The method.
*
* @return The display representation
*/
public static String extractMethodName(Method method) {
return method.getDeclaringClass().getName() + "#" + method.getName();
}
public static <T extends Annotation> T locateAnnotation(
Class<T> annotationClass,
FrameworkMethod frameworkMethod,
TestClass testClass) {
T annotation = frameworkMethod.getAnnotation( annotationClass );
if ( annotation == null ) {
annotation = testClass.getJavaClass().getAnnotation( annotationClass );
}
return annotation;
}
/**
* Locates the specified annotation both at the method site and | Helper |
java | spring-projects__spring-framework | spring-jms/src/test/java/org/springframework/jms/annotation/AbstractJmsAnnotationDrivenTests.java | {
"start": 9412,
"end": 9848
} | class ____ {
@JmsListener(id = "${jms.listener.id}", containerFactory = "${jms.listener.containerFactory}",
destination = "${jms.listener.destination}", selector = "${jms.listener.selector}",
subscription = "${jms.listener.subscription}", concurrency = "${jms.listener.concurrency}")
@SendTo("${jms.listener.sendTo}")
public String fullHandle(String msg) {
return "reply";
}
}
@Component
static | FullConfigurableBean |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2AuthorizationServerMetadataTests.java | {
"start": 10008,
"end": 11012
} | class ____ extends AuthorizationServerConfiguration {
// @formatter:off
@Bean
SecurityFilterChain authorizationServerSecurityFilterChain(HttpSecurity http) throws Exception {
http
.oauth2AuthorizationServer((authorizationServer) ->
authorizationServer
.authorizationServerMetadataEndpoint((authorizationServerMetadataEndpoint) ->
authorizationServerMetadataEndpoint
.authorizationServerMetadataCustomizer(authorizationServerMetadataCustomizer()))
)
.authorizeHttpRequests((authorize) ->
authorize.anyRequest().authenticated()
);
return http.build();
}
// @formatter:on
private Consumer<OAuth2AuthorizationServerMetadata.Builder> authorizationServerMetadataCustomizer() {
return (authorizationServerMetadata) -> authorizationServerMetadata.scope("scope1").scope("scope2");
}
}
@EnableWebSecurity
@Import(OAuth2AuthorizationServerConfiguration.class)
static | AuthorizationServerConfigurationWithMetadataCustomizer |
java | spring-projects__spring-framework | spring-jdbc/src/test/java/org/springframework/jdbc/datasource/IsolationLevelDataSourceAdapterTests.java | {
"start": 1382,
"end": 3365
} | class ____ {
private final IsolationLevelDataSourceAdapter adapter = new IsolationLevelDataSourceAdapter();
@Test
void setIsolationLevelNameToUnsupportedValues() {
assertThatIllegalArgumentException().isThrownBy(() -> adapter.setIsolationLevelName(null));
assertThatIllegalArgumentException().isThrownBy(() -> adapter.setIsolationLevelName(" "));
assertThatIllegalArgumentException().isThrownBy(() -> adapter.setIsolationLevelName("bogus"));
}
/**
* Verify that the internal 'constants' map is properly configured for all
* ISOLATION_ constants defined in {@link TransactionDefinition}.
*/
@Test
void setIsolationLevelNameToAllSupportedValues() {
Set<Integer> uniqueValues = new HashSet<>();
streamIsolationConstants()
.forEach(name -> {
adapter.setIsolationLevelName(name);
Integer isolationLevel = adapter.getIsolationLevel();
if ("ISOLATION_DEFAULT".equals(name)) {
assertThat(isolationLevel).isNull();
uniqueValues.add(ISOLATION_DEFAULT);
}
else {
Integer expected = IsolationLevelDataSourceAdapter.constants.get(name);
assertThat(isolationLevel).isEqualTo(expected);
uniqueValues.add(isolationLevel);
}
});
assertThat(uniqueValues).containsExactlyInAnyOrderElementsOf(IsolationLevelDataSourceAdapter.constants.values());
}
@Test
void setIsolationLevel() {
assertThatIllegalArgumentException().isThrownBy(() -> adapter.setIsolationLevel(999));
adapter.setIsolationLevel(ISOLATION_DEFAULT);
assertThat(adapter.getIsolationLevel()).isNull();
adapter.setIsolationLevel(ISOLATION_READ_COMMITTED);
assertThat(adapter.getIsolationLevel()).isEqualTo(ISOLATION_READ_COMMITTED);
}
private static Stream<String> streamIsolationConstants() {
return Arrays.stream(TransactionDefinition.class.getFields())
.filter(ReflectionUtils::isPublicStaticFinal)
.map(Field::getName)
.filter(name -> name.startsWith("ISOLATION_"));
}
}
| IsolationLevelDataSourceAdapterTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/EqualsReferenceTest.java | {
"start": 2858,
"end": 3161
} | class ____ {
@Override
public boolean equals(Object o) {
if (o.equals("hi")) {
return true;
} else {
return o == this;
}
}
}
| OtherEquals |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/gateway/BaseGatewayShardAllocator.java | {
"start": 1620,
"end": 4860
} | class ____ {
protected final Logger logger = LogManager.getLogger(this.getClass());
/**
* Allocate an unassigned shard to nodes (if any) where valid copies of the shard already exist.
* It is up to the individual implementations of {@link #makeAllocationDecision(ShardRouting, RoutingAllocation, Logger)}
* to make decisions on assigning shards to nodes.
* @param shardRouting the shard to allocate
* @param allocation the allocation state container object
* @param unassignedAllocationHandler handles the allocation of the current shard
*/
public void allocateUnassigned(
ShardRouting shardRouting,
RoutingAllocation allocation,
ExistingShardsAllocator.UnassignedAllocationHandler unassignedAllocationHandler
) {
final AllocateUnassignedDecision allocateUnassignedDecision = makeAllocationDecision(shardRouting, allocation, logger);
if (allocateUnassignedDecision.isDecisionTaken() == false) {
// no decision was taken by this allocator
return;
}
if (allocateUnassignedDecision.getAllocationDecision() == AllocationDecision.YES) {
unassignedAllocationHandler.initialize(
allocateUnassignedDecision.getTargetNode().getId(),
allocateUnassignedDecision.getAllocationId(),
getExpectedShardSize(shardRouting, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE, allocation),
allocation.changes()
);
} else {
unassignedAllocationHandler.removeAndIgnore(allocateUnassignedDecision.getAllocationStatus(), allocation.changes());
}
}
/**
* Make a decision on the allocation of an unassigned shard. This method is used by
* {@link #allocateUnassigned(ShardRouting, RoutingAllocation, ExistingShardsAllocator.UnassignedAllocationHandler)} to make decisions
* about whether or not the shard can be allocated by this allocator and if so, to which node it will be allocated.
*
* @param unassignedShard the unassigned shard to allocate
* @param allocation the current routing state
* @param logger the logger
* @return an {@link AllocateUnassignedDecision} with the final decision of whether to allocate and details of the decision
*/
public abstract AllocateUnassignedDecision makeAllocationDecision(
ShardRouting unassignedShard,
RoutingAllocation allocation,
Logger logger
);
/**
* Builds decisions for all nodes in the cluster, so that the explain API can provide information on
* allocation decisions for each node, while still waiting to allocate the shard (e.g. due to fetching shard data).
*/
protected static List<NodeAllocationResult> buildDecisionsForAllNodes(ShardRouting shard, RoutingAllocation allocation) {
List<NodeAllocationResult> results = new ArrayList<>();
for (RoutingNode node : allocation.routingNodes()) {
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
results.add(new NodeAllocationResult(node.node(), null, decision));
}
return results;
}
}
| BaseGatewayShardAllocator |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/testutils/statemigration/V1TestTypeSerializerSnapshot.java | {
"start": 1580,
"end": 2817
} | class ____ implements TypeSerializerSnapshot<TestType> {
@Override
public int getCurrentVersion() {
return 1;
}
@Override
public TypeSerializerSchemaCompatibility<TestType> resolveSchemaCompatibility(
TypeSerializerSnapshot<TestType> oldSerializerSnapshot) {
if (oldSerializerSnapshot instanceof V1TestTypeSerializerSnapshot) {
return TypeSerializerSchemaCompatibility.compatibleAsIs();
} else if (
// old ReconfigurationRequiringTestTypeSerializerSnapshot cannot be compatible with
// any new TypeSerializerSnapshots
oldSerializerSnapshot instanceof ReconfigurationRequiringTestTypeSerializerSnapshot
// migrating from V2 -> V1 is not supported
|| oldSerializerSnapshot instanceof V2TestTypeSerializerSnapshot
// IncompatibleTestTypeSerializerSnapshot cannot be compatible with any
// TypeSerializerSnapshots
|| oldSerializerSnapshot instanceof IncompatibleTestTypeSerializerSnapshot) {
return TypeSerializerSchemaCompatibility.incompatible();
} else {
throw new IllegalStateException("Unknown serializer | V1TestTypeSerializerSnapshot |
java | google__auto | value/src/main/java/com/google/auto/value/processor/KotlinMetadata.java | {
"start": 2633,
"end": 7667
} | class ____ constructors should be returned
*/
ImmutableList<Executable> kotlinConstructorsIn(AnnotationMirror metadata, TypeElement ofClass) {
if (!KOTLIN_METADATA_AVAILABLE) {
if (!warnedAboutMissingMetadataApi) {
warnedAboutMissingMetadataApi = true;
errorReporter.reportWarning(
ofClass,
"[AutoBuilderNoMetadataApi] The Kotlin metadata API (kotlinx.metadata or"
+ " kotlin.metadata) is not available. You may need to add a dependency on"
+ " org.jetbrains.kotlin:kotlin-metadata-jvm.");
}
return ImmutableList.of();
}
try {
return kotlinConstructorsFromReflection(metadata, ofClass);
} catch (InvocationTargetException e) {
throwIfUnchecked(e.getCause());
// We don't expect the Kotlin API to throw checked exceptions.
throw new LinkageError(e.getMessage(), e);
} catch (ReflectiveOperationException e) {
throw new LinkageError(e.getMessage(), e);
}
}
private static ImmutableList<Executable> kotlinConstructorsFromReflection(
AnnotationMirror metadata, TypeElement ofClass) throws ReflectiveOperationException {
ImmutableMap<String, AnnotationValue> annotationValues =
AnnotationMirrors.getAnnotationValuesWithDefaults(metadata).entrySet().stream()
.collect(toImmutableMap(e -> e.getKey().getSimpleName().toString(), e -> e.getValue()));
// We match the KmConstructor instances with the ExecutableElement instances based on the
// parameter names. We could possibly just assume that the constructors are in the same order.
Map<ImmutableSet<String>, ExecutableElement> map =
constructorsIn(ofClass.getEnclosedElements()).stream()
.collect(toMap(c -> parameterNames(c), c -> c, (a, b) -> a, LinkedHashMap::new));
ImmutableMap<ImmutableSet<String>, ExecutableElement> paramNamesToConstructor =
ImmutableMap.copyOf(map);
KotlinClassHeader header =
new KotlinClassHeader(
(Integer) annotationValues.get("k").getValue(),
intArrayValue(annotationValues.get("mv")),
stringArrayValue(annotationValues.get("d1")),
stringArrayValue(annotationValues.get("d2")),
(String) annotationValues.get("xs").getValue(),
(String) annotationValues.get("pn").getValue(),
(Integer) annotationValues.get("xi").getValue());
KotlinClassMetadata.Class classMetadata = KotlinClassMetadata.readLenient(header);
KmClass kmClass = classMetadata.getKmClass();
ImmutableList.Builder<Executable> kotlinConstructorsBuilder = ImmutableList.builder();
for (KmConstructor constructor : kmClass.getConstructors()) {
ImmutableSet.Builder<String> allBuilder = ImmutableSet.builder();
ImmutableSet.Builder<String> optionalBuilder = ImmutableSet.builder();
for (KmValueParameter param : constructor.getValueParameters()) {
String name = param.getName();
allBuilder.add(name);
if (Attributes.getDeclaresDefaultValue(param)) {
optionalBuilder.add(name);
}
}
ImmutableSet<String> optional = optionalBuilder.build();
ImmutableSet<String> all = allBuilder.build();
ExecutableElement javaConstructor = paramNamesToConstructor.get(all);
if (javaConstructor != null) {
kotlinConstructorsBuilder.add(Executable.of(javaConstructor, optional));
}
}
return kotlinConstructorsBuilder.build();
}
private static ImmutableSet<String> parameterNames(ExecutableElement executableElement) {
return executableElement.getParameters().stream()
.map(v -> v.getSimpleName().toString())
.collect(toImmutableSet());
}
Optional<AnnotationMirror> kotlinMetadataAnnotation(Element element) {
return element.getAnnotationMirrors().stream()
.filter(
a ->
asTypeElement(a.getAnnotationType())
.getQualifiedName()
.contentEquals(KOTLIN_METADATA_NAME))
.<AnnotationMirror>map(a -> a) // get rid of that stupid wildcard
.findFirst();
}
private static int[] intArrayValue(AnnotationValue value) {
@SuppressWarnings("unchecked")
List<AnnotationValue> list = (List<AnnotationValue>) value.getValue();
return list.stream().mapToInt(v -> (int) v.getValue()).toArray();
}
private static String[] stringArrayValue(AnnotationValue value) {
@SuppressWarnings("unchecked")
List<AnnotationValue> list = (List<AnnotationValue>) value.getValue();
return list.stream().map(AnnotationValue::getValue).toArray(String[]::new);
}
// Wrapper classes for the Kotlin metadata API. These classes have the same names as the ones
// from that API (minus the package of course), and use reflection to access the real API. This
// allows us to write client code that is essentially the same as if we were using the real API.
// Otherwise the logic would be obscured by all the reflective calls.
private static | whose |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java | {
"start": 60458,
"end": 60606
} | class ____ {
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(ValidatedJsr303Properties.class)
static | PrefixConfiguration |
java | square__okhttp | samples/unixdomainsockets/src/main/java/okhttp3/unixdomainsockets/ClientAndServer.java | {
"start": 1030,
"end": 2108
} | class ____ {
public void run() throws Exception {
File socketFile = new File("/tmp/ClientAndServer.sock");
socketFile.delete(); // Clean up from previous runs.
MockWebServer server = new MockWebServer();
server.setServerSocketFactory(new UnixDomainServerSocketFactory(socketFile));
server.setProtocols(Collections.singletonList(Protocol.H2_PRIOR_KNOWLEDGE));
server.enqueue(new MockResponse().setBody("hello"));
server.start();
OkHttpClient client = new OkHttpClient.Builder()
.socketFactory(new UnixDomainSocketFactory(socketFile))
.protocols(Collections.singletonList(Protocol.H2_PRIOR_KNOWLEDGE))
.build();
Request request = new Request.Builder()
.url("http://publicobject.com/helloworld.txt")
.build();
try (Response response = client.newCall(request).execute()) {
System.out.println(response.body().string());
}
server.shutdown();
socketFile.delete();
}
public static void main(String... args) throws Exception {
new ClientAndServer().run();
}
}
| ClientAndServer |
java | apache__camel | components/camel-spring-parent/camel-spring-redis/src/test/java/org/apache/camel/component/redis/processor/idempotent/SpringRedisIdempotentRepositoryTest.java | {
"start": 1514,
"end": 3203
} | class ____ {
private static final String REPOSITORY = "testRepository";
private static final String KEY = "KEY";
@Mock
private RedisTemplate<String, String> redisTemplate;
@Mock
private RedisConnectionFactory redisConnectionFactory;
@Mock
private RedisConnection redisConnection;
@Mock
private SetOperations<String, String> setOperations;
private SpringRedisIdempotentRepository idempotentRepository;
@BeforeEach
public void setUp() throws Exception {
when(redisTemplate.opsForSet()).thenReturn(setOperations);
when(redisTemplate.getConnectionFactory()).thenReturn(redisConnectionFactory);
when(redisTemplate.getConnectionFactory().getConnection()).thenReturn(redisConnection);
idempotentRepository = SpringRedisIdempotentRepository.redisIdempotentRepository(redisTemplate, REPOSITORY);
}
@Test
public void shouldAddKey() {
idempotentRepository.add(KEY);
verify(setOperations).add(REPOSITORY, KEY);
}
@Test
public void shoulCheckForMembers() {
idempotentRepository.contains(KEY);
verify(setOperations).isMember(REPOSITORY, KEY);
}
@Test
public void shouldRemoveKey() {
idempotentRepository.remove(KEY);
verify(setOperations).remove(REPOSITORY, KEY);
}
@Test
public void shouldClearRepository() {
idempotentRepository.clear();
verify(redisConnection).flushDb();
}
@Test
public void shouldReturnProcessorName() {
String processorName = idempotentRepository.getRepositoryName();
assertEquals(REPOSITORY, processorName);
}
}
| SpringRedisIdempotentRepositoryTest |
java | apache__camel | components/camel-vertx/camel-vertx-websocket/src/test/java/org/apache/camel/component/vertx/websocket/VertxWebSocketSlowConsumerTest.java | {
"start": 1206,
"end": 2532
} | class ____ extends VertxWebSocketTestSupport {
private static final String MESSAGE_BODY = "Hello World";
private final BlockedThreadReporter reporter = new BlockedThreadReporter();
@AfterEach
public void afterEach() {
reporter.reset();
}
@BindToRegistry
public Vertx createVertx() {
return createVertxWithThreadBlockedHandler(reporter);
}
@Test
void slowConsumerDoesNotBlockEventLoop() throws Exception {
MockEndpoint mockEndpoint = getMockEndpoint("mock:result");
mockEndpoint.expectedBodiesReceived(MESSAGE_BODY);
template.requestBody("direct:start", MESSAGE_BODY);
mockEndpoint.assertIsSatisfied();
assertFalse(reporter.isEventLoopBlocked(), "Expected Vert.x event loop to not be blocked");
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start")
.toF("vertx-websocket:localhost:%d/slow", port);
fromF("vertx-websocket:localhost:%d/slow", port)
.delay(600).syncDelayed()
.to("mock:result");
}
};
}
}
| VertxWebSocketSlowConsumerTest |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/client/RedisOptionsCustomizer.java | {
"start": 141,
"end": 335
} | interface ____ the possibility to extend/modify the
* {@link io.vertx.redis.client.RedisOptions} before they are used to create the {@code RedisClient} or
* {@code RedisDataSource}.
*/
public | has |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/composite/ReferenceGenerator.java | {
"start": 903,
"end": 1445
} | interface ____ {@code referenceId} generation. For each object given to the
* {@link ReferenceGenerator#nextReferenceFor(Object)} method, the implementation should generate reference identifiers.
* Reference identifiers need to be unique within one SObject tree request and should start with alphanumeric character.
* <p/>
* For example you can provide your {@link ReferenceGenerator} implementation that uses identities within your own
* system as references, i.e. primary keys of records in your database.
*
* @see Counter
*/
public | for |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/main/java/org/springframework/boot/maven/AbstractPackagerMojo.java | {
"start": 2091,
"end": 2264
} | class ____ classes that work with an {@link Packager}.
*
* @author Phillip Webb
* @author Scott Frederick
* @author Moritz Halbritter
* @since 2.3.0
*/
public abstract | for |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/simp/SimpMessageType.java | {
"start": 842,
"end": 986
} | enum ____ {
CONNECT,
CONNECT_ACK,
MESSAGE,
SUBSCRIBE,
UNSUBSCRIBE,
HEARTBEAT,
DISCONNECT,
DISCONNECT_ACK,
OTHER
}
| SimpMessageType |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/collection/internal/StandardOrderedSetSemantics.java | {
"start": 658,
"end": 1867
} | class ____<E> extends AbstractSetSemantics<LinkedHashSet<E>,E> {
/**
* Singleton access
*/
public static final StandardOrderedSetSemantics<?> INSTANCE = new StandardOrderedSetSemantics<>();
private StandardOrderedSetSemantics() {
}
@Override
public CollectionClassification getCollectionClassification() {
return CollectionClassification.ORDERED_SET;
}
@Override
public LinkedHashSet<E> instantiateRaw(
int anticipatedSize,
CollectionPersister collectionDescriptor) {
return anticipatedSize < 1 ? CollectionHelper.linkedSet() : CollectionHelper.linkedSetOfSize( anticipatedSize );
}
@Override
public PersistentCollection<E> instantiateWrapper(
Object key,
CollectionPersister collectionDescriptor,
SharedSessionContractImplementor session) {
return new PersistentSet<>( session );
}
@Override
public PersistentCollection<E> wrap(
LinkedHashSet<E> rawCollection,
CollectionPersister collectionDescriptor,
SharedSessionContractImplementor session) {
return new PersistentSet<>( session, rawCollection );
}
@Override
public Iterator<E> getElementIterator(LinkedHashSet<E> rawCollection) {
return rawCollection.iterator();
}
}
| StandardOrderedSetSemantics |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/testrunner/metaannotations/MetaET.java | {
"start": 215,
"end": 649
} | class ____ {
@UnitTest
public void t1() {
given()
.when().get("/hello/greeting/foo")
.then()
.statusCode(200)
.body(is("hello foo"));
}
@UnitTest
public void t2() {
given()
.when().get("/hello/greeting/foo")
.then()
.statusCode(200)
.body(is("hello foo"));
}
}
| MetaET |
java | quarkusio__quarkus | extensions/kubernetes-config/runtime/src/main/java/io/quarkus/kubernetes/config/runtime/KubernetesConfigSourceFactory.java | {
"start": 784,
"end": 8415
} | class ____ implements ConfigSourceFactory {
private static final Logger log = Logger.getLogger(KubernetesConfigSourceFactory.class);
private final KubernetesClient client;
private final ConfigMapConfigSourceUtil configMapConfigSourceUtil;
private final SecretConfigSourceUtil secretConfigSourceUtil;
/**
* @param client A Kubernetes Client that is specific to this extension - it must not be shared with any other
* parts of the application
*/
public KubernetesConfigSourceFactory(KubernetesClient client) {
this.client = client;
this.configMapConfigSourceUtil = new ConfigMapConfigSourceUtil();
this.secretConfigSourceUtil = new SecretConfigSourceUtil();
}
@Override
public Iterable<ConfigSource> getConfigSources(final ConfigSourceContext context) {
SmallRyeConfig config = new SmallRyeConfigBuilder()
.withSources(new ConfigSourceContextConfigSource(context))
.withMapping(KubernetesConfigBuildTimeConfig.class)
.withMapping(KubernetesConfigSourceConfig.class)
.build();
KubernetesConfigBuildTimeConfig kubernetesConfigBuildTimeConfig = config
.getConfigMapping(KubernetesConfigBuildTimeConfig.class);
KubernetesConfigSourceConfig kubernetesConfigSourceConfig = config.getConfigMapping(KubernetesConfigSourceConfig.class);
// TODO - radcortez - Move the check that uses the build time config to the processor and skip the builder registration
if ((!kubernetesConfigSourceConfig.enabled() && !kubernetesConfigBuildTimeConfig.secretsEnabled())
|| isExplicitlyDisabled(context)) {
log.debug(
"No attempt will be made to obtain configuration from the Kubernetes API server because the functionality has been disabled via configuration");
return emptyList();
}
return getConfigSources(kubernetesConfigSourceConfig, kubernetesConfigBuildTimeConfig.secretsEnabled());
}
Iterable<ConfigSource> getConfigSources(final KubernetesConfigSourceConfig config, final boolean secrets) {
if (config.configMaps().isEmpty() && config.secrets().isEmpty()) {
log.debug("No ConfigMaps or Secrets were configured for config source lookup");
return emptyList();
}
List<ConfigSource> result = new ArrayList<>();
if (config.enabled() && config.configMaps().isPresent()) {
result.addAll(getConfigMapConfigSources(config.configMaps().get(), config));
}
if (secrets && config.secrets().isPresent()) {
result.addAll(getSecretConfigSources(config.secrets().get(), config));
}
try {
client.close(); // we no longer need the client, so we must close it to avoid resource leaks
} catch (Exception e) {
log.debug("Error in closing kubernetes client", e);
}
return result;
}
/**
 * Returns {@code true} only when the user explicitly set
 * {@code quarkus.kubernetes-config.enabled=false}; a value supplied by the
 * default-values source does not count as an explicit choice.
 */
private boolean isExplicitlyDisabled(ConfigSourceContext context) {
    ConfigValue enabledValue = context.getValue("quarkus.kubernetes-config.enabled");
    boolean fromDefaults = DefaultValuesConfigSource.NAME.equals(enabledValue.getConfigSourceName());
    String rawValue = enabledValue.getValue();
    if (fromDefaults || rawValue == null) {
        return false;
    }
    return !Converters.getImplicitConverter(Boolean.class).convert(rawValue);
}
/**
 * Reads each named ConfigMap (from the configured namespace, or the client's
 * current namespace when none is configured) and converts its data into config
 * sources. The loop index is forwarded to the util as the ordinal. A missing
 * ConfigMap is either logged or fails fast, per {@code failOnMissingConfig}.
 */
private List<ConfigSource> getConfigMapConfigSources(List<String> configMapNames, KubernetesConfigSourceConfig config) {
    List<ConfigSource> sources = new ArrayList<>(configMapNames.size());
    try {
        for (int ordinal = 0; ordinal < configMapNames.size(); ordinal++) {
            String name = configMapNames.get(ordinal);
            if (log.isDebugEnabled()) {
                log.debug("Attempting to read ConfigMap " + name);
            }
            String namespace;
            ConfigMap configMap;
            if (config.namespace().isPresent()) {
                namespace = config.namespace().get();
                configMap = client.configMaps().inNamespace(namespace).withName(name).get();
            } else {
                namespace = client.getNamespace();
                configMap = client.configMaps().withName(name).get();
            }
            if (configMap == null) {
                logMissingOrFail(name, namespace, "ConfigMap", config.failOnMissingConfig());
                continue;
            }
            sources.addAll(configMapConfigSourceUtil.toConfigSources(configMap.getMetadata(), configMap.getData(), ordinal));
            if (log.isDebugEnabled()) {
                log.debug("Done reading ConfigMap " + configMap.getMetadata().getName());
            }
        }
        return sources;
    } catch (Exception e) {
        throw new RuntimeException("Unable to obtain configuration for ConfigMap objects from Kubernetes API Server at: "
                + client.getConfiguration().getMasterUrl(), e);
    }
}
/**
 * Reads each named Secret (from the configured namespace, or the client's
 * current namespace when none is configured) and converts its data into config
 * sources. Mirrors {@code getConfigMapConfigSources}; the loop index is the
 * ordinal handed to the util. A missing Secret is logged or fails fast, per
 * {@code failOnMissingConfig}.
 */
private List<ConfigSource> getSecretConfigSources(List<String> secretNames, KubernetesConfigSourceConfig config) {
    List<ConfigSource> sources = new ArrayList<>(secretNames.size());
    try {
        for (int ordinal = 0; ordinal < secretNames.size(); ordinal++) {
            String name = secretNames.get(ordinal);
            if (log.isDebugEnabled()) {
                log.debug("Attempting to read Secret " + name);
            }
            String namespace;
            Secret secret;
            if (config.namespace().isPresent()) {
                namespace = config.namespace().get();
                secret = client.secrets().inNamespace(namespace).withName(name).get();
            } else {
                namespace = client.getNamespace();
                secret = client.secrets().withName(name).get();
            }
            if (secret == null) {
                logMissingOrFail(name, namespace, "Secret", config.failOnMissingConfig());
                continue;
            }
            sources.addAll(secretConfigSourceUtil.toConfigSources(secret.getMetadata(), secret.getData(), ordinal));
            if (log.isDebugEnabled()) {
                log.debug("Done reading Secret " + secret.getMetadata().getName());
            }
        }
        return sources;
    } catch (Exception e) {
        throw new RuntimeException("Unable to obtain configuration for Secret objects from Kubernetes API Server at: "
                + client.getConfiguration().getMasterUrl(), e);
    }
}
/**
 * Reports a missing ConfigMap/Secret: throws when {@code failOnMissingConfig}
 * is set, logs at INFO otherwise. When no namespace is known the message hints
 * at setting {@code quarkus.kubernetes-client.namespace}.
 */
private void logMissingOrFail(String name, String namespace, String type, boolean failOnMissingConfig) {
    StringBuilder message = new StringBuilder(type).append(" '").append(name).append("' not found");
    if (namespace == null) {
        message.append(
                ". No Kubernetes namespace was set (most likely because the application is running outside the Kubernetes cluster). Consider setting 'quarkus.kubernetes-client.namespace=my-namespace' to specify the namespace in which to look up the ")
                .append(type);
    } else {
        message.append(" in namespace '").append(namespace).append("'");
    }
    if (failOnMissingConfig) {
        throw new RuntimeException(message.toString());
    }
    log.info(message.toString());
}
}
| KubernetesConfigSourceFactory |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/recording/RecordedState.java | {
"start": 497,
"end": 2780
} | class ____ {
// All state is captured once via the constructor and exposed read-only through
// the getters below; there are no setters, so instances are effectively immutable.
private final Dialect dialect;
private final PrevalidatedQuarkusMetadata metadata;
private final BuildTimeSettings settings;
private final Collection<Integrator> integrators;
private final Collection<ProvidedService<?>> providedServices;
private final IntegrationSettings integrationSettings;
private final ProxyDefinitions proxyClassDefinitions;
private final MultiTenancyStrategy multiTenancyStrategy;
// Flags recorded at build time; semantics suggested by the names
// (reactive SQL client vs. classic, and whether config came from persistence.xml) —
// NOTE(review): confirm against the producing build step.
private final boolean isReactive;
private final boolean fromPersistenceXml;
/**
 * Captures the supplied build-time state verbatim; every argument is stored
 * as-is (no copies are taken, so callers must not mutate the collections
 * afterwards).
 */
public RecordedState(Dialect dialect, PrevalidatedQuarkusMetadata metadata,
        BuildTimeSettings settings, Collection<Integrator> integrators,
        Collection<ProvidedService<?>> providedServices, IntegrationSettings integrationSettings,
        ProxyDefinitions classDefinitions, MultiTenancyStrategy strategy,
        boolean isReactive, boolean fromPersistenceXml) {
    this.dialect = dialect;
    this.metadata = metadata;
    this.settings = settings;
    this.integrators = integrators;
    this.providedServices = providedServices;
    this.integrationSettings = integrationSettings;
    this.proxyClassDefinitions = classDefinitions;
    this.multiTenancyStrategy = strategy;
    this.isReactive = isReactive;
    this.fromPersistenceXml = fromPersistenceXml;
}
// Plain accessors: each returns the corresponding constructor-captured field
// without any defensive copying or computation.
public Dialect getDialect() {
    return dialect;
}

public PrevalidatedQuarkusMetadata getMetadata() {
    return metadata;
}

public BuildTimeSettings getBuildTimeSettings() {
    return settings;
}

public Collection<Integrator> getIntegrators() {
    return integrators;
}

public Collection<ProvidedService<?>> getProvidedServices() {
    return providedServices;
}

public IntegrationSettings getIntegrationSettings() {
    return integrationSettings;
}

public ProxyDefinitions getProxyClassDefinitions() {
    return proxyClassDefinitions;
}

public MultiTenancyStrategy getMultiTenancyStrategy() {
    return multiTenancyStrategy;
}

public boolean isReactive() {
    return isReactive;
}

public boolean isFromPersistenceXml() {
    return fromPersistenceXml;
}
}
| RecordedState |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/SpeculativeExecutionJobVertex.java | {
"start": 1481,
"end": 3284
} | class ____ extends ExecutionJobVertex {
/**
 * Creates the speculative job vertex; all construction work is delegated
 * unchanged to the {@code ExecutionJobVertex} super constructor — the
 * speculative behavior comes entirely from the factory overrides below.
 */
public SpeculativeExecutionJobVertex(
        InternalExecutionGraphAccessor graph,
        JobVertex jobVertex,
        VertexParallelismInformation parallelismInfo,
        CoordinatorStore coordinatorStore,
        JobManagerJobMetricGroup jobManagerJobMetricGroup)
        throws JobException {
    super(graph, jobVertex, parallelismInfo, coordinatorStore, jobManagerJobMetricGroup);
}
@Override
protected ExecutionVertex createExecutionVertex(
        ExecutionJobVertex jobVertex,
        int subTaskIndex,
        IntermediateResult[] producedDataSets,
        Duration timeout,
        long createTimestamp,
        int executionHistorySizeLimit,
        int initialAttemptCount) {
    // Factory override: produce SpeculativeExecutionVertex instances instead of
    // the base ExecutionVertex, forwarding every argument unchanged.
    return new SpeculativeExecutionVertex(
            jobVertex,
            subTaskIndex,
            producedDataSets,
            timeout,
            createTimestamp,
            executionHistorySizeLimit,
            initialAttemptCount);
}
@Override
protected OperatorCoordinatorHolder createOperatorCoordinatorHolder(
        SerializedValue<OperatorCoordinator.Provider> provider,
        ClassLoader classLoader,
        CoordinatorStore coordinatorStore,
        JobManagerJobMetricGroup jobManagerJobMetricGroup)
        throws Exception {
    // Delegates to the standard factory; the hard-coded 'true' is the only
    // difference from the base behavior visible here. NOTE(review): its meaning
    // is not shown in this file — presumably it signals support for concurrent
    // (speculative) execution attempts; confirm against the parameter name in
    // OperatorCoordinatorHolder.create's signature.
    return OperatorCoordinatorHolder.create(
            provider,
            this,
            classLoader,
            coordinatorStore,
            true,
            getTaskInformation(),
            jobManagerJobMetricGroup);
}
/** Factory to create {@link SpeculativeExecutionJobVertex}. */
public static | SpeculativeExecutionJobVertex |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.