language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/NonAggregatedIdentifierMapping.java | {
"start": 1186,
"end": 1450
} | interface ____ extends CompositeIdentifierMapping, EmbeddableValuedFetchable, FetchOptions, VirtualModelPart {
/**
* The virtual-id representation of this id mapping
*/
VirtualIdEmbeddable getVirtualIdEmbeddable();
/**
* The id- | NonAggregatedIdentifierMapping |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/DeclareParentsTests.java | {
"start": 2579,
"end": 2738
} | class ____ {
public void checkNotLocked(Lockable mixin) {
if (mixin.locked()) {
throw new IllegalStateException("locked");
}
}
}
| NonAnnotatedMakeLockable |
java | apache__camel | components/camel-irc/src/generated/java/org/apache/camel/component/irc/IrcEndpointUriFactory.java | {
"start": 513,
"end": 3248
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":hostname:port";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(29);
props.add("autoRejoin");
props.add("bridgeErrorHandler");
props.add("channels");
props.add("colors");
props.add("commandTimeout");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("hostname");
props.add("keys");
props.add("lazyStartProducer");
props.add("namesOnJoin");
props.add("nickPassword");
props.add("nickname");
props.add("onJoin");
props.add("onKick");
props.add("onMode");
props.add("onNick");
props.add("onPart");
props.add("onPrivmsg");
props.add("onQuit");
props.add("onReply");
props.add("onTopic");
props.add("password");
props.add("persistent");
props.add("port");
props.add("realname");
props.add("sslContextParameters");
props.add("trustManager");
props.add("username");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(3);
secretProps.add("nickPassword");
secretProps.add("password");
secretProps.add("username");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "irc".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "hostname", null, true, copy);
uri = buildPathParameter(syntax, uri, "port", null, false, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| IrcEndpointUriFactory |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/std/ContainerDeserializerBase.java | {
"start": 723,
"end": 5952
} | class ____<T>
extends StdDeserializer<T>
implements ValueInstantiator.Gettable // since 2.9
{
protected final JavaType _containerType;
/**
* Handler we need for dealing with nulls.
*/
protected final NullValueProvider _nullProvider;
/**
* Marker flag set if the <code>_nullProvider</code> indicates that all null
* content values should be skipped (instead of being possibly converted).
*/
protected final boolean _skipNullValues;
/**
* Specific override for this instance (from proper, or global per-type overrides)
* to indicate whether single value may be taken to mean an unwrapped one-element array
* or not. If null, left to global defaults.
*/
protected final Boolean _unwrapSingle;
protected ContainerDeserializerBase(JavaType selfType,
NullValueProvider nuller, Boolean unwrapSingle) {
super(selfType);
_containerType = selfType;
_unwrapSingle = unwrapSingle;
_nullProvider = nuller;
_skipNullValues = NullsConstantProvider.isSkipper(nuller);
}
protected ContainerDeserializerBase(JavaType selfType) {
this(selfType, null, null);
}
protected ContainerDeserializerBase(ContainerDeserializerBase<?> base) {
this(base, base._nullProvider, base._unwrapSingle);
}
protected ContainerDeserializerBase(ContainerDeserializerBase<?> base,
NullValueProvider nuller, Boolean unwrapSingle) {
super(base._containerType);
_containerType = base._containerType;
_nullProvider = nuller;
_unwrapSingle = unwrapSingle;
_skipNullValues = NullsConstantProvider.isSkipper(nuller);
}
/*
/**********************************************************************
/* Overrides
/**********************************************************************
*/
@Override
public JavaType getValueType() { return _containerType; }
@Override
public Boolean supportsUpdate(DeserializationConfig config) {
return Boolean.TRUE;
}
@Override
public SettableBeanProperty findBackReference(String refName) {
ValueDeserializer<Object> valueDeser = getContentDeserializer();
if (valueDeser == null) {
throw new IllegalArgumentException(String.format(
"Cannot handle managed/back reference '%s': type: container deserializer of type %s returned null for 'getContentDeserializer()'",
refName, getClass().getName()));
}
return valueDeser.findBackReference(refName);
}
/*
/**********************************************************************
/* Extended API
/**********************************************************************
*/
/**
* Accessor for declared type of contained value elements; either exact
* type, or one of its supertypes.
*/
public JavaType getContentType() {
if (_containerType == null) {
return TypeFactory.unknownType(); // should never occur but...
}
return _containerType.getContentType();
}
/**
* Accesor for deserializer use for deserializing content values.
*/
public abstract ValueDeserializer<Object> getContentDeserializer();
@Override
public AccessPattern getEmptyAccessPattern() {
// 02-Feb-2017, tatu: Empty containers are usually constructed as needed
// and may not be shared; for some deserializers this may be further refined.
return AccessPattern.DYNAMIC;
}
@Override
public Object getEmptyValue(DeserializationContext ctxt) throws JacksonException {
ValueInstantiator vi = getValueInstantiator();
if (vi == null || !vi.canCreateUsingDefault()) {
JavaType type = getValueType();
ctxt.reportBadDefinition(type,
String.format("Cannot create empty instance of %s, no default Creator", type));
}
return vi.createUsingDefault(ctxt);
}
/*
/**********************************************************************
/* Shared methods for sub-classes
/**********************************************************************
*/
/**
* Helper method called by various Map(-like) deserializers when encountering
* a processing problem (whether from underlying parser, i/o, or something else).
*/
protected <BOGUS> BOGUS wrapAndThrow(DeserializationContext ctxt,
Throwable t, Object ref, String key) throws JacksonException
{
// to handle StackOverflow:
while (t instanceof InvocationTargetException && t.getCause() != null) {
t = t.getCause();
}
// Errors to be passed as is
ClassUtil.throwIfError(t);
// 25-Feb-2021, tatu: as per [databind#3068] need to obey WRAP_EXCEPTIONS setting
if (!ctxt.isEnabled(DeserializationFeature.WRAP_EXCEPTIONS)) {
ClassUtil.throwIfRTE(t);
}
// for [databind#1141]
throw DatabindException.wrapWithPath(ctxt, t,
new JacksonException.Reference(ref, ClassUtil.nonNull(key, "N/A")));
}
}
| ContainerDeserializerBase |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java | {
"start": 1149,
"end": 3437
} | class ____ extends MockScriptPlugin {
public static final Map<ScriptContext<?>, MockScriptEngine.ContextCompiler> CONTEXT_COMPILERS;
static {
CONTEXT_COMPILERS = Map.of(
WatcherConditionScript.CONTEXT,
(script, options) -> (WatcherConditionScript.Factory) (params, watcherContext) -> new WatcherConditionScript(
params,
watcherContext
) {
@Override
public boolean execute() {
Map<String, Object> vars = new HashMap<>();
vars.put("params", getParams());
vars.put("ctx", getCtx());
return (boolean) script.apply(vars);
}
},
WatcherTransformScript.CONTEXT,
(script, options) -> (WatcherTransformScript.Factory) (params, watcherContext, payload) -> new WatcherTransformScript(
params,
watcherContext,
payload
) {
@Override
public Object execute() {
Map<String, Object> vars = new HashMap<>();
vars.put("params", getParams());
vars.put("ctx", getCtx());
return script.apply(vars);
}
}
);
}
public static final List<ScriptContext<?>> CONTEXTS = List.of(
WatcherConditionScript.CONTEXT,
WatcherTransformScript.CONTEXT,
Watcher.SCRIPT_TEMPLATE_CONTEXT
);
@Override
protected Map<ScriptContext<?>, MockScriptEngine.ContextCompiler> pluginContextCompilers() {
return CONTEXT_COMPILERS;
}
public static ScriptService newMockScriptService(Map<String, Function<Map<String, Object>, Object>> scripts, ProjectId projectId) {
Map<String, ScriptEngine> engines = new HashMap<>();
engines.put(MockScriptEngine.NAME, new MockScriptEngine(MockScriptEngine.NAME, scripts, CONTEXT_COMPILERS));
Map<String, ScriptContext<?>> contexts = CONTEXTS.stream().collect(Collectors.toMap(o -> o.name, Function.identity()));
return new ScriptService(Settings.EMPTY, engines, contexts, () -> 1L, TestProjectResolvers.singleProject(projectId));
}
}
| WatcherMockScriptPlugin |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/completable/CompletableMergeArrayDelayError.java | {
"start": 2067,
"end": 2515
} | class ____ implements Disposable {
final AtomicThrowable errors;
TryTerminateAndReportDisposable(AtomicThrowable errors) {
this.errors = errors;
}
@Override
public void dispose() {
errors.tryTerminateAndReport();
}
@Override
public boolean isDisposed() {
return errors.isTerminated();
}
}
static final | TryTerminateAndReportDisposable |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/spel/support/StandardTypeLocator.java | {
"start": 3961,
"end": 5067
} | class ____ for the type
* @throws EvaluationException if the type cannot be found
*/
@Override
public Class<?> findType(String typeName) throws EvaluationException {
Class<?> cachedType = this.typeCache.get(typeName);
if (cachedType != null) {
return cachedType;
}
Class<?> loadedType = loadType(typeName);
if (loadedType != null) {
if (!(this.classLoader instanceof SmartClassLoader scl && scl.isClassReloadable(loadedType))) {
this.typeCache.put(typeName, loadedType);
}
return loadedType;
}
throw new SpelEvaluationException(SpelMessage.TYPE_NOT_FOUND, typeName);
}
private @Nullable Class<?> loadType(String typeName) {
try {
return ClassUtils.forName(typeName, this.classLoader);
}
catch (ClassNotFoundException ex) {
// try any registered prefixes before giving up
}
for (String prefix : this.importPrefixes) {
try {
String nameToLookup = prefix + '.' + typeName;
return ClassUtils.forName(nameToLookup, this.classLoader);
}
catch (ClassNotFoundException ex) {
// might be a different prefix
}
}
return null;
}
}
| object |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/interceptor/TransactionalClientDataSourceMixedTransactedRedeliveryTest.java | {
"start": 995,
"end": 2437
} | class ____
extends TransactionalClientDataSourceMixedTransactedTest {
@Override
// The API is deprecated, we can remove warnings safely as the tests will disappear when removing this component.
@SuppressWarnings("deprecation")
protected RouteBuilder createRouteBuilder() throws Exception {
return new SpringRouteBuilder() {
public void configure() throws Exception {
// ignore failure if its something with Donkey
onException(IllegalArgumentException.class).onWhen(exceptionMessage().contains("Donkey")).handled(true);
from("direct:okay")
// mark this route as transacted
.errorHandler(transactionErrorHandler().maximumRedeliveries(3))
.setBody(constant("Tiger in Action")).bean("bookService")
.setBody(constant("Elephant in Action")).bean("bookService")
.setBody(constant("Donkey in Action")).bean("bookService");
from("direct:fail")
// and this route is not transacted
.errorHandler(defaultErrorHandler())
.setBody(constant("Tiger in Action")).bean("bookService")
.setBody(constant("Donkey in Action")).bean("bookService");
}
};
}
}
| TransactionalClientDataSourceMixedTransactedRedeliveryTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java | {
"start": 22741,
"end": 28660
} | enum ____ {
/**
* Always try and reconnect before executing a request, waiting for {@link TransportSettings#CONNECT_TIMEOUT} before failing if the
* remote cluster is totally unresponsive.
*/
RECONNECT_IF_DISCONNECTED,
/**
* Fail the request immediately if the remote cluster is disconnected (but also trigger another attempt to reconnect to the remote
* cluster in the background so that the next request might succeed).
*/
FAIL_IF_DISCONNECTED,
/**
* Behave according to the {@link RemoteClusterSettings#REMOTE_CLUSTER_SKIP_UNAVAILABLE} setting for this remote cluster: if this
* setting is {@code false} (the default) then behave like {@link #RECONNECT_IF_DISCONNECTED}, but if it is {@code true} then behave
* like {@link #FAIL_IF_DISCONNECTED}.
*/
RECONNECT_UNLESS_SKIP_UNAVAILABLE
}
/**
* Returns a client to the remote cluster if the given cluster alias exists.
*
* @param clusterAlias the cluster alias the remote cluster is registered under
* @param responseExecutor the executor to use to process the response
* @param disconnectedStrategy how to handle the situation where the remote cluster is disconnected when executing a request
* @throws IllegalArgumentException if the given clusterAlias doesn't exist
*/
public RemoteClusterClient getRemoteClusterClient(
String clusterAlias,
Executor responseExecutor,
DisconnectedStrategy disconnectedStrategy
) {
ensureClientIsEnabled();
if (transportService.getRemoteClusterService().getRegisteredRemoteClusterNames().contains(clusterAlias) == false) {
throw new NoSuchRemoteClusterException(clusterAlias);
}
return new RemoteClusterAwareClient(transportService, clusterAlias, responseExecutor, switch (disconnectedStrategy) {
case RECONNECT_IF_DISCONNECTED -> true;
case FAIL_IF_DISCONNECTED -> false;
case RECONNECT_UNLESS_SKIP_UNAVAILABLE -> transportService.getRemoteClusterService()
.isSkipUnavailable(clusterAlias)
.orElse(true) == false;
});
}
/**
* Verifies this node is configured to support linked project client operations.
* @throws IllegalArgumentException If this node is not configured to support client operations.
*/
public void ensureClientIsEnabled() {
if (isRemoteClusterClient) {
return;
}
if (isStateless == false) {
throw new IllegalArgumentException(
"node [" + getNodeName() + "] does not have the [" + DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE.roleName() + "] role"
);
}
// For stateless the remote cluster client is enabled by default for search nodes,
// REMOTE_CLUSTER_CLIENT_ROLE is not explicitly required.
if (isSearchNode == false) {
throw new IllegalArgumentException(
"node ["
+ getNodeName()
+ "] must have the ["
+ DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE.roleName()
+ "] role or the ["
+ DiscoveryNodeRole.SEARCH_ROLE.roleName()
+ "] role in stateless environments to use linked project client features"
);
}
}
static void registerRemoteClusterHandshakeRequestHandler(TransportService transportService) {
transportService.registerRequestHandler(
REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME,
EsExecutors.DIRECT_EXECUTOR_SERVICE,
false,
false,
TransportService.HandshakeRequest::new,
(request, channel, task) -> {
if (false == RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE.equals(channel.getProfileName())) {
throw new IllegalArgumentException(
Strings.format(
"remote cluster handshake action requires channel profile to be [%s], but got [%s]",
RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE,
channel.getProfileName()
)
);
}
logger.trace("handling remote cluster handshake request");
channel.sendResponse(
new TransportService.HandshakeResponse(
transportService.getLocalNode().getVersion(),
Build.current().hash(),
transportService.getLocalNode().withTransportAddress(transportService.boundRemoteAccessAddress().publishAddress()),
transportService.clusterName
)
);
}
);
}
/**
* Returns the map of connections for the {@link ProjectId} currently returned by the {@link ProjectResolver}.
*/
private Map<String, RemoteClusterConnection> getConnectionsMapForCurrentProject() {
return getConnectionsMapForProject(projectResolver.getProjectId());
}
/**
* Returns the map of connections for the given {@link ProjectId}.
*/
@FixForMultiProject(description = "Assert ProjectId.DEFAULT should not be used in multi-project environment")
private Map<String, RemoteClusterConnection> getConnectionsMapForProject(ProjectId projectId) {
if (projectResolver.supportsMultipleProjects()) {
return remoteClusters.computeIfAbsent(projectId, unused -> ConcurrentCollections.newConcurrentMap());
}
assert ProjectId.DEFAULT.equals(projectId) : "Only the default project ID should be used when multiple projects are not supported";
return remoteClusters.get(projectId);
}
}
| DisconnectedStrategy |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilter.java | {
"start": 2930,
"end": 3255
} | class ____ extends AmIpFilter {
private Set<String> proxyAddresses = null;
protected Set<String> getProxyAddresses() {
if (proxyAddresses == null) {
proxyAddresses = new HashSet<String>();
}
proxyAddresses.add(proxyHost);
return proxyAddresses;
}
}
private static | TestAmIpFilter |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TimeRetentionPolicyConfigTests.java | {
"start": 635,
"end": 2720
} | class ____ extends AbstractXContentSerializingTestCase<TimeRetentionPolicyConfig> {
public static TimeRetentionPolicyConfig randomTimeRetentionPolicyConfig() {
return new TimeRetentionPolicyConfig(randomAlphaOfLengthBetween(1, 10), new TimeValue(randomLongBetween(60000, 1_000_000_000L)));
}
@Override
protected TimeRetentionPolicyConfig doParseInstance(XContentParser parser) throws IOException {
return TimeRetentionPolicyConfig.fromXContent(parser, false);
}
@Override
protected TimeRetentionPolicyConfig createTestInstance() {
return randomTimeRetentionPolicyConfig();
}
@Override
protected TimeRetentionPolicyConfig mutateInstance(TimeRetentionPolicyConfig instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected Reader<TimeRetentionPolicyConfig> instanceReader() {
return TimeRetentionPolicyConfig::new;
}
public void testValidationMin() {
TimeRetentionPolicyConfig timeRetentionPolicyConfig = new TimeRetentionPolicyConfig(
randomAlphaOfLengthBetween(1, 10),
TimeValue.timeValueSeconds(10)
);
ActionRequestValidationException e = timeRetentionPolicyConfig.validate(null);
assertNotNull(e);
assertEquals(1, e.validationErrors().size());
assertEquals("retention_policy.time.max_age must be greater than 60s, found [10s]", e.validationErrors().get(0));
}
public void testValidationMax() {
TimeRetentionPolicyConfig timeRetentionPolicyConfig = new TimeRetentionPolicyConfig(
randomAlphaOfLengthBetween(1, 10),
TimeValue.parseTimeValue("600000000000d", "time value")
);
ActionRequestValidationException e = timeRetentionPolicyConfig.validate(null);
assertNotNull(e);
assertEquals(1, e.validationErrors().size());
assertEquals("retention_policy.time.max_age must not be greater than [106751.9d]", e.validationErrors().get(0));
}
}
| TimeRetentionPolicyConfigTests |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ClassPathBeanDefinitionScannerTests.java | {
"start": 31801,
"end": 31856
} | class ____ {
}
private static final | NonStaticInnerClass |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemPermission.java | {
"start": 1593,
"end": 4187
} | class ____ extends AbstractAbfsIntegrationTest{
private static Path testRoot = new Path("/test");
private static final String DEFAULT_UMASK_VALUE = "027";
private static final FsPermission DEFAULT_UMASK_PERMISSION = new FsPermission(DEFAULT_UMASK_VALUE);
private static final int KILOBYTE = 1024;
private FsPermission permission;
private Path path;
public ITestAzureBlobFileSystemPermission(FsPermission testPermission) throws Exception {
super();
permission = testPermission;
}
public static Collection abfsCreateNonRecursiveTestData()
throws Exception {
/*
Test Data
File/Folder name, User permission, Group permission, Other Permission,
Parent already exist
shouldCreateSucceed, expectedExceptionIfFileCreateFails
*/
final Collection<Object[]> datas = new ArrayList<>();
for (FsAction g : FsAction.values()) {
for (FsAction o : FsAction.values()) {
datas.add(new Object[] {new FsPermission(FsAction.ALL, g, o)});
}
}
return datas;
}
@Test
public void testFilePermission() throws Exception {
final AzureBlobFileSystem fs = this.getFileSystem();
assumeThat(getIsNamespaceEnabled(fs)).isTrue();
assumeThat(getIngressServiceType()).isEqualTo(AbfsServiceType.DFS);
fs.getConf().set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, DEFAULT_UMASK_VALUE);
path = new Path(testRoot, UUID.randomUUID().toString());
fs.mkdirs(path.getParent(),
new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
fs.removeDefaultAcl(path.getParent());
fs.create(path, permission, true, KILOBYTE, (short) 1, KILOBYTE - 1,
null).close();
FileStatus status = fs.getFileStatus(path);
Assertions.assertEquals(permission.applyUMask(DEFAULT_UMASK_PERMISSION), status.getPermission());
}
@Test
public void testFolderPermission() throws Exception {
final AzureBlobFileSystem fs = this.getFileSystem();
assumeThat(getIsNamespaceEnabled(fs)).isTrue();
assumeThat(getIngressServiceType()).isEqualTo(AbfsServiceType.DFS);
fs.getConf().set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "027");
path = new Path(testRoot, UUID.randomUUID().toString());
fs.mkdirs(path.getParent(),
new FsPermission(FsAction.ALL, FsAction.WRITE, FsAction.NONE));
fs.removeDefaultAcl(path.getParent());
fs.mkdirs(path, permission);
FileStatus status = fs.getFileStatus(path);
Assertions.assertEquals(permission.applyUMask(DEFAULT_UMASK_PERMISSION), status.getPermission());
}
}
| ITestAzureBlobFileSystemPermission |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationWithFactoryBeanAndParametersTests.java | {
"start": 1775,
"end": 2039
} | class ____ implements FactoryBean<Foo> {
@Override
public Foo getObject() {
return new Foo();
}
@Override
public Class<Foo> getObjectType() {
return Foo.class;
}
@Override
public boolean isSingleton() {
return true;
}
}
}
| FooFactoryBean |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/coldctr/ColdDataCgCtrService.java | {
"start": 1786,
"end": 10941
} | class ____ extends ServiceThread {
private static final Logger log = LoggerFactory.getLogger(LoggerName.ROCKETMQ_COLDCTR_LOGGER_NAME);
private final SystemClock systemClock = new SystemClock();
private final long cgColdAccResideTimeoutMills = 60 * 1000;
private static final AtomicLong GLOBAL_ACC = new AtomicLong(0L);
private static final String ADAPTIVE = "||adaptive";
/**
* as soon as the consumerGroup read the cold data then it will be put into @code cgColdThresholdMapRuntime,
* and it also will be removed when does not read cold data in @code cgColdAccResideTimeoutMills later;
*/
private final ConcurrentHashMap<String, AccAndTimeStamp> cgColdThresholdMapRuntime = new ConcurrentHashMap<>();
/**
* if the system admin wants to set the special cold read threshold for some consumerGroup, the configuration will
* be putted into @code cgColdThresholdMapConfig
*/
private final ConcurrentHashMap<String, Long> cgColdThresholdMapConfig = new ConcurrentHashMap<>();
private final BrokerConfig brokerConfig;
private final MessageStoreConfig messageStoreConfig;
private final ColdCtrStrategy coldCtrStrategy;
public ColdDataCgCtrService(BrokerController brokerController) {
this.brokerConfig = brokerController.getBrokerConfig();
this.messageStoreConfig = brokerController.getMessageStoreConfig();
this.coldCtrStrategy = brokerConfig.isUsePIDColdCtrStrategy() ? new PIDAdaptiveColdCtrStrategy(this, (long)(brokerConfig.getGlobalColdReadThreshold() * 0.8)) : new SimpleColdCtrStrategy(this);
}
@Override
public String getServiceName() {
return ColdDataCgCtrService.class.getSimpleName();
}
@Override
public void run() {
log.info("{} service started", this.getServiceName());
while (!this.isStopped()) {
try {
if (messageStoreConfig.isColdDataFlowControlEnable()) {
this.waitForRunning(5 * 1000);
} else {
this.waitForRunning(180 * 1000);
}
long beginLockTimestamp = this.systemClock.now();
clearDataAcc();
if (!brokerConfig.isColdCtrStrategyEnable()) {
clearAdaptiveConfig();
}
long costTime = this.systemClock.now() - beginLockTimestamp;
log.info("[{}] clearTheDataAcc-cost {} ms.", costTime > 3 * 1000 ? "NOTIFYME" : "OK", costTime);
} catch (Throwable e) {
log.warn(this.getServiceName() + " service has exception", e);
}
}
log.info("{} service end", this.getServiceName());
}
public String getColdDataFlowCtrInfo() {
JSONObject result = new JSONObject();
result.put("runtimeTable", this.cgColdThresholdMapRuntime);
result.put("configTable", this.cgColdThresholdMapConfig);
result.put("cgColdReadThreshold", this.brokerConfig.getCgColdReadThreshold());
result.put("globalColdReadThreshold", this.brokerConfig.getGlobalColdReadThreshold());
result.put("globalAcc", GLOBAL_ACC.get());
return result.toJSONString();
}
/**
* clear the long time no cold read cg in the table;
* update the acc to zero for the cg in the table;
* use the strategy to promote or decelerate the cg;
*/
private void clearDataAcc() {
log.info("clearDataAcc cgColdThresholdMapRuntime key size: {}", cgColdThresholdMapRuntime.size());
if (brokerConfig.isColdCtrStrategyEnable()) {
coldCtrStrategy.collect(GLOBAL_ACC.get());
}
Iterator<Entry<String, AccAndTimeStamp>> iterator = cgColdThresholdMapRuntime.entrySet().iterator();
while (iterator.hasNext()) {
Entry<String, AccAndTimeStamp> next = iterator.next();
if (System.currentTimeMillis() >= cgColdAccResideTimeoutMills + next.getValue().getLastColdReadTimeMills()) {
if (brokerConfig.isColdCtrStrategyEnable()) {
cgColdThresholdMapConfig.remove(buildAdaptiveKey(next.getKey()));
}
iterator.remove();
} else if (next.getValue().getColdAcc().get() >= getThresholdByConsumerGroup(next.getKey())) {
log.info("Coldctr consumerGroup: {}, acc: {}, threshold: {}", next.getKey(), next.getValue().getColdAcc().get(), getThresholdByConsumerGroup(next.getKey()));
if (brokerConfig.isColdCtrStrategyEnable() && !isGlobalColdCtr() && !isAdminConfig(next.getKey())) {
coldCtrStrategy.promote(buildAdaptiveKey(next.getKey()), getThresholdByConsumerGroup(next.getKey()));
}
}
next.getValue().getColdAcc().set(0L);
}
if (isGlobalColdCtr()) {
log.info("Coldctr global acc: {}, threshold: {}", GLOBAL_ACC.get(), this.brokerConfig.getGlobalColdReadThreshold());
}
if (brokerConfig.isColdCtrStrategyEnable()) {
sortAndDecelerate();
}
GLOBAL_ACC.set(0L);
}
private void sortAndDecelerate() {
List<Entry<String, Long>> configMapList = new ArrayList<Entry<String, Long>>(cgColdThresholdMapConfig.entrySet());
configMapList.sort(new Comparator<Entry<String, Long>>() {
@Override
public int compare(Entry<String, Long> o1, Entry<String, Long> o2) {
return (int)(o2.getValue() - o1.getValue());
}
});
Iterator<Entry<String, Long>> iterator = configMapList.iterator();
int maxDecelerate = 3;
while (iterator.hasNext() && maxDecelerate > 0) {
Entry<String, Long> next = iterator.next();
if (!isAdminConfig(next.getKey())) {
coldCtrStrategy.decelerate(next.getKey(), getThresholdByConsumerGroup(next.getKey()));
maxDecelerate --;
}
}
}
public void coldAcc(String consumerGroup, long coldDataToAcc) {
if (coldDataToAcc <= 0) {
return;
}
GLOBAL_ACC.addAndGet(coldDataToAcc);
AccAndTimeStamp atomicAcc = cgColdThresholdMapRuntime.get(consumerGroup);
if (null == atomicAcc) {
atomicAcc = new AccAndTimeStamp(new AtomicLong(coldDataToAcc));
atomicAcc = cgColdThresholdMapRuntime.putIfAbsent(consumerGroup, atomicAcc);
}
if (null != atomicAcc) {
atomicAcc.getColdAcc().addAndGet(coldDataToAcc);
atomicAcc.setLastColdReadTimeMills(System.currentTimeMillis());
}
}
public void addOrUpdateGroupConfig(String consumerGroup, Long threshold) {
cgColdThresholdMapConfig.put(consumerGroup, threshold);
}
public void removeGroupConfig(String consumerGroup) {
cgColdThresholdMapConfig.remove(consumerGroup);
}
public boolean isCgNeedColdDataFlowCtr(String consumerGroup) {
if (!this.messageStoreConfig.isColdDataFlowControlEnable()) {
return false;
}
if (MixAll.isSysConsumerGroupPullMessage(consumerGroup)) {
return false;
}
AccAndTimeStamp accAndTimeStamp = cgColdThresholdMapRuntime.get(consumerGroup);
if (null == accAndTimeStamp) {
return false;
}
Long threshold = getThresholdByConsumerGroup(consumerGroup);
if (accAndTimeStamp.getColdAcc().get() >= threshold) {
return true;
}
return GLOBAL_ACC.get() >= this.brokerConfig.getGlobalColdReadThreshold();
}
public boolean isGlobalColdCtr() {
return GLOBAL_ACC.get() > this.brokerConfig.getGlobalColdReadThreshold();
}
public BrokerConfig getBrokerConfig() {
return brokerConfig;
}
private Long getThresholdByConsumerGroup(String consumerGroup) {
if (isAdminConfig(consumerGroup)) {
if (consumerGroup.endsWith(ADAPTIVE)) {
return cgColdThresholdMapConfig.get(consumerGroup.split(ADAPTIVE)[0]);
}
return cgColdThresholdMapConfig.get(consumerGroup);
}
Long threshold = null;
if (brokerConfig.isColdCtrStrategyEnable()) {
if (consumerGroup.endsWith(ADAPTIVE)) {
threshold = cgColdThresholdMapConfig.get(consumerGroup);
} else {
threshold = cgColdThresholdMapConfig.get(buildAdaptiveKey(consumerGroup));
}
}
if (null == threshold) {
threshold = this.brokerConfig.getCgColdReadThreshold();
}
return threshold;
}
private String buildAdaptiveKey(String consumerGroup) {
return consumerGroup + ADAPTIVE;
}
private boolean isAdminConfig(String consumerGroup) {
if (consumerGroup.endsWith(ADAPTIVE)) {
consumerGroup = consumerGroup.split(ADAPTIVE)[0];
}
return cgColdThresholdMapConfig.containsKey(consumerGroup);
}
private void clearAdaptiveConfig() {
cgColdThresholdMapConfig.entrySet().removeIf(next -> next.getKey().endsWith(ADAPTIVE));
}
}
| ColdDataCgCtrService |
java | apache__camel | components/camel-infinispan/camel-infinispan/src/test/java/org/apache/camel/component/infinispan/remote/InfinispanRemoteQueryTestSupport.java | {
"start": 937,
"end": 2585
} | class ____ extends InfinispanRemoteTestSupport {
public static final User[] USERS = new User[] {
createUser("nameA", "surnameA"),
createUser("nameA", "surnameB"),
createUser("nameB", "surnameB") };
public static final User[] CQ_USERS = new User[] {
createUser("CQ01", "surname01"),
createUser("CQ02", "surname01"),
createUser("NQ03", "surname03"),
createUser("NQ04", "surname04")
};
public static String createKey(User user) {
return String.format("%s+%s", user.getName(), user.getSurname());
}
public static User createUser(String name, String surname) {
User user = new User();
user.setName(name);
user.setSurname(surname);
return user;
}
public static boolean eq(String str1, String str2) {
if (str1 == null) {
return str2 == null;
} else {
return str1.equals(str2);
}
}
public static boolean eq(User user, String name, String surname) {
if (user == null) {
return false;
}
if (!eq(user.getName(), name)) {
return false;
}
if (!eq(user.getSurname(), surname)) {
return false;
}
return true;
}
public static boolean hasUser(List<User> users, String name, String surname) {
if (users == null) {
return false;
}
for (User user : users) {
if (eq(user, name, surname)) {
return true;
}
}
return false;
}
}
| InfinispanRemoteQueryTestSupport |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/config/ConfigPropertiesResource.java | {
"start": 1473,
"end": 1583
} | enum ____ {
ONE,
TWO;
}
@ConfigMapping(prefix = "configproperties")
public | NumberEnum |
java | google__truth | core/src/main/java/com/google/common/truth/Correspondence.java | {
"start": 27721,
"end": 28753
} | class ____ which the {@code compare} method was called. When
* reporting failures, stack traces will be truncated above elements in this class.
* @param exception The exception encountered
* @param actual The {@code actual} argument to the {@code compare} call during which the
* exception was encountered
* @param expected The {@code expected} argument to the {@code compare} call during which the
* exception was encountered
*/
void addCompareException(
Class<?> callingClass,
Exception exception,
@Nullable Object actual,
@Nullable Object expected) {
if (firstCompareException == null) {
truncateStackTrace(exception, callingClass);
firstCompareException =
StoredException.create(exception, "compare", asList(actual, expected));
}
}
/**
* Adds an exception that was thrown during an {@code apply} call on the function used to key
* actual elements.
*
* @param callingClass The | from |
java | apache__dubbo | dubbo-plugin/dubbo-auth/src/main/java/org/apache/dubbo/auth/AccessKeyAuthenticator.java | {
"start": 1456,
"end": 4920
} | class ____ implements Authenticator {
private final FrameworkModel frameworkModel;
public AccessKeyAuthenticator(FrameworkModel frameworkModel) {
this.frameworkModel = frameworkModel;
}
@Override
public void sign(Invocation invocation, URL url) {
String currentTime = String.valueOf(System.currentTimeMillis());
AccessKeyPair accessKeyPair = getAccessKeyPair(invocation, url);
invocation.setAttachment(
Constants.REQUEST_SIGNATURE_KEY,
getSignature(url, invocation, accessKeyPair.getSecretKey(), currentTime));
invocation.setAttachment(Constants.REQUEST_TIMESTAMP_KEY, currentTime);
invocation.setAttachment(Constants.AK_KEY, accessKeyPair.getAccessKey());
invocation.setAttachment(CommonConstants.CONSUMER, url.getApplication());
}
@Override
public void authenticate(Invocation invocation, URL url) throws RpcAuthenticationException {
String accessKeyId = String.valueOf(invocation.getAttachment(Constants.AK_KEY));
String requestTimestamp = String.valueOf(invocation.getAttachment(Constants.REQUEST_TIMESTAMP_KEY));
String originSignature = String.valueOf(invocation.getAttachment(Constants.REQUEST_SIGNATURE_KEY));
String consumer = String.valueOf(invocation.getAttachment(CommonConstants.CONSUMER));
if (StringUtils.isAnyEmpty(accessKeyId, consumer, requestTimestamp, originSignature)) {
throw new RpcAuthenticationException("Failed to authenticate, maybe consumer side did not enable the auth");
}
AccessKeyPair accessKeyPair;
try {
accessKeyPair = getAccessKeyPair(invocation, url);
} catch (Exception e) {
throw new RpcAuthenticationException("Failed to authenticate , can't load the accessKeyPair");
}
String computeSignature = getSignature(url, invocation, accessKeyPair.getSecretKey(), requestTimestamp);
boolean success = computeSignature.equals(originSignature);
if (!success) {
throw new RpcAuthenticationException("Failed to authenticate, signature is not correct");
}
}
AccessKeyPair getAccessKeyPair(Invocation invocation, URL url) {
AccessKeyStorage accessKeyStorage = frameworkModel
.getExtensionLoader(AccessKeyStorage.class)
.getExtension(url.getParameter(Constants.ACCESS_KEY_STORAGE_KEY, Constants.DEFAULT_ACCESS_KEY_STORAGE));
AccessKeyPair accessKeyPair;
try {
accessKeyPair = accessKeyStorage.getAccessKey(url, invocation);
if (accessKeyPair == null
|| StringUtils.isAnyEmpty(accessKeyPair.getAccessKey(), accessKeyPair.getSecretKey())) {
throw new AccessKeyNotFoundException("AccessKeyId or secretAccessKey not found");
}
} catch (Exception e) {
throw new RuntimeException("Can't load the AccessKeyPair from accessKeyStorage");
}
return accessKeyPair;
}
String getSignature(URL url, Invocation invocation, String secretKey, String time) {
String requestString = String.format(
Constants.SIGNATURE_STRING_FORMAT,
url.getColonSeparatedKey(),
RpcUtils.getMethodName(invocation),
secretKey,
time);
return SignatureUtils.sign(requestString, secretKey);
}
}
| AccessKeyAuthenticator |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/LocaleFieldTest.java | {
"start": 1219,
"end": 1271
} | class ____ {
public Locale locale;
}
}
| Model |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/xml/XmlConfigurationFactory.java | {
"start": 1544,
"end": 2897
} | class ____ extends ConfigurationFactory {
public static final String FILE_EXTENSION = ".xml";
private static final org.apache.logging.log4j.Logger LOGGER = StatusLogger.getLogger();
/**
* File name prefix for test configurations.
*/
protected static final String TEST_PREFIX = "log4j-test";
/**
* File name prefix for standard configurations.
*/
protected static final String DEFAULT_PREFIX = "log4j";
@Override
protected String[] getSupportedTypes() {
if (!PropertiesUtil.getProperties()
.getBooleanProperty(ConfigurationFactory.LOG4J1_EXPERIMENTAL, Boolean.FALSE)) {
return null;
}
return new String[] {FILE_EXTENSION};
}
@Override
public Configuration getConfiguration(final LoggerContext loggerContext, final ConfigurationSource source) {
final int interval = PropertiesUtil.getProperties().getIntegerProperty(Log4j1Configuration.MONITOR_INTERVAL, 0);
return new XmlConfiguration(loggerContext, source, interval);
}
@Override
protected String getTestPrefix() {
return TEST_PREFIX;
}
@Override
protected String getDefaultPrefix() {
return DEFAULT_PREFIX;
}
@Override
protected String getVersion() {
return LOG4J1_VERSION;
}
}
| XmlConfigurationFactory |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/ClientTransportFactory.java | {
"start": 3499,
"end": 5879
} | class ____ {
private ChannelLogger channelLogger;
private String authority = "unknown-authority";
private Attributes eagAttributes = Attributes.EMPTY;
@Nullable private String userAgent;
@Nullable private HttpConnectProxiedSocketAddress connectProxiedSocketAddr;
public ChannelLogger getChannelLogger() {
return channelLogger;
}
public ClientTransportOptions setChannelLogger(ChannelLogger channelLogger) {
this.channelLogger = channelLogger;
return this;
}
public String getAuthority() {
return authority;
}
/** Sets the non-null authority. */
public ClientTransportOptions setAuthority(String authority) {
this.authority = Preconditions.checkNotNull(authority, "authority");
return this;
}
public Attributes getEagAttributes() {
return eagAttributes;
}
/** Sets the non-null EquivalentAddressGroup's attributes. */
public ClientTransportOptions setEagAttributes(Attributes eagAttributes) {
Preconditions.checkNotNull(eagAttributes, "eagAttributes");
this.eagAttributes = eagAttributes;
return this;
}
@Nullable
public String getUserAgent() {
return userAgent;
}
public ClientTransportOptions setUserAgent(@Nullable String userAgent) {
this.userAgent = userAgent;
return this;
}
@Nullable
public HttpConnectProxiedSocketAddress getHttpConnectProxiedSocketAddress() {
return connectProxiedSocketAddr;
}
public ClientTransportOptions setHttpConnectProxiedSocketAddress(
@Nullable HttpConnectProxiedSocketAddress connectProxiedSocketAddr) {
this.connectProxiedSocketAddr = connectProxiedSocketAddr;
return this;
}
@Override
public int hashCode() {
return Objects.hashCode(authority, eagAttributes, userAgent, connectProxiedSocketAddr);
}
@Override
public boolean equals(Object o) {
if (!(o instanceof ClientTransportOptions)) {
return false;
}
ClientTransportOptions that = (ClientTransportOptions) o;
return this.authority.equals(that.authority)
&& this.eagAttributes.equals(that.eagAttributes)
&& Objects.equal(this.userAgent, that.userAgent)
&& Objects.equal(this.connectProxiedSocketAddr, that.connectProxiedSocketAddr);
}
}
final | ClientTransportOptions |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/LegacySemanticQueryRewriteInterceptor.java | {
"start": 1306,
"end": 7245
} | class ____ implements QueryRewriteInterceptor {
public LegacySemanticQueryRewriteInterceptor() {}
@Override
public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) {
String fieldName = getFieldName(queryBuilder);
ResolvedIndices resolvedIndices = context.getResolvedIndices();
if (resolvedIndices == null) {
// No resolved indices, so return the original query.
return queryBuilder;
}
InferenceIndexInformationForField indexInformation = resolveIndicesForField(fieldName, resolvedIndices);
if (indexInformation.getInferenceIndices().isEmpty()) {
// No inference fields were identified, so return the original query.
return queryBuilder;
} else if (resolvedIndices.getRemoteClusterIndices().isEmpty()) {
if (indexInformation.nonInferenceIndices().isEmpty() == false) {
// Combined case where the field name requested by this query contains both
// semantic_text and non-inference fields, so we have to combine queries per index
// containing each field type.
return buildCombinedInferenceAndNonInferenceQuery(queryBuilder, indexInformation);
} else {
// The only fields we've identified are inference fields (e.g. semantic_text),
// so rewrite the entire query to work on a semantic_text field.
return buildInferenceQuery(queryBuilder, indexInformation);
}
} else {
throw new IllegalArgumentException(
getQueryName()
+ " query does not support cross-cluster search when querying a ["
+ SemanticTextFieldMapper.CONTENT_TYPE
+ "] field in a mixed-version cluster. Please update all nodes to at least Elasticsearch "
+ SEMANTIC_SEARCH_CCS_SUPPORT.toReleaseVersion()
+ "."
);
}
}
/**
* @param queryBuilder {@link QueryBuilder}
* @return The singular field name requested by the provided query builder.
*/
protected abstract String getFieldName(QueryBuilder queryBuilder);
/**
* @param queryBuilder {@link QueryBuilder}
* @return The text/query string requested by the provided query builder.
*/
protected abstract String getQuery(QueryBuilder queryBuilder);
/**
* Builds the inference query
*
* @param queryBuilder {@link QueryBuilder}
* @param indexInformation {@link InferenceIndexInformationForField}
* @return {@link QueryBuilder}
*/
protected abstract QueryBuilder buildInferenceQuery(QueryBuilder queryBuilder, InferenceIndexInformationForField indexInformation);
/**
* Builds a combined inference and non-inference query,
* which separates the different queries into appropriate indices based on field type.
* @param queryBuilder {@link QueryBuilder}
* @param indexInformation {@link InferenceIndexInformationForField}
* @return {@link QueryBuilder}
*/
protected abstract QueryBuilder buildCombinedInferenceAndNonInferenceQuery(
QueryBuilder queryBuilder,
InferenceIndexInformationForField indexInformation
);
private InferenceIndexInformationForField resolveIndicesForField(String fieldName, ResolvedIndices resolvedIndices) {
Collection<IndexMetadata> indexMetadataCollection = resolvedIndices.getConcreteLocalIndicesMetadata().values();
Map<String, InferenceFieldMetadata> inferenceIndicesMetadata = new HashMap<>();
List<String> nonInferenceIndices = new ArrayList<>();
for (IndexMetadata indexMetadata : indexMetadataCollection) {
String indexName = indexMetadata.getIndex().getName();
InferenceFieldMetadata inferenceFieldMetadata = indexMetadata.getInferenceFields().get(fieldName);
if (inferenceFieldMetadata != null) {
inferenceIndicesMetadata.put(indexName, inferenceFieldMetadata);
} else {
nonInferenceIndices.add(indexName);
}
}
return new InferenceIndexInformationForField(fieldName, inferenceIndicesMetadata, nonInferenceIndices);
}
protected QueryBuilder createSubQueryForIndices(Collection<String> indices, QueryBuilder queryBuilder) {
BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
boolQueryBuilder.must(queryBuilder);
boolQueryBuilder.filter(new TermsQueryBuilder(IndexFieldMapper.NAME, indices));
return boolQueryBuilder;
}
protected QueryBuilder createSemanticSubQuery(Collection<String> indices, String fieldName, String value) {
BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
boolQueryBuilder.must(new SemanticQueryBuilder(fieldName, value, true));
boolQueryBuilder.filter(new TermsQueryBuilder(IndexFieldMapper.NAME, indices));
return boolQueryBuilder;
}
/**
* Represents the indices and associated inference information for a field.
*/
public record InferenceIndexInformationForField(
String fieldName,
Map<String, InferenceFieldMetadata> inferenceIndicesMetadata,
List<String> nonInferenceIndices
) {
public Collection<String> getInferenceIndices() {
return inferenceIndicesMetadata.keySet();
}
public Map<String, List<String>> getInferenceIdsIndices() {
return inferenceIndicesMetadata.entrySet()
.stream()
.collect(
Collectors.groupingBy(
entry -> entry.getValue().getSearchInferenceId(),
Collectors.mapping(Map.Entry::getKey, Collectors.toList())
)
);
}
}
}
| LegacySemanticQueryRewriteInterceptor |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingWithPanacheProcessor.java | {
"start": 697,
"end": 2285
} | class ____ {
private static final DotName QUARKUS_LOG_DOTNAME = DotName.createSimple("io.quarkus.logging.Log");
private static final String QUARKUS_LOG_BINARY_NAME = "io/quarkus/logging/Log";
private static final String SYNTHETIC_LOGGER_FIELD_NAME = "quarkusSyntheticLogger";
private static final String JBOSS_LOGGER_BINARY_NAME = "org/jboss/logging/Logger";
private static final String JBOSS_LOGGER_DESCRIPTOR = "L" + JBOSS_LOGGER_BINARY_NAME + ";";
private static final String GET_LOGGER_DESCRIPTOR = "(Ljava/lang/String;)" + JBOSS_LOGGER_DESCRIPTOR;
private static final String LAMBDA_METAFACTORY = "java/lang/invoke/LambdaMetafactory";
@BuildStep
public void process(CombinedIndexBuildItem index, BuildProducer<BytecodeTransformerBuildItem> transformers) {
for (ClassInfo clazz : index.getIndex().getKnownUsers(QUARKUS_LOG_DOTNAME)) {
String className = clazz.name().toString();
transformers.produce(new BytecodeTransformerBuildItem.Builder()
.setClassToTransform(className)
.setVisitorFunction((ignored, visitor) -> new AddLoggerFieldAndRewriteInvocations(visitor, className))
.setClassReaderOptions(ClassReader.EXPAND_FRAMES)
.setPriority(1000)
.build());
}
}
/**
* Makes the following modifications to the visited class:
* <ul>
* <li>adds a {@code private static final} field of type {@code org.jboss.logging.Logger}
* ({@code public} in case the | LoggingWithPanacheProcessor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ThrowSpecificExceptionsTest.java | {
"start": 2050,
"end": 2344
} | class ____ {
void test() {
throw new VerifyException();
}
}
""")
.doTest();
}
@Test
public void anonymousClass() {
helper
.addSourceLines(
"Test.java",
"""
public | Test |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvtVO/DataTransaction2.java | {
"start": 2684,
"end": 2837
} | class ____ {
private Limit limit = new Limit();
private Map<String, String> form = new HashMap<String, String>();
| Param |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/embeddable/nested/property/Address.java | {
"start": 252,
"end": 1072
} | class ____ {
private String street;
private String city;
private Postcode postcode;
public String getStreet() {
return street;
}
public String getCity() {
return city;
}
public Postcode getPostcode() {
return postcode;
}
@Override
public boolean equals(Object obj) {
if (obj == this) return true;
if (obj == null || obj.getClass() != this.getClass()) return false;
var that = (Address) obj;
return Objects.equals(this.street, that.street) &&
Objects.equals(this.city, that.city) &&
Objects.equals(this.postcode, that.postcode);
}
@Override
public int hashCode() {
return Objects.hash(street, city, postcode);
}
@Override
public String toString() {
return "Address[" +
"street=" + street + ", " +
"city=" + city + ", " +
"postcode=" + postcode + ']';
}
}
| Address |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/RestResponse.java | {
"start": 35262,
"end": 42243
} | class ____<T> {
/**
* Protected constructor, use one of the static methods of
* {@code RestResponse} to obtain an instance.
*/
protected ResponseBuilder() {
}
/**
* Create a new builder instance.
*
* @return a new response builder.
*/
protected static <T> ResponseBuilder<T> newInstance() {
return ((org.jboss.resteasy.reactive.common.jaxrs.RuntimeDelegateImpl) RuntimeDelegate.getInstance())
.createRestResponseBuilder();
}
/**
* Create a RestResponse instance from the current ResponseBuilder. The builder
* is reset to a blank state equivalent to calling the ok method.
*
* @return a RestResponse instance.
*/
public abstract RestResponse<T> build();
/**
* {@inheritDoc}
* <p>
* Create a copy of the ResponseBuilder preserving its state.
* </p>
*
* @return a copy of the ResponseBuilder.
*/
@Override
public abstract ResponseBuilder<T> clone();
/**
* Set the status on the ResponseBuilder.
*
* @param status the response status.
* @return the updated response builder.
* @throws IllegalArgumentException if status is less than {@code 100} or greater
* than {@code 599}.
*/
public abstract <Ret extends T> ResponseBuilder<Ret> status(int status);
/**
* Set the status on the ResponseBuilder.
*
* @param status the response status.
* @param reasonPhrase the reason phrase.
* @return the updated response builder.
* @throws IllegalArgumentException if status is less than {@code 100} or greater
* than {@code 599}.
*/
public abstract <Ret extends T> ResponseBuilder<Ret> status(int status, String reasonPhrase);
/**
* Set the status on the ResponseBuilder.
*
* @param status the response status.
* @return the updated response builder.
* @throws IllegalArgumentException if status is {@code null}.
*/
public <Ret extends T> ResponseBuilder<Ret> status(StatusType status) {
if (status == null) {
throw new IllegalArgumentException();
}
return status(status.getStatusCode(), status.getReasonPhrase());
}
/**
* Set the status on the ResponseBuilder.
*
* @param status the response status.
* @return the updated response builder.
* @throws IllegalArgumentException if status is {@code null}.
*/
public ResponseBuilder<T> status(Status status) {
return status((StatusType) status);
}
/**
* Set the response entity in the builder.
* <p />
* Any Java type instance for a response entity, that is supported by the
* runtime can be passed. It is the callers responsibility to wrap the
* actual entity with {@link GenericEntity} if preservation of its generic
* type is required. Note that the entity can be also set as an
* {@link java.io.InputStream input stream}.
* <p />
* A specific entity media type can be set using one of the {@code type(...)}
* methods.
*
* @param entity the request entity.
* @return updated response builder instance.
* @see #entity(java.lang.Object, java.lang.annotation.Annotation[])
* @see #type(jakarta.ws.rs.core.MediaType)
* @see #type(java.lang.String)
*/
public abstract ResponseBuilder<T> entity(T entity);
/**
* Set the response entity in the builder.
* <p />
* Any Java type instance for a response entity, that is supported by the
* runtime can be passed. It is the callers responsibility to wrap the
* actual entity with {@link GenericEntity} if preservation of its generic
* type is required. Note that the entity can be also set as an
* {@link java.io.InputStream input stream}.
* <p />
* A specific entity media type can be set using one of the {@code type(...)}
* methods.
*
* @param entity the request entity.
* @param annotations annotations that will be passed to the {@link MessageBodyWriter},
* (in addition to any annotations declared directly on a resource
* method that returns the built response).
* @return updated response builder instance.
* @see #entity(java.lang.Object)
* @see #type(jakarta.ws.rs.core.MediaType)
* @see #type(java.lang.String)
*/
public abstract ResponseBuilder<T> entity(T entity, Annotation[] annotations);
/**
* Set the list of allowed methods for the resource. Any duplicate method
* names will be truncated to a single entry.
*
* @param methods the methods to be listed as allowed for the resource,
* if {@code null} any existing allowed method list will be removed.
* @return the updated response builder.
*/
public abstract ResponseBuilder<T> allow(String... methods);
/**
* Set the list of allowed methods for the resource.
*
* @param methods the methods to be listed as allowed for the resource,
* if {@code null} any existing allowed method list will be removed.
* @return the updated response builder.
*/
public abstract ResponseBuilder<T> allow(Set<String> methods);
/**
* Set the cache control data of the message.
*
* @param cacheControl the cache control directives, if {@code null}
* any existing cache control directives will be removed.
* @return the updated response builder.
*/
public abstract ResponseBuilder<T> cacheControl(CacheControl cacheControl);
/**
* Set the message entity content encoding.
*
* @param encoding the content encoding of the message entity,
* if {@code null} any existing value for content encoding will be
* removed.
* @return the updated response builder.
*/
public abstract ResponseBuilder<T> encoding(String encoding);
/**
* Add an arbitrary header.
*
* @param name the name of the header
* @param value the value of the header, the header will be serialized
* using a {@link jakarta.ws.rs.ext.RuntimeDelegate.HeaderDelegate} if
* one is available via {@link jakarta.ws.rs.ext.RuntimeDelegate#createHeaderDelegate(java.lang.Class)}
* for the | ResponseBuilder |
java | mockito__mockito | mockito-core/src/test/java/org/concurrentmockito/ThreadsShareAMockTest.java | {
"start": 288,
"end": 1171
} | class ____ extends TestBase {
private IMethods mock;
@Test
public void shouldAllowVerifyingInThreads() throws Exception {
for (int i = 0; i < 100; i++) {
performTest();
}
}
private void performTest() throws InterruptedException {
mock = mock(IMethods.class);
final Thread[] listeners = new Thread[3];
for (int i = 0; i < listeners.length; i++) {
listeners[i] =
new Thread() {
@Override
public void run() {
mock.simpleMethod("foo");
}
};
listeners[i].start();
}
for (Thread listener : listeners) {
listener.join();
}
verify(mock, times(listeners.length)).simpleMethod("foo");
}
}
| ThreadsShareAMockTest |
java | google__guava | guava-testlib/test/com/google/common/testing/FakeTickerTest.java | {
"start": 1658,
"end": 6454
} | class ____ extends TestCase {
@GwtIncompatible // NullPointerTester
public void testNullPointerExceptions() {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicInstanceMethods(new FakeTicker());
}
@GwtIncompatible // java.time.Duration
public void testAdvance() {
FakeTicker ticker = new FakeTicker();
assertEquals(0, ticker.read());
assertSame(ticker, ticker.advance(10));
assertEquals(10, ticker.read());
ticker.advance(1, MILLISECONDS);
assertEquals(1000010L, ticker.read());
ticker.advance(Duration.ofMillis(1));
assertEquals(2000010L, ticker.read());
}
public void testAutoIncrementStep_returnsSameInstance() {
FakeTicker ticker = new FakeTicker();
assertSame(ticker, ticker.setAutoIncrementStep(10, NANOSECONDS));
}
public void testAutoIncrementStep_nanos() {
FakeTicker ticker = new FakeTicker().setAutoIncrementStep(10, NANOSECONDS);
assertEquals(0, ticker.read());
assertEquals(10, ticker.read());
assertEquals(20, ticker.read());
}
public void testAutoIncrementStep_millis() {
FakeTicker ticker = new FakeTicker().setAutoIncrementStep(1, MILLISECONDS);
assertEquals(0, ticker.read());
assertEquals(1000000, ticker.read());
assertEquals(2000000, ticker.read());
}
public void testAutoIncrementStep_seconds() {
FakeTicker ticker = new FakeTicker().setAutoIncrementStep(3, SECONDS);
assertEquals(0, ticker.read());
assertEquals(3000000000L, ticker.read());
assertEquals(6000000000L, ticker.read());
}
@GwtIncompatible // java.time.Duration
public void testAutoIncrementStep_duration() {
FakeTicker ticker = new FakeTicker().setAutoIncrementStep(Duration.ofMillis(1));
assertEquals(0, ticker.read());
assertEquals(1000000, ticker.read());
assertEquals(2000000, ticker.read());
}
public void testAutoIncrementStep_resetToZero() {
FakeTicker ticker = new FakeTicker().setAutoIncrementStep(10, NANOSECONDS);
assertEquals(0, ticker.read());
assertEquals(10, ticker.read());
assertEquals(20, ticker.read());
for (TimeUnit timeUnit : TimeUnit.values()) {
ticker.setAutoIncrementStep(0, timeUnit);
assertEquals(
"Expected no auto-increment when setting autoIncrementStep to 0 " + timeUnit,
30,
ticker.read());
}
}
public void testAutoIncrement_negative() {
FakeTicker ticker = new FakeTicker();
assertThrows(
IllegalArgumentException.class, () -> ticker.setAutoIncrementStep(-1, NANOSECONDS));
}
@GwtIncompatible // concurrency
public void testConcurrentAdvance() throws Exception {
FakeTicker ticker = new FakeTicker();
int numberOfThreads = 64;
runConcurrentTest(
numberOfThreads,
new Callable<@Nullable Void>() {
@Override
public @Nullable Void call() throws Exception {
// adds two nanoseconds to the ticker
ticker.advance(1L);
Thread.sleep(10);
ticker.advance(1L);
return null;
}
});
assertEquals(numberOfThreads * 2, ticker.read());
}
@GwtIncompatible // concurrency
public void testConcurrentAutoIncrementStep() throws Exception {
int incrementByNanos = 3;
FakeTicker ticker = new FakeTicker().setAutoIncrementStep(incrementByNanos, NANOSECONDS);
int numberOfThreads = 64;
runConcurrentTest(
numberOfThreads,
new Callable<@Nullable Void>() {
@Override
public @Nullable Void call() throws Exception {
long unused = ticker.read();
return null;
}
});
assertEquals(incrementByNanos * numberOfThreads, ticker.read());
}
/** Runs {@code callable} concurrently {@code numberOfThreads} times. */
@GwtIncompatible // concurrency
private void runConcurrentTest(int numberOfThreads, Callable<@Nullable Void> callable)
throws Exception {
ExecutorService executorService = newFixedThreadPool(numberOfThreads);
CountDownLatch startLatch = new CountDownLatch(numberOfThreads);
CountDownLatch doneLatch = new CountDownLatch(numberOfThreads);
for (int i = numberOfThreads; i > 0; i--) {
@SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored
Future<?> possiblyIgnoredError =
executorService.submit(
new Callable<@Nullable Void>() {
@Override
public @Nullable Void call() throws Exception {
startLatch.countDown();
startLatch.await();
callable.call();
doneLatch.countDown();
return null;
}
});
}
doneLatch.await();
}
}
| FakeTickerTest |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/main/java/org/springframework/boot/docker/compose/lifecycle/StartCommand.java | {
"start": 1447,
"end": 1563
} | interface ____ {
void applyTo(DockerCompose dockerCompose, LogLevel logLevel, List<String> arguments);
}
}
| Command |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerMultiNodes.java | {
"start": 4037,
"end": 4095
} | class ____ Multi Node scheduling related tests.
*/
public | for |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customproviders/AnotherValidNonBlockingFiltersTest.java | {
"start": 5092,
"end": 5850
} | class ____ implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
MultivaluedMap<String, String> headers = requestContext.getHeaders();
String previousFilterHeaderValue = headers.getFirst("filter-request");
headers.putSingle("filter-request", previousFilterHeaderValue + "/5-another-standard-blocking");
String previousThreadHeaderValue = headers.getFirst("thread");
headers.putSingle("thread", previousThreadHeaderValue + "/" + BlockingOperationControl.isBlockingAllowed());
}
}
@Provider
@Priority(Priorities.USER + 50)
@NonBlocking
public static | AnotherStandardBlockingRequestFilter |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/hhh18829/EmployeeWithIdClass.java | {
"start": 307,
"end": 460
} | class ____ extends Address {
@Id
String empName;
@Id
Integer empId;
public record EmployeeId(String empName, Integer empId) {
}
}
| EmployeeWithIdClass |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/multipart/MalformedMultipartInputTest.java | {
"start": 2758,
"end": 2871
} | class ____ {
@RestForm
@PartType("text/myenum")
public MyEnum format;
}
public | Input |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/SessionFactoryImpl.java | {
"start": 7556,
"end": 17838
} | class ____ implements SessionFactoryImplementor {
private final String name;
private final String jndiName;
private final String uuid;
private transient volatile Status status = Status.OPEN;
private final transient SessionFactoryObserverChain observerChain = new SessionFactoryObserverChain();
private final transient SessionFactoryOptions sessionFactoryOptions;
private final transient Map<String,Object> settings;
private final transient SessionFactoryServiceRegistry serviceRegistry;
private final transient EventEngine eventEngine;
private final transient JdbcServices jdbcServices;
private final transient SqlStringGenerationContext sqlStringGenerationContext;
private final transient RuntimeMetamodelsImplementor runtimeMetamodels;
private final PersistenceUnitUtil jpaPersistenceUnitUtil;
private final transient CacheImplementor cacheAccess;
private final transient QueryEngine queryEngine;
private final transient SqlTranslationEngine sqlTranslationEngine;
private final transient TypeConfiguration typeConfiguration;
private final transient CurrentSessionContext currentSessionContext;
private final transient Map<String, FilterDefinition> filters;
private final transient Collection<FilterDefinition> autoEnabledFilters = new ArrayList<>();
private final transient JavaType<Object> tenantIdentifierJavaType;
private final transient EventListenerGroups eventListenerGroups;
private final transient WrapperOptions wrapperOptions;
private final transient SessionBuilderImplementor defaultSessionOpenOptions;
private final transient SessionBuilderImplementor temporarySessionOpenOptions;
private final transient StatelessSessionBuilder defaultStatelessOptions;
private final transient EntityNameResolver entityNameResolver;
private final transient SchemaManager schemaManager;
final transient ClassLoaderService classLoaderService;
final transient TransactionCoordinatorBuilder transactionCoordinatorBuilder;
final transient ConnectionProvider connectionProvider;
final transient MultiTenantConnectionProvider<Object> multiTenantConnectionProvider;
final transient ManagedBeanRegistry managedBeanRegistry;
final transient BatchBuilder batchBuilder;
final transient EventMonitor eventMonitor;
final transient EntityCopyObserverFactory entityCopyObserverFactory;
final transient ParameterMarkerStrategy parameterMarkerStrategy;
final transient JdbcValuesMappingProducerProvider jdbcValuesMappingProducerProvider;
/**
 * Builds the factory from the boot-time metamodel and configured options:
 * resolves services from the registry, creates the runtime metamodels and
 * query engine, pre-computes session builders, and finally scopes the
 * {@link TypeConfiguration} to this instance. On any failure the partially
 * built factory is disintegrated and closed (best-effort) before rethrowing.
 *
 * @param bootMetamodel the boot-time model produced by the metadata building process
 * @param options the configured factory options
 * @param bootstrapContext bootstrap-time context (supplies the TypeConfiguration)
 */
public SessionFactoryImpl(
        final MetadataImplementor bootMetamodel,
        final SessionFactoryOptions options,
        final BootstrapContext bootstrapContext) {
    SESSION_FACTORY_LOGGER.buildingSessionFactory();

    typeConfiguration = bootstrapContext.getTypeConfiguration();
    sessionFactoryOptions = options;
    serviceRegistry = getServiceRegistry( options, this );
    eventEngine = new EventEngine( bootMetamodel, this );
    bootMetamodel.initSessionFactory( this );

    name = getSessionFactoryName( options, serviceRegistry );
    jndiName = determineJndiName( name, options, serviceRegistry );
    uuid = options.getUuid();

    jdbcServices = serviceRegistry.requireService( JdbcServices.class );

    settings = getMaskedSettings( options, serviceRegistry );
    SESSION_FACTORY_LOGGER.instantiatingFactory( uuid, settings );

    sqlStringGenerationContext = createSqlStringGenerationContext( bootMetamodel, options, jdbcServices );

    cacheAccess = serviceRegistry.getService( CacheImplementor.class );

    jpaPersistenceUnitUtil = new PersistenceUnitUtilImpl( this );

    // register externally supplied observers before any lifecycle callbacks fire
    for ( var sessionFactoryObserver : options.getSessionFactoryObservers() ) {
        observerChain.addObserver( sessionFactoryObserver );
    }

    filters = new HashMap<>( bootMetamodel.getFilterDefinitions() );
    tenantIdentifierJavaType = tenantIdentifierType( options );
    // remember which filters should be enabled automatically
    for ( var filter : filters.values() ) {
        if ( filter.isAutoEnabled() ) {
            autoEnabledFilters.add( filter );
        }
    }

    entityNameResolver = new CoordinatingEntityNameResolver( this, getInterceptor() );
    schemaManager = new SchemaManagerImpl( this, bootMetamodel );

    // used for initializing the MappingMetamodelImpl
    classLoaderService = serviceRegistry.requireService( ClassLoaderService.class );
    jdbcValuesMappingProducerProvider = serviceRegistry.requireService( JdbcValuesMappingProducerProvider.class );

    final var integratorObserver = new IntegratorObserver();
    observerChain.addObserver( integratorObserver );
    try {
        integrate( bootMetamodel, bootstrapContext, integratorObserver );

        bootMetamodel.orderColumns( false );
        bootMetamodel.validate();

        primeSecondLevelCacheRegions( bootMetamodel );

        // create the empty runtime metamodels object
        final var runtimeMetamodelsImpl = new RuntimeMetamodelsImpl( typeConfiguration );
        runtimeMetamodels = runtimeMetamodelsImpl;

        // we build this before creating the runtime metamodels
        // because the SqlAstTranslators (unnecessarily, perhaps)
        // use the SqmFunctionRegistry when rendering SQL for Loaders
        queryEngine = new QueryEngineImpl( bootMetamodel, options, runtimeMetamodels, serviceRegistry, settings, name );

        final Map<String, FetchProfile> fetchProfiles = new HashMap<>();
        sqlTranslationEngine = new SqlTranslationEngineImpl( this, typeConfiguration, fetchProfiles );

        // now actually create the mapping and JPA metamodels
        final var mappingMetamodelImpl = new MappingMetamodelImpl( typeConfiguration, serviceRegistry );
        runtimeMetamodelsImpl.setMappingMetamodel( mappingMetamodelImpl );
        mappingMetamodelImpl.finishInitialization(
                new ModelCreationContext( bootstrapContext, bootMetamodel, mappingMetamodelImpl, typeConfiguration ) );
        runtimeMetamodelsImpl.setJpaMetamodel( mappingMetamodelImpl.getJpaMetamodel() );

        // this needs to happen after the mapping metamodel is
        // completely built, since we need to use the persisters
        addFetchProfiles( bootMetamodel, runtimeMetamodelsImpl, fetchProfiles );

        defaultSessionOpenOptions = createDefaultSessionOpenOptionsIfPossible();
        temporarySessionOpenOptions = defaultSessionOpenOptions == null ? null : buildTemporarySessionOpenOptions();
        defaultStatelessOptions = defaultSessionOpenOptions == null ? null : withStatelessOptions();

        wrapperOptions = new SessionFactoryBasedWrapperOptions( this );
        currentSessionContext = buildCurrentSessionContext();

        // cache references to some "hot" services:
        transactionCoordinatorBuilder = serviceRegistry.requireService( TransactionCoordinatorBuilder.class );
        entityCopyObserverFactory = serviceRegistry.requireService( EntityCopyObserverFactory.class );
        parameterMarkerStrategy = serviceRegistry.requireService( ParameterMarkerStrategy.class );
        batchBuilder = serviceRegistry.requireService( BatchBuilder.class );
        managedBeanRegistry = serviceRegistry.getService( ManagedBeanRegistry.class );

        // exactly one of the two connection providers is resolved,
        // depending on whether multi-tenancy is enabled
        final boolean multiTenancyEnabled = options.isMultiTenancyEnabled();
        connectionProvider =
                multiTenancyEnabled ? null : serviceRegistry.requireService( ConnectionProvider.class );
        multiTenantConnectionProvider =
                multiTenancyEnabled ? serviceRegistry.requireService( MultiTenantConnectionProvider.class ) : null;

        eventMonitor = loadEventMonitor();
        eventListenerGroups = new EventListenerGroups( serviceRegistry );

        // re-scope the TypeConfiguration to this SessionFactory,
        // now that we are (almost) fully-initialized ... note,
        // we could have done this earlier, but then it's hard to
        // really know or control who's calling back to us while
        // we're in an incompletely-initialized state
        typeConfiguration.scope( this );

        observerChain.sessionFactoryCreated( this );
    }
    catch ( Exception e ) {
        // undo integrations, then best-effort close; the original failure wins
        disintegrate( e, integratorObserver );
        try {
            close();
        }
        catch (Exception closeException) {
            SESSION_FACTORY_LOGGER.eatingErrorClosingFactoryAfterFailedInstantiation();
        }
        throw e;
    }

    SESSION_FACTORY_LOGGER.instantiatedFactory( uuid );
}
/**
 * Determines the {@link JavaType} used to represent tenant identifiers.
 * If a tenant-id filter is registered, the Java type is taken from that
 * filter's parameter mapping; otherwise the configured default applies.
 */
private JavaType<Object> tenantIdentifierType(SessionFactoryOptions options) {
    final var tenantIdFilter = filters.get( TenantIdBinder.FILTER_NAME );
    if ( tenantIdFilter == null ) {
        // no tenant-id filter configured: use the configured default
        return options.getDefaultTenantIdentifierJavaType();
    }
    final var mapping = tenantIdFilter.getParameterJdbcMapping( TenantIdBinder.PARAMETER_NAME );
    assert mapping != null;
    //NOTE: this is completely unsound
    //noinspection unchecked
    final var javaType = (JavaType<Object>) mapping.getJavaTypeDescriptor();
    return javaType;
}
/**
 * Discovers an {@link EventMonitor} via the service-loader mechanism,
 * falling back to a no-op monitor when none is registered.
 */
private EventMonitor loadEventMonitor() {
    final var discovered = classLoaderService.loadJavaServices( EventMonitor.class );
    if ( discovered.isEmpty() ) {
        return new EmptyEventMonitor();
    }
    // first registered implementation wins
    return discovered.iterator().next();
}
/**
 * Creates the SQL string generation context from the JDBC environment and
 * the explicitly configured default catalog/schema.
 */
private static SqlStringGenerationContext createSqlStringGenerationContext(
        MetadataImplementor bootMetamodel,
        SessionFactoryOptions options,
        JdbcServices jdbcServices) {
    // resolve the collaborators up front for readability
    final var jdbcEnvironment = jdbcServices.getJdbcEnvironment();
    final var database = bootMetamodel.getDatabase();
    final var defaultCatalog = options.getDefaultCatalog();
    final var defaultSchema = options.getDefaultSchema();
    return SqlStringGenerationContextImpl.fromExplicit( jdbcEnvironment, database, defaultCatalog, defaultSchema );
}
/**
 * Builds the factory-scoped service registry from the bootstrap registry
 * carried by the options.
 */
private static SessionFactoryServiceRegistry getServiceRegistry(
        SessionFactoryOptions options,
        SessionFactoryImplementor self) {
    final var registryFactory =
            options.getServiceRegistry().requireService( SessionFactoryServiceRegistryFactory.class );
    // it is not great how we pass a reference to
    // an incompletely-initialized instance here:
    return registryFactory.buildServiceRegistry( self, options );
}
// Simple accessors over state captured at construction time.

@Override
public EventListenerGroups getEventListenerGroups() {
    return eventListenerGroups;
}

@Override
public ParameterMarkerStrategy getParameterMarkerStrategy() {
    return parameterMarkerStrategy;
}

@Override
public JdbcValuesMappingProducerProvider getJdbcValuesMappingProducerProvider() {
    return jdbcValuesMappingProducerProvider;
}

@Override
public EntityCopyObserverFactory getEntityCopyObserver() {
    return entityCopyObserverFactory;
}

@Override
public ClassLoaderService getClassLoaderService() {
    return classLoaderService;
}

@Override
public ManagedBeanRegistry getManagedBeanRegistry() {
    return managedBeanRegistry;
}

// delegates to the event engine rather than a local field
@Override
public EventListenerRegistry getEventListenerRegistry() {
    return eventEngine.getListenerRegistry();
}
| SessionFactoryImpl |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/accumulators/AccumulatorHelper.java | {
"start": 9039,
"end": 9202
} | class ____, and then try to unwrap the value unchecked.
*
* @param serializedAccumulators The serialized accumulator results.
* @param loader The | loader |
java | elastic__elasticsearch | x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java | {
"start": 10294,
"end": 11571
} | class ____ extends PostingsFormat {
public EmptyPostingsFormat() {
super("EmptyPostingsFormat");
}
@Override
public FieldsConsumer fieldsConsumer(SegmentWriteState state) {
return new FieldsConsumer() {
@Override
public void write(Fields fields, NormsProducer norms) {
throw new UnsupportedOperationException();
}
@Override
public void close() {
}
};
}
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) {
return new FieldsProducer() {
@Override
public void close() {
}
@Override
public void checkIntegrity() {
}
@Override
public Iterator<String> iterator() {
return null;
}
@Override
public Terms terms(String field) {
return null;
}
@Override
public int size() {
return 0;
}
};
}
}
}
| EmptyPostingsFormat |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/test/java/org/springframework/security/oauth2/server/resource/DefaultAuthenticationEventPublisherBearerTokenTests.java | {
"start": 1504,
"end": 2312
} | class ____ {
DefaultAuthenticationEventPublisher publisher;

@Test
public void publishAuthenticationFailureWhenInvalidBearerTokenExceptionThenMaps() {
    // given a publisher wired to a mocked application event publisher
    ApplicationEventPublisher applicationPublisher = mock(ApplicationEventPublisher.class);
    this.publisher = new DefaultAuthenticationEventPublisher(applicationPublisher);
    Authentication token = new JwtAuthenticationToken(TestJwts.jwt().build());
    Exception rootCause = new Exception();
    // when failures are published for both InvalidBearerTokenException constructors
    this.publisher.publishAuthenticationFailure(new InvalidBearerTokenException("invalid"), token);
    this.publisher.publishAuthenticationFailure(new InvalidBearerTokenException("invalid", rootCause), token);
    // then each one maps to an AuthenticationFailureBadCredentialsEvent
    verify(applicationPublisher, times(2)).publishEvent(isA(AuthenticationFailureBadCredentialsEvent.class));
}
}
| DefaultAuthenticationEventPublisherBearerTokenTests |
java | apache__flink | flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/RegistryAvroDeserializationSchema.java | {
"start": 2749,
"end": 3225
} | class ____ which deserialize. Should be either {@link SpecificRecord} or
* {@link GenericRecord}.
* @param reader reader's Avro schema. Should be provided if recordClazz is {@link
* GenericRecord}
* @param schemaCoderProvider schema provider that allows instantiation of {@link SchemaCoder}
* that will be used for schema reading
* @param encoding Avro serialization approach to use. Required to identify the correct decoder
* | to |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/output/StreamEntryDeletionResultListOutputUnitTests.java | {
"start": 2263,
"end": 2782
} | class ____ extends StreamingOutput.Subscriber<StreamEntryDeletionResult> {
// Collects every result delivered through either Subscriber callback.
private final List<StreamEntryDeletionResult> results = new java.util.ArrayList<>();

// Single-argument variant: just record the streamed item.
@Override
public void onNext(StreamEntryDeletionResult item) {
    results.add(item);
}

// Collection variant: record the item and also forward it to the supplied target.
@Override
public void onNext(Collection<StreamEntryDeletionResult> outputTarget, StreamEntryDeletionResult item) {
    results.add(item);
    outputTarget.add(item);
}
}
}
| TestSubscriber |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java | {
"start": 5110,
"end": 86511
} | class ____ extends SamlTestCase {
public static final String TEST_IDP_ENTITY_ID = "http://demo_josso_1.josso.dev.docker:8081/IDBUS/JOSSO-TUTORIAL/IDP1/SAML2/MD";
private static final TimeValue METADATA_REFRESH = TimeValue.timeValueMillis(3000);

// Realm identity used by every test; REALM_SETTINGS_PREFIX is the full
// settings namespace "xpack.security.authc.realms.saml.<realm-name>".
private static final String REALM_NAME = "my-saml";
private static final String REALM_SETTINGS_PREFIX = "xpack.security.authc.realms.saml." + REALM_NAME;

// Re-initialized before each test (see setupEnv)
private Settings globalSettings;
private Environment env;
private ThreadContext threadContext;
// Initializes OpenSAML and builds a minimal Environment/ThreadContext
// (a throwaway home directory is all realm construction needs).
@Before
public void setupEnv() throws PrivilegedActionException {
    SamlUtils.initialize(logger);
    globalSettings = Settings.builder().put("path.home", createTempDir()).build();
    env = TestEnvironment.newEnvironment(globalSettings);
    threadContext = new ThreadContext(globalSettings);
}
// Loads IdP metadata from a local file and verifies it parses into the expected
// EntityDescriptor; the resolver is always destroyed to stop its refresh cycle.
public void testReadIdpMetadataFromFile() throws Exception {
    final Path path = getDataPath("idp1.xml");
    Tuple<RealmConfig, SSLService> config = buildConfig(path.toString());
    final ResourceWatcherService watcherService = mock(ResourceWatcherService.class);
    Tuple<AbstractReloadingMetadataResolver, Supplier<EntityDescriptor>> tuple = SamlRealm.initializeResolver(
        logger,
        config.v1(),
        config.v2(),
        watcherService
    );
    try {
        assertIdp1MetadataParsedCorrectly(tuple.v2().get());
    } finally {
        tuple.v1().destroy();
    }
}
// Serves metadata over TLS from a MockWebServer and verifies that (a) it parses
// and (b) the resolver fetches it again (request count grows past the initial load).
public void testReadIdpMetadataFromHttps() throws Exception {
    final Path path = getDataPath("idp1.xml");
    final String body = Files.readString(path);
    TestsSSLService sslService = buildTestSslService();
    try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext("xpack.security.http.ssl"), false)) {
        proxyServer.start();
        // two canned responses: the initial load plus at least one refresh
        proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody(body).addHeader("Content-Type", "application/xml"));
        proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody(body).addHeader("Content-Type", "application/xml"));
        assertEquals(0, proxyServer.requests().size());

        Tuple<RealmConfig, SSLService> config = buildConfig("https://localhost:" + proxyServer.getPort());
        logger.info("Settings\n{}", config.v1().settings().toDelimitedString('\n'));
        final ResourceWatcherService watcherService = mock(ResourceWatcherService.class);
        Tuple<AbstractReloadingMetadataResolver, Supplier<EntityDescriptor>> tuple = SamlRealm.initializeResolver(
            logger,
            config.v1(),
            config.v2(),
            watcherService
        );
        try {
            final int firstRequestCount = proxyServer.requests().size();
            assertThat(firstRequestCount, greaterThanOrEqualTo(1));
            assertIdp1MetadataParsedCorrectly(tuple.v2().get());
            assertBusy(() -> assertThat(proxyServer.requests().size(), greaterThan(firstRequestCount)));
        } finally {
            tuple.v1().destroy();
        }
    }
}
// With fail_on_error enabled, a 4xx response during the initial metadata load
// must abort resolver creation with an ElasticsearchSecurityException.
public void testFailOnErrorForInvalidHttpsMetadata() throws Exception {
    TestsSSLService sslService = buildTestSslService();
    try (MockWebServer webServer = new MockWebServer(sslService.sslContext("xpack.security.http.ssl"), false)) {
        webServer.start();
        webServer.enqueue(
            new MockResponse().setResponseCode(randomIntBetween(400, 405))
                .setBody("No metadata available")
                .addHeader("Content-Type", "text/plain")
        );
        assertEquals(0, webServer.requests().size());

        var metadataPath = "https://localhost:" + webServer.getPort();
        final Tuple<RealmConfig, SSLService> config = buildConfig(
            metadataPath,
            settings -> settings.put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_FAIL_ON_ERROR),
                true
            )
        );
        final ResourceWatcherService watcherService = mock(ResourceWatcherService.class);
        var exception = expectThrows(
            ElasticsearchSecurityException.class,
            () -> SamlRealm.initializeResolver(logger, config.v1(), config.v2(), watcherService)
        );
        assertThat(exception, throwableWithMessage("cannot load SAML metadata from [" + metadataPath + "]"));
    }
}
/**
 * Metadata that initially fails to load over HTTPS must eventually be
 * reloaded, both with and without background refreshing.
 */
public void testRetryFailedHttpsMetadata() throws Exception {
    final String metadataXml = Files.readString(getDataPath("idp1.xml"));
    final TestsSSLService tls = buildTestSslService();
    // exercise both refresh modes, background first
    for (boolean backgroundRefresh : new boolean[] { true, false }) {
        doTestReloadFailedHttpsMetadata(metadataXml, tls, backgroundRefresh);
    }
}
/**
 * Starts a TLS MockWebServer whose first response is an error, verifies the
 * realm initially sees unresolved metadata, then enqueues a good response and
 * asserts the metadata is eventually loaded.
 *
 * @param testBackgroundRefresh If {@code true}, the test asserts that the metadata is automatically loaded in the background.
 *                              If {@code false}, the test triggers activity on the realm to force a reload of the metadata.
 */
private void doTestReloadFailedHttpsMetadata(String metadataBody, TestsSSLService sslService, boolean testBackgroundRefresh)
    throws Exception {
    try (MockWebServer webServer = new MockWebServer(sslService.sslContext("xpack.security.http.ssl"), false)) {
        webServer.start();
        // first response is always an error, so the initial load fails
        webServer.enqueue(
            new MockResponse().setResponseCode(randomIntBetween(400, 405))
                .setBody("No metadata available")
                .addHeader("Content-Type", "text/plain")
        );
        assertEquals(0, webServer.requests().size());

        // Even with a long refresh we should automatically retry metadata that fails
        final TimeValue defaultRefreshTime = TimeValue.timeValueHours(24);
        // OpenSAML (4.0) has a bug that can attempt to set negative duration timers if the refresh is too short.
        // Don't set this too small or we may hit "java.lang.IllegalArgumentException: Negative delay."
        final TimeValue minimumRefreshTime = testBackgroundRefresh ? TimeValue.timeValueMillis(500) : defaultRefreshTime;

        final Tuple<RealmConfig, SSLService> config = buildConfig("https://localhost:" + webServer.getPort(), builder -> {
            builder.put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_REFRESH),
                defaultRefreshTime.getStringRep()
            );
            builder.put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_MIN_REFRESH),
                minimumRefreshTime.getStringRep()
            );
            if (randomBoolean()) {
                builder.put(
                    SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_FAIL_ON_ERROR),
                    false
                );
            }
        });
        logger.info("Settings\n{}", config.v1().settings().toDelimitedString('\n'));
        final ResourceWatcherService watcherService = mock(ResourceWatcherService.class);
        Tuple<AbstractReloadingMetadataResolver, Supplier<EntityDescriptor>> tuple = SamlRealm.initializeResolver(
            logger,
            config.v1(),
            config.v2(),
            watcherService
        );
        try {
            final int firstRequestCount = webServer.requests().size();
            assertThat(firstRequestCount, greaterThanOrEqualTo(1));
            // the failed load shows up as an unresolved entity, not an exception
            assertThat(tuple.v2().get(), instanceOf(UnresolvedEntity.class));
            webServer.enqueue(
                new MockResponse().setResponseCode(200).setBody(metadataBody).addHeader("Content-Type", "application/xml")
            );
            if (testBackgroundRefresh) {
                assertBusy(() -> assertThat(webServer.requests().size(), greaterThan(firstRequestCount)), 2, TimeUnit.SECONDS);
            } else {
                // The Supplier.get() call will trigger a reload anyway
            }
            assertBusy(() -> assertIdp1MetadataParsedCorrectly(tuple.v2().get()), 3, TimeUnit.SECONDS);
        } finally {
            tuple.v1().destroy();
        }
    }
}
// A min_refresh larger than refresh is an invalid configuration: realm creation
// must fail with a SettingsException naming both settings and their values.
public void testMinRefreshGreaterThanRefreshThrowsSettingsException() throws GeneralSecurityException, IOException {
    // refresh is at most ~110 minutes; min refresh is at least 2 hours
    var refresh = randomTimeValue(20, 110, TimeUnit.MINUTES, TimeUnit.SECONDS);
    var minRefresh = randomTimeValue(2, 8, TimeUnit.HOURS);
    Tuple<RealmConfig, SSLService> tuple = buildConfig(
        "https://localhost:9900/metadata.xml",
        builder -> builder.put(
            SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_REFRESH),
            refresh
        ).put(SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_MIN_REFRESH), minRefresh)
    );
    final RealmConfig config = tuple.v1();
    final SSLService sslService = tuple.v2();
    final SettingsException settingsException = expectThrows(
        SettingsException.class,
        () -> SamlRealm.create(
            config,
            sslService,
            mock(ResourceWatcherService.class),
            mock(UserRoleMapper.class),
            mock(SingleSamlSpConfiguration.class)
        )
    );
    assertThat(
        settingsException,
        throwableWithMessage(
            containsString(
                "the value ("
                    + minRefresh
                    + ") for ["
                    + SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_MIN_REFRESH)
                    + "]"
            )
        )
    );
    assertThat(
        settingsException,
        throwableWithMessage(
            containsString(
                "greater than the value ("
                    + refresh.getStringRep()
                    + ") for ["
                    + SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_REFRESH)
                    + "]"
            )
        )
    );
}
// min_refresh below the hard floor of 500ms must be rejected at settings
// parse time with an IllegalArgumentException naming the limit.
public void testAbsurdlyLowMinimumRefreshThrowsException() {
    // either sub-500ms milliseconds, or micro/nano-second values
    var minRefresh = randomBoolean()
        ? randomTimeValue(1, 450, TimeUnit.MILLISECONDS)
        : randomTimeValue(1, 999, TimeUnit.MICROSECONDS, TimeUnit.NANOSECONDS);
    Tuple<RealmConfig, SSLService> tuple = buildConfig(
        "https://localhost:9900/metadata.xml",
        builder -> builder.put(
            SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_MIN_REFRESH),
            minRefresh
        )
    );
    final RealmConfig config = tuple.v1();
    final SSLService sslService = tuple.v2();
    final IllegalArgumentException settingsException = expectThrows(
        IllegalArgumentException.class,
        () -> SamlRealm.create(
            config,
            sslService,
            mock(ResourceWatcherService.class),
            mock(UserRoleMapper.class),
            mock(SingleSamlSpConfiguration.class)
        )
    );
    assertThat(
        settingsException,
        throwableWithMessage(
            "failed to parse value ["
                + minRefresh
                + "] for setting ["
                + SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_MIN_REFRESH)
                + "], must be >= [500ms]"
        )
    );
}
// Builds a UserRoleMapper mock that always resolves the given roles and
// captures the UserData it was invoked with into the supplied reference.
private UserRoleMapper mockRoleMapper(Set<String> rolesToReturn, AtomicReference<UserRoleMapper.UserData> userData) {
    final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
    Mockito.doAnswer(invocation -> {
        assert invocation.getArguments().length == 2;
        userData.set((UserRoleMapper.UserData) invocation.getArguments()[0]);
        @SuppressWarnings("unchecked")
        ActionListener<Set<String>> listener = (ActionListener<Set<String>>) invocation.getArguments()[1];
        listener.onResponse(rolesToReturn);
        return null;
    }).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), anyActionListener());
    return roleMapper;
}
// A role mapper that resolves no roles still yields a successful
// authentication, just with an empty roles array on the user.
public void testAuthenticateWithEmptyRoleMapping() throws Exception {
    final AtomicReference<UserRoleMapper.UserData> userData = new AtomicReference<>();
    final UserRoleMapper roleMapper = mockRoleMapper(Set.of(), userData);

    final boolean testWithDelimiter = randomBoolean();
    final AuthenticationResult<User> result = performAuthentication(
        roleMapper,
        randomBoolean(),
        randomBoolean(),
        randomFrom(Boolean.TRUE, Boolean.FALSE, null),
        false,
        randomBoolean() ? REALM_NAME : null,
        testWithDelimiter ? List.of("STRIKE Team: Delta$shield") : Arrays.asList("avengers", "shield"),
        testWithDelimiter ? "$" : null,
        randomBoolean() ? List.of("superuser", "kibana_admin") : randomFrom(List.of(), null),
        null
    );
    assertThat(result, notNullValue());
    assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.SUCCESS));
    assertThat(result.getValue().roles().length, equalTo(0));
}
// End-to-end authentication through the role mapper: checks principal/email
// resolution, role exclusion, user-metadata population, and that private
// attributes surface (already closed) in the result metadata.
public void testAuthenticateWithRoleMapping() throws Exception {
    final AtomicReference<UserRoleMapper.UserData> userData = new AtomicReference<>();
    final UserRoleMapper roleMapper = mockRoleMapper(Set.of("superuser", "kibana_admin"), userData);

    final boolean excludeRoles = randomBoolean();
    final List<String> rolesToExclude = excludeRoles ? List.of("superuser") : randomFrom(List.of(), null);
    final boolean useNameId = randomBoolean();
    final boolean principalIsEmailAddress = randomBoolean();
    final Boolean populateUserMetadata = randomFrom(Boolean.TRUE, Boolean.FALSE, null);
    final String authenticatingRealm = randomBoolean() ? REALM_NAME : null;
    final boolean testWithDelimiter = randomBoolean();
    final AuthenticationResult<User> result = performAuthentication(
        roleMapper,
        useNameId,
        principalIsEmailAddress,
        populateUserMetadata,
        false,
        authenticatingRealm,
        testWithDelimiter ? List.of("STRIKE Team: Delta$shield") : Arrays.asList("avengers", "shield"),
        testWithDelimiter ? "$" : null,
        rolesToExclude,
        Map.of("top_secret", List.of("Batman's secret identity is Bruce Wayne!"))
    );

    assertThat(result, notNullValue());
    assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.SUCCESS));
    assertThat(result.getValue().principal(), equalTo(useNameId ? "clint.barton" : "cbarton"));
    assertThat(result.getValue().email(), equalTo("cbarton@shield.gov"));
    if (excludeRoles) {
        assertThat(result.getValue().roles(), arrayContainingInAnyOrder("kibana_admin"));
    } else {
        assertThat(result.getValue().roles(), arrayContainingInAnyOrder("kibana_admin", "superuser"));
    }
    if (populateUserMetadata == Boolean.FALSE) {
        // TODO : "saml_nameid" should be null too, but the logout code requires it for now.
        assertThat(result.getValue().metadata().get("saml_uid"), nullValue());
    } else {
        final String nameIdValue = principalIsEmailAddress ? "clint.barton@shield.gov" : "clint.barton";
        final String uidValue = principalIsEmailAddress ? "cbarton@shield.gov" : "cbarton";
        assertThat(result.getValue().metadata().get("saml_nameid"), equalTo(nameIdValue));
        assertThat(result.getValue().metadata().get("saml_uid"), instanceOf(Iterable.class));
        assertThat((Iterable<?>) result.getValue().metadata().get("saml_uid"), contains(uidValue));
        // private attributes never leak into user metadata
        assertThat(result.getValue().metadata().get("saml_top_secret"), nullValue());
    }

    assertThat(result.getMetadata(), notNullValue());
    assertThat(result.getMetadata().containsKey(PRIVATE_ATTRIBUTES_METADATA), is(true));
    @SuppressWarnings("unchecked")
    Map<String, List<SecureString>> privateAttributesMetadata = (Map<String, List<SecureString>>) result.getMetadata()
        .get(PRIVATE_ATTRIBUTES_METADATA);
    assertThat(privateAttributesMetadata, notNullValue());
    assertThat(privateAttributesMetadata.keySet(), containsInAnyOrder("top_secret"));

    List<SecureString> secretAttribute = privateAttributesMetadata.get("top_secret");
    assertThat(secretAttribute, notNullValue());
    assertThat(secretAttribute.size(), equalTo(1));
    // the SecureString values are expected to have been closed already
    assertEquals(
        "SecureString has already been closed",
        expectThrows(IllegalStateException.class, () -> secretAttribute.getFirst().getChars()).getMessage()
    );

    assertThat(userData.get().getUsername(), equalTo(useNameId ? "clint.barton" : "cbarton"));
    if (testWithDelimiter) {
        assertThat(userData.get().getGroups(), containsInAnyOrder("STRIKE Team: Delta", "shield"));
    } else {
        assertThat(userData.get().getGroups(), containsInAnyOrder("avengers", "shield"));
    }
}
// With a delegated authorization realm configured, the user must come from the
// lookup realm and the role mapper must never be consulted (the mock fails if it is).
public void testAuthenticateWithAuthorizingRealm() throws Exception {
    final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
    Mockito.doAnswer(invocation -> {
        assert invocation.getArguments().length == 2;
        @SuppressWarnings("unchecked")
        ActionListener<Set<String>> listener = (ActionListener<Set<String>>) invocation.getArguments()[1];
        listener.onFailure(new RuntimeException("Role mapping should not be called"));
        return null;
    }).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), anyActionListener());

    final boolean useNameId = randomBoolean();
    final boolean principalIsEmailAddress = randomBoolean();
    final String authenticatingRealm = randomBoolean() ? REALM_NAME : null;
    AuthenticationResult<User> result = performAuthentication(
        roleMapper,
        useNameId,
        principalIsEmailAddress,
        null,
        true,
        authenticatingRealm
    );
    assertThat(result, notNullValue());
    assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.SUCCESS));
    assertThat(result.getValue().principal(), equalTo(useNameId ? "clint.barton" : "cbarton"));
    assertThat(result.getValue().email(), equalTo("cbarton@shield.gov"));
    assertThat(result.getValue().roles(), arrayContainingInAnyOrder("lookup_user_role"));
    assertThat(result.getValue().fullName(), equalTo("Clinton Barton"));
    assertThat(result.getValue().metadata().entrySet(), Matchers.iterableWithSize(1));
    assertThat(result.getValue().metadata().get("is_lookup"), Matchers.equalTo(true));
}
// A token addressed to a different realm name is not an error: the realm
// simply declines (CONTINUE) so another realm can try.
public void testAuthenticateWithWrongRealmName() throws Exception {
    AuthenticationResult<User> result = performAuthentication(
        mock(UserRoleMapper.class),
        randomBoolean(),
        randomBoolean(),
        null,
        true,
        REALM_NAME + randomAlphaOfLength(8)
    );
    assertThat(result, notNullValue());
    assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE));
}
// Convenience overload: authenticates with the default groups
// ("avengers", "shield") and no group delimiter.
private AuthenticationResult<User> performAuthentication(
    UserRoleMapper roleMapper,
    boolean useNameId,
    boolean principalIsEmailAddress,
    Boolean populateUserMetadata,
    boolean useAuthorizingRealm,
    String authenticatingRealm
) throws Exception {
    return performAuthentication(
        roleMapper,
        useNameId,
        principalIsEmailAddress,
        populateUserMetadata,
        useAuthorizingRealm,
        authenticatingRealm,
        Arrays.asList("avengers", "shield"),
        null
    );
}
// Convenience overload: no role exclusions and no private (secure) attributes.
private AuthenticationResult<User> performAuthentication(
    UserRoleMapper roleMapper,
    boolean useNameId,
    boolean principalIsEmailAddress,
    Boolean populateUserMetadata,
    boolean useAuthorizingRealm,
    String authenticatingRealm,
    List<String> groups,
    String groupsDelimiter
) throws Exception {
    return performAuthentication(
        roleMapper,
        useNameId,
        principalIsEmailAddress,
        populateUserMetadata,
        useAuthorizingRealm,
        authenticatingRealm,
        groups,
        groupsDelimiter,
        null,
        null
    );
}
/**
 * Builds a fully-wired SamlRealm (mock IdP, mock authenticator/logout handler,
 * optional delegated-authorization lookup realm), stubs the authenticator to
 * return canned SAML attributes, and runs a single authentication, returning
 * the result.
 *
 * @param roleMapper role mapper passed to the realm
 * @param useNameId principal comes from "nameid" when true, from "uid" otherwise
 * @param principalIsEmailAddress when true, a principal pattern strips the email domain
 * @param populateUserMetadata realm setting; null means "leave at default"
 * @param useAuthorizingRealm wire a mock lookup realm for delegated authorization
 * @param authenticatingRealm realm name carried by the SamlToken (may be null)
 * @param groups group attribute values delivered by the IdP
 * @param groupsDelimiter optional delimiter for splitting group values
 * @param rolesToExclude optional realm-level role exclusions
 * @param secureAttributes optional private attributes (name -> values)
 */
private AuthenticationResult<User> performAuthentication(
    UserRoleMapper roleMapper,
    boolean useNameId,
    boolean principalIsEmailAddress,
    Boolean populateUserMetadata,
    boolean useAuthorizingRealm,
    String authenticatingRealm,
    List<String> groups,
    String groupsDelimiter,
    List<String> rolesToExclude,
    Map<String, List<String>> secureAttributes
) throws Exception {
    final EntityDescriptor idp = mockIdp();
    final SpConfiguration sp = new SingleSamlSpConfiguration("<sp>", "https://saml/", null, null, null, Collections.emptyList());
    final SamlAuthenticator authenticator = mock(SamlAuthenticator.class);
    final SamlLogoutRequestHandler logoutHandler = mock(SamlLogoutRequestHandler.class);

    final String userPrincipal = useNameId ? "clint.barton" : "cbarton";
    final String nameIdValue = principalIsEmailAddress ? "clint.barton@shield.gov" : "clint.barton";
    final String uidValue = principalIsEmailAddress ? "cbarton@shield.gov" : "cbarton";
    final String realmType = SingleSpSamlRealmSettings.TYPE;

    // a secondary realm to be used for delegated authorization lookups
    final RealmConfig.RealmIdentifier realmIdentifier = new RealmConfig.RealmIdentifier("mock", "mock_lookup");
    final MockLookupRealm lookupRealm = new MockLookupRealm(
        new RealmConfig(
            realmIdentifier,
            Settings.builder()
                .put(globalSettings)
                .put(RealmSettings.getFullSettingKey(realmIdentifier, RealmSettings.ORDER_SETTING), 0)
                .build(),
            env,
            threadContext
        )
    );

    final Settings.Builder settingsBuilder = Settings.builder()
        .put(
            RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getAttribute()),
            useNameId ? "nameid" : "uid"
        )
        .put(
            RealmSettings.getFullSettingKey(
                REALM_NAME,
                SamlRealmSettings.GROUPS_ATTRIBUTE.apply(realmType).getAttributeSetting().getAttribute()
            ),
            "groups"
        )
        .put(RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.MAIL_ATTRIBUTE.apply(realmType).getAttribute()), "mail");
    if (groupsDelimiter != null) {
        settingsBuilder.put(
            RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.GROUPS_ATTRIBUTE.apply(realmType).getDelimiter()),
            groupsDelimiter
        );
    }
    if (principalIsEmailAddress) {
        // either anchored or prefix-only pattern: both capture the local part
        final boolean anchoredMatch = randomBoolean();
        settingsBuilder.put(
            RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getPattern()),
            anchoredMatch ? "^([^@]+)@shield.gov$" : "^([^@]+)@"
        );
    }
    if (populateUserMetadata != null) {
        settingsBuilder.put(
            SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.POPULATE_USER_METADATA),
            populateUserMetadata.booleanValue()
        );
    }
    if (rolesToExclude != null) {
        settingsBuilder.put(
            SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.EXCLUDE_ROLES),
            String.join(",", rolesToExclude)
        );
    }
    if (secureAttributes != null) {
        settingsBuilder.put(
            SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRIVATE_ATTRIBUTES),
            String.join(",", secureAttributes.keySet())
        );
    }
    if (useAuthorizingRealm) {
        settingsBuilder.putList(
            RealmSettings.getFullSettingKey(
                new RealmConfig.RealmIdentifier("saml", REALM_NAME),
                DelegatedAuthorizationSettings.AUTHZ_REALMS
            ),
            lookupRealm.name()
        );
        lookupRealm.registerUser(
            new User(
                userPrincipal,
                new String[] { "lookup_user_role" },
                "Clinton Barton",
                "cbarton@shield.gov",
                Collections.singletonMap("is_lookup", true),
                true
            )
        );
    }
    final Settings realmSettings = settingsBuilder.build();

    final RealmConfig config = buildConfig(realmSettings);
    final SamlRealm realm = buildRealm(config, roleMapper, authenticator, logoutHandler, idp, sp);
    initializeRealms(realm, lookupRealm);
    final SamlToken token = new SamlToken(new byte[0], Collections.singletonList("<id>"), authenticatingRealm);

    // canned attributes the mock authenticator will "extract" from the token
    final SamlAttributes attributes = new SamlAttributes(
        new SamlNameId(NameIDType.PERSISTENT, nameIdValue, idp.getEntityID(), sp.getEntityId(), null),
        randomAlphaOfLength(16),
        List.of(
            new SamlAttributes.SamlAttribute("urn:oid:0.9.2342.19200300.100.1.1", "uid", Collections.singletonList(uidValue)),
            new SamlAttributes.SamlAttribute("urn:oid:1.3.6.1.4.1.5923.1.5.1.1", "groups", groups),
            new SamlAttributes.SamlAttribute("urn:oid:0.9.2342.19200300.100.1.3", "mail", Arrays.asList("cbarton@shield.gov"))
        ),
        secureAttributes == null
            ? List.of()
            : secureAttributes.entrySet()
                .stream()
                .map(
                    a -> new SamlAttributes.SamlPrivateAttribute(
                        a.getKey(),
                        null,
                        a.getValue().stream().map(SecureString::new).toList()
                    )
                )
                .toList()
    );
    when(authenticator.authenticate(token)).thenReturn(attributes);

    // verify private attributes inside the callback, before values are released
    final PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>() {
        @Override
        public void onResponse(AuthenticationResult<User> result) {
            if (secureAttributes != null && result.isAuthenticated()) {
                assertThat(result.getMetadata(), notNullValue());
                assertThat(result.getMetadata().containsKey(PRIVATE_ATTRIBUTES_METADATA), is(true));
                @SuppressWarnings("unchecked")
                var metadata = (Map<String, List<SecureString>>) result.getMetadata().get(PRIVATE_ATTRIBUTES_METADATA);
                secureAttributes.forEach((name, value) -> assertThat(metadata.get(name), equalTo(value)));
            }
            super.onResponse(result);
        }
    };
    realm.authenticate(token, future);
    return future.get();
}
private void initializeRealms(Realm... realms) {
MockLicenseState licenseState = mock(MockLicenseState.class);
when(licenseState.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(true);
final List<Realm> realmList = Arrays.asList(realms);
for (Realm realm : realms) {
realm.initialize(realmList, licenseState);
}
}
public SamlRealm buildRealm(
RealmConfig config,
UserRoleMapper roleMapper,
SamlAuthenticator authenticator,
SamlLogoutRequestHandler logoutHandler,
EntityDescriptor idp,
SpConfiguration sp
) throws Exception {
try {
return new SamlRealm(
config,
roleMapper,
authenticator,
logoutHandler,
mock(SamlLogoutResponseHandler.class),
() -> idp,
sp,
SamlRealmSettings.UserAttributeNameConfiguration.fromConfig(config)
);
} catch (SettingsException e) {
logger.info(() -> format("Settings are invalid:\n%s", config.settings().toDelimitedString('\n')), e);
throw e;
}
}
public void testAttributeSelectionWithSplit() {
List<String> strings = performAttributeSelectionWithSplit(",", "departments", "engineering", "elasticsearch-admins", "employees");
assertThat("For attributes: " + strings, strings, contains("engineering", "elasticsearch-admins", "employees"));
}
public void testAttributeSelectionWithSplitEmptyInput() {
List<String> strings = performAttributeSelectionWithSplit(",", "departments");
assertThat("For attributes: " + strings, strings, is(empty()));
}
public void testAttributeSelectionWithSplitJustDelimiter() {
List<String> strings = performAttributeSelectionWithSplit(",", ",");
assertThat("For attributes: " + strings, strings, is(empty()));
}
public void testAttributeSelectionWithSplitNoDelimiter() {
List<String> strings = performAttributeSelectionWithSplit(",", "departments", "elasticsearch-team");
assertThat("For attributes: " + strings, strings, contains("elasticsearch-team"));
}
private List<String> performAttributeSelectionWithSplit(String delimiter, String groupAttributeName, String... returnedGroups) {
final Settings settings = Settings.builder()
.put(REALM_SETTINGS_PREFIX + ".attributes.groups", groupAttributeName)
.put(REALM_SETTINGS_PREFIX + ".attribute_delimiters.groups", delimiter)
.build();
final RealmConfig config = buildConfig(settings);
final SamlRealmSettings.AttributeSettingWithDelimiter groupSetting = new SamlRealmSettings.AttributeSettingWithDelimiter(
config.type(),
"groups"
);
final SamlRealm.AttributeParser parser = SamlRealm.AttributeParser.forSetting(logger, groupSetting, config);
final SamlAttributes attributes = new SamlAttributes(
new SamlNameId(NameIDType.TRANSIENT, randomAlphaOfLength(24), null, null, null),
randomAlphaOfLength(16),
List.of(
new SamlAttributes.SamlAttribute(
"departments",
"departments",
Collections.singletonList(String.join(delimiter, returnedGroups))
)
),
List.of()
);
return parser.getAttribute(attributes);
}
    /**
     * Configuring both a delimiter and a regex pattern for the same attribute is
     * contradictory: parser construction must fail with a {@link SettingsException}
     * whose message names both conflicting settings.
     */
    public void testAttributeSelectionWithDelimiterAndPatternThrowsSettingsException() throws Exception {
        final Settings settings = Settings.builder()
            .put(REALM_SETTINGS_PREFIX + ".attributes.groups", "departments")
            .put(REALM_SETTINGS_PREFIX + ".attribute_delimiters.groups", ",")
            .put(REALM_SETTINGS_PREFIX + ".attribute_patterns.groups", "^(.+)@\\w+.example.com$")
            .build();
        final RealmConfig config = buildConfig(settings);
        final SamlRealmSettings.AttributeSettingWithDelimiter groupSetting = new SamlRealmSettings.AttributeSettingWithDelimiter(
            config.type(),
            "groups"
        );
        final SettingsException settingsException = expectThrows(
            SettingsException.class,
            () -> SamlRealm.AttributeParser.forSetting(logger, groupSetting, config)
        );
        // The error must point at both of the mutually-exclusive settings
        assertThat(settingsException.getMessage(), containsString(REALM_SETTINGS_PREFIX + ".attribute_delimiters.groups"));
        assertThat(settingsException.getMessage(), containsString(REALM_SETTINGS_PREFIX + ".attribute_patterns.groups"));
    }
    /**
     * A groups delimiter without a corresponding groups attribute mapping is invalid:
     * parser construction must fail with a {@link SettingsException} that names both
     * the delimiter setting and the missing attribute setting.
     */
    public void testAttributeSelectionNoGroupsConfiguredThrowsSettingsException() {
        String delimiter = ",";
        // Only the delimiter is set; ".attributes.groups" is deliberately absent
        final Settings settings = Settings.builder().put(REALM_SETTINGS_PREFIX + ".attribute_delimiters.groups", delimiter).build();
        final RealmConfig config = buildConfig(settings);
        final SamlRealmSettings.AttributeSettingWithDelimiter groupSetting = new SamlRealmSettings.AttributeSettingWithDelimiter(
            config.type(),
            "groups"
        );
        final SettingsException settingsException = expectThrows(
            SettingsException.class,
            () -> SamlRealm.AttributeParser.forSetting(logger, groupSetting, config)
        );
        assertThat(settingsException.getMessage(), containsString(REALM_SETTINGS_PREFIX + ".attribute_delimiters.groups"));
        assertThat(settingsException.getMessage(), containsString(REALM_SETTINGS_PREFIX + ".attributes.groups"));
    }
    /**
     * Delimiter-based parsing only supports a single attribute value. When the SAML
     * attribute carries multiple values, {@code getAttribute} must fail with an
     * {@link ElasticsearchSecurityException} that mentions the attribute name.
     */
    public void testAttributeSelectionWithSplitAndListThrowsSecurityException() {
        String delimiter = ",";
        final Settings settings = Settings.builder()
            .put(REALM_SETTINGS_PREFIX + ".attributes.groups", "departments")
            .put(REALM_SETTINGS_PREFIX + ".attribute_delimiters.groups", delimiter)
            .build();
        final RealmConfig config = buildConfig(settings);
        final SamlRealmSettings.AttributeSettingWithDelimiter groupSetting = new SamlRealmSettings.AttributeSettingWithDelimiter(
            config.type(),
            "groups"
        );
        final SamlRealm.AttributeParser parser = SamlRealm.AttributeParser.forSetting(logger, groupSetting, config);
        // Two values: a plain one and a delimiter-joined one — more than one value is not allowed
        final SamlAttributes attributes = new SamlAttributes(
            new SamlNameId(NameIDType.TRANSIENT, randomAlphaOfLength(24), null, null, null),
            randomAlphaOfLength(16),
            List.of(
                new SamlAttributes.SamlAttribute(
                    "departments",
                    "departments",
                    List.of("engineering", String.join(delimiter, "elasticsearch-admins", "employees"))
                )
            ),
            List.of()
        );
        ElasticsearchSecurityException securityException = expectThrows(
            ElasticsearchSecurityException.class,
            () -> parser.getAttribute(attributes)
        );
        assertThat(securityException.getMessage(), containsString("departments"));
    }
    /**
     * A pattern-based attribute parser keeps only the values matching the regex and
     * extracts capture group 1. The attribute may be configured by either its friendly
     * name ("mail") or its URN — both must behave identically.
     */
    public void testAttributeSelectionWithRegex() {
        final boolean useFriendlyName = randomBoolean();
        final Settings settings = Settings.builder()
            .put(REALM_SETTINGS_PREFIX + ".attributes.principal", useFriendlyName ? "mail" : "urn:oid:0.9.2342.19200300.100.1.3")
            .put(REALM_SETTINGS_PREFIX + ".attribute_patterns.principal", "^(.+)@\\w+.example.com$")
            .build();
        final RealmConfig config = buildConfig(settings);
        final SamlRealmSettings.AttributeSetting principalSetting = new SamlRealmSettings.AttributeSetting(config.type(), "principal");
        final SamlRealm.AttributeParser parser = SamlRealm.AttributeParser.forSetting(logger, principalSetting, config, false);
        // Three mail values; only the two "@…example.com" addresses match the pattern
        final SamlAttributes attributes = new SamlAttributes(
            new SamlNameId(NameIDType.TRANSIENT, randomAlphaOfLength(24), null, null, null),
            randomAlphaOfLength(16),
            List.of(
                new SamlAttributes.SamlAttribute(
                    "urn:oid:0.9.2342.19200300.100.1.3",
                    "mail",
                    Arrays.asList("john.smith@personal.example.net", "john.smith@corporate.example.com", "jsmith@corporate.example.com")
                )
            ),
            List.of()
        );
        final List<String> strings = parser.getAttribute(attributes);
        // Only the local parts (capture group 1) of the matching addresses survive
        assertThat("For attributes: " + strings, strings, contains("john.smith", "jsmith"));
    }
    /**
     * An attribute pattern (here for "name") without the matching attribute mapping is
     * invalid: realm construction must fail with a {@link SettingsException} that names
     * both the pattern setting and the missing attribute setting.
     */
    public void testSettingPatternWithoutAttributeThrowsSettingsException() throws Exception {
        final String realmType = SingleSpSamlRealmSettings.TYPE;
        final Settings realmSettings = Settings.builder()
            .put(
                RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getAttribute()),
                "nameid"
            )
            // name pattern is set, but ".attributes.name" is deliberately absent
            .put(
                RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.NAME_ATTRIBUTE.apply(realmType).getPattern()),
                "^\\s*(\\S.*\\S)\\s*$"
            )
            .build();
        final RealmConfig config = buildConfig(realmSettings);
        final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
        final SamlAuthenticator authenticator = mock(SamlAuthenticator.class);
        final SamlLogoutRequestHandler logoutHandler = mock(SamlLogoutRequestHandler.class);
        final EntityDescriptor idp = mockIdp();
        final SpConfiguration sp = new SingleSamlSpConfiguration("<sp>", "https://saml/", null, null, null, Collections.emptyList());
        final SettingsException settingsException = expectThrows(
            SettingsException.class,
            () -> buildRealm(config, roleMapper, authenticator, logoutHandler, idp, sp)
        );
        assertThat(settingsException.getMessage(), containsString(REALM_SETTINGS_PREFIX + ".attribute_patterns.name"));
        assertThat(settingsException.getMessage(), containsString(REALM_SETTINGS_PREFIX + ".attributes.name"));
    }
    /**
     * The {@code exclude_roles} and {@code authorization_realms} settings are mutually
     * exclusive; configuring both must fail realm construction with an
     * {@link IllegalArgumentException} whose cause message names both settings.
     */
    public void testSettingExcludeRolesAndAuthorizationRealmsThrowsException() throws Exception {
        final Settings realmSettings = Settings.builder()
            .putList(SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.EXCLUDE_ROLES), "superuser", "kibana_admin")
            .putList(
                RealmSettings.getFullSettingKey(
                    new RealmConfig.RealmIdentifier("saml", REALM_NAME),
                    DelegatedAuthorizationSettings.AUTHZ_REALMS
                ),
                "ldap"
            )
            .put(
                RealmSettings.getFullSettingKey(
                    REALM_NAME,
                    SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(SingleSpSamlRealmSettings.TYPE).getAttribute()
                ),
                "mail"
            )
            .build();
        final RealmConfig config = buildConfig(realmSettings);
        final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
        final SamlAuthenticator authenticator = mock(SamlAuthenticator.class);
        final SamlLogoutRequestHandler logoutHandler = mock(SamlLogoutRequestHandler.class);
        final EntityDescriptor idp = mockIdp();
        final SpConfiguration sp = new SingleSamlSpConfiguration("<sp>", "https://saml/", null, null, null, Collections.emptyList());
        var e = expectThrows(IllegalArgumentException.class, () -> buildRealm(config, roleMapper, authenticator, logoutHandler, idp, sp));
        // The conflict is reported on the wrapped cause, not the top-level exception
        assertThat(
            e.getCause().getMessage(),
            containsString(
                "Setting ["
                    + REALM_SETTINGS_PREFIX
                    + ".exclude_roles] is not permitted when setting ["
                    + REALM_SETTINGS_PREFIX
                    + ".authorization_realms] is configured."
            )
        );
    }
    /**
     * An attribute name listed in {@code private_attributes} must not also be used by any
     * of the regular attribute-mapping settings (principal/groups/dn/name/mail). The first
     * sub-case checks that the overlap is rejected; the second checks that distinct names
     * are accepted.
     */
    public void testPrivateAttributesSettingValidation() {
        String attributeName = randomAlphaOfLength(10);
        // Randomly pick one of the attribute-mapping settings to collide with
        String attributeSetting = RealmSettings.getFullSettingKey(
            REALM_NAME,
            randomFrom(
                SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(SingleSpSamlRealmSettings.TYPE).getAttribute(),
                SamlRealmSettings.GROUPS_ATTRIBUTE.apply(SingleSpSamlRealmSettings.TYPE).getAttributeSetting().getAttribute(),
                SamlRealmSettings.DN_ATTRIBUTE.apply(SingleSpSamlRealmSettings.TYPE).getAttribute(),
                SamlRealmSettings.NAME_ATTRIBUTE.apply(SingleSpSamlRealmSettings.TYPE).getAttribute(),
                SamlRealmSettings.MAIL_ATTRIBUTE.apply(SingleSpSamlRealmSettings.TYPE).getAttribute()
            )
        );
        {
            // Same attribute name in both places -> reading the setting must fail
            Settings settings = buildSettings("https://example.com").put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRIVATE_ATTRIBUTES),
                attributeName
            ).put(attributeSetting, attributeName).build();
            final RealmConfig config = realmConfigFromGlobalSettings(settings);
            var e = expectThrows(IllegalArgumentException.class, () -> config.getSetting(SamlRealmSettings.PRIVATE_ATTRIBUTES));
            assertThat(
                e.getCause().getMessage(),
                containsString(
                    "SAML Attribute ["
                        + attributeName
                        + "] cannot be both configured for ["
                        + REALM_SETTINGS_PREFIX
                        + ".private_attributes] and ["
                        + attributeSetting
                        + "] settings."
                )
            );
        }
        {
            // Different names -> both settings must be readable as configured
            String otherAttributeName = randomAlphaOfLength(9);
            Settings settings = buildSettings("https://example.com").put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRIVATE_ATTRIBUTES),
                attributeName
            ).put(attributeSetting, otherAttributeName).build();
            final RealmConfig config = realmConfigFromGlobalSettings(settings);
            assertThat(config.getSetting(SamlRealmSettings.PRIVATE_ATTRIBUTES), containsInAnyOrder(attributeName));
            assertThat(config.settings().get(attributeSetting), equalTo(otherAttributeName));
        }
    }
public void testMissingPrincipalSettingThrowsSettingsException() throws Exception {
final Settings realmSettings = Settings.EMPTY;
final RealmConfig config = buildConfig(realmSettings);
final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
final SamlAuthenticator authenticator = mock(SamlAuthenticator.class);
final SamlLogoutRequestHandler logoutHandler = mock(SamlLogoutRequestHandler.class);
final EntityDescriptor idp = mockIdp();
final SpConfiguration sp = new SingleSamlSpConfiguration("<sp>", "https://saml/", null, null, null, Collections.emptyList());
final SettingsException settingsException = expectThrows(
SettingsException.class,
() -> buildRealm(config, roleMapper, authenticator, logoutHandler, idp, sp)
);
assertThat(settingsException.getMessage(), containsString(REALM_SETTINGS_PREFIX + ".attributes.principal"));
}
    /**
     * When the configured principal pattern matches none of the attribute values,
     * authentication must not fail hard but return {@code CONTINUE} with a diagnostic
     * message referencing the setting, the attribute, and the pattern.
     */
    public void testNonMatchingPrincipalPatternThrowsSamlException() throws Exception {
        final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
        final EntityDescriptor idp = mockIdp();
        final SpConfiguration sp = new SingleSamlSpConfiguration("<sp>", "https://saml/", null, null, null, Collections.emptyList());
        final SamlAuthenticator authenticator = mock(SamlAuthenticator.class);
        final SamlLogoutRequestHandler logoutHandler = mock(SamlLogoutRequestHandler.class);
        final String realmType = SingleSpSamlRealmSettings.TYPE;
        final Settings realmSettings = Settings.builder()
            .put(RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getAttribute()), "mail")
            .put(
                RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getPattern()),
                "^([^@]+)@mycorp\\.example\\.com$"
            )
            .build();
        final RealmConfig config = buildConfig(realmSettings);
        final SamlRealm realm = buildRealm(config, roleMapper, authenticator, logoutHandler, idp, sp);
        final String authenticatingRealm = randomBoolean() ? REALM_NAME : null;
        final SamlToken token = new SamlToken(new byte[0], Collections.singletonList("<id>"), authenticatingRealm);
        // Each mail value is a near-miss that must NOT match the pattern
        for (String mail : Arrays.asList("john@your-corp.example.com", "john@mycorp.example.com.example.net", "john")) {
            final SamlAttributes attributes = new SamlAttributes(
                new SamlNameId(NameIDType.TRANSIENT, randomAlphaOfLength(12), null, null, null),
                randomAlphaOfLength(16),
                List.of(new SamlAttributes.SamlAttribute("urn:oid:0.9.2342.19200300.100.1.3", "mail", Collections.singletonList(mail))),
                List.of()
            );
            when(authenticator.authenticate(token)).thenReturn(attributes);
            final PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
            realm.authenticate(token, future);
            final AuthenticationResult<User> result = future.actionGet();
            assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE));
            assertThat(result.getMessage(), containsString("attributes.principal"));
            assertThat(result.getMessage(), containsString("mail"));
            assertThat(result.getMessage(), containsString("@mycorp\\.example\\.com"));
        }
    }
    /**
     * An encryption credential built from PEM key/certificate files must expose the
     * same private key and public key as the files it was loaded from.
     */
    public void testCreateCredentialFromPemFiles() throws Exception {
        final Settings.Builder builder = buildSettings("http://example.com");
        final Path dir = createTempDir("encryption");
        // Copy the bundled test PEM files into a temp dir so the realm reads them from there
        final Path encryptionKeyPath = getDataPath("encryption.key");
        final Path destEncryptionKeyPath = dir.resolve("encryption.key");
        final PrivateKey encryptionKey = PemUtils.readPrivateKey(encryptionKeyPath, "encryption"::toCharArray);
        final Path encryptionCertPath = getDataPath("encryption.crt");
        final Path destEncryptionCertPath = dir.resolve("encryption.crt");
        final X509Certificate encryptionCert = CertParsingUtils.readX509Certificates(Collections.singletonList(encryptionCertPath))[0];
        Files.copy(encryptionKeyPath, destEncryptionKeyPath);
        Files.copy(encryptionCertPath, destEncryptionCertPath);
        builder.put(REALM_SETTINGS_PREFIX + ".encryption.key", destEncryptionKeyPath);
        builder.put(REALM_SETTINGS_PREFIX + ".encryption.certificate", destEncryptionCertPath);
        final Settings settings = builder.build();
        final RealmConfig realmConfig = realmConfigFromGlobalSettings(settings);
        final Credential credential = SamlRealm.buildEncryptionCredential(realmConfig).get(0);
        assertThat(credential, notNullValue());
        assertThat(credential.getPrivateKey(), equalTo(encryptionKey));
        assertThat(credential.getPublicKey(), equalTo(encryptionCert.getPublicKey()));
    }
    /**
     * Encryption credentials loaded from a PKCS12 keystore: when an alias is configured
     * only that entry is used; otherwise every key pair in the store becomes a credential.
     * Each returned credential's keys must come from the pairs placed in the store.
     */
    public void testCreateEncryptionCredentialFromKeyStore() throws Exception {
        assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm());
        final Path dir = createTempDir();
        final Settings.Builder builder = Settings.builder().put(REALM_SETTINGS_PREFIX + ".type", "saml").put("path.home", dir);
        final Path ksFile = dir.resolve("cred.p12");
        final boolean testMultipleEncryptionKeyPair = randomBoolean();
        final Tuple<X509Certificate, PrivateKey> certKeyPair1 = readKeyPair("RSA_4096");
        final Tuple<X509Certificate, PrivateKey> certKeyPair2 = readKeyPair("RSA_2048");
        // Build a PKCS12 store with one or (randomly) two RSA key pairs
        final KeyStore ks = KeyStore.getInstance("PKCS12");
        ks.load(null);
        ks.setKeyEntry(
            getAliasName(certKeyPair1),
            certKeyPair1.v2(),
            "key-password".toCharArray(),
            new Certificate[] { certKeyPair1.v1() }
        );
        if (testMultipleEncryptionKeyPair) {
            ks.setKeyEntry(
                getAliasName(certKeyPair2),
                certKeyPair2.v2(),
                "key-password".toCharArray(),
                new Certificate[] { certKeyPair2.v1() }
            );
        }
        try (OutputStream out = Files.newOutputStream(ksFile)) {
            ks.store(out, "ks-password".toCharArray());
        }
        builder.put(REALM_SETTINGS_PREFIX + ".encryption.keystore.path", ksFile.toString());
        builder.put(REALM_SETTINGS_PREFIX + ".encryption.keystore.type", "PKCS12");
        // Optionally pin the credential selection to the first pair's alias
        final boolean isEncryptionKeyStoreAliasSet = randomBoolean();
        if (isEncryptionKeyStoreAliasSet) {
            builder.put(REALM_SETTINGS_PREFIX + ".encryption.keystore.alias", getAliasName(certKeyPair1));
        }
        final MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString(REALM_SETTINGS_PREFIX + ".encryption.keystore.secure_password", "ks-password");
        secureSettings.setString(REALM_SETTINGS_PREFIX + ".encryption.keystore.secure_key_password", "key-password");
        builder.setSecureSettings(secureSettings);
        final Settings settings = builder.build();
        final RealmConfig realmConfig = realmConfigFromGlobalSettings(settings);
        final List<X509Credential> credentials = SamlRealm.buildEncryptionCredential(realmConfig);
        assertThat("Encryption Credentials should not be null", credentials, notNullValue());
        // Alias set -> exactly 1 credential; otherwise one per key pair in the store
        final int expectedCredentials = (isEncryptionKeyStoreAliasSet) ? 1 : (testMultipleEncryptionKeyPair) ? 2 : 1;
        assertEquals("Expected encryption credentials size does not match", expectedCredentials, credentials.size());
        credentials.forEach((credential) -> {
            assertTrue(
                "Unexpected private key in the list of encryption credentials",
                Arrays.asList(new PrivateKey[] { certKeyPair1.v2(), certKeyPair2.v2() }).contains(credential.getPrivateKey())
            );
            assertTrue(
                "Unexpected public key in the list of encryption credentials",
                Arrays.asList(new PublicKey[] { (certKeyPair1.v1()).getPublicKey(), certKeyPair2.v1().getPublicKey() })
                    .contains(credential.getPublicKey())
            );
        });
    }
    /**
     * Signing credential selection from a PKCS12 keystore containing one RSA and one EC
     * key pair. Whether or not the RSA alias is configured explicitly, the RSA pair must
     * be selected (the EC pair is not usable for signing, so the RSA key is unambiguous).
     */
    public void testCreateSigningCredentialFromKeyStoreSuccessScenarios() throws Exception {
        assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm());
        final Path dir = createTempDir();
        final Settings.Builder builder = Settings.builder().put(REALM_SETTINGS_PREFIX + ".type", "saml").put("path.home", dir);
        final Path ksFile = dir.resolve("cred.p12");
        final Tuple<X509Certificate, PrivateKey> certKeyPair1 = readRandomKeyPair("RSA");
        final Tuple<X509Certificate, PrivateKey> certKeyPair2 = readRandomKeyPair("EC");
        final KeyStore ks = KeyStore.getInstance("PKCS12");
        ks.load(null);
        ks.setKeyEntry(
            getAliasName(certKeyPair1),
            certKeyPair1.v2(),
            "key-password".toCharArray(),
            new Certificate[] { certKeyPair1.v1() }
        );
        ks.setKeyEntry(
            getAliasName(certKeyPair2),
            certKeyPair2.v2(),
            "key-password".toCharArray(),
            new Certificate[] { certKeyPair2.v1() }
        );
        try (OutputStream out = Files.newOutputStream(ksFile)) {
            ks.store(out, "ks-password".toCharArray());
        }
        builder.put(REALM_SETTINGS_PREFIX + ".signing.keystore.path", ksFile.toString());
        builder.put(REALM_SETTINGS_PREFIX + ".signing.keystore.type", "PKCS12");
        // The RSA alias may or may not be configured; either way the RSA pair must win
        final boolean isSigningKeyStoreAliasSet = randomBoolean();
        if (isSigningKeyStoreAliasSet) {
            builder.put(REALM_SETTINGS_PREFIX + ".signing.keystore.alias", getAliasName(certKeyPair1));
        }
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString(REALM_SETTINGS_PREFIX + ".signing.keystore.secure_password", "ks-password");
        secureSettings.setString(REALM_SETTINGS_PREFIX + ".signing.keystore.secure_key_password", "key-password");
        builder.setSecureSettings(secureSettings);
        final Settings settings = builder.build();
        final RealmConfig realmConfig = realmConfigFromGlobalSettings(settings);
        // Should build signing credential and use the key from KS.
        final SigningConfiguration signingConfig = SamlRealm.buildSigningConfiguration(realmConfig);
        final Credential credential = signingConfig.getCredential();
        assertThat(credential, notNullValue());
        assertThat(credential.getPrivateKey(), equalTo(certKeyPair1.v2()));
        assertThat(credential.getPublicKey(), equalTo(certKeyPair1.v1().getPublicKey()));
    }
    /**
     * Signing-credential failure scenarios, each expected to raise an
     * {@link IllegalArgumentException} with an exact message:
     * <ul>
     *   <li>configured alias not present in the keystore</li>
     *   <li>configured alias resolves to an EC key (only RSA is supported)</li>
     *   <li>no alias and no RSA keys in the store</li>
     *   <li>no alias but multiple RSA keys (ambiguous)</li>
     * </ul>
     */
    public void testCreateSigningCredentialFromKeyStoreFailureScenarios() throws Exception {
        assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm());
        final Path dir = createTempDir();
        final Settings.Builder builder = Settings.builder().put(REALM_SETTINGS_PREFIX + ".type", "saml").put("path.home", dir);
        final Path ksFile = dir.resolve("cred.p12");
        final Tuple<X509Certificate, PrivateKey> certKeyPair1 = readKeyPair("RSA_4096");
        final Tuple<X509Certificate, PrivateKey> certKeyPair2 = readKeyPair("RSA_2048");
        final Tuple<X509Certificate, PrivateKey> certKeyPair3 = readRandomKeyPair("EC");
        final KeyStore ks = KeyStore.getInstance("PKCS12");
        ks.load(null);
        // Randomly omit the RSA pairs to exercise the "no RSA keys" failure path
        final boolean noRSAKeysInKS = randomBoolean();
        if (noRSAKeysInKS == false) {
            ks.setKeyEntry(
                getAliasName(certKeyPair1),
                certKeyPair1.v2(),
                "key-password".toCharArray(),
                new Certificate[] { certKeyPair1.v1() }
            );
            ks.setKeyEntry(
                getAliasName(certKeyPair2),
                certKeyPair2.v2(),
                "key-password".toCharArray(),
                new Certificate[] { certKeyPair2.v1() }
            );
        }
        // The EC pair is always present
        ks.setKeyEntry(
            getAliasName(certKeyPair3),
            certKeyPair3.v2(),
            "key-password".toCharArray(),
            new Certificate[] { certKeyPair3.v1() }
        );
        try (OutputStream out = Files.newOutputStream(ksFile)) {
            ks.store(out, "ks-password".toCharArray());
        }
        builder.put(REALM_SETTINGS_PREFIX + ".signing.keystore.path", ksFile.toString());
        builder.put(REALM_SETTINGS_PREFIX + ".signing.keystore.type", "PKCS12");
        final boolean isSigningKeyStoreAliasSet = randomBoolean();
        final Tuple<X509Certificate, PrivateKey> chosenAliasCertKeyPair;
        final String unknownAlias = randomAlphaOfLength(5);
        if (isSigningKeyStoreAliasSet) {
            // Either an alias that is absent from the store, or the (unsupported) EC entry
            chosenAliasCertKeyPair = randomFrom(Arrays.asList(certKeyPair3, null));
            if (chosenAliasCertKeyPair == null) {
                // Unknown alias
                builder.put(REALM_SETTINGS_PREFIX + ".signing.keystore.alias", unknownAlias);
            } else {
                builder.put(REALM_SETTINGS_PREFIX + ".signing.keystore.alias", getAliasName(chosenAliasCertKeyPair));
            }
        } else {
            chosenAliasCertKeyPair = null;
        }
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString(REALM_SETTINGS_PREFIX + ".signing.keystore.secure_password", "ks-password");
        secureSettings.setString(REALM_SETTINGS_PREFIX + ".signing.keystore.secure_key_password", "key-password");
        builder.setSecureSettings(secureSettings);
        final Settings settings = builder.build();
        final RealmConfig realmConfig = realmConfigFromGlobalSettings(settings);
        if (isSigningKeyStoreAliasSet) {
            if (chosenAliasCertKeyPair == null) {
                // Unknown alias, this must throw exception
                final IllegalArgumentException illegalArgumentException = expectThrows(
                    IllegalArgumentException.class,
                    () -> SamlRealm.buildSigningConfiguration(realmConfig)
                );
                final String expectedErrorMessage = "The configured key store for "
                    + RealmSettings.realmSettingPrefix(realmConfig.identifier())
                    + "signing."
                    + " does not have a key associated with alias ["
                    + unknownAlias
                    + "] "
                    + "(from setting "
                    + RealmSettings.getFullSettingKey(realmConfig, SamlRealmSettings.SIGNING_KEY_ALIAS)
                    + ")";
                assertEquals(expectedErrorMessage, illegalArgumentException.getLocalizedMessage());
            } else {
                final String chosenAliasName = getAliasName(chosenAliasCertKeyPair);
                // Since this is unsupported key type, this must throw exception
                final IllegalArgumentException illegalArgumentException = expectThrows(
                    IllegalArgumentException.class,
                    () -> SamlRealm.buildSigningConfiguration(realmConfig)
                );
                final String expectedErrorMessage = "The key associated with alias ["
                    + chosenAliasName
                    + "] "
                    + "(from setting "
                    + RealmSettings.getFullSettingKey(realmConfig, SamlRealmSettings.SIGNING_KEY_ALIAS)
                    + ") uses unsupported key algorithm type ["
                    + chosenAliasCertKeyPair.v2().getAlgorithm()
                    + "], only RSA is supported";
                assertEquals(expectedErrorMessage, illegalArgumentException.getLocalizedMessage());
            }
        } else {
            if (noRSAKeysInKS) {
                // Should throw exception as no RSA keys in the keystore
                final IllegalArgumentException illegalArgumentException = expectThrows(
                    IllegalArgumentException.class,
                    () -> SamlRealm.buildSigningConfiguration(realmConfig)
                );
                final String expectedErrorMessage = "The configured key store for "
                    + RealmSettings.realmSettingPrefix(realmConfig.identifier())
                    + "signing."
                    + " does not contain any RSA key pairs";
                assertEquals(expectedErrorMessage, illegalArgumentException.getLocalizedMessage());
            } else {
                // Should throw exception when multiple signing keys found and alias not set
                final IllegalArgumentException illegalArgumentException = expectThrows(
                    IllegalArgumentException.class,
                    () -> SamlRealm.buildSigningConfiguration(realmConfig)
                );
                final String expectedErrorMessage = "The configured key store for "
                    + RealmSettings.realmSettingPrefix(realmConfig.identifier())
                    + "signing."
                    + " has multiple keys but no alias has been specified (from setting "
                    + RealmSettings.getFullSettingKey(realmConfig, SamlRealmSettings.SIGNING_KEY_ALIAS)
                    + ")";
                assertEquals(expectedErrorMessage, illegalArgumentException.getLocalizedMessage());
            }
        }
    }
private String getAliasName(final Tuple<X509Certificate, PrivateKey> certKeyPair) {
// Keys are pre-generated with the same name, so add the serial no to the alias so that keystore entries won't be overwritten
return certKeyPair.v1().getSubjectX500Principal().getName().toLowerCase(Locale.US)
+ "-"
+ certKeyPair.v1().getSerialNumber()
+ "-alias";
}
    /**
     * {@code buildLogoutRequest} must honour the {@code idp.use_single_logout} setting:
     * explicitly {@code false} yields no request; {@code true} or unset (default) yields a
     * redirect-binding logout request targeting the IdP's SLO endpoint with the given
     * NameID and session index.
     */
    public void testBuildLogoutRequest() throws Exception {
        // null means "setting not configured", exercising the default behaviour
        final Boolean useSingleLogout = randomFrom(true, false, null);
        final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
        final EntityDescriptor idp = mockIdp();
        final IDPSSODescriptor role = mock(IDPSSODescriptor.class);
        final SingleLogoutService slo = SamlUtils.buildObject(SingleLogoutService.class, SingleLogoutService.DEFAULT_ELEMENT_NAME);
        when(idp.getRoleDescriptors(IDPSSODescriptor.DEFAULT_ELEMENT_NAME)).thenReturn(Collections.singletonList(role));
        when(role.getSingleLogoutServices()).thenReturn(Collections.singletonList(slo));
        slo.setBinding(SAMLConstants.SAML2_REDIRECT_BINDING_URI);
        slo.setLocation("https://logout.saml/");
        final SpConfiguration sp = new SingleSamlSpConfiguration("<sp>", "https://saml/", null, null, null, Collections.emptyList());
        final SamlAuthenticator authenticator = mock(SamlAuthenticator.class);
        final SamlLogoutRequestHandler logoutHandler = mock(SamlLogoutRequestHandler.class);
        final String realmType = SingleSpSamlRealmSettings.TYPE;
        final Settings.Builder realmSettings = Settings.builder()
            .put(RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getAttribute()), "uid");
        if (useSingleLogout != null) {
            realmSettings.put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_SINGLE_LOGOUT),
                useSingleLogout.booleanValue()
            );
        }
        final RealmConfig config = buildConfig(realmSettings.build());
        final SamlRealm realm = buildRealm(config, roleMapper, authenticator, logoutHandler, idp, sp);
        final NameID nameId = SamlUtils.buildObject(NameID.class, NameID.DEFAULT_ELEMENT_NAME);
        nameId.setFormat(NameID.TRANSIENT);
        nameId.setValue(SamlUtils.generateSecureNCName(18));
        final String session = SamlUtils.generateSecureNCName(12);
        final LogoutRequest request = realm.buildLogoutRequest(nameId, session);
        if (Boolean.FALSE.equals(useSingleLogout)) {
            // Single logout explicitly disabled -> no request is produced
            assertThat(request, nullValue());
        } else {
            assertThat(request, notNullValue());
            assertThat(request.getDestination(), equalTo("https://logout.saml/"));
            assertThat(request.getNameID(), equalTo(nameId));
            assertThat(request.getSessionIndexes(), iterableWithSize(1));
            assertThat(request.getSessionIndexes().get(0).getValue(), equalTo(session));
        }
    }
    /**
     * {@code SamlRealm.findSamlRealms} must select realms by (optional) realm name and
     * (optional) ACS URL: a matching name and/or exact ACS URL selects the realm; any
     * non-matching name or ACS URL (including a different port form) selects nothing.
     */
    public void testCorrectRealmSelected() throws Exception {
        final String acsUrl = "https://idp.test/saml/login";
        final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
        final EntityDescriptor idp = mockIdp();
        final SpConfiguration sp = new SingleSamlSpConfiguration("<sp>", acsUrl, null, null, null, Collections.emptyList());
        final SamlAuthenticator authenticator = mock(SamlAuthenticator.class);
        final SamlLogoutRequestHandler logoutHandler = mock(SamlLogoutRequestHandler.class);
        final String realmType = SingleSpSamlRealmSettings.TYPE;
        final Settings.Builder realmSettings = Settings.builder()
            .put(RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getAttribute()), "uid")
            .put(SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_PATH), "http://url.to/metadata")
            .put(SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_ENTITY_ID), TEST_IDP_ENTITY_ID)
            .put(RealmSettings.getFullSettingKey(REALM_NAME, SingleSpSamlRealmSettings.SP_ACS), acsUrl);
        final RealmConfig config = buildConfig(realmSettings.build());
        final SamlRealm realm = buildRealm(config, roleMapper, authenticator, logoutHandler, idp, sp);
        final Realms realms = mock(Realms.class);
        when(realms.realm(REALM_NAME)).thenReturn(realm);
        when(realms.stream()).thenAnswer(i -> Stream.of(realm));
        // No realm name given -> match by ACS URL alone
        final String emptyRealmName = randomBoolean() ? null : "";
        assertThat(SamlRealm.findSamlRealms(realms, emptyRealmName, acsUrl), hasSize(1));
        assertThat(SamlRealm.findSamlRealms(realms, emptyRealmName, acsUrl).get(0), equalTo(realm));
        assertThat(SamlRealm.findSamlRealms(realms, "my-saml", acsUrl), hasSize(1));
        assertThat(SamlRealm.findSamlRealms(realms, "my-saml", acsUrl).get(0), equalTo(realm));
        assertThat(SamlRealm.findSamlRealms(realms, "my-saml", null), hasSize(1));
        assertThat(SamlRealm.findSamlRealms(realms, "my-saml", null).get(0), equalTo(realm));
        // The ":443" form is a different string from the configured ACS URL, so no match
        assertThat(SamlRealm.findSamlRealms(realms, "my-saml", "https://idp.test:443/saml/login"), empty());
        assertThat(SamlRealm.findSamlRealms(realms, "incorrect", acsUrl), empty());
        assertThat(SamlRealm.findSamlRealms(realms, "incorrect", "https://idp.test:443/saml/login"), empty());
    }
/**
 * Verifies that when the on-disk IdP metadata file is replaced by a different document that
 * contains the same signing certificates (only the document id / cacheDuration differ), the
 * file-watching resolver reloads the file but the signing credentials exposed by the realm's
 * {@code IdpConfiguration} remain equal across reloads.
 * <p>
 * The test drives the reload through a real {@link ResourceWatcherService} with very short
 * polling intervals, and uses a {@link CountDownLatch}-based file listener to wait until the
 * watcher has observed the metadata file change before re-reading the credentials.
 */
public void testReadDifferentIdpMetadataSameKeyFromFiles() throws Exception {
    // Confirm these files are located in /x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/saml/
    final Path originalMetadataPath = getDataPath("idp1.xml");
    final Path updatedMetadataPath = getDataPath("idp1-same-certs-updated-id-cacheDuration.xml");
    assertThat(Files.exists(originalMetadataPath), is(true));
    assertThat(Files.exists(updatedMetadataPath), is(true));
    // Confirm the file contents are different
    assertThat(Files.readString(originalMetadataPath), is(not(equalTo(Files.readString(updatedMetadataPath)))));
    // Use a temp file to trigger load and reload by ResourceWatcherService
    final Path realmMetadataPath = Files.createTempFile(PathUtils.get(createTempDir().toString()), "idp1-metadata", "xml");
    final RealmConfig.RealmIdentifier realmIdentifier = new RealmConfig.RealmIdentifier(SingleSpSamlRealmSettings.TYPE, "saml-idp1");
    final RealmConfig realmConfig = new RealmConfig(
        realmIdentifier,
        Settings.builder().put(RealmSettings.getFullSettingKey(realmIdentifier, RealmSettings.ORDER_SETTING), 1).build(),
        this.env,
        this.threadContext
    );
    // Dedicated thread pool so the watcher's async reload does not interfere with the test framework's pools
    final TestThreadPool testThreadPool = new TestThreadPool("Async Reload");
    try {
        // Put original metadata contents into realm metadata file
        Files.writeString(realmMetadataPath, Files.readString(originalMetadataPath));
        // Aggressive 10ms polling at every frequency so the reload happens quickly in the test
        final TimeValue timeValue = TimeValue.timeValueMillis(10);
        final Settings resourceWatcherSettings = Settings.builder()
            .put(ResourceWatcherService.RELOAD_INTERVAL_HIGH.getKey(), timeValue)
            .put(ResourceWatcherService.RELOAD_INTERVAL_MEDIUM.getKey(), timeValue)
            .put(ResourceWatcherService.RELOAD_INTERVAL_LOW.getKey(), timeValue)
            .build();
        try (ResourceWatcherService watcherService = new ResourceWatcherService(resourceWatcherSettings, testThreadPool)) {
            Tuple<RealmConfig, SSLService> config = buildConfig(realmMetadataPath.toString());
            // v1 = the reloading metadata resolver, v2 = supplier of the current EntityDescriptor
            Tuple<AbstractReloadingMetadataResolver, Supplier<EntityDescriptor>> tuple = SamlRealm.initializeResolver(
                logger,
                config.v1(),
                config.v2(),
                watcherService
            );
            try {
                assertIdp1MetadataParsedCorrectly(tuple.v2().get());
                final IdpConfiguration idpConf = SamlRealm.getIdpConfiguration(realmConfig, tuple.v1(), tuple.v2());
                // Trigger initialized log message
                final List<PublicKey> keys1 = idpConf.getSigningCredentials().stream().map(Credential::getPublicKey).toList();
                // Add metadata update listener
                final CountDownLatch metadataUpdateLatch = new CountDownLatch(1);
                FileReloadListener metadataUpdateListener = new FileReloadListener(realmMetadataPath, metadataUpdateLatch::countDown);
                FileWatcher metadataUpdateWatcher = new FileWatcher(realmMetadataPath);
                metadataUpdateWatcher.addListener(metadataUpdateListener);
                watcherService.add(metadataUpdateWatcher, ResourceWatcherService.Frequency.MEDIUM);
                // Put updated metadata contents into realm metadata file
                Files.writeString(realmMetadataPath, Files.readString(updatedMetadataPath));
                // Remove metadata update listener
                metadataUpdateLatch.await();
                metadataUpdateWatcher.remove(metadataUpdateListener);
                assertThat(Files.readString(realmMetadataPath), is(equalTo(Files.readString(updatedMetadataPath))));
                // Trigger changed log message
                final List<PublicKey> keys2 = idpConf.getSigningCredentials().stream().map(Credential::getPublicKey).toList();
                // Same certificates in both metadata documents => same public keys after reload
                assertThat(keys1, is(equalTo(keys2)));
                // Trigger not changed log message
                assertThat(Files.readString(realmMetadataPath), is(equalTo(Files.readString(updatedMetadataPath))));
                final List<PublicKey> keys3 = idpConf.getSigningCredentials().stream().map(Credential::getPublicKey).toList();
                assertThat(keys1, is(equalTo(keys3)));
            } finally {
                // Stop the resolver's background refresh before tearing down the thread pool
                tuple.v1().destroy();
            }
        }
    } finally {
        testThreadPool.shutdown();
    }
}
/**
 * Builds a Mockito stub of an IdP {@link EntityDescriptor} whose entity ID is
 * {@code "https://idp.saml/"}; all other methods keep Mockito's defaults.
 */
private EntityDescriptor mockIdp() {
    final EntityDescriptor idp = mock(EntityDescriptor.class);
    when(idp.getEntityID()).thenReturn("https://idp.saml/");
    return idp;
}
/**
 * Convenience overload of {@link #buildConfig(String, Consumer)} that applies no
 * additional realm settings.
 */
private Tuple<RealmConfig, SSLService> buildConfig(String idpMetadataPath) {
    final Consumer<Settings.Builder> noExtraSettings = builder -> {};
    return buildConfig(idpMetadataPath, noExtraSettings);
}
/**
 * Builds a realm configuration for the given IdP metadata path, lets the caller customize
 * the settings, and pairs the resulting {@link RealmConfig} with an {@link SSLService}
 * created from the same environment.
 *
 * @param idpMetadataPath    path (or URL) of the IdP metadata
 * @param additionalSettings hook that may add or override settings before the config is built
 */
private Tuple<RealmConfig, SSLService> buildConfig(String idpMetadataPath, Consumer<Settings.Builder> additionalSettings) {
    final Settings.Builder settingsBuilder = buildSettings(idpMetadataPath);
    additionalSettings.accept(settingsBuilder);
    final RealmConfig realmConfig = realmConfigFromGlobalSettings(settingsBuilder.build());
    return new Tuple<>(realmConfig, new SSLService(realmConfig.env()));
}
/**
 * Assembles the baseline settings for a single-SP SAML realm pointing at the given IdP
 * metadata location: SSL key/cert material for the realm, the IdP entity id, the HTTP
 * metadata refresh interval, the principal attribute, and the token service toggle.
 * The returned builder is still open for further customization by the caller.
 */
private Settings.Builder buildSettings(String idpMetadataPath) {
    final MockSecureSettings secureSettings = new MockSecureSettings();
    final String realmType = SingleSpSamlRealmSettings.TYPE;
    secureSettings.setString(REALM_SETTINGS_PREFIX + ".ssl.secure_key_passphrase", "testnode");
    // The same test certificate serves as both the realm certificate and its trust anchor.
    final var nodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
    final var nodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
    return Settings.builder()
        .put(REALM_SETTINGS_PREFIX + ".ssl.verification_mode", "certificate")
        .put(REALM_SETTINGS_PREFIX + ".ssl.key", nodeKey)
        .put(REALM_SETTINGS_PREFIX + ".ssl.certificate", nodeCert)
        .put(REALM_SETTINGS_PREFIX + ".ssl.certificate_authorities", nodeCert)
        .put(SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_PATH), idpMetadataPath)
        .put(SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_ENTITY_ID), TEST_IDP_ENTITY_ID)
        .put(
            SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_REFRESH),
            METADATA_REFRESH.getStringRep()
        )
        .put(RealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.PRINCIPAL_ATTRIBUTE.apply(realmType).getAttribute()), "uid")
        .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), true)
        .put("path.home", createTempDir())
        .setSecureSettings(secureSettings);
}
/**
 * Wraps the supplied realm settings into a {@link RealmConfig} for a realm of type
 * {@code "saml"} named {@link #REALM_NAME}, adding the mandatory realm order setting
 * and a {@code path.home} so an {@link Environment} can be created.
 */
private RealmConfig buildConfig(Settings realmSettings) {
    final Settings baseSettings = Settings.builder().put("path.home", createTempDir()).put(realmSettings).build();
    final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier("saml", REALM_NAME);
    final Settings withOrder = Settings.builder()
        .put(baseSettings)
        .put(RealmSettings.getFullSettingKey(identifier, RealmSettings.ORDER_SETTING), 0)
        .build();
    final Environment environment = TestEnvironment.newEnvironment(baseSettings);
    return new RealmConfig(identifier, withOrder, environment, threadContext);
}
/**
 * Builds a {@link RealmConfig} for realm {@code "saml"}/{@link #REALM_NAME} directly from
 * node-level settings, adding the mandatory realm order setting. Unlike
 * {@link #buildConfig(Settings)}, a fresh {@link ThreadContext} is created from the global
 * settings instead of reusing the test's shared one.
 */
private RealmConfig realmConfigFromGlobalSettings(Settings globalSettings) {
    final Environment environment = TestEnvironment.newEnvironment(globalSettings);
    final RealmConfig.RealmIdentifier identifier = new RealmConfig.RealmIdentifier("saml", REALM_NAME);
    final Settings realmSettings = Settings.builder()
        .put(globalSettings)
        .put(RealmSettings.getFullSettingKey(identifier, RealmSettings.ORDER_SETTING), 0)
        .build();
    return new RealmConfig(identifier, realmSettings, environment, new ThreadContext(globalSettings));
}
/**
 * Creates a {@link TestsSSLService} configured for HTTPS on the security HTTP layer,
 * using the shared {@code testnode} key/certificate pair (the certificate is also its
 * own trust anchor). Used by the HTTP-metadata tests to stand up a TLS MockWebServer.
 */
private TestsSSLService buildTestSslService() {
    final MockSecureSettings mockSecureSettings = new MockSecureSettings();
    mockSecureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode");
    final var httpKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
    final var httpCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
    final Settings settings = Settings.builder()
        .put("xpack.security.http.ssl.enabled", true)
        .put("xpack.security.http.ssl.key", httpKey)
        .put("xpack.security.http.ssl.certificate", httpCert)
        .put("xpack.security.http.ssl.certificate_authorities", httpCert)
        .putList("xpack.security.http.ssl.supported_protocols", XPackSettings.DEFAULT_SUPPORTED_PROTOCOLS)
        .put("path.home", createTempDir())
        .setSecureSettings(mockSecureSettings)
        .build();
    return new TestsSSLService(TestEnvironment.newEnvironment(settings));
}
/**
 * Verifies that custom HTTP connect/read timeout settings for IdP metadata retrieval are
 * honored: the settings round-trip through the realm config, and the resolver still fetches
 * and parses the metadata successfully from a TLS {@link MockWebServer}.
 */
public void testHttpMetadataWithCustomTimeouts() throws Exception {
    final Path path = getDataPath("idp1.xml");
    final String body = Files.readString(path);
    TestsSSLService sslService = buildTestSslService();
    try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext("xpack.security.http.ssl"), false)) {
        proxyServer.start();
        // Serve the idp1 metadata document over HTTPS
        proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody(body).addHeader("Content-Type", "application/xml"));
        final TimeValue customConnectTimeout = TimeValue.timeValueSeconds(3);
        final TimeValue customReadTimeout = TimeValue.timeValueSeconds(15);
        Tuple<RealmConfig, SSLService> config = buildConfig("https://localhost:" + proxyServer.getPort(), builder -> {
            builder.put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_CONNECT_TIMEOUT),
                customConnectTimeout.getStringRep()
            );
            builder.put(
                SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_READ_TIMEOUT),
                customReadTimeout.getStringRep()
            );
        });
        // Verify settings are correctly configured
        assertThat(config.v1().getSetting(SamlRealmSettings.IDP_METADATA_HTTP_CONNECT_TIMEOUT), equalTo(customConnectTimeout));
        assertThat(config.v1().getSetting(SamlRealmSettings.IDP_METADATA_HTTP_READ_TIMEOUT), equalTo(customReadTimeout));
        final ResourceWatcherService watcherService = mock(ResourceWatcherService.class);
        Tuple<AbstractReloadingMetadataResolver, Supplier<EntityDescriptor>> tuple = SamlRealm.initializeResolver(
            logger,
            config.v1(),
            config.v2(),
            watcherService
        );
        try {
            // At least one request must have reached the server and the metadata must parse
            assertThat(proxyServer.requests().size(), greaterThanOrEqualTo(1));
            assertIdp1MetadataParsedCorrectly(tuple.v2().get());
        } finally {
            // Stop the resolver's background refresh before the server is closed
            tuple.v1().destroy();
        }
    }
}
/**
 * Verifies that, with no explicit settings, the IdP metadata HTTP connect/read timeouts
 * default to 5s and 10s respectively, and that metadata retrieval from a TLS
 * {@link MockWebServer} still works with those defaults.
 */
public void testHttpMetadataWithDefaultTimeouts() throws Exception {
    final Path path = getDataPath("idp1.xml");
    final String body = Files.readString(path);
    TestsSSLService sslService = buildTestSslService();
    try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext("xpack.security.http.ssl"), false)) {
        proxyServer.start();
        // Serve the idp1 metadata document over HTTPS
        proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody(body).addHeader("Content-Type", "application/xml"));
        Tuple<RealmConfig, SSLService> config = buildConfig("https://localhost:" + proxyServer.getPort());
        // Verify default timeout values are used
        assertThat(config.v1().getSetting(SamlRealmSettings.IDP_METADATA_HTTP_CONNECT_TIMEOUT), equalTo(TimeValue.timeValueSeconds(5)));
        assertThat(config.v1().getSetting(SamlRealmSettings.IDP_METADATA_HTTP_READ_TIMEOUT), equalTo(TimeValue.timeValueSeconds(10)));
        final ResourceWatcherService watcherService = mock(ResourceWatcherService.class);
        Tuple<AbstractReloadingMetadataResolver, Supplier<EntityDescriptor>> tuple = SamlRealm.initializeResolver(
            logger,
            config.v1(),
            config.v2(),
            watcherService
        );
        try {
            // At least one request must have reached the server and the metadata must parse
            assertThat(proxyServer.requests().size(), greaterThanOrEqualTo(1));
            assertIdp1MetadataParsedCorrectly(tuple.v2().get());
        } finally {
            // Stop the resolver's background refresh before the server is closed
            tuple.v1().destroy();
        }
    }
}
/**
 * Verifies that when the IdP metadata URL cannot be reached and {@code fail_on_error} is
 * {@code false}, resolver initialization still completes and the descriptor supplier yields
 * an {@link UnresolvedEntity} placeholder instead of throwing.
 */
public void testHttpMetadataConnectionTimeout() throws Exception {
    // Use a non-routable IP address to simulate connection timeout
    // 192.0.2.1 is reserved for documentation and will not be routable
    final String unreachableUrl = "https://192.0.2.1:9999/metadata.xml";
    // Keep the connect timeout tiny so the test fails fast rather than waiting on the default
    final TimeValue shortConnectTimeout = TimeValue.timeValueMillis(100);
    Tuple<RealmConfig, SSLService> config = buildConfig(unreachableUrl, builder -> {
        builder.put(
            SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_CONNECT_TIMEOUT),
            shortConnectTimeout.getStringRep()
        );
        builder.put(SingleSpSamlRealmSettings.getFullSettingKey(REALM_NAME, SamlRealmSettings.IDP_METADATA_HTTP_FAIL_ON_ERROR), false);
    });
    final ResourceWatcherService watcherService = mock(ResourceWatcherService.class);
    // initialization should complete even though the connection fails
    Tuple<AbstractReloadingMetadataResolver, Supplier<EntityDescriptor>> tuple = SamlRealm.initializeResolver(
        logger,
        config.v1(),
        config.v2(),
        watcherService
    );
    try {
        EntityDescriptor descriptor = tuple.v2().get();
        assertThat(descriptor, instanceOf(UnresolvedEntity.class));
    } finally {
        // Stop the resolver's background refresh attempts
        tuple.v1().destroy();
    }
}
/**
 * Asserts that the given descriptor matches the expected contents of {@code idp1.xml}:
 * a SAML 2.0 IdP role with exactly two SSO endpoints, HTTP-POST first and
 * HTTP-Redirect second.
 */
private void assertIdp1MetadataParsedCorrectly(EntityDescriptor descriptor) {
    try {
        final IDPSSODescriptor idpRole = descriptor.getIDPSSODescriptor(SAMLConstants.SAML20P_NS);
        assertNotNull(idpRole);
        final List<SingleSignOnService> ssoEndpoints = idpRole.getSingleSignOnServices();
        assertEquals(2, ssoEndpoints.size());
        assertEquals(SAMLConstants.SAML2_POST_BINDING_URI, ssoEndpoints.get(0).getBinding());
        assertEquals(SAMLConstants.SAML2_REDIRECT_BINDING_URI, ssoEndpoints.get(1).getBinding());
    } catch (ElasticsearchSecurityException e) {
        // Convert a SAML exception into an assertion failure so that we can `assertBusy` on these checks
        throw new AssertionError("Failed to retrieve IdP metadata", e);
    }
}
}
| SamlRealmTests |
java | google__dagger | javatests/dagger/functional/multibindings/subpackage/ContributionsModule.java | {
"start": 916,
"end": 1334
} | class ____ {
@Provides
@IntoSet
static int contributeAnInt(@SuppressWarnings("unused") double doubleDependency) {
return 1742;
}
@Provides
@IntoSet
static int contributeAnotherInt() {
return 832;
}
@Provides
@ElementsIntoSet
static Set<Integer> contributeSomeInts() {
return Collections.unmodifiableSet(new LinkedHashSet<Integer>(Arrays.asList(-1, -90, -17)));
}
}
| ContributionsModule |
java | resilience4j__resilience4j | resilience4j-bulkhead/src/main/java/io/github/resilience4j/bulkhead/BulkheadConfig.java | {
"start": 861,
"end": 3361
} | class ____ implements Serializable {
private static final long serialVersionUID = -9139631465007403460L;
public static final int DEFAULT_MAX_CONCURRENT_CALLS = 25;
public static final Duration DEFAULT_MAX_WAIT_DURATION = Duration.ofSeconds(0);
public static final boolean DEFAULT_WRITABLE_STACK_TRACE_ENABLED = true;
public static final boolean DEFAULT_FAIR_CALL_HANDLING_STRATEGY_ENABLED = true;
private final int maxConcurrentCalls;
private final Duration maxWaitDuration;
private final boolean writableStackTraceEnabled;
private final boolean fairCallHandlingEnabled;
private BulkheadConfig(int maxConcurrentCalls, Duration maxWaitDuration,
boolean writableStackTraceEnabled, boolean fairCallHandlingEnabled) {
this.maxConcurrentCalls = maxConcurrentCalls;
this.maxWaitDuration = maxWaitDuration;
this.writableStackTraceEnabled = writableStackTraceEnabled;
this.fairCallHandlingEnabled = fairCallHandlingEnabled;
}
/**
* Returns a builder to create a custom BulkheadConfig.
*
* @return a {@link Builder}
*/
public static Builder custom() {
return new Builder();
}
/**
* Returns a builder to create a custom BulkheadConfig.
*
* @return a {@link Builder}
*/
public static Builder from(BulkheadConfig baseConfig) {
return new Builder(baseConfig);
}
/**
* Creates a default Bulkhead configuration.
*
* @return a default Bulkhead configuration.
*/
public static BulkheadConfig ofDefaults() {
return new Builder().build();
}
public int getMaxConcurrentCalls() {
return maxConcurrentCalls;
}
public Duration getMaxWaitDuration() {
return maxWaitDuration;
}
public boolean isWritableStackTraceEnabled() {
return writableStackTraceEnabled;
}
public boolean isFairCallHandlingEnabled() {
return fairCallHandlingEnabled;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("BulkheadConfig{");
sb.append("maxConcurrentCalls=").append(maxConcurrentCalls);
sb.append(", maxWaitDuration=").append(maxWaitDuration);
sb.append(", writableStackTraceEnabled=").append(writableStackTraceEnabled);
sb.append(", fairCallHandlingEnabled=").append(fairCallHandlingEnabled);
sb.append('}');
return sb.toString();
}
public static | BulkheadConfig |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultDependencyResolverResult.java | {
"start": 7405,
"end": 11205
} | class ____ or module path.
* This method can be invoked at most once.
*
* @param main the main output directory, or {@code null} if none
* @param test the test output directory, or {@code null} if none
* @throws IOException if an error occurred while reading module information
*
* TODO: this is currently not called. This is intended for use by Surefire and may move there.
*/
void addOutputDirectory(Path main, Path test) throws IOException {
if (outputModules != null) {
throw new IllegalStateException("Output directories must be set first and only once.");
}
if (main != null) {
outputModules = cache.getModuleInfo(main);
addPathElement(outputModules.getPathType(), main);
} else {
outputModules = PathModularization.NONE;
}
if (test != null) {
boolean addToClasspath = true;
PathModularization testModules = cache.getModuleInfo(test);
boolean isModuleHierarchy = outputModules.isModuleHierarchy || testModules.isModuleHierarchy;
for (Object value : outputModules.descriptors.values()) {
String moduleName = name(value);
Path subdir = test;
if (isModuleHierarchy) {
// If module hierarchy is used, the directory names shall be the module names.
Path path = test.resolve(moduleName);
if (!Files.isDirectory(path)) {
// Main module without tests. It is okay.
continue;
}
subdir = path;
}
// When the same module is found in main and test output, the latter is patching the former.
addPathElement(JavaPathType.patchModule(moduleName), subdir);
addToClasspath = false;
}
/*
* If the test output directory provides some modules of its own, add them.
* Except for this unusual case, tests should never be added to the module-path.
*/
for (Map.Entry<Path, Object> entry : testModules.descriptors.entrySet()) {
if (!outputModules.containsModule(name(entry.getValue()))) {
addPathElement(JavaPathType.MODULES, entry.getKey());
addToClasspath = false;
}
}
if (addToClasspath) {
addPathElement(JavaPathType.CLASSES, test);
}
}
}
/**
* Adds a dependency node to the result.
*
* @param node the dependency node
*/
void addNode(Node node) {
nodes.add(node);
}
/**
* Adds a dependency to the result. This method populates the {@link #nodes}, {@link #paths},
* {@link #dispatchedPaths} and {@link #dependencies} collections with the given arguments.
*
* @param node the dependency node
* @param dep the dependency for the given node, or {@code null} if none
* @param filter filter the paths accepted by the tool which will consume the path.
* @param path the path to the dependency, or {@code null} if the dependency was null
* @throws IOException if an error occurred while reading module information
*/
void addDependency(Node node, Dependency dep, Predicate<PathType> filter, Path path) throws IOException {
nodes.add(node);
if (dep == null) {
return;
}
if (dependencies.put(dep, path) != null) {
throw new IllegalStateException("Duplicated key: " + dep);
}
if (path == null) {
return;
}
paths.add(path);
/*
* Dispatch the dependency to | path |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/PineconeVectorDbEndpointBuilderFactory.java | {
"start": 11571,
"end": 13704
} | interface ____ {
/**
* Pinecone (camel-pinecone)
* Perform operations on the Pinecone Vector Database.
*
* Category: database,ai
* Since: 4.6
* Maven coordinates: org.apache.camel:camel-pinecone
*
* @return the dsl builder for the headers' name.
*/
default PineconeVectorDbHeaderNameBuilder pinecone() {
return PineconeVectorDbHeaderNameBuilder.INSTANCE;
}
/**
* Pinecone (camel-pinecone)
* Perform operations on the Pinecone Vector Database.
*
* Category: database,ai
* Since: 4.6
* Maven coordinates: org.apache.camel:camel-pinecone
*
* Syntax: <code>pinecone:collection</code>
*
* Path parameter: collection (required)
* The collection Name. (Only used by some actions)
*
* @param path collection
* @return the dsl builder
*/
default PineconeVectorDbEndpointBuilder pinecone(String path) {
return PineconeVectorDbEndpointBuilderFactory.endpointBuilder("pinecone", path);
}
/**
* Pinecone (camel-pinecone)
* Perform operations on the Pinecone Vector Database.
*
* Category: database,ai
* Since: 4.6
* Maven coordinates: org.apache.camel:camel-pinecone
*
* Syntax: <code>pinecone:collection</code>
*
* Path parameter: collection (required)
* The collection Name. (Only used by some actions)
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path collection
* @return the dsl builder
*/
default PineconeVectorDbEndpointBuilder pinecone(String componentName, String path) {
return PineconeVectorDbEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Pinecone component.
*/
public static | PineconeVectorDbBuilders |
java | apache__camel | test-infra/camel-test-infra-aws-v2/src/main/java/org/apache/camel/test/infra/aws2/services/AWSLambdaLocalContainerInfraService.java | {
"start": 1178,
"end": 1351
} | class ____ extends AWSLocalContainerInfraService {
public AWSLambdaLocalContainerInfraService() {
super(Service.LAMBDA);
}
}
| AWSLambdaLocalContainerInfraService |
java | mapstruct__mapstruct | processor/src/test/resources/fixtures/org/mapstruct/ap/test/accessibility/referenced/SourceTargetMapperPrivateImpl.java | {
"start": 444,
"end": 1113
} | class ____ implements SourceTargetMapperPrivate {
@Override
public Target toTarget(Source source) {
if ( source == null ) {
return null;
}
Target target = new Target();
target.setReferencedTarget( referencedSourceToReferencedTarget( source.getReferencedSource() ) );
return target;
}
protected ReferencedTarget referencedSourceToReferencedTarget(ReferencedSource referencedSource) {
if ( referencedSource == null ) {
return null;
}
ReferencedTarget referencedTarget = new ReferencedTarget();
return referencedTarget;
}
}
| SourceTargetMapperPrivateImpl |
java | apache__camel | components/camel-test/camel-test-junit5/src/main/java/org/apache/camel/test/junit5/util/RouteDumperExtension.java | {
"start": 1052,
"end": 1949
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(RouteDumperExtension.class);
private final ModelCamelContext context;
public RouteDumperExtension(ModelCamelContext context) {
this.context = context;
}
public void dumpRoute(Class<?> testClass, String currentTestName, String format) throws Exception {
LOG.debug("Dumping Route");
String className = testClass.getSimpleName();
String dir = "target/camel-route-dump";
String name = className + "-" + StringHelper.before(currentTestName, "(") + "." + format;
DumpRoutesStrategy drs = context.getCamelContextExtension().getContextPlugin(DumpRoutesStrategy.class);
drs.setOutput(dir + "/" + name);
drs.setInclude("*");
drs.setLog(false);
drs.setUriAsParameters(true);
drs.dumpRoutes(format);
}
}
| RouteDumperExtension |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InconsistentCapitalizationTest.java | {
"start": 4996,
"end": 5407
} | class ____ {
Object aa;
Test(Object aa) {
this.aa = aa;
}
}
""")
.doTest();
}
@Test
public void correctsInconsistentVariableNameInLambdaDefinitionToFieldCase() {
refactoringHelper
.addInputLines(
"in/Test.java",
"""
import java.util.function.Function;
| Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java | {
"start": 1356,
"end": 2956
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(POST, "/_index_template/_simulate"), new Route(POST, "/_index_template/_simulate/{name}"));
}
@Override
public String getName() {
return "simulate_template_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
SimulateTemplateAction.Request simulateRequest = new SimulateTemplateAction.Request(
getMasterNodeTimeout(request),
request.param("name")
);
simulateRequest.includeDefaults(request.paramAsBoolean("include_defaults", false));
if (request.hasContent()) {
TransportPutComposableIndexTemplateAction.Request indexTemplateRequest = new TransportPutComposableIndexTemplateAction.Request(
"simulating_template"
);
try (var parser = request.contentParser()) {
indexTemplateRequest.indexTemplate(ComposableIndexTemplate.parse(parser));
}
indexTemplateRequest.create(request.paramAsBoolean("create", false));
indexTemplateRequest.cause(request.param("cause", "api"));
simulateRequest.indexTemplateRequest(indexTemplateRequest);
}
return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute(
SimulateTemplateAction.INSTANCE,
simulateRequest,
new RestToXContentListener<>(channel)
);
}
}
| RestSimulateTemplateAction |
java | apache__hadoop | hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatDefaultSuites.java | {
"start": 1183,
"end": 2357
} | class ____ {
@Test
public void testSuiteAll() throws Exception {
HdfsCompatMiniCluster cluster = new HdfsCompatMiniCluster();
try {
cluster.start();
final String uri = cluster.getUri() + "/tmp";
Configuration conf = cluster.getConf();
HdfsCompatCommand cmd = new HdfsCompatTestCommand(uri, "ALL", conf);
cmd.initialize();
HdfsCompatReport report = cmd.apply();
assertEquals(0, report.getFailedCase().size());
new HdfsCompatTool(conf).printReport(report, System.out);
} finally {
cluster.shutdown();
}
}
@Test
public void testSuiteTpcds() throws Exception {
HdfsCompatMiniCluster cluster = new HdfsCompatMiniCluster();
try {
cluster.start();
final String uri = cluster.getUri() + "/tmp";
Configuration conf = cluster.getConf();
HdfsCompatCommand cmd = new HdfsCompatTestCommand(uri, "TPCDS", conf);
cmd.initialize();
HdfsCompatReport report = cmd.apply();
assertEquals(0, report.getFailedCase().size());
new HdfsCompatTool(conf).printReport(report, System.out);
} finally {
cluster.shutdown();
}
}
} | TestHdfsCompatDefaultSuites |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/net/SocketAddress.java | {
"start": 1074,
"end": 4794
} | interface ____ extends Address {
static SocketAddress fromJson(JsonObject json) {
Integer port = json.getInteger("port");
String host = json.getString("host");
if (host != null && port != null) {
if (port >= 0) {
return inetSocketAddress(port, host);
} else {
return sharedRandomPort(-port, host);
}
}
String path = json.getString("path");
if (path != null) {
return domainSocketAddress(path);
}
return null;
}
/**
* Create an inet socket address that binds to a shared random port identified by {@code id}.
* <br/>
* The {@code host} string can be a host name or a host address.
* <br/>
* No name resolution will be attempted.
*
* @param id the shared random port id
* @param host the host
* @return the created socket address
*/
static SocketAddress sharedRandomPort(int id, String host) {
if (id < 1) {
throw new IllegalArgumentException("Shared random port ID " + id + " must be > 0");
}
return new SocketAddressImpl(-id, host);
}
/**
* Create an inet socket address, {@code host} must be non {@code null} and {@code port} must be between {@code 0}
* and {@code 65536}.
* <br/>
* The {@code host} string can be a host name or a host address.
* <br/>
* No name resolution will be attempted.
*
* @param port the port
* @param host the host
* @return the created socket address
*/
static SocketAddress inetSocketAddress(int port, String host) {
Arguments.requireInRange(port, 0, 65535, "port p must be in range 0 <= p <= 65535");
return new SocketAddressImpl(port, host);
}
/**
* Create a domain socket address from a {@code path}.
*
* @param path the address path
* @return the created socket address
*/
static SocketAddress domainSocketAddress(String path) {
return new SocketAddressImpl(path);
}
/**
* Create a inet socket address from a Java {@link InetSocketAddress}.
* <br/>
* No name resolution will be attempted.
*
* @param address the address
* @return the created socket address
*/
@GenIgnore(GenIgnore.PERMITTED_TYPE)
static SocketAddress inetSocketAddress(InetSocketAddress address) {
return new SocketAddressImpl(address);
}
/**
* Returns the host name when available or the IP address in string representation.
* <br/>
* Domain socket address returns {@code null}.
*
* @return the host address
*/
@CacheReturn
String host();
/**
* Returns the host name when available or {@code null}
* <br/>
* Domain socket address returns {@code null}.
*
* @return the host name
*/
@CacheReturn
String hostName();
/**
* Returns the host IP address when available or {@code null} as a String.
* <br/>
* Domain socket address returns {@code null}.
*
* @return the host address
*/
@CacheReturn
String hostAddress();
/**
* @return the address port or {@code -1} for a domain socket
*/
@CacheReturn
int port();
/**
* @return the domain socket path or {@code null} for inet socket address, empty path represents unnamed domain socket addresses.
*/
@CacheReturn
String path();
/**
* @return {@code true} for an inet socket address
*/
@CacheReturn
boolean isInetSocket();
/**
* @return {@code true} for an domain socket address
*/
@CacheReturn
boolean isDomainSocket();
default JsonObject toJson() {
if (isInetSocket()) {
return new JsonObject().put("host", host()).put("port", port());
} else if (isDomainSocket()) {
return new JsonObject().put("path", path());
} else {
throw new IllegalStateException();
}
}
}
| SocketAddress |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/profile/UnlessBuildProfileStereotypeTest.java | {
"start": 2554,
"end": 2771
} | interface ____ {
}
@UnlessBuildProfile("test")
@Stereotype
@Inherited
@Target({ ElementType.TYPE, ElementType.METHOD, ElementType.FIELD })
@Retention(RetentionPolicy.RUNTIME)
public @ | TestNever |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigTreeConfigDataLocationResolverTests.java | {
"start": 1122,
"end": 3048
} | class ____ {
private final ConfigTreeConfigDataLocationResolver resolver = new ConfigTreeConfigDataLocationResolver(
new DefaultResourceLoader());
private final ConfigDataLocationResolverContext context = mock(ConfigDataLocationResolverContext.class);
@TempDir
@SuppressWarnings("NullAway.Init")
File temp;
@Test
void isResolvableWhenPrefixMatchesReturnsTrue() {
assertThat(this.resolver.isResolvable(this.context, ConfigDataLocation.of("configtree:/etc/config"))).isTrue();
}
@Test
void isResolvableWhenPrefixDoesNotMatchReturnsFalse() {
assertThat(this.resolver.isResolvable(this.context, ConfigDataLocation.of("http://etc/config"))).isFalse();
assertThat(this.resolver.isResolvable(this.context, ConfigDataLocation.of("/etc/config"))).isFalse();
}
@Test
void resolveReturnsConfigVolumeMountLocation() {
List<ConfigTreeConfigDataResource> locations = this.resolver.resolve(this.context,
ConfigDataLocation.of("configtree:/etc/config/"));
assertThat(locations).hasSize(1);
assertThat(locations).extracting(Object::toString)
.containsExactly("config tree [" + new File("/etc/config").getAbsolutePath() + "]");
}
@Test
void resolveWildcardPattern() throws Exception {
File directoryA = new File(this.temp, "a");
File directoryB = new File(this.temp, "b");
directoryA.mkdirs();
directoryB.mkdirs();
FileCopyUtils.copy("test".getBytes(), new File(directoryA, "spring"));
FileCopyUtils.copy("test".getBytes(), new File(directoryB, "boot"));
List<ConfigTreeConfigDataResource> locations = this.resolver.resolve(this.context,
ConfigDataLocation.of("configtree:" + this.temp.getAbsolutePath() + "/*/"));
assertThat(locations).hasSize(2);
assertThat(locations).extracting(Object::toString)
.containsExactly("config tree [" + directoryA.getAbsolutePath() + "]",
"config tree [" + directoryB.getAbsolutePath() + "]");
}
}
| ConfigTreeConfigDataLocationResolverTests |
java | spring-projects__spring-boot | module/spring-boot-jackson/src/test/java/org/springframework/boot/jackson/JacksonMixinModuleEntriesBeanRegistrationAotProcessorTests.java | {
"start": 5639,
"end": 6075
} | class ____ {
public boolean scanningInvoked;
private final Collection<String> packageNames;
TestConfiguration(Collection<String> packageNames) {
this.packageNames = packageNames;
}
@Bean
JacksonMixinModuleEntries jacksonMixinModuleEntries(ApplicationContext applicationContext) {
this.scanningInvoked = true;
return JacksonMixinModuleEntries.scan(applicationContext, this.packageNames);
}
}
}
| TestConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/nulliteral/NullLiteralExpression.java | {
"start": 626,
"end": 1769
} | class ____ {
@Test
@Jira( "https://hibernate.atlassian.net/browse/HHH-11159" )
public void testNullLiteralExpressionInCriteriaUpdate(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( entityManager -> {
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaUpdate<Person> criteriaUpdate = builder.createCriteriaUpdate( Person.class );
criteriaUpdate.from( Person.class );
criteriaUpdate.set( Person_.subject, builder.nullLiteral( Subject.class ) );
entityManager.createQuery( criteriaUpdate ).executeUpdate();
} );
}
@Test
@Jira( "https://hibernate.atlassian.net/browse/HHH-16803" )
public void testEnumNullLiteralUpdate(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( entityManager -> {
final CriteriaBuilder builder = entityManager.getCriteriaBuilder();
final CriteriaUpdate<Person> criteriaUpdate = builder.createCriteriaUpdate( Person.class );
criteriaUpdate.from( Person.class );
criteriaUpdate.set( Person_.eyeColor, builder.nullLiteral( EyeColor.class ) );
entityManager.createQuery( criteriaUpdate ).executeUpdate();
} );
}
}
| NullLiteralExpression |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestNMClientAsync.java | {
"start": 6868,
"end": 7292
} | class ____ extends NMClientAsyncImpl {
private Set<String> errorMsgs =
Collections.synchronizedSet(new HashSet<String>());
protected MockNMClientAsync1(int expectedSuccess, int expectedFailure)
throws YarnException, IOException {
super(MockNMClientAsync1.class.getName(), mockNMClient(0),
new TestCallbackHandler1(expectedSuccess, expectedFailure));
}
private | MockNMClientAsync1 |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/util/CopyOnWriteMap.java | {
"start": 1493,
"end": 9124
} | class ____<K, V> extends AbstractMap<K, V> implements ConcurrentMap<K, V> {
/**
* How many items to evict at a time, to make eviction a bit more efficient.
*/
static final int EVICTION_BATCH = 16;
/**
* Empty {@link HashMap} to avoid polymorphism.
*/
@SuppressWarnings("rawtypes")
private static final Map EMPTY = new HashMap();
private static final VarHandle ACTUAL_HANDLE;
private final int maxSizeWithEvictionMargin;
@SuppressWarnings("unused")
private Map<? extends K, ? extends V> actualField;
static {
try {
ACTUAL_HANDLE = MethodHandles.lookup().findVarHandle(CopyOnWriteMap.class, "actualField", Map.class);
} catch (NoSuchFieldException | IllegalAccessException e) {
throw new AssertionError("Field should be present", e);
}
}
/**
* @param maxSize The maximum size of this map
* @deprecated Use {@link #create(int)}
*/
@Deprecated(forRemoval = true)
@NextMajorVersion("Used by validation, remove when ready")
public CopyOnWriteMap(int maxSize) {
int maxSizeWithEvictionMargin = maxSize + EVICTION_BATCH;
if (maxSizeWithEvictionMargin < 0) {
maxSizeWithEvictionMargin = Integer.MAX_VALUE;
}
this.maxSizeWithEvictionMargin = maxSizeWithEvictionMargin;
actual(EMPTY);
}
/**
* Create a new COW map.
*
* @param maxSize The maximum size of the map, when arbitrary items should be evicted
* @return The map
* @param <K> The key type
* @param <V> The value type
*/
public static <K, V> ConcurrentMap<K, V> create(int maxSize) {
return new CopyOnWriteMap<>(maxSize);
}
@SuppressWarnings("unchecked")
private Map<? extends K, ? extends V> actual() {
return (Map<? extends K, ? extends V>) ACTUAL_HANDLE.getAcquire(this);
}
private void actual(Map<? extends K, ? extends V> map) {
ACTUAL_HANDLE.setRelease(this, map);
}
@NonNull
@Override
public Set<Entry<K, V>> entrySet() {
return new EntrySet();
}
@Override
public V get(Object key) {
return actual().get(key);
}
@SuppressWarnings("unchecked")
@Override
public V getOrDefault(Object key, V defaultValue) {
return ((Map<K, V>) actual()).getOrDefault(key, defaultValue);
}
@Override
public boolean containsKey(Object key) {
return actual().containsKey(key);
}
@Override
public boolean containsValue(Object value) {
return actual().containsValue(value);
}
@Override
public int size() {
return actual().size();
}
@SuppressWarnings("unchecked")
@Override
public synchronized void clear() {
actual(EMPTY);
}
@Override
public void putAll(Map<? extends K, ? extends V> m) {
update(map -> {
map.putAll(m);
return null;
});
}
@Override
public V remove(Object key) {
return update(m -> m.remove(key));
}
@Override
public int hashCode() {
return actual().hashCode();
}
@SuppressWarnings("EqualsWhichDoesntCheckParameterClass")
@Override
public boolean equals(Object o) {
return actual().equals(o);
}
@Override
public String toString() {
return actual().toString();
}
@Override
public void forEach(BiConsumer<? super K, ? super V> action) {
actual().forEach(action);
}
private synchronized <R> R update(Function<Map<K, V>, R> updater) {
Map<K, V> next = new HashMap<>(actual());
R ret = updater.apply(next);
int newSize = next.size();
if (newSize >= maxSizeWithEvictionMargin) {
evict(next, EVICTION_BATCH);
}
actual(next);
return ret;
}
/**
* Evict {@code numToEvict} items from the given {@code map} at random. This is not an atomic
* operation.
*
* @param map The map to modify
* @param numToEvict The number of items to remove
*/
public static void evict(Map<?, ?> map, int numToEvict) {
int size = map.size();
if (size < numToEvict) {
// can occasionally happen with concurrent evict with CHM. we're best-effort in that case
numToEvict = size;
}
// select some indices in the map to remove at random
BitSet toRemove = new BitSet(size);
for (int i = 0; i < numToEvict; i++) {
setUnset(toRemove, ThreadLocalRandom.current().nextInt(size - i));
}
// iterate over the map and remove those indices
Iterator<?> iterator = map.entrySet().iterator();
for (int i = 0; i < size; i++) {
try {
iterator.next();
} catch (NoSuchElementException ignored) {
// if called on a ConcurrentHashMap, another thread may have modified the map in
// the meantime
break;
}
if (toRemove.get(i)) {
iterator.remove();
}
}
}
/**
* Set the bit at {@code index}, with the index only counting unset bits. e.g. setting index 0
* when the first bit of the {@link BitSet} is already set would set the second bit (the first
* unset bit).
*
* @param set The bit set to modify
* @param index The index of the bit to set
*/
static void setUnset(BitSet set, int index) {
int i = 0;
while (true) {
int nextI = set.nextSetBit(i);
if (nextI == -1 || nextI > index) {
break;
}
i = nextI + 1;
index++;
}
set.set(index);
}
@Override
public V put(K key, V value) {
return update(m -> m.put(key, value));
}
@Override
public boolean remove(@NonNull Object key, Object value) {
return update(m -> m.remove(key, value));
}
@Override
public boolean replace(@NonNull K key, @NonNull V oldValue, @NonNull V newValue) {
return update(m -> m.replace(key, oldValue, newValue));
}
@Override
public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {
update(m -> {
m.replaceAll(function);
return null;
});
}
@Override
public V computeIfAbsent(K key, @NonNull Function<? super K, ? extends V> mappingFunction) {
V present = get(key);
if (present != null) {
// fast path without sync
return present;
} else {
return update(m -> m.computeIfAbsent(key, mappingFunction));
}
}
@Override
public V computeIfPresent(K key, @NonNull BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
return update(m -> m.computeIfPresent(key, remappingFunction));
}
@Override
public V compute(K key, @NonNull BiFunction<? super K, ? super V, ? extends V> remappingFunction) {
return update(m -> m.compute(key, remappingFunction));
}
@Override
public V merge(K key, @NonNull V value, @NonNull BiFunction<? super V, ? super V, ? extends V> remappingFunction) {
return update(m -> m.merge(key, value, remappingFunction));
}
@Override
public V putIfAbsent(@NonNull K key, V value) {
return update(m -> m.putIfAbsent(key, value));
}
@Override
public V replace(@NonNull K key, @NonNull V value) {
return update(m -> m.replace(key, value));
}
private final | CopyOnWriteMap |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/ingest/ValueSource.java | {
"start": 6902,
"end": 7698
} | class ____ implements ValueSource {
private final TemplateScript.Factory template;
TemplatedValue(TemplateScript.Factory template) {
this.template = template;
}
@Override
public Object copyAndResolve(Map<String, Object> model) {
return template.newInstance(model).execute();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TemplatedValue templatedValue = (TemplatedValue) o;
return Objects.equals(template, templatedValue.template);
}
@Override
public int hashCode() {
return Objects.hashCode(template);
}
}
}
| TemplatedValue |
java | apache__kafka | storage/src/test/java/org/apache/kafka/server/log/remote/storage/RemoteLogManagerConfigTest.java | {
"start": 7537,
"end": 7945
} | class ____ extends AbstractConfig {
private final RemoteLogManagerConfig rlmConfig;
public RLMTestConfig(Map<?, ?> originals) {
super(RemoteLogManagerConfig.configDef(), originals, true);
rlmConfig = new RemoteLogManagerConfig(this);
}
public RemoteLogManagerConfig remoteLogManagerConfig() {
return rlmConfig;
}
}
}
| RLMTestConfig |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/metadata/MetadataV6SerializerTest.java | {
"start": 2232,
"end": 6585
} | class ____ {
private static final MetadataSerializer INSTANCE = MetadataV6Serializer.INSTANCE;
private static final Random RND = new Random();
private String basePath;
private List<InputStateHandle> inputHandles;
private List<OutputStateHandle> outputHandles;
private CheckpointMetadata metadata;
@BeforeEach
public void beforeEach(@TempDir Path tempDir) throws IOException {
basePath = tempDir.toUri().toString();
final org.apache.flink.core.fs.Path metaPath =
new org.apache.flink.core.fs.Path(
basePath, AbstractFsCheckpointStorageAccess.METADATA_FILE_NAME);
// this is in the temp folder, so it will get automatically deleted
FileSystem.getLocalFileSystem().create(metaPath, FileSystem.WriteMode.OVERWRITE).close();
}
@Test
void testSerializeUnmergedChannelStateHandle() throws IOException {
testSerializeChannelStateHandle(
() ->
ChannelStateTestUtils.randomInputChannelStateHandlesFromSameSubtask()
.stream()
.map(e -> (InputStateHandle) e)
.collect(Collectors.toList()),
() ->
ChannelStateTestUtils.randomResultSubpartitionStateHandlesFromSameSubtask()
.stream()
.map(e -> (OutputStateHandle) e)
.collect(Collectors.toList()));
}
@Test
void testSerializeMergedChannelStateHandle() throws IOException {
testSerializeChannelStateHandle(
() -> Collections.singletonList(randomMergedInputChannelStateHandle()),
() -> Collections.singletonList(randomMergedResultSubpartitionStateHandle()));
}
private void testSerializeChannelStateHandle(
Supplier<List<InputStateHandle>> getter1, Supplier<List<OutputStateHandle>> getter2)
throws IOException {
prepareAndSerializeMetadata(getter1, getter2);
try (ByteArrayOutputStream out = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(out)) {
INSTANCE.serialize(metadata, dos);
try (DataInputStream dis =
new DataInputStream(new ByteArrayInputStream(out.toByteArray()))) {
CheckpointMetadata deserializedMetadata =
INSTANCE.deserialize(dis, metadata.getClass().getClassLoader(), basePath);
Collection<OperatorState> operatorStates = deserializedMetadata.getOperatorStates();
assertThat(operatorStates).hasSize(1);
OperatorState operatorState = operatorStates.iterator().next();
assertEquals(1, operatorState.getNumberCollectedStates());
OperatorSubtaskState subtaskState = operatorState.getState(0);
assertEquals(inputHandles, subtaskState.getInputChannelState().asList());
assertEquals(outputHandles, subtaskState.getResultSubpartitionState().asList());
}
}
}
private void prepareAndSerializeMetadata(
Supplier<List<InputStateHandle>> getter1, Supplier<List<OutputStateHandle>> getter2) {
Collection<OperatorState> operatorStates =
CheckpointTestUtils.createOperatorStates(RND, basePath, 1, 0, 0, 1);
inputHandles = getter1.get();
outputHandles = getter2.get();
// Set merged channel state handle to each subtask state
for (OperatorState operatorState : operatorStates) {
int subtaskStateCount = operatorState.getNumberCollectedStates();
for (int i = 0; i < subtaskStateCount; i++) {
OperatorSubtaskState originSubtaskState = operatorState.getState(i);
OperatorSubtaskState.Builder builder = originSubtaskState.toBuilder();
builder.setInputChannelState(new StateObjectCollection<>(inputHandles));
builder.setResultSubpartitionState(new StateObjectCollection<>(outputHandles));
operatorState.putState(i, builder.build());
}
}
metadata = new CheckpointMetadata(1L, operatorStates, emptyList(), null);
}
}
| MetadataV6SerializerTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/SplitterSkipTest.java | {
"start": 1025,
"end": 2371
} | class ____ extends ContextTestSupport {
@Test
public void testSplitterSkip() throws Exception {
getMockEndpoint("mock:line").expectedBodiesReceived("C", "D", "E");
List<Object> data = new ArrayList<>();
data.add("A");
data.add("B");
data.add("C");
data.add("D");
data.add("E");
template.sendBody("direct:start", data);
assertMockEndpointsSatisfied();
}
@Test
public void testSplitterEmpty() throws Exception {
getMockEndpoint("mock:line").expectedMessageCount(0);
List<Object> data = new ArrayList<>();
data.add("A");
data.add("B");
template.sendBody("direct:start", data);
assertMockEndpointsSatisfied();
}
@Test
public void testSplitterEmptyAgain() throws Exception {
getMockEndpoint("mock:line").expectedMessageCount(0);
List<Object> data = new ArrayList<>();
data.add("A");
template.sendBody("direct:start", data);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").split(simple("${skip(2)}")).to("mock:line");
}
};
}
}
| SplitterSkipTest |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/gen/script/Params.java | {
"start": 614,
"end": 1025
} | class ____ exists to handle the different aggregation cases.
* While aggs can appear in scripts like regular parameters, they are not passed
* as parameters but rather as bucket_path.
* However in some cases (like count), it's not the agg path that is relevant but rather
* its property (_count).
* As the agg name still needs to be remembered to properly associate the script with.
*
* Hence why this | mainly |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/DeadlockTest.java | {
"start": 1045,
"end": 1178
} | class ____ {
@Test
void deadlockOnReconfigure(final LoggerContext context) {
context.reconfigure();
}
}
| DeadlockTest |
java | processing__processing4 | app/src/processing/app/Sketch.java | {
"start": 49041,
"end": 58875
} | class ____ copying is completed
}
}
/**
* Add a file to the sketch.
* <p/>
* .pde or .java files will be added to the sketch folder. <br/>
* .jar, .class, .dll, .jnilib, and .so files will all
* be added to the "code" folder. <br/>
* All other files will be added to the "data" folder.
* <p/>
* If they don't exist already, the "code" or "data" folder
* will be created.
* <p/>
* @return true if successful.
*/
public boolean addFile(File sourceFile) {
if (sourceFile.isDirectory()) {
System.err.println("Skipping folder " + sourceFile);
System.err.println("Dragging and dropping a folder is not supported.");
return false;
}
String filename = sourceFile.getName();
File destFile = null;
String codeExtension = null;
boolean replacement = false;
boolean isCode = false;
// if the file appears to be code related, drop it
// into the code folder, instead of the data folder
if (filename.toLowerCase().endsWith(".class") ||
filename.toLowerCase().endsWith(".jar") ||
filename.toLowerCase().endsWith(".dll") ||
filename.toLowerCase().endsWith(".dylib") ||
filename.toLowerCase().endsWith(".jnilib") ||
filename.toLowerCase().endsWith(".so")) {
if (!codeFolder.exists()) {
boolean success = codeFolder.mkdirs();
if (!success) {
System.err.println("Could not create " + codeFolder);
return false;
}
}
destFile = new File(codeFolder, filename);
isCode = true;
} else {
for (String extension : mode.getExtensions()) {
String lower = filename.toLowerCase();
if (lower.endsWith("." + extension)) {
destFile = new File(this.folder, filename);
codeExtension = extension;
}
}
if (codeExtension == null) {
prepareDataFolder();
destFile = new File(dataFolder, filename);
}
}
// check whether this file already exists
if (destFile.exists()) {
Object[] options = { Language.text("prompt.ok"), Language.text("prompt.cancel") };
String prompt = Language.interpolate("add_file.messages.confirm_replace",
filename);
int result = JOptionPane.showOptionDialog(editor,
prompt,
"Replace",
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE,
null,
options,
options[0]);
if (result == JOptionPane.YES_OPTION) {
replacement = true;
} else {
return false;
}
}
// If it's a replacement, delete the old file first,
// otherwise case changes will not be preserved.
// https://download.processing.org/bugzilla/969.html
if (replacement) {
boolean muchSuccess = destFile.delete();
if (!muchSuccess) {
Messages.showWarning(Language.text("add_file.messages.error_adding"),
Language.interpolate("add_file.messages.cannot_delete.description", filename));
return false;
}
}
// make sure they aren't the same file
if ((codeExtension == null) && sourceFile.equals(destFile)) {
Messages.showWarning(Language.text("add_file.messages.same_file"),
Language.text("add_file.messages.same_file.description"));
return false;
}
// Handles "Add File" when a .pde is used. For 3.0b1, this no longer runs
// on a separate thread because it's totally unnecessary (a .pde file is
// not going to be so large that it's ever required) and otherwise we have
// to introduce a threading block here.
// https://github.com/processing/processing/issues/3383
if (!sourceFile.equals(destFile)) {
try {
Util.copyFile(sourceFile, destFile);
} catch (IOException e) {
Messages.showWarning(Language.text("add_file.messages.error_adding"),
Language.interpolate("add_file.messages.cannot_add.description", filename), e);
return false;
}
}
if (isCode) {
editor.codeFolderChanged();
}
if (codeExtension != null) {
SketchCode newCode = new SketchCode(destFile, codeExtension);
if (replacement) {
replaceCode(newCode);
} else {
insertCode(newCode);
sortCode();
}
setCurrentCode(filename);
editor.repaintHeader();
if (isUntitled()) { // TODO probably not necessary? problematic?
// Mark the new code as modified so that the sketch is saved
current.setModified(true);
}
} else {
if (isUntitled()) { // TODO probably not necessary? problematic?
// If a file has been added, mark the main code as modified so
// that the sketch is properly saved.
code[0].setModified(true);
}
}
return true;
}
/**
* Change what file is currently being edited. Changes the current tab index.
* <OL>
* <LI> store the String for the text of the current file.
* <LI> retrieve the String for the text of the new file.
* <LI> change the text that's visible in the text area
* </OL>
*/
public void setCurrentCode(int which) {
// if current is null, then this is the first setCurrent(0)
if (which < 0 || which >= codeCount ||
((currentIndex == which) && (current == code[currentIndex]))) {
return;
}
// get the text currently being edited
if (current != null) {
current.setState(editor.getText(),
editor.getSelectionStart(),
editor.getSelectionStop(),
editor.getScrollPosition());
}
current = code[which];
currentIndex = which;
current.visited = System.currentTimeMillis();
editor.setCode(current);
editor.repaintHeader();
}
/**
* Internal helper function to set the current tab based on a name.
* @param findName the file name (not pretty name) to be shown
*/
public void setCurrentCode(String findName) {
for (int i = 0; i < codeCount; i++) {
if (findName.equals(code[i].getFileName()) ||
findName.equals(code[i].getPrettyName())) {
setCurrentCode(i);
return;
}
}
}
/**
* Create a temporary folder that includes the sketch's name in its title.
*/
public File makeTempFolder() {
try {
return Util.createTempFolder(name, "temp", null);
} catch (IOException e) {
Messages.showWarning(Language.text("temp_dir.messages.bad_build_folder"),
Language.text("temp_dir.messages.bad_build_folder.description"), e);
}
return null;
}
/**
* Make sure the sketch hasn't been moved or deleted by a nefarious user.
* If they did, try to re-create it and save. Only checks whether the
* main folder is still around, but not its contents.
*/
public void ensureExistence() {
if (!folder.exists()) {
// Avoid an infinite loop if we've already warned about this
// https://github.com/processing/processing/issues/4805
if (!disappearedWarning) {
disappearedWarning = true;
// Disaster recovery, try to salvage what's there already.
Messages.showWarning(Language.text("ensure_exist.messages.missing_sketch"),
Language.text("ensure_exist.messages.missing_sketch.description"));
try {
folder.mkdirs();
modified = true;
for (int i = 0; i < codeCount; i++) {
code[i].save(); // this will force a save
}
calcModified();
} catch (Exception e) {
// disappearedWarning prevents infinite loop in this scenario
Messages.showWarning(Language.text("ensure_exist.messages.unrecoverable"),
Language.text("ensure_exist.messages.unrecoverable.description"), e);
}
}
}
}
/**
* Returns true if this is a read-only sketch. Used for the
* "examples" directory, or when sketches are loaded from read-only
* volumes or folders without appropriate permissions.
*/
public boolean isReadOnly() {
String path = folder.getAbsolutePath();
List<Mode> modes = editor.getBase().getModeList();
// Make sure it's not read-only for another Mode besides this one
// https://github.com/processing/processing/issues/773
for (Mode mode : modes) {
if (path.startsWith(mode.getExamplesFolder().getAbsolutePath()) ||
path.startsWith(mode.getLibrariesFolder().getAbsolutePath())) {
return true;
}
}
// Check to see if each modified code file can be written.
// Note: canWrite() does not work on directories.
for (int i = 0; i < codeCount; i++) {
if (code[i].isModified() &&
code[i].fileReadOnly() &&
code[i].fileExists()) {
return true;
}
}
return false;
}
// . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
// Additional accessors added in 0136 because of package work.
// These will also be helpful for tool developers.
/**
* Returns the name of this sketch. (The pretty name of the main tab.)
*/
public String getName() {
return name;
}
// /**
// * Returns a File object for the main .pde file for this sketch.
// */
// public File getMainFile() {
// return mainFile;
// }
/**
* Returns the name (without extension) of the main tab.
* (This version still has underscores and is a legit | when |
java | apache__camel | components/camel-grok/src/test/java/org/apache/camel/component/grok/GrokPatternsTest.java | {
"start": 1551,
"end": 5020
} | class ____ extends CamelTestSupport {
public static List<Arguments> data() {
String randomUuid = UUID.randomUUID().toString();
return Arrays.asList(
Arguments.of("%{QS:qs}", "this is some \"quoted string\".", test("qs", "quoted string")),
Arguments.of("%{UUID:uuid}", "some " + randomUuid, test("uuid", randomUuid)),
Arguments.of("%{MAC:mac}", "some:invalid:prefix:of:eth0:02:00:4c:4f:4f:50", test("mac", "02:00:4c:4f:4f:50")),
Arguments.of("%{PATH:path}", "C:\\path\\file", test("path", "C:\\path\\file")),
Arguments.of("%{PATH:path}", "C:\\path\\file.txt", test("path", "C:\\path\\file.txt")),
Arguments.of("%{PATH:path}", "\\\\server\\share\\path\\file", test("path", "\\\\server\\share\\path\\file")),
Arguments.of("%{PATH:path}", "/root/.hidden_file", test("path", "/root/.hidden_file")),
Arguments.of("%{PATH:path}", "/home/user/../../mnt", test("path", "/home/user/../../mnt")),
Arguments.of("%{PATH:path}", "/root", test("path", "/root")),
Arguments.of("%{URI:camelSite}", "the site is at http://camel.apache.org/",
test("camelSite", "http://camel.apache.org/")),
Arguments.of("%{URI:camelSite}", "the dataformat docs is at http://camel.apache.org/data-format.html",
test("camelSite", "http://camel.apache.org/data-format.html")),
Arguments.of("%{NUMBER:num}", "number is 123.", test("num", "123")),
Arguments.of("%{NUMBER:num:integer}", "number is 123.", test("num", 123)),
Arguments.of("%{IP:ip}", "my ip is 192.168.0.1", test("ip", "192.168.0.1")),
Arguments.of("%{TIMESTAMP_ISO8601:timestamp}", "This test was created at 2019-05-26T10:54:15Z test plain",
test("timestamp", "2019-05-26T10:54:15Z")),
Arguments.of("%{TIMESTAMP_ISO8601:timestamp:date}",
"This test was created at 2019-05-26T10:54:15Z test convert",
test("timestamp", Instant.ofEpochSecond(1558868055))));
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
}
};
}
@ParameterizedTest
@MethodSource("data")
public void testPattern(String pattern, String input, Consumer<Map> expectedOutputTest) throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:input")
.unmarshal().grok(pattern);
}
});
assertDoesNotThrow(() -> expectedOutputTest.accept(
template.requestBody("direct:input", input, Map.class)));
}
private static Consumer<Map> test(String key, Object value) {
return new Consumer<Map>() {
@Override
public void accept(Map m) {
boolean result = m != null && m.containsKey(key) && Objects.equals(m.get(key), value);
assertTrue(result, String.format("Expected: map.get(%s) == %s. Given map %s", key, value, m));
}
@Override
public String toString() {
return String.format("map[%s] = %s", key, value);
}
};
}
}
| GrokPatternsTest |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/service/annotation/GetExchange.java | {
"start": 1177,
"end": 1729
} | interface ____ {
/**
* Alias for {@link HttpExchange#value}.
*/
@AliasFor(annotation = HttpExchange.class)
String value() default "";
/**
* Alias for {@link HttpExchange#url()}.
*/
@AliasFor(annotation = HttpExchange.class)
String url() default "";
/**
* Alias for {@link HttpExchange#accept()}.
*/
@AliasFor(annotation = HttpExchange.class)
String[] accept() default {};
/**
* Alias for {@link HttpExchange#version()}.
* @since 7.0
*/
@AliasFor(annotation = HttpExchange.class)
String version() default "";
}
| GetExchange |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/source/manysourcearguments/DeliveryAddress.java | {
"start": 214,
"end": 1964
} | class ____ {
private String firstName;
private String lastName;
private int height;
private String street;
private int zipCode;
private int houseNumber;
private String description;
public DeliveryAddress() {
}
public DeliveryAddress(String firstName, String lastName, int height, String street, int zipCode, int houseNumber,
String description) {
this.firstName = firstName;
this.lastName = lastName;
this.height = height;
this.street = street;
this.zipCode = zipCode;
this.houseNumber = houseNumber;
this.description = description;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public int getHeight() {
return height;
}
public void setHeight(int height) {
this.height = height;
}
public String getStreet() {
return street;
}
public void setStreet(String street) {
this.street = street;
}
public int getZipCode() {
return zipCode;
}
public void setZipCode(int zipCode) {
this.zipCode = zipCode;
}
public int getHouseNumber() {
return houseNumber;
}
public void setHouseNumber(int houseNumber) {
this.houseNumber = houseNumber;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
}
| DeliveryAddress |
java | grpc__grpc-java | api/src/test/java/io/grpc/CallOptionsTest.java | {
"start": 11102,
"end": 11483
} | class ____ extends ClientStreamTracer.Factory {
final String name;
FakeTracerFactory(String name) {
this.name = name;
}
@Override
public ClientStreamTracer newClientStreamTracer(StreamInfo info, Metadata headers) {
return new ClientStreamTracer() {};
}
@Override
public String toString() {
return name;
}
}
}
| FakeTracerFactory |
java | apache__avro | lang/java/idl/src/main/java/org/apache/avro/idl/IdlSchemaFormatterFactory.java | {
"start": 932,
"end": 1104
} | class ____ implements SchemaFormatterFactory {
@Override
public SchemaFormatter getDefaultFormatter() {
return new IdlSchemaFormatter();
}
}
| IdlSchemaFormatterFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemavalidation/ExistingVarcharEnumColumnValidationTest.java | {
"start": 1129,
"end": 2872
} | class ____ {
@BeforeEach
public void setUp() {
try (var registry = ServiceRegistryUtil.serviceRegistryBuilder().build()) {
JdbcUtils.withConnection( registry, (connection) -> {
try (var statement = connection.createStatement()) {
try {
dropSchema( statement );
}
catch (Exception ignore) {
}
createSchema( statement );
}
} );
}
}
private void dropSchema(Statement statement) throws SQLException {
statement.execute( "drop table en cascade" );
}
private void createSchema(Statement statement) throws SQLException {
statement.execute(
"""
create table en (
id integer not null,
sign_position varchar(255)
check (sign_position in (
'AFTER_NO_SPACE',
'AFTER_WITH_SPACE',
'BEFORE_NO_SPACE',
'BEFORE_WITH_SPACE')
),
primary key (id)
)
"""
);
}
@AfterEach
public void tearDown() {
try (var registry = ServiceRegistryUtil.serviceRegistryBuilder().build()) {
final var connections = registry.requireService( ConnectionProvider.class );
JdbcUtils.withConnection( connections, (connection) -> {
try (var statement = connection.createStatement()) {
dropSchema( statement );
}
} );
}
}
@Test
public void testEnumDataTypeSchemaValidator() {
try (var ssr = ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting( AvailableSettings.HBM2DDL_AUTO, "validate" )
.build()) {
final MetadataSources metadataSources = new MetadataSources( ssr );
metadataSources.addAnnotatedClass( EntityE.class );
new SchemaValidator().validate( metadataSources.buildMetadata() );
}
}
@Entity(name = "en")
@Table(name = "en")
public static | ExistingVarcharEnumColumnValidationTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/dataflow/nullnesspropagation/NullnessInferenceTest.java | {
"start": 22307,
"end": 22819
} | interface ____ {}",
"}")
.doTest();
}
@Test
public void intersectionBounds() {
compilationHelper
.addSourceLines(
"IntersectionBoundsTest.java",
"""
package com.google.errorprone.dataflow.nullnesspropagation;
import static com.google.errorprone.dataflow.nullnesspropagation.NullnessInferenceTest.inspectInferredExpression;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.checkerframework.checker.nullness.qual.NonNull;
public | DefaultNotNull |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/action/RestListQueryRulesetsAction.java | {
"start": 996,
"end": 1969
} | class ____ extends EnterpriseSearchBaseRestHandler {
public RestListQueryRulesetsAction(XPackLicenseState licenseState) {
super(licenseState, LicenseUtils.Product.QUERY_RULES);
}
@Override
public String getName() {
return "query_ruleset_list_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/" + EnterpriseSearch.QUERY_RULES_API_ENDPOINT));
}
@Override
protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) {
int from = restRequest.paramAsInt("from", PageParams.DEFAULT_FROM);
int size = restRequest.paramAsInt("size", PageParams.DEFAULT_SIZE);
ListQueryRulesetsAction.Request request = new ListQueryRulesetsAction.Request(new PageParams(from, size));
return channel -> client.execute(ListQueryRulesetsAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}
| RestListQueryRulesetsAction |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/ValueInstantiatorTest.java | {
"start": 6343,
"end": 6748
} | class ____ extends InstantiatorBase
{
@Override
public String getValueTypeDesc() {
return MyMap.class.getName();
}
@Override
public boolean canCreateUsingDefault() { return true; }
@Override
public MyMap createUsingDefault(DeserializationContext ctxt) {
return new MyMap(true);
}
}
static | MyMapInstantiator |
java | apache__kafka | storage/src/main/java/org/apache/kafka/server/log/remote/metadata/storage/ClassLoaderAwareRemoteLogMetadataManager.java | {
"start": 1680,
"end": 6282
} | class ____ implements RemoteLogMetadataManager {
private final RemoteLogMetadataManager delegate;
private final ClassLoader loader;
public ClassLoaderAwareRemoteLogMetadataManager(RemoteLogMetadataManager delegate,
ClassLoader loader) {
this.delegate = delegate;
this.loader = loader;
}
@Override
public CompletableFuture<Void> addRemoteLogSegmentMetadata(RemoteLogSegmentMetadata remoteLogSegmentMetadata) throws RemoteStorageException {
return withClassLoader(() -> delegate.addRemoteLogSegmentMetadata(remoteLogSegmentMetadata));
}
@Override
public CompletableFuture<Void> updateRemoteLogSegmentMetadata(RemoteLogSegmentMetadataUpdate remoteLogSegmentMetadataUpdate) throws RemoteStorageException {
return withClassLoader(() -> delegate.updateRemoteLogSegmentMetadata(remoteLogSegmentMetadataUpdate));
}
@Override
public Optional<RemoteLogSegmentMetadata> remoteLogSegmentMetadata(TopicIdPartition topicIdPartition,
int epochForOffset,
long offset) throws RemoteStorageException {
return withClassLoader(() -> delegate.remoteLogSegmentMetadata(topicIdPartition, epochForOffset, offset));
}
@Override
public Optional<Long> highestOffsetForEpoch(TopicIdPartition topicIdPartition,
int leaderEpoch) throws RemoteStorageException {
return withClassLoader(() -> delegate.highestOffsetForEpoch(topicIdPartition, leaderEpoch));
}
@Override
public CompletableFuture<Void> putRemotePartitionDeleteMetadata(RemotePartitionDeleteMetadata remotePartitionDeleteMetadata) throws RemoteStorageException {
return withClassLoader(() -> delegate.putRemotePartitionDeleteMetadata(remotePartitionDeleteMetadata));
}
@Override
public Iterator<RemoteLogSegmentMetadata> listRemoteLogSegments(TopicIdPartition topicIdPartition) throws RemoteStorageException {
return withClassLoader(() -> delegate.listRemoteLogSegments(topicIdPartition));
}
@Override
public Iterator<RemoteLogSegmentMetadata> listRemoteLogSegments(TopicIdPartition topicIdPartition,
int leaderEpoch) throws RemoteStorageException {
return withClassLoader(() -> delegate.listRemoteLogSegments(topicIdPartition, leaderEpoch));
}
@Override
public void onPartitionLeadershipChanges(Set<TopicIdPartition> leaderPartitions,
Set<TopicIdPartition> followerPartitions) {
withClassLoader(() -> {
delegate.onPartitionLeadershipChanges(leaderPartitions, followerPartitions);
return null;
});
}
@Override
public void onStopPartitions(Set<TopicIdPartition> partitions) {
withClassLoader(() -> {
delegate.onStopPartitions(partitions);
return null;
});
}
@Override
public long remoteLogSize(TopicIdPartition topicIdPartition, int leaderEpoch) throws RemoteStorageException {
return withClassLoader(() -> delegate.remoteLogSize(topicIdPartition, leaderEpoch));
}
@Override
public Optional<RemoteLogSegmentMetadata> nextSegmentWithTxnIndex(TopicIdPartition topicIdPartition, int epoch, long offset) throws RemoteStorageException {
return withClassLoader(() -> delegate.nextSegmentWithTxnIndex(topicIdPartition, epoch, offset));
}
@Override
public boolean isReady(TopicIdPartition topicIdPartition) {
return withClassLoader(() -> delegate.isReady(topicIdPartition));
}
@Override
public void configure(Map<String, ?> configs) {
withClassLoader(() -> {
delegate.configure(configs);
return null;
});
}
@Override
public void close() throws IOException {
withClassLoader(() -> {
delegate.close();
return null;
});
}
private <T, E extends Exception> T withClassLoader(StorageAction<T, E> action) throws E {
ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(loader);
try {
return action.execute();
} finally {
Thread.currentThread().setContextClassLoader(originalClassLoader);
}
}
}
| ClassLoaderAwareRemoteLogMetadataManager |
java | spring-projects__spring-framework | spring-core/src/testFixtures/java/org/springframework/core/testfixture/codec/AbstractEncoderTests.java | {
"start": 1765,
"end": 9825
} | class ____<E extends Encoder<?>> extends AbstractLeakCheckingTests {
/**
* The encoder to test.
*/
protected final E encoder;
/**
* Construct a new {@code AbstractEncoderTestCase} for the given parameters.
* @param encoder the encoder
*/
protected AbstractEncoderTests(E encoder) {
Assert.notNull(encoder, "Encoder must not be null");
this.encoder = encoder;
}
/**
* Subclasses should implement this method to test {@link Encoder#canEncode}.
*/
@Test
protected abstract void canEncode() throws Exception;
/**
* Subclasses should implement this method to test {@link Encoder#encode}, possibly using
* {@link #testEncodeAll} or other helper methods.
*/
@Test
protected abstract void encode() throws Exception;
/**
* Helper method that tests for a variety of encoding scenarios. This method
* invokes:
* <ul>
* <li>{@link #testEncode(Publisher, ResolvableType, MimeType, Map, Consumer)}</li>
* <li>{@link #testEncodeError(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testEncodeCancel(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testEncodeEmpty(ResolvableType, MimeType, Map)}</li>
* </ul>
*
* @param input the input to be provided to the encoder
* @param inputClass the input class
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
* @param <T> the output type
*/
protected <T> void testEncodeAll(Publisher<? extends T> input, Class<? extends T> inputClass,
Consumer<StepVerifier.FirstStep<DataBuffer>> stepConsumer) {
testEncodeAll(input, ResolvableType.forClass(inputClass), null, null, stepConsumer);
}
/**
* Helper method that tests for a variety of decoding scenarios. This method
* invokes:
* <ul>
* <li>{@link #testEncode(Publisher, ResolvableType, MimeType, Map, Consumer)}</li>
* <li>{@link #testEncodeError(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testEncodeCancel(Publisher, ResolvableType, MimeType, Map)}</li>
* <li>{@link #testEncodeEmpty(ResolvableType, MimeType, Map)}</li>
* </ul>
*
* @param <T> the output type
* @param input the input to be provided to the encoder
* @param inputType the input type
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
*/
protected <T> void testEncodeAll(Publisher<? extends T> input, ResolvableType inputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints,
Consumer<StepVerifier.FirstStep<DataBuffer>> stepConsumer) {
testEncode(input, inputType, mimeType, hints, stepConsumer);
testEncodeError(input, inputType, mimeType, hints);
testEncodeCancel(input, inputType, mimeType, hints);
testEncodeEmpty(inputType, mimeType, hints);
}
/**
* Test a standard {@link Encoder#encode encode} scenario.
*
* @param input the input to be provided to the encoder
* @param inputClass the input class
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
* @param <T> the output type
*/
protected <T> void testEncode(Publisher<? extends T> input, Class<? extends T> inputClass,
Consumer<StepVerifier.FirstStep<DataBuffer>> stepConsumer) {
testEncode(input, ResolvableType.forClass(inputClass), null, null, stepConsumer);
}
/**
* Test a standard {@link Encoder#encode encode} scenario.
*
* @param <T> the output type
* @param input the input to be provided to the encoder
* @param inputType the input type
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
* @param stepConsumer a consumer to {@linkplain StepVerifier verify} the output
*/
protected <T> void testEncode(Publisher<? extends T> input, ResolvableType inputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints,
Consumer<StepVerifier.FirstStep<DataBuffer>> stepConsumer) {
Flux<DataBuffer> result = encoder().encode(input, this.bufferFactory, inputType, mimeType, hints);
StepVerifier.FirstStep<DataBuffer> step = StepVerifier.create(result);
stepConsumer.accept(step);
}
/**
* Test a {@link Encoder#encode encode} scenario where the input stream contains an error.
* This test method will feed the first element of the {@code input} stream to the encoder,
* followed by an {@link InputException}.
* The result is expected to contain one "normal" element, followed by the error.
*
* @param input the input to be provided to the encoder
* @param inputType the input type
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
* @see InputException
*/
protected void testEncodeError(Publisher<?> input, ResolvableType inputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
input = Flux.concat(
Flux.from(input).take(1),
Flux.error(new InputException()));
Flux<DataBuffer> result = encoder().encode(input, this.bufferFactory, inputType,
mimeType, hints);
StepVerifier.create(result)
.consumeNextWith(DataBufferUtils::release)
.expectError(InputException.class)
.verify();
}
/**
* Test a {@link Encoder#encode encode} scenario where the input stream is canceled.
* This test method will feed the first element of the {@code input} stream to the decoder,
* followed by a cancel signal.
* The result is expected to contain one "normal" element.
*
* @param input the input to be provided to the encoder
* @param inputType the input type
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
*/
protected void testEncodeCancel(Publisher<?> input, ResolvableType inputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
Flux<DataBuffer> result = encoder().encode(input, this.bufferFactory, inputType, mimeType,
hints);
StepVerifier.create(result)
.consumeNextWith(DataBufferUtils::release)
.thenCancel()
.verify();
}
/**
* Test a {@link Encoder#encode encode} scenario where the input stream is empty.
* The output is expected to be empty as well.
*
* @param inputType the input type
* @param mimeType the mime type to use for decoding. May be {@code null}.
* @param hints the hints used for decoding. May be {@code null}.
*/
protected void testEncodeEmpty(ResolvableType inputType, @Nullable MimeType mimeType,
@Nullable Map<String, Object> hints) {
Flux<?> input = Flux.empty();
Flux<DataBuffer> result = encoder().encode(input, this.bufferFactory, inputType,
mimeType, hints);
StepVerifier.create(result)
.verifyComplete();
}
/**
* Create a result consumer that expects the given bytes.
* @param expected the expected bytes
* @return a consumer that expects the given data buffer to be equal to {@code expected}
*/
protected final Consumer<DataBuffer> expectBytes(byte[] expected) {
return dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);
release(dataBuffer);
assertThat(resultBytes).isEqualTo(expected);
};
}
/**
* Create a result consumer that expects the given string, using the UTF-8 encoding.
* @param expected the expected string
* @return a consumer that expects the given data buffer to be equal to {@code expected}
*/
protected Consumer<DataBuffer> expectString(String expected) {
return dataBuffer -> {
String actual = dataBuffer.toString(UTF_8);
release(dataBuffer);
assertThat(actual).isEqualToNormalizingNewlines(expected);
};
}
@SuppressWarnings("unchecked")
private <T> Encoder<T> encoder() {
return (Encoder<T>) this.encoder;
}
/**
* Exception used in {@link #testEncodeError}.
*/
@SuppressWarnings("serial")
public static | AbstractEncoderTests |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/ModelVersionUtilsTest.java | {
"start": 2007,
"end": 4829
} | class ____ {
@Test
@DisplayName("should detect model version from document")
void shouldDetectModelVersionFromDocument() {
String pomXml = PomBuilder.create()
.groupId("test")
.artifactId("test")
.version("1.0.0")
.build();
Document document = Document.of(pomXml);
String result = ModelVersionUtils.detectModelVersion(document);
assertEquals("4.0.0", result);
}
@ParameterizedTest(name = "for {0}")
@ValueSource(strings = {"4.0.0", "4.1.0", "4.2.0"})
@DisplayName("should detect model version")
void shouldDetectModelVersionFromNamespace(String targetVersion) throws Exception {
String pomXml = PomBuilder.create()
.namespace("http://maven.apache.org/POM/" + targetVersion)
.modelVersion(targetVersion)
.groupId("test")
.artifactId("test")
.version("1.0.0")
.build();
Document document = Document.of(pomXml);
String result = ModelVersionUtils.detectModelVersion(document);
assertEquals(targetVersion, result);
}
@Test
@DisplayName("should return default version when model version is missing")
void shouldReturnDefaultVersionWhenModelVersionMissing() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document document = Document.of(pomXml);
String result = ModelVersionUtils.detectModelVersion(document);
assertEquals("4.0.0", result); // Default version
}
@Test
@DisplayName("should detect version from namespace when model version is missing")
void shouldDetectVersionFromNamespaceWhenModelVersionMissing() throws Exception {
String pomXml = """
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.1.0">
<groupId>test</groupId>
<artifactId>test</artifactId>
<version>1.0.0</version>
</project>
""";
Document document = Document.of(pomXml);
String result = ModelVersionUtils.detectModelVersion(document);
assertEquals("4.1.0", result);
}
}
@Nested
@DisplayName("Model Version Validation")
| ModelVersionDetectionTests |
java | apache__camel | test-infra/camel-test-infra-redis/src/main/java/org/apache/camel/test/infra/redis/services/RedisContainer.java | {
"start": 1823,
"end": 2565
} | class ____ extends RedisContainer {
public TestInfraRedisContainer() {
super(imageName);
waitingFor(Wait.forListeningPort());
if (fixedPort) {
addFixedExposedPort(RedisProperties.DEFAULT_PORT, RedisProperties.DEFAULT_PORT);
} else {
withNetworkAliases(networkAlias)
.withExposedPorts(RedisProperties.DEFAULT_PORT);
}
}
}
return new RedisContainer(imageName)
.withNetworkAliases(networkAlias)
.withExposedPorts(RedisProperties.DEFAULT_PORT)
.waitingFor(Wait.forListeningPort());
}
}
| TestInfraRedisContainer |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java | {
"start": 31328,
"end": 44745
} | class ____ of the annotation type to find
* @param classValuesAsString whether to convert Class references into Strings or to
* preserve them as Class references
* @param nestedAnnotationsAsMap whether to convert nested Annotation instances into
* {@code AnnotationAttributes} maps or to preserve them as Annotation instances
* @return the merged {@code AnnotationAttributes}, or {@code null} if not found
* @since 4.2
* @see #findMergedAnnotation(AnnotatedElement, Class)
* @see #getMergedAnnotationAttributes(AnnotatedElement, String, boolean, boolean)
*/
public static @Nullable AnnotationAttributes findMergedAnnotationAttributes(AnnotatedElement element,
String annotationName, boolean classValuesAsString, boolean nestedAnnotationsAsMap) {
MergedAnnotation<?> mergedAnnotation = findAnnotations(element)
.get(annotationName, null, MergedAnnotationSelectors.firstDirectlyDeclared());
return getAnnotationAttributes(mergedAnnotation, classValuesAsString, nestedAnnotationsAsMap);
}
/**
* Find the first annotation of the specified {@code annotationType} within
* the annotation hierarchy <em>above</em> the supplied {@code element},
* merge that annotation's attributes with <em>matching</em> attributes from
* annotations in lower levels of the annotation hierarchy, and synthesize
* the result back into an annotation of the specified {@code annotationType}.
* <p>{@link AliasFor @AliasFor} semantics are fully supported, both within
* a single annotation and within the annotation hierarchy.
* <p>This method follows <em>find semantics</em> as described in the
* {@linkplain AnnotatedElementUtils class-level javadoc}.
* @param element the annotated element
* @param annotationType the annotation type to find
* @return the merged, synthesized {@code Annotation}, or {@code null} if not found
* @since 4.2
* @see #findAllMergedAnnotations(AnnotatedElement, Class)
* @see #findMergedAnnotationAttributes(AnnotatedElement, String, boolean, boolean)
* @see #getMergedAnnotationAttributes(AnnotatedElement, Class)
*/
public static <A extends Annotation> @Nullable A findMergedAnnotation(AnnotatedElement element, Class<A> annotationType) {
// Shortcut: directly present on the element, with no merging needed?
if (AnnotationFilter.PLAIN.matches(annotationType) ||
AnnotationsScanner.hasPlainJavaAnnotationsOnly(element)) {
return element.getDeclaredAnnotation(annotationType);
}
// Exhaustive retrieval of merged annotations...
return findAnnotations(element)
.get(annotationType, null, MergedAnnotationSelectors.firstDirectlyDeclared())
.synthesize(MergedAnnotation::isPresent).orElse(null);
}
/**
* Find <strong>all</strong> annotations of the specified {@code annotationType}
* within the annotation hierarchy <em>above</em> the supplied {@code element};
* and for each annotation found, merge that annotation's attributes with
* <em>matching</em> attributes from annotations in lower levels of the annotation
* hierarchy and synthesize the results back into an annotation of the specified
* {@code annotationType}.
* <p>{@link AliasFor @AliasFor} semantics are fully supported, both within
* a single annotation and within the annotation hierarchy.
* <p>This method follows <em>find semantics</em> as described in the
* {@linkplain AnnotatedElementUtils class-level javadoc}.
* @param element the annotated element (never {@code null})
* @param annotationType the annotation type to find (never {@code null})
* @return the set of all merged, synthesized {@code Annotations} found,
* or an empty set if none were found
* @since 4.3
* @see #findMergedAnnotation(AnnotatedElement, Class)
* @see #getAllMergedAnnotations(AnnotatedElement, Class)
*/
public static <A extends Annotation> Set<A> findAllMergedAnnotations(AnnotatedElement element, Class<A> annotationType) {
return findAnnotations(element).stream(annotationType)
.sorted(highAggregateIndexesFirst())
.collect(MergedAnnotationCollectors.toAnnotationSet());
}
/**
* Find <strong>all</strong> annotations of the specified {@code annotationTypes}
* within the annotation hierarchy <em>above</em> the supplied {@code element};
* and for each annotation found, merge that annotation's attributes with
* <em>matching</em> attributes from annotations in lower levels of the
* annotation hierarchy and synthesize the results back into an annotation
* of the corresponding {@code annotationType}.
* <p>{@link AliasFor @AliasFor} semantics are fully supported, both within
* a single annotation and within the annotation hierarchy.
* <p>This method follows <em>find semantics</em> as described in the
* {@linkplain AnnotatedElementUtils class-level javadoc}.
* @param element the annotated element (never {@code null})
* @param annotationTypes the annotation types to find
* @return the set of all merged, synthesized {@code Annotations} found,
* or an empty set if none were found
* @since 5.1
* @see #findAllMergedAnnotations(AnnotatedElement, Class)
*/
public static Set<Annotation> findAllMergedAnnotations(AnnotatedElement element, Set<Class<? extends Annotation>> annotationTypes) {
return findAnnotations(element).stream()
.filter(MergedAnnotationPredicates.typeIn(annotationTypes))
.sorted(highAggregateIndexesFirst())
.collect(MergedAnnotationCollectors.toAnnotationSet());
}
/**
* Find all <em>repeatable annotations</em> of the specified {@code annotationType}
* within the annotation hierarchy <em>above</em> the supplied {@code element};
* and for each annotation found, merge that annotation's attributes with
* <em>matching</em> attributes from annotations in lower levels of the annotation
* hierarchy and synthesize the results back into an annotation of the specified
* {@code annotationType}.
* <p>The container type that holds the repeatable annotations will be looked up
* via {@link java.lang.annotation.Repeatable @Repeatable}.
* <p>{@link AliasFor @AliasFor} semantics are fully supported, both within
* a single annotation and within the annotation hierarchy.
* <p>This method follows <em>find semantics</em> as described in the
* {@linkplain AnnotatedElementUtils class-level javadoc}.
* @param element the annotated element (never {@code null})
* @param annotationType the annotation type to find (never {@code null})
* @return the set of all merged repeatable {@code Annotations} found,
* or an empty set if none were found
* @throws IllegalArgumentException if the {@code element} or {@code annotationType}
* is {@code null}, or if the container type cannot be resolved
* @since 4.3
* @see #findMergedAnnotation(AnnotatedElement, Class)
* @see #findAllMergedAnnotations(AnnotatedElement, Class)
* @see #findMergedRepeatableAnnotations(AnnotatedElement, Class, Class)
*/
public static <A extends Annotation> Set<A> findMergedRepeatableAnnotations(AnnotatedElement element,
Class<A> annotationType) {
return findMergedRepeatableAnnotations(element, annotationType, null);
}
/**
* Find all <em>repeatable annotations</em> of the specified {@code annotationType}
* within the annotation hierarchy <em>above</em> the supplied {@code element};
* and for each annotation found, merge that annotation's attributes with
* <em>matching</em> attributes from annotations in lower levels of the annotation
* hierarchy and synthesize the results back into an annotation of the specified
* {@code annotationType}.
* <p>{@link AliasFor @AliasFor} semantics are fully supported, both within
* a single annotation and within the annotation hierarchy.
* <p>This method follows <em>find semantics</em> as described in the
* {@linkplain AnnotatedElementUtils class-level javadoc}.
* <p><strong>WARNING</strong>: if the supplied {@code containerType} is not
* {@code null}, the search will be restricted to supporting only repeatable
* annotations whose container is the supplied {@code containerType}. This
* prevents the search from finding repeatable annotations declared as
* meta-annotations on other types of repeatable annotations. If you need to
* support such a use case, favor {@link #findMergedRepeatableAnnotations(AnnotatedElement, Class)}
* over this method or alternatively use the {@link MergedAnnotations} API
* directly in conjunction with {@link RepeatableContainers} that are
* {@linkplain RepeatableContainers#plus(Class, Class) composed} to support
* multiple repeatable annotation types — for example:
* <pre class="code">
* RepeatableContainers.standardRepeatables()
* .plus(MyRepeatable1.class, MyContainer1.class)
* .plus(MyRepeatable2.class, MyContainer2.class);</pre>
* @param element the annotated element (never {@code null})
* @param annotationType the repeatable annotation type to find (never {@code null})
* @param containerType the type of the container that holds the repeatable
* annotations; may be {@code null} if the container type should be looked up
* via {@link java.lang.annotation.Repeatable @Repeatable}
* @return the set of all merged repeatable {@code Annotations} found,
* or an empty set if none were found
* @throws IllegalArgumentException if the {@code element} or {@code annotationType}
* is {@code null}, or if the container type cannot be resolved
* @throws AnnotationConfigurationException if the supplied {@code containerType}
* is not a valid container annotation for the supplied {@code annotationType}
* @since 4.3
* @see #findMergedAnnotation(AnnotatedElement, Class)
* @see #findAllMergedAnnotations(AnnotatedElement, Class)
*/
public static <A extends Annotation> Set<A> findMergedRepeatableAnnotations(AnnotatedElement element,
Class<A> annotationType, @Nullable Class<? extends Annotation> containerType) {
return findRepeatableAnnotations(element, annotationType, containerType)
.stream(annotationType)
.sorted(highAggregateIndexesFirst())
.collect(MergedAnnotationCollectors.toAnnotationSet());
}
private static MergedAnnotations getAnnotations(AnnotatedElement element) {
return MergedAnnotations.from(element, SearchStrategy.INHERITED_ANNOTATIONS, RepeatableContainers.none());
}
private static MergedAnnotations getRepeatableAnnotations(AnnotatedElement element,
Class<? extends Annotation> annotationType, @Nullable Class<? extends Annotation> containerType) {
RepeatableContainers repeatableContainers;
if (containerType == null) {
// Invoke RepeatableContainers.explicitRepeatable() in order to adhere to the contract of
// getMergedRepeatableAnnotations() which states that an IllegalArgumentException
// will be thrown if the container cannot be resolved.
//
// In any case, we use standardRepeatables() in order to support repeatable
// annotations on other types of repeatable annotations (i.e., nested repeatable
// annotation types).
//
// See https://github.com/spring-projects/spring-framework/issues/20279
RepeatableContainers.explicitRepeatable(annotationType, null);
repeatableContainers = RepeatableContainers.standardRepeatables();
}
else {
repeatableContainers = RepeatableContainers.explicitRepeatable(annotationType, containerType);
}
return MergedAnnotations.from(element, SearchStrategy.INHERITED_ANNOTATIONS, repeatableContainers);
}
private static MergedAnnotations findAnnotations(AnnotatedElement element) {
return MergedAnnotations.from(element, SearchStrategy.TYPE_HIERARCHY, RepeatableContainers.none());
}
private static MergedAnnotations findRepeatableAnnotations(AnnotatedElement element,
Class<? extends Annotation> annotationType, @Nullable Class<? extends Annotation> containerType) {
RepeatableContainers repeatableContainers;
if (containerType == null) {
// Invoke RepeatableContainers.explicitRepeatable() in order to adhere to the contract of
// findMergedRepeatableAnnotations() which states that an IllegalArgumentException
// will be thrown if the container cannot be resolved.
//
// In any case, we use standardRepeatables() in order to support repeatable
// annotations on other types of repeatable annotations (i.e., nested repeatable
// annotation types).
//
// See https://github.com/spring-projects/spring-framework/issues/20279
RepeatableContainers.explicitRepeatable(annotationType, null);
repeatableContainers = RepeatableContainers.standardRepeatables();
}
else {
repeatableContainers = RepeatableContainers.explicitRepeatable(annotationType, containerType);
}
return MergedAnnotations.from(element, SearchStrategy.TYPE_HIERARCHY, repeatableContainers);
}
private static @Nullable MultiValueMap<String, Object> nullIfEmpty(MultiValueMap<String, Object> map) {
return (map.isEmpty() ? null : map);
}
private static <A extends Annotation> Comparator<MergedAnnotation<A>> highAggregateIndexesFirst() {
return Comparator.<MergedAnnotation<A>> comparingInt(MergedAnnotation::getAggregateIndex).reversed();
}
private static @Nullable AnnotationAttributes getAnnotationAttributes(MergedAnnotation<?> annotation,
boolean classValuesAsString, boolean nestedAnnotationsAsMap) {
if (!annotation.isPresent()) {
return null;
}
return annotation.asAnnotationAttributes(Adapt.values(classValuesAsString, nestedAnnotationsAsMap));
}
}
| name |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExportBulk.java | {
"start": 7564,
"end": 8453
} | class ____ extends FilterOutputStream {
private long bytesWritten = 0;
CountingOutputStream(final OutputStream out) {
super(out);
}
@Override
public void write(final int b) throws IOException {
out.write(b);
count(1);
}
@Override
public void write(final byte[] b) throws IOException {
write(b, 0, b.length);
}
@Override
public void write(final byte[] b, final int off, final int len) throws IOException {
out.write(b, off, len);
count(len);
}
@Override
public void close() {
// don't close nested stream
}
protected void count(final long written) {
if (written != -1) {
bytesWritten += written;
}
}
}
}
| CountingOutputStream |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/context/request/RequestScopeTests.java | {
"start": 1931,
"end": 7635
} | class ____ {
private final DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
@BeforeEach
void setup() {
this.beanFactory.registerScope("request", new RequestScope());
this.beanFactory.setBeanExpressionResolver(new StandardBeanExpressionResolver());
XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(this.beanFactory);
reader.loadBeanDefinitions(new ClassPathResource("requestScopeTests.xml", getClass()));
this.beanFactory.preInstantiateSingletons();
}
@AfterEach
void resetRequestAttributes() {
RequestContextHolder.setRequestAttributes(null);
}
@Test
void getFromScope() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContextPath("/path");
RequestAttributes requestAttributes = new ServletRequestAttributes(request);
RequestContextHolder.setRequestAttributes(requestAttributes);
String name = "requestScopedObject";
assertThat(request.getAttribute(name)).isNull();
TestBean bean = (TestBean) this.beanFactory.getBean(name);
assertThat(bean.getName()).isEqualTo("/path");
assertThat(request.getAttribute(name)).isSameAs(bean);
assertThat(this.beanFactory.getBean(name)).isSameAs(bean);
}
@Test
void destructionAtRequestCompletion() {
MockHttpServletRequest request = new MockHttpServletRequest();
ServletRequestAttributes requestAttributes = new ServletRequestAttributes(request);
RequestContextHolder.setRequestAttributes(requestAttributes);
String name = "requestScopedDisposableObject";
assertThat(request.getAttribute(name)).isNull();
DerivedTestBean bean = (DerivedTestBean) this.beanFactory.getBean(name);
assertThat(request.getAttribute(name)).isSameAs(bean);
assertThat(this.beanFactory.getBean(name)).isSameAs(bean);
requestAttributes.requestCompleted();
assertThat(bean.wasDestroyed()).isTrue();
}
@Test
void getFromFactoryBeanInScope() {
MockHttpServletRequest request = new MockHttpServletRequest();
RequestAttributes requestAttributes = new ServletRequestAttributes(request);
RequestContextHolder.setRequestAttributes(requestAttributes);
String name = "requestScopedFactoryBean";
assertThat(request.getAttribute(name)).isNull();
TestBean bean = (TestBean) this.beanFactory.getBean(name);
boolean condition = request.getAttribute(name) instanceof FactoryBean;
assertThat(condition).isTrue();
assertThat(this.beanFactory.getBean(name)).isSameAs(bean);
}
@Test
void circleLeadsToException() {
MockHttpServletRequest request = new MockHttpServletRequest();
RequestAttributes requestAttributes = new ServletRequestAttributes(request);
RequestContextHolder.setRequestAttributes(requestAttributes);
String name = "requestScopedObjectCircle1";
assertThat(request.getAttribute(name)).isNull();
assertThatExceptionOfType(BeanCreationException.class).isThrownBy(() ->
this.beanFactory.getBean(name))
.matches(ex -> ex.contains(BeanCurrentlyInCreationException.class));
}
@Test
void innerBeanInheritsContainingBeanScopeByDefault() {
MockHttpServletRequest request = new MockHttpServletRequest();
ServletRequestAttributes requestAttributes = new ServletRequestAttributes(request);
RequestContextHolder.setRequestAttributes(requestAttributes);
String outerBeanName = "requestScopedOuterBean";
assertThat(request.getAttribute(outerBeanName)).isNull();
TestBean outer1 = (TestBean) this.beanFactory.getBean(outerBeanName);
assertThat(request.getAttribute(outerBeanName)).isNotNull();
TestBean inner1 = (TestBean) outer1.getSpouse();
assertThat(this.beanFactory.getBean(outerBeanName)).isSameAs(outer1);
requestAttributes.requestCompleted();
assertThat(outer1.wasDestroyed()).isTrue();
assertThat(inner1.wasDestroyed()).isTrue();
request = new MockHttpServletRequest();
requestAttributes = new ServletRequestAttributes(request);
RequestContextHolder.setRequestAttributes(requestAttributes);
TestBean outer2 = (TestBean) this.beanFactory.getBean(outerBeanName);
assertThat(outer2).isNotSameAs(outer1);
assertThat(outer2.getSpouse()).isNotSameAs(inner1);
}
@Test
void requestScopedInnerBeanDestroyedWhileContainedBySingleton() {
MockHttpServletRequest request = new MockHttpServletRequest();
ServletRequestAttributes requestAttributes = new ServletRequestAttributes(request);
RequestContextHolder.setRequestAttributes(requestAttributes);
String outerBeanName = "singletonOuterBean";
TestBean outer1 = (TestBean) this.beanFactory.getBean(outerBeanName);
assertThat(request.getAttribute(outerBeanName)).isNull();
TestBean inner1 = (TestBean) outer1.getSpouse();
TestBean outer2 = (TestBean) this.beanFactory.getBean(outerBeanName);
assertThat(outer2).isSameAs(outer1);
assertThat(outer2.getSpouse()).isSameAs(inner1);
requestAttributes.requestCompleted();
assertThat(inner1.wasDestroyed()).isTrue();
assertThat(outer1.wasDestroyed()).isFalse();
}
@Test
void scopeNotAvailable() {
assertThatExceptionOfType(ScopeNotActiveException.class).isThrownBy(
() -> this.beanFactory.getBean(CountingTestBean.class));
ObjectProvider<CountingTestBean> beanProvider = this.beanFactory.getBeanProvider(CountingTestBean.class);
assertThatExceptionOfType(ScopeNotActiveException.class).isThrownBy(beanProvider::getObject);
assertThat(beanProvider.getIfAvailable()).isNull();
assertThat(beanProvider.getIfUnique()).isNull();
ObjectProvider<CountingTestBean> provider = this.beanFactory.createBean(ProviderBean.class).provider;
assertThatExceptionOfType(ScopeNotActiveException.class).isThrownBy(provider::getObject);
assertThat(provider.getIfAvailable()).isNull();
assertThat(provider.getIfUnique()).isNull();
}
public static | RequestScopeTests |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/components/dynamic/Age.java | {
"start": 172,
"end": 843
} | class ____ {
private int ageInYears;
public Age() {
}
public Age(int ageInYears) {
this.ageInYears = ageInYears;
}
public int getAgeInYears() {
return ageInYears;
}
public void setAgeInYears(int ageInYears) {
this.ageInYears = ageInYears;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !( o instanceof Age ) ) {
return false;
}
Age age = (Age) o;
if ( ageInYears != age.ageInYears ) {
return false;
}
return true;
}
@Override
public int hashCode() {
return ageInYears;
}
@Override
public String toString() {
return "Age{" +
"ageInYears=" + ageInYears +
'}';
}
}
| Age |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scheduling/annotation/EnableSchedulingTests.java | {
"start": 23744,
"end": 24202
} | class ____ {
@Autowired
ScheduledAnnotationBeanPostProcessor bpp;
@Bean
public AtomicInteger counter() {
return new AtomicInteger();
}
@Bean
public FactoryBeanForScheduled prototypeBean() {
return new FactoryBeanForScheduled(counter());
}
@PreDestroy
public void validateEarlyCancellation() {
if (!this.bpp.getScheduledTasks().isEmpty()) {
shutdownFailure.set(true);
}
}
}
static | FixedDelayTaskConfig_withFactoryBean |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/atomic/AtomicIntegerReadOnlyTest.java | {
"start": 580,
"end": 868
} | class ____ {
private final AtomicInteger value;
public V0(){
this(0);
}
public V0(int value){
this.value = new AtomicInteger(value);
}
public AtomicInteger getValue() {
return value;
}
}
}
| V0 |
java | apache__camel | components/camel-jpa/src/test/java/org/apache/camel/component/jpa/JpaWithQueryTest.java | {
"start": 1220,
"end": 1644
} | class ____ extends JpaWithNamedQueryTest {
@Override
protected void assertURIQueryOption(JpaConsumer jpaConsumer) {
assertEquals("select o from " + entityName + " o where o.step = 1", jpaConsumer.getQuery());
}
@Override
protected String getEndpointUri() {
return "jpa://" + MultiSteps.class.getName() + "?query=select o from " + entityName + " o where o.step = 1";
}
}
| JpaWithQueryTest |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/root/ApplicationTest.java | {
"start": 7382,
"end": 7623
} | class ____ implements Feature {
@Override
public boolean configure(FeatureContext context) {
context.register(ResponseFilter3.class);
return true;
}
}
@Provider
public static | Feature1 |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/parametrizedlist/ParametrizedListTest.java | {
"start": 1126,
"end": 2847
} | class ____ {
private SqlSessionFactory sqlSessionFactory;
@BeforeEach
void setUp() throws Exception {
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/parametrizedlist/Config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/parametrizedlist/CreateDB.sql");
}
@Test
void shouldDetectUsersAsParameterInsideAList() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<User<String>> list = mapper.getAListOfUsers();
Assertions.assertEquals(User.class, list.get(0).getClass());
}
}
@Test
void shouldDetectUsersAsParameterInsideAMap() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Map<Integer, User<String>> map = mapper.getAMapOfUsers();
Assertions.assertEquals(User.class, map.get(1).getClass());
}
}
@Test
void shouldGetAUserAsAMap() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Map<String, Object> map = mapper.getUserAsAMap();
Assertions.assertEquals(1, map.get("ID"));
}
}
@Test
void shouldGetAListOfMaps() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<Map<String, Object>> map = mapper.getAListOfMaps();
Assertions.assertEquals(1, map.get(0).get("ID"));
}
}
}
| ParametrizedListTest |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/partial/ThriftMetadata.java | {
"start": 6647,
"end": 7118
} | class ____ extends ThriftObject {
private static EnumCache enums = new EnumCache();
ThriftEnum(ThriftObject parent, TFieldIdEnum fieldId, FieldMetaData data) {
super(parent, fieldId, data);
}
@Override
protected void toPrettyString(StringBuilder sb, int level) {
this.append(sb, "%senum %s;\n", this.getIndent(level), this.getName());
}
}
/** Metadata of container like objects: list, set, map */
public abstract static | ThriftEnum |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/InsertWithSubSelectTest.java | {
"start": 1514,
"end": 1713
} | class ____ {
@Id
@GeneratedValue
private Integer id;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
@Entity(name = "B")
public static | A |
java | apache__camel | components/camel-tahu/src/generated/java/org/apache/camel/component/tahu/TahuHostEndpointUriFactory.java | {
"start": 514,
"end": 2630
} | class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":hostId";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(12);
props.add("bridgeErrorHandler");
props.add("checkClientIdLength");
props.add("clientId");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("hostId");
props.add("keepAliveTimeout");
props.add("password");
props.add("rebirthDebounceDelay");
props.add("servers");
props.add("sslContextParameters");
props.add("username");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(2);
secretProps.add("password");
secretProps.add("username");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "tahu-host".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "hostId", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
| TahuHostEndpointUriFactory |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/StateSnapshotTransformers.java | {
"start": 3705,
"end": 4500
} | class ____<T>
extends StateSnapshotTransformFactoryWrapAdaptor<T, List<T>> {
public ListStateSnapshotTransformFactory(
StateSnapshotTransformFactory<T> originalSnapshotTransformFactory) {
super(originalSnapshotTransformFactory);
}
@Override
public Optional<StateSnapshotTransformer<List<T>>> createForDeserializedState() {
return originalSnapshotTransformFactory
.createForDeserializedState()
.map(ListStateSnapshotTransformer::new);
}
}
/**
* General implementation of map state transformer.
*
* <p>This transformer wraps a transformer per-entry and transforms the whole map state.
*/
public static | ListStateSnapshotTransformFactory |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/utils/ClassUtils.java | {
"start": 8166,
"end": 8479
} | class ____ classes and interfaces.
* @return the simple name of the underlying class.
*/
public static String getSimpleName(Class cls) {
Objects.requireNonNull(cls, "cls");
return cls.getSimpleName();
}
/**
* Gets and returns the simple name of the underlying | represent |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientConfig.java | {
"start": 8155,
"end": 8588
} | enum ____ {
/**
* Uses a Netty-based transport re-using the existing Netty event loops.
*/
NETTY,
/**
* With a reactive driver it uses an async transport backed by a driver-managed thread pool.
*/
MONGO
}
/**
* Configures the reactive transport.
*/
@WithDefault("netty")
ReactiveTransportConfig reactiveTransport();
}
| ReactiveTransportConfig |
java | square__retrofit | retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java | {
"start": 101667,
"end": 102242
} | class ____ {
@GET("/")
Call<ResponseBody> method(@Tag String one, @Tag String two) {
return null;
}
}
try {
buildRequest(Example.class, "one", "two");
fail();
} catch (IllegalArgumentException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo(
"@Tag type java.lang.String is duplicate of parameter 'one' and would always overwrite its value. (parameter 'two')\n"
+ " for method Example.method");
}
}
@Test
public void tagGenericDuplicateFails() {
| Example |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/queue/FailedAckArgs.java | {
"start": 799,
"end": 1115
} | interface ____ extends QueueNegativeAckArgs {
/**
* Specifies the delay duration before the failed message is eligible
* for redelivery.
*
* @param value the delay duration before redelivery
* @return arguments object
*/
QueueNegativeAckArgs delay(Duration value);
}
| FailedAckArgs |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/request/transition/DrawableCrossFadeTransition.java | {
"start": 652,
"end": 2528
} | class ____ implements Transition<Drawable> {
private final int duration;
private final boolean isCrossFadeEnabled;
/**
* @param duration The duration that the cross fade animation should run if there is something to
* cross fade from when a new {@link android.graphics.drawable.Drawable} is put.
* @param isCrossFadeEnabled If {@code true}, animates the previous resource's alpha to 0 while
* animating the new resource's alpha to 100. Otherwise, only animates the new resource's
* alpha to 100 while leaving the previous resource's alpha at 100. See {@link
* TransitionDrawable#setCrossFadeEnabled(boolean)}.
*/
// Public API.
@SuppressWarnings("WeakerAccess")
public DrawableCrossFadeTransition(int duration, boolean isCrossFadeEnabled) {
this.duration = duration;
this.isCrossFadeEnabled = isCrossFadeEnabled;
}
/**
* Animates from the previous drawable to the current drawable in one of two ways.
*
* <ol>
* <li>Using the default animation provided in the constructor if the previous drawable is null
* <li>Using the cross fade animation with the duration provided in the constructor if the
* previous drawable is non null
* </ol>
*
* @param current {@inheritDoc}
* @param adapter {@inheritDoc}
* @return {@inheritDoc}
*/
@Override
public boolean transition(Drawable current, ViewAdapter adapter) {
Drawable previous = adapter.getCurrentDrawable();
if (previous == null) {
previous = new ColorDrawable(Color.TRANSPARENT);
}
TransitionDrawable transitionDrawable =
new TransitionDrawable(new Drawable[] {previous, current});
transitionDrawable.setCrossFadeEnabled(isCrossFadeEnabled);
transitionDrawable.startTransition(duration);
adapter.setDrawable(transitionDrawable);
return true;
}
}
| DrawableCrossFadeTransition |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockKey.java | {
"start": 1116,
"end": 1416
} | class ____ extends DelegationKey {
public BlockKey() {
super();
}
public BlockKey(int keyId, long expiryDate, SecretKey key) {
super(keyId, expiryDate, key);
}
public BlockKey(int keyId, long expiryDate, byte[] encodedKey) {
super(keyId, expiryDate, encodedKey);
}
}
| BlockKey |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/SchedulerNGFactory.java | {
"start": 2021,
"end": 3175
} | interface ____ {
SchedulerNG createInstance(
Logger log,
ExecutionPlan executionPlan,
Executor ioExecutor,
Configuration jobMasterConfiguration,
SlotPoolService slotPoolService,
ScheduledExecutorService futureExecutor,
ClassLoader userCodeLoader,
CheckpointRecoveryFactory checkpointRecoveryFactory,
Duration rpcTimeout,
BlobWriter blobWriter,
JobManagerJobMetricGroup jobManagerJobMetricGroup,
Duration slotRequestTimeout,
ShuffleMaster<?> shuffleMaster,
JobMasterPartitionTracker partitionTracker,
ExecutionDeploymentTracker executionDeploymentTracker,
long initializationTimestamp,
ComponentMainThreadExecutor mainThreadExecutor,
FatalErrorHandler fatalErrorHandler,
JobStatusListener jobStatusListener,
Collection<FailureEnricher> failureEnrichers,
BlocklistOperations blocklistOperations)
throws Exception;
JobManagerOptions.SchedulerType getSchedulerType();
}
| SchedulerNGFactory |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InterruptedExceptionSwallowedTest.java | {
"start": 15364,
"end": 15806
} | class ____ {
void test(Future<?> future) throws Exception {
throw new Exception();
}
}
""")
.doTest();
}
@Test
public void declaredInMethodThrows() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
| Test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.