language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java | {
"start": 4325,
"end": 32155
} | class ____ extends InferenceServiceTestCase {
private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
private final MockWebServer webServer = new MockWebServer();
private ThreadPool threadPool;
private HttpClientManager clientManager;
@Before
public void init() throws Exception {
webServer.start();
threadPool = createThreadPool(inferenceUtilityExecutors());
clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class));
}
@After
public void shutdown() throws IOException {
clientManager.close();
terminate(threadPool);
webServer.close();
}
public void testParseRequestConfig_CreatesACompletionModel() throws IOException {
var apiKey = "apiKey";
var modelId = "model";
try (var service = createServiceWithMockSender()) {
ActionListener<Model> modelListener = ActionListener.wrap(model -> {
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getSecretSettings().apiKey().toString(), is(apiKey));
}, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage()));
service.parseRequestConfig(
"id",
TaskType.COMPLETION,
getRequestConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)),
new HashMap<>(Map.of(AnthropicServiceFields.MAX_TOKENS, 1)),
getSecretSettingsMap(apiKey)
),
modelListener
);
}
}
public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException {
try (var service = createServiceWithMockSender()) {
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"The [anthropic] service does not support task type [sparse_embedding]"
);
service.parseRequestConfig(
"id",
TaskType.SPARSE_EMBEDDING,
getRequestConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, "model")),
new HashMap<>(Map.of()),
getSecretSettingsMap("secret")
),
failureListener
);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException {
try (var service = createServiceWithMockSender()) {
var config = getRequestConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, "model")),
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, null, null, null),
getSecretSettingsMap("secret")
);
config.put("extra_key", "value");
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [anthropic] service"
);
service.parseRequestConfig("id", TaskType.COMPLETION, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException {
try (var service = createServiceWithMockSender()) {
Map<String, Object> serviceSettings = new HashMap<>(Map.of(ServiceFields.MODEL_ID, "model"));
serviceSettings.put("extra_key", "value");
var config = getRequestConfigMap(
serviceSettings,
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, null, null, null),
getSecretSettingsMap("api_key")
);
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [anthropic] service"
);
service.parseRequestConfig("id", TaskType.COMPLETION, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() throws IOException {
try (var service = createServiceWithMockSender()) {
var taskSettingsMap = AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, null, null, null);
taskSettingsMap.put("extra_key", "value");
var config = getRequestConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, "model")),
taskSettingsMap,
getSecretSettingsMap("secret")
);
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [anthropic] service"
);
service.parseRequestConfig("id", TaskType.COMPLETION, config, failureListener);
}
}
public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException {
try (var service = createServiceWithMockSender()) {
Map<String, Object> secretSettings = getSecretSettingsMap("secret");
secretSettings.put("extra_key", "value");
var config = getRequestConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, "model")),
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, null, null, null),
secretSettings
);
var failureListener = getModelListenerForException(
ElasticsearchStatusException.class,
"Configuration contains settings [{extra_key=value}] unknown to the [anthropic] service"
);
service.parseRequestConfig("id", TaskType.COMPLETION, config, failureListener);
}
}
public void testParsePersistedConfigWithSecrets_CreatesACompletionModel() throws IOException {
var modelId = "model";
var apiKey = "apiKey";
try (var service = createServiceWithMockSender()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)),
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3),
getSecretSettingsMap(apiKey)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.COMPLETION,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertThat(completionModel.getSecretSettings().apiKey().toString(), is(apiKey));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException {
var modelId = "model";
var apiKey = "apiKey";
try (var service = createServiceWithMockSender()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)),
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3),
getSecretSettingsMap(apiKey)
);
persistedConfig.config().put("extra_key", "value");
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.COMPLETION,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertThat(completionModel.getSecretSettings().apiKey(), is(apiKey));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws IOException {
var modelId = "model";
var apiKey = "apiKey";
try (var service = createServiceWithMockSender()) {
var secretSettingsMap = getSecretSettingsMap(apiKey);
secretSettingsMap.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)),
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3),
secretSettingsMap
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.COMPLETION,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertThat(completionModel.getSecretSettings().apiKey().toString(), is(apiKey));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException {
var modelId = "model";
var apiKey = "apiKey";
try (var service = createServiceWithMockSender()) {
Map<String, Object> serviceSettingsMap = new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId));
serviceSettingsMap.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(
serviceSettingsMap,
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3),
getSecretSettingsMap(apiKey)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.COMPLETION,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertThat(completionModel.getSecretSettings().apiKey().toString(), is(apiKey));
}
}
public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException {
var modelId = "model";
var apiKey = "apiKey";
try (var service = createServiceWithMockSender()) {
Map<String, Object> taskSettings = AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3);
taskSettings.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)),
taskSettings,
getSecretSettingsMap(apiKey)
);
var model = service.parsePersistedConfigWithSecrets(
"id",
TaskType.COMPLETION,
persistedConfig.config(),
persistedConfig.secrets()
);
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertThat(completionModel.getSecretSettings().apiKey().toString(), is(apiKey));
}
}
public void testParsePersistedConfig_CreatesACompletionModel() throws IOException {
var modelId = "model";
try (var service = createServiceWithMockSender()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)),
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3)
);
var model = service.parsePersistedConfig("id", TaskType.COMPLETION, persistedConfig.config());
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertNull(completionModel.getSecretSettings());
}
}
public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException {
var modelId = "model";
try (var service = createServiceWithMockSender()) {
var persistedConfig = getPersistedConfigMap(
new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)),
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3)
);
persistedConfig.config().put("extra_key", "value");
var model = service.parsePersistedConfig("id", TaskType.COMPLETION, persistedConfig.config());
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertNull(completionModel.getSecretSettings());
}
}
public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException {
var modelId = "model";
try (var service = createServiceWithMockSender()) {
Map<String, Object> serviceSettingsMap = new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId));
serviceSettingsMap.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(
serviceSettingsMap,
AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3)
);
var model = service.parsePersistedConfig("id", TaskType.COMPLETION, persistedConfig.config());
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertNull(completionModel.getSecretSettings());
}
}
public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException {
var modelId = "model";
try (var service = createServiceWithMockSender()) {
Map<String, Object> taskSettings = AnthropicChatCompletionTaskSettingsTests.getChatCompletionTaskSettingsMap(1, 1.0, 2.1, 3);
taskSettings.put("extra_key", "value");
var persistedConfig = getPersistedConfigMap(new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId)), taskSettings);
var model = service.parsePersistedConfig("id", TaskType.COMPLETION, persistedConfig.config());
assertThat(model, instanceOf(AnthropicChatCompletionModel.class));
var completionModel = (AnthropicChatCompletionModel) model;
assertThat(completionModel.getServiceSettings().modelId(), is(modelId));
assertThat(completionModel.getTaskSettings(), is(new AnthropicChatCompletionTaskSettings(1, 1.0, 2.1, 3)));
assertNull(completionModel.getSecretSettings());
}
}
public void testInfer_ThrowsErrorWhenModelIsNotAValidModel() throws IOException {
var sender = createMockSender();
var factory = mock(HttpRequestSender.Factory.class);
when(factory.createSender()).thenReturn(sender);
var mockModel = getInvalidModel("model_id", "service_name");
try (var service = new AnthropicService(factory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
mockModel,
null,
null,
null,
List.of(""),
false,
new HashMap<>(),
InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
MatcherAssert.assertThat(
thrownException.getMessage(),
is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.")
);
verify(factory, times(1)).createSender();
verify(sender, times(1)).startAsynchronously(any());
}
verify(sender, times(1)).close();
verifyNoMoreInteractions(factory);
verifyNoMoreInteractions(sender);
}
public void testInfer_SendsCompletionRequest() throws IOException {
var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
try (var service = new AnthropicService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
String responseJson = """
{
"id": "msg_01XzZQmG41BMGe5NZ5p2vEWb",
"type": "message",
"role": "assistant",
"model": "claude-3-opus-20240229",
"content": [
{
"type": "text",
"text": "result"
}
],
"stop_reason": "end_turn",
"stop_sequence": null,
"usage": {
"input_tokens": 16,
"output_tokens": 326
}
}
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
var model = AnthropicChatCompletionModelTests.createChatCompletionModel(getUrl(webServer), "secret", "model", 1);
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
service.infer(
model,
null,
null,
null,
List.of("input"),
false,
new HashMap<>(),
InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
var result = listener.actionGet(TIMEOUT);
assertThat(result.asMap(), is(buildExpectationCompletions(List.of("result"))));
var request = webServer.requests().get(0);
assertNull(request.getUri().getQuery());
assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), Matchers.equalTo(XContentType.JSON.mediaType()));
assertThat(request.getHeader(AnthropicRequestUtils.X_API_KEY), Matchers.equalTo("secret"));
assertThat(
request.getHeader(AnthropicRequestUtils.VERSION),
Matchers.equalTo(AnthropicRequestUtils.ANTHROPIC_VERSION_2023_06_01)
);
var requestMap = entityAsMap(request.getBody());
assertThat(
requestMap,
is(Map.of("messages", List.of(Map.of("role", "user", "content", "input")), "model", "model", "max_tokens", 1))
);
}
}
public void testInfer_StreamRequest() throws Exception {
String responseJson = """
event: message_start
data: {"type": "message_start", "message": {"model": "claude, probably"}}
event: content_block_start
data: {"type": "content_block_start", "index": 0, "content_block": {"type": "text", "text": ""}}
event: ping
data: {"type": "ping"}
event: content_block_delta
data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": "Hello"}}
event: content_block_delta
data: {"type": "content_block_delta", "index": 0, "delta": {"type": "text_delta", "text": ", World"}}
event: content_block_stop
data: {"type": "content_block_stop", "index": 0}
event: message_delta
data: {"type": "message_delta", "delta": {"stop_reason": "end_turn", "stop_sequence":null}, "usage": {"output_tokens": 4}}
event: message_stop
data: {"type": "message_stop"}
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
streamChatCompletion().hasNoErrors().hasEvent("""
{"completion":[{"delta":"Hello"},{"delta":", World"}]}""");
}
private InferenceEventsAssertion streamChatCompletion() throws Exception {
var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
try (var service = new AnthropicService(senderFactory, createWithEmptySettings(threadPool), mockClusterServiceEmpty())) {
var model = AnthropicChatCompletionModelTests.createChatCompletionModel(
getUrl(webServer),
"secret",
"model",
Integer.MAX_VALUE
);
var listener = new PlainActionFuture<InferenceServiceResults>();
service.infer(
model,
null,
null,
null,
List.of("abc"),
true,
new HashMap<>(),
InputType.INGEST,
InferenceAction.Request.DEFAULT_TIMEOUT,
listener
);
return InferenceEventsAssertion.assertThat(listener.actionGet(TIMEOUT)).hasFinishedStream();
}
}
public void testInfer_StreamRequest_ErrorResponse() throws Exception {
String responseJson = """
data: {"type": "error", "error": {"type": "request_too_large", "message": "blah"}}
""";
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
streamChatCompletion().hasNoEvents()
.hasErrorWithStatusCode(RestStatus.REQUEST_ENTITY_TOO_LARGE.getStatus())
.hasErrorContaining("blah");
}
public void testGetConfiguration() throws Exception {
try (var service = createServiceWithMockSender()) {
String content = XContentHelper.stripWhitespace("""
{
"service": "anthropic",
"name": "Anthropic",
"task_types": ["completion"],
"configurations": {
"api_key": {
"description": "API Key for the provider you're connecting to.",
"label": "API Key",
"required": true,
"sensitive": true,
"updatable": true,
"type": "str",
"supported_task_types": ["completion"]
},
"rate_limit.requests_per_minute": {
"description": "By default, the anthropic service sets the number of requests allowed per minute to 50.",
"label": "Rate Limit",
"required": false,
"sensitive": false,
"updatable": false,
"type": "int",
"supported_task_types": ["completion"]
},
"model_id": {
"description": "The name of the model to use for the inference task.",
"label": "Model ID",
"required": true,
"sensitive": false,
"updatable": false,
"type": "str",
"supported_task_types": ["completion"]
},
"max_tokens": {
"description": "The maximum number of tokens to generate before stopping.",
"label": "Max Tokens",
"required": true,
"sensitive": false,
"updatable": false,
"type": "int",
"supported_task_types": ["completion"]
}
}
}
""");
InferenceServiceConfiguration configuration = InferenceServiceConfiguration.fromXContentBytes(
new BytesArray(content),
XContentType.JSON
);
boolean humanReadable = true;
BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable);
InferenceServiceConfiguration serviceConfiguration = service.getConfiguration();
assertToXContentEquivalent(
originalBytes,
toXContent(serviceConfiguration, XContentType.JSON, humanReadable),
XContentType.JSON
);
}
}
public void testSupportsStreaming() throws IOException {
try (var service = new AnthropicService(mock(), createWithEmptySettings(mock()), mockClusterServiceEmpty())) {
assertThat(service.supportedStreamingTasks(), is(EnumSet.of(TaskType.COMPLETION)));
assertFalse(service.canStream(TaskType.ANY));
}
}
private AnthropicService createServiceWithMockSender() {
return new AnthropicService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool), mockClusterServiceEmpty());
}
@Override
public InferenceService createInferenceService() {
return createServiceWithMockSender();
}
}
| AnthropicServiceTests |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2InboundFrameLogger.java | {
"start": 1024,
"end": 6408
} | class ____ implements Http2FrameReader {
private final Http2FrameReader reader;
private final Http2FrameLogger logger;
public Http2InboundFrameLogger(Http2FrameReader reader, Http2FrameLogger logger) {
this.reader = checkNotNull(reader, "reader");
this.logger = checkNotNull(logger, "logger");
}
@Override
public void readFrame(ChannelHandlerContext ctx, ByteBuf input, final Http2FrameListener listener)
throws Http2Exception {
reader.readFrame(ctx, input, new Http2FrameListener() {
@Override
public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data,
int padding, boolean endOfStream)
throws Http2Exception {
logger.logData(INBOUND, ctx, streamId, data, padding, endOfStream);
return listener.onDataRead(ctx, streamId, data, padding, endOfStream);
}
@Override
public void onHeadersRead(ChannelHandlerContext ctx, int streamId,
Http2Headers headers, int padding, boolean endStream)
throws Http2Exception {
logger.logHeaders(INBOUND, ctx, streamId, headers, padding, endStream);
listener.onHeadersRead(ctx, streamId, headers, padding, endStream);
}
@Override
public void onHeadersRead(ChannelHandlerContext ctx, int streamId,
Http2Headers headers, int streamDependency, short weight, boolean exclusive,
int padding, boolean endStream) throws Http2Exception {
logger.logHeaders(INBOUND, ctx, streamId, headers, streamDependency, weight, exclusive,
padding, endStream);
listener.onHeadersRead(ctx, streamId, headers, streamDependency, weight, exclusive,
padding, endStream);
}
@Override
public void onPriorityRead(ChannelHandlerContext ctx, int streamId,
int streamDependency, short weight, boolean exclusive) throws Http2Exception {
logger.logPriority(INBOUND, ctx, streamId, streamDependency, weight, exclusive);
listener.onPriorityRead(ctx, streamId, streamDependency, weight, exclusive);
}
@Override
public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode)
throws Http2Exception {
logger.logRstStream(INBOUND, ctx, streamId, errorCode);
listener.onRstStreamRead(ctx, streamId, errorCode);
}
@Override
public void onSettingsAckRead(ChannelHandlerContext ctx) throws Http2Exception {
logger.logSettingsAck(INBOUND, ctx);
listener.onSettingsAckRead(ctx);
}
@Override
public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings)
throws Http2Exception {
logger.logSettings(INBOUND, ctx, settings);
listener.onSettingsRead(ctx, settings);
}
@Override
public void onPingRead(ChannelHandlerContext ctx, long data) throws Http2Exception {
logger.logPing(INBOUND, ctx, data);
listener.onPingRead(ctx, data);
}
@Override
public void onPingAckRead(ChannelHandlerContext ctx, long data) throws Http2Exception {
logger.logPingAck(INBOUND, ctx, data);
listener.onPingAckRead(ctx, data);
}
@Override
public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId,
int promisedStreamId, Http2Headers headers, int padding) throws Http2Exception {
logger.logPushPromise(INBOUND, ctx, streamId, promisedStreamId, headers, padding);
listener.onPushPromiseRead(ctx, streamId, promisedStreamId, headers, padding);
}
@Override
public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode,
ByteBuf debugData) throws Http2Exception {
logger.logGoAway(INBOUND, ctx, lastStreamId, errorCode, debugData);
listener.onGoAwayRead(ctx, lastStreamId, errorCode, debugData);
}
@Override
public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement)
throws Http2Exception {
logger.logWindowsUpdate(INBOUND, ctx, streamId, windowSizeIncrement);
listener.onWindowUpdateRead(ctx, streamId, windowSizeIncrement);
}
@Override
public void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId,
Http2Flags flags, ByteBuf payload) throws Http2Exception {
logger.logUnknownFrame(INBOUND, ctx, frameType, streamId, flags, payload);
listener.onUnknownFrame(ctx, frameType, streamId, flags, payload);
}
});
}
@Override
public void close() {
reader.close();
}
@Override
public Configuration configuration() {
return reader.configuration();
}
}
| Http2InboundFrameLogger |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/javadoc/InvalidInlineTagTest.java | {
"start": 3526,
"end": 3720
} | interface ____ {
/** Provide an {@a} */
void foo(int a);
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | grpc__grpc-java | api/src/main/java/io/grpc/NameResolverRegistry.java | {
"start": 6448,
"end": 6932
} | class ____ extends NameResolver.Factory {
@Override
@Nullable
public NameResolver newNameResolver(URI targetUri, NameResolver.Args args) {
NameResolverProvider provider = getProviderForScheme(targetUri.getScheme());
return provider == null ? null : provider.newNameResolver(targetUri, args);
}
@Override
public String getDefaultScheme() {
return NameResolverRegistry.this.getDefaultScheme();
}
}
private static final | NameResolverFactory |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/DefaultObjectDeserializerTest11.java | {
"start": 295,
"end": 584
} | class ____ extends TestCase {
public void test_0() throws Exception {
A a = new A();
DefaultJSONParser parser = new DefaultJSONParser("{\"id\":123}", ParserConfig.getGlobalInstance());
parser.parseObject(a);
}
public static | DefaultObjectDeserializerTest11 |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/OffsetsRequestManager.java | {
"start": 41124,
"end": 42533
} | class ____ {
private final Map<TopicPartition, Long> timestampsToSearch;
private final Map<TopicPartition, ListOffsetData> fetchedOffsets;
private final Map<TopicPartition, Long> remainingToSearch;
private final CompletableFuture<ListOffsetResult> globalResult;
final boolean requireTimestamps;
final OffsetFetcherUtils offsetFetcherUtils;
final IsolationLevel isolationLevel;
private ListOffsetsRequestState(Map<TopicPartition, Long> timestampsToSearch,
boolean requireTimestamps,
OffsetFetcherUtils offsetFetcherUtils,
IsolationLevel isolationLevel) {
remainingToSearch = new HashMap<>();
fetchedOffsets = new HashMap<>();
globalResult = new CompletableFuture<>();
this.timestampsToSearch = timestampsToSearch;
this.requireTimestamps = requireTimestamps;
this.offsetFetcherUtils = offsetFetcherUtils;
this.isolationLevel = isolationLevel;
}
private void addPartitionsToRetry(Set<TopicPartition> partitionsToRetry) {
remainingToSearch.putAll(partitionsToRetry.stream()
.collect(Collectors.toMap(tp -> tp, timestampsToSearch::get)));
}
}
private static | ListOffsetsRequestState |
java | elastic__elasticsearch | x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java | {
"start": 952,
"end": 2097
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(POST, "/{index}/_downsample/{target_index}"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String sourceIndex = restRequest.param("index");
String targetIndex = restRequest.param("target_index");
String timeout = restRequest.param("timeout");
DownsampleConfig config;
try (var parser = restRequest.contentParser()) {
config = DownsampleConfig.fromXContent(parser);
}
DownsampleAction.Request request = new DownsampleAction.Request(
RestUtils.getMasterNodeTimeout(restRequest),
sourceIndex,
targetIndex,
TimeValue.parseTimeValue(timeout, null, "wait_timeout"),
config
);
return channel -> client.execute(DownsampleAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
@Override
public String getName() {
return "downsample_action";
}
}
| RestDownsampleAction |
java | alibaba__druid | druid-admin/src/main/java/com/alibaba/druid/admin/model/dto/SqlListResult.java | {
"start": 471,
"end": 3918
} | class ____ {
private String serviceId;
private String address;
private Integer port;
@JSONField(name = "ExecuteAndResultSetHoldTime")
private int ExecuteAndResultSetHoldTime;
@JSONField(name = "LastErrorMessage")
private Object LastErrorMessage;
@JSONField(name = "InputStreamOpenCount")
private int InputStreamOpenCount;
@JSONField(name = "BatchSizeTotal")
private int BatchSizeTotal;
@JSONField(name = "FetchRowCountMax")
private int FetchRowCountMax;
@JSONField(name = "ErrorCount")
private int ErrorCount;
@JSONField(name = "BatchSizeMax")
private int BatchSizeMax;
@JSONField(name = "URL")
private Object URL;
@JSONField(name = "Name")
private Object Name;
@JSONField(name = "LastErrorTime")
private Object LastErrorTime;
@JSONField(name = "ReaderOpenCount")
private int ReaderOpenCount;
@JSONField(name = "EffectedRowCountMax")
private int EffectedRowCountMax;
@JSONField(name = "LastErrorClass")
private Object LastErrorClass;
@JSONField(name = "InTransactionCount")
private int InTransactionCount;
@JSONField(name = "LastErrorStackTrace")
private Object LastErrorStackTrace;
@JSONField(name = "ResultSetHoldTime")
private int ResultSetHoldTime;
@JSONField(name = "TotalTime")
private int TotalTime;
@JSONField(name = "ID")
private int ID;
@JSONField(name = "ConcurrentMax")
private int ConcurrentMax;
@JSONField(name = "RunningCount")
private int RunningCount;
@JSONField(name = "FetchRowCount")
private int FetchRowCount;
@JSONField(name = "MaxTimespanOccurTime")
private String MaxTimespanOccurTime;
@JSONField(name = "LastSlowParameters")
private Object LastSlowParameters;
@JSONField(name = "ReadBytesLength")
private int ReadBytesLength;
@JSONField(name = "DbType")
private String DbType;
@JSONField(name = "DataSource")
private Object DataSource;
@JSONField(name = "SQL")
private String SQL;
@JSONField(name = "HASH")
private long HASH;
@JSONField(name = "LastError")
private Object LastError;
@JSONField(name = "MaxTimespan")
private int MaxTimespan;
@JSONField(name = "BlobOpenCount")
private int BlobOpenCount;
@JSONField(name = "ExecuteCount")
private int ExecuteCount;
@JSONField(name = "EffectedRowCount")
private int EffectedRowCount;
@JSONField(name = "ReadStringLength")
private int ReadStringLength;
@JSONField(name = "File")
private Object File;
@JSONField(name = "ClobOpenCount")
private int ClobOpenCount;
@JSONField(name = "LastTime")
private String LastTime;
@JSONField(name = "EffectedRowCountHistogram")
private List<Integer> EffectedRowCountHistogram;
@JSONField(name = "Histogram")
private List<Integer> Histogram;
@JSONField(name = "ExecuteAndResultHoldTimeHistogram")
private List<Integer> ExecuteAndResultHoldTimeHistogram;
@JSONField(name = "FetchRowCountHistogram")
private List<Integer> FetchRowCountHistogram;
}
}
| ContentBean |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java | {
"start": 5410,
"end": 6182
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory str;
private final EvalOperator.ExpressionEvaluator.Factory prefix;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str,
EvalOperator.ExpressionEvaluator.Factory prefix) {
this.source = source;
this.str = str;
this.prefix = prefix;
}
@Override
public StartsWithEvaluator get(DriverContext context) {
return new StartsWithEvaluator(source, str.get(context), prefix.get(context), context);
}
@Override
public String toString() {
return "StartsWithEvaluator[" + "str=" + str + ", prefix=" + prefix + "]";
}
}
}
| Factory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/valuehandlingmode/inline/NonPkAssociationEqualityPredicateParameterTest.java | {
"start": 1891,
"end": 2859
} | class ____ {
private String id;
private double totalPrice;
private Customer customer;
public Order() {
}
public Order(String id, double totalPrice) {
this.id = id;
this.totalPrice = totalPrice;
}
public Order(String id, Customer customer) {
this.id = id;
this.customer = customer;
}
public Order(String id) {
this.id = id;
}
@Id
@Column(name = "ID")
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
@Column(name = "TOTALPRICE")
public double getTotalPrice() {
return totalPrice;
}
public void setTotalPrice(double price) {
this.totalPrice = price;
}
@ManyToOne
@JoinColumn(name = "FK4_FOR_CUSTOMER_TABLE", referencedColumnName = "CUSTOMER_NUMBER")
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
}
@Entity
@Table(name = "CUSTOMER_TABLE")
public static | Order |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3296/MapperConfigWithPayloadArgument.java | {
"start": 319,
"end": 603
} | interface ____ {
@AfterMapping
default void afterMapping(@MappingTarget Entity entity, Payload unused) {
staticMethod( entity );
}
static void staticMethod(Entity entity) {
entity.setName( "AfterMapping called" );
}
}
| MapperConfigWithPayloadArgument |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/builder/mappingTarget/simple/SimpleImmutableUpdateMapper.java | {
"start": 328,
"end": 668
} | interface ____ {
SimpleImmutableUpdateMapper INSTANCE = Mappers.getMapper( SimpleImmutableUpdateMapper.class );
// This method is fine as if the mapping target has setters it would use them, otherwise it won't
void toImmutable(SimpleMutableSource source, @MappingTarget SimpleImmutableTarget target);
}
| SimpleImmutableUpdateMapper |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/rolling/RollingFileManager.java | {
"start": 32725,
"end": 33945
} | class ____ extends ArrayBlockingQueue<Runnable> {
/**
*
*/
private static final long serialVersionUID = 1L;
EmptyQueue() {
super(1);
}
@Override
public int remainingCapacity() {
return 0;
}
@Override
public boolean add(final Runnable runnable) {
throw new IllegalStateException("Queue is full");
}
@Override
public void put(final Runnable runnable) throws InterruptedException {
/* No point in going into a permanent wait */
throw new InterruptedException("Unable to insert into queue");
}
@Override
public boolean offer(final Runnable runnable, final long timeout, final TimeUnit timeUnit)
throws InterruptedException {
Thread.sleep(timeUnit.toMillis(timeout));
return false;
}
@Override
public boolean addAll(final Collection<? extends Runnable> collection) {
if (collection.size() > 0) {
throw new IllegalArgumentException("Too many items in collection");
}
return false;
}
}
}
| EmptyQueue |
java | google__guava | android/guava-testlib/test/com/google/common/testing/EqualsTesterTest.java | {
"start": 11882,
"end": 12196
} | class ____ {
@Override
public boolean equals(@Nullable Object o) {
return o != null;
}
@Override
public int hashCode() {
return 0;
}
}
private static NamedObject named(String name) {
return new NamedObject(name);
}
private static | InvalidEqualsIncompatibleClassObject |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java | {
"start": 191237,
"end": 201945
} | class ____ extends ValueExpressionContext {
public Token operator;
public ValueExpressionContext valueExpression() {
return getRuleContext(ValueExpressionContext.class, 0);
}
public TerminalNode MINUS() {
return getToken(SqlBaseParser.MINUS, 0);
}
public TerminalNode PLUS() {
return getToken(SqlBaseParser.PLUS, 0);
}
public ArithmeticUnaryContext(ValueExpressionContext ctx) {
copyFrom(ctx);
}
@Override
public void enterRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).enterArithmeticUnary(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if (listener instanceof SqlBaseListener) ((SqlBaseListener) listener).exitArithmeticUnary(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if (visitor instanceof SqlBaseVisitor) return ((SqlBaseVisitor<? extends T>) visitor).visitArithmeticUnary(this);
else return visitor.visitChildren(this);
}
}
public final ValueExpressionContext valueExpression() throws RecognitionException {
return valueExpression(0);
}
private ValueExpressionContext valueExpression(int _p) throws RecognitionException {
ParserRuleContext _parentctx = _ctx;
int _parentState = getState();
ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, _parentState);
ValueExpressionContext _prevctx = _localctx;
int _startState = 68;
enterRecursionRule(_localctx, 68, RULE_valueExpression, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
setState(632);
_errHandler.sync(this);
switch (_input.LA(1)) {
case T__0:
case ANALYZE:
case ANALYZED:
case CASE:
case CAST:
case CATALOGS:
case COLUMNS:
case CONVERT:
case CURRENT_DATE:
case CURRENT_TIME:
case CURRENT_TIMESTAMP:
case DAY:
case DEBUG:
case EXECUTABLE:
case EXPLAIN:
case EXTRACT:
case FALSE:
case FIRST:
case FORMAT:
case FULL:
case FUNCTIONS:
case GRAPHVIZ:
case HOUR:
case INTERVAL:
case LAST:
case LEFT:
case LIMIT:
case MAPPED:
case MINUTE:
case MONTH:
case NULL:
case OPTIMIZED:
case PARSED:
case PHYSICAL:
case PIVOT:
case PLAN:
case RIGHT:
case RLIKE:
case QUERY:
case SCHEMAS:
case SECOND:
case SHOW:
case SYS:
case TABLES:
case TEXT:
case TRUE:
case TOP:
case TYPE:
case TYPES:
case VERIFY:
case YEAR:
case FUNCTION_ESC:
case DATE_ESC:
case TIME_ESC:
case TIMESTAMP_ESC:
case GUID_ESC:
case ASTERISK:
case PARAM:
case STRING:
case INTEGER_VALUE:
case DECIMAL_VALUE:
case IDENTIFIER:
case DIGIT_IDENTIFIER:
case QUOTED_IDENTIFIER:
case BACKQUOTED_IDENTIFIER: {
_localctx = new ValueExpressionDefaultContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(629);
primaryExpression(0);
}
break;
case PLUS:
case MINUS: {
_localctx = new ArithmeticUnaryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
setState(630);
((ArithmeticUnaryContext) _localctx).operator = _input.LT(1);
_la = _input.LA(1);
if (!(_la == PLUS || _la == MINUS)) {
((ArithmeticUnaryContext) _localctx).operator = (Token) _errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(631);
valueExpression(4);
}
break;
default:
throw new NoViableAltException(this);
}
_ctx.stop = _input.LT(-1);
setState(646);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input, 90, _ctx);
while (_alt != 2 && _alt != org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER) {
if (_alt == 1) {
if (_parseListeners != null) triggerExitRuleEvent();
_prevctx = _localctx;
{
setState(644);
_errHandler.sync(this);
switch (getInterpreter().adaptivePredict(_input, 89, _ctx)) {
case 1: {
_localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext) _localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_valueExpression);
setState(634);
if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)");
setState(635);
((ArithmeticBinaryContext) _localctx).operator = _input.LT(1);
_la = _input.LA(1);
if (!(((((_la - 121)) & ~0x3f) == 0 && ((1L << (_la - 121)) & 7L) != 0))) {
((ArithmeticBinaryContext) _localctx).operator = (Token) _errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(636);
((ArithmeticBinaryContext) _localctx).right = valueExpression(4);
}
break;
case 2: {
_localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext) _localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_valueExpression);
setState(637);
if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
setState(638);
((ArithmeticBinaryContext) _localctx).operator = _input.LT(1);
_la = _input.LA(1);
if (!(_la == PLUS || _la == MINUS)) {
((ArithmeticBinaryContext) _localctx).operator = (Token) _errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(639);
((ArithmeticBinaryContext) _localctx).right = valueExpression(3);
}
break;
case 3: {
_localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState));
((ComparisonContext) _localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_valueExpression);
setState(640);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
setState(641);
comparisonOperator();
setState(642);
((ComparisonContext) _localctx).right = valueExpression(2);
}
break;
}
}
}
setState(648);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input, 90, _ctx);
}
}
} catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
} finally {
unrollRecursionContexts(_parentctx);
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | ArithmeticUnaryContext |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/function/scalar/ScalarFunction.java | {
"start": 2009,
"end": 7310
} | class ____ extends Function {
protected ScalarFunction(Source source) {
super(source, emptyList());
}
protected ScalarFunction(Source source, List<Expression> fields) {
super(source, fields);
}
//
// Script generation
//
public ScriptTemplate asScript(Expression exp) {
if (exp.foldable()) {
return scriptWithFoldable(exp);
}
if (exp instanceof FieldAttribute) {
return scriptWithField((FieldAttribute) exp);
}
if (exp instanceof ScalarFunction) {
return scriptWithScalar((ScalarFunction) exp);
}
if (exp instanceof AggregateFunction) {
return scriptWithAggregate((AggregateFunction) exp);
}
if (exp instanceof GroupingFunction) {
return scriptWithGrouping((GroupingFunction) exp);
}
throw new QlIllegalArgumentException("Cannot evaluate script for expression {}", exp);
}
protected ScriptTemplate scriptWithFoldable(Expression foldable) {
Object fold = foldable.fold();
// FIXME: this needs to be refactored
//
// Custom type handling
//
// wrap intervals with dedicated methods for serialization
if (fold instanceof ZonedDateTime zdt) {
return new ScriptTemplate(
processScript("{sql}.asDateTime({})"),
paramsBuilder().variable(DateUtils.toString(zdt)).build(),
dataType()
);
}
if (fold instanceof IntervalScripting is) {
return new ScriptTemplate(
processScript(is.script()),
paramsBuilder().variable(is.value()).variable(is.typeName()).build(),
dataType()
);
}
if (fold instanceof OffsetTime ot) {
return new ScriptTemplate(processScript("{sql}.asTime({})"), paramsBuilder().variable(ot.toString()).build(), dataType());
}
if (fold != null && fold.getClass().getSimpleName().equals("GeoShape")) {
return new ScriptTemplate(processScript("{sql}.stWktToSql({})"), paramsBuilder().variable(fold.toString()).build(), dataType());
}
return new ScriptTemplate(processScript("{}"), paramsBuilder().variable(fold).build(), dataType());
}
protected ScriptTemplate scriptWithScalar(ScalarFunction scalar) {
ScriptTemplate nested = scalar.asScript();
return new ScriptTemplate(processScript(nested.template()), paramsBuilder().script(nested.params()).build(), dataType());
}
protected ScriptTemplate scriptWithAggregate(AggregateFunction aggregate) {
String template = PARAM;
ParamsBuilder paramsBuilder = paramsBuilder().agg(aggregate);
DataType nullSafeCastDataType = null;
DataType dataType = aggregate.dataType();
if (dataType.name().equals("DATE") || dataType == DATETIME ||
// Aggregations on date_nanos are returned as string
aggregate.field().dataType() == DATETIME) {
template = "{sql}.asDateTime({})";
} else if (dataType.isInteger()) {
// MAX, MIN need to retain field's data type, so that possible operations on integral types (like division) work
// correctly -> perform a cast in the aggs filtering script, the bucket selector for HAVING.
// SQL function classes not available in QL: filter by name
String fn = aggregate.functionName();
if ("MAX".equals(fn) || "MIN".equals(fn)) {
nullSafeCastDataType = dataType;
} else if ("SUM".equals(fn)) {
// SUM(integral_type) requires returning a LONG value
nullSafeCastDataType = LONG;
}
}
if (nullSafeCastDataType != null) {
template = "{ql}.nullSafeCastNumeric({},{})";
paramsBuilder.variable(nullSafeCastDataType.name());
}
return new ScriptTemplate(processScript(template), paramsBuilder.build(), dataType());
}
// This method isn't actually used at the moment, since there is no grouping function (ie HISTOGRAM)
// that currently results in a script being generated
protected ScriptTemplate scriptWithGrouping(GroupingFunction grouping) {
String template = PARAM;
return new ScriptTemplate(processScript(template), paramsBuilder().grouping(grouping).build(), dataType());
}
protected ScriptTemplate scriptWithField(FieldAttribute field) {
Params params = paramsBuilder().variable(field.exactAttribute().name()).build();
// unsigned_long fields get returned in scripts as plain longs, so a conversion is required
return field.dataType() != UNSIGNED_LONG
? new ScriptTemplate(processScript(Scripts.DOC_VALUE), params, dataType())
: new ScriptTemplate(
processScript(format("{ql}.", "nullSafeCastToUnsignedLong({})", Scripts.DOC_VALUE)),
params,
UNSIGNED_LONG
);
}
protected String processScript(String script) {
return formatTemplate(script);
}
protected String formatTemplate(String template) {
return Scripts.formatTemplate(template);
}
}
| ScalarFunction |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java | {
"start": 90309,
"end": 90663
} | class ____
*/
@Private
public String getAutoCreatedQueueManagementPolicy(QueuePath queue) {
String autoCreatedQueueManagementPolicy =
get(getQueuePrefix(queue) + AUTO_CREATED_QUEUE_MANAGEMENT_POLICY,
DEFAULT_AUTO_CREATED_QUEUE_MANAGEMENT_POLICY);
return autoCreatedQueueManagementPolicy;
}
/**
* Get The policy | name |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongLookup.java | {
"start": 693,
"end": 3541
} | class ____ implements ReleasableIterator<LongBlock> {
private final LongBlock values;
private final IntBlock positions;
private final long targetByteSize;
private int position;
private long first;
private int valuesInPosition;
LongLookup(LongBlock values, IntBlock positions, ByteSizeValue targetBlockSize) {
values.incRef();
positions.incRef();
this.values = values;
this.positions = positions;
this.targetByteSize = targetBlockSize.getBytes();
}
@Override
public boolean hasNext() {
return position < positions.getPositionCount();
}
@Override
public LongBlock next() {
try (LongBlock.Builder builder = positions.blockFactory().newLongBlockBuilder(positions.getTotalValueCount())) {
int count = 0;
while (position < positions.getPositionCount()) {
int start = positions.getFirstValueIndex(position);
int end = start + positions.getValueCount(position);
valuesInPosition = 0;
for (int i = start; i < end; i++) {
copy(builder, positions.getInt(i));
}
switch (valuesInPosition) {
case 0 -> builder.appendNull();
case 1 -> builder.appendLong(first);
default -> builder.endPositionEntry();
}
position++;
// TOOD what if the estimate is super huge? should we break even with less than MIN_TARGET?
if (++count > Operator.MIN_TARGET_PAGE_SIZE && builder.estimatedBytes() < targetByteSize) {
break;
}
}
return builder.build();
}
}
private void copy(LongBlock.Builder builder, int valuePosition) {
if (valuePosition >= values.getPositionCount()) {
return;
}
int start = values.getFirstValueIndex(valuePosition);
int end = start + values.getValueCount(valuePosition);
for (int i = start; i < end; i++) {
if (valuesInPosition == 0) {
first = values.getLong(i);
valuesInPosition++;
continue;
}
if (valuesInPosition == 1) {
builder.beginPositionEntry();
builder.appendLong(first);
}
if (valuesInPosition > Block.MAX_LOOKUP) {
// TODO replace this with a warning and break
throw new IllegalArgumentException("Found a single entry with " + valuesInPosition + " entries");
}
builder.appendLong(values.getLong(i));
valuesInPosition++;
}
}
@Override
public void close() {
Releasables.close(values, positions);
}
}
| LongLookup |
java | quarkusio__quarkus | extensions/datasource/deployment-spi/src/main/java/io/quarkus/datasource/deployment/spi/DefaultDataSourceDbKindBuildItem.java | {
"start": 637,
"end": 4031
} | class ____ extends MultiBuildItem {
public static final String TEST = "test";
private final String dbKind;
private final Class<?> callerClass;
private volatile String scope;
public DefaultDataSourceDbKindBuildItem(String dbKind) {
this.dbKind = dbKind;
String callerClassName = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE).getCallerClass()
.getCanonicalName();
try {
callerClass = Thread.currentThread().getContextClassLoader().loadClass(callerClassName);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
public String getDbKind() {
return dbKind;
}
public String getScope(CurateOutcomeBuildItem curateOutcomeBuildItem) {
if (scope == null) {
Map.Entry<String, String> artifact = ArtifactInfoUtil.groupIdAndArtifactId(callerClass, curateOutcomeBuildItem);
for (ResolvedDependency i : curateOutcomeBuildItem.getApplicationModel().getDependencies()) {
if (i.getArtifactId().equals(artifact.getValue())
&& i.getGroupId().equals(artifact.getKey())) {
scope = i.getScope();
break;
}
}
if (scope == null) {
throw new RuntimeException("Could not determine scope for " + dbKind);
}
}
return scope;
}
public static Optional<String> resolve(Optional<String> configured,
List<DefaultDataSourceDbKindBuildItem> defaultDbKinds,
boolean enableImplicitResolution,
CurateOutcomeBuildItem curateOutcomeBuildItem) {
if (configured.isPresent()) {
return Optional.of(DatabaseKind.normalize(configured.get()));
}
if (!enableImplicitResolution) {
return Optional.empty();
}
return resolveImplicitDbKind(defaultDbKinds, curateOutcomeBuildItem);
}
/**
* Attempts to resolve the implicit DB kind for the case where none has been specified.
*/
private static Optional<String> resolveImplicitDbKind(List<DefaultDataSourceDbKindBuildItem> defaultDbKinds,
CurateOutcomeBuildItem curateOutcomeBuildItem) {
if (defaultDbKinds.isEmpty()) {
return Optional.empty();
} else if (defaultDbKinds.stream().map(DefaultDataSourceDbKindBuildItem::getDbKind).distinct().count() == 1) {
return Optional.of(defaultDbKinds.get(0).dbKind);
} else {
//if we have one and only one test scoped driver we assume it is the default
//if is common to use a different DB such as H2 in tests
DefaultDataSourceDbKindBuildItem testScopedDriver = null;
for (DefaultDataSourceDbKindBuildItem i : defaultDbKinds) {
if (i.getScope(curateOutcomeBuildItem).equals(TEST)) {
if (testScopedDriver == null) {
testScopedDriver = i;
} else {
return Optional.empty();
}
}
}
if (testScopedDriver == null) {
return Optional.empty();
} else {
return Optional.of(testScopedDriver.dbKind);
}
}
}
}
| DefaultDataSourceDbKindBuildItem |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/forwardgroup/StreamNodeForwardGroup.java | {
"start": 1389,
"end": 6518
} | class ____ implements ForwardGroup<Integer> {
private int parallelism = ExecutionConfig.PARALLELISM_DEFAULT;
private int maxParallelism = JobVertex.MAX_PARALLELISM_DEFAULT;
private final Set<StreamNode> streamNodes = new HashSet<>();
// For a group of chained stream nodes, their parallelism is consistent. In order to make
// calculation and usage easier, we only use the start node to calculate forward group.
public StreamNodeForwardGroup(final Set<StreamNode> streamNodes) {
checkNotNull(streamNodes);
Set<Integer> configuredParallelisms =
streamNodes.stream()
.map(StreamNode::getParallelism)
.filter(v -> v > 0)
.collect(Collectors.toSet());
checkState(configuredParallelisms.size() <= 1);
if (configuredParallelisms.size() == 1) {
this.parallelism = configuredParallelisms.iterator().next();
}
Set<Integer> configuredMaxParallelisms =
streamNodes.stream()
.map(StreamNode::getMaxParallelism)
.filter(val -> val > 0)
.collect(Collectors.toSet());
if (!configuredMaxParallelisms.isEmpty()) {
this.maxParallelism = Collections.min(configuredMaxParallelisms);
checkState(
parallelism == ExecutionConfig.PARALLELISM_DEFAULT
|| maxParallelism >= parallelism,
"There is a start node in the forward group whose maximum parallelism is smaller than the group's parallelism");
}
this.streamNodes.addAll(streamNodes);
}
@Override
public void setParallelism(int parallelism) {
checkState(this.parallelism == ExecutionConfig.PARALLELISM_DEFAULT);
this.parallelism = parallelism;
this.streamNodes.forEach(
streamNode -> {
streamNode.setParallelism(parallelism);
});
}
@Override
public boolean isParallelismDecided() {
return parallelism > 0;
}
@Override
public int getParallelism() {
checkState(isParallelismDecided());
return parallelism;
}
@Override
public void setMaxParallelism(int maxParallelism) {
checkState(
maxParallelism == ExecutionConfig.PARALLELISM_DEFAULT
|| maxParallelism >= parallelism,
"There is a job vertex in the forward group whose maximum parallelism is smaller than the group's parallelism");
this.maxParallelism = maxParallelism;
this.streamNodes.forEach(
streamNode -> {
streamNode.setMaxParallelism(maxParallelism);
});
}
@Override
public boolean isMaxParallelismDecided() {
return maxParallelism > 0;
}
@Override
public int getMaxParallelism() {
checkState(isMaxParallelismDecided());
return maxParallelism;
}
@Override
public Set<Integer> getVertexIds() {
return streamNodes.stream().map(StreamNode::getId).collect(Collectors.toSet());
}
/**
* Merges forwardGroupToMerge into this and update the parallelism information for stream nodes
* in merged forward group.
*
* @param forwardGroupToMerge The forward group to be merged.
* @return whether the merge was successful.
*/
public boolean mergeForwardGroup(final StreamNodeForwardGroup forwardGroupToMerge) {
checkNotNull(forwardGroupToMerge);
if (forwardGroupToMerge == this) {
return true;
}
if (!ForwardGroupComputeUtil.canTargetMergeIntoSourceForwardGroup(
this, forwardGroupToMerge)) {
return false;
}
if (this.isParallelismDecided() && !forwardGroupToMerge.isParallelismDecided()) {
forwardGroupToMerge.setParallelism(this.parallelism);
} else if (!this.isParallelismDecided() && forwardGroupToMerge.isParallelismDecided()) {
this.setParallelism(forwardGroupToMerge.parallelism);
} else {
checkState(this.parallelism == forwardGroupToMerge.parallelism);
}
if (forwardGroupToMerge.isMaxParallelismDecided()
&& (!this.isMaxParallelismDecided()
|| this.maxParallelism > forwardGroupToMerge.maxParallelism)) {
this.setMaxParallelism(forwardGroupToMerge.maxParallelism);
} else if (this.isMaxParallelismDecided()
&& (!forwardGroupToMerge.isMaxParallelismDecided()
|| forwardGroupToMerge.maxParallelism > this.maxParallelism)) {
forwardGroupToMerge.setMaxParallelism(this.maxParallelism);
} else {
checkState(this.maxParallelism == forwardGroupToMerge.maxParallelism);
}
this.streamNodes.addAll(forwardGroupToMerge.streamNodes);
return true;
}
@VisibleForTesting
public int size() {
return streamNodes.size();
}
}
| StreamNodeForwardGroup |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/MapKeyDeserialization3143Test.java | {
"start": 590,
"end": 662
} | class ____
{
// [databind#3143]
static | MapKeyDeserialization3143Test |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtilTests.java | {
"start": 1256,
"end": 5860
} | class ____ extends ESTestCase {
private final ClusterService clusterService = mock(ClusterService.class);
private final ClusterState clusterState = mock(ClusterState.class);
private final Metadata metadata = mock(Metadata.class);
private final String clusterUuid = randomAlphaOfLength(16);
@Before
public void setup() {
when(clusterService.state()).thenReturn(clusterState);
when(clusterState.metadata()).thenReturn(metadata);
when(metadata.clusterUUID()).thenReturn(clusterUuid);
}
public void testWatchIdsAreAllUnique() {
final List<String> watchIds = Arrays.asList(ClusterAlertsUtil.WATCH_IDS);
assertThat(watchIds, hasSize(new HashSet<>(watchIds).size()));
}
public void testCreateUniqueWatchId() {
final String watchId = randomFrom(ClusterAlertsUtil.WATCH_IDS);
final String uniqueWatchId = ClusterAlertsUtil.createUniqueWatchId(clusterService, watchId);
assertThat(uniqueWatchId, equalTo(clusterUuid + "_" + watchId));
}
public void testLoadWatch() {
for (final String watchId : ClusterAlertsUtil.WATCH_IDS) {
final String watch = ClusterAlertsUtil.loadWatch(clusterService, watchId);
assertThat(watch, notNullValue());
assertThat(watch, containsString(clusterUuid));
assertThat(watch, containsString(watchId));
assertThat(watch, containsString(String.valueOf(ClusterAlertsUtil.LAST_UPDATED_VERSION)));
if ("elasticsearch_nodes".equals(watchId) == false) {
assertThat(watch, containsString(clusterUuid + "_" + watchId));
}
// validate that it's well formed JSON
assertThat(XContentHelper.convertToMap(XContentType.JSON.xContent(), watch, false), notNullValue());
}
}
public void testLoadWatchFails() {
expectThrows(RuntimeException.class, () -> ClusterAlertsUtil.loadWatch(clusterService, "watch-does-not-exist"));
}
public void testGetClusterAlertsBlacklistThrowsForUnknownWatchId() {
final List<String> watchIds = Arrays.asList(ClusterAlertsUtil.WATCH_IDS);
final List<String> blacklist = randomSubsetOf(watchIds);
blacklist.add("fake1");
if (randomBoolean()) {
blacklist.add("fake2");
if (rarely()) {
blacklist.add("fake3");
}
}
final Set<String> unknownIds = blacklist.stream().filter(id -> watchIds.contains(id) == false).collect(Collectors.toSet());
final String unknownIdsString = String.join(", ", unknownIds);
final SettingsException exception = expectThrows(
SettingsException.class,
() -> ClusterAlertsUtil.getClusterAlertsBlacklist(createConfigWithBlacklist("_random", blacklist))
);
assertThat(
exception.getMessage(),
equalTo(
"[xpack.monitoring.exporters._random.cluster_alerts.management.blacklist] contains unrecognized Cluster "
+ "Alert IDs ["
+ unknownIdsString
+ "]"
)
);
assertWarnings(
"[xpack.monitoring.exporters._random.cluster_alerts.management.blacklist] setting was deprecated in Elasticsearch "
+ "and will be removed in a future release. See the deprecation documentation for the next major version."
);
}
public void testGetClusterAlertsBlacklist() {
final List<String> blacklist = randomSubsetOf(Arrays.asList(ClusterAlertsUtil.WATCH_IDS));
assertThat(blacklist, equalTo(ClusterAlertsUtil.getClusterAlertsBlacklist(createConfigWithBlacklist("any", blacklist))));
assertWarnings(
"[xpack.monitoring.exporters.any.cluster_alerts.management.blacklist] setting was deprecated in Elasticsearch "
+ "and will be removed in a future release. See the deprecation documentation for the next major version."
);
}
private Exporter.Config createConfigWithBlacklist(final String name, final List<String> blacklist) {
final Settings settings = Settings.builder()
.putList("xpack.monitoring.exporters." + name + ".cluster_alerts.management.blacklist", blacklist)
.build();
final ClusterService mockClusterService = mock(ClusterService.class);
final XPackLicenseState licenseState = mock(XPackLicenseState.class);
return new Exporter.Config(name, "local", settings, mockClusterService, licenseState);
}
}
| ClusterAlertsUtilTests |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/projection/CollectionAwareProjectionFactory.java | {
"start": 1378,
"end": 1866
} | class ____ extends SpelAwareProjectionInformation {
CollectionAwareProjectionInformation(Class<?> projectionType) {
super(projectionType);
}
@Override
protected boolean isInputProperty(PropertyDescriptor descriptor) {
if (!super.isInputProperty(descriptor)) {
return false;
}
return !(Collection.class.isAssignableFrom(descriptor.getPropertyType()) //
|| Map.class.isAssignableFrom(descriptor.getPropertyType()));
}
}
}
| CollectionAwareProjectionInformation |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java | {
"start": 1215,
"end": 1894
} | class ____ extends MutableCounter {
private final LongAdder value = new LongAdder();
public MutableCounterLong(MetricsInfo info, long initValue) {
super(info);
this.value.add(initValue);
}
@Override
public void incr() {
incr(1);
}
/**
* Increment the value by a delta
* @param delta of the increment
*/
public void incr(long delta) {
value.add(delta);
setChanged();
}
public long value() {
return value.longValue();
}
@Override
public void snapshot(MetricsRecordBuilder builder, boolean all) {
if (all || changed()) {
builder.addCounter(info(), value());
clearChanged();
}
}
}
| MutableCounterLong |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/generics/ErroneousSourceTargetMapper6.java | {
"start": 352,
"end": 832
} | interface ____ {
ErroneousSourceTargetMapper6 INSTANCE = Mappers.getMapper( ErroneousSourceTargetMapper6.class );
ErroneousTarget6 sourceToTarget(ErroneousSource6 source);
// We are testing that we can't create the nested
// not whether or not we can instantiate the WildCardSuperWrapper
@ObjectFactory
default <T> WildCardSuperWrapper<T> createWildCardSuperWrapper() {
return new WildCardSuperWrapper<>( null );
}
}
| ErroneousSourceTargetMapper6 |
java | quarkusio__quarkus | extensions/spring-boot-properties/runtime/src/main/java/io/quarkus/spring/boot/properties/runtime/SpringBootConfigProperties.java | {
"start": 439,
"end": 747
} | class ____ extends AnnotationLiteral<SpringBootConfigProperties> implements SpringBootConfigProperties {
// used im generated code
@SuppressWarnings("unused")
public static final Literal INSTANCE = new Literal();
private static final long serialVersionUID = 1L;
}
}
| Literal |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/test/java/io/github/resilience4j/bulkhead/autoconfigure/BulkheadConfigurationOnMissingBeanTest.java | {
"start": 2143,
"end": 3265
} | class ____ {
@Autowired
private ConfigWithOverrides configWithOverrides;
@Autowired
private BulkheadRegistry bulkheadRegistry;
@Autowired
private BulkheadAspect bulkheadAspect;
@Autowired
private EventConsumerRegistry<BulkheadEvent> bulkheadEventEventConsumerRegistry;
@Test
public void testAllBeansFromBulkHeadHasOnMissingBean() throws NoSuchMethodException {
final Class<BulkheadConfiguration> originalClass = BulkheadConfiguration.class;
final Class<BulkheadConfigurationOnMissingBean> onMissingBeanClass = BulkheadConfigurationOnMissingBean.class;
TestUtils.assertAnnotations(originalClass, onMissingBeanClass);
}
@Test
public void testAllBulkHeadConfigurationBeansOverridden() {
assertEquals(bulkheadRegistry, configWithOverrides.bulkheadRegistry);
assertEquals(bulkheadAspect, configWithOverrides.bulkheadAspect);
assertEquals(bulkheadEventEventConsumerRegistry,
configWithOverrides.bulkheadEventEventConsumerRegistry);
}
@Configuration
public static | BulkheadConfigurationOnMissingBeanTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/validator/ValidatorContractTest.java | {
"start": 1207,
"end": 3728
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testInputTypeOnly() throws Exception {
context.getTypeConverterRegistry().addTypeConverters(new MyTypeConverters());
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
validator().type(A.class).withUri("direct:validator");
from("direct:a").inputTypeWithValidate(A.class).to("mock:a");
from("direct:validator").to("mock:validator");
}
});
context.start();
MockEndpoint mocka = context.getEndpoint("mock:a", MockEndpoint.class);
MockEndpoint mockv = context.getEndpoint("mock:validator", MockEndpoint.class);
mocka.setExpectedCount(1);
mockv.setExpectedCount(1);
Object answer = template.requestBody("direct:a", "foo");
mocka.assertIsSatisfied();
mockv.assertIsSatisfied();
Exchange exa = mocka.getExchanges().get(0);
assertEquals(A.class, exa.getIn().getBody().getClass());
Exchange exv = mockv.getExchanges().get(0);
assertEquals(A.class, exv.getIn().getBody().getClass());
assertEquals(A.class, answer.getClass());
}
@Test
public void testOutputTypeOnly() throws Exception {
context.getTypeConverterRegistry().addTypeConverters(new MyTypeConverters());
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
validator().type(A.class).withUri("direct:validator");
from("direct:a").outputTypeWithValidate(A.class).to("mock:a");
from("direct:validator").to("mock:validator");
}
});
context.start();
MockEndpoint mocka = context.getEndpoint("mock:a", MockEndpoint.class);
MockEndpoint mockv = context.getEndpoint("mock:validator", MockEndpoint.class);
mocka.setExpectedCount(1);
mockv.setExpectedCount(1);
Object answer = template.requestBody("direct:a", "foo");
mocka.assertIsSatisfied();
mockv.assertIsSatisfied();
Exchange exa = mocka.getExchanges().get(0);
assertEquals("foo", exa.getIn().getBody());
Exchange exv = mockv.getExchanges().get(0);
assertEquals(A.class, exv.getIn().getBody().getClass());
assertEquals(A.class, answer.getClass());
}
public static | ValidatorContractTest |
java | quarkusio__quarkus | devtools/maven/src/test/java/io/quarkus/maven/DevDependencyTreeMojoTest.java | {
"start": 34,
"end": 178
} | class ____ extends BasicDependencyTreeTestBase {
@Override
protected String mode() {
return "dev";
}
}
| DevDependencyTreeMojoTest |
java | apache__camel | core/camel-main/src/generated/java/org/apache/camel/main/HttpServerConfigurationPropertiesConfigurer.java | {
"start": 711,
"end": 9050
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("AuthenticationEnabled", boolean.class);
map.put("AuthenticationPath", java.lang.String.class);
map.put("AuthenticationRealm", java.lang.String.class);
map.put("BasicPropertiesFile", java.lang.String.class);
map.put("Enabled", boolean.class);
map.put("FileUploadDirectory", java.lang.String.class);
map.put("FileUploadEnabled", boolean.class);
map.put("Host", java.lang.String.class);
map.put("JwtKeystorePassword", java.lang.String.class);
map.put("JwtKeystorePath", java.lang.String.class);
map.put("JwtKeystoreType", java.lang.String.class);
map.put("MaxBodySize", java.lang.Long.class);
map.put("Path", java.lang.String.class);
map.put("Port", int.class);
map.put("StaticContextPath", java.lang.String.class);
map.put("StaticEnabled", boolean.class);
map.put("StaticSourceDir", java.lang.String.class);
map.put("UseGlobalSslContextParameters", boolean.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.main.HttpServerConfigurationProperties target = (org.apache.camel.main.HttpServerConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "authenticationenabled":
case "authenticationEnabled": target.setAuthenticationEnabled(property(camelContext, boolean.class, value)); return true;
case "authenticationpath":
case "authenticationPath": target.setAuthenticationPath(property(camelContext, java.lang.String.class, value)); return true;
case "authenticationrealm":
case "authenticationRealm": target.setAuthenticationRealm(property(camelContext, java.lang.String.class, value)); return true;
case "basicpropertiesfile":
case "basicPropertiesFile": target.setBasicPropertiesFile(property(camelContext, java.lang.String.class, value)); return true;
case "enabled": target.setEnabled(property(camelContext, boolean.class, value)); return true;
case "fileuploaddirectory":
case "fileUploadDirectory": target.setFileUploadDirectory(property(camelContext, java.lang.String.class, value)); return true;
case "fileuploadenabled":
case "fileUploadEnabled": target.setFileUploadEnabled(property(camelContext, boolean.class, value)); return true;
case "host": target.setHost(property(camelContext, java.lang.String.class, value)); return true;
case "jwtkeystorepassword":
case "jwtKeystorePassword": target.setJwtKeystorePassword(property(camelContext, java.lang.String.class, value)); return true;
case "jwtkeystorepath":
case "jwtKeystorePath": target.setJwtKeystorePath(property(camelContext, java.lang.String.class, value)); return true;
case "jwtkeystoretype":
case "jwtKeystoreType": target.setJwtKeystoreType(property(camelContext, java.lang.String.class, value)); return true;
case "maxbodysize":
case "maxBodySize": target.setMaxBodySize(property(camelContext, java.lang.Long.class, value)); return true;
case "path": target.setPath(property(camelContext, java.lang.String.class, value)); return true;
case "port": target.setPort(property(camelContext, int.class, value)); return true;
case "staticcontextpath":
case "staticContextPath": target.setStaticContextPath(property(camelContext, java.lang.String.class, value)); return true;
case "staticenabled":
case "staticEnabled": target.setStaticEnabled(property(camelContext, boolean.class, value)); return true;
case "staticsourcedir":
case "staticSourceDir": target.setStaticSourceDir(property(camelContext, java.lang.String.class, value)); return true;
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": target.setUseGlobalSslContextParameters(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "authenticationenabled":
case "authenticationEnabled": return boolean.class;
case "authenticationpath":
case "authenticationPath": return java.lang.String.class;
case "authenticationrealm":
case "authenticationRealm": return java.lang.String.class;
case "basicpropertiesfile":
case "basicPropertiesFile": return java.lang.String.class;
case "enabled": return boolean.class;
case "fileuploaddirectory":
case "fileUploadDirectory": return java.lang.String.class;
case "fileuploadenabled":
case "fileUploadEnabled": return boolean.class;
case "host": return java.lang.String.class;
case "jwtkeystorepassword":
case "jwtKeystorePassword": return java.lang.String.class;
case "jwtkeystorepath":
case "jwtKeystorePath": return java.lang.String.class;
case "jwtkeystoretype":
case "jwtKeystoreType": return java.lang.String.class;
case "maxbodysize":
case "maxBodySize": return java.lang.Long.class;
case "path": return java.lang.String.class;
case "port": return int.class;
case "staticcontextpath":
case "staticContextPath": return java.lang.String.class;
case "staticenabled":
case "staticEnabled": return boolean.class;
case "staticsourcedir":
case "staticSourceDir": return java.lang.String.class;
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.main.HttpServerConfigurationProperties target = (org.apache.camel.main.HttpServerConfigurationProperties) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "authenticationenabled":
case "authenticationEnabled": return target.isAuthenticationEnabled();
case "authenticationpath":
case "authenticationPath": return target.getAuthenticationPath();
case "authenticationrealm":
case "authenticationRealm": return target.getAuthenticationRealm();
case "basicpropertiesfile":
case "basicPropertiesFile": return target.getBasicPropertiesFile();
case "enabled": return target.isEnabled();
case "fileuploaddirectory":
case "fileUploadDirectory": return target.getFileUploadDirectory();
case "fileuploadenabled":
case "fileUploadEnabled": return target.isFileUploadEnabled();
case "host": return target.getHost();
case "jwtkeystorepassword":
case "jwtKeystorePassword": return target.getJwtKeystorePassword();
case "jwtkeystorepath":
case "jwtKeystorePath": return target.getJwtKeystorePath();
case "jwtkeystoretype":
case "jwtKeystoreType": return target.getJwtKeystoreType();
case "maxbodysize":
case "maxBodySize": return target.getMaxBodySize();
case "path": return target.getPath();
case "port": return target.getPort();
case "staticcontextpath":
case "staticContextPath": return target.getStaticContextPath();
case "staticenabled":
case "staticEnabled": return target.isStaticEnabled();
case "staticsourcedir":
case "staticSourceDir": return target.getStaticSourceDir();
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": return target.isUseGlobalSslContextParameters();
default: return null;
}
}
}
| HttpServerConfigurationPropertiesConfigurer |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentFunction.java | {
"start": 642,
"end": 1457
} | class ____<T extends Temporal> extends SqlConfigurationFunction {
private final T current;
CurrentFunction(Source source, Configuration configuration, T current, DataType dataType) {
super(source, configuration, dataType);
this.current = current;
}
@Override
public Object fold() {
return current;
}
@Override
public int hashCode() {
return Objects.hash(current);
}
@SuppressWarnings("rawtypes")
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
CurrentFunction other = (CurrentFunction) obj;
return Objects.equals(current, other.current);
}
}
| CurrentFunction |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/Created.java | {
"start": 1641,
"end": 2097
} | class ____ implements StateFactory<Created> {
private final Context context;
private final Logger log;
public Factory(Context context, Logger log) {
this.context = context;
this.log = log;
}
public Class<Created> getStateClass() {
return Created.class;
}
public Created getState() {
return new Created(this.context, this.log);
}
}
}
| Factory |
java | quarkusio__quarkus | integration-tests/grpc-external-proto-test/src/test/java/io/quarkus/grpc/external/proto/ExternalProtoTestBase.java | {
"start": 302,
"end": 717
} | class ____ {
@GrpcClient
MyTest hello;
@Test
void shouldWorkWithClientAndServiceFromExternalProto() {
Uni<TextContainer> reply = hello.doTest(TextContainer.newBuilder().setText("my-request").build());
String replyText = reply.await().atMost(Duration.ofSeconds(30))
.getText();
assertThat(replyText).isEqualTo("reply_to:my-request");
}
}
| ExternalProtoTestBase |
java | quarkusio__quarkus | independent-projects/qute/core/src/main/java/io/quarkus/qute/MultiResultNode.java | {
"start": 173,
"end": 552
} | class ____ extends ResultNode {
private final Supplier<ResultNode>[] results;
public MultiResultNode(Supplier<ResultNode>[] results) {
this.results = results;
}
@Override
public void process(Consumer<String> consumer) {
for (Supplier<ResultNode> result : results) {
result.get().process(consumer);
}
}
}
| MultiResultNode |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fixed/skipfields/BindySimpleSkipFieldsTest.java | {
"start": 1641,
"end": 4397
} | class ____ extends CamelTestSupport {
public static final String URI_DIRECT_MARSHALL = "direct:marshall";
public static final String URI_DIRECT_UNMARSHALL = "direct:unmarshall";
public static final String URI_MOCK_MARSHALL_RESULT = "mock:marshall-result";
public static final String URI_MOCK_UNMARSHALL_RESULT = "mock:unmarshall-result";
private static final String TEST_RECORD = "10A9 PaulineM ISINXD12345678BUYShare000002500.45USD01-08-2009Hello \r\n";
@EndpointInject(URI_MOCK_MARSHALL_RESULT)
private MockEndpoint marshallResult;
@EndpointInject(URI_MOCK_UNMARSHALL_RESULT)
private MockEndpoint unmarshallResult;
// *************************************************************************
// TESTS
// *************************************************************************
@Test
public void testUnmarshallMessage() throws Exception {
unmarshallResult.expectedMessageCount(1);
template.sendBody(URI_DIRECT_UNMARSHALL, TEST_RECORD);
unmarshallResult.assertIsSatisfied();
// check the model
BindySimpleSkipFieldsTest.Order order
= (BindySimpleSkipFieldsTest.Order) unmarshallResult.getReceivedExchanges().get(0).getIn().getBody();
assertEquals(10, order.getOrderNr());
// the field is not trimmed
assertNull(order.getFirstName());
assertEquals("M ", order.getLastName());
assertEquals("Hello ", order.getComment());
}
// *************************************************************************
// ROUTES
// *************************************************************************
@Override
protected RouteBuilder createRouteBuilder() {
RouteBuilder routeBuilder = new RouteBuilder() {
@Override
public void configure() {
BindyDataFormat bindy = new BindyDataFormat();
bindy.setClassType(BindySimpleSkipFieldsTest.Order.class);
bindy.setLocale("en");
bindy.type(BindyType.Fixed);
from(URI_DIRECT_MARSHALL)
.marshal(bindy)
.to(URI_MOCK_MARSHALL_RESULT);
from(URI_DIRECT_UNMARSHALL)
.unmarshal().bindy(BindyType.Fixed, BindySimpleSkipFieldsTest.Order.class)
.to(URI_MOCK_UNMARSHALL_RESULT);
}
};
return routeBuilder;
}
// *************************************************************************
// DATA MODEL
// *************************************************************************
@FixedLengthRecord(ignoreTrailingChars = false)
public static | BindySimpleSkipFieldsTest |
java | apache__flink | flink-end-to-end-tests/flink-datastream-allround-test/src/main/java/org/apache/flink/streaming/tests/FailureMapper.java | {
"start": 1458,
"end": 3123
} | class ____<T> extends RichMapFunction<T, T> implements CheckpointListener {
private static final long serialVersionUID = -5286927943454740016L;
private final long numProcessedRecordsFailureThreshold;
private final long numCompleteCheckpointsFailureThreshold;
private final int maxNumFailures;
private long numProcessedRecords;
private long numCompleteCheckpoints;
public FailureMapper(
long numProcessedRecordsFailureThreshold,
long numCompleteCheckpointsFailureThreshold,
int maxNumFailures) {
this.numProcessedRecordsFailureThreshold = numProcessedRecordsFailureThreshold;
this.numCompleteCheckpointsFailureThreshold = numCompleteCheckpointsFailureThreshold;
this.maxNumFailures = maxNumFailures;
}
@Override
public T map(T value) throws Exception {
numProcessedRecords++;
if (isReachedFailureThreshold()) {
throw new Exception("Artificial failure.");
}
return value;
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
numCompleteCheckpoints++;
if (isReachedFailureThreshold()) {
throw new Exception("Artificial failure.");
}
}
@Override
public void notifyCheckpointAborted(long checkpointId) {}
private boolean isReachedFailureThreshold() {
return numProcessedRecords >= numProcessedRecordsFailureThreshold
&& numCompleteCheckpoints >= numCompleteCheckpointsFailureThreshold
&& getRuntimeContext().getTaskInfo().getAttemptNumber() < maxNumFailures;
}
}
| FailureMapper |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/byte2darray/Byte2DArrayAssert_hasDimensions_Test.java | {
"start": 1010,
"end": 1355
} | class ____ extends Byte2DArrayAssertBaseTest {
@Override
protected Byte2DArrayAssert invoke_api_method() {
return assertions.hasDimensions(1, 2);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHasDimensions(getInfo(assertions), getActual(assertions), 1, 2);
}
}
| Byte2DArrayAssert_hasDimensions_Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/TemporalParameterPlusDurationTest.java | {
"start": 664,
"end": 2490
} | class ____ {
@Test
void timestampVsTimestampParameterPlusDuration(SessionFactoryScope scope) {
scope.inSession( session -> {
session.createQuery( "from SimpleEntity where inst > :i + 1 second + 2 second", SimpleEntity.class )
.setParameter( "i", Instant.now() )
.getResultList();
} );
}
@Test
void timestampParameterPlusDurationVsTimestamp(SessionFactoryScope scope) {
scope.inSession( session -> {
session.createQuery( "from SimpleEntity where :i + 1 second + 2 second > inst", SimpleEntity.class )
.setParameter( "i", Instant.now() )
.getResultList();
} );
}
@Test
void dateVsDateParameterPlusDuration(SessionFactoryScope scope) {
scope.inSession( session -> {
session.createQuery( "from SimpleEntity where ldate > :i + 3 day + 2 day", SimpleEntity.class )
.setParameter( "i", LocalDate.now() )
.getResultList();
} );
}
@Test
void dateParameterPlusDurationVsDate(SessionFactoryScope scope) {
scope.inSession( session -> {
session.createQuery( "from SimpleEntity where :i + 3 day + 2 day > ldate", SimpleEntity.class )
.setParameter( "i", LocalDate.now() )
.getResultList();
} );
}
@Test
void durationVsDurationParameterPlusDuration(SessionFactoryScope scope) {
scope.inSession( session -> {
session.createQuery( "from SimpleEntity where dur > :i + 1 second", SimpleEntity.class )
.setParameter( "i", Duration.ofMinutes( 1 ) )
.getResultList();
} );
}
@Test
void durationParameterVsDurationPlusDuration(SessionFactoryScope scope) {
scope.inSession( session -> {
session.createQuery( "from SimpleEntity where :i + 1 second > dur", SimpleEntity.class )
.setParameter( "i", Duration.ofMinutes( 1 ) )
.getResultList();
} );
}
@Entity(name = "SimpleEntity")
public static | TemporalParameterPlusDurationTest |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/data/util/DataFormatConverters.java | {
"start": 27071,
"end": 27523
} | class ____ extends IdentityConverter<MapData> {
private static final long serialVersionUID = -9114231688474126815L;
public static final MapDataConverter INSTANCE = new MapDataConverter();
private MapDataConverter() {}
@Override
MapData toExternalImpl(RowData row, int column) {
return row.getMap(column);
}
}
/** Converter for DecimalData. */
public static final | MapDataConverter |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/websocket/WebSocketMessageBrokerConfigTests.java | {
"start": 28609,
"end": 28701
} | class ____ {
MessageArgument(String notDefaultConstructor) {
}
}
static | MessageArgument |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/test/java/org/springframework/boot/webflux/autoconfigure/WebFluxAutoConfigurationTests.java | {
"start": 49504,
"end": 49924
} | class ____ {
private int handlerMappings;
@Bean
WebFluxRegistrations webFluxRegistrationsHandlerMapping() {
return new WebFluxRegistrations() {
@Override
public RequestMappingHandlerMapping getRequestMappingHandlerMapping() {
CustomRequestMappingHandlerMapping.this.handlerMappings++;
return new MyRequestMappingHandlerMapping();
}
};
}
}
static | CustomRequestMappingHandlerMapping |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/AbstractAccessExecutionGraphHandler.java | {
"start": 1894,
"end": 3076
} | class ____<
R extends ResponseBody, M extends JobMessageParameters>
extends AbstractExecutionGraphHandler<R, M> {
protected AbstractAccessExecutionGraphHandler(
GatewayRetriever<? extends RestfulGateway> leaderRetriever,
Duration timeout,
Map<String, String> responseHeaders,
MessageHeaders<EmptyRequestBody, R, M> messageHeaders,
ExecutionGraphCache executionGraphCache,
Executor executor) {
super(
leaderRetriever,
timeout,
responseHeaders,
messageHeaders,
executionGraphCache,
executor);
}
@Override
protected R handleRequest(
HandlerRequest<EmptyRequestBody> request, ExecutionGraphInfo executionGraphInfo)
throws RestHandlerException {
return handleRequest(request, executionGraphInfo.getArchivedExecutionGraph());
}
protected abstract R handleRequest(
HandlerRequest<EmptyRequestBody> request, AccessExecutionGraph executionGraph)
throws RestHandlerException;
}
| AbstractAccessExecutionGraphHandler |
java | apache__flink | flink-formats/flink-orc/src/main/java/org/apache/flink/orc/TimestampUtil.java | {
"start": 1250,
"end": 2395
} | class ____ {
private TimestampUtil() {}
private static final Logger LOG = LoggerFactory.getLogger(OrcLegacyTimestampColumnVector.class);
private static Class hiveTSColVectorClz = null;
static {
try {
hiveTSColVectorClz =
Class.forName("org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector");
} catch (ClassNotFoundException e) {
LOG.debug("Hive TimestampColumnVector not available", e);
}
}
// whether a ColumnVector is the new TimestampColumnVector
public static boolean isHiveTimestampColumnVector(ColumnVector vector) {
return hiveTSColVectorClz != null && hiveTSColVectorClz.isAssignableFrom(vector.getClass());
}
// creates a Hive ColumnVector of constant timestamp value
public static ColumnVector createVectorFromConstant(int batchSize, Object value) {
if (hiveTSColVectorClz != null) {
return OrcTimestampColumnVector.createFromConstant(batchSize, value);
} else {
return OrcLegacyTimestampColumnVector.createFromConstant(batchSize, value);
}
}
}
| TimestampUtil |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/tls/MtlsConfigFromRegistryCdiTest.java | {
"start": 883,
"end": 2846
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Client.class, Resource.class)
.addAsResource(new File("target/certs/mtls-test-keystore.p12"), "server-keystore.p12")
.addAsResource(new File("target/certs/mtls-test-server-truststore.p12"), "server-truststore.p12")
.addAsResource(new File("target/certs/mtls-test-client-keystore.p12"), "client-keystore.p12")
.addAsResource(new File("target/certs/mtls-test-client-truststore.p12"), "client-truststore.p12"))
.overrideConfigKey("quarkus.tls.server.key-store.p12.path", "server-keystore.p12")
.overrideConfigKey("quarkus.tls.server.key-store.p12.password", "secret")
.overrideConfigKey("quarkus.tls.server.trust-store.p12.path", "server-truststore.p12")
.overrideConfigKey("quarkus.tls.server.trust-store.p12.password", "secret")
.overrideConfigKey("quarkus.http.tls-configuration-name", "server")
.overrideConfigKey("quarkus.tls.rest-client.key-store.p12.path", "client-keystore.p12")
.overrideConfigKey("quarkus.tls.rest-client.key-store.p12.password", "secret")
.overrideConfigKey("quarkus.tls.rest-client.trust-store.p12.path", "client-truststore.p12")
.overrideConfigKey("quarkus.tls.rest-client.trust-store.p12.password", "secret")
.overrideConfigKey("quarkus.rest-client.rc.url", "https://localhost:${quarkus.http.test-ssl-port:8444}")
.overrideConfigKey("quarkus.rest-client.rc.tls-configuration-name", "rest-client");
@RestClient
Client client;
@Test
void shouldHello() {
assertThat(client.echo("w0rld")).isEqualTo("hello, w0rld");
}
@Path("/hello")
@RegisterRestClient(configKey = "rc")
public | MtlsConfigFromRegistryCdiTest |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/IntOrLongMatcher.java | {
"start": 1762,
"end": 2450
} | class ____ extends IntOrLongMatcher<Long> {
private final long expected;
LongMatcher(long expected) {
this.expected = expected;
}
@Override
public boolean matches(Object o) {
return switch (o) {
case Integer i -> expected == i;
case Long l -> expected == l;
default -> false;
};
}
@Override
public void describeTo(Description description) {
equalTo(expected).describeTo(description);
}
}
public static Matcher<Object> isIntOrLong() {
return anyOf(isA(Integer.class), isA(Long.class));
}
}
| LongMatcher |
java | apache__spark | common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlocksRemoved.java | {
"start": 1119,
"end": 2059
} | class ____ extends BlockTransferMessage {
public final int numRemovedBlocks;
public BlocksRemoved(int numRemovedBlocks) {
this.numRemovedBlocks = numRemovedBlocks;
}
@Override
protected Type type() { return Type.BLOCKS_REMOVED; }
@Override
public int hashCode() {
return Objects.hashCode(numRemovedBlocks);
}
@Override
public String toString() {
return "BlocksRemoved[numRemovedBlocks=" + numRemovedBlocks + "]";
}
@Override
public boolean equals(Object other) {
if (other instanceof BlocksRemoved o) {
return numRemovedBlocks == o.numRemovedBlocks;
}
return false;
}
@Override
public int encodedLength() {
return 4;
}
@Override
public void encode(ByteBuf buf) {
buf.writeInt(numRemovedBlocks);
}
public static BlocksRemoved decode(ByteBuf buf) {
int numRemovedBlocks = buf.readInt();
return new BlocksRemoved(numRemovedBlocks);
}
}
| BlocksRemoved |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/StateInitializationContext.java | {
"start": 908,
"end": 1570
} | interface ____ a context in which operators can initialize by registering to managed
* state (i.e. state that is managed by state backends) or iterating over streams of state
* partitions written as raw state in a previous snapshot.
*
* <p>Similar to the managed state from {@link ManagedInitializationContext} and in general, raw
* operator state is available to all operators, while raw keyed state is only available for
* operators after keyBy.
*
* <p>For the purpose of initialization, the context signals if all state is empty (new operator) or
* if any state was restored from a previous execution of this operator.
*/
@PublicEvolving
public | provides |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/codec/multipart/DefaultParts.java | {
"start": 3688,
"end": 4228
} | class ____ implements Part {
private final HttpHeaders headers;
protected AbstractPart(HttpHeaders headers) {
Assert.notNull(headers, "HttpHeaders is required");
this.headers = headers;
}
@Override
public String name() {
String name = headers().getContentDisposition().getName();
Assert.state(name != null, "No name available");
return name;
}
@Override
public HttpHeaders headers() {
return this.headers;
}
}
/**
* Default implementation of {@link FormFieldPart}.
*/
private static | AbstractPart |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java | {
"start": 18338,
"end": 24290
} | class ____ the fault for better retry logic
* @param exception outer exception
* @param innerCause inner cause (which is guaranteed to be some form
* of interrupted exception
* @param message message for the new exception.
* @return an IOE which can be rethrown
*/
private static InterruptedIOException translateInterruptedException(
SdkException exception,
final Exception innerCause,
String message) {
InterruptedIOException ioe;
if (innerCause instanceof SocketTimeoutException) {
ioe = new SocketTimeoutException(message);
} else {
String name = innerCause.getClass().getName();
if (name.endsWith(".ConnectTimeoutException")
|| name.endsWith(".ConnectionPoolTimeoutException")
|| name.endsWith("$ConnectTimeoutException")) {
// TODO: review in v2
// TCP connection http timeout from the shaded or unshaded filenames
// com.amazonaws.thirdparty.apache.http.conn.ConnectTimeoutException
ioe = new ConnectTimeoutException(message);
} else {
// any other exception
ioe = new InterruptedIOException(message);
}
}
ioe.initCause(exception);
return ioe;
}
/**
* Is the exception an instance of a throttling exception. That
* is an AmazonServiceException with a 503 response, an
* {@link AWSServiceThrottledException},
* or anything which the AWS SDK's RetryUtils considers to be
* a throttling exception.
* @param ex exception to examine
* @return true if it is considered a throttling exception
*/
public static boolean isThrottleException(Exception ex) {
return ex instanceof AWSServiceThrottledException
|| (ex instanceof AwsServiceException
&& 503 == ((AwsServiceException)ex).statusCode())
|| (ex instanceof SdkException
&& RetryUtils.isThrottlingException((SdkException) ex));
}
/**
* Cue that an AWS exception is likely to be an EOF Exception based
* on the message coming back from the client. This is likely to be
* brittle, so only a hint.
* @param ex exception
* @return true if this is believed to be a sign the connection was broken.
*/
public static boolean isMessageTranslatableToEOF(SdkException ex) {
// TODO: review in v2
return ex.toString().contains(EOF_MESSAGE_IN_XML_PARSER) ||
ex.toString().contains(EOF_READ_DIFFERENT_LENGTH);
}
/**
* Get low level details of an amazon exception for logging; multi-line.
* @param e exception
* @return string details
*/
public static String stringify(AwsServiceException e) {
StringBuilder builder = new StringBuilder(
String.format("%s error %d: %s; %s%s%n",
e.awsErrorDetails().serviceName(),
e.statusCode(),
e.awsErrorDetails().errorCode(),
e.awsErrorDetails().errorMessage(),
(e.retryable() ? " (retryable)": "")
));
String rawResponseContent = e.awsErrorDetails().rawResponse().asUtf8String();
if (rawResponseContent != null) {
builder.append(rawResponseContent);
}
return builder.toString();
}
/**
* Create a files status instance from a listing.
* @param keyPath path to entry
* @param s3Object s3Object entry
* @param blockSize block size to declare.
* @param owner owner of the file
* @param eTag S3 object eTag or null if unavailable
* @param versionId S3 object versionId or null if unavailable
* @param size s3 object size
* @return a status entry
*/
public static S3AFileStatus createFileStatus(Path keyPath,
S3Object s3Object,
long blockSize,
String owner,
String eTag,
String versionId,
long size) {
return createFileStatus(keyPath,
objectRepresentsDirectory(s3Object.key()),
size, Date.from(s3Object.lastModified()), blockSize, owner, eTag, versionId);
}
/**
* Create a file status for object we just uploaded. For files, we use
* current time as modification time, since s3a uses S3's service-based
* modification time, which will not be available until we do a
* getFileStatus() later on.
* @param keyPath path for created object
* @param isDir true iff directory
* @param size file length
* @param blockSize block size for file status
* @param owner Hadoop username
* @param eTag S3 object eTag or null if unavailable
* @param versionId S3 object versionId or null if unavailable
* @return a status entry
*/
public static S3AFileStatus createUploadFileStatus(Path keyPath,
boolean isDir, long size, long blockSize, String owner,
String eTag, String versionId) {
Date date = isDir ? null : new Date();
return createFileStatus(keyPath, isDir, size, date, blockSize, owner,
eTag, versionId);
}
/* Date 'modified' is ignored when isDir is true. */
private static S3AFileStatus createFileStatus(Path keyPath, boolean isDir,
long size, Date modified, long blockSize, String owner,
String eTag, String versionId) {
if (isDir) {
return new S3AFileStatus(Tristate.UNKNOWN, keyPath, owner);
} else {
return new S3AFileStatus(size, dateToLong(modified), keyPath, blockSize,
owner, eTag, versionId);
}
}
/**
* Predicate: does the object represent a directory?.
* @param name object name
* @return true if it meets the criteria for being an object
*/
public static boolean objectRepresentsDirectory(final String name) {
return !name.isEmpty()
&& name.charAt(name.length() - 1) == '/';
}
/**
* Date to long conversion.
* Handles null Dates that can be returned by AWS by returning 0
* @param date date from AWS query
* @return timestamp of the object
*/
public static long dateToLong(final Date date) {
if (date == null) {
return 0L;
}
return date.getTime();
}
/**
* Creates an instance of a | of |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/core/OpenSamlInitializationService.java | {
"start": 2922,
"end": 5063
} | class ____ {
private static final Log log = LogFactory.getLog(OpenSamlInitializationService.class);
private static final AtomicBoolean initialized = new AtomicBoolean(false);
private OpenSamlInitializationService() {
}
/**
* Ready OpenSAML for use and configure it with reasonable defaults.
*
* Initialization is guaranteed to happen only once per application. This method will
* passively return {@code false} if initialization already took place earlier in the
* application.
* @return whether or not initialization was performed. The first thread to initialize
* OpenSAML will return {@code true} while the rest will return {@code false}.
* @throws Saml2Exception if OpenSAML failed to initialize
*/
public static boolean initialize() {
return initialize((registry) -> {
});
}
/**
* Ready OpenSAML for use, configure it with reasonable defaults, and modify the
* {@link XMLObjectProviderRegistry} using the provided {@link Consumer}.
*
* Initialization is guaranteed to happen only once per application. This method will
* throw an exception if initialization already took place earlier in the application.
* @param registryConsumer the {@link Consumer} to further configure the
* {@link XMLObjectProviderRegistry}
* @throws Saml2Exception if initialization already happened previously or if OpenSAML
* failed to initialize
*/
public static void requireInitialize(Consumer<XMLObjectProviderRegistry> registryConsumer) {
if (!initialize(registryConsumer)) {
throw new Saml2Exception("OpenSAML was already initialized previously");
}
}
private static boolean initialize(Consumer<XMLObjectProviderRegistry> registryConsumer) {
if (initialized.compareAndSet(false, true)) {
log.trace("Initializing OpenSAML");
try {
InitializationService.initialize();
}
catch (Exception ex) {
throw new Saml2Exception(ex);
}
registryConsumer.accept(ConfigurationService.get(XMLObjectProviderRegistry.class));
log.debug("Initialized OpenSAML");
return true;
}
log.debug("Refused to re-initialize OpenSAML");
return false;
}
}
| OpenSamlInitializationService |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/AnyDiscriminatorImplicitValuesAnnotation.java | {
"start": 554,
"end": 2192
} | class ____ implements AnyDiscriminatorImplicitValues {
private AnyDiscriminatorImplicitValues.Strategy value;
private Class<? extends ImplicitDiscriminatorStrategy> implementation;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public AnyDiscriminatorImplicitValuesAnnotation(ModelsContext modelContext) {
this.value = Strategy.CUSTOM;
this.implementation = ImplicitDiscriminatorStrategy.class;
}
/**
* Used in creating annotation instances from JDK variant
*/
public AnyDiscriminatorImplicitValuesAnnotation(AnyDiscriminatorImplicitValues annotation, ModelsContext modelContext) {
this.value = annotation.value();
this.implementation = annotation.implementation();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public AnyDiscriminatorImplicitValuesAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (Strategy) attributeValues.get( "value" );
//noinspection unchecked
this.implementation = (Class<? extends ImplicitDiscriminatorStrategy>) attributeValues.get( "implementation" );
}
@Override
public Class<? extends Annotation> annotationType() {
return AnyDiscriminatorImplicitValues.class;
}
@Override
public Strategy value() {
return value;
}
public void value(Strategy value) {
this.value = value;
}
@Override
public Class<? extends ImplicitDiscriminatorStrategy> implementation() {
return implementation;
}
public void implementation(Class<? extends ImplicitDiscriminatorStrategy> implementation) {
this.implementation = implementation;
}
}
| AnyDiscriminatorImplicitValuesAnnotation |
java | grpc__grpc-java | android-interop-testing/src/generated/debug/grpc/io/grpc/testing/integration/LoadBalancerStatsServiceGrpc.java | {
"start": 9084,
"end": 9553
} | class ____
implements io.grpc.BindableService, AsyncService {
@java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
return LoadBalancerStatsServiceGrpc.bindService(this);
}
}
/**
* A stub to allow clients to do asynchronous rpc calls to service LoadBalancerStatsService.
* <pre>
* A service used to obtain stats for verifying LB behavior.
* </pre>
*/
public static final | LoadBalancerStatsServiceImplBase |
java | elastic__elasticsearch | libs/geo/src/main/java/org/elasticsearch/geometry/simplify/GeometrySimplifier.java | {
"start": 8271,
"end": 10250
} | class ____ extends GeometrySimplifier<Polygon> {
public PolygonSimplifier(int maxPoints, SimplificationErrorCalculator calculator) {
this(maxPoints, calculator, null);
}
public PolygonSimplifier(int maxPoints, SimplificationErrorCalculator calculator, StreamingGeometrySimplifier.Monitor monitor) {
super(
"Polygon",
maxPoints,
calculator,
monitor,
new StreamingGeometrySimplifier.PolygonSimplifier(maxPoints, calculator, monitor)
);
}
@Override
public Polygon simplify(Polygon geometry) {
reset();
notifyMonitorSimplificationStart();
try {
LinearRing ring = geometry.getPolygon();
if (ring.length() <= maxPoints) {
return geometry;
}
for (int i = 0; i < ring.length(); i++) {
innerSimplifier.consume(ring.getX(i), ring.getY(i));
}
ArrayList<LinearRing> holes = new ArrayList<>(geometry.getNumberOfHoles());
for (int i = 0; i < geometry.getNumberOfHoles(); i++) {
LinearRing hole = geometry.getHole(i);
double simplificationFactor = (double) maxPoints / ring.length();
int maxHolePoints = Math.max(4, (int) (simplificationFactor * hole.length()));
var holeSimplifier = new GeometrySimplifier.LinearRingSimplifier(maxHolePoints, calculator, this.monitor);
holeSimplifier.description = "Polygon.Hole";
holes.add(holeSimplifier.simplify(hole));
}
return new Polygon(StreamingGeometrySimplifier.produceLinearRing(innerSimplifier), holes);
} finally {
notifyMonitorSimplificationEnd();
}
}
}
/**
* This | PolygonSimplifier |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java | {
"start": 3781,
"end": 21129
} | interface ____ support netgroups)");
return;
}
LOG.info("Testing netgroups using: " + groupMappingClassName);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_GROUP_MAPPING,
groupMappingClassName);
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(NETGROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Groups groups = Groups.getUserToGroupsMappingService(conf);
// try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, groups.getGroups(PROXY_USER_NAME).toArray(
new String[groups.getGroups(PROXY_USER_NAME).size()]));
assertAuthorized(proxyUserUgi, PROXY_IP);
}
@Test
public void testProxyUsers() throws Exception {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a group that's not allowed
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
// From good IP
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
@Test
public void testProxyUsersWithUserConf() throws Exception {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserUserConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a user that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a user that's not allowed
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
@Test
public void testWildcardGroup() {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
"*");
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a different group (just to make sure we aren't getting spill over
// from the other test case!)
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
@Test
public void testWildcardUser() {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserUserConfKey(REAL_USER_NAME),
"*");
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a user that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a different user (just to make sure we aren't getting spill over
// from the other test case!)
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
@Test
public void testWildcardIP() {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
"*");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From either IP should be fine
assertAuthorized(proxyUserUgi, "1.2.3.4");
assertAuthorized(proxyUserUgi, "1.2.3.5");
// Now set up an unallowed group
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
// Neither IP should be OK
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
@Test
public void testIPRange() {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
"*");
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP_RANGE);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "10.222.0.0");
// From bad IP
assertNotAuthorized(proxyUserUgi, "10.221.0.0");
}
@Test
public void testNullUser() throws Exception {
assertThrows(IllegalArgumentException.class, () -> {
Configuration conf = new Configuration();
conf.set(DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME), "*");
conf.set(DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP_RANGE);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// user is null
ProxyUsers.authorize(null, "10.222.0.0");
});
}
@Test
public void testNullIpAddress() throws Exception {
assertThrows(IllegalArgumentException.class, () -> {
Configuration conf = new Configuration();
conf.set(DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME), "*");
conf.set(DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP_RANGE);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// remote address is null
ProxyUsers.authorize(proxyUserUgi, (InetAddress) null);
});
}
@Test
public void testWithDuplicateProxyGroups() throws Exception {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES,GROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Collection<String> groupsToBeProxied =
ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME));
assertEquals (1,groupsToBeProxied.size());
}
@Test
public void testWithDuplicateProxyHosts() throws Exception {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider()
.getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(PROXY_IP,PROXY_IP)));
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Collection<String> hosts =
ProxyUsers.getDefaultImpersonationProvider().getProxyHosts().get(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME));
assertEquals (1,hosts.size());
}
@Test
public void testProxyUsersWithProviderOverride() throws Exception {
Configuration conf = new Configuration();
conf.set(
CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
"org.apache.hadoop.security.authorize.TestProxyUsers$TestDummyImpersonationProvider");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createUserForTesting(REAL_USER_NAME, SUDO_GROUP_NAMES);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a group that's not allowed
realUserUgi = UserGroupInformation
.createUserForTesting(REAL_USER_NAME, GROUP_NAMES);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
@Test
public void testWithProxyGroupsAndUsersWithSpaces() throws Exception {
Configuration conf = new Configuration();
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserUserConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(PROXY_USER_NAME + " ",AUTHORIZED_PROXY_USER_NAME, "ONEMORE")));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Collection<String> groupsToBeProxied =
ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
DefaultImpersonationProvider.getTestProvider().
getProxySuperuserGroupConfKey(REAL_USER_NAME));
assertEquals (GROUP_NAMES.length, groupsToBeProxied.size());
}
@Test
public void testProxyUsersWithNullPrefix() throws Exception {
assertThrows(IllegalArgumentException.class,
()-> ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration(false), null));
}
@Test
public void testProxyUsersWithEmptyPrefix() throws Exception {
assertThrows(IllegalArgumentException.class, () -> {
ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration(false), "");
});
}
@Test
public void testProxyUsersWithCustomPrefix() throws Exception {
Configuration conf = new Configuration(false);
conf.set("x." + REAL_USER_NAME + ".users",
StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
conf.set("x." + REAL_USER_NAME+ ".hosts", PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf, "x");
// First try proxying a user that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a user that's not allowed
realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
@Test
public void testNoHostsForUsers() throws Exception {
Configuration conf = new Configuration(false);
conf.set("y." + REAL_USER_NAME + ".users",
StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
ProxyUsers.refreshSuperUserGroupsConfiguration(conf, "y");
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// IP doesn't matter
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
}
private static InetAddress toFakeAddress(String ip) {
try {
InetAddress addr = InetAddress.getByName(ip);
return InetAddress.getByAddress(ip.replace('.', '-'),
addr.getAddress());
} catch (UnknownHostException e) {
throw new IllegalArgumentException(e);
}
}
private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
try {
// test both APIs.
ProxyUsers.authorize(proxyUgi, host);
ProxyUsers.authorize(proxyUgi, toFakeAddress(host));
fail("Allowed authorization of " + proxyUgi + " from " + host);
} catch (AuthorizationException e) {
// Expected
}
}
private void assertAuthorized(UserGroupInformation proxyUgi, String host) {
try {
// test both APIs.
ProxyUsers.authorize(proxyUgi, host);
ProxyUsers.authorize(proxyUgi, toFakeAddress(host));
} catch (AuthorizationException e) {
fail("Did not allow authorization of " + proxyUgi + " from " + host);
}
}
static | and |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/xml/XmlReaderRequest.java | {
"start": 4457,
"end": 6964
} | class ____ implements XmlReaderRequest {
final Path path;
final Path rootDirectory;
final URL url;
final InputStream inputStream;
final Reader reader;
final Transformer transformer;
final boolean strict;
final String modelId;
final String location;
final boolean addDefaultEntities;
@SuppressWarnings("checkstyle:ParameterNumber")
DefaultXmlReaderRequest(
Path path,
Path rootDirectory,
URL url,
InputStream inputStream,
Reader reader,
Transformer transformer,
boolean strict,
String modelId,
String location,
boolean addDefaultEntities) {
this.path = path;
this.rootDirectory = rootDirectory;
this.url = url;
this.inputStream = inputStream;
this.reader = reader;
this.transformer = transformer;
this.strict = strict;
this.modelId = modelId;
this.location = location;
this.addDefaultEntities = addDefaultEntities;
}
@Override
public Path getPath() {
return path;
}
@Override
public Path getRootDirectory() {
return rootDirectory;
}
@Override
public URL getURL() {
return url;
}
@Override
public InputStream getInputStream() {
return inputStream;
}
@Override
public Reader getReader() {
return reader;
}
@Override
public Transformer getTransformer() {
return transformer;
}
@Override
public boolean isStrict() {
return strict;
}
@Override
public String getModelId() {
return modelId;
}
@Override
public String getLocation() {
return location;
}
@Override
public boolean isAddDefaultEntities() {
return addDefaultEntities;
}
}
}
}
| DefaultXmlReaderRequest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/metrics/groups/MetricGroupTest.java | {
"start": 18106,
"end": 19032
} | class ____
extends AbstractMetricGroup<DummyAbstractMetricGroup> {
final String groupName;
public DummyAbstractMetricGroup(MetricRegistry registry) {
this(registry, "foo");
}
public DummyAbstractMetricGroup(MetricRegistry registry, String groupName) {
super(registry, new String[0], null);
this.groupName = groupName;
}
@Override
protected QueryScopeInfo createQueryServiceMetricInfo(CharacterFilter filter) {
return null;
}
@Override
protected String getGroupName(CharacterFilter filter) {
return groupName;
}
@Override
protected void addMetric(String name, Metric metric) {}
@Override
public MetricGroup addGroup(String name) {
return new DummyAbstractMetricGroup(registry);
}
}
}
| DummyAbstractMetricGroup |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/util/BiConsumer.java | {
"start": 1077,
"end": 1354
} | interface ____<K, V> extends java.util.function.BiConsumer<K, V> {
/**
* Performs the operation given the specified arguments.
* @param k the first input argument
* @param v the second input argument
*/
@Override
void accept(K k, V v);
}
| BiConsumer |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/security/permission/AbstractHttpSecurityPolicyGrantingPermissionsTest.java | {
"start": 7926,
"end": 9378
} | class ____ {
@Inject
CDIBean cdiBean;
public void setup(@Observes Router router) {
router.route("/test/create").blockingHandler(new RouteHandler(() -> {
cdiBean.createTestBlocking();
return Uni.createFrom().nullItem();
}));
router.route("/test/create2").handler(new RouteHandler(cdiBean::createTest));
router.route("/test/delete").handler(new RouteHandler(cdiBean::deleteTest));
router.route("/test/update").handler(new RouteHandler(cdiBean::updateTest));
router.route("/test/update2").handler(new RouteHandler(cdiBean::update2Test));
router.route("/test/list").handler(new RouteHandler(cdiBean::listTest));
router.route("/test/custom").handler(new RouteHandler(cdiBean::customTest));
router.route("/test/custom-action").handler(new RouteHandler(cdiBean::customActionsTest));
router.route("/test/authenticated").handler(new RouteHandler(cdiBean::authenticatedTest));
router.route("/test/authenticated-admin").handler(new RouteHandler(cdiBean::authenticatedAdminTest));
router.route("/test/authenticated-user").handler(new RouteHandler(cdiBean::authenticatedUserTest));
router.route("/test/authenticated-test-role").handler(new RouteHandler(cdiBean::authenticatedTestRoleTest));
}
}
private static final | PermissionsPathHandler |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsAlterTableChangeColumnTest4.java | {
"start": 909,
"end": 2328
} | class ____ extends TestCase {
public void test_select() throws Exception {
String sql = "ALTER TABLE table_name CHANGE COLUMN old_col_name new_col_name column_type COMMENT '';";
assertEquals("ALTER TABLE table_name\n" +
"\tCHANGE COLUMN old_col_name new_col_name COLUMN_TYPE COMMENT '';", SQLUtils.formatOdps(sql));
assertEquals("alter table table_name\n" +
"\tchange column old_col_name new_col_name column_type comment '';", SQLUtils.formatOdps(sql, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.ODPS);
SQLStatement stmt = statementList.get(0);
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.ODPS);
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
// assertTrue(visitor.getColumns().contains(new Column("abc", "name")));
}
}
| OdpsAlterTableChangeColumnTest4 |
java | playframework__playframework | documentation/manual/working/javaGuide/main/async/code/javaguide/async/websocket/HomeController.java | {
"start": 406,
"end": 875
} | class ____ extends Controller {
private final ActorSystem actorSystem;
private final Materializer materializer;
@Inject
public HomeController(ActorSystem actorSystem, Materializer materializer) {
this.actorSystem = actorSystem;
this.materializer = materializer;
}
public WebSocket socket() {
return WebSocket.Text.accept(
request -> ActorFlow.actorRef(MyWebSocketActor::props, actorSystem, materializer));
}
}
// #content
| HomeController |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/internal/aether/MavenTransformer.java | {
"start": 1349,
"end": 2025
} | class ____ implements ArtifactTransformer {
private final TransformerManager transformerManager;
@Inject
MavenTransformer(TransformerManager transformerManager) {
this.transformerManager = requireNonNull(transformerManager);
}
@Override
public InstallRequest transformInstallArtifacts(RepositorySystemSession session, InstallRequest request) {
return transformerManager.remapInstallArtifacts(session, request);
}
@Override
public DeployRequest transformDeployArtifacts(RepositorySystemSession session, DeployRequest request) {
return transformerManager.remapDeployArtifacts(session, request);
}
}
| MavenTransformer |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractFutureAssert.java | {
"start": 1026,
"end": 19735
} | class ____<SELF extends AbstractFutureAssert<SELF, ACTUAL, RESULT>, ACTUAL extends Future<RESULT>, RESULT>
extends AbstractAssertWithComparator<SELF, ACTUAL> {
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
Futures futures = Futures.instance();
protected AbstractFutureAssert(ACTUAL actual, Class<?> selfType) {
super(actual, selfType);
}
/**
* Verifies that the {@link Future} is cancelled.
* <p>
* Example:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(new Callable<String>() {
* {@literal @}Override
* public String call() throws Exception {
* return "done";
* }
* });
*
* // assertion will fail:
* assertThat(future).isCancelled();
*
* // assertion will pass:
* future.cancel(true);
* assertThat(future).isCancelled();</code></pre>
*
* @return this assertion object.
* @see Future#isCancelled()
* @since 2.7.0 / 3.7.0
*/
public SELF isCancelled() {
futures.assertIsCancelled(info, actual);
return myself;
}
/**
* Verifies that the {@link Future} is not cancelled.
* <p>
* Example:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(new Callable<String>() {
* {@literal @}Override
* public String call() throws Exception {
* return "done";
* }
* });
*
* // assertion will pass:
* assertThat(future).isNotCancelled();
*
* // assertion will fail:
* future.cancel(true);
* assertThat(future).isNotCancelled();</code></pre>
*
* @return this assertion object.
* @see Future#isCancelled()
* @since 2.7.0 / 3.7.0
*/
public SELF isNotCancelled() {
    // Delegate the check (and failure-message construction) to the shared Futures helper.
    futures.assertIsNotCancelled(info, actual);
    return myself;
}
/**
* Verifies that the {@link Future} is done.
* <p>
* Example:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(new Callable<String>() {
* {@literal @}Override
* public String call() throws Exception {
* return "done";
* }
* });
*
* // assertion will pass:
* assertThat(future).isDone();
*
* future = executorService.submit(new Callable<String>() {
* {@literal @}Override
* public String call() throws Exception {
* Thread.sleep(1000);
* return "done";
* }
* });
*
* // assertion will fail:
* assertThat(future).isDone();</code></pre>
*
* @return this assertion object.
* @see Future#isDone()
* @since 2.7.0 / 3.7.0
*/
public SELF isDone() {
    // Delegate the check (and failure-message construction) to the shared Futures helper.
    futures.assertIsDone(info, actual);
    return myself;
}
/**
* Verifies that the {@link Future} is not done.
* <p>
* Example:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(new Callable<String>() {
* {@literal @}Override
* public String call() throws Exception {
* Thread.sleep(1000);
* return "done";
* }
* });
*
* // assertion will pass:
* assertThat(future).isNotDone();
*
* future = executorService.submit(new Callable<String>() {
* {@literal @}Override
* public String call() throws Exception {
* return "done";
* }
* });
*
* // assertion will fail:
* assertThat(future).isNotDone();</code></pre>
*
* @return this assertion object.
* @see Future#isDone()
* @since 2.7.0 / 3.7.0
*/
public SELF isNotDone() {
    // Delegate the check (and failure-message construction) to the shared Futures helper.
    futures.assertIsNotDone(info, actual);
    return myself;
}
/**
* Waits if necessary for at most the given time for this future to complete and then returns its result for further assertions.
* <p>
* If the future's result is not available for any reason an assertion error is thrown.
* <p>
* <b>WARNING</b>
* <p>
* {@code succeedsWithin} does not fully integrate with soft assertions, if it fails the test will fail immediately (the error
* is not collected as a soft assertion error), if it succeeds the chained assertions are executed and any error will be
* collected as a soft assertion error.<br>
* The rationale is that if we collected {@code succeedsWithin} error as a soft assertion error, the chained assertions would be
* executed against a future value that is actually not available.
* <p>
* To get assertions for the future result's type use {@link #succeedsWithin(Duration, InstanceOfAssertFactory)} instead.
* <p>
* Examples:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(() -> {
* Thread.sleep(100);
* return "ook!";
* });
*
* Duration timeout = Duration.ofMillis(200);
*
* // assertion succeeds
* assertThat(future).succeedsWithin(timeout)
* .isEqualTo("ook!");
*
* // fails as the future is not done after the given timeout
* assertThat(future).succeedsWithin(Duration.ofMillis(50));
*
* // fails as the future is cancelled
* Future<String> future = ... ;
* future.cancel(false);
* assertThat(future).succeedsWithin(timeout);</code></pre>
*
* @param timeout the maximum time to wait
* @return a new assertion object on the future's result.
* @throws AssertionError if the actual {@code CompletableFuture} is {@code null}.
* @throws AssertionError if the actual {@code CompletableFuture} does not succeed within the given timeout.
* @since 3.17.0
*/
public ObjectAssert<RESULT> succeedsWithin(Duration timeout) {
    // Thin wrapper over the internal method; the factory overload bypasses this entry
    // point to avoid double proxying under soft assertions (see succeedsWithin with factory).
    return internalSucceedsWithin(timeout);
}
/**
* Waits if necessary for at most the given time for this future to complete and then returns its result for further assertions.
* <p>
* If the future's result is not available for any reason an assertion error is thrown.
* <p>
* <b>WARNING</b>
* <p>
* {@code succeedsWithin} does not fully integrate with soft assertions, if it fails the test will fail immediately (the error
* is not collected as a soft assertion error), if it succeeds the chained assertions are executed and any error will be
* collected as a soft assertion error.<br>
* The rationale is that if we collected {@code succeedsWithin} error as a soft assertion error, the chained assertions would be
* executed against a future value that is actually not available.
* <p>
* To get assertions for the future result's type use {@link #succeedsWithin(long, TimeUnit, InstanceOfAssertFactory)} instead.
* <p>
* Examples:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(() -> {
* Thread.sleep(100);
* return "ook!";
* });
*
* // assertion succeeds
* assertThat(future).succeedsWithin(200, TimeUnit.MILLISECONDS)
* .isEqualTo("ook!");
*
* // fails as the future is not done after the given timeout
* assertThat(future).succeedsWithin(50, TimeUnit.MILLISECONDS);
*
* // fails as the future is cancelled
* Future<String> future = ... ;
* future.cancel(false);
* assertThat(future).succeedsWithin(200, TimeUnit.MILLISECONDS);</code></pre>
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @return a new assertion object on the future's result.
* @throws AssertionError if the actual {@code Future} is {@code null}.
* @throws AssertionError if the actual {@code Future} does not succeed within the given timeout.
* @since 3.17.0
*/
public ObjectAssert<RESULT> succeedsWithin(long timeout, TimeUnit unit) {
    // Thin wrapper over the internal method; the factory overload bypasses this entry
    // point to avoid double proxying under soft assertions.
    return internalSucceedsWithin(timeout, unit);
}
/**
* Waits if necessary for at most the given time for this future to complete, the {@link InstanceOfAssertFactory}
* parameter is used to return assertions specific to the future's result type.
* <p>
* If the future's result is not available for any reason an assertion error is thrown.
* <p>
* <b>WARNING</b>
* <p>
* {@code succeedsWithin} does not fully integrate with soft assertions, if it fails the test will fail immediately (the error
* is not collected as a soft assertion error), if it succeeds the chained assertions are executed and any error will be
* collected as a soft assertion error.<br>
* The rationale is that if we collected {@code succeedsWithin} error as a soft assertion error, the chained assertions would be
* executed against a future value that is actually not available.
* <p>
* Examples:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(() -> {
* Thread.sleep(100);
* return "ook!";
* });
*
* Duration timeout = Duration.ofMillis(200);
*
* // assertion succeeds, contains(String...) assertion can be called because InstanceOfAssertFactories.STRING
* // indicates AssertJ to allow String assertions after succeedsWithin.
* assertThat(future).succeedsWithin(timeout, InstanceOfAssertFactories.STRING)
* .contains("ok");
*
* // fails as the future is not done after the given timeout
* // as() is syntactic sugar for better readability.
* assertThat(future).succeedsWithin(Duration.ofMillis(50), as(STRING));
*
* // assertion fails if the narrowed type for assertions is incompatible with the future's result type.
* assertThat(future).succeedsWithin(timeout, InstanceOfAssertFactories.DATE)
* .isToday();</code></pre>
*
* @param <ASSERT> the type of the resulting {@code Assert}
* @param timeout the maximum time to wait
* @param assertFactory the factory which verifies the type and creates the new {@code Assert}
* @return a new narrowed {@link Assert} instance for assertions chaining on the value of the {@link Future}
* @throws AssertionError if the actual {@code Future} is {@code null}.
* @throws IllegalStateException if the actual {@code Future} does not succeed within the given timeout.
* @since 3.17.0
*/
public <ASSERT extends AbstractAssert<?, ?>> ASSERT succeedsWithin(Duration timeout,
                                                                   InstanceOfAssertFactory<RESULT, ASSERT> assertFactory) {
    // we don't call succeedsWithin(Duration) to avoid double proxying soft assertions.
    return internalSucceedsWithin(timeout).asInstanceOf(assertFactory);
}
/**
* Waits if necessary for at most the given time for this future to complete, the {@link InstanceOfAssertFactory}
* parameter is used to return assertions specific to the future's result type.
* <p>
* If the future's result is not available for any reason an assertion error is thrown.
* <p>
* <b>WARNING</b>
* <p>
* {@code succeedsWithin} does not fully integrate with soft assertions, if it fails the test will fail immediately (the error
* is not collected as a soft assertion error), if it succeeds the chained assertions are executed and any error will be
* collected as a soft assertion error.<br>
* The rationale is that if we collected {@code succeedsWithin} error as a soft assertion error, the chained assertions would be
* executed against a future value that is actually not available.
* <p>
* Examples:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(() -> {
* Thread.sleep(100);
* return "ook!";
* });
*
* // assertion succeeds, contains(String...) assertion can be called because InstanceOfAssertFactories.STRING
* // indicates AssertJ to allow String assertions after succeedsWithin.
* assertThat(future).succeedsWithin(200, TimeUnit.MILLISECONDS, InstanceOfAssertFactories.STRING)
* .contains("ok");
*
* // fails as the future is not done after the given timeout
* // as() is syntactic sugar for better readability.
* assertThat(future).succeedsWithin(50, TimeUnit.MILLISECONDS, as(STRING));
*
* // assertion fails if the narrowed type for assertions is incompatible with the future's result type.
* assertThat(future).succeedsWithin(200, TimeUnit.MILLISECONDS, InstanceOfAssertFactories.DATE)
* .isToday();</code></pre>
*
* @param <ASSERT> the type of the resulting {@code Assert}
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @param assertFactory the factory which verifies the type and creates the new {@code Assert}
* @return a new narrowed {@link Assert} instance for assertions chaining on the value of the {@link Future}
* @throws AssertionError if the actual {@code Future} is {@code null}.
* @throws AssertionError if the actual {@code Future} does not succeed within the given timeout.
* @since 3.17.0
*/
public <ASSERT extends AbstractAssert<?, ?>> ASSERT succeedsWithin(long timeout, TimeUnit unit,
                                                                   InstanceOfAssertFactory<RESULT, ASSERT> assertFactory) {
    // we don't call succeedsWithin(Duration) to avoid double proxying soft assertions.
    return internalSucceedsWithin(timeout, unit).asInstanceOf(assertFactory);
}
/**
* Checks that the future does not complete within the given time and returns the exception that caused the failure for
* further (exception) assertions, the exception can be any of {@link InterruptedException}, {@link ExecutionException},
* {@link TimeoutException} or {@link CancellationException} as per {@link Future#get(long, TimeUnit)}.
* <p>
* <b>WARNING</b>
* <p>
* {@code failsWithin} does not fully integrate with soft assertions, if the future completes the test will fail immediately (the
* error is not collected as a soft assertion error), if the assertion succeeds the chained assertions are executed and any
* errors will be collected as a soft assertion errors.<br>
* The rationale is that if we collect {@code failsWithin} error as a soft assertion error, the chained assertions would be
* executed but that does not make sense since there is no exception to check as the future has completed.
* <p>
* Examples:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(() -> {
* Thread.sleep(100);
* return "ook!";
* });
*
* // assertion succeeds as the future is not completed after 50ms
* assertThat(future).failsWithin(Duration.ofMillis(50))
* .withThrowableOfType(TimeoutException.class)
* .withMessage(null);
*
* // fails as the future is completed after within 200ms
* assertThat(future).failsWithin(Duration.ofMillis(200));</code></pre>
*
* @param timeout the maximum time to wait
* @return a new assertion instance on the future's exception.
* @throws AssertionError if the actual {@code CompletableFuture} is {@code null}.
* @throws AssertionError if the actual {@code CompletableFuture} succeeds within the given timeout.
* @since 3.18.0
*/
public WithThrowable failsWithin(Duration timeout) {
    // Thin wrapper over the internal method (kept internal to avoid double proxying
    // under soft assertions, same pattern as succeedsWithin).
    return internalFailsWithin(timeout);
}
/**
* Checks that the future does not complete within the given time and returns the exception that caused the failure for
* further (exception) assertions, the exception can be any of {@link InterruptedException}, {@link ExecutionException},
* {@link TimeoutException} or {@link CancellationException} as per {@link Future#get(long, TimeUnit)}.
* <p>
* <b>WARNING</b>
* <p>
* {@code failsWithin} does not fully integrate with soft assertions, if the future completes the test will fail immediately (the
* error is not collected as a soft assertion error), if the assertion succeeds the chained assertions are executed and any
* errors will be collected as a soft assertion errors.<br>
* The rationale is that if we collect {@code failsWithin} error as a soft assertion error, the chained assertions would be
* executed but that does not make sense since there is no exception to check as the future has completed.
* <p>
* Examples:
* <pre><code class='java'> ExecutorService executorService = Executors.newSingleThreadExecutor();
*
* Future<String> future = executorService.submit(() -> {
* Thread.sleep(100);
* return "ook!";
* });
*
* // assertion succeeds as the future is not completed after 50ms
* assertThat(future).failsWithin(50, TimeUnit.MILLISECONDS)
* .withThrowableOfType(TimeoutException.class)
* .withMessage(null);
*
* // fails as the future is completed after the given timeout duration
* assertThat(future).failsWithin(200, TimeUnit.MILLISECONDS);</code></pre>
*
* @param timeout the maximum time to wait
* @param unit the time unit
* @return a new assertion instance on the future's exception.
* @throws AssertionError if the actual {@code CompletableFuture} is {@code null}.
* @throws AssertionError if the actual {@code CompletableFuture} succeeds within the given timeout.
* @since 3.18.0
*/
public WithThrowable failsWithin(long timeout, TimeUnit unit) {
    // Thin wrapper over the internal method (kept internal to avoid double proxying
    // under soft assertions, same pattern as succeedsWithin).
    return internalFailsWithin(timeout, unit);
}
// Shared implementation behind failsWithin(Duration): asserts the future did not
// succeed within the timeout and wraps the captured exception for chained assertions.
private WithThrowable internalFailsWithin(Duration timeout) {
    return new WithThrowable(futures.assertFailedWithin(info, actual, timeout));
}
// Shared implementation behind failsWithin(long, TimeUnit): asserts the future did not
// succeed within the timeout and wraps the captured exception for chained assertions.
private WithThrowable internalFailsWithin(long timeout, TimeUnit unit) {
    return new WithThrowable(futures.assertFailedWithin(info, actual, timeout, unit));
}
// Shared implementation behind succeedsWithin(Duration): asserts the future succeeded
// within the timeout and opens a new assertion chain on its result.
private ObjectAssert<RESULT> internalSucceedsWithin(Duration timeout) {
    return assertThat(futures.assertSucceededWithin(info, actual, timeout));
}
// Shared implementation behind succeedsWithin(long, TimeUnit): asserts the future
// succeeded within the timeout and opens a new assertion chain on its result.
private ObjectAssert<RESULT> internalSucceedsWithin(long timeout, TimeUnit unit) {
    return assertThat(futures.assertSucceededWithin(info, actual, timeout, unit));
}
}
| AbstractFutureAssert |
java | apache__camel | components/camel-aws/camel-aws2-ecs/src/main/java/org/apache/camel/component/aws2/ecs/ECS2Producer.java | {
"start": 2255,
"end": 12141
} | class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(ECS2Producer.class);
private transient String ecsProducerToString;
private HealthCheck producerHealthCheck;
private WritableHealthCheckRepository healthCheckRepository;
/**
 * Creates the producer bound to the given ECS endpoint.
 */
public ECS2Producer(Endpoint endpoint) {
    super(endpoint);
}
@Override
public void process(Exchange exchange) throws Exception {
switch (determineOperation(exchange)) {
case listClusters:
listClusters(getEndpoint().getEcsClient(), exchange);
break;
case describeCluster:
describeCluster(getEndpoint().getEcsClient(), exchange);
break;
case createCluster:
createCluster(getEndpoint().getEcsClient(), exchange);
break;
case deleteCluster:
deleteCluster(getEndpoint().getEcsClient(), exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation");
}
}
/**
 * Resolves the ECS operation to execute: a per-message header takes precedence,
 * otherwise the operation configured on the endpoint is used.
 */
private ECS2Operations determineOperation(Exchange exchange) {
    ECS2Operations headerOperation = exchange.getIn().getHeader(ECS2Constants.OPERATION, ECS2Operations.class);
    return headerOperation != null ? headerOperation : getConfiguration().getOperation();
}
/**
 * Convenience accessor for the endpoint's configuration.
 */
protected ECS2Configuration getConfiguration() {
    return getEndpoint().getConfiguration();
}
@Override
public String toString() {
    // Lazily compute and cache the string form; the endpoint URI is passed through
    // URISupport.sanitizeUri before being embedded.
    String cached = ecsProducerToString;
    if (cached == null) {
        cached = "ECSProducer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]";
        ecsProducerToString = cached;
    }
    return cached;
}
@Override
public ECS2Endpoint getEndpoint() {
    // Covariant narrowing: this producer is only ever created by an ECS2Endpoint.
    return (ECS2Endpoint) super.getEndpoint();
}
/**
 * Executes the ECS ListClusters operation, either from a ready-made POJO request
 * in the message body or from a request assembled from exchange headers.
 */
private void listClusters(EcsClient ecsClient, Exchange exchange) throws InvalidPayloadException {
    if (getConfiguration().isPojoRequest()) {
        // POJO mode: the message body must already be the SDK request object.
        Object payload = exchange.getIn().getMandatoryBody();
        if (payload instanceof ListClustersRequest) {
            ListClustersResponse result;
            try {
                ListClustersRequest request = (ListClustersRequest) payload;
                result = ecsClient.listClusters(request);
            } catch (AwsServiceException ase) {
                // Log the AWS error code at trace level, then rethrow for the caller to handle.
                LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
                throw ase;
            }
            Message message = getMessageForResponse(exchange);
            message.setBody(result);
        }
        // NOTE(review): a body of any other type is silently ignored here (no response
        // is set on the exchange) — confirm this is intended.
    } else {
        // Header mode: build the request from exchange headers.
        Builder builder = ListClustersRequest.builder();
        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS))) {
            int maxRes = exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS, Integer.class);
            builder.maxResults(maxRes);
        }
        ListClustersResponse result;
        try {
            ListClustersRequest request = builder.build();
            result = ecsClient.listClusters(request);
        } catch (AwsServiceException ase) {
            LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
            throw ase;
        }
        Message message = getMessageForResponse(exchange);
        message.setBody(result);
    }
}
/**
 * Executes the ECS CreateCluster operation, either from a ready-made POJO request
 * in the message body or from a request assembled from exchange headers.
 */
private void createCluster(EcsClient ecsClient, Exchange exchange) throws InvalidPayloadException {
    if (getConfiguration().isPojoRequest()) {
        // POJO mode: the message body must already be the SDK request object.
        Object body = exchange.getIn().getMandatoryBody();
        if (body instanceof CreateClusterRequest) {
            CreateClusterResponse response;
            try {
                response = ecsClient.createCluster((CreateClusterRequest) body);
            } catch (AwsServiceException ase) {
                LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
                throw ase;
            }
            getMessageForResponse(exchange).setBody(response);
        }
    } else {
        // Header mode: build the request from the optional cluster-name header.
        CreateClusterRequest.Builder requestBuilder = CreateClusterRequest.builder();
        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) {
            requestBuilder.clusterName(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class));
        }
        CreateClusterResponse response;
        try {
            response = ecsClient.createCluster(requestBuilder.build());
        } catch (AwsServiceException ase) {
            LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
            throw ase;
        }
        getMessageForResponse(exchange).setBody(response);
    }
}
/**
 * Executes the ECS DescribeClusters operation, either from a ready-made POJO request
 * in the message body or from a request assembled from exchange headers.
 */
private void describeCluster(EcsClient ecsClient, Exchange exchange) throws InvalidPayloadException {
    if (getConfiguration().isPojoRequest()) {
        // POJO mode: the message body must already be the SDK request object.
        Object payload = exchange.getIn().getMandatoryBody();
        if (payload instanceof DescribeClustersRequest) {
            DescribeClustersResponse result;
            try {
                DescribeClustersRequest request = (DescribeClustersRequest) payload;
                result = ecsClient.describeClusters(request);
            } catch (AwsServiceException ase) {
                LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
                throw ase;
            }
            Message message = getMessageForResponse(exchange);
            message.setBody(result);
        }
    } else {
        // Header mode: describe the single cluster named in the header, when present.
        DescribeClustersRequest.Builder builder = DescribeClustersRequest.builder();
        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) {
            String clusterName = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class);
            builder.clusters(clusterName);
        }
        DescribeClustersResponse result;
        try {
            DescribeClustersRequest request = builder.build();
            result = ecsClient.describeClusters(request);
        } catch (AwsServiceException ase) {
            LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
            throw ase;
        }
        Message message = getMessageForResponse(exchange);
        message.setBody(result);
    }
}
/**
 * Executes the ECS DeleteCluster operation, either from a ready-made POJO request
 * in the message body or from a request assembled from exchange headers. In header
 * mode the cluster name is mandatory.
 */
private void deleteCluster(EcsClient ecsClient, Exchange exchange) throws InvalidPayloadException {
    if (getConfiguration().isPojoRequest()) {
        // POJO mode: the message body must already be the SDK request object.
        Object payload = exchange.getIn().getMandatoryBody();
        if (payload instanceof DeleteClusterRequest) {
            DeleteClusterResponse result;
            try {
                DeleteClusterRequest request = (DeleteClusterRequest) payload;
                result = ecsClient.deleteCluster(request);
            } catch (AwsServiceException ase) {
                LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
                throw ase;
            }
            Message message = getMessageForResponse(exchange);
            message.setBody(result);
        }
    } else {
        DeleteClusterRequest.Builder builder = DeleteClusterRequest.builder();
        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) {
            String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class);
            builder.cluster(name);
        } else {
            // Unlike the other operations, deletion refuses to proceed without an explicit target.
            throw new IllegalArgumentException("Cluster name must be specified");
        }
        DeleteClusterResponse result;
        try {
            DeleteClusterRequest request = builder.build();
            result = ecsClient.deleteCluster(request);
        } catch (AwsServiceException ase) {
            LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
            throw ase;
        }
        Message message = getMessageForResponse(exchange);
        message.setBody(result);
    }
}
/**
 * Returns the message on which operation responses should be set.
 */
public static Message getMessageForResponse(final Exchange exchange) {
    return exchange.getMessage();
}
@Override
protected void doStart() throws Exception {
    // health-check is optional so discover and resolve
    healthCheckRepository = HealthCheckHelper.getHealthCheckRepository(
            getEndpoint().getCamelContext(),
            "producers",
            WritableHealthCheckRepository.class);
    if (healthCheckRepository != null) {
        // Register a producer-level health check under the endpoint id; whether it is
        // enabled is controlled by the component configuration.
        String id = getEndpoint().getId();
        producerHealthCheck = new ECS2ProducerHealthCheck(getEndpoint(), id);
        producerHealthCheck.setEnabled(getEndpoint().getComponent().isHealthCheckProducerEnabled());
        healthCheckRepository.addHealthCheck(producerHealthCheck);
    }
}
@Override
protected void doStop() throws Exception {
    // Deregister the health check added in doStart (if any) so a stopped producer
    // no longer reports health.
    if (healthCheckRepository != null && producerHealthCheck != null) {
        healthCheckRepository.removeHealthCheck(producerHealthCheck);
        producerHealthCheck = null;
    }
}
}
| ECS2Producer |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2393/Issue2393Test.java | {
"start": 547,
"end": 1003
} | class ____ {
@ProcessorTest
public void shouldUseCorrectImport() {
    // Map a source Address with a nested Country through the generated converter.
    Address source = new Address( "Zurich", new Country( "Switzerland" ) );
    AddressDto converted = AddressDto.Converter.INSTANCE.convert( source );

    assertThat( converted.getCity() ).isEqualTo( "Zurich" );
    assertThat( converted.getCountry().getName() ).isEqualTo( "Switzerland" );
    assertThat( converted.getCountry().getCode() ).isEqualTo( "UNKNOWN" );
}
}
| Issue2393Test |
java | spring-projects__spring-framework | spring-oxm/src/main/java/org/springframework/oxm/xstream/CatchAllConverter.java | {
"start": 1113,
"end": 2058
} | class ____ to
* {@linkplain com.thoughtworks.xstream.XStream#registerConverter(com.thoughtworks.xstream.converters.Converter, int) register}
* this converter as a catch-all last converter with a
* {@linkplain com.thoughtworks.xstream.XStream#PRIORITY_NORMAL normal}
* or higher priority, in addition to converters that explicitly handle the domain
* classes that should be supported. As a result, default XStream converters with
* lower priorities and possible security vulnerabilities do not get invoked.
*
* <p>For instance:
* <pre class="code">
* XStreamMarshaller unmarshaller = new XStreamMarshaller();
* unmarshaller.getXStream().registerConverter(new MyDomainClassConverter(), XStream.PRIORITY_VERY_HIGH);
* unmarshaller.getXStream().registerConverter(new CatchAllConverter(), XStream.PRIORITY_NORMAL);
* MyDomainClass myObject = unmarshaller.unmarshal(source);
* </pre>
*
* @author Arjen Poutsma
* @since 3.2.5
*/
public | is |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/component/sql/stored/ProducerClasspathTest.java | {
"start": 905,
"end": 1406
} | class ____ extends ProducerTest {
@Override
protected RouteBuilder createRouteBuilder() {
    return new RouteBuilder() {
        @Override
        public void configure() {
            // required for the sql component
            getContext().getComponent("sql-stored", SqlStoredComponent.class).setDataSource(db);
            // Load the stored-procedure template from the classpath rather than inline,
            // then forward the result to the mock endpoint for assertion.
            from("direct:query").to("sql-stored:classpath:sql/selectStored.sql").to("mock:query");
        }
    };
}
}
| ProducerClasspathTest |
java | apache__rocketmq | broker/src/test/java/org/apache/rocketmq/broker/processor/PopInflightMessageCounterTest.java | {
"start": 1110,
"end": 4294
} | class ____ {
/**
 * Builds a counter backed by a mocked broker whose start time is "now", so that
 * decrements stamped with a current pop time are accepted. Shared by both tests,
 * which previously duplicated this setup.
 */
private static PopInflightMessageCounter createCounter() {
    BrokerController brokerController = mock(BrokerController.class);
    long brokerStartTime = System.currentTimeMillis();
    when(brokerController.getShouldStartTime()).thenReturn(brokerStartTime);
    return new PopInflightMessageCounter(brokerController);
}

/**
 * Verifies increment/decrement accounting of in-flight pop messages: stale pop
 * times (before broker start) are ignored, checkpoint-based decrements work, and
 * the counter never goes below zero.
 */
@Test
public void testNum() {
    PopInflightMessageCounter counter = createCounter();
    final String topic = "topic";
    final String group = "group";

    // No activity yet.
    assertEquals(0, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    counter.incrementInFlightMessageNum(topic, group, 0, 3);
    assertEquals(3, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    counter.decrementInFlightMessageNum(topic, group, System.currentTimeMillis(), 0, 1);
    assertEquals(2, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    // A pop time earlier than the broker start time must be ignored.
    counter.decrementInFlightMessageNum(topic, group, System.currentTimeMillis() - 1000, 0, 1);
    assertEquals(2, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    // Decrement through a checkpoint carrying a current pop time.
    PopCheckPoint popCheckPoint = new PopCheckPoint();
    popCheckPoint.setTopic(topic);
    popCheckPoint.setCId(group);
    popCheckPoint.setQueueId(0);
    popCheckPoint.setPopTime(System.currentTimeMillis());
    counter.decrementInFlightMessageNum(popCheckPoint);
    assertEquals(1, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    counter.decrementInFlightMessageNum(topic, group, System.currentTimeMillis(), 0, 1);
    assertEquals(0, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    // The counter must not go below zero.
    counter.decrementInFlightMessageNum(topic, group, System.currentTimeMillis(), 0, 1);
    assertEquals(0, counter.getGroupPopInFlightMessageNum(topic, group, 0));
}

/**
 * Verifies that clearing by topic or by group removes only matching entries.
 */
@Test
public void testClearInFlightMessageNum() {
    PopInflightMessageCounter counter = createCounter();
    final String topic = "topic";
    final String group = "group";

    assertEquals(0, counter.getGroupPopInFlightMessageNum(topic, group, 0));
    counter.incrementInFlightMessageNum(topic, group, 0, 3);
    assertEquals(3, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    // Clearing an unrelated topic leaves the count untouched.
    counter.clearInFlightMessageNumByTopicName("errorTopic");
    assertEquals(3, counter.getGroupPopInFlightMessageNum(topic, group, 0));
    counter.clearInFlightMessageNumByTopicName(topic);
    assertEquals(0, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    counter.incrementInFlightMessageNum(topic, group, 0, 3);
    assertEquals(3, counter.getGroupPopInFlightMessageNum(topic, group, 0));

    // Clearing an unrelated group leaves the count untouched.
    counter.clearInFlightMessageNumByGroupName("errorGroup");
    assertEquals(3, counter.getGroupPopInFlightMessageNum(topic, group, 0));
    counter.clearInFlightMessageNumByGroupName(group);
    assertEquals(0, counter.getGroupPopInFlightMessageNum(topic, group, 0));
}
} | PopInflightMessageCounterTest |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/CoStreamCheckpointingITCase.java | {
"start": 9427,
"end": 10502
} | class ____ extends RichMapFunction<PrefixCount, PrefixCount>
implements ListCheckpointed<Long> {
// Per-subtask final counts, published from close() for later inspection.
static long[] counts = new long[PARALLELISM];

// Running count of elements seen by this subtask; checkpointed via ListCheckpointed.
private long count;

@Override
public PrefixCount map(PrefixCount value) throws Exception {
    // Pass-through map that only counts elements.
    count++;
    return value;
}

@Override
public void close() throws IOException {
    // Publish this subtask's final count into the shared static array.
    counts[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] = count;
}

@Override
public List<Long> snapshotState(long checkpointId, long timestamp) throws Exception {
    // The checkpointed state is the single running count.
    return Collections.singletonList(this.count);
}
@Override
public void restoreState(List<Long> state) throws Exception {
    // Exactly one element is expected: the single count written by snapshotState.
    if (state.size() != 1) {
        throw new RuntimeException(
                "Test failed due to unexpected recovered state size " + state.size());
    }
    this.count = state.get(0);
}
}
private static | StatefulCounterFunction |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FilerConsumerMoveFailedDoneFileNameTest.java | {
"start": 1129,
"end": 2367
} | class ____ extends ContextTestSupport {
// Randomized file name so parallel/repeated runs do not collide on disk.
private static final String TEST_FILE_NAME = "hello" + UUID.randomUUID() + ".txt";

@Test
public void testDoneFile() throws Exception {
    getMockEndpoint("mock:input").expectedMessageCount(1);

    // Write the payload file first, then the "done" marker that triggers consumption.
    template.sendBodyAndHeader(fileUri(), "Hello World", Exchange.FILE_NAME, TEST_FILE_NAME);
    template.sendBodyAndHeader(fileUri(), "", Exchange.FILE_NAME, "done");

    // wait a bit for the file processing to complete
    assertMockEndpointsSatisfied(1, TimeUnit.SECONDS);
    oneExchangeDone.matchesWaitTime();

    // done file should be deleted now
    assertFileNotExists(testFile("done"));

    // as well the original file should be moved to failed
    assertFileExists(testFile("failed/" + TEST_FILE_NAME));
}

@Override
protected RouteBuilder createRouteBuilder() {
    return new RouteBuilder() {
        @Override
        public void configure() {
            // Route forces a failure after the mock so the consumer exercises moveFailed.
            from(fileUri("?doneFileName=done&initialDelay=0&delay=10&moveFailed=failed")).to("mock:input")
                    .throwException(new IllegalArgumentException("Forced"));
        }
    };
}
}
| FilerConsumerMoveFailedDoneFileNameTest |
java | spring-projects__spring-boot | module/spring-boot-mustache/src/main/java/org/springframework/boot/mustache/autoconfigure/MustacheResourceTemplateLoader.java | {
"start": 1476,
"end": 2452
} | class ____ implements TemplateLoader, ResourceLoaderAware {
// Prepended to every template name before resource resolution.
private String prefix = "";

// Appended to every template name before resource resolution.
private String suffix = "";

// Name of the charset used to decode template resources; defaults to UTF-8.
private String charSet = "UTF-8";

// NOTE(review): a null ClassLoader is passed explicitly here — presumably so the
// class loader is resolved at access time by Spring; confirm against DefaultResourceLoader.
private ResourceLoader resourceLoader = new DefaultResourceLoader(null);

/**
 * Create a loader with no prefix or suffix.
 */
public MustacheResourceTemplateLoader() {
}

/**
 * Create a loader that surrounds template names with the given prefix and suffix
 * before resolving them as resources.
 * @param prefix the prefix
 * @param suffix the suffix
 */
public MustacheResourceTemplateLoader(String prefix, String suffix) {
    this.prefix = prefix;
    this.suffix = suffix;
}

/**
 * Set the charset.
 * @param charSet the charset
 */
public void setCharset(String charSet) {
    this.charSet = charSet;
}

/**
 * Set the resource loader.
 * @param resourceLoader the resource loader
 */
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
    this.resourceLoader = resourceLoader;
}

/**
 * Resolve {@code prefix + name + suffix} as a resource and open a reader over it
 * using the configured charset.
 */
@Override
public Reader getTemplate(String name) throws Exception {
    return new InputStreamReader(this.resourceLoader.getResource(this.prefix + name + this.suffix).getInputStream(),
            this.charSet);
}
}
| MustacheResourceTemplateLoader |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/ext/ExternalTypeIdTest.java | {
"start": 7654,
"end": 20453
} | class ____ {
// Enum discriminator; exposed over JSON as the string property "type".
protected Type965 typeEnum;

// Polymorphic payload; exposed over JSON as "objectValue".
protected Object value;

@JsonGetter("type")
String getTypeString() {
    // Serialize the enum by its name.
    return typeEnum.name();
}

@JsonSetter("type")
void setTypeString(String type) {
    // Parse the incoming string back into the enum.
    this.typeEnum = Type965.valueOf(type);
}

@JsonGetter(value = "objectValue")
Object getValue() {
    return value;
}

// The "type" property (handled by the accessors above) doubles as an external type id
// for this value: "BIG_DECIMAL" selects BigDecimal.
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.EXTERNAL_PROPERTY, property = "type")
@JsonSubTypes({ @JsonSubTypes.Type(name = "BIG_DECIMAL", value = BigDecimal.class) })
@JsonSetter(value = "objectValue")
private void setValue(Object value) {
    this.value = value;
}
}
/*
/**********************************************************
/* Unit tests, serialization
/**********************************************************
*/
private final ObjectMapper MAPPER = new ObjectMapper();
@Test
public void testSimpleSerialization() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.registerSubtypes(ValueBean.class)
.build();
// This may look odd, but one implementation nastiness is the fact
// that we cannot properly serialize type id before the object,
// because call is made after property name (for object) has already
// been written out. So we'll write it after...
// Deserializer will work either way as it cannot rely on ordering
// anyway.
assertEquals("{\"bean\":{\"value\":11},\"extType\":\"vbean\"}",
mapper.writeValueAsString(new ExternalBean(11)));
}
// If trying to use with Class, should just become "PROPERTY" instead:
@Test
public void testImproperExternalIdSerialization() throws Exception
{
ObjectMapper mapper = new ObjectMapper();
assertEquals("{\"extType\":\"funk\",\"i\":3}",
mapper.writeValueAsString(new FunkyExternalBean()));
}
// for [databind#942]
@Test
public void testExternalTypeIdWithNull() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.registerSubtypes(ValueBean.class)
.build();
ExternalBean b;
b = mapper.readValue(a2q("{'bean':null,'extType':'vbean'}"),
ExternalBean.class);
assertNotNull(b);
b = mapper.readValue(a2q("{'extType':'vbean','bean':null}"),
ExternalBean.class);
assertNotNull(b);
}
/*
/**********************************************************
/* Unit tests, deserialization
/**********************************************************
*/
@Test
public void testSimpleDeserialization() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.registerSubtypes(ValueBean.class)
.build();
ExternalBean result = mapper.readValue("{\"bean\":{\"value\":11},\"extType\":\"vbean\"}", ExternalBean.class);
assertNotNull(result);
assertNotNull(result.bean);
ValueBean vb = (ValueBean) result.bean;
assertEquals(11, vb.value);
// let's also test with order switched:
result = mapper.readValue("{\"extType\":\"vbean\", \"bean\":{\"value\":13}}", ExternalBean.class);
assertNotNull(result);
assertNotNull(result.bean);
vb = (ValueBean) result.bean;
assertEquals(13, vb.value);
}
// Test for verifying that it's ok to have multiple (say, 3)
// externally typed things, mixed with other stuff...
@Test
public void testMultipleTypeIdsDeserialization() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.registerSubtypes(ValueBean.class)
.build();
String json = mapper.writeValueAsString(new ExternalBean3(3));
ExternalBean3 result = mapper.readValue(json, ExternalBean3.class);
assertNotNull(result);
assertNotNull(result.value1);
assertNotNull(result.value2);
assertNotNull(result.value3);
assertEquals(3, ((ValueBean)result.value1).value);
assertEquals(4, ((ValueBean)result.value2).value);
assertEquals(5, ((ValueBean)result.value3).value);
assertEquals(3, result.foo);
}
// Also, it should be ok to use @JsonCreator as well...
@Test
public void testExternalTypeWithCreator() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.registerSubtypes(ValueBean.class)
.build();
String json = mapper.writeValueAsString(new ExternalBeanWithCreator(7));
ExternalBeanWithCreator result = mapper.readValue(json, ExternalBeanWithCreator.class);
assertNotNull(result);
assertNotNull(result.value);
assertEquals(7, ((ValueBean)result.value).value);
assertEquals(7, result.foo);
}
// If trying to use with Class, should just become "PROPERTY" instead:
@Test
public void testImproperExternalIdDeserialization() throws Exception
{
FunkyExternalBean result = MAPPER.readValue("{\"extType\":\"funk\",\"i\":3}",
FunkyExternalBean.class);
assertNotNull(result);
assertEquals(3, result.i);
result = MAPPER.readValue("{\"i\":4,\"extType\":\"funk\"}",
FunkyExternalBean.class);
assertNotNull(result);
assertEquals(4, result.i);
}
@Test
public void testIssue798() throws Exception
{
Base base = new Derived1("derived1 prop val", "base prop val");
BaseContainer baseContainer = new BaseContainer("bc prop val", base);
String generatedJson = MAPPER.writeValueAsString(baseContainer);
BaseContainer baseContainer2 = MAPPER.readValue(generatedJson, BaseContainer.class);
assertEquals("bc prop val", baseContainer.getBaseContainerProperty());
Base b = baseContainer2.getBase();
assertNotNull(b);
if (b.getClass() != Derived1.class) {
fail("Should have type Derived1, was "+b.getClass().getName());
}
Derived1 derived1 = (Derived1) b;
assertEquals("base prop val", derived1.getBaseProperty());
assertEquals("derived1 prop val", derived1.getDerived1Property());
}
// There seems to be some problems if type is also visible...
@Test
public void testIssue831() throws Exception
{
final String JSON = "{ \"petType\": \"dog\",\n"
+"\"pet\": { \"name\": \"Pluto\" }\n}";
House831 result = MAPPER.readValue(JSON, House831.class);
assertNotNull(result);
assertNotNull(result.pet);
assertSame(Dog.class, result.pet.getClass());
assertEquals("dog", result.petType);
}
// For [databind#118]
// Note: String works fine, since no type id will used; other scalar types have issues
@Test
public void testWithScalar118() throws Exception
{
ExternalTypeWithNonPOJO input = new ExternalTypeWithNonPOJO(new java.util.Date(123L));
String json = MAPPER.writeValueAsString(input);
assertNotNull(json);
// and back just to be sure:
ExternalTypeWithNonPOJO result = MAPPER.readValue(json, ExternalTypeWithNonPOJO.class);
assertNotNull(result.value);
assertTrue(result.value instanceof java.util.Date);
}
// For [databind#118] using "natural" type(s)
@Test
public void testWithNaturalScalar118() throws Exception
{
ExternalTypeWithNonPOJO input = new ExternalTypeWithNonPOJO(Integer.valueOf(13));
String json = MAPPER.writeValueAsString(input);
assertNotNull(json);
// and back just to be sure:
ExternalTypeWithNonPOJO result = MAPPER.readValue(json, ExternalTypeWithNonPOJO.class);
assertNotNull(result.value);
assertTrue(result.value instanceof Integer);
// ditto with others types
input = new ExternalTypeWithNonPOJO(Boolean.TRUE);
json = MAPPER.writeValueAsString(input);
assertNotNull(json);
result = MAPPER.readValue(json, ExternalTypeWithNonPOJO.class);
assertNotNull(result.value);
assertTrue(result.value instanceof Boolean);
input = new ExternalTypeWithNonPOJO("foobar");
json = MAPPER.writeValueAsString(input);
assertNotNull(json);
result = MAPPER.readValue(json, ExternalTypeWithNonPOJO.class);
assertNotNull(result.value);
assertTrue(result.value instanceof String);
assertEquals("foobar", result.value);
}
// For [databind#119]... and bit of [#167] as well
@Test
public void testWithAsValue() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.enable(DateTimeFeature.WRITE_DATES_AS_TIMESTAMPS)
.build();
ExternalTypeWithNonPOJO input = new ExternalTypeWithNonPOJO(new AsValueThingy(12345L));
String json = mapper.writeValueAsString(input);
assertNotNull(json);
assertEquals("{\"value\":12345,\"type\":\"thingy\"}", json);
// and get it back too:
ExternalTypeWithNonPOJO result = mapper.readValue(json, ExternalTypeWithNonPOJO.class);
assertNotNull(result);
assertNotNull(result.value);
assertEquals(AsValueThingy.class, result.value.getClass());
assertEquals(12345L, ((AsValueThingy) result.value).rawDate);
}
// for [databind#222]
@Test
public void testExternalTypeWithProp222() throws Exception
{
JsonMapper mapper = JsonMapper.builder()
.enable(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY)
.build();
Issue222Bean input = new Issue222Bean(13);
String json = mapper.writeValueAsString(input);
assertEquals("{\"type\":\"foo\",\"value\":{\"x\":13}}", json);
}
// [databind#928]
@Test
public void testInverseExternalId928() throws Exception
{
final String CLASS = Payload928.class.getName();
final ObjectMapper mapper = jsonMapperBuilder()
.polymorphicTypeValidator(new NoCheckSubTypeValidator())
.build();
final String successCase = "{\"payload\":{\"something\":\"test\"},\"class\":\""+CLASS+"\"}";
Envelope928 envelope1 = mapper.readValue(successCase, Envelope928.class);
assertNotNull(envelope1);
assertEquals(Payload928.class, envelope1._payload.getClass());
// and then re-ordered case that was problematic
final String failCase = "{\"class\":\""+CLASS+"\",\"payload\":{\"something\":\"test\"}}";
Envelope928 envelope2 = mapper.readValue(failCase, Envelope928.class);
assertNotNull(envelope2);
assertEquals(Payload928.class, envelope2._payload.getClass());
}
// for [databind#965]
@Test
public void testBigDecimal965() throws Exception
{
Wrapper965 w = new Wrapper965();
w.typeEnum = Type965.BIG_DECIMAL;
final String NUM_STR = "-10000000000.0000000001";
w.value = new BigDecimal(NUM_STR);
String json = MAPPER.writeValueAsString(w);
// simple sanity check so serialization is faithful
if (!json.contains(NUM_STR)) {
fail("JSON content should contain value '"+NUM_STR+"', does not appear to: "+json);
}
Wrapper965 w2 = MAPPER.readerFor(Wrapper965.class)
.with(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)
.readValue(json);
assertEquals(w.typeEnum, w2.typeEnum);
assertTrue(w.value.equals(w2.value),
String.format("Expected %s = %s; got back %s = %s",
w.value.getClass().getSimpleName(), w.value.toString(), w2.value.getClass().getSimpleName(), w2.value.toString()));
}
@Test
public void testBigDecimal965StringBased() throws Exception
{
Wrapper965 w = new Wrapper965();
w.typeEnum = Type965.BIG_DECIMAL;
final String NUM_STR = "-10000000000.0000000001";
w.value = new BigDecimal(NUM_STR);
String json = "{\"objectValue\":\"-10000000000.0000000001\",\"type\":\"BIG_DECIMAL\"}";
Wrapper965 w2 = MAPPER.readerFor(Wrapper965.class)
.with(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)
.readValue(json);
assertEquals(w.typeEnum, w2.typeEnum);
assertTrue(w.value.equals(w2.value),
String.format("Expected %s = %s; got back %s = %s",
w.value.getClass().getSimpleName(), w.value.toString(), w2.value.getClass().getSimpleName(), w2.value.toString()));
}
static | Wrapper965 |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolPB.java | {
"start": 1823,
"end": 1889
} | interface ____
* add annotations required for security.
*/
public | to |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/functional/RemoteIterators.java | {
"start": 20671,
"end": 21544
} | class ____ implements Closeable {
private Closeable toClose;
/**
* Construct.
* @param o object to close.
*/
private MaybeClose(Object o) {
this(o, true);
}
/**
* Construct -close the object if it is closeable and close==true.
* @param o object to close.
* @param close should close?
*/
private MaybeClose(Object o, boolean close) {
if (close && o instanceof Closeable) {
this.toClose = (Closeable) o;
} else {
this.toClose = null;
}
}
@Override
public void close() throws IOException {
if (toClose != null) {
try {
toClose.close();
} finally {
toClose = null;
}
}
}
}
/**
* An iterator which allows for a fast exit predicate.
* @param <S> source type
*/
private static final | MaybeClose |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRMAppSubmissionData.java | {
"start": 1764,
"end": 5596
} | class ____ {
private final List<ResourceRequest> amResourceRequests;
private final String name;
private final String user;
private final Map<ApplicationAccessType, String> acls;
private final boolean unmanaged;
private final String queue;
private final int maxAppAttempts;
private final String appType;
private final boolean waitForAccepted;
private final boolean keepContainers;
private final boolean isAppIdProvided;
private final ApplicationId applicationId;
private final long attemptFailuresValidityInterval;
private final LogAggregationContext logAggregationContext;
private final boolean cancelTokensWhenComplete;
private final Priority priority;
private final String amLabel;
private final Map<ApplicationTimeoutType, Long> applicationTimeouts;
private final ByteBuffer tokensConf;
private final Set<String> applicationTags;
private final String appNodeLabel;
private final Credentials credentials;
private final Resource resource;
public List<ResourceRequest> getAmResourceRequests() {
return amResourceRequests;
}
public String getName() {
return name;
}
public String getUser() {
return user;
}
public Map<ApplicationAccessType, String> getAcls() {
return acls;
}
public boolean isUnmanaged() {
return unmanaged;
}
public String getQueue() {
return queue;
}
public int getMaxAppAttempts() {
return maxAppAttempts;
}
public String getAppType() {
return appType;
}
public boolean isWaitForAccepted() {
return waitForAccepted;
}
public boolean isKeepContainers() {
return keepContainers;
}
public boolean isAppIdProvided() {
return isAppIdProvided;
}
public ApplicationId getApplicationId() {
return applicationId;
}
public long getAttemptFailuresValidityInterval() {
return attemptFailuresValidityInterval;
}
public LogAggregationContext getLogAggregationContext() {
return logAggregationContext;
}
public boolean isCancelTokensWhenComplete() {
return cancelTokensWhenComplete;
}
public Priority getPriority() {
return priority;
}
public String getAmLabel() {
return amLabel;
}
public Map<ApplicationTimeoutType, Long> getApplicationTimeouts() {
return applicationTimeouts;
}
public ByteBuffer getTokensConf() {
return tokensConf;
}
public Set<String> getApplicationTags() {
return applicationTags;
}
public String getAppNodeLabel() {
return appNodeLabel;
}
public Credentials getCredentials() {
return credentials;
}
public Resource getResource() {
return resource;
}
private MockRMAppSubmissionData(Builder builder) {
this.amLabel = builder.amLabel;
this.tokensConf = builder.tokensConf;
this.maxAppAttempts = builder.maxAppAttempts;
this.logAggregationContext = builder.logAggregationContext;
this.queue = builder.queue;
this.amResourceRequests = builder.amResourceRequests;
this.user = builder.user;
this.priority = builder.priority;
this.waitForAccepted = builder.waitForAccepted;
this.keepContainers = builder.keepContainers;
this.name = builder.name;
this.applicationId = builder.applicationId;
this.attemptFailuresValidityInterval =
builder.attemptFailuresValidityInterval;
this.acls = builder.acls;
this.appType = builder.appType;
this.appNodeLabel = builder.appNodeLabel;
this.isAppIdProvided = builder.isAppIdProvided;
this.unmanaged = builder.unmanaged;
this.applicationTags = builder.applicationTags;
this.cancelTokensWhenComplete = builder.cancelTokensWhenComplete;
this.applicationTimeouts = builder.applicationTimeouts;
this.credentials = builder.credentials;
this.resource = builder.resource;
}
/**
* Tests should use this | MockRMAppSubmissionData |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/util/TestSchemaResolver.java | {
"start": 1585,
"end": 3122
} | class ____ {
@Test
public void testResolving() throws IOException {
// Path testIdl = Paths.get(".", "src", "test", "idl",
// "cycle.avdl").toAbsolutePath();
// IdlReader parser = new IdlReader();
// IdlFile idlFile = parser.parse(testIdl);
// Protocol protocol = idlFile.getProtocol();
// System.out.println(protocol);
// Assert.assertEquals(5, protocol.getTypes().size());
}
@Test(expected = IllegalArgumentException.class)
public void testIsUnresolvedSchemaError1() {
// No "org.apache.avro.idl.unresolved.name" property
Schema s = SchemaBuilder.record("R").fields().endRecord();
SchemaResolver.getUnresolvedSchemaName(s);
}
@Test(expected = IllegalArgumentException.class)
public void testIsUnresolvedSchemaError2() {
// No "UnresolvedSchema" property
Schema s = SchemaBuilder.record("R").prop("org.apache.avro.idl.unresolved.name", "x").fields().endRecord();
SchemaResolver.getUnresolvedSchemaName(s);
}
@Test(expected = IllegalArgumentException.class)
public void testIsUnresolvedSchemaError3() {
// Namespace not "org.apache.avro.compiler".
Schema s = SchemaBuilder.record("UnresolvedSchema").prop("org.apache.avro.idl.unresolved.name", "x").fields()
.endRecord();
SchemaResolver.getUnresolvedSchemaName(s);
}
@Test(expected = IllegalArgumentException.class)
public void testGetUnresolvedSchemaNameError() {
Schema s = SchemaBuilder.fixed("a").size(10);
SchemaResolver.getUnresolvedSchemaName(s);
}
}
| TestSchemaResolver |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/DecodeJob.java | {
"start": 27142,
"end": 27508
} | enum ____ {
/** The initial stage. */
INITIALIZE,
/** Decode from a cached resource. */
RESOURCE_CACHE,
/** Decode from cached source data. */
DATA_CACHE,
/** Decode from retrieved source. */
SOURCE,
/** Encoding transformed resources after a successful load. */
ENCODE,
/** No more viable stages. */
FINISHED,
}
}
| Stage |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/aot/ContextAotProcessorTests.java | {
"start": 4934,
"end": 6111
} | class ____ extends ContextAotProcessor {
AnnotationConfigApplicationContext context;
DemoContextAotProcessor(Class<?> application, Path rootPath) {
this(application, rootPath.resolve("source"), rootPath.resolve("resource"), rootPath.resolve("class"));
}
DemoContextAotProcessor(Class<?> application, Path sourceOutput, Path resourceOutput, Path classOutput) {
super(application, createSettings(sourceOutput, resourceOutput, classOutput, "com.example", "example"));
}
private static Settings createSettings(Path sourceOutput, Path resourceOutput,
Path classOutput, String groupId, String artifactId) {
return Settings.builder()
.sourceOutput(sourceOutput)
.resourceOutput(resourceOutput)
.classOutput(classOutput)
.artifactId(artifactId)
.groupId(groupId)
.build();
}
@Override
protected GenericApplicationContext prepareApplicationContext(Class<?> application) {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(application);
this.context = context;
return context;
}
}
@Configuration(proxyBeanMethods = false)
static | DemoContextAotProcessor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ImpossibleNullComparisonTest.java | {
"start": 1028,
"end": 1600
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(ImpossibleNullComparison.class, getClass());
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(ImpossibleNullComparison.class, getClass());
@Test
public void scalarCases() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.bugpatterns.proto.ProtoTest.TestProtoMessage;
| ImpossibleNullComparisonTest |
java | apache__camel | components/camel-saxon/src/generated/java/org/apache/camel/component/xquery/XQueryComponentConfigurer.java | {
"start": 733,
"end": 4065
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
XQueryComponent target = (XQueryComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "configuration": target.setConfiguration(property(camelContext, net.sf.saxon.Configuration.class, value)); return true;
case "configurationproperties":
case "configurationProperties": target.setConfigurationProperties(property(camelContext, java.util.Map.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "moduleuriresolver":
case "moduleURIResolver": target.setModuleURIResolver(property(camelContext, net.sf.saxon.lib.ModuleURIResolver.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "configuration": return net.sf.saxon.Configuration.class;
case "configurationproperties":
case "configurationProperties": return java.util.Map.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "moduleuriresolver":
case "moduleURIResolver": return net.sf.saxon.lib.ModuleURIResolver.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
XQueryComponent target = (XQueryComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "configuration": return target.getConfiguration();
case "configurationproperties":
case "configurationProperties": return target.getConfigurationProperties();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "moduleuriresolver":
case "moduleURIResolver": return target.getModuleURIResolver();
default: return null;
}
}
@Override
public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "configurationproperties":
case "configurationProperties": return java.lang.Object.class;
default: return null;
}
}
}
| XQueryComponentConfigurer |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/validation/MembersInjectionValidator.java | {
"start": 1623,
"end": 5096
} | class ____ {
private final InjectionAnnotations injectionAnnotations;
@Inject
MembersInjectionValidator(InjectionAnnotations injectionAnnotations) {
this.injectionAnnotations = injectionAnnotations;
}
/** Reports errors if a request for a {@code MembersInjector<Foo>}) is invalid. */
ValidationReport validateMembersInjectionRequest(
XElement requestElement, XType membersInjectedType) {
ValidationReport.Builder report = ValidationReport.about(requestElement);
checkQualifiers(report, requestElement);
checkMembersInjectedType(report, membersInjectedType);
return report.build();
}
/**
* Reports errors if a members injection method on a component is invalid.
*
* @throws IllegalArgumentException if the method doesn't have exactly one parameter
*/
ValidationReport validateMembersInjectionMethod(
XMethodElement method, XType membersInjectedType) {
checkArgument(
method.getParameters().size() == 1, "expected a method with one parameter: %s", method);
ValidationReport.Builder report = ValidationReport.about(method);
checkQualifiers(report, method);
checkQualifiers(report, method.getParameters().get(0));
checkMembersInjectedType(report, membersInjectedType);
return report.build();
}
private void checkQualifiers(ValidationReport.Builder report, XElement element) {
for (XAnnotation qualifier : injectionAnnotations.getQualifiers(element)) {
report.addError("Cannot inject members into qualified types", element, qualifier);
break; // just report on the first qualifier, in case there is more than one
}
}
private void checkMembersInjectedType(ValidationReport.Builder report, XType type) {
// Only declared types can be members-injected.
if (!isDeclared(type)) {
report.addError("Cannot inject members into " + XTypes.toStableString(type));
return;
}
// If the type is the erasure of a generic type, that means the user referred to
// Foo<T> as just 'Foo', which we don't allow. (This is a judgement call; we
// *could* allow it and instantiate the type bounds, but we don't.)
if (isRawParameterizedType(type)) {
report.addError("Cannot inject members into raw type " + XTypes.toStableString(type));
return;
}
// If the type has arguments, validate that each type argument is declared.
// Otherwise the type argument may be a wildcard (or other type), and we can't
// resolve that to actual types. For array type arguments, validate the type of the array.
if (!type.getTypeArguments().stream().allMatch(this::isResolvableTypeArgument)) {
report.addError(
"Cannot inject members into types with unbounded type arguments: "
+ XTypes.toStableString(type));
}
}
// TODO(dpb): Can this be inverted so it explicitly rejects wildcards or type variables?
// This logic is hard to describe.
private boolean isResolvableTypeArgument(XType type) {
return isDeclared(type)
|| (isArray(type) && isResolvableArrayComponentType(asArray(type).getComponentType()));
}
private boolean isResolvableArrayComponentType(XType type) {
if (isDeclared(type)) {
return type.getTypeArguments().stream().allMatch(this::isResolvableTypeArgument);
} else if (isArray(type)) {
return isResolvableArrayComponentType(asArray(type).getComponentType());
}
return isPrimitive(type);
}
}
| MembersInjectionValidator |
java | grpc__grpc-java | examples/src/test/java/io/grpc/examples/routeguide/RouteGuideServerTest.java | {
"start": 2026,
"end": 11024
} | class ____ {
/**
* This rule manages automatic graceful shutdown for the registered channel at the end of test.
*/
@Rule
public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule();
private RouteGuideServer server;
private ManagedChannel inProcessChannel;
private Collection<Feature> features;
@Before
public void setUp() throws Exception {
// Generate a unique in-process server name.
String serverName = InProcessServerBuilder.generateName();
features = new ArrayList<>();
// Use directExecutor for both InProcessServerBuilder and InProcessChannelBuilder can reduce the
// usage timeouts and latches in test. But we still add timeout and latches where they would be
// needed if no directExecutor were used, just for demo purpose.
server = new RouteGuideServer(
InProcessServerBuilder.forName(serverName).directExecutor(), 0, features);
server.start();
// Create a client channel and register for automatic graceful shutdown.
inProcessChannel = grpcCleanup.register(
InProcessChannelBuilder.forName(serverName).directExecutor().build());
}
@After
public void tearDown() throws Exception {
server.stop();
}
@Test
public void getFeature() {
Point point = Point.newBuilder().setLongitude(1).setLatitude(1).build();
Feature unnamedFeature = Feature.newBuilder()
.setName("").setLocation(point).build();
RouteGuideGrpc.RouteGuideBlockingStub stub = RouteGuideGrpc.newBlockingStub(inProcessChannel);
// feature not found in the server
Feature feature = stub.getFeature(point);
assertEquals(unnamedFeature, feature);
// feature found in the server
Feature namedFeature = Feature.newBuilder()
.setName("name").setLocation(point).build();
features.add(namedFeature);
feature = stub.getFeature(point);
assertEquals(namedFeature, feature);
}
@Test
public void listFeatures() throws Exception {
// setup
Rectangle rect = Rectangle.newBuilder()
.setLo(Point.newBuilder().setLongitude(0).setLatitude(0).build())
.setHi(Point.newBuilder().setLongitude(10).setLatitude(10).build())
.build();
Feature f1 = Feature.newBuilder()
.setLocation(Point.newBuilder().setLongitude(-1).setLatitude(-1).build())
.setName("f1")
.build(); // not inside rect
Feature f2 = Feature.newBuilder()
.setLocation(Point.newBuilder().setLongitude(2).setLatitude(2).build())
.setName("f2")
.build();
Feature f3 = Feature.newBuilder()
.setLocation(Point.newBuilder().setLongitude(3).setLatitude(3).build())
.setName("f3")
.build();
Feature f4 = Feature.newBuilder()
.setLocation(Point.newBuilder().setLongitude(4).setLatitude(4).build())
.build(); // unamed
features.add(f1);
features.add(f2);
features.add(f3);
features.add(f4);
final List<Feature> result = new ArrayList<Feature>();
final CountDownLatch latch = new CountDownLatch(1);
StreamObserver<Feature> responseObserver =
new StreamObserver<Feature>() {
@Override
public void onNext(Feature value) {
result.add(value);
}
@Override
public void onError(Throwable t) {
fail();
}
@Override
public void onCompleted() {
latch.countDown();
}
};
RouteGuideGrpc.RouteGuideStub stub = RouteGuideGrpc.newStub(inProcessChannel);
// run
stub.listFeatures(rect, responseObserver);
assertTrue(latch.await(1, TimeUnit.SECONDS));
// verify
assertEquals(Arrays.asList(f2, f3), result);
}
@Test
public void recordRoute() {
Point p1 = Point.newBuilder().setLongitude(1000).setLatitude(1000).build();
Point p2 = Point.newBuilder().setLongitude(2000).setLatitude(2000).build();
Point p3 = Point.newBuilder().setLongitude(3000).setLatitude(3000).build();
Point p4 = Point.newBuilder().setLongitude(4000).setLatitude(4000).build();
Feature f1 = Feature.newBuilder().setLocation(p1).build(); // unamed
Feature f2 = Feature.newBuilder().setLocation(p2).setName("f2").build();
Feature f3 = Feature.newBuilder().setLocation(p3).setName("f3").build();
Feature f4 = Feature.newBuilder().setLocation(p4).build(); // unamed
features.add(f1);
features.add(f2);
features.add(f3);
features.add(f4);
@SuppressWarnings("unchecked")
StreamObserver<RouteSummary> responseObserver =
(StreamObserver<RouteSummary>) mock(StreamObserver.class);
RouteGuideGrpc.RouteGuideStub stub = RouteGuideGrpc.newStub(inProcessChannel);
ArgumentCaptor<RouteSummary> routeSummaryCaptor = ArgumentCaptor.forClass(RouteSummary.class);
StreamObserver<Point> requestObserver = stub.recordRoute(responseObserver);
requestObserver.onNext(p1);
requestObserver.onNext(p2);
requestObserver.onNext(p3);
requestObserver.onNext(p4);
verify(responseObserver, never()).onNext(any(RouteSummary.class));
requestObserver.onCompleted();
// allow some ms to let client receive the response. Similar usage later on.
verify(responseObserver, timeout(100)).onNext(routeSummaryCaptor.capture());
RouteSummary summary = routeSummaryCaptor.getValue();
assertEquals(45, summary.getDistance()); // 45 is the hard coded distance from p1 to p4.
assertEquals(2, summary.getFeatureCount());
verify(responseObserver, timeout(100)).onCompleted();
verify(responseObserver, never()).onError(any(Throwable.class));
}
  @Test
  public void routeChat() {
    // Bidirectional streaming test: the assertions below establish that for every
    // note sent, the server echoes back all *previously* received notes that share
    // the new note's location (the new note itself is not echoed back).
    // Mockito verification counts are cumulative, so timesOnNext tracks the running
    // total of expected responseObserver.onNext invocations.
    Point p1 = Point.newBuilder().setLongitude(1).setLatitude(1).build();
    Point p2 = Point.newBuilder().setLongitude(2).setLatitude(2).build();
    RouteNote n1 = RouteNote.newBuilder().setLocation(p1).setMessage("m1").build();
    RouteNote n2 = RouteNote.newBuilder().setLocation(p2).setMessage("m2").build();
    RouteNote n3 = RouteNote.newBuilder().setLocation(p1).setMessage("m3").build();
    RouteNote n4 = RouteNote.newBuilder().setLocation(p2).setMessage("m4").build();
    RouteNote n5 = RouteNote.newBuilder().setLocation(p1).setMessage("m5").build();
    RouteNote n6 = RouteNote.newBuilder().setLocation(p1).setMessage("m6").build();
    int timesOnNext = 0; // running total of expected onNext calls on the mock observer
    @SuppressWarnings("unchecked")
    StreamObserver<RouteNote> responseObserver =
        (StreamObserver<RouteNote>) mock(StreamObserver.class);
    RouteGuideGrpc.RouteGuideStub stub = RouteGuideGrpc.newStub(inProcessChannel);
    StreamObserver<RouteNote> requestObserver = stub.routeChat(responseObserver);
    verify(responseObserver, never()).onNext(any(RouteNote.class));
    // n1 and n2 are the first notes at their respective locations -> no echoes yet.
    requestObserver.onNext(n1);
    verify(responseObserver, never()).onNext(any(RouteNote.class));
    requestObserver.onNext(n2);
    verify(responseObserver, never()).onNext(any(RouteNote.class));
    // n3 is at p1, where n1 was already recorded -> expect exactly one echo: "m1".
    requestObserver.onNext(n3);
    ArgumentCaptor<RouteNote> routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
    // timeout(100) gives the asynchronous in-process response a short window to arrive.
    verify(responseObserver, timeout(100).times(++timesOnNext)).onNext(routeNoteCaptor.capture());
    RouteNote result = routeNoteCaptor.getValue();
    assertEquals(p1, result.getLocation());
    assertEquals("m1", result.getMessage());
    // n4 is at p2, where n2 was already recorded -> expect one new echo: "m2".
    requestObserver.onNext(n4);
    routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
    verify(responseObserver, timeout(100).times(++timesOnNext)).onNext(routeNoteCaptor.capture());
    // The fresh captor re-captures every onNext so far; the latest echo is the last element.
    result = routeNoteCaptor.getAllValues().get(timesOnNext - 1);
    assertEquals(p2, result.getLocation());
    assertEquals("m2", result.getMessage());
    // n5 is at p1, which now holds n1 and n3 -> expect two new echoes: "m1", "m3".
    requestObserver.onNext(n5);
    routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
    timesOnNext += 2;
    verify(responseObserver, timeout(100).times(timesOnNext)).onNext(routeNoteCaptor.capture());
    result = routeNoteCaptor.getAllValues().get(timesOnNext - 2);
    assertEquals(p1, result.getLocation());
    assertEquals("m1", result.getMessage());
    result = routeNoteCaptor.getAllValues().get(timesOnNext - 1);
    assertEquals(p1, result.getLocation());
    assertEquals("m3", result.getMessage());
    // n6 is at p1, which now holds n1, n3 and n5 -> expect three new echoes: "m1", "m3", "m5".
    requestObserver.onNext(n6);
    routeNoteCaptor = ArgumentCaptor.forClass(RouteNote.class);
    timesOnNext += 3;
    verify(responseObserver, timeout(100).times(timesOnNext)).onNext(routeNoteCaptor.capture());
    result = routeNoteCaptor.getAllValues().get(timesOnNext - 3);
    assertEquals(p1, result.getLocation());
    assertEquals("m1", result.getMessage());
    result = routeNoteCaptor.getAllValues().get(timesOnNext - 2);
    assertEquals(p1, result.getLocation());
    assertEquals("m3", result.getMessage());
    result = routeNoteCaptor.getAllValues().get(timesOnNext - 1);
    assertEquals(p1, result.getLocation());
    assertEquals("m5", result.getMessage());
    // Completing the client stream must complete the response stream without error.
    requestObserver.onCompleted();
    verify(responseObserver, timeout(100)).onCompleted();
    verify(responseObserver, never()).onError(any(Throwable.class));
  }
}
| RouteGuideServerTest |
java | apache__camel | components/camel-reactive-streams/src/test/java/org/apache/camel/component/reactive/streams/tck/CamelSubscriberConversionVerificationTest.java | {
"start": 1379,
"end": 2964
} | class ____ extends SubscriberBlackboxVerification<Integer> {
private CamelContext context;
public CamelSubscriberConversionVerificationTest() {
super(new TestEnvironment(2000L));
}
@Override
public Subscriber<Integer> createSubscriber() {
init();
RouteBuilder builder = new RouteBuilder() {
@Override
public void configure() {
from("reactive-streams:sub?maxInflightExchanges=20")
.to("log:INFO");
}
};
Subscriber<Integer> sub = CamelReactiveStreams.get(context).streamSubscriber("sub", Integer.class);
try {
builder.addRoutesToCamelContext(context);
context.start();
} catch (Exception e) {
throw new RuntimeCamelException(e);
}
return sub;
}
@Override
public Integer createElement(int element) {
return element;
}
protected void init() {
tearDown();
this.context = new DefaultCamelContext();
DefaultShutdownStrategy shutdownStrategy = new DefaultShutdownStrategy();
shutdownStrategy.setShutdownNowOnTimeout(true);
shutdownStrategy.setTimeout(1);
this.context.setShutdownStrategy(shutdownStrategy);
}
@AfterTest
protected void tearDown() {
try {
if (this.context != null) {
this.context.stop();
}
} catch (Exception ex) {
throw new RuntimeCamelException(ex);
}
}
}
| CamelSubscriberConversionVerificationTest |
java | quarkusio__quarkus | integration-tests/oidc-client-reactive/src/main/java/io/quarkus/it/keycloak/MisconfiguredClientFilter.java | {
"start": 357,
"end": 496
} | interface ____ {
@GET
@Produces("text/plain")
@Path("userNameReactive")
Uni<String> getUserName();
}
| MisconfiguredClientFilter |
java | quarkusio__quarkus | integration-tests/injectmock/src/test/java/io/quarkus/it/mockbean/PerClassSpyTest.java | {
"start": 1070,
"end": 1182
} | class ____ {
public String call(String input) {
return input;
}
}
}
| IdentityService |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/LambdaMonoSubscriberTest.java | {
"start": 10975,
"end": 11250
} | class ____ implements Subscription {
volatile boolean isCancelled = false;
volatile long requested = -1L;
@Override
public void request(long n) {
this.requested = n;
}
@Override
public void cancel() {
this.isCancelled = true;
}
}
}
| TestSubscription |
java | netty__netty | resolver/src/main/java/io/netty/resolver/AddressResolver.java | {
"start": 960,
"end": 3157
} | interface ____<T extends SocketAddress> extends Closeable {
/**
* Returns {@code true} if and only if the specified address is supported by this resolved.
*/
boolean isSupported(SocketAddress address);
/**
* Returns {@code true} if and only if the specified address has been resolved.
*
* @throws UnsupportedAddressTypeException if the specified address is not supported by this resolver
*/
boolean isResolved(SocketAddress address);
/**
* Resolves the specified address. If the specified address is resolved already, this method does nothing
* but returning the original address.
*
* @param address the address to resolve
*
* @return the {@link SocketAddress} as the result of the resolution
*/
Future<T> resolve(SocketAddress address);
/**
* Resolves the specified address. If the specified address is resolved already, this method does nothing
* but returning the original address.
*
* @param address the address to resolve
* @param promise the {@link Promise} which will be fulfilled when the name resolution is finished
*
* @return the {@link SocketAddress} as the result of the resolution
*/
Future<T> resolve(SocketAddress address, Promise<T> promise);
/**
* Resolves the specified address. If the specified address is resolved already, this method does nothing
* but returning the original address.
*
* @param address the address to resolve
*
* @return the list of the {@link SocketAddress}es as the result of the resolution
*/
Future<List<T>> resolveAll(SocketAddress address);
/**
* Resolves the specified address. If the specified address is resolved already, this method does nothing
* but returning the original address.
*
* @param address the address to resolve
* @param promise the {@link Promise} which will be fulfilled when the name resolution is finished
*
* @return the list of the {@link SocketAddress}es as the result of the resolution
*/
Future<List<T>> resolveAll(SocketAddress address, Promise<List<T>> promise);
/**
* Closes all the resources allocated and used by this resolver.
*/
@Override
void close();
}
| AddressResolver |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/QueueAclsInfo.java | {
"start": 1345,
"end": 2582
} | class ____ implements Writable {
private String queueName;
private String[] operations;
/**
* Default constructor for QueueAclsInfo.
*
*/
public QueueAclsInfo() {
}
/**
* Construct a new QueueAclsInfo object using the queue name and the
* queue operations array
*
* @param queueName Name of the job queue
* @param operations
*/
public QueueAclsInfo(String queueName, String[] operations) {
this.queueName = queueName;
this.operations = operations;
}
/**
* Get queue name.
*
* @return name
*/
public String getQueueName() {
return queueName;
}
protected void setQueueName(String queueName) {
this.queueName = queueName;
}
/**
* Get opearations allowed on queue.
*
* @return array of String
*/
public String[] getOperations() {
return operations;
}
@Override
public void readFields(DataInput in) throws IOException {
queueName = StringInterner.weakIntern(Text.readString(in));
operations = WritableUtils.readStringArray(in);
}
@Override
public void write(DataOutput out) throws IOException {
Text.writeString(out, queueName);
WritableUtils.writeStringArray(out, operations);
}
}
| QueueAclsInfo |
java | quarkusio__quarkus | integration-tests/test-extension/tests/src/test/java/io/quarkus/it/extension/ParamsTest.java | {
"start": 4991,
"end": 5163
} | class ____ {
private final Map<String, String> map = Map.of();
Map<String, String> getMap() {
return map;
}
}
static | TestObject2 |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/response/GoogleAiStudioEmbeddingsResponseEntity.java | {
"start": 1444,
"end": 4117
} | class ____ {
private static final String FAILED_TO_FIND_FIELD_TEMPLATE =
"Failed to find required field [%s] in Google AI Studio embeddings response";
/**
* Parses the Google AI Studio batch embeddings response (will be used for single and batch embeddings).
* For a request like:
*
* <pre>
* <code>
* {
* "inputs": ["Embed this", "Embed this, too"]
* }
* </code>
* </pre>
*
* The response would look like:
*
* <pre>
* <code>
* {
* "embeddings": [
* {
* "values": [
* -0.00606332,
* 0.058092743,
* -0.06390548
* ]
* },
* {
* "values": [
* -0.00606332,
* -0.06390548,
* 0.058092743
* ]
* }
* ]
* }
*
* </code>
* </pre>
*/
public static DenseEmbeddingFloatResults fromResponse(Request request, HttpResult response) throws IOException {
var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
moveToFirstToken(jsonParser);
XContentParser.Token token = jsonParser.currentToken();
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser);
positionParserAtTokenAfterField(jsonParser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE);
List<DenseEmbeddingFloatResults.Embedding> embeddingList = parseList(
jsonParser,
GoogleAiStudioEmbeddingsResponseEntity::parseEmbeddingObject
);
return new DenseEmbeddingFloatResults(embeddingList);
}
}
private static DenseEmbeddingFloatResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
positionParserAtTokenAfterField(parser, "values", FAILED_TO_FIND_FIELD_TEMPLATE);
List<Float> embeddingValuesList = parseList(parser, XContentUtils::parseFloat);
// parse and discard the rest of the object
consumeUntilObjectEnd(parser);
return DenseEmbeddingFloatResults.Embedding.of(embeddingValuesList);
}
private GoogleAiStudioEmbeddingsResponseEntity() {}
}
| GoogleAiStudioEmbeddingsResponseEntity |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java | {
"start": 1725,
"end": 5277
} | class ____ extends ValuesSourceAggregatorFactory {
private final HistogramAggregatorSupplier aggregatorSupplier;
private final double interval, offset;
private final BucketOrder order;
private final boolean keyed;
private final long minDocCount;
private final DoubleBounds extendedBounds;
private final DoubleBounds hardBounds;
static void registerAggregators(ValuesSourceRegistry.Builder builder) {
builder.register(HistogramAggregationBuilder.REGISTRY_KEY, CoreValuesSourceType.RANGE, RangeHistogramAggregator::new, true);
builder.register(
HistogramAggregationBuilder.REGISTRY_KEY,
List.of(
CoreValuesSourceType.NUMERIC,
CoreValuesSourceType.DATE,
CoreValuesSourceType.BOOLEAN,
TimeSeriesValuesSourceType.COUNTER
),
NumericHistogramAggregator::new,
true
);
}
public HistogramAggregatorFactory(
String name,
ValuesSourceConfig config,
double interval,
double offset,
BucketOrder order,
boolean keyed,
long minDocCount,
DoubleBounds extendedBounds,
DoubleBounds hardBounds,
AggregationContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metadata,
HistogramAggregatorSupplier aggregatorSupplier
) throws IOException {
super(name, config, context, parent, subFactoriesBuilder, metadata);
this.aggregatorSupplier = aggregatorSupplier;
this.interval = interval;
this.offset = offset;
this.order = order;
this.keyed = keyed;
this.minDocCount = minDocCount;
this.extendedBounds = extendedBounds;
this.hardBounds = hardBounds;
}
@Override
protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException {
// If min_doc_count is provided, we do not support them being larger than 1
// This is because we cannot be sure about their relative scale when sampled
if (getSamplingContext().map(SamplingContext::isSampled).orElse(false)) {
if (minDocCount > 1) {
throw new ElasticsearchStatusException(
"aggregation [{}] is within a sampling context; " + "min_doc_count, provided [{}], cannot be greater than 1",
RestStatus.BAD_REQUEST,
name(),
minDocCount
);
}
}
return aggregatorSupplier.build(
name,
factories,
interval,
offset,
order,
keyed,
minDocCount,
extendedBounds,
hardBounds,
config,
context,
parent,
cardinality,
metadata
);
}
@Override
protected Aggregator createUnmapped(Aggregator parent, Map<String, Object> metadata) throws IOException {
return new NumericHistogramAggregator(
name,
factories,
interval,
offset,
order,
keyed,
minDocCount,
extendedBounds,
hardBounds,
config,
context,
parent,
CardinalityUpperBound.NONE,
metadata
);
}
}
| HistogramAggregatorFactory |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/wall/spi/MySqlWallVisitor.java | {
"start": 1432,
"end": 6548
} | class ____ extends WallVisitorBase implements WallVisitor, MySqlASTVisitor {
public MySqlWallVisitor(WallProvider provider) {
super(provider);
}
@Override
public DbType getDbType() {
return DbType.mysql;
}
@Override
public boolean visit(MySqlSelectQueryBlock x) {
WallVisitorUtils.checkSelect(this, x);
return true;
}
@Override
public boolean visit(MySqlDeleteStatement x) {
WallVisitorUtils.checkReadOnly(this, x.getFrom());
return visit((SQLDeleteStatement) x);
}
@Override
public boolean visit(MySqlUpdateStatement x) {
return visit((SQLUpdateStatement) x);
}
@Override
public boolean visit(MySqlInsertStatement x) {
return visit((SQLInsertStatement) x);
}
public boolean visit(SQLIdentifierExpr x) {
return true;
}
public boolean visit(SQLPropertyExpr x) {
if (x.getOwner() instanceof SQLVariantRefExpr) {
SQLVariantRefExpr varExpr = (SQLVariantRefExpr) x.getOwner();
SQLObject parent = x.getParent();
String varName = varExpr.getName();
if (varName.equalsIgnoreCase("@@session") || varName.equalsIgnoreCase("@@global")) {
if (!(parent instanceof SQLSelectItem) && !(parent instanceof SQLAssignItem)) {
violations.add(new IllegalSQLObjectViolation(ErrorCode.VARIANT_DENY,
"variable in condition not allow", toSQL(x)));
return false;
}
if (!checkVar(x.getParent(), x.getName())) {
boolean isTop = WallVisitorUtils.isTopNoneFromSelect(this, x);
if (!isTop) {
boolean allow = true;
if (isDeny(varName)
&& (WallVisitorUtils.isWhereOrHaving(x) || WallVisitorUtils.checkSqlExpr(varExpr))) {
allow = false;
}
if (!allow) {
violations.add(new IllegalSQLObjectViolation(ErrorCode.VARIANT_DENY,
"variable not allow : " + x.getName(),
toSQL(x)));
}
}
}
return false;
}
}
WallVisitorUtils.check(this, x);
return true;
}
public boolean checkVar(SQLObject parent, String varName) {
if (varName == null) {
return false;
}
if (varName.equals("?")) {
return true;
}
if (!config.isVariantCheck()) {
return true;
}
if (varName.startsWith("@@")) {
if (!(parent instanceof SQLSelectItem) && !(parent instanceof SQLAssignItem)) {
return false;
}
varName = varName.substring(2);
}
if (config.getPermitVariants().contains(varName)) {
return true;
}
return false;
}
public boolean isDeny(String varName) {
if (varName.startsWith("@@")) {
varName = varName.substring(2);
}
varName = varName.toLowerCase();
return config.getDenyVariants().contains(varName);
}
public boolean visit(SQLVariantRefExpr x) {
String varName = x.getName();
if (varName == null) {
return false;
}
if (varName.startsWith("@@") && !checkVar(x.getParent(), x.getName())) {
final WallTopStatementContext topStatementContext = WallVisitorUtils.getWallTopStatementContext();
if (topStatementContext != null
&& (topStatementContext.fromSysSchema() || topStatementContext.fromSysTable())) {
return false;
}
boolean isTop = WallVisitorUtils.isTopNoneFromSelect(this, x);
if (!isTop) {
boolean allow = true;
if (isDeny(varName) && (WallVisitorUtils.isWhereOrHaving(x) || WallVisitorUtils.checkSqlExpr(x))) {
allow = false;
}
if (!allow) {
violations.add(new IllegalSQLObjectViolation(ErrorCode.VARIANT_DENY, "variable not allow : "
+ x.getName(), toSQL(x)));
}
}
}
return false;
}
@Override
public boolean visit(MySqlOutFileExpr x) {
if (!config.isSelectIntoOutfileAllow() && !WallVisitorUtils.isTopSelectOutFile(x)) {
violations.add(new IllegalSQLObjectViolation(ErrorCode.INTO_OUTFILE, "into out file not allow", toSQL(x)));
}
return true;
}
@Override
public boolean isDenyTable(String name) {
if (!config.isTableCheck()) {
return false;
}
return !this.provider.checkDenyTable(name);
}
@Override
public boolean visit(MySqlCreateTableStatement x) {
WallVisitorUtils.check(this, x);
return true;
}
}
| MySqlWallVisitor |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenIT0139InterpolationWithProjectPrefixTest.java | {
"start": 1096,
"end": 3860
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Test that expressions of the form ${project.*} resolve correctly to POM values.
*
* @throws Exception in case of failure
*/
@Test
public void testit0139() throws Exception {
File testDir = extractResources("/it0139");
File child = new File(testDir, "child");
Verifier verifier = newVerifier(child.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("initialize");
verifier.execute();
verifier.verifyErrorFreeLog();
Properties props = verifier.loadProperties("target/interpolated.properties");
String prefix = "project.properties.";
assertEquals(child.getCanonicalFile(), new File(props.getProperty(prefix + "projectDir")).getCanonicalFile());
assertEquals("org.apache.maven.its.it0139.child", props.getProperty(prefix + "projectGroupId"));
assertEquals("child", props.getProperty(prefix + "projectArtifactId"));
assertEquals("2.0-alpha-1", props.getProperty(prefix + "projectVersion"));
assertEquals("jar", props.getProperty(prefix + "projectPackaging"));
assertEquals("child-name", props.getProperty(prefix + "projectName"));
assertEquals("child-desc", props.getProperty(prefix + "projectDesc"));
assertEquals("http://child.org/", props.getProperty(prefix + "projectUrl"));
assertEquals("2008", props.getProperty(prefix + "projectYear"));
assertEquals("child-org-name", props.getProperty(prefix + "projectOrgName"));
assertEquals("2.0.0", props.getProperty(prefix + "projectPrereqMvn"));
assertEquals("http://scm.org/", props.getProperty(prefix + "projectScmUrl"));
assertEquals("http://issue.org/", props.getProperty(prefix + "projectIssueUrl"));
assertEquals("http://ci.org/", props.getProperty(prefix + "projectCiUrl"));
assertEquals("child-dist-repo", props.getProperty(prefix + "projectDistRepoName"));
assertEquals("org.apache.maven.its.it0139", props.getProperty(prefix + "parentGroupId"));
assertEquals("parent", props.getProperty(prefix + "parentArtifactId"));
assertEquals("1.0", props.getProperty(prefix + "parentVersion"));
/*
* NOTE: We intentionally do not check whether the build paths have been basedir aligned, that's another
* story...
*/
// Inline version check: (2.0.8,) - current Maven version matches
assertTrue(props.getProperty(prefix + "projectBuildOut").endsWith("bin"));
assertTrue(props.getProperty(prefix + "projectSiteOut").endsWith("doc"));
}
}
| MavenIT0139InterpolationWithProjectPrefixTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java | {
"start": 4217,
"end": 140338
} | class ____ extends ESTestCase {
public void testFindAliases() {
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(
IndexMetadata.builder("index")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetadata.builder("alias1").build())
.putAlias(AliasMetadata.builder("alias2").build())
)
.put(
IndexMetadata.builder("index2")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetadata.builder("alias2").build())
.putAlias(AliasMetadata.builder("alias3").build())
)
.build();
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT);
Map<String, List<AliasMetadata>> aliases = project.findAliases(request.aliases(), Strings.EMPTY_ARRAY);
assertThat(aliases, anEmptyMap());
}
{
final GetAliasesRequest request;
if (randomBoolean()) {
request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT);
} else {
request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT, randomFrom("alias1", "alias2"));
// replacing with empty aliases behaves as if aliases were unspecified at request building
request.replaceAliases(Strings.EMPTY_ARRAY);
}
Map<String, List<AliasMetadata>> aliases = project.findAliases(request.aliases(), new String[] { "index" });
assertThat(aliases, aMapWithSize(1));
List<AliasMetadata> aliasMetadataList = aliases.get("index");
assertThat(aliasMetadataList, transformedItemsMatch(AliasMetadata::alias, contains("alias1", "alias2")));
}
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT, "alias*");
Map<String, List<AliasMetadata>> aliases = project.findAliases(request.aliases(), new String[] { "index", "index2" });
assertThat(aliases, aMapWithSize(2));
List<AliasMetadata> indexAliasMetadataList = aliases.get("index");
assertThat(indexAliasMetadataList, transformedItemsMatch(AliasMetadata::alias, contains("alias1", "alias2")));
List<AliasMetadata> index2AliasMetadataList = aliases.get("index2");
assertThat(index2AliasMetadataList, transformedItemsMatch(AliasMetadata::alias, contains("alias2", "alias3")));
}
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT, "alias1");
Map<String, List<AliasMetadata>> aliases = project.findAliases(request.aliases(), new String[] { "index" });
assertThat(aliases, aMapWithSize(1));
List<AliasMetadata> aliasMetadataList = aliases.get("index");
assertThat(aliasMetadataList, transformedItemsMatch(AliasMetadata::alias, contains("alias1")));
}
{
Map<String, List<AliasMetadata>> aliases = project.findAllAliases(new String[] { "index" });
assertThat(aliases, aMapWithSize(1));
List<AliasMetadata> aliasMetadataList = aliases.get("index");
assertThat(aliasMetadataList, transformedItemsMatch(AliasMetadata::alias, contains("alias1", "alias2")));
}
{
Map<String, List<AliasMetadata>> aliases = project.findAllAliases(Strings.EMPTY_ARRAY);
assertThat(aliases, anEmptyMap());
}
}
public void testFindDataStreamAliases() {
ProjectMetadata.Builder builder = ProjectMetadata.builder(randomProjectIdOrDefault());
addDataStream("d1", builder);
addDataStream("d2", builder);
addDataStream("d3", builder);
addDataStream("d4", builder);
builder.put("alias1", "d1", null, null);
builder.put("alias2", "d2", null, null);
builder.put("alias2-part2", "d2", null, null);
ProjectMetadata project = builder.build();
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT);
Map<String, List<DataStreamAlias>> aliases = project.findDataStreamAliases(request.aliases(), Strings.EMPTY_ARRAY);
assertThat(aliases, anEmptyMap());
}
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT).aliases("alias1");
Map<String, List<DataStreamAlias>> aliases = project.findDataStreamAliases(request.aliases(), new String[] { "index" });
assertThat(aliases, anEmptyMap());
}
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT).aliases("alias1");
Map<String, List<DataStreamAlias>> aliases = project.findDataStreamAliases(
request.aliases(),
new String[] { "index", "d1", "d2" }
);
assertEquals(1, aliases.size());
List<DataStreamAlias> found = aliases.get("d1");
assertThat(found, transformedItemsMatch(DataStreamAlias::getAlias, contains("alias1")));
}
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT).aliases("ali*");
Map<String, List<DataStreamAlias>> aliases = project.findDataStreamAliases(request.aliases(), new String[] { "index", "d2" });
assertEquals(1, aliases.size());
List<DataStreamAlias> found = aliases.get("d2");
assertThat(found, transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
}
// test exclusion
{
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT).aliases("*");
Map<String, List<DataStreamAlias>> aliases = project.findDataStreamAliases(
request.aliases(),
new String[] { "index", "d1", "d2", "d3", "d4" }
);
assertThat(aliases.get("d2"), transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
assertThat(aliases.get("d1"), transformedItemsMatch(DataStreamAlias::getAlias, contains("alias1")));
request.aliases("*", "-alias1");
aliases = project.findDataStreamAliases(request.aliases(), new String[] { "index", "d1", "d2", "d3", "d4" });
assertThat(aliases.get("d2"), transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
assertNull(aliases.get("d1"));
}
}
public void testDataStreamAliasesByDataStream() {
ProjectMetadata.Builder builder = ProjectMetadata.builder(randomProjectIdOrDefault());
addDataStream("d1", builder);
addDataStream("d2", builder);
addDataStream("d3", builder);
addDataStream("d4", builder);
builder.put("alias1", "d1", null, null);
builder.put("alias2", "d2", null, null);
builder.put("alias2-part2", "d2", null, null);
ProjectMetadata project = builder.build();
var aliases = project.dataStreamAliasesByDataStream();
assertTrue(aliases.containsKey("d1"));
assertTrue(aliases.containsKey("d2"));
assertFalse(aliases.containsKey("d3"));
assertFalse(aliases.containsKey("d4"));
assertEquals(1, aliases.get("d1").size());
assertEquals(2, aliases.get("d2").size());
assertThat(aliases.get("d2"), transformedItemsMatch(DataStreamAlias::getAlias, containsInAnyOrder("alias2", "alias2-part2")));
}
public void testFindAliasWithExclusion() {
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(
IndexMetadata.builder("index")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetadata.builder("alias1").build())
.putAlias(AliasMetadata.builder("alias2").build())
)
.put(
IndexMetadata.builder("index2")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetadata.builder("alias1").build())
.putAlias(AliasMetadata.builder("alias3").build())
)
.build();
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT).aliases("*", "-alias1");
Map<String, List<AliasMetadata>> aliases = project.findAliases(request.aliases(), new String[] { "index", "index2" });
assertThat(aliases.get("index"), transformedItemsMatch(AliasMetadata::alias, contains("alias2")));
assertThat(aliases.get("index2"), transformedItemsMatch(AliasMetadata::alias, contains("alias3")));
}
public void testFindDataStreams() {
final int numIndices = randomIntBetween(2, 5);
final int numBackingIndices = randomIntBetween(2, 5);
final String dataStreamName = "my-data-stream";
CreateIndexResult result = createIndices(numIndices, numBackingIndices, dataStreamName);
List<Index> allIndices = new ArrayList<>(result.indices);
allIndices.addAll(result.backingIndices);
String[] concreteIndices = allIndices.stream().map(Index::getName).toArray(String[]::new);
Map<String, DataStream> dataStreams = result.project.findDataStreams(concreteIndices);
assertThat(dataStreams, aMapWithSize(numBackingIndices));
for (Index backingIndex : result.backingIndices) {
assertThat(dataStreams, hasKey(backingIndex.getName()));
assertThat(dataStreams.get(backingIndex.getName()).getName(), equalTo(dataStreamName));
}
}
public void testFindAliasWithExclusionAndOverride() {
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(
IndexMetadata.builder("index")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetadata.builder("aa").build())
.putAlias(AliasMetadata.builder("ab").build())
.putAlias(AliasMetadata.builder("bb").build())
)
.build();
GetAliasesRequest request = new GetAliasesRequest(TEST_REQUEST_TIMEOUT).aliases("a*", "-*b", "b*");
List<AliasMetadata> aliases = project.findAliases(request.aliases(), new String[] { "index" }).get("index");
assertThat(aliases, transformedItemsMatch(AliasMetadata::alias, contains("aa", "bb")));
}
public void testAliasCollidingWithAnExistingIndex() {
int indexCount = randomIntBetween(10, 100);
Set<String> indices = Sets.newHashSetWithExpectedSize(indexCount);
for (int i = 0; i < indexCount; i++) {
indices.add(randomAlphaOfLength(10));
}
Map<String, Set<String>> aliasToIndices = new HashMap<>();
for (String alias : randomSubsetOf(randomIntBetween(1, 10), indices)) {
Set<String> indicesInAlias;
do {
indicesInAlias = new HashSet<>(randomSubsetOf(randomIntBetween(1, 3), indices));
indicesInAlias.remove(alias);
} while (indicesInAlias.isEmpty());
aliasToIndices.put(alias, indicesInAlias);
}
int properAliases = randomIntBetween(0, 3);
for (int i = 0; i < properAliases; i++) {
aliasToIndices.put(randomAlphaOfLength(5), new HashSet<>(randomSubsetOf(randomIntBetween(1, 3), indices)));
}
ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
for (String index : indices) {
IndexMetadata.Builder indexBuilder = IndexMetadata.builder(index)
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0);
aliasToIndices.forEach((key, value) -> {
if (value.contains(index)) {
indexBuilder.putAlias(AliasMetadata.builder(key).build());
}
});
projectBuilder.put(indexBuilder);
}
Exception e = expectThrows(IllegalStateException.class, projectBuilder::build);
assertThat(e.getMessage(), startsWith("index, alias, and data stream names need to be unique"));
}
public void testValidateAliasWriteOnly() {
String alias = randomAlphaOfLength(5);
String indexA = randomAlphaOfLength(6);
String indexB = randomAlphaOfLength(7);
Boolean aWriteIndex = randomBoolean() ? null : randomBoolean();
Boolean bWriteIndex;
if (Boolean.TRUE.equals(aWriteIndex)) {
bWriteIndex = randomFrom(Boolean.FALSE, null);
} else {
bWriteIndex = randomFrom(Boolean.TRUE, Boolean.FALSE, null);
}
// when only one index/alias pair exist
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(buildIndexMetadata(indexA, alias, aWriteIndex))
.build();
// when alias points to two indices, but valid
// one of the following combinations: [(null, null), (null, true), (null, false), (false, false)]
ProjectMetadata.builder(project).put(buildIndexMetadata(indexB, alias, bWriteIndex)).build();
// when too many write indices
Exception exception = expectThrows(IllegalStateException.class, () -> {
IndexMetadata.Builder metaA = buildIndexMetadata(indexA, alias, true);
IndexMetadata.Builder metaB = buildIndexMetadata(indexB, alias, true);
ProjectMetadata.builder(randomProjectIdOrDefault()).put(metaA).put(metaB).build();
});
assertThat(exception.getMessage(), startsWith("alias [" + alias + "] has more than one write index ["));
}
/**
 * An alias must be consistently hidden (or not hidden) across every index it points to;
 * a mix of hidden and non-hidden alias definitions must fail project validation.
 */
public void testValidateHiddenAliasConsistency() {
    String aliasName = randomAlphaOfLength(5);
    String firstIndex = randomAlphaOfLength(6);
    String secondIndex = randomAlphaOfLength(7);
    {
        // hidden on the first index, not hidden (or unset) on the second
        Exception error = expectThrows(
            IllegalStateException.class,
            () -> buildMetadataWithHiddenIndexMix(aliasName, firstIndex, true, secondIndex, randomFrom(false, null)).build()
        );
        assertThat(error.getMessage(), containsString("has is_hidden set to true on indices"));
    }
    {
        // not hidden (or unset) on the first index, hidden on the second
        Exception error = expectThrows(
            IllegalStateException.class,
            () -> buildMetadataWithHiddenIndexMix(aliasName, firstIndex, randomFrom(false, null), secondIndex, true).build()
        );
        assertThat(error.getMessage(), containsString("has is_hidden set to true on indices"));
    }
}
/**
 * Builds a project containing two indices that both carry {@code aliasName}, with the
 * alias' {@code is_hidden} flag set (possibly to {@code null}) per index.
 */
private ProjectMetadata.Builder buildMetadataWithHiddenIndexMix(
    String aliasName,
    String indexAName,
    Boolean indexAHidden,
    String indexBName,
    Boolean indexBHidden
) {
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
    projectBuilder.put(indexWithHiddenAlias(indexAName, aliasName, indexAHidden));
    projectBuilder.put(indexWithHiddenAlias(indexBName, aliasName, indexBHidden));
    return projectBuilder;
}

/** One-shard/zero-replica index carrying {@code aliasName} with the given hidden flag. */
private static IndexMetadata.Builder indexWithHiddenAlias(String indexName, String aliasName, Boolean isHidden) {
    return IndexMetadata.builder(indexName)
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder(aliasName).isHidden(isHidden).build());
}
/**
 * Verifies routing resolution for read operations: explicit routing passes through for plain
 * indices and unrouted aliases, alias-level routing wins over none, a conflict between alias
 * routing and request routing is rejected, multi-valued alias routing is rejected, and an
 * alias spanning several indices cannot resolve a routing value at all.
 */
public void testResolveIndexRouting() {
IndexMetadata.Builder builder = IndexMetadata.builder("index")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetadata.builder("alias0").build())
.putAlias(AliasMetadata.builder("alias1").routing("1").build())
.putAlias(AliasMetadata.builder("alias2").routing("1,2").build());
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault()).put(builder).build();
// no alias, no index
assertNull(project.resolveIndexRouting(null, null));
assertEquals(project.resolveIndexRouting("0", null), "0");
// index, no alias
assertNull(project.resolveIndexRouting(null, "index"));
assertEquals(project.resolveIndexRouting("0", "index"), "0");
// alias with no index routing
assertNull(project.resolveIndexRouting(null, "alias0"));
assertEquals(project.resolveIndexRouting("0", "alias0"), "0");
// alias with index routing.
assertEquals(project.resolveIndexRouting(null, "alias1"), "1");
Exception ex = expectThrows(IllegalArgumentException.class, () -> project.resolveIndexRouting("0", "alias1"));
assertThat(
ex.getMessage(),
is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")
);
// alias with invalid index routing.
ex = expectThrows(IllegalArgumentException.class, () -> project.resolveIndexRouting(null, "alias2"));
assertThat(
ex.getMessage(),
is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")
);
ex = expectThrows(IllegalArgumentException.class, () -> project.resolveIndexRouting("1", "alias2"));
assertThat(
ex.getMessage(),
is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")
);
// a second index joining "alias0" makes the alias ambiguous for routing resolution
IndexMetadata.Builder builder2 = IndexMetadata.builder("index2")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetadata.builder("alias0").build());
ProjectMetadata projectTwoIndices = ProjectMetadata.builder(project).put(builder2).build();
// alias with multiple indices
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> projectTwoIndices.resolveIndexRouting("1", "alias0")
);
assertThat(exception.getMessage(), startsWith("Alias [alias0] has more than one index associated with it"));
}
/**
 * Verifies routing resolution for write operations against aliases: the write index's alias
 * routing wins, conflicting or multi-valued routing is rejected, and an alias with no write
 * index cannot be written to.
 */
public void testResolveWriteIndexRouting() {
    AliasMetadata.Builder aliasZeroBuilder = AliasMetadata.builder("alias0");
    if (randomBoolean()) {
        aliasZeroBuilder.writeIndex(true);
    }
    IndexMetadata.Builder builder = IndexMetadata.builder("index")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(aliasZeroBuilder.build())
        .putAlias(AliasMetadata.builder("alias1").routing("1").build())
        .putAlias(AliasMetadata.builder("alias2").routing("1,2").build())
        .putAlias(AliasMetadata.builder("alias3").writeIndex(false).build())
        .putAlias(AliasMetadata.builder("alias4").routing("1,2").writeIndex(true).build());
    ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault()).put(builder).build();
    // no alias, no index
    assertNull(project.resolveWriteIndexRouting(null, null));
    assertEquals(project.resolveWriteIndexRouting("0", null), "0");
    // index, no alias
    assertNull(project.resolveWriteIndexRouting(null, "index"));
    assertEquals(project.resolveWriteIndexRouting("0", "index"), "0");
    // alias with no index routing
    assertNull(project.resolveWriteIndexRouting(null, "alias0"));
    assertEquals(project.resolveWriteIndexRouting("0", "alias0"), "0");
    // alias with index routing.
    assertEquals(project.resolveWriteIndexRouting(null, "alias1"), "1");
    Exception exception = expectThrows(IllegalArgumentException.class, () -> project.resolveWriteIndexRouting("0", "alias1"));
    assertThat(
        exception.getMessage(),
        is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation")
    );
    // alias with invalid index routing.
    exception = expectThrows(IllegalArgumentException.class, () -> project.resolveWriteIndexRouting(null, "alias2"));
    assertThat(
        exception.getMessage(),
        is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")
    );
    exception = expectThrows(IllegalArgumentException.class, () -> project.resolveWriteIndexRouting("1", "alias2"));
    assertThat(
        exception.getMessage(),
        is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation")
    );
    exception = expectThrows(IllegalArgumentException.class, () -> project.resolveWriteIndexRouting(randomFrom("1", null), "alias4"));
    assertThat(
        exception.getMessage(),
        is("index/alias [alias4] provided with routing value [1,2] that resolved to several routing values, rejecting operation")
    );
    // alias with no write index
    exception = expectThrows(IllegalArgumentException.class, () -> project.resolveWriteIndexRouting("1", "alias3"));
    assertThat(exception.getMessage(), is("alias [alias3] does not have a write index"));
    // aliases with multiple indices
    AliasMetadata.Builder aliasZeroBuilderTwo = AliasMetadata.builder("alias0");
    if (randomBoolean()) {
        // fix: mutate the SECOND index's alias builder. The original code mutated
        // aliasZeroBuilder here, which had already been built into `project` above,
        // so the writeIndex(false) flag silently never took effect.
        aliasZeroBuilderTwo.writeIndex(false);
    }
    IndexMetadata.Builder builder2 = IndexMetadata.builder("index2")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(aliasZeroBuilderTwo.build())
        .putAlias(AliasMetadata.builder("alias1").routing("0").writeIndex(true).build())
        .putAlias(AliasMetadata.builder("alias2").writeIndex(true).build());
    ProjectMetadata projectTwoIndices = ProjectMetadata.builder(project).put(builder2).build();
    // verify that the new write index's routing is used (actual first, matcher second)
    assertThat(projectTwoIndices.resolveWriteIndexRouting("0", "alias1"), equalTo("0"));
}
/**
 * {@code findMappings} returns mappings only for the requested concrete indices and invokes
 * the on-next-index callback exactly once per index visited (zero times for an empty request).
 */
public void testFindMappings() throws IOException {
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(IndexMetadata.builder("index1").settings(indexSettings(IndexVersion.current(), 1, 0)).putMapping(FIND_MAPPINGS_TEST_ITEM))
.put(IndexMetadata.builder("index2").settings(indexSettings(IndexVersion.current(), 1, 0)).putMapping(FIND_MAPPINGS_TEST_ITEM))
.build();
{
// empty index array: nothing visited, nothing returned
AtomicInteger onNextIndexCalls = new AtomicInteger(0);
Map<String, MappingMetadata> mappings = project.findMappings(
Strings.EMPTY_ARRAY,
MapperPlugin.NOOP_FIELD_FILTER,
onNextIndexCalls::incrementAndGet
);
assertThat(mappings, anEmptyMap());
assertThat(onNextIndexCalls.get(), equalTo(0));
}
{
// single index requested: one mapping, one callback invocation
AtomicInteger onNextIndexCalls = new AtomicInteger(0);
Map<String, MappingMetadata> mappings = project.findMappings(
new String[] { "index1" },
MapperPlugin.NOOP_FIELD_FILTER,
onNextIndexCalls::incrementAndGet
);
assertThat(mappings, aMapWithSize(1));
assertIndexMappingsNotFiltered(mappings, "index1");
assertThat(onNextIndexCalls.get(), equalTo(1));
}
{
// both indices requested: two mappings, two callback invocations
AtomicInteger onNextIndexCalls = new AtomicInteger(0);
Map<String, MappingMetadata> mappings = project.findMappings(
new String[] { "index1", "index2" },
MapperPlugin.NOOP_FIELD_FILTER,
onNextIndexCalls::incrementAndGet
);
assertThat(mappings, aMapWithSize(2));
assertIndexMappingsNotFiltered(mappings, "index1");
assertIndexMappingsNotFiltered(mappings, "index2");
assertThat(onNextIndexCalls.get(), equalTo(2));
}
}
/**
 * With the no-op field filter {@code findMappings} must hand back the very same
 * {@link MappingMetadata} instance (no copy); any real field filter forces a rebuilt,
 * distinct instance.
 */
public void testFindMappingsNoOpFilters() throws IOException {
MappingMetadata originalMappingMetadata = new MappingMetadata(
"_doc",
XContentHelper.convertToMap(JsonXContent.jsonXContent, FIND_MAPPINGS_TEST_ITEM, true)
);
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(IndexMetadata.builder("index1").settings(indexSettings(IndexVersion.current(), 1, 0)).putMapping(originalMappingMetadata))
.build();
{
// no-op filter: the cached instance is returned as-is
Map<String, MappingMetadata> mappings = project.findMappings(
new String[] { "index1" },
MapperPlugin.NOOP_FIELD_FILTER,
Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP
);
MappingMetadata mappingMetadata = mappings.get("index1");
assertSame(originalMappingMetadata, mappingMetadata);
}
{
// any non-trivial filter (even a random one) forces a rebuilt instance
Map<String, MappingMetadata> mappings = project.findMappings(
new String[] { "index1" },
index -> field -> randomBoolean(),
Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP
);
MappingMetadata mappingMetadata = mappings.get("index1");
assertNotSame(originalMappingMetadata, mappingMetadata);
}
}
/**
 * Exercises per-index field filtering in {@code findMappings}: a per-index predicate prunes
 * individual fields (index1), removes everything ({@code Predicates.never()}, index2), or
 * keeps everything ({@code FieldPredicate.ACCEPT_ALL}, index3), and the filtered mapping
 * structure is verified by drilling into the returned source maps.
 */
@SuppressWarnings("unchecked")
public void testFindMappingsWithFilters() throws IOException {
String mapping = FIND_MAPPINGS_TEST_ITEM;
// randomly strip the outer "_doc" wrapper to cover both accepted mapping formats
if (randomBoolean()) {
Map<String, Object> stringObjectMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, FIND_MAPPINGS_TEST_ITEM, false);
Map<String, Object> doc = (Map<String, Object>) stringObjectMap.get("_doc");
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.map(doc);
mapping = Strings.toString(builder);
}
}
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(IndexMetadata.builder("index1").settings(indexSettings(IndexVersion.current(), 1, 0)).putMapping(mapping))
.put(IndexMetadata.builder("index2").settings(indexSettings(IndexVersion.current(), 1, 0)).putMapping(mapping))
.put(IndexMetadata.builder("index3").settings(indexSettings(IndexVersion.current(), 1, 0)).putMapping(mapping))
.build();
{
// index1: drop name.* leaves, properties.key.*, age, and address.location;
// index2: drop everything; index3: keep everything
Map<String, MappingMetadata> mappings = project.findMappings(new String[] { "index1", "index2", "index3" }, index -> {
if (index.equals("index1")) {
return field -> field.startsWith("name.") == false
&& field.startsWith("properties.key.") == false
&& field.equals("age") == false
&& field.equals("address.location") == false;
}
if (index.equals("index2")) {
return Predicates.never();
}
return FieldPredicate.ACCEPT_ALL;
}, Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP);
assertIndexMappingsNoFields(mappings, "index2");
assertIndexMappingsNotFiltered(mappings, "index3");
MappingMetadata docMapping = mappings.get("index1");
assertNotNull(docMapping);
Map<String, Object> sourceAsMap = docMapping.getSourceAsMap();
assertThat(sourceAsMap.keySet(), containsInAnyOrder("properties", "_routing", "_source"));
Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
assertThat(typeProperties.keySet(), containsInAnyOrder("name", "address", "birth", "ip", "suggest", "properties"));
// "name" survives as an (empty) object because only its sub-fields were excluded
Map<String, Object> name = (Map<String, Object>) typeProperties.get("name");
assertThat(name.keySet(), containsInAnyOrder("properties"));
Map<String, Object> nameProperties = (Map<String, Object>) name.get("properties");
assertThat(nameProperties, anEmptyMap());
Map<String, Object> address = (Map<String, Object>) typeProperties.get("address");
assertThat(address.keySet(), containsInAnyOrder("type", "properties"));
Map<String, Object> addressProperties = (Map<String, Object>) address.get("properties");
assertThat(addressProperties.keySet(), containsInAnyOrder("street", "area"));
assertLeafs(addressProperties, "street", "area");
Map<String, Object> properties = (Map<String, Object>) typeProperties.get("properties");
assertThat(properties.keySet(), containsInAnyOrder("type", "properties"));
Map<String, Object> propertiesProperties = (Map<String, Object>) properties.get("properties");
assertThat(propertiesProperties.keySet(), containsInAnyOrder("key", "value"));
// "key" kept but stripped of its multi-field; "value" keeps its keyword sub-field
assertLeafs(propertiesProperties, "key");
assertMultiField(propertiesProperties, "value", "keyword");
}
{
// only index3's *.keyword leaves survive; index1/index2 lose all fields
Map<String, MappingMetadata> mappings = project.findMappings(
new String[] { "index1", "index2", "index3" },
index -> field -> (index.equals("index3") && field.endsWith("keyword")),
Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP
);
assertIndexMappingsNoFields(mappings, "index1");
assertIndexMappingsNoFields(mappings, "index2");
MappingMetadata mappingMetadata = mappings.get("index3");
Map<String, Object> sourceAsMap = mappingMetadata.getSourceAsMap();
assertThat(sourceAsMap.keySet(), containsInAnyOrder("_routing", "_source", "properties"));
Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
assertThat(typeProperties.keySet(), containsInAnyOrder("properties"));
Map<String, Object> properties = (Map<String, Object>) typeProperties.get("properties");
assertThat(properties.keySet(), containsInAnyOrder("type", "properties"));
Map<String, Object> propertiesProperties = (Map<String, Object>) properties.get("properties");
assertThat(propertiesProperties.keySet(), containsInAnyOrder("key", "value"));
Map<String, Object> key = (Map<String, Object>) propertiesProperties.get("key");
assertThat(key.keySet(), containsInAnyOrder("properties"));
Map<String, Object> keyProperties = (Map<String, Object>) key.get("properties");
assertThat(keyProperties.keySet(), containsInAnyOrder("keyword"));
assertLeafs(keyProperties, "keyword");
Map<String, Object> value = (Map<String, Object>) propertiesProperties.get("value");
assertThat(value.keySet(), containsInAnyOrder("properties"));
Map<String, Object> valueProperties = (Map<String, Object>) value.get("properties");
assertThat(valueProperties.keySet(), containsInAnyOrder("keyword"));
assertLeafs(valueProperties, "keyword");
}
{
// predicate keyed purely on the index name: only index2 keeps its fields
Map<String, MappingMetadata> mappings = project.findMappings(
new String[] { "index1", "index2", "index3" },
index -> field -> (index.equals("index2")),
Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP
);
assertIndexMappingsNoFields(mappings, "index1");
assertIndexMappingsNoFields(mappings, "index3");
assertIndexMappingsNotFiltered(mappings, "index2");
}
}
/**
 * {@code oldestIndexVersion} is the minimum creation version across all indices, defaults to
 * the current version for an empty project, and building fails fast when an index lacks the
 * {@code index.version.created} setting (represented here by {@code IndexVersions.ZERO}).
 */
public void testOldestIndexComputation() {
ProjectMetadata project = buildIndicesWithVersions(
IndexVersions.MINIMUM_COMPATIBLE,
IndexVersion.current(),
IndexVersion.fromId(IndexVersion.current().id() + 1)
).build();
assertEquals(IndexVersions.MINIMUM_COMPATIBLE, project.oldestIndexVersion());
// empty project: falls back to the current version
ProjectMetadata.Builder b = ProjectMetadata.builder(randomProjectIdOrDefault());
assertEquals(IndexVersion.current(), b.build().oldestIndexVersion());
Throwable ex = expectThrows(
IllegalArgumentException.class,
() -> buildIndicesWithVersions(
IndexVersions.MINIMUM_COMPATIBLE,
IndexVersions.ZERO,
IndexVersion.fromId(IndexVersion.current().id() + 1)
).build()
);
assertEquals("[index.version.created] is not present in the index settings for index with UUID [null]", ex.getMessage());
}
/**
 * Builds a project containing one backing-index-named index per supplied creation version,
 * with strictly increasing (randomly gapped) generation numbers.
 */
private ProjectMetadata.Builder buildIndicesWithVersions(IndexVersion... indexVersions) {
    int generation = randomIntBetween(9, 50);
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
    for (IndexVersion version : indexVersions) {
        IndexMetadata indexMetadata = IndexMetadata.builder(DataStream.getDefaultBackingIndexName("index", generation))
            .settings(settings(version))
            .numberOfShards(1)
            .numberOfReplicas(1)
            .build();
        projectBuilder.put(indexMetadata, false);
        generation = randomIntBetween(generation + 1, generation + 50);
    }
    return projectBuilder;
}
/**
 * One-shard/zero-replica index named {@code name}, carrying {@code alias} with the given
 * (possibly {@code null}) write-index flag.
 */
private static IndexMetadata.Builder buildIndexMetadata(String name, String alias, Boolean writeIndex) {
    AliasMetadata aliasMetadata = AliasMetadata.builder(alias).writeIndex(writeIndex).build();
    return IndexMetadata.builder(name)
        .settings(settings(IndexVersion.current()))
        .creationDate(randomNonNegativeLong())
        .putAlias(aliasMetadata)
        .numberOfShards(1)
        .numberOfReplicas(0);
}
/** Asserts that {@code index}'s mapping was returned but all of its fields were filtered out. */
@SuppressWarnings("unchecked")
private static void assertIndexMappingsNoFields(Map<String, MappingMetadata> mappings, String index) {
MappingMetadata docMapping = mappings.get(index);
assertNotNull(docMapping);
Map<String, Object> sourceAsMap = docMapping.getSourceAsMap();
assertThat(sourceAsMap.keySet(), containsInAnyOrder("_routing", "_source", "properties"));
Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
assertThat(typeProperties, anEmptyMap());
}
/**
 * Asserts that {@code index}'s mapping matches the full {@code FIND_MAPPINGS_TEST_ITEM}
 * structure, i.e. no field was removed by filtering.
 */
@SuppressWarnings("unchecked")
private static void assertIndexMappingsNotFiltered(Map<String, MappingMetadata> mappings, String index) {
MappingMetadata docMapping = mappings.get(index);
assertNotNull(docMapping);
Map<String, Object> sourceAsMap = docMapping.getSourceAsMap();
assertThat(sourceAsMap.keySet(), containsInAnyOrder("_routing", "_source", "properties"));
Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
assertThat(typeProperties.keySet(), containsInAnyOrder("name", "address", "birth", "age", "ip", "suggest", "properties"));
Map<String, Object> name = (Map<String, Object>) typeProperties.get("name");
assertThat(name.keySet(), containsInAnyOrder("properties"));
Map<String, Object> nameProperties = (Map<String, Object>) name.get("properties");
assertThat(nameProperties.keySet(), containsInAnyOrder("first", "last"));
assertLeafs(nameProperties, "first", "last");
Map<String, Object> address = (Map<String, Object>) typeProperties.get("address");
assertThat(address.keySet(), containsInAnyOrder("type", "properties"));
Map<String, Object> addressProperties = (Map<String, Object>) address.get("properties");
assertThat(addressProperties.keySet(), containsInAnyOrder("street", "location", "area"));
assertLeafs(addressProperties, "street", "location", "area");
Map<String, Object> properties = (Map<String, Object>) typeProperties.get("properties");
assertThat(properties.keySet(), containsInAnyOrder("type", "properties"));
Map<String, Object> propertiesProperties = (Map<String, Object>) properties.get("properties");
assertThat(propertiesProperties.keySet(), containsInAnyOrder("key", "value"));
assertMultiField(propertiesProperties, "key", "keyword");
assertMultiField(propertiesProperties, "value", "keyword");
}
/** Asserts each of {@code fields} exists in {@code properties} as a leaf (no sub-properties, no multi-fields). */
@SuppressWarnings("unchecked")
public static void assertLeafs(Map<String, Object> properties, String... fields) {
    assertThat(properties.keySet(), hasItems(fields));
    for (String leafField : fields) {
        Map<String, Object> definition = (Map<String, Object>) properties.get(leafField);
        assertThat(definition, not(hasKey("properties")));
        assertThat(definition, not(hasKey("fields")));
    }
}
/** Asserts {@code field} exists with a "fields" section whose entries are exactly the leaf {@code subFields}. */
public static void assertMultiField(Map<String, Object> properties, String field, String... subFields) {
    assertThat(properties, hasKey(field));
    @SuppressWarnings("unchecked")
    Map<String, Object> fieldDefinition = (Map<String, Object>) properties.get(field);
    assertThat(fieldDefinition, hasKey("fields"));
    @SuppressWarnings("unchecked")
    Map<String, Object> multiFields = (Map<String, Object>) fieldDefinition.get("fields");
    assertLeafs(multiFields, subFields);
}
/**
 * Mapping fixture for the findMappings tests: a "name" object with keyword leaves, scalar
 * fields (date/integer/ip/completion), an "address" object including geo types, and a nested
 * "properties" field whose "key"/"value" are multi-fields with a "keyword" sub-field, plus
 * {@code _routing}/{@code _source} metadata sections. The {@code \s} escape keeps a trailing
 * space inside the text block.
 */
private static final String FIND_MAPPINGS_TEST_ITEM = """
{
"_doc": {
"_routing": {
"required":true
}, "_source": {
"enabled":false
}, "properties": {
"name": {
"properties": {
"first": {
"type": "keyword"
},
"last": {
"type": "keyword"
}
}
},
"birth": {
"type": "date"
},
"age": {
"type": "integer"
},
"ip": {
"type": "ip"
},
"suggest" : {
"type": "completion"
},
"address": {
"type": "object",
"properties": {
"street": {
"type": "keyword"
},
"location": {
"type": "geo_point"
},
"area": {
"type": "geo_shape", \s
"tree": "quadtree",
"precision": "1m"
}
}
},
"properties": {
"type": "nested",
"properties": {
"key" : {
"type": "text",
"fields": {
"keyword" : {
"type" : "keyword"
}
}
},
"value" : {
"type": "text",
"fields": {
"keyword" : {
"type" : "keyword"
}
}
}
}
}
}
}
}
}""";
/** A null custom value passed to {@code putCustom} must raise an NPE naming the offending key. */
public void testBuilderRejectsNullCustom() {
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
    String customKey = randomAlphaOfLength(10);
    NullPointerException npe = expectThrows(NullPointerException.class, () -> projectBuilder.putCustom(customKey, null));
    assertThat(npe.getMessage(), containsString(customKey));
}
/** A null value inside a bulk customs map must raise an NPE naming the offending key. */
public void testBuilderRejectsNullInCustoms() {
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
    String customKey = randomAlphaOfLength(10);
    Map<String, Metadata.ProjectCustom> customs = new HashMap<>();
    customs.put(customKey, null);
    NullPointerException npe = expectThrows(NullPointerException.class, () -> projectBuilder.customs(customs));
    assertThat(npe.getMessage(), containsString(customKey));
}
/**
 * {@code copyAndUpdate} must return a distinct instance reflecting the updater's mutation
 * (here: replacing an index so its UUID changes).
 */
public void testCopyAndUpdate() {
    String indexName = randomAlphaOfLengthBetween(4, 12);
    String originalUUID = randomUUID();
    ProjectMetadata original = ProjectMetadata.builder(randomProjectIdOrDefault())
        .put(IndexMetadata.builder(indexName).settings(indexSettings(IndexVersion.current(), originalUUID, 1, 1)))
        .build();
    String updatedUUID = randomUUID();
    assertThat(updatedUUID, not(equalTo(originalUUID)));
    ProjectMetadata updated = original.copyAndUpdate(
        builder -> builder.put(IndexMetadata.builder(indexName).settings(indexSettings(IndexVersion.current(), updatedUUID, 1, 1)))
    );
    assertThat(updated, not(sameInstance(original)));
    assertThat(updated.index(indexName).getIndexUUID(), equalTo(updatedUUID));
}
/** {@code removeCustomIf} drops exactly the customs matching the predicate and keeps the rest. */
public void testBuilderRemoveCustomIf() {
    TestProjectCustomMetadata removedCustom = new TestProjectCustomMetadata();
    TestProjectCustomMetadata keptCustom = new TestProjectCustomMetadata();
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
    projectBuilder.putCustom("custom1", removedCustom);
    projectBuilder.putCustom("custom2", keptCustom);
    projectBuilder.removeCustomIf((key, value) -> "custom1".equals(key));
    ProjectMetadata project = projectBuilder.build();
    assertThat(project.custom("custom1"), nullValue());
    assertThat(project.custom("custom2"), sameInstance(keptCustom));
}
/**
 * A data stream whose name collides with an existing concrete index name must fail the
 * uniqueness validation at build time.
 */
public void testBuilderRejectsDataStreamThatConflictsWithIndex() {
final String dataStreamName = "my-data-stream";
IndexMetadata idx = createFirstBackingIndex(dataStreamName).build();
ProjectMetadata.Builder b = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(idx, false)
.put(
IndexMetadata.builder(dataStreamName)
.settings(settings(IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(1)
.build(),
false
)
.put(newInstance(dataStreamName, List.of(idx.getIndex())));
IllegalStateException e = expectThrows(IllegalStateException.class, b::build);
assertThat(
e.getMessage(),
containsString(
"index, alias, and data stream names need to be unique, but the following duplicates were found [data "
+ "stream ["
+ dataStreamName
+ "] conflicts with index]"
)
);
}
/**
 * A data stream whose name collides with an alias on its own backing index must fail the
 * uniqueness validation at build time.
 */
public void testBuilderRejectsDataStreamThatConflictsWithAlias() {
final String dataStreamName = "my-data-stream";
IndexMetadata idx = createFirstBackingIndex(dataStreamName).putAlias(AliasMetadata.builder(dataStreamName).build()).build();
ProjectMetadata.Builder b = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(idx, false)
.put(newInstance(dataStreamName, List.of(idx.getIndex())));
IllegalStateException e = expectThrows(IllegalStateException.class, b::build);
assertThat(
e.getMessage(),
containsString(
"index, alias, and data stream names need to be unique, but the following duplicates were found ["
+ dataStreamName
+ " (alias of ["
+ idx.getIndex().getName()
+ "]) conflicts with data stream]"
)
);
}
/**
 * An alias placed on a backing index, named like a future backing index of the same data
 * stream, trips the build-time assertion that aliases cannot refer to backing indices.
 */
public void testBuilderRejectsAliasThatRefersToDataStreamBackingIndex() {
    String dataStreamName = "my-data-stream";
    String conflictingAlias = DataStream.getDefaultBackingIndexName(dataStreamName, 2);
    IndexMetadata backingIndex = createFirstBackingIndex(dataStreamName).putAlias(new AliasMetadata.Builder(conflictingAlias)).build();
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault())
        .put(backingIndex, false)
        .put(newInstance(dataStreamName, List.of(backingIndex.getIndex())));
    AssertionError error = expectThrows(AssertionError.class, projectBuilder::build);
    assertThat(error.getMessage(), containsString("aliases [" + conflictingAlias + "] cannot refer to backing indices of data streams"));
}
/**
 * A data stream whose backing indices carry arbitrary, non-contiguous generation numbers
 * must still build successfully and be retrievable by name.
 */
public void testBuilderForDataStreamWithRandomlyNumberedBackingIndices() {
    String dataStreamName = "my-data-stream";
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
    List<Index> backingIndices = new ArrayList<>();
    int generation = 0;
    // create 2-5 backing indices with randomly gapped, strictly increasing generations
    for (int remaining = randomIntBetween(2, 5); remaining > 0; remaining--) {
        generation = randomIntBetween(generation + 1, generation + 50);
        IndexMetadata backingIndex = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, generation))
            .settings(settings(IndexVersion.current()))
            .numberOfShards(1)
            .numberOfReplicas(1)
            .build();
        projectBuilder.put(backingIndex, false);
        backingIndices.add(backingIndex.getIndex());
    }
    projectBuilder.put(newInstance(dataStreamName, backingIndices, generation, null));
    ProjectMetadata project = projectBuilder.build();
    assertThat(project.dataStreams().keySet(), containsInAnyOrder(dataStreamName));
    assertThat(project.dataStreams().get(dataStreamName).getName(), equalTo(dataStreamName));
}
/**
 * Each data stream appears in the indices lookup as a non-hidden DATA_STREAM abstraction
 * whose indices and write index line up with the registered {@link DataStream}.
 */
public void testBuildIndicesLookupForDataStreams() {
ProjectMetadata.Builder b = ProjectMetadata.builder(randomProjectIdOrDefault());
int numDataStreams = randomIntBetween(2, 8);
for (int i = 0; i < numDataStreams; i++) {
String name = "data-stream-" + i;
addDataStream(name, b);
}
ProjectMetadata project = b.build();
assertThat(project.dataStreams().size(), equalTo(numDataStreams));
for (int i = 0; i < numDataStreams; i++) {
String name = "data-stream-" + i;
IndexAbstraction value = project.getIndicesLookup().get(name);
assertThat(value, notNullValue());
DataStream ds = project.dataStreams().get(name);
assertThat(ds, notNullValue());
assertThat(value.isHidden(), is(false));
assertThat(value.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM));
assertThat(value.getIndices(), hasSize(ds.getIndices().size()));
// the write index is the backing index for the data stream's current generation
assertThat(value.getWriteIndex().getName(), DataStreamTestHelper.backingIndexEqualTo(name, (int) ds.getGeneration()));
}
}
/**
 * Data streams and their aliases both appear in the indices lookup, typed DATA_STREAM and
 * ALIAS respectively — including an alias spanning two streams (a1) and two aliases on the
 * same stream (a1/a3 on d1).
 */
public void testBuildIndicesLookupForDataStreamAliases() {
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
    List<String> dataStreamNames = List.of("d1", "d2", "d3", "d4");
    for (String dataStreamName : dataStreamNames) {
        addDataStream(dataStreamName, projectBuilder);
    }
    projectBuilder.put("a1", "d1", null, null);
    projectBuilder.put("a1", "d2", null, null);
    projectBuilder.put("a2", "d3", null, null);
    projectBuilder.put("a3", "d1", null, null);
    ProjectMetadata project = projectBuilder.build();
    assertThat(project.dataStreams(), aMapWithSize(4));
    for (String dataStreamName : dataStreamNames) {
        IndexAbstraction abstraction = project.getIndicesLookup().get(dataStreamName);
        assertThat(abstraction, notNullValue());
        assertThat(abstraction.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM));
    }
    for (String aliasName : List.of("a1", "a2", "a3")) {
        IndexAbstraction abstraction = project.getIndicesLookup().get(aliasName);
        assertThat(abstraction, notNullValue());
        assertThat(abstraction.getType(), equalTo(IndexAbstraction.Type.ALIAS));
    }
}
/**
 * Build-time validation rejects a data stream alias whose name collides with a data stream
 * (its own or another) or with a regular index alias.
 */
public void testDataStreamAliasValidation() {
// alias name equal to its own data stream's name
ProjectMetadata.Builder b = ProjectMetadata.builder(randomProjectIdOrDefault());
addDataStream("my-alias", b);
b.put("my-alias", "my-alias", null, null);
var e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream alias and data stream have the same name (my-alias)"));
// alias on d1 whose name collides with another existing data stream
b = ProjectMetadata.builder(randomProjectIdOrDefault());
addDataStream("d1", b);
addDataStream("my-alias", b);
b.put("my-alias", "d1", null, null);
e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream alias and data stream have the same name (my-alias)"));
// alias name colliding with a regular index alias
b = ProjectMetadata.builder(randomProjectIdOrDefault());
b.put(
IndexMetadata.builder("index1")
.settings(indexSettings(IndexVersion.current(), 1, 0))
.putAlias(new AliasMetadata.Builder("my-alias"))
);
addDataStream("d1", b);
b.put("my-alias", "d1", null, null);
e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream alias and indices alias have the same name (my-alias)"));
}
/**
 * Same collision checks as {@code testDataStreamAliasValidation}, but the data streams and
 * aliases enter via the bulk {@code dataStreams(...)} setter (the path used when restoring
 * from a snapshot) rather than incremental puts.
 */
public void testDataStreamAliasValidationRestoreScenario() {
// alias name equal to its own data stream's name
ProjectMetadata.Builder b = ProjectMetadata.builder(randomProjectIdOrDefault());
b.dataStreams(
Map.of("my-alias", createDataStream("my-alias")),
Map.of("my-alias", new DataStreamAlias("my-alias", List.of("my-alias"), null, null))
);
var e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream alias and data stream have the same name (my-alias)"));
// alias on d1 whose name collides with another existing data stream
b = ProjectMetadata.builder(randomProjectIdOrDefault());
b.dataStreams(
Map.of("d1", createDataStream("d1"), "my-alias", createDataStream("my-alias")),
Map.of("my-alias", new DataStreamAlias("my-alias", List.of("d1"), null, null))
);
e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream alias and data stream have the same name (my-alias)"));
// alias name colliding with a regular index alias
b = ProjectMetadata.builder(randomProjectIdOrDefault());
b.put(
IndexMetadata.builder("index1")
.settings(indexSettings(IndexVersion.current(), 1, 0))
.putAlias(new AliasMetadata.Builder("my-alias"))
);
b.dataStreams(Map.of("d1", createDataStream("d1")), Map.of("my-alias", new DataStreamAlias("my-alias", List.of("d1"), null, null)));
e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream alias and indices alias have the same name (my-alias)"));
}
/** Registers a data stream named {@code name} with 1-4 freshly created backing indices on {@code b}. */
private void addDataStream(String name, ProjectMetadata.Builder b) {
    int backingIndexCount = randomIntBetween(1, 4);
    List<Index> backingIndices = new ArrayList<>(backingIndexCount);
    for (int generation = 1; generation <= backingIndexCount; generation++) {
        IndexMetadata backingIndex = createBackingIndex(name, generation).build();
        backingIndices.add(backingIndex.getIndex());
        b.put(backingIndex, true);
    }
    b.put(newInstance(name, backingIndices));
}
/**
 * Creates a {@link DataStream} named {@code name} with 1-4 backing indices, without
 * registering those indices on any builder (unlike {@code addDataStream}).
 */
private DataStream createDataStream(String name) {
    int backingIndexCount = randomIntBetween(1, 4);
    List<Index> backingIndices = new ArrayList<>(backingIndexCount);
    for (int generation = 1; generation <= backingIndexCount; generation++) {
        backingIndices.add(createBackingIndex(name, generation).build().getIndex());
    }
    return newInstance(name, backingIndices);
}
/**
 * The indices lookup records the parent data stream for every backing index, and no parent
 * for standalone indices; the lookup also contains one extra entry for the stream itself.
 */
public void testIndicesLookupRecordsDataStreamForBackingIndices() {
final int numIndices = randomIntBetween(2, 5);
final int numBackingIndices = randomIntBetween(2, 5);
final String dataStreamName = "my-data-stream";
CreateIndexResult result = createIndices(numIndices, numBackingIndices, dataStreamName);
SortedMap<String, IndexAbstraction> indicesLookup = result.project.getIndicesLookup();
// +1 for the data stream abstraction itself
assertThat(indicesLookup, aMapWithSize(result.indices.size() + result.backingIndices.size() + 1));
for (Index index : result.indices) {
assertThat(indicesLookup, hasKey(index.getName()));
assertNull(indicesLookup.get(index.getName()).getParentDataStream());
}
for (Index index : result.backingIndices) {
assertThat(indicesLookup, hasKey(index.getName()));
assertNotNull(indicesLookup.get(index.getName()).getParentDataStream());
assertThat(indicesLookup.get(index.getName()).getParentDataStream().getName(), equalTo(dataStreamName));
}
}
    /**
     * Validation passes when no regular index name could conflict with a future backing index.
     */
    public void testValidateDataStreamsNoConflicts() {
        ProjectMetadata project = createIndices(5, 10, "foo-datastream").project;
        // don't expect any exception when validating a system without indices that would conflict with future backing indices
        assertDataStreams(project.indices(), (DataStreamMetadata) project.customs().get(DataStreamMetadata.TYPE));
    }
    /**
     * Regular indices whose names start with the data stream name but lack the
     * backing-index counter suffix must not trip data stream validation.
     */
    public void testValidateDataStreamsIgnoresIndicesWithoutCounter() {
        String dataStreamName = "foo-datastream";
        ProjectMetadata project = ProjectMetadata.builder(createIndices(10, 10, dataStreamName).project)
            .put(
                new IndexMetadata.Builder(dataStreamName + "-index-without-counter").settings(settings(IndexVersion.current()))
                    .numberOfShards(1)
                    .numberOfReplicas(1)
            )
            .put(
                new IndexMetadata.Builder(dataStreamName + randomAlphaOfLength(10)).settings(settings(IndexVersion.current()))
                    .numberOfShards(1)
                    .numberOfReplicas(1)
            )
            .put(
                new IndexMetadata.Builder(randomAlphaOfLength(10)).settings(settings(IndexVersion.current()))
                    .numberOfShards(1)
                    .numberOfReplicas(1)
            )
            .build();
        // don't expect any exception when validating against non-backing indices that don't conform to the backing indices naming
        // convention
        assertDataStreams(project.indices(), (DataStreamMetadata) project.customs().get(DataStreamMetadata.TYPE));
    }
    /**
     * An index name using the backing-index prefix but with extra text before the
     * generation number cannot conflict with real backing indices and must validate.
     */
    public void testValidateDataStreamsAllowsNamesThatStartsWithPrefix() {
        String dataStreamName = "foo-datastream";
        ProjectMetadata project = ProjectMetadata.builder(createIndices(10, 10, dataStreamName).project)
            .put(
                new IndexMetadata.Builder(DataStream.BACKING_INDEX_PREFIX + dataStreamName + "-something-100012").settings(
                    settings(IndexVersion.current())
                ).numberOfShards(1).numberOfReplicas(1)
            )
            .build();
        // don't expect any exception when validating against (potentially backing) indices that can't create conflict because of
        // additional text before number
        assertDataStreams(project.indices(), (DataStreamMetadata) project.customs().get(DataStreamMetadata.TYPE));
    }
public void testValidateDataStreamsForNullDataStreamMetadata() {
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(IndexMetadata.builder("foo-index").settings(settings(IndexVersion.current())).numberOfShards(1).numberOfReplicas(1))
.build();
try {
assertDataStreams(project.indices(), DataStreamMetadata.EMPTY);
} catch (Exception e) {
fail("did not expect exception when validating a system without any data streams but got " + e.getMessage());
}
}
public void testDataStreamAliases() {
ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-eu"));
assertThat(mdBuilder.put("logs-postgres", "logs-postgres-eu", null, null), is(true));
mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-us"));
assertThat(mdBuilder.put("logs-postgres", "logs-postgres-us", null, null), is(true));
mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-au"));
assertThat(mdBuilder.put("logs-postgres", "logs-postgres-au", null, null), is(true));
assertThat(mdBuilder.put("logs-postgres", "logs-postgres-au", null, null), is(false));
ProjectMetadata project = mdBuilder.build();
assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
assertThat(
project.dataStreamAliases().get("logs-postgres").getDataStreams(),
containsInAnyOrder("logs-postgres-eu", "logs-postgres-us", "logs-postgres-au")
);
}
    /**
     * Adding an alias that points to a non-existent data stream must fail; once the
     * data stream exists the same alias put succeeds.
     */
    public void testDataStreamReferToNonExistingDataStream() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        Exception e = expectThrows(IllegalArgumentException.class, () -> mdBuilder.put("logs-postgres", "logs-postgres-eu", null, null));
        assertThat(e.getMessage(), equalTo("alias [logs-postgres] refers to a non existing data stream [logs-postgres-eu]"));
        // after creating the data stream, the alias put is accepted
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-eu"));
        mdBuilder.put("logs-postgres", "logs-postgres-eu", null, null);
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-eu"));
    }
    /**
     * Removing a data stream drops it from any alias that references it; the alias
     * itself disappears once its last data stream is removed.
     */
    public void testDeleteDataStreamShouldUpdateAlias() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-eu"));
        mdBuilder.put("logs-postgres", "logs-postgres-eu", null, null);
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-us"));
        mdBuilder.put("logs-postgres", "logs-postgres-us", null, null);
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-au"));
        mdBuilder.put("logs-postgres", "logs-postgres-au", null, null);
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-eu", "logs-postgres-us", "logs-postgres-au")
        );
        // removing one data stream leaves the alias pointing at the remaining two
        mdBuilder = ProjectMetadata.builder(project);
        mdBuilder.removeDataStream("logs-postgres-us");
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-eu", "logs-postgres-au")
        );
        mdBuilder = ProjectMetadata.builder(project);
        mdBuilder.removeDataStream("logs-postgres-au");
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-eu"));
        // removing the last referenced data stream deletes the alias entirely
        mdBuilder = ProjectMetadata.builder(project);
        mdBuilder.removeDataStream("logs-postgres-eu");
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), nullValue());
    }
    /**
     * Removing a data stream from an alias (via removeDataStreamAlias) shrinks the alias;
     * removing the last data stream from the alias deletes the alias entirely.
     */
    public void testDeleteDataStreamAlias() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-eu"));
        mdBuilder.put("logs-postgres", "logs-postgres-eu", null, null);
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-us"));
        mdBuilder.put("logs-postgres", "logs-postgres-us", null, null);
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-au"));
        mdBuilder.put("logs-postgres", "logs-postgres-au", null, null);
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-eu", "logs-postgres-us", "logs-postgres-au")
        );
        // remove one data stream from the alias; the other two remain
        mdBuilder = ProjectMetadata.builder(project);
        assertThat(mdBuilder.removeDataStreamAlias("logs-postgres", "logs-postgres-us", true), is(true));
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-eu", "logs-postgres-au")
        );
        mdBuilder = ProjectMetadata.builder(project);
        assertThat(mdBuilder.removeDataStreamAlias("logs-postgres", "logs-postgres-au", true), is(true));
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-eu"));
        // removing the last data stream deletes the alias
        mdBuilder = ProjectMetadata.builder(project);
        assertThat(mdBuilder.removeDataStreamAlias("logs-postgres", "logs-postgres-eu", true), is(true));
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), nullValue());
    }
    /**
     * removeDataStreamAlias on an unknown alias throws when mustExist is true and
     * returns false (no change) when mustExist is false.
     */
    public void testDeleteDataStreamAliasMustExists() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-eu"));
        mdBuilder.put("logs-postgres", "logs-postgres-eu", null, null);
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-us"));
        mdBuilder.put("logs-postgres", "logs-postgres-us", null, null);
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-au"));
        mdBuilder.put("logs-postgres", "logs-postgres-au", null, null);
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-eu", "logs-postgres-us", "logs-postgres-au")
        );
        // "logs-mysql" does not exist: behavior depends on the mustExist flag
        ProjectMetadata.Builder mdBuilder2 = ProjectMetadata.builder(project);
        expectThrows(ResourceNotFoundException.class, () -> mdBuilder2.removeDataStreamAlias("logs-mysql", "logs-postgres-us", true));
        assertThat(mdBuilder2.removeDataStreamAlias("logs-mysql", "logs-postgres-us", false), is(false));
    }
    /**
     * An alias created with a null write flag has no write data stream; re-putting it
     * with writeDataStream=true promotes that data stream to write target.
     */
    public void testDataStreamWriteAlias() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-replicated"));
        mdBuilder.put("logs-postgres", "logs-postgres-replicated", null, null);
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        // null write flag means no write data stream is designated
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), nullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-replicated"));
        // setting the write flag is a change (returns true) and designates the write data stream
        mdBuilder = ProjectMetadata.builder(project);
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-replicated", true, null), is(true));
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), equalTo("logs-postgres-replicated"));
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-replicated"));
    }
public void testDataStreamMultipleWriteAlias() {
ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
mdBuilder.put(DataStreamTestHelper.randomInstance("logs-foobar"));
mdBuilder.put(DataStreamTestHelper.randomInstance("logs-barbaz"));
mdBuilder.put("logs", "logs-foobar", true, null);
mdBuilder.put("logs", "logs-barbaz", true, null);
ProjectMetadata project = mdBuilder.build();
assertThat(project.dataStreamAliases().get("logs"), notNullValue());
assertThat(project.dataStreamAliases().get("logs").getWriteDataStream(), equalTo("logs-barbaz"));
assertThat(project.dataStreamAliases().get("logs").getDataStreams(), containsInAnyOrder("logs-foobar", "logs-barbaz"));
}
    /**
     * Putting the alias with writeDataStream=false clears the write designation;
     * putting with null leaves the alias untouched (reports no change).
     */
    public void testDataStreamWriteAliasUnset() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-replicated"));
        mdBuilder.put("logs-postgres", "logs-postgres-replicated", true, null);
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), equalTo("logs-postgres-replicated"));
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-replicated"));
        mdBuilder = ProjectMetadata.builder(project);
        // Side check: null value isn't changing anything:
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-replicated", null, null), is(false));
        // Unset write flag
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-replicated", false, null), is(true));
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), nullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-replicated"));
    }
    /**
     * The write designation of an alias can be moved from one member data stream to
     * another by unsetting it on the old one and setting it on the new one.
     */
    public void testDataStreamWriteAliasChange() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-primary"));
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-replicated"));
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-primary", true, null), is(true));
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-replicated", null, null), is(true));
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), equalTo("logs-postgres-primary"));
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-primary", "logs-postgres-replicated")
        );
        // change write flag:
        mdBuilder = ProjectMetadata.builder(project);
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-primary", false, null), is(true));
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-replicated", true, null), is(true));
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), equalTo("logs-postgres-replicated"));
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-primary", "logs-postgres-replicated")
        );
    }
    /**
     * Removing the write data stream from an alias (via removeDataStreamAlias) leaves
     * the alias without a write designation.
     */
    public void testDataStreamWriteRemoveAlias() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-primary"));
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-replicated"));
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-primary", true, null), is(true));
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-replicated", null, null), is(true));
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), equalTo("logs-postgres-primary"));
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-primary", "logs-postgres-replicated")
        );
        // remove the write data stream from the alias (mustExist flag is irrelevant here)
        mdBuilder = ProjectMetadata.builder(project);
        assertThat(mdBuilder.removeDataStreamAlias("logs-postgres", "logs-postgres-primary", randomBoolean()), is(true));
        project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), nullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-replicated"));
    }
    /**
     * Deleting the data stream that is an alias's write target removes it from the
     * alias and clears the alias's write designation.
     */
    public void testDataStreamWriteRemoveDataStream() {
        ProjectMetadata.Builder mdBuilder = ProjectMetadata.builder(randomProjectIdOrDefault());
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-primary"));
        mdBuilder.put(DataStreamTestHelper.randomInstance("logs-postgres-replicated"));
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-primary", true, null), is(true));
        assertThat(mdBuilder.put("logs-postgres", "logs-postgres-replicated", null, null), is(true));
        ProjectMetadata project = mdBuilder.build();
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), equalTo("logs-postgres-primary"));
        assertThat(
            project.dataStreamAliases().get("logs-postgres").getDataStreams(),
            containsInAnyOrder("logs-postgres-primary", "logs-postgres-replicated")
        );
        // delete the write data stream itself (not just its alias membership)
        mdBuilder = ProjectMetadata.builder(project);
        mdBuilder.removeDataStream("logs-postgres-primary");
        project = mdBuilder.build();
        assertThat(project.dataStreams().keySet(), contains("logs-postgres-replicated"));
        assertThat(project.dataStreamAliases().get("logs-postgres"), notNullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getWriteDataStream(), nullValue());
        assertThat(project.dataStreamAliases().get("logs-postgres").getDataStreams(), containsInAnyOrder("logs-postgres-replicated"));
    }
    /**
     * The indices lookup is only rebuilt when a change affects it (new index, alias
     * change, hidden flag, data stream alias); unrelated changes (replica count,
     * arbitrary index settings) must reuse the previous lookup instance.
     */
    public void testReuseIndicesLookup() {
        String indexName = "my-index";
        String aliasName = "my-alias";
        String dataStreamName = "logs-mysql-prod";
        String dataStreamAliasName = "logs-mysql";
        ProjectMetadata previous = ProjectMetadata.builder(randomProjectIdOrDefault()).build();
        // Things that should change indices lookup
        {
            // adding a data stream with its backing index
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            IndexMetadata idx = DataStreamTestHelper.createFirstBackingIndex(dataStreamName).build();
            builder.put(idx, true);
            DataStream dataStream = newInstance(dataStreamName, List.of(idx.getIndex()));
            builder.put(dataStream);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), not(sameInstance(project.getIndicesLookup())));
            previous = project;
        }
        {
            // adding a data stream alias
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            builder.put(dataStreamAliasName, dataStreamName, false, null);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), not(sameInstance(project.getIndicesLookup())));
            previous = project;
        }
        {
            // flipping the data stream alias's write flag
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            builder.put(dataStreamAliasName, dataStreamName, true, null);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), not(sameInstance(project.getIndicesLookup())));
            previous = project;
        }
        {
            // adding a regular index
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            builder.put(
                IndexMetadata.builder(indexName)
                    .settings(settings(IndexVersion.current()))
                    .creationDate(randomNonNegativeLong())
                    .numberOfShards(1)
                    .numberOfReplicas(0)
            );
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), not(sameInstance(project.getIndicesLookup())));
            previous = project;
        }
        {
            // adding an index alias
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            IndexMetadata.Builder imBuilder = IndexMetadata.builder(builder.get(indexName));
            imBuilder.putAlias(AliasMetadata.builder(aliasName).build());
            builder.put(imBuilder);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), not(sameInstance(project.getIndicesLookup())));
            previous = project;
        }
        {
            // marking the alias as write index
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            IndexMetadata.Builder imBuilder = IndexMetadata.builder(builder.get(indexName));
            imBuilder.putAlias(AliasMetadata.builder(aliasName).writeIndex(true).build());
            builder.put(imBuilder);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), not(sameInstance(project.getIndicesLookup())));
            previous = project;
        }
        {
            // changing the index's hidden setting
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            IndexMetadata.Builder imBuilder = IndexMetadata.builder(builder.get(indexName));
            Settings.Builder sBuilder = Settings.builder()
                .put(builder.get(indexName).getSettings())
                .put(IndexMetadata.INDEX_HIDDEN_SETTING.getKey(), true);
            imBuilder.settings(sBuilder.build());
            builder.put(imBuilder);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), not(sameInstance(project.getIndicesLookup())));
            previous = project;
        }
        // Things that shouldn't change indices lookup
        {
            // changing the replica count
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            IndexMetadata.Builder imBuilder = IndexMetadata.builder(builder.get(indexName));
            imBuilder.numberOfReplicas(2);
            builder.put(imBuilder);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), sameInstance(project.getIndicesLookup()));
            previous = project;
        }
        {
            // changing an unrelated index setting
            ProjectMetadata.Builder builder = ProjectMetadata.builder(previous);
            IndexMetadata.Builder imBuilder = IndexMetadata.builder(builder.get(indexName));
            Settings.Builder sBuilder = Settings.builder()
                .put(builder.get(indexName).getSettings())
                .put(IndexSettings.DEFAULT_FIELD_SETTING.getKey(), "val");
            imBuilder.settings(sBuilder.build());
            builder.put(imBuilder);
            ProjectMetadata project = builder.build();
            assertThat(previous.getIndicesLookup(), sameInstance(project.getIndicesLookup()));
            previous = project;
        }
    }
    /**
     * aliasedIndices() tracks which indices each alias points at, and stays correct
     * as aliases/indices are added and removed.
     */
    public void testAliasedIndices() {
        int numAliases = randomIntBetween(32, 64);
        int numIndicesPerAlias = randomIntBetween(8, 16);
        ProjectMetadata.Builder builder = ProjectMetadata.builder(randomProjectIdOrDefault());
        for (int i = 0; i < numAliases; i++) {
            String aliasName = "alias-" + i;
            for (int j = 0; j < numIndicesPerAlias; j++) {
                AliasMetadata.Builder alias = new AliasMetadata.Builder(aliasName);
                if (j == 0) {
                    // exactly one write index per alias
                    alias.writeIndex(true);
                }
                String indexName = aliasName + "-" + j;
                builder.put(
                    IndexMetadata.builder(indexName)
                        .settings(settings(IndexVersion.current()))
                        .creationDate(randomNonNegativeLong())
                        .numberOfShards(1)
                        .numberOfReplicas(0)
                        .putAlias(alias)
                );
            }
        }
        ProjectMetadata project = builder.build();
        // every alias resolves to exactly its own indices
        for (int i = 0; i < numAliases; i++) {
            String aliasName = "alias-" + i;
            Set<Index> result = project.aliasedIndices(aliasName);
            Index[] expected = IntStream.range(0, numIndicesPerAlias)
                .mapToObj(j -> aliasName + "-" + j)
                .map(name -> new Index(name, ClusterState.UNKNOWN_UUID))
                .toArray(Index[]::new);
            assertThat(result, containsInAnyOrder(expected));
        }
        // Add a new alias and index
        builder = ProjectMetadata.builder(project);
        String newAliasName = "alias-new";
        {
            builder.put(
                IndexMetadata.builder(newAliasName + "-1")
                    .settings(settings(IndexVersion.current()))
                    .creationDate(randomNonNegativeLong())
                    .numberOfShards(1)
                    .numberOfReplicas(0)
                    .putAlias(new AliasMetadata.Builder(newAliasName).writeIndex(true))
            );
        }
        project = builder.build();
        assertThat(project.aliasedIndices(), hasSize(numAliases + 1));
        assertThat(project.aliasedIndices(newAliasName), contains(new Index(newAliasName + "-1", ClusterState.UNKNOWN_UUID)));
        // Remove the new alias/index
        builder = ProjectMetadata.builder(project);
        {
            builder.remove(newAliasName + "-1");
        }
        project = builder.build();
        assertThat(project.aliasedIndices(), hasSize(numAliases));
        assertThat(project.aliasedIndices(newAliasName), empty());
        // Add a new alias that points to existing indices
        builder = ProjectMetadata.builder(project);
        {
            IndexMetadata.Builder imBuilder = new IndexMetadata.Builder(project.index("alias-1-0"));
            imBuilder.putAlias(new AliasMetadata.Builder(newAliasName));
            builder.put(imBuilder);
            imBuilder = new IndexMetadata.Builder(project.index("alias-2-1"));
            imBuilder.putAlias(new AliasMetadata.Builder(newAliasName));
            builder.put(imBuilder);
            imBuilder = new IndexMetadata.Builder(project.index("alias-3-2"));
            imBuilder.putAlias(new AliasMetadata.Builder(newAliasName));
            builder.put(imBuilder);
        }
        project = builder.build();
        assertThat(project.aliasedIndices(), hasSize(numAliases + 1));
        assertThat(
            project.aliasedIndices(newAliasName),
            containsInAnyOrder(
                new Index("alias-1-0", ClusterState.UNKNOWN_UUID),
                new Index("alias-2-1", ClusterState.UNKNOWN_UUID),
                new Index("alias-3-2", ClusterState.UNKNOWN_UUID)
            )
        );
        // Remove the new alias that points to existing indices
        builder = ProjectMetadata.builder(project);
        {
            IndexMetadata.Builder imBuilder = new IndexMetadata.Builder(project.index("alias-1-0"));
            imBuilder.removeAlias(newAliasName);
            builder.put(imBuilder);
            imBuilder = new IndexMetadata.Builder(project.index("alias-2-1"));
            imBuilder.removeAlias(newAliasName);
            builder.put(imBuilder);
            imBuilder = new IndexMetadata.Builder(project.index("alias-3-2"));
            imBuilder.removeAlias(newAliasName);
            builder.put(imBuilder);
        }
        project = builder.build();
        assertThat(project.aliasedIndices(), hasSize(numAliases));
        assertThat(project.aliasedIndices(newAliasName), empty());
    }
    /**
     * An alias must have the same is_hidden value on every index it points at:
     * all-hidden or all-non-hidden (null counts as non-hidden) is fine, a mix fails
     * project building with a message listing the indices on each side.
     */
    public void testHiddenAliasValidation() {
        final String hiddenAliasName = "hidden_alias";
        IndexMetadata hidden1 = buildIndexWithAlias("hidden1", hiddenAliasName, true, IndexVersion.current(), false);
        IndexMetadata hidden2 = buildIndexWithAlias("hidden2", hiddenAliasName, true, IndexVersion.current(), false);
        IndexMetadata hidden3 = buildIndexWithAlias("hidden3", hiddenAliasName, true, IndexVersion.current(), false);
        IndexMetadata nonHidden = buildIndexWithAlias("nonhidden1", hiddenAliasName, false, IndexVersion.current(), false);
        IndexMetadata unspecified = buildIndexWithAlias("nonhidden2", hiddenAliasName, null, IndexVersion.current(), false);
        {
            // Should be ok:
            projectWithIndices(hidden1, hidden2, hidden3);
        }
        {
            // Should be ok:
            if (randomBoolean()) {
                projectWithIndices(nonHidden, unspecified);
            } else {
                projectWithIndices(unspecified, nonHidden);
            }
        }
        {
            // mixing hidden with explicitly non-hidden fails
            IllegalStateException exception = expectThrows(
                IllegalStateException.class,
                () -> projectWithIndices(hidden1, hidden2, hidden3, nonHidden)
            );
            assertThat(exception.getMessage(), containsString("alias [" + hiddenAliasName + "] has is_hidden set to true on indices ["));
            assertThat(
                exception.getMessage(),
                allOf(
                    containsString(hidden1.getIndex().getName()),
                    containsString(hidden2.getIndex().getName()),
                    containsString(hidden3.getIndex().getName())
                )
            );
            assertThat(
                exception.getMessage(),
                containsString(
                    "but does not have is_hidden set to true on indices ["
                        + nonHidden.getIndex().getName()
                        + "]; alias must have the same is_hidden setting on all indices"
                )
            );
        }
        {
            // mixing hidden with unspecified (treated as non-hidden) also fails
            IllegalStateException exception = expectThrows(
                IllegalStateException.class,
                () -> projectWithIndices(hidden1, hidden2, hidden3, unspecified)
            );
            assertThat(exception.getMessage(), containsString("alias [" + hiddenAliasName + "] has is_hidden set to true on indices ["));
            assertThat(
                exception.getMessage(),
                allOf(
                    containsString(hidden1.getIndex().getName()),
                    containsString(hidden2.getIndex().getName()),
                    containsString(hidden3.getIndex().getName())
                )
            );
            assertThat(
                exception.getMessage(),
                containsString(
                    "but does not have is_hidden set to true on indices ["
                        + unspecified.getIndex().getName()
                        + "]; alias must have the same is_hidden setting on all indices"
                )
            );
        }
        {
            // a single hidden index among non-hidden ones fails too, regardless of order
            final IndexMetadata hiddenIndex = randomFrom(hidden1, hidden2, hidden3);
            IllegalStateException exception = expectThrows(IllegalStateException.class, () -> {
                if (randomBoolean()) {
                    projectWithIndices(nonHidden, unspecified, hiddenIndex);
                } else {
                    projectWithIndices(unspecified, nonHidden, hiddenIndex);
                }
            });
            assertThat(
                exception.getMessage(),
                containsString(
                    "alias ["
                        + hiddenAliasName
                        + "] has is_hidden set to true on "
                        + "indices ["
                        + hiddenIndex.getIndex().getName()
                        + "] but does not have is_hidden set to true on indices ["
                )
            );
            assertThat(
                exception.getMessage(),
                allOf(containsString(unspecified.getIndex().getName()), containsString(nonHidden.getIndex().getName()))
            );
            assertThat(exception.getMessage(), containsString("but does not have is_hidden set to true on indices ["));
        }
    }
    /** Alias name shared by the system-alias validation tests below. */
    public static final String SYSTEM_ALIAS_NAME = "system_alias";
    /**
     * An alias spanning both system and non-system indices fails validation, even
     * when one of the system indices was created on an old (7.x) version.
     */
    public void testSystemAliasValidationMixedVersionSystemAndRegularFails() {
        final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersions.V_7_0_0,
            IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)
        );
        final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
        final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true);
        final IndexMetadata regularIndex = buildIndexWithAlias("regular1", SYSTEM_ALIAS_NAME, false, IndexVersion.current(), false);
        IllegalStateException exception = expectThrows(
            IllegalStateException.class,
            () -> projectWithIndices(currentVersionSystem, oldVersionSystem, regularIndex)
        );
        // note: the message only lists the current-version system index, not the 7.x one
        assertThat(
            exception.getMessage(),
            containsString(
                "alias ["
                    + SYSTEM_ALIAS_NAME
                    + "] refers to both system indices ["
                    + currentVersionSystem.getIndex().getName()
                    + "] and non-system indices: ["
                    + regularIndex.getIndex().getName()
                    + "], but aliases must refer to either system or non-system indices, not both"
            )
        );
    }
    /**
     * An alias spanning a current-version system index and a regular index fails validation.
     */
    public void testSystemAliasValidationNewSystemAndRegularFails() {
        final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
        final IndexMetadata regularIndex = buildIndexWithAlias("regular1", SYSTEM_ALIAS_NAME, false, IndexVersion.current(), false);
        IllegalStateException exception = expectThrows(
            IllegalStateException.class,
            () -> projectWithIndices(currentVersionSystem, regularIndex)
        );
        assertThat(
            exception.getMessage(),
            containsString(
                "alias ["
                    + SYSTEM_ALIAS_NAME
                    + "] refers to both system indices ["
                    + currentVersionSystem.getIndex().getName()
                    + "] and non-system indices: ["
                    + regularIndex.getIndex().getName()
                    + "], but aliases must refer to either system or non-system indices, not both"
            )
        );
    }
    /**
     * An alias spanning a 7.x-created system index and a regular index is tolerated
     * (legacy leniency for pre-8.0 system indices).
     */
    public void testSystemAliasOldSystemAndNewRegular() {
        final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersions.V_7_0_0,
            IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)
        );
        final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true);
        final IndexMetadata regularIndex = buildIndexWithAlias("regular1", SYSTEM_ALIAS_NAME, false, IndexVersion.current(), false);
        // Should be ok:
        projectWithIndices(oldVersionSystem, regularIndex);
    }
    /**
     * An alias referring only to system indices (mixed creation versions) passes validation.
     * NOTE(review): the name says "AllRegular" but every index built here is a system index,
     * and the body is identical to testSystemAliasValidationAllSystemSomeOld — this looks
     * misnamed and/or duplicated; consider renaming or removing one of the two.
     */
    public void testSystemIndexValidationAllRegular() {
        final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersions.V_7_0_0,
            IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)
        );
        final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
        final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
        final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true);
        // Should be ok
        projectWithIndices(currentVersionSystem, currentVersionSystem2, oldVersionSystem);
    }
    /**
     * An alias referring only to system indices, some created on 7.x, passes validation.
     * NOTE(review): body is identical to testSystemIndexValidationAllRegular above —
     * one of the two tests appears redundant.
     */
    public void testSystemAliasValidationAllSystemSomeOld() {
        final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersions.V_7_0_0,
            IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)
        );
        final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
        final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
        final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true);
        // Should be ok:
        projectWithIndices(currentVersionSystem, currentVersionSystem2, oldVersionSystem);
    }
public void testSystemAliasValidationAll8x() {
final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true);
// Should be ok
projectWithIndices(currentVersionSystem, currentVersionSystem2);
}
private void projectWithIndices(IndexMetadata... indices) {
ProjectMetadata.Builder builder = ProjectMetadata.builder(randomProjectIdOrDefault());
for (var cursor : indices) {
builder.put(cursor, false);
}
builder.build();
}
    /**
     * Builds a one-shard index carrying a single alias.
     *
     * When {@code aliasIsHidden} is non-null it is always set on the alias. When it is
     * null, the test randomly either sets is_hidden explicitly to null or leaves it
     * entirely unset, so both encodings of "unspecified" get coverage.
     */
    private IndexMetadata buildIndexWithAlias(
        String indexName,
        String aliasName,
        @Nullable Boolean aliasIsHidden,
        IndexVersion indexCreationVersion,
        boolean isSystem
    ) {
        final AliasMetadata.Builder aliasMetadata = new AliasMetadata.Builder(aliasName);
        if (aliasIsHidden != null || randomBoolean()) {
            aliasMetadata.isHidden(aliasIsHidden);
        }
        return new IndexMetadata.Builder(indexName).settings(settings(indexCreationVersion))
            .system(isSystem)
            .numberOfShards(1)
            .numberOfReplicas(0)
            .putAlias(aliasMetadata)
            .build();
    }
/**
 * Verifies that identical mapping definitions are deduplicated in the project-wide
 * mappings-by-hash cache, and that the cache stays consistent as indices are added,
 * updated and removed.
 */
public void testMappingDuplication() {
// Generate a small pool of distinct random mapping definitions to share across indices.
final Set<String> randomMappingDefinitions;
{
int numEntries = randomIntBetween(4, 8);
randomMappingDefinitions = Sets.newHashSetWithExpectedSize(numEntries);
for (int i = 0; i < numEntries; i++) {
Map<String, Object> mapping = RandomAliasActionsGenerator.randomMap(2);
String mappingAsString = Strings.toString((builder, params) -> builder.mapContents(mapping));
randomMappingDefinitions.add(mappingAsString);
}
}
ProjectMetadata project;
int numIndices = randomIntBetween(16, 32);
{
String[] definitions = randomMappingDefinitions.toArray(String[]::new);
ProjectMetadata.Builder mb = ProjectMetadata.builder(randomProjectIdOrDefault());
for (int i = 0; i < numIndices; i++) {
// Cycle through the mapping pool so each definition is reused by several indices.
IndexMetadata.Builder indexBuilder = IndexMetadata.builder("index-" + i)
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.putMapping(definitions[i % randomMappingDefinitions.size()])
.numberOfShards(1)
.numberOfReplicas(0);
if (randomBoolean()) {
mb.put(indexBuilder);
} else {
mb.put(indexBuilder.build(), true);
}
}
project = mb.build();
}
// Each distinct mapping is stored once, regardless of how many indices use it.
assertThat(project.getMappingsByHash(), aMapWithSize(randomMappingDefinitions.size()));
assertThat(
project.indices().values().stream().map(IndexMetadata::mapping).collect(Collectors.toSet()),
hasSize(project.getMappingsByHash().size())
);
// Add a new index with an already-known mapping:
MappingMetadata mapping = project.indices().get("index-" + randomInt(numIndices - 1)).mapping();
MappingMetadata entry = project.getMappingsByHash().get(mapping.getSha256());
{
ProjectMetadata.Builder mb = new ProjectMetadata.Builder(project);
mb.put(
IndexMetadata.builder("index-" + numIndices)
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.putMapping(mapping)
.numberOfShards(1)
.numberOfReplicas(0)
);
project = mb.build();
}
// Cache size is unchanged and the existing entry is reused.
assertThat(project.getMappingsByHash(), aMapWithSize(randomMappingDefinitions.size()));
assertThat(project.getMappingsByHash().get(mapping.getSha256()), equalTo(entry));
// Remove index and ensure mapping cache stays the same
{
ProjectMetadata.Builder mb = new ProjectMetadata.Builder(project);
mb.remove("index-" + numIndices);
project = mb.build();
}
assertThat(project.getMappingsByHash(), aMapWithSize(randomMappingDefinitions.size()));
assertThat(project.getMappingsByHash().get(mapping.getSha256()), equalTo(entry));
// Update a mapping of an index:
IndexMetadata luckyIndex = project.index("index-" + randomInt(numIndices - 1));
entry = project.getMappingsByHash().get(luckyIndex.mapping().getSha256());
MappingMetadata updatedMapping = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Map.of("mapping", "updated"));
{
ProjectMetadata.Builder mb = new ProjectMetadata.Builder(project);
mb.put(IndexMetadata.builder(luckyIndex).putMapping(updatedMapping));
project = mb.build();
}
// The old entry remains (other indices still use it) and the new mapping is added.
assertThat(project.getMappingsByHash(), aMapWithSize(randomMappingDefinitions.size() + 1));
assertThat(project.getMappingsByHash().get(luckyIndex.mapping().getSha256()), equalTo(entry));
assertThat(project.getMappingsByHash().get(updatedMapping.getSha256()), equalTo(updatedMapping));
// Remove the index with updated mapping
{
ProjectMetadata.Builder mb = new ProjectMetadata.Builder(project);
mb.remove(luckyIndex.getIndex().getName());
project = mb.build();
}
// The updated mapping had a single user, so it is evicted from the cache.
assertThat(project.getMappingsByHash(), aMapWithSize(randomMappingDefinitions.size()));
assertThat(project.getMappingsByHash().get(updatedMapping.getSha256()), nullValue());
// Add an index with new mapping and then later remove it:
MappingMetadata newMapping = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Map.of("new", "mapping"));
{
ProjectMetadata.Builder mb = new ProjectMetadata.Builder(project);
mb.put(
IndexMetadata.builder("index-" + numIndices)
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
.putMapping(newMapping)
.numberOfShards(1)
.numberOfReplicas(0)
);
project = mb.build();
}
assertThat(project.getMappingsByHash(), aMapWithSize(randomMappingDefinitions.size() + 1));
assertThat(project.getMappingsByHash().get(newMapping.getSha256()), equalTo(newMapping));
{
ProjectMetadata.Builder mb = new ProjectMetadata.Builder(project);
mb.remove("index-" + numIndices);
project = mb.build();
}
assertThat(project.getMappingsByHash(), aMapWithSize(randomMappingDefinitions.size()));
assertThat(project.getMappingsByHash().get(newMapping.getSha256()), nullValue());
}
/**
 * Verifies {@code ProjectMetadata#withLifecycleState}: it updates only the index's ILM
 * execution state (bumping the index version), reuses the indices lookup, returns the same
 * instance when nothing changed, and rejects an unknown index.
 */
public void testWithLifecycleState() {
String indexName = "my-index";
String indexUUID = randomAlphaOfLength(10);
ProjectMetadata project1 = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(
IndexMetadata.builder(indexName)
.settings(settings(IndexVersion.current()).put(IndexMetadata.SETTING_INDEX_UUID, indexUUID))
.creationDate(randomNonNegativeLong())
.numberOfShards(1)
.numberOfReplicas(0)
)
.build();
IndexMetadata index1 = project1.index(indexName);
assertThat(project1.getIndicesLookup(), notNullValue());
assertThat(index1.getLifecycleExecutionState(), sameInstance(LifecycleExecutionState.EMPTY_STATE));
LifecycleExecutionState state = LifecycleExecutionState.builder().setPhase("phase").setAction("action").setStep("step").build();
ProjectMetadata project2 = project1.withLifecycleState(index1.getIndex(), state);
IndexMetadata index2 = project2.index(indexName);
// the indices lookups are the same object
assertThat(project2.getIndicesLookup(), sameInstance(project1.getIndicesLookup()));
// the lifecycle state and version were changed
assertThat(index2.getLifecycleExecutionState().asMap(), is(state.asMap()));
assertThat(index2.getVersion(), is(index1.getVersion() + 1));
// but those are the only differences between the two
IndexMetadata.Builder builder = IndexMetadata.builder(index2);
builder.version(builder.version() - 1);
builder.removeCustom(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY);
assertThat(index1, equalTo(builder.build()));
// withLifecycleState returns the same reference if nothing changed
ProjectMetadata project3 = project2.withLifecycleState(index2.getIndex(), state);
assertThat(project3, sameInstance(project2));
// withLifecycleState rejects a nonsense Index
String randomUUID = randomValueOtherThan(indexUUID, () -> randomAlphaOfLength(10));
expectThrows(IndexNotFoundException.class, () -> project1.withLifecycleState(new Index(indexName, randomUUID), state));
}
// Resolves the time_series index mode from a composable template, whether the
// "index.mode" setting lives in a referenced component template or directly on
// the composable index template.
public void testRetrieveIndexModeFromTemplateTsdb() throws IOException {
    final Template tsdbSettings = new Template(Settings.builder().put("index.mode", "time_series").build(), new CompressedXContent("{}"), null);

    // Case 1: the setting comes from the referenced component template.
    {
        ComponentTemplate component = new ComponentTemplate(tsdbSettings, null, null);
        ComposableIndexTemplate composable = ComposableIndexTemplate.builder()
            .indexPatterns(List.of("test-*"))
            .componentTemplates(List.of("component_template_1"))
            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
            .build();
        ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
            .put("component_template_1", component)
            .put("index_template_1", composable)
            .build();
        assertThat(project.retrieveIndexModeFromTemplate(composable), is(IndexMode.TIME_SERIES));
    }

    // Case 2: the setting is declared directly on the composable index template.
    {
        ComponentTemplate component = new ComponentTemplate(new Template(null, null, null), null, null);
        ComposableIndexTemplate composable = ComposableIndexTemplate.builder()
            .indexPatterns(List.of("test-*"))
            .template(tsdbSettings)
            .componentTemplates(List.of("component_template_1"))
            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
            .build();
        ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
            .put("component_template_1", component)
            .put("index_template_1", composable)
            .build();
        assertThat(project.retrieveIndexModeFromTemplate(composable), is(IndexMode.TIME_SERIES));
    }
}
// Resolves the logsdb index mode from a composable template, whether the
// "index.mode" setting lives in a referenced component template or directly on
// the composable index template.
public void testRetrieveIndexModeFromTemplateLogsdb() throws IOException {
    final Template logsdbSettings = new Template(Settings.builder().put("index.mode", "logsdb").build(), new CompressedXContent("{}"), null);

    // Case 1: the setting comes from the referenced component template.
    {
        ComponentTemplate component = new ComponentTemplate(logsdbSettings, null, null);
        ComposableIndexTemplate composable = ComposableIndexTemplate.builder()
            .indexPatterns(List.of("test-*"))
            .componentTemplates(List.of("component_template_1"))
            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
            .build();
        ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
            .put("component_template_1", component)
            .put("index_template_1", composable)
            .build();
        assertThat(project.retrieveIndexModeFromTemplate(composable), is(IndexMode.LOGSDB));
    }

    // Case 2: the setting is declared directly on the composable index template.
    {
        ComponentTemplate component = new ComponentTemplate(new Template(null, null, null), null, null);
        ComposableIndexTemplate composable = ComposableIndexTemplate.builder()
            .indexPatterns(List.of("test-*"))
            .template(logsdbSettings)
            .componentTemplates(List.of("component_template_1"))
            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
            .build();
        ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
            .put("component_template_1", component)
            .put("index_template_1", composable)
            .build();
        assertThat(project.retrieveIndexModeFromTemplate(composable), is(IndexMode.LOGSDB));
    }
}
// When neither the component template nor the composable index template declares
// "index.mode", no index mode can be resolved and the method returns null.
public void testRetrieveIndexModeFromTemplateEmpty() throws IOException {
    final Template noModeSettings = new Template(Settings.EMPTY, new CompressedXContent("{}"), null);

    // Case 1: empty settings in the referenced component template.
    {
        ComponentTemplate component = new ComponentTemplate(noModeSettings, null, null);
        ComposableIndexTemplate composable = ComposableIndexTemplate.builder()
            .indexPatterns(List.of("test-*"))
            .componentTemplates(List.of("component_template_1"))
            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
            .build();
        ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
            .put("component_template_1", component)
            .put("index_template_1", composable)
            .build();
        assertThat(project.retrieveIndexModeFromTemplate(composable), nullValue());
    }

    // Case 2: empty settings directly on the composable index template.
    {
        ComponentTemplate component = new ComponentTemplate(new Template(null, null, null), null, null);
        ComposableIndexTemplate composable = ComposableIndexTemplate.builder()
            .indexPatterns(List.of("test-*"))
            .template(noModeSettings)
            .componentTemplates(List.of("component_template_1"))
            .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
            .build();
        ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
            .put("component_template_1", component)
            .put("index_template_1", composable)
            .build();
        assertThat(project.retrieveIndexModeFromTemplate(composable), nullValue());
    }
}
private static CreateIndexResult createIndices(int numIndices, int numBackingIndices, String dataStreamName) {
// create some indices that do not back a data stream
final List<Index> indices = new ArrayList<>();
int lastIndexNum = randomIntBetween(9, 50);
ProjectMetadata.Builder b = ProjectMetadata.builder(randomProjectIdOrDefault());
for (int k = 1; k <= numIndices; k++) {
IndexMetadata im = IndexMetadata.builder(DataStream.getDefaultBackingIndexName("index", lastIndexNum))
.settings(settings(IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(1)
.build();
b.put(im, false);
indices.add(im.getIndex());
lastIndexNum = randomIntBetween(lastIndexNum + 1, lastIndexNum + 50);
}
// create some backing indices for a data stream
final List<Index> backingIndices = new ArrayList<>();
int lastBackingIndexNum = 0;
for (int k = 1; k <= numBackingIndices; k++) {
lastBackingIndexNum = randomIntBetween(lastBackingIndexNum + 1, lastBackingIndexNum + 50);
IndexMetadata im = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, lastBackingIndexNum))
.settings(settings(IndexVersion.current()))
.numberOfShards(1)
.numberOfReplicas(1)
.build();
b.put(im, false);
backingIndices.add(im.getIndex());
}
b.put(newInstance(dataStreamName, backingIndices, lastBackingIndexNum, null));
return new CreateIndexResult(indices, backingIndices, b.build());
}
private record CreateIndexResult(List<Index> indices, List<Index> backingIndices, ProjectMetadata project) {};
/**
 * Checks the default (non-multi-project) JSON x-content rendering of a project containing
 * three aliased indices, an index template and a data stream, and verifies the declared
 * chunk count matches {@link #expectedChunkCount}.
 */
public void testToXContent() throws IOException {
final ProjectMetadata projectMetadata = prepareProjectMetadata();
ToXContent.Params params = EMPTY_PARAMS;
AbstractChunkedSerializingTestCase.assertChunkCount(projectMetadata, p -> expectedChunkCount(params, p));
// Expected JSON: each %s placeholder is filled with the current index version below.
final BytesArray expected = new BytesArray(
Strings.format(
"""
{
"templates": {},
"indices": {
"index-01": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 1,
"state": "open",
"settings": {
"index": {
"number_of_shards": "1",
"number_of_replicas": "1",
"uuid": "i3e800000001",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [
"alias.1"
],
"primary_terms": {
"0": 0
},
"in_sync_allocations": {
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
"index-02": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 2,
"state": "open",
"settings": {
"index": {
"number_of_shards": "2",
"number_of_replicas": "0",
"uuid": "i7d000000002",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [
"alias.2"
],
"primary_terms": {
"0": 0,
"1": 0
},
"in_sync_allocations": {
"1": [],
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
"index-03": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 3,
"state": "open",
"settings": {
"index": {
"number_of_shards": "3",
"number_of_replicas": "1",
"uuid": "ibb800000003",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [
"alias.3"
],
"primary_terms": {
"0": 0,
"1": 0,
"2": 0
},
"in_sync_allocations": {
"2": [],
"1": [],
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
".ds-logs-ultron-2024.08.30-000001": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 1,
"state": "open",
"settings": {
"index": {
"hidden": "true",
"number_of_shards": "1",
"number_of_replicas": "2",
"uuid": "d1000001",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [],
"primary_terms": {
"0": 0
},
"in_sync_allocations": {
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
".ds-logs-ultron-2024.08.30-000002": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 3,
"state": "open",
"settings": {
"index": {
"hidden": "true",
"number_of_shards": "3",
"number_of_replicas": "1",
"uuid": "d2000002",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [],
"primary_terms": {
"0": 0,
"1": 0,
"2": 0
},
"in_sync_allocations": {
"0": [],
"1": [],
"2": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
}
},
"index_template": {
"index_template": {
"template": {
"index_patterns": [
"index-*"
],
"composed_of": [],
"priority": 10
}
}
},
"index-graveyard": {
"tombstones": []
},
"data_stream": {
"data_stream": {
"logs-ultron": {
"name": "logs-ultron",
"timestamp_field": {
"name": "@timestamp"
},
"indices": [
{
"index_name": ".ds-logs-ultron-2024.08.30-000001",
"index_uuid": "d1000001"
},
{
"index_name": ".ds-logs-ultron-2024.08.30-000002",
"index_uuid": "d2000002"
}
],
"generation": 2,
"hidden": false,
"replicated": false,
"system": false,
"allow_custom_routing": false,
"settings" : { },
"failure_rollover_on_write": true,
"rollover_on_write": false
}
},
"data_stream_aliases": {}
}
}
""",
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current()
)
);
// Comparison is on parsed JSON structure, not raw bytes (whitespace/ordering tolerant).
final BytesReference actual = XContentHelper.toXContent(projectMetadata, XContentType.JSON, randomBoolean());
assertToXContentEquivalent(expected, actual, XContentType.JSON);
}
/**
 * Same scenario as {@link #testToXContent()} but rendered with the "multi-project" param set.
 * NOTE(review): the expected JSON is currently byte-for-byte identical to the single-project
 * expectation — confirm whether multi-project rendering is meant to add project-specific
 * fields here.
 */
public void testToXContentMultiProject() throws IOException {
final ProjectMetadata projectMetadata = prepareProjectMetadata();
ToXContent.Params params = new ToXContent.MapParams(Map.of("multi-project", "true"));
AbstractChunkedSerializingTestCase.assertChunkCount(projectMetadata, params, p -> expectedChunkCount(params, p));
// Expected JSON: each %s placeholder is filled with the current index version below.
final BytesArray expected = new BytesArray(
Strings.format(
"""
{
"templates": {},
"indices": {
"index-01": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 1,
"state": "open",
"settings": {
"index": {
"number_of_shards": "1",
"number_of_replicas": "1",
"uuid": "i3e800000001",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [
"alias.1"
],
"primary_terms": {
"0": 0
},
"in_sync_allocations": {
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
"index-02": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 2,
"state": "open",
"settings": {
"index": {
"number_of_shards": "2",
"number_of_replicas": "0",
"uuid": "i7d000000002",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [
"alias.2"
],
"primary_terms": {
"0": 0,
"1": 0
},
"in_sync_allocations": {
"1": [],
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
"index-03": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 3,
"state": "open",
"settings": {
"index": {
"number_of_shards": "3",
"number_of_replicas": "1",
"uuid": "ibb800000003",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [
"alias.3"
],
"primary_terms": {
"0": 0,
"1": 0,
"2": 0
},
"in_sync_allocations": {
"2": [],
"1": [],
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
".ds-logs-ultron-2024.08.30-000001": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 1,
"state": "open",
"settings": {
"index": {
"hidden": "true",
"number_of_shards": "1",
"number_of_replicas": "2",
"uuid": "d1000001",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [],
"primary_terms": {
"0": 0
},
"in_sync_allocations": {
"0": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
},
".ds-logs-ultron-2024.08.30-000002": {
"version": 1,
"transport_version" : "0",
"mapping_version": 1,
"settings_version": 1,
"aliases_version": 1,
"routing_num_shards": 3,
"state": "open",
"settings": {
"index": {
"hidden": "true",
"number_of_shards": "3",
"number_of_replicas": "1",
"uuid": "d2000002",
"version": {
"created": "%s"
}
}
},
"mappings": {},
"aliases": [],
"primary_terms": {
"0": 0,
"1": 0,
"2": 0
},
"in_sync_allocations": {
"0": [],
"1": [],
"2": []
},
"rollover_info": {},
"mappings_updated_version": %s,
"system": false,
"timestamp_range": {
"shards": []
},
"event_ingested_range": {
"shards": []
}
}
},
"index_template": {
"index_template": {
"template": {
"index_patterns": [
"index-*"
],
"composed_of": [],
"priority": 10
}
}
},
"index-graveyard": {
"tombstones": []
},
"data_stream": {
"data_stream": {
"logs-ultron": {
"name": "logs-ultron",
"timestamp_field": {
"name": "@timestamp"
},
"indices": [
{
"index_name": ".ds-logs-ultron-2024.08.30-000001",
"index_uuid": "d1000001"
},
{
"index_name": ".ds-logs-ultron-2024.08.30-000002",
"index_uuid": "d2000002"
}
],
"generation": 2,
"hidden": false,
"replicated": false,
"system": false,
"allow_custom_routing": false,
"settings" : { },
"failure_rollover_on_write": true,
"rollover_on_write": false
}
},
"data_stream_aliases": {}
}
}
""",
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current(),
IndexVersion.current()
)
);
// Comparison is on parsed JSON structure, not raw bytes (whitespace/ordering tolerant).
final BytesReference actual = XContentHelper.toXContent(projectMetadata, XContentType.JSON, params, randomBoolean());
assertToXContentEquivalent(expected, actual, XContentType.JSON);
}
/**
 * Builds a project containing three aliased indices ("index-01".."index-03" with fixed,
 * deterministic UUIDs), a composable index template, and the "logs-ultron" data stream with
 * two hidden backing indices. The toXContent tests assert against the exact UUIDs set here.
 */
private static ProjectMetadata prepareProjectMetadata() {
    final ProjectId projectId = randomUniqueProjectId();
    final ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId);
    for (int i = 1; i <= 3; i++) {
        // index-0i: i shards, (i % 2) replicas, UUID derived deterministically from i.
        builder.put(
            IndexMetadata.builder(Strings.format("index-%02d", i))
                .settings(
                    indexSettings(IndexVersion.current(), i, i % 2).put(
                        IndexMetadata.SETTING_INDEX_UUID,
                        Strings.format("i%x%04d", (i * 1000 << 16), i)
                    )
                )
                .putAlias(AliasMetadata.builder(Strings.format("alias.%d", i)).build())
                .build(),
            false
        );
    }
    builder.indexTemplates(
        Map.of("template", ComposableIndexTemplate.builder().indexPatterns(List.of("index-*")).priority(10L).build())
    );
    final String dataStreamName = "logs-ultron";
    final IndexMetadata backingIndex1 = DataStreamTestHelper.createBackingIndex(dataStreamName, 1, 1725000000000L)
        .settings(
            indexSettings(IndexVersion.current(), 1, 2).put("index.hidden", true)
                .put(IndexMetadata.SETTING_INDEX_UUID, Strings.format("d%x", 0x1000001))
        )
        .build();
    final IndexMetadata backingIndex2 = DataStreamTestHelper.createBackingIndex(dataStreamName, 2, 1725025000000L)
        .settings(
            indexSettings(IndexVersion.current(), 3, 1).put("index.hidden", true)
                .put(IndexMetadata.SETTING_INDEX_UUID, Strings.format("d%x", 0x2000002))
        )
        .build();
    DataStream dataStream = DataStreamTestHelper.newInstance(
        dataStreamName,
        List.of(backingIndex1.getIndex(), backingIndex2.getIndex())
    );
    builder.put(backingIndex1, false);
    builder.put(backingIndex2, false);
    builder.put(dataStream);
    // Inlined the redundant local variable that previously held the built project.
    return builder.build();
}
/**
 * Computes the number of x-content chunks a {@link ProjectMetadata} instance is expected
 * to emit for the given params, mirroring the per-section chunking rules of each known
 * {@code ProjectCustom} implementation.
 */
static int expectedChunkCount(ToXContent.Params params, ProjectMetadata project) {
final var context = Metadata.XContentContext.from(params);
long chunkCount = 0;
if (context == Metadata.XContentContext.API) {
// 2 chunks wrapping "indices" and one chunk per index
chunkCount += 2 + project.indices().size();
}
// 2 chunks wrapping "templates" and one chunk per template
chunkCount += 2 + project.templates().size();
for (Metadata.ProjectCustom custom : project.customs().values()) {
chunkCount += 2; // open / close object
if (custom instanceof ComponentTemplateMetadata componentTemplateMetadata) {
chunkCount += checkChunkSize(custom, params, 2 + componentTemplateMetadata.componentTemplates().size());
} else if (custom instanceof ComposableIndexTemplateMetadata composableIndexTemplateMetadata) {
chunkCount += checkChunkSize(custom, params, 2 + composableIndexTemplateMetadata.indexTemplates().size());
} else if (custom instanceof DataStreamMetadata dataStreamMetadata) {
chunkCount += checkChunkSize(
custom,
params,
4 + dataStreamMetadata.dataStreams().size() + dataStreamMetadata.getDataStreamAliases().size()
);
} else if (custom instanceof IndexGraveyard indexGraveyard) {
chunkCount += checkChunkSize(custom, params, 2 + indexGraveyard.getTombstones().size());
} else if (custom instanceof IngestMetadata ingestMetadata) {
chunkCount += checkChunkSize(custom, params, 2 + ingestMetadata.getPipelines().size());
} else if (custom instanceof PersistentTasksCustomMetadata persistentTasksCustomMetadata) {
chunkCount += checkChunkSize(custom, params, 3 + persistentTasksCustomMetadata.tasks().size());
} else if (custom instanceof RepositoriesMetadata repositoriesMetadata) {
chunkCount += checkChunkSize(custom, params, repositoriesMetadata.repositories().size());
} else {
// could be anything, we have to just try it
chunkCount += count(custom.toXContentChunked(params));
}
}
return Math.toIntExact(chunkCount);
}
private static | ProjectMetadataTests |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/SplitGroupMultiXmlTest.java | {
"start": 1071,
"end": 3087
} | class ____ extends ContextTestSupport {
@Test
public void testTokenXMLPairGroup() throws Exception {
// Splitting 5 <order> elements in groups of 2 yields 3 messages: 2 + 2 + 1.
MockEndpoint mock = getMockEndpoint("mock:split");
mock.expectedMessageCount(3);
// The root element's namespace is inherited by every split <order> element.
mock.message(0).body().isEqualTo(
"<order id=\"1\" xmlns=\"http:acme.com\">Camel in Action</order><order id=\"2\" xmlns=\"http:acme.com\">ActiveMQ in Action</order>");
mock.message(1).body().isEqualTo(
"<order id=\"3\" xmlns=\"http:acme.com\">Spring in Action</order><order id=\"4\" xmlns=\"http:acme.com\">Scala in Action</order>");
mock.message(2).body().isEqualTo("<order id=\"5\" xmlns=\"http:acme.com\">Groovy in Action</order>");
// Drop the test payload into the route's input directory and wait for the expectations.
String body = createBody();
template.sendBodyAndHeader(fileUri(), body, Exchange.FILE_NAME, "orders.xml");
assertMockEndpointsSatisfied();
}
// Produces the XML payload consumed by the route: an <orders> document with five
// <order> children under the http:acme.com namespace (no trailing newline).
protected String createBody() {
    return String.join(
        "\n",
        "<?xml version=\"1.0\"?>",
        "<orders xmlns=\"http:acme.com\">",
        "  <order id=\"1\">Camel in Action</order>",
        "  <order id=\"2\">ActiveMQ in Action</order>",
        "  <order id=\"3\">Spring in Action</order>",
        "  <order id=\"4\">Scala in Action</order>",
        "  <order id=\"5\">Groovy in Action</order>",
        "</orders>"
    );
}
@Override
protected RouteBuilder createRouteBuilder() {
// Consumes files from the test directory and splits each XML document into
// exchanges of up to two <order> elements each.
return new RouteBuilder() {
@Override
public void configure() {
// START SNIPPET: e1
from(fileUri("?initialDelay=0&delay=10"))
// split the order child tags, and inherit namespaces from
// the orders root tag, grouping 2 elements per exchange
.split().tokenizeXML("order", "orders", 2).to("log:split").to("mock:split");
// END SNIPPET: e1
}
};
}
}
| SplitGroupMultiXmlTest |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/configurers/RememberMeConfigurer.java | {
"start": 3641,
"end": 18672
} | class ____<H extends HttpSecurityBuilder<H>>
extends AbstractHttpConfigurer<RememberMeConfigurer<H>, H> {
/**
* The default name for remember me parameter name and remember me cookie name
*/
private static final String DEFAULT_REMEMBER_ME_NAME = "remember-me";
private AuthenticationSuccessHandler authenticationSuccessHandler;
private String key;
private RememberMeServices rememberMeServices;
private LogoutHandler logoutHandler;
private String rememberMeParameter = DEFAULT_REMEMBER_ME_NAME;
private String rememberMeCookieName = DEFAULT_REMEMBER_ME_NAME;
private String rememberMeCookieDomain;
private PersistentTokenRepository tokenRepository;
private UserDetailsService userDetailsService;
private Integer tokenValiditySeconds;
private Boolean useSecureCookie;
private Boolean alwaysRemember;
/**
 * Creates a new instance. The remember-me parameter and cookie names default to
 * {@code "remember-me"}.
 */
public RememberMeConfigurer() {
}
/**
 * Allows specifying how long (in seconds) a token is valid for
 * @param tokenValiditySeconds the number of seconds a remember-me token remains valid
 * @return {@link RememberMeConfigurer} for further customization
 * @see AbstractRememberMeServices#setTokenValiditySeconds(int)
 */
public RememberMeConfigurer<H> tokenValiditySeconds(int tokenValiditySeconds) {
this.tokenValiditySeconds = tokenValiditySeconds;
return this;
}
/**
 * Whether the cookie should be flagged as secure or not. Secure cookies can only be
 * sent over an HTTPS connection and thus cannot be accidentally submitted over HTTP
 * where they could be intercepted.
 * <p>
 * By default the cookie will be secure if the request is secure. If you only want to
 * use remember-me over HTTPS (recommended) you should set this property to
 * {@code true}.
 * @param useSecureCookie set to {@code true} to always use secure cookies,
 * {@code false} to disable their use.
 * @return the {@link RememberMeConfigurer} for further customization
 * @see AbstractRememberMeServices#setUseSecureCookie(boolean)
 */
public RememberMeConfigurer<H> useSecureCookie(boolean useSecureCookie) {
this.useSecureCookie = useSecureCookie;
return this;
}
/**
 * Specifies the {@link UserDetailsService} used to look up the {@link UserDetails}
 * when a remember me token is valid. When using a
 * {@link org.springframework.security.web.SecurityFilterChain} bean, the default is
 * to look for a {@link UserDetailsService} bean. Alternatively, one can populate
 * {@link #rememberMeServices(RememberMeServices)}.
 * @param userDetailsService the {@link UserDetailsService} to configure
 * @return the {@link RememberMeConfigurer} for further customization
 * @see AbstractRememberMeServices
 */
public RememberMeConfigurer<H> userDetailsService(UserDetailsService userDetailsService) {
this.userDetailsService = userDetailsService;
return this;
}
/**
 * Specifies the {@link PersistentTokenRepository} to use. When not set, the default is
 * to use {@link TokenBasedRememberMeServices} instead.
 * @param tokenRepository the {@link PersistentTokenRepository} to use
 * @return the {@link RememberMeConfigurer} for further customization
 */
public RememberMeConfigurer<H> tokenRepository(PersistentTokenRepository tokenRepository) {
this.tokenRepository = tokenRepository;
return this;
}
/**
 * Sets the key to identify tokens created for remember me authentication. Default is
 * a secure randomly generated key. If {@link #rememberMeServices(RememberMeServices)}
 * is specified and is of type {@link AbstractRememberMeServices}, then the default is
 * the key set in {@link AbstractRememberMeServices}.
 * @param key the key to identify tokens created for remember me authentication
 * @return the {@link RememberMeConfigurer} for further customization
 */
public RememberMeConfigurer<H> key(String key) {
this.key = key;
return this;
}
/**
 * The HTTP parameter used to indicate to remember the user at time of login.
 * Defaults to {@code "remember-me"}.
 * @param rememberMeParameter the HTTP parameter used to indicate to remember the user
 * @return the {@link RememberMeConfigurer} for further customization
 */
public RememberMeConfigurer<H> rememberMeParameter(String rememberMeParameter) {
this.rememberMeParameter = rememberMeParameter;
return this;
}
/**
 * The name of the cookie which stores the token for remember me authentication.
 * Defaults to 'remember-me'.
 * @param rememberMeCookieName the name of the cookie which stores the token for
 * remember me authentication
 * @return the {@link RememberMeConfigurer} for further customization
 * @since 4.0.1
 */
public RememberMeConfigurer<H> rememberMeCookieName(String rememberMeCookieName) {
this.rememberMeCookieName = rememberMeCookieName;
return this;
}
/**
 * The domain name within which the remember me cookie is visible.
 * @param rememberMeCookieDomain the domain name within which the remember me cookie
 * is visible.
 * @return the {@link RememberMeConfigurer} for further customization
 * @since 4.1.0
 */
public RememberMeConfigurer<H> rememberMeCookieDomain(String rememberMeCookieDomain) {
this.rememberMeCookieDomain = rememberMeCookieDomain;
return this;
}
/**
 * Allows control over the destination a remembered user is sent to when they are
 * successfully authenticated. By default, the filter will just allow the current
 * request to proceed, but if an {@code AuthenticationSuccessHandler} is set, it will
 * be invoked and the {@code doFilter()} method will return immediately, thus allowing
 * the application to redirect the user to a specific URL, regardless of what the
 * original request was for.
 * @param authenticationSuccessHandler the strategy to invoke immediately before
 * returning from {@code doFilter()}.
 * @return {@link RememberMeConfigurer} for further customization
 * @see RememberMeAuthenticationFilter#setAuthenticationSuccessHandler(AuthenticationSuccessHandler)
 */
public RememberMeConfigurer<H> authenticationSuccessHandler(
AuthenticationSuccessHandler authenticationSuccessHandler) {
this.authenticationSuccessHandler = authenticationSuccessHandler;
return this;
}
	/**
	 * Specify the {@link RememberMeServices} to use. When supplied, it is used as-is and
	 * the parameter/cookie customizations on this configurer are not applied to it;
	 * combining it with a custom cookie name raises an {@link IllegalArgumentException}
	 * during initialization.
	 * @param rememberMeServices the {@link RememberMeServices} to use
	 * @return the {@link RememberMeConfigurer} for further customizations
	 * @see RememberMeServices
	 */
	public RememberMeConfigurer<H> rememberMeServices(RememberMeServices rememberMeServices) {
		this.rememberMeServices = rememberMeServices;
		return this;
	}
	/**
	 * Whether the cookie should always be created even if the remember-me parameter is
	 * not set.
	 * <p>
	 * By default this will be set to {@code false}.
	 * @param alwaysRemember set to {@code true} to always trigger remember me,
	 * {@code false} to use the remember-me parameter.
	 * @return the {@link RememberMeConfigurer} for further customization
	 * @see AbstractRememberMeServices#setAlwaysRemember(boolean)
	 */
	public RememberMeConfigurer<H> alwaysRemember(boolean alwaysRemember) {
		this.alwaysRemember = alwaysRemember;
		return this;
	}
	@SuppressWarnings("unchecked")
	@Override
	public void init(H http) {
		// Fail fast on conflicting configuration before any objects are created
		validateInput();
		String key = getKey();
		RememberMeServices rememberMeServices = getRememberMeServices(http, key);
		// Publish the services so other configurers (e.g. form login) can pick them up
		http.setSharedObject(RememberMeServices.class, rememberMeServices);
		LogoutConfigurer<H> logoutConfigurer = http.getConfigurer(LogoutConfigurer.class);
		if (logoutConfigurer != null && this.logoutHandler != null) {
			// Ensure the remember-me cookie/token is cleaned up on logout
			logoutConfigurer.addLogoutHandler(this.logoutHandler);
		}
		RememberMeAuthenticationProvider authenticationProvider = new RememberMeAuthenticationProvider(key);
		// Allow ObjectPostProcessors to customize the provider before registration
		authenticationProvider = postProcess(authenticationProvider);
		http.authenticationProvider(authenticationProvider);
		initDefaultLoginFilter(http);
	}
	@Override
	public void configure(H http) {
		RememberMeAuthenticationFilter rememberMeFilter = new RememberMeAuthenticationFilter(
				http.getSharedObject(AuthenticationManager.class), this.rememberMeServices);
		if (this.authenticationSuccessHandler != null) {
			rememberMeFilter.setAuthenticationSuccessHandler(this.authenticationSuccessHandler);
		}
		// When explicit SecurityContext saving is required, the filter must persist the
		// context itself via the configured repository
		SecurityContextConfigurer<?> securityContextConfigurer = http.getConfigurer(SecurityContextConfigurer.class);
		if (securityContextConfigurer != null && securityContextConfigurer.isRequireExplicitSave()) {
			SecurityContextRepository securityContextRepository = securityContextConfigurer
					.getSecurityContextRepository();
			rememberMeFilter.setSecurityContextRepository(securityContextRepository);
		}
		rememberMeFilter.setSecurityContextHolderStrategy(getSecurityContextHolderStrategy());
		SessionAuthenticationStrategy sessionAuthenticationStrategy = http
				.getSharedObject(SessionAuthenticationStrategy.class);
		if (sessionAuthenticationStrategy != null) {
			rememberMeFilter.setSessionAuthenticationStrategy(sessionAuthenticationStrategy);
		}
		// Post-process last so customizations see the fully configured filter
		rememberMeFilter = postProcess(rememberMeFilter);
		http.addFilter(rememberMeFilter);
	}
/**
* Validate rememberMeServices and rememberMeCookieName have not been set at the same
* time.
*/
private void validateInput() {
if (this.rememberMeServices != null && !DEFAULT_REMEMBER_ME_NAME.equals(this.rememberMeCookieName)) {
throw new IllegalArgumentException("Can not set rememberMeCookieName and custom rememberMeServices.");
}
}
	/**
	 * Returns the HTTP parameter used to indicate to remember the user at time of login.
	 * @return the HTTP parameter used to indicate to remember the user
	 */
	private String getRememberMeParameter() {
		return this.rememberMeParameter;
	}
/**
* If available, initializes the {@link DefaultLoginPageGeneratingFilter} shared
* object.
* @param http the {@link HttpSecurityBuilder} to use
*/
private void initDefaultLoginFilter(H http) {
DefaultLoginPageGeneratingFilter loginPageGeneratingFilter = http
.getSharedObject(DefaultLoginPageGeneratingFilter.class);
if (loginPageGeneratingFilter != null) {
loginPageGeneratingFilter.setRememberMeParameter(getRememberMeParameter());
}
}
	/**
	 * Gets the user-supplied {@link RememberMeServices} or creates and configures a
	 * default implementation. The result is cached on this configurer and also recorded
	 * as the logout handler when it implements {@link LogoutHandler}.
	 * @param http the {@link HttpSecurity} to lookup shared objects
	 * @param key the {@link #key(String)}
	 * @return the {@link RememberMeServices} to use
	 */
	private RememberMeServices getRememberMeServices(H http, String key) {
		if (this.rememberMeServices != null) {
			// A user-supplied implementation that also handles logout is registered for
			// logout cleanup, unless a handler has already been captured
			if (this.rememberMeServices instanceof LogoutHandler && this.logoutHandler == null) {
				this.logoutHandler = (LogoutHandler) this.rememberMeServices;
			}
			return this.rememberMeServices;
		}
		AbstractRememberMeServices tokenRememberMeServices = createRememberMeServices(http, key);
		tokenRememberMeServices.setParameter(this.rememberMeParameter);
		tokenRememberMeServices.setCookieName(this.rememberMeCookieName);
		// Optional customizations are only applied when explicitly configured, so the
		// services' own defaults are otherwise preserved
		if (this.rememberMeCookieDomain != null) {
			tokenRememberMeServices.setCookieDomain(this.rememberMeCookieDomain);
		}
		if (this.tokenValiditySeconds != null) {
			tokenRememberMeServices.setTokenValiditySeconds(this.tokenValiditySeconds);
		}
		if (this.useSecureCookie != null) {
			tokenRememberMeServices.setUseSecureCookie(this.useSecureCookie);
		}
		if (this.alwaysRemember != null) {
			tokenRememberMeServices.setAlwaysRemember(this.alwaysRemember);
		}
		tokenRememberMeServices.afterPropertiesSet();
		this.logoutHandler = tokenRememberMeServices;
		this.rememberMeServices = tokenRememberMeServices;
		return tokenRememberMeServices;
	}
	/**
	 * Creates the {@link RememberMeServices} to use when none is provided. The result is
	 * either {@link PersistentTokenBasedRememberMeServices} (if a
	 * {@link PersistentTokenRepository} is specified), else
	 * {@link TokenBasedRememberMeServices}.
	 * @param http the {@link HttpSecurity} to lookup shared objects
	 * @param key the {@link #key(String)}
	 * @return the {@link RememberMeServices} to use
	 */
	private AbstractRememberMeServices createRememberMeServices(H http, String key) {
		return (this.tokenRepository != null) ? createPersistentRememberMeServices(http, key)
				: createTokenBasedRememberMeServices(http, key);
	}
/**
* Creates {@link TokenBasedRememberMeServices}
* @param http the {@link HttpSecurity} to lookup shared objects
* @param key the {@link #key(String)}
* @return the {@link TokenBasedRememberMeServices}
*/
private AbstractRememberMeServices createTokenBasedRememberMeServices(H http, String key) {
UserDetailsService userDetailsService = getUserDetailsService(http);
return new TokenBasedRememberMeServices(key, userDetailsService);
}
/**
* Creates {@link PersistentTokenBasedRememberMeServices}
* @param http the {@link HttpSecurity} to lookup shared objects
* @param key the {@link #key(String)}
* @return the {@link PersistentTokenBasedRememberMeServices}
*/
private AbstractRememberMeServices createPersistentRememberMeServices(H http, String key) {
UserDetailsService userDetailsService = getUserDetailsService(http);
return new PersistentTokenBasedRememberMeServices(key, userDetailsService, this.tokenRepository);
}
/**
* Gets the {@link UserDetailsService} to use. Either the explicitly configured
* {@link UserDetailsService} from {@link #userDetailsService(UserDetailsService)}, a
* shared object from {@link HttpSecurity#getSharedObject(Class)} or the
* {@link UserDetailsService} bean.
* @param http {@link HttpSecurity} to get the shared {@link UserDetailsService}
* @return the {@link UserDetailsService} to use
*/
private UserDetailsService getUserDetailsService(H http) {
if (this.userDetailsService == null) {
this.userDetailsService = getSharedOrBean(http, UserDetailsService.class);
}
Assert.state(this.userDetailsService != null,
() -> "userDetailsService cannot be null. Invoke " + RememberMeConfigurer.class.getSimpleName()
+ "#userDetailsService(UserDetailsService) or see its javadoc for alternative approaches.");
return this.userDetailsService;
}
/**
* Gets the key to use for validating remember me tokens. If a value was passed into
* {@link #key(String)}, then that is returned. Alternatively, if a key was specified
* in the {@link #rememberMeServices(RememberMeServices)}}, then that is returned. If
* no key was specified in either of those cases, then a secure random string is
* generated.
* @return the remember me key to use
*/
private String getKey() {
if (this.key == null) {
if (this.rememberMeServices instanceof AbstractRememberMeServices) {
this.key = ((AbstractRememberMeServices) this.rememberMeServices).getKey();
}
else {
this.key = UUID.randomUUID().toString();
}
}
return this.key;
}
private <C> C getSharedOrBean(H http, Class<C> type) {
C shared = http.getSharedObject(type);
if (shared != null) {
return shared;
}
ApplicationContext context = getBuilder().getSharedObject(ApplicationContext.class);
if (context == null) {
return null;
}
return context.getBeanProvider(type).getIfUnique();
}
}
| RememberMeConfigurer |
java | apache__rocketmq | example/src/main/java/org/apache/rocketmq/example/filter/SqlFilterConsumer.java | {
"start": 1299,
"end": 2277
} | class ____ {
public static void main(String[] args) throws Exception {
DefaultMQPushConsumer consumer = new DefaultMQPushConsumer("please_rename_unique_group_name");
// Don't forget to set enablePropertyFilter=true in broker
consumer.subscribe("SqlFilterTest",
MessageSelector.bySql("(TAGS is not null and TAGS in ('TagA', 'TagB'))" +
"and (a is not null and a between 0 and 3)"));
consumer.registerMessageListener(new MessageListenerConcurrently() {
@Override
public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
ConsumeConcurrentlyContext context) {
System.out.printf("%s Receive New Messages: %s %n", Thread.currentThread().getName(), msgs);
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
}
});
consumer.start();
System.out.printf("Consumer Started.%n");
}
}
| SqlFilterConsumer |
java | quarkusio__quarkus | extensions/smallrye-health/runtime/src/main/java/io/quarkus/smallrye/health/runtime/SmallRyeHealthRecorder.java | {
"start": 758,
"end": 2852
} | class ____ {
private final SmallRyeHealthBuildFixedConfig buildFixedConfig;
private final RuntimeValue<SmallRyeHealthRuntimeConfig> runtimeConfig;
public SmallRyeHealthRecorder(
final SmallRyeHealthBuildFixedConfig buildFixedConfig,
final RuntimeValue<SmallRyeHealthRuntimeConfig> runtimeConfig) {
this.buildFixedConfig = buildFixedConfig;
this.runtimeConfig = runtimeConfig;
}
public void registerHealthCheckResponseProvider(Class<? extends HealthCheckResponseProvider> providerClass) {
try {
HealthCheckResponse.setResponseProvider(providerClass.getConstructor().newInstance());
} catch (Exception e) {
throw new IllegalStateException(
"Unable to instantiate service " + providerClass + " using the no-arg constructor.");
}
}
public Handler<RoutingContext> uiHandler(String healthUiFinalDestination, String healthUiPath,
List<FileSystemStaticHandler.StaticWebRootConfiguration> webRootConfigurations, ShutdownContext shutdownContext) {
if (runtimeConfig.getValue().enable().orElse(runtimeConfig.getValue().enabled())) {
WebJarStaticHandler handler = new WebJarStaticHandler(healthUiFinalDestination, healthUiPath,
webRootConfigurations);
shutdownContext.addShutdownTask(new ShutdownContext.CloseRunnable(handler));
return handler;
} else {
return new WebJarNotFoundHandler();
}
}
public void processSmallRyeHealthRuntimeConfiguration() {
SmallRyeHealthReporter reporter = Arc.container().select(SmallRyeHealthReporter.class).get();
reporter.setAdditionalProperties(runtimeConfig.getValue().additionalProperties());
reporter.setHealthChecksConfigs(runtimeConfig.getValue().check().entrySet().stream()
.collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().enabled())));
SmallRyeHealthHandlerBase.problemDetails = buildFixedConfig.includeProblemDetails();
}
}
| SmallRyeHealthRecorder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ImpossibleNullComparisonTest.java | {
"start": 17532,
"end": 17958
} | class ____ {
public boolean o(Optional<String> o) {
// BUG: Diagnostic contains: o.isEmpty()
return o.get() == null;
}
}
""")
.doTest();
}
@Test
public void optionalGetSwitched() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.util.Optional;
public | Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawErasureCoderFactory.java | {
"start": 1159,
"end": 1751
} | class ____ implements RawErasureCoderFactory {
public static final String CODER_NAME = "xor_native";
@Override
public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
return new NativeXORRawEncoder(coderOptions);
}
@Override
public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
return new NativeXORRawDecoder(coderOptions);
}
@Override
public String getCoderName() {
return CODER_NAME;
}
@Override
public String getCodecName() {
return ErasureCodeConstants.XOR_CODEC_NAME;
}
}
| NativeXORRawErasureCoderFactory |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/amazon/AwsSecretSettingsTests.java | {
"start": 1220,
"end": 5728
} | class ____ extends AbstractBWCWireSerializationTestCase<AwsSecretSettings> {
public void testNewSecretSettings() {
AwsSecretSettings initialSettings = createRandom();
AwsSecretSettings newSettings = createRandom();
AwsSecretSettings finalSettings = (AwsSecretSettings) initialSettings.newSecretSettings(
Map.of(ACCESS_KEY_FIELD, newSettings.accessKey().toString(), SECRET_KEY_FIELD, newSettings.secretKey().toString())
);
assertEquals(newSettings, finalSettings);
}
public void testIt_CreatesSettings_ReturnsNullFromMap_null() {
var secrets = AwsSecretSettings.fromMap(null);
assertNull(secrets);
}
public void testIt_CreatesSettings_FromMap_WithValues() {
var secrets = AwsSecretSettings.fromMap(new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest", SECRET_KEY_FIELD, "secrettest")));
assertThat(
secrets,
is(new AwsSecretSettings(new SecureString("accesstest".toCharArray()), new SecureString("secrettest".toCharArray())))
);
}
public void testIt_CreatesSettings_FromMap_IgnoresExtraKeys() {
var secrets = AwsSecretSettings.fromMap(
new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest", SECRET_KEY_FIELD, "secrettest", "extrakey", "extravalue"))
);
assertThat(
secrets,
is(new AwsSecretSettings(new SecureString("accesstest".toCharArray()), new SecureString("secrettest".toCharArray())))
);
}
public void testIt_FromMap_ThrowsValidationException_AccessKeyMissing() {
var thrownException = expectThrows(
ValidationException.class,
() -> AwsSecretSettings.fromMap(new HashMap<>(Map.of(SECRET_KEY_FIELD, "secrettest")))
);
assertThat(
thrownException.getMessage(),
containsString(Strings.format("[secret_settings] does not contain the required setting [%s]", ACCESS_KEY_FIELD))
);
}
public void testIt_FromMap_ThrowsValidationException_SecretKeyMissing() {
var thrownException = expectThrows(
ValidationException.class,
() -> AwsSecretSettings.fromMap(new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest")))
);
assertThat(
thrownException.getMessage(),
containsString(Strings.format("[secret_settings] does not contain the required setting [%s]", SECRET_KEY_FIELD))
);
}
public void testToXContent_CreatesProperContent() throws IOException {
var secrets = AwsSecretSettings.fromMap(new HashMap<>(Map.of(ACCESS_KEY_FIELD, "accesstest", SECRET_KEY_FIELD, "secrettest")));
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
secrets.toXContent(builder, null);
String xContentResult = Strings.toString(builder);
assertThat(xContentResult, CoreMatchers.is("""
{"access_key":"accesstest","secret_key":"secrettest"}"""));
}
public static Map<String, Object> getAmazonBedrockSecretSettingsMap(String accessKey, String secretKey) {
return new HashMap<String, Object>(Map.of(ACCESS_KEY_FIELD, accessKey, SECRET_KEY_FIELD, secretKey));
}
@Override
protected AwsSecretSettings mutateInstanceForVersion(AwsSecretSettings instance, TransportVersion version) {
return instance;
}
@Override
protected Writeable.Reader<AwsSecretSettings> instanceReader() {
return AwsSecretSettings::new;
}
@Override
protected AwsSecretSettings createTestInstance() {
return createRandom();
}
@Override
protected AwsSecretSettings mutateInstance(AwsSecretSettings instance) throws IOException {
if (randomBoolean()) {
var accessKey = randomValueOtherThan(instance.accessKey().toString(), () -> randomAlphaOfLength(10));
return new AwsSecretSettings(new SecureString(accessKey.toCharArray()), instance.secretKey());
} else {
var secretKey = randomValueOtherThan(instance.secretKey().toString(), () -> randomAlphaOfLength(10));
return new AwsSecretSettings(instance.accessKey(), new SecureString(secretKey.toCharArray()));
}
}
private static AwsSecretSettings createRandom() {
return new AwsSecretSettings(
new SecureString(randomAlphaOfLength(10).toCharArray()),
new SecureString(randomAlphaOfLength(10).toCharArray())
);
}
}
| AwsSecretSettingsTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.