language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIService.java | {
"start": 3662,
"end": 15262
} | class ____ extends SenderService implements RerankingInferenceService {
public static final String NAME = "voyageai";
private static final String SERVICE_NAME = "Voyage AI";
private static final EnumSet<TaskType> supportedTaskTypes = EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.RERANK);
private static final Integer DEFAULT_BATCH_SIZE = 7;
private static final Map<String, Integer> MODEL_BATCH_SIZES = Map.of(
"voyage-multimodal-3",
7,
"voyage-3-large",
7,
"voyage-code-3",
7,
"voyage-3",
10,
"voyage-3-lite",
30,
"voyage-finance-2",
7,
"voyage-law-2",
7,
"voyage-code-2",
7,
"voyage-2",
72,
"voyage-02",
72
);
private static final Map<String, Integer> RERANKERS_INPUT_SIZE = Map.of(
"rerank-lite-1",
2800 // The smallest model has a 4K context length https://docs.voyageai.com/docs/reranker
);
/**
* Apart from rerank-lite-1 all other models have a context length of at least 8k.
* This value is based on 1 token == 0.75 words and allowing for some overhead
*/
private static final int DEFAULT_RERANKER_INPUT_SIZE_WORDS = 5500;
public static final EnumSet<InputType> VALID_INPUT_TYPE_VALUES = EnumSet.of(
InputType.INGEST,
InputType.SEARCH,
InputType.INTERNAL_INGEST,
InputType.INTERNAL_SEARCH
);
private static final TransportVersion VOYAGE_AI_INTEGRATION_ADDED = TransportVersion.fromName("voyage_ai_integration_added");
public VoyageAIService(
HttpRequestSender.Factory factory,
ServiceComponents serviceComponents,
InferenceServiceExtension.InferenceServiceFactoryContext context
) {
this(factory, serviceComponents, context.clusterService());
}
public VoyageAIService(HttpRequestSender.Factory factory, ServiceComponents serviceComponents, ClusterService clusterService) {
super(factory, serviceComponents, clusterService);
}
@Override
public String name() {
return NAME;
}
@Override
public void parseRequestConfig(
String inferenceEntityId,
TaskType taskType,
Map<String, Object> config,
ActionListener<Model> parsedModelListener
) {
try {
Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
Map<String, Object> taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS);
ChunkingSettings chunkingSettings = null;
if (TaskType.TEXT_EMBEDDING.equals(taskType)) {
chunkingSettings = ChunkingSettingsBuilder.fromMap(
removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)
);
}
VoyageAIModel model = createModel(
inferenceEntityId,
taskType,
serviceSettingsMap,
taskSettingsMap,
chunkingSettings,
serviceSettingsMap,
ConfigurationParseContext.REQUEST
);
throwIfNotEmptyMap(config, NAME);
throwIfNotEmptyMap(serviceSettingsMap, NAME);
throwIfNotEmptyMap(taskSettingsMap, NAME);
parsedModelListener.onResponse(model);
} catch (Exception e) {
parsedModelListener.onFailure(e);
}
}
private static VoyageAIModel createModelFromPersistent(
String inferenceEntityId,
TaskType taskType,
Map<String, Object> serviceSettings,
Map<String, Object> taskSettings,
ChunkingSettings chunkingSettings,
@Nullable Map<String, Object> secretSettings
) {
return createModel(
inferenceEntityId,
taskType,
serviceSettings,
taskSettings,
chunkingSettings,
secretSettings,
ConfigurationParseContext.PERSISTENT
);
}
private static VoyageAIModel createModel(
String inferenceEntityId,
TaskType taskType,
Map<String, Object> serviceSettings,
Map<String, Object> taskSettings,
ChunkingSettings chunkingSettings,
@Nullable Map<String, Object> secretSettings,
ConfigurationParseContext context
) {
return switch (taskType) {
case TEXT_EMBEDDING -> new VoyageAIEmbeddingsModel(
inferenceEntityId,
NAME,
serviceSettings,
taskSettings,
chunkingSettings,
secretSettings,
context
);
case RERANK -> new VoyageAIRerankModel(inferenceEntityId, NAME, serviceSettings, taskSettings, secretSettings, context);
default -> throw createInvalidTaskTypeException(inferenceEntityId, NAME, taskType, context);
};
}
@Override
public VoyageAIModel parsePersistedConfigWithSecrets(
String inferenceEntityId,
TaskType taskType,
Map<String, Object> config,
Map<String, Object> secrets
) {
Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
Map<String, Object> taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS);
Map<String, Object> secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS);
ChunkingSettings chunkingSettings = null;
if (TaskType.TEXT_EMBEDDING.equals(taskType)) {
chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS));
}
return createModelFromPersistent(
inferenceEntityId,
taskType,
serviceSettingsMap,
taskSettingsMap,
chunkingSettings,
secretSettingsMap
);
}
@Override
public VoyageAIModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map<String, Object> config) {
Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
Map<String, Object> taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS);
ChunkingSettings chunkingSettings = null;
if (TaskType.TEXT_EMBEDDING.equals(taskType)) {
chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS));
}
return createModelFromPersistent(inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, chunkingSettings, null);
}
@Override
public InferenceServiceConfiguration getConfiguration() {
return Configuration.get();
}
@Override
public EnumSet<TaskType> supportedTaskTypes() {
return supportedTaskTypes;
}
@Override
protected void doUnifiedCompletionInfer(
Model model,
UnifiedChatInput inputs,
TimeValue timeout,
ActionListener<InferenceServiceResults> listener
) {
throwUnsupportedUnifiedCompletionOperation(NAME);
}
@Override
public void doInfer(
Model model,
InferenceInputs inputs,
Map<String, Object> taskSettings,
TimeValue timeout,
ActionListener<InferenceServiceResults> listener
) {
if (model instanceof VoyageAIModel == false) {
listener.onFailure(createInvalidModelException(model));
return;
}
VoyageAIModel voyageaiModel = (VoyageAIModel) model;
var actionCreator = new VoyageAIActionCreator(getSender(), getServiceComponents());
var action = voyageaiModel.accept(actionCreator, taskSettings);
action.execute(inputs, timeout, listener);
}
@Override
protected void validateInputType(InputType inputType, Model model, ValidationException validationException) {
ServiceUtils.validateInputTypeAgainstAllowlist(inputType, VALID_INPUT_TYPE_VALUES, SERVICE_NAME, validationException);
}
@Override
protected void doChunkedInfer(
Model model,
List<ChunkInferenceInput> inputs,
Map<String, Object> taskSettings,
InputType inputType,
TimeValue timeout,
ActionListener<List<ChunkedInference>> listener
) {
if (model instanceof VoyageAIModel == false) {
listener.onFailure(createInvalidModelException(model));
return;
}
VoyageAIModel voyageaiModel = (VoyageAIModel) model;
var actionCreator = new VoyageAIActionCreator(getSender(), getServiceComponents());
List<EmbeddingRequestChunker.BatchRequestAndListener> batchedRequests = new EmbeddingRequestChunker<>(
inputs,
getBatchSize(voyageaiModel),
voyageaiModel.getConfigurations().getChunkingSettings()
).batchRequestsWithListeners(listener);
for (var request : batchedRequests) {
var action = voyageaiModel.accept(actionCreator, taskSettings);
action.execute(new EmbeddingsInput(request.batch().inputs(), inputType), timeout, request.listener());
}
}
private static int getBatchSize(VoyageAIModel model) {
return MODEL_BATCH_SIZES.getOrDefault(model.getServiceSettings().modelId(), DEFAULT_BATCH_SIZE);
}
@Override
public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) {
if (model instanceof VoyageAIEmbeddingsModel embeddingsModel) {
var serviceSettings = embeddingsModel.getServiceSettings();
var similarityFromModel = serviceSettings.similarity();
var similarityToUse = similarityFromModel == null ? defaultSimilarity() : similarityFromModel;
var maxInputTokens = serviceSettings.maxInputTokens();
var dimensionSetByUser = serviceSettings.dimensionsSetByUser();
var updatedServiceSettings = new VoyageAIEmbeddingsServiceSettings(
new VoyageAIServiceSettings(
serviceSettings.getCommonSettings().modelId(),
serviceSettings.getCommonSettings().rateLimitSettings()
),
serviceSettings.getEmbeddingType(),
similarityToUse,
embeddingSize,
maxInputTokens,
dimensionSetByUser
);
return new VoyageAIEmbeddingsModel(embeddingsModel, updatedServiceSettings);
} else {
throw ServiceUtils.invalidModelTypeForUpdateModelWithEmbeddingDetails(model.getClass());
}
}
/**
* Return the default similarity measure for the embedding type.
* VoyageAI embeddings are normalized to unit vectors therefore Dot
* Product similarity can be used and is the default for all VoyageAI
* models.
*
* @return The default similarity.
*/
static SimilarityMeasure defaultSimilarity() {
return SimilarityMeasure.DOT_PRODUCT;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return VOYAGE_AI_INTEGRATION_ADDED;
}
@Override
public int rerankerWindowSize(String modelId) {
Integer inputSize = RERANKERS_INPUT_SIZE.get(modelId);
return inputSize != null ? inputSize : DEFAULT_RERANKER_INPUT_SIZE_WORDS;
}
public static | VoyageAIService |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/internal/MetadataImpl.java | {
"start": 20460,
"end": 22340
} | class ____ known: " + entityName );
}
final var referencedProperty = persistentClass.getReferencedProperty( propertyName );
if ( referencedProperty == null ) {
throw new MappingException( "Property not known: " + entityName + '.' + propertyName );
}
return referencedProperty.getType();
}
//Specific for copies only:
public Map<String,PersistentClass> getEntityBindingMap() {
return entityBindingMap;
}
public Map<String, Collection> getCollectionBindingMap() {
return collectionBindingMap;
}
public Map<String, TypeDefinition> getTypeDefinitionMap() {
return typeDefinitionMap;
}
public Map<String, FetchProfile> getFetchProfileMap() {
return fetchProfileMap;
}
public Map<Class<?>, MappedSuperclass> getMappedSuperclassMap() {
return mappedSuperclassMap;
}
public Map<String, IdentifierGeneratorDefinition> getIdGeneratorDefinitionMap() {
return idGeneratorDefinitionMap;
}
public Map<String, NamedEntityGraphDefinition> getNamedEntityGraphMap() {
return namedEntityGraphMap;
}
public BootstrapContext getBootstrapContext() {
return bootstrapContext;
}
public Map<String, NamedHqlQueryDefinition<?>> getNamedQueryMap() {
return namedQueryMap;
}
public Map<String, NamedNativeQueryDefinition<?>> getNamedNativeQueryMap() {
return namedNativeQueryMap;
}
public Map<String, NamedProcedureCallDefinition> getNamedProcedureCallMap() {
return namedProcedureCallMap;
}
public Map<String, NamedResultSetMappingDescriptor> getSqlResultSetMappingMap() {
return sqlResultSetMappingMap;
}
public java.util.List<org.hibernate.mapping.Component> getComposites() {
return composites;
}
public Map<Class<?>, Component> getGenericComponentsMap() {
return genericComponentsMap;
}
public Map<Class<?>, DiscriminatorType<?>> getEmbeddableDiscriminatorTypesMap() {
return embeddableDiscriminatorTypesMap;
}
}
| not |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/async/InternalAsyncUtil.java | {
"start": 1119,
"end": 1153
} | class ____.
* </p>
*/
public | private |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/test/java/org/apache/flink/state/rocksdb/RocksDBMemoryControllerUtilsTest.java | {
"start": 4582,
"end": 5479
} | class ____
implements RocksDBMemoryControllerUtils.RocksDBMemoryFactory {
private Long actualCacheCapacity = null;
private Long actualWbmCapacity = null;
@Override
public Cache createCache(long cacheCapacity, double highPriorityPoolRatio) {
actualCacheCapacity = cacheCapacity;
return RocksDBMemoryControllerUtils.RocksDBMemoryFactory.DEFAULT.createCache(
cacheCapacity, highPriorityPoolRatio);
}
@Override
public WriteBufferManager createWriteBufferManager(
long writeBufferManagerCapacity, Cache cache) {
actualWbmCapacity = writeBufferManagerCapacity;
return RocksDBMemoryControllerUtils.RocksDBMemoryFactory.DEFAULT
.createWriteBufferManager(writeBufferManagerCapacity, cache);
}
}
}
| TestingRocksDBMemoryFactory |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/MultipleInputStreamTaskTest.java | {
"start": 25395,
"end": 61203
} | class ____ extends AbstractInput<String, String> {
public DuplicatingInput(AbstractStreamOperatorV2<String> owner, int inputId) {
super(owner, inputId);
}
@Override
public void processElement(StreamRecord<String> element) throws Exception {
output.collect(element);
output.collect(element);
}
}
}
@TestTemplate
void testLifeCycleOrder() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(applyObjectReuse(objectReuse))
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addSourceInput(
new SourceOperatorFactory<>(
new LifeCycleTrackingMockSource(Boundedness.BOUNDED, 1),
WatermarkStrategy.noWatermarks()),
BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.setupOperatorChain(
new LifeCycleTrackingMapToStringMultipleInputOperatorFactory())
.chain(
new LifeCycleTrackingMap<>(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new SerializerConfigImpl()))
.finish()
.build()) {
testHarness.waitForTaskCompletion();
}
assertThat(LIFE_CYCLE_EVENTS)
.contains(
LifeCycleTrackingMap.OPEN,
LifeCycleTrackingMapToStringMultipleInputOperator.OPEN,
LifeCycleTrackingMockSourceReader.START,
LifeCycleTrackingMapToStringMultipleInputOperator.END_INPUT,
LifeCycleTrackingMapToStringMultipleInputOperator.END_INPUT,
LifeCycleTrackingMapToStringMultipleInputOperator.END_INPUT,
LifeCycleTrackingMapToStringMultipleInputOperator.FINISH,
LifeCycleTrackingMap.END_INPUT,
LifeCycleTrackingMap.CLOSE,
LifeCycleTrackingMapToStringMultipleInputOperator.CLOSE,
LifeCycleTrackingMockSourceReader.CLOSE);
}
@TestTemplate
void testInputFairness() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
testHarness.setAutoProcess(false);
testHarness.processElement(new StreamRecord<>("0"), 0);
testHarness.processElement(new StreamRecord<>("1"), 0);
testHarness.processElement(new StreamRecord<>("2"), 0);
testHarness.processElement(new StreamRecord<>("3"), 0);
testHarness.processElement(new StreamRecord<>("0"), 2);
testHarness.processElement(new StreamRecord<>("1"), 2);
testHarness.processAll();
// We do not know which of the input will be picked first, but we are expecting them
// to alternate
// NOTE: the behaviour of alternation once per record is not part of any contract.
// Task is just expected to not starve any of the inputs, it just happens to be
// currently implemented in truly "fair" fashion. That means this test might need
// to be adjusted if logic changes.
expectedOutput.add(new StreamRecord<>("0"));
expectedOutput.add(new StreamRecord<>("0"));
expectedOutput.add(new StreamRecord<>("1"));
expectedOutput.add(new StreamRecord<>("1"));
expectedOutput.add(new StreamRecord<>("2"));
expectedOutput.add(new StreamRecord<>("3"));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
}
}
@TestTemplate
void testWatermark() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
buildWatermarkTestHarness(2, false)) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
int initialTime = 0;
testHarness.processElement(new Watermark(initialTime), 0, 0);
testHarness.processElement(new Watermark(initialTime), 0, 1);
addSourceRecords(testHarness, 1, initialTime);
expectedOutput.add(
new StreamRecord<>("" + (initialTime), TimestampAssigner.NO_TIMESTAMP));
testHarness.processElement(new Watermark(initialTime), 1, 0);
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
testHarness.processElement(new Watermark(initialTime), 1, 1);
// now the watermark should have propagated, Map simply forward Watermarks
expectedOutput.add(new Watermark(initialTime));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
// contrary to checkpoint barriers these elements are not blocked by watermarks
testHarness.processElement(new StreamRecord<>("Hello", initialTime), 0, 0);
testHarness.processElement(new StreamRecord<>(42.0, initialTime), 1, 1);
expectedOutput.add(new StreamRecord<>("Hello", initialTime));
expectedOutput.add(new StreamRecord<>("42.0", initialTime));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
testHarness.processElement(new Watermark(initialTime + 4), 0, 0);
testHarness.processElement(new Watermark(initialTime + 3), 0, 1);
addSourceRecords(testHarness, 1, initialTime + 3);
expectedOutput.add(
new StreamRecord<>("" + (initialTime + 3), TimestampAssigner.NO_TIMESTAMP));
testHarness.processElement(new Watermark(initialTime + 3), 1, 0);
testHarness.processElement(new Watermark(initialTime + 2), 1, 1);
// check whether we get the minimum of all the watermarks, this must also only occur in
// the output after the two StreamRecords
expectedOutput.add(new Watermark(initialTime + 2));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
// advance watermark from one of the inputs, now we should get a new one since the
// minimum increases
testHarness.processElement(new Watermark(initialTime + 4), 1, 1);
expectedOutput.add(new Watermark(initialTime + 3));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
// advance the other inputs, now we should get a new one since the minimum increases
// again
testHarness.processElement(new Watermark(initialTime + 4), 0, 1);
addSourceRecords(testHarness, 1, initialTime + 4);
expectedOutput.add(
new StreamRecord<>("" + (initialTime + 4), TimestampAssigner.NO_TIMESTAMP));
testHarness.processElement(new Watermark(initialTime + 4), 1, 0);
expectedOutput.add(new Watermark(initialTime + 4));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
List<String> resultElements =
TestHarnessUtil.getRawElementsFromOutput(testHarness.getOutput());
assertThat(resultElements).hasSize(5);
}
}
/**
* This test verifies that watermarks and watermark statuses are correctly forwarded. This also
* checks whether watermarks are forwarded only when we have received watermarks from all
* inputs. The forwarded watermark must be the minimum of the watermarks of all active inputs.
*/
@TestTemplate
void testWatermarkAndWatermarkStatusForwarding() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
buildWatermarkTestHarness(2, true)) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
int initialTime = 0;
// test whether idle input channels are acknowledged correctly when forwarding
// watermarks
testHarness.processElement(WatermarkStatus.IDLE, 0, 1);
testHarness.processElement(new Watermark(initialTime + 6), 0, 0);
testHarness.processElement(new Watermark(initialTime + 5), 1, 1);
testHarness.processElement(WatermarkStatus.IDLE, 1, 0); // once this is acknowledged,
expectedOutput.add(new Watermark(initialTime + 5));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
// We make the second input idle, which should forward W=6 from the first input
testHarness.processElement(WatermarkStatus.IDLE, 1, 1);
expectedOutput.add(new Watermark(initialTime + 6));
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
// Make the first input idle
testHarness.processElement(WatermarkStatus.IDLE, 0, 0);
expectedOutput.add(WatermarkStatus.IDLE);
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
// make source active once again, emit a watermark and go idle again.
addSourceRecords(testHarness, 1, initialTime + 10);
expectedOutput.add(WatermarkStatus.ACTIVE); // activate source on new record
expectedOutput.add(
new StreamRecord<>("" + (initialTime + 10), TimestampAssigner.NO_TIMESTAMP));
expectedOutput.add(new Watermark(initialTime + 10)); // forward W from source
expectedOutput.add(WatermarkStatus.IDLE); // go idle after reading all records
testHarness.processAll();
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
// make some network input channel active again
testHarness.processElement(WatermarkStatus.ACTIVE, 0, 1);
expectedOutput.add(WatermarkStatus.ACTIVE);
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
}
}
@TestTemplate
void testAdvanceToEndOfEventTime() throws Exception {
try (StreamTaskMailboxTestHarness<String> testHarness =
buildWatermarkTestHarness(2, false)) {
testHarness.processElement(Watermark.MAX_WATERMARK, 0, 0);
testHarness.processElement(Watermark.MAX_WATERMARK, 0, 1);
testHarness.getStreamTask().advanceToEndOfEventTime();
testHarness.processElement(Watermark.MAX_WATERMARK, 1, 0);
assertThat(testHarness.getOutput()).doesNotContain(Watermark.MAX_WATERMARK);
testHarness.processElement(Watermark.MAX_WATERMARK, 1, 1);
assertThat(testHarness.getOutput()).containsExactly(Watermark.MAX_WATERMARK);
}
}
@TestTemplate
@SuppressWarnings("unchecked")
void testWatermarkMetrics() throws Exception {
OperatorID mainOperatorId = new OperatorID();
OperatorID chainedOperatorId = new OperatorID();
InterceptingOperatorMetricGroup mainOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
InterceptingOperatorMetricGroup chainedOperatorMetricGroup =
new InterceptingOperatorMetricGroup();
InterceptingTaskMetricGroup taskMetricGroup =
new InterceptingTaskMetricGroup() {
@Override
public InternalOperatorMetricGroup getOrAddOperator(
OperatorID id, String name, Map<String, String> additionalVariables) {
if (id.equals(mainOperatorId)) {
return mainOperatorMetricGroup;
} else if (id.equals(chainedOperatorId)) {
return chainedOperatorMetricGroup;
} else {
return super.getOrAddOperator(id, name, additionalVariables);
}
}
};
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.modifyExecutionConfig(applyObjectReuse(objectReuse))
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addSourceInput(
new SourceOperatorFactory<>(
new MockSource(
Boundedness.CONTINUOUS_UNBOUNDED, 2, true, false),
WatermarkStrategy.forGenerator(
ctx -> new RecordToWatermarkGenerator())),
BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.setupOperatorChain(
mainOperatorId, new MapToStringMultipleInputOperatorFactory(3))
.chain(
chainedOperatorId,
new WatermarkMetricOperator(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new SerializerConfigImpl()))
.finish()
.setTaskMetricGroup(taskMetricGroup)
.build()) {
Gauge<Long> taskInputWatermarkGauge =
(Gauge<Long>) taskMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> mainInput1WatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.currentInputWatermarkName(1));
Gauge<Long> mainInput2WatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.currentInputWatermarkName(2));
Gauge<Long> mainInput3WatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.currentInputWatermarkName(3));
Gauge<Long> mainInputWatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> mainOutputWatermarkGauge =
(Gauge<Long>)
mainOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
Gauge<Long> chainedInputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_INPUT_WATERMARK);
Gauge<Long> chainedOutputWatermarkGauge =
(Gauge<Long>)
chainedOperatorMetricGroup.get(MetricNames.IO_CURRENT_OUTPUT_WATERMARK);
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInput1WatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInput2WatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInput3WatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
testHarness.processElement(new Watermark(1L), 0);
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInput1WatermarkGauge.getValue()).isOne();
assertThat(mainInput2WatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInput3WatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
addSourceRecords(testHarness, 1, 2);
testHarness.processAll();
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainInput1WatermarkGauge.getValue()).isOne();
assertThat(mainInput2WatermarkGauge.getValue()).isEqualTo(2L);
assertThat(mainInput3WatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(mainOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(Long.MIN_VALUE);
testHarness.processElement(new Watermark(2L), 1);
assertThat(taskInputWatermarkGauge.getValue()).isOne();
assertThat(mainInputWatermarkGauge.getValue()).isOne();
assertThat(mainInput1WatermarkGauge.getValue()).isOne();
assertThat(mainInput2WatermarkGauge.getValue()).isEqualTo(2L);
assertThat(mainInput3WatermarkGauge.getValue()).isEqualTo(2L);
assertThat(mainOutputWatermarkGauge.getValue()).isOne();
assertThat(chainedInputWatermarkGauge.getValue()).isOne();
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(2L);
testHarness.processElement(new Watermark(4L), 0);
addSourceRecords(testHarness, 1, 3);
testHarness.processAll();
assertThat(taskInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(mainInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(mainInput1WatermarkGauge.getValue()).isEqualTo(4L);
assertThat(mainInput2WatermarkGauge.getValue()).isEqualTo(3L);
assertThat(mainInput3WatermarkGauge.getValue()).isEqualTo(2L);
assertThat(mainOutputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(chainedInputWatermarkGauge.getValue()).isEqualTo(2L);
assertThat(chainedOutputWatermarkGauge.getValue()).isEqualTo(4L);
finishAddingRecords(testHarness, 1);
testHarness.endInput();
testHarness.waitForTaskCompletion();
testHarness.finishProcessing();
}
}
/**
* Tests the checkpoint related metrics are registered into {@link TaskIOMetricGroup} correctly
* while generating the {@link TwoInputStreamTask}.
*/
@TestTemplate
void testCheckpointBarrierMetrics() throws Exception {
final Map<String, Metric> metrics = new ConcurrentHashMap<>();
final TaskMetricGroup taskMetricGroup =
StreamTaskTestHarness.createTaskMetricGroup(metrics);
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO, 2)
.addInput(BasicTypeInfo.INT_TYPE_INFO, 2)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO, 2)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.setTaskMetricGroup(taskMetricGroup)
.build()) {
assertThat(metrics)
.containsKey(MetricNames.CHECKPOINT_ALIGNMENT_TIME)
.containsKey(MetricNames.CHECKPOINT_START_DELAY_TIME);
testHarness.endInput();
testHarness.waitForTaskCompletion();
}
}
/** The CanEmitBatchOfRecords should always be false for {@link MultipleInputStreamTask}. */
@TestTemplate
void testCanEmitBatchOfRecords() throws Exception {
AvailabilityProvider.AvailabilityHelper availabilityHelper =
new AvailabilityProvider.AvailabilityHelper();
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.addAdditionalOutput(
new ResultPartitionWriterWithAvailabilityHelper(availabilityHelper))
.setupOperatorChain(new MapToStringMultipleInputOperatorFactory(3))
.finishForSingletonOperatorChain(IntSerializer.INSTANCE)
.build()) {
StreamTask.CanEmitBatchOfRecordsChecker canEmitBatchOfRecordsChecker =
testHarness.streamTask.getCanEmitBatchOfRecords();
testHarness.processAll();
availabilityHelper.resetAvailable();
assertThat(canEmitBatchOfRecordsChecker.check()).isFalse();
// The canEmitBatchOfRecordsChecker should be the false after the record writer is
// unavailable.
availabilityHelper.resetUnavailable();
assertThat(canEmitBatchOfRecordsChecker.check()).isFalse();
// Restore record writer to available
availabilityHelper.resetAvailable();
assertThat(canEmitBatchOfRecordsChecker.check()).isFalse();
// The canEmitBatchOfRecordsChecker should be the false after add the mail to mail box.
testHarness.streamTask.mainMailboxExecutor.execute(() -> {}, "mail");
assertThat(canEmitBatchOfRecordsChecker.check()).isFalse();
testHarness.processAll();
assertThat(canEmitBatchOfRecordsChecker.check()).isFalse();
}
}
@TestTemplate
void testLatencyMarker() throws Exception {
final Map<String, Metric> metrics = new ConcurrentHashMap<>();
final TaskMetricGroup taskMetricGroup =
StreamTaskTestHarness.createTaskMetricGroup(metrics);
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.setupOutputForSingletonOperatorChain(
new MapToStringMultipleInputOperatorFactory(3))
.setTaskMetricGroup(taskMetricGroup)
.build()) {
ArrayDeque<Object> expectedOutput = new ArrayDeque<>();
OperatorID sourceId = new OperatorID();
LatencyMarker latencyMarker = new LatencyMarker(42L, sourceId, 0);
testHarness.processElement(latencyMarker);
expectedOutput.add(latencyMarker);
assertThat(testHarness.getOutput()).containsExactlyElementsOf(expectedOutput);
testHarness.endInput();
testHarness.waitForTaskCompletion();
}
}
@TestTemplate
void testTriggeringAlignedNoTimeoutCheckpointWithFinishedChannels() throws Exception {
testTriggeringCheckpointWithFinishedChannels(
CheckpointOptions.alignedNoTimeout(
CheckpointType.CHECKPOINT,
CheckpointStorageLocationReference.getDefault()));
}
@TestTemplate
void testTriggeringUnalignedCheckpointWithFinishedChannels() throws Exception {
testTriggeringCheckpointWithFinishedChannels(
CheckpointOptions.unaligned(
CheckpointType.CHECKPOINT,
CheckpointStorageLocationReference.getDefault()));
}
@TestTemplate
void testTriggeringAlignedWithTimeoutCheckpointWithFinishedChannels() throws Exception {
testTriggeringCheckpointWithFinishedChannels(
CheckpointOptions.alignedWithTimeout(
CheckpointType.CHECKPOINT,
CheckpointStorageLocationReference.getDefault(),
10L));
}
private void testTriggeringCheckpointWithFinishedChannels(CheckpointOptions checkpointOptions)
throws Exception {
ResultPartition[] partitionWriters = new ResultPartition[2];
try {
for (int i = 0; i < partitionWriters.length; ++i) {
partitionWriters[i] =
PartitionTestUtils.createPartition(ResultPartitionType.PIPELINED_BOUNDED);
partitionWriters[i].setup();
}
CompletingCheckpointResponder checkpointResponder = new CompletingCheckpointResponder();
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.STRING_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.DOUBLE_TYPE_INFO)
.addAdditionalOutput(partitionWriters)
.setCheckpointResponder(checkpointResponder)
.addJobConfig(
CheckpointingOptions.CHECKPOINTING_INTERVAL,
Duration.ofSeconds(1))
.addJobConfig(
CheckpointingOptions.ENABLE_UNALIGNED,
checkpointOptions.isUnalignedCheckpoint()
|| checkpointOptions.isTimeoutable())
.setupOperatorChain(new MapToStringMultipleInputOperatorFactory(3))
.finishForSingletonOperatorChain(StringSerializer.INSTANCE)
.build()) {
checkpointResponder.setHandlers(
testHarness.streamTask::notifyCheckpointCompleteAsync,
testHarness.streamTask::notifyCheckpointAbortAsync);
testHarness.getStreamTask().getCheckpointBarrierHandler().get();
// Tests triggering checkpoint when all the inputs are alive.
CompletableFuture<Boolean> checkpointFuture =
triggerCheckpoint(testHarness, 2, checkpointOptions);
processMailTillCheckpointSucceeds(testHarness, checkpointFuture);
assertThat(testHarness.getTaskStateManager().getReportedCheckpointId())
.isEqualTo(2);
// Tests triggering checkpoint after some inputs have received EndOfPartition.
testHarness.processEvent(new EndOfData(StopMode.DRAIN), 0, 0);
testHarness.processEvent(EndOfPartitionEvent.INSTANCE, 0, 0);
checkpointFuture = triggerCheckpoint(testHarness, 4, checkpointOptions);
processMailTillCheckpointSucceeds(testHarness, checkpointFuture);
assertThat(testHarness.getTaskStateManager().getReportedCheckpointId())
.isEqualTo(4);
// Tests triggering checkpoint after all the inputs have received EndOfPartition.
testHarness.processEvent(new EndOfData(StopMode.DRAIN), 1, 0);
testHarness.processEvent(new EndOfData(StopMode.DRAIN), 2, 0);
testHarness.processEvent(EndOfPartitionEvent.INSTANCE, 1, 0);
testHarness.processEvent(EndOfPartitionEvent.INSTANCE, 2, 0);
checkpointFuture = triggerCheckpoint(testHarness, 6, checkpointOptions);
// Notifies the result partition that all records are processed after the
// last checkpoint is triggered.
checkpointFuture.thenAccept(
(ignored) -> {
for (ResultPartition resultPartition : partitionWriters) {
resultPartition.onSubpartitionAllDataProcessed(0);
}
});
// The checkpoint 6 would be triggered successfully.
testHarness.processAll();
testHarness.finishProcessing();
assertThat(checkpointFuture).isDone();
testHarness.getTaskStateManager().getWaitForReportLatch().await();
assertThat(testHarness.getTaskStateManager().getReportedCheckpointId())
.isEqualTo(6);
// Each result partition should have emitted 3 barriers and 1 EndOfUserRecordsEvent.
for (ResultPartition resultPartition : partitionWriters) {
assertThat(resultPartition.getNumberOfQueuedBuffers()).isEqualTo(4);
}
}
} finally {
for (ResultPartitionWriter writer : partitionWriters) {
if (writer != null) {
writer.close();
}
}
}
}
@TestTemplate
void testSkipExecutionsIfFinishedOnRestore() throws Exception {
OperatorID nonSourceOperatorId = new OperatorID();
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.setCollectNetworkEvents()
.addJobConfig(
CheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofSeconds(1))
.modifyExecutionConfig(applyObjectReuse(objectReuse))
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.setTaskStateSnapshot(1, TaskStateSnapshot.FINISHED_ON_RESTORE)
.setupOperatorChain(
nonSourceOperatorId,
new LifeCycleMonitorMultipleInputOperatorFactory())
.chain(new TestFinishedOnRestoreStreamOperator(), StringSerializer.INSTANCE)
.finish()
.build()) {
testHarness.processElement(Watermark.MAX_WATERMARK, 0);
testHarness.processElement(Watermark.MAX_WATERMARK, 1);
testHarness.processElement(Watermark.MAX_WATERMARK, 2);
testHarness.waitForTaskCompletion();
assertThat(testHarness.getOutput())
.containsExactly(Watermark.MAX_WATERMARK, new EndOfData(StopMode.DRAIN));
}
}
@TestTemplate
void testTriggeringStopWithSavepointWithDrain() throws Exception {
SourceOperatorFactory<Integer> sourceOperatorFactory =
new SourceOperatorFactory<>(
new MockSource(Boundedness.CONTINUOUS_UNBOUNDED, 2),
WatermarkStrategy.noWatermarks());
CompletableFuture<Boolean> checkpointCompleted = new CompletableFuture<>();
CheckpointResponder checkpointResponder =
new TestCheckpointResponder() {
@Override
public void acknowledgeCheckpoint(
JobID jobID,
ExecutionAttemptID executionAttemptID,
long checkpointId,
CheckpointMetrics checkpointMetrics,
TaskStateSnapshot subtaskState) {
super.acknowledgeCheckpoint(
jobID,
executionAttemptID,
checkpointId,
checkpointMetrics,
subtaskState);
checkpointCompleted.complete(null);
}
};
try (StreamTaskMailboxTestHarness<String> testHarness =
new StreamTaskMailboxTestHarnessBuilder<>(
MultipleInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO)
.setCollectNetworkEvents()
.addJobConfig(
CheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofSeconds(1))
.modifyExecutionConfig(applyObjectReuse(objectReuse))
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.setTaskStateSnapshot(1, TaskStateSnapshot.FINISHED_ON_RESTORE)
.setupOperatorChain(new LifeCycleMonitorMultipleInputOperatorFactory())
.finishForSingletonOperatorChain(StringSerializer.INSTANCE)
.setCheckpointResponder(checkpointResponder)
.build()) {
CompletableFuture<Boolean> triggerResult =
testHarness.streamTask.triggerCheckpointAsync(
new CheckpointMetaData(2, 2),
CheckpointOptions.alignedNoTimeout(
SavepointType.terminate(SavepointFormatType.CANONICAL),
CheckpointStorageLocationReference.getDefault()));
checkpointCompleted.whenComplete(
(ignored, exception) ->
testHarness.streamTask.notifyCheckpointCompleteAsync(2));
testHarness.waitForTaskCompletion();
testHarness.finishProcessing();
assertThat(triggerResult).isCompletedWithValue(true);
assertThat(checkpointCompleted).isDone();
}
}
/** Test implementation of {@link MultipleInputStreamOperator}. */
protected static | DuplicatingInput |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexFromFieldDocValuesAndLiteralEvaluator.java | {
"start": 3466,
"end": 4270
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory encoded;
private final int precision;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory encoded, int precision) {
this.source = source;
this.encoded = encoded;
this.precision = precision;
}
@Override
public StGeohexFromFieldDocValuesAndLiteralEvaluator get(DriverContext context) {
return new StGeohexFromFieldDocValuesAndLiteralEvaluator(source, encoded.get(context), precision, context);
}
@Override
public String toString() {
return "StGeohexFromFieldDocValuesAndLiteralEvaluator[" + "encoded=" + encoded + ", precision=" + precision + "]";
}
}
}
| Factory |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java | {
"start": 10657,
"end": 10990
} | class ____ extends TestSearchExtBuilder {
private static final String NAME = "name2";
TestSearchExtBuilder2(String field) {
super(NAME, field);
}
TestSearchExtBuilder2(StreamInput in) throws IOException {
super(NAME, in);
}
}
private static | TestSearchExtBuilder2 |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit3TestNotRunTest.java | {
"start": 8742,
"end": 9029
} | class ____ extends TestCase {
void tsetDoesStuff() {}
}
""")
.addOutputLines(
"out/DoesStuffTest.java",
"""
import junit.framework.TestCase;
import org.junit.Test;
public | DoesStuffTest |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/util/reflection/FieldInitializer.java | {
"start": 5133,
"end": 5829
} | interface ____ {
/**
* Try to resolve instances from types.
*
* <p>
* Checks on the real argument type or on the correct argument number
* will happen during the field initialization {@link FieldInitializer#initialize()}.
* I.e the only responsibility of this method, is to provide instances <strong>if possible</strong>.
* </p>
*
* @param argTypes Constructor argument types, should not be null.
* @return The argument instances to be given to the constructor, should not be null.
*/
Object[] resolveTypeInstances(Class<?>... argTypes);
}
private | ConstructorArgumentResolver |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/fuzz/Fuzz32208UTF32ParseTest.java | {
"start": 756,
"end": 3377
} | class ____ extends JacksonCoreTestBase
{
private final byte[] DOC = readResource("/data/fuzz-json-utf32-32208.json");
@Test
void fuzz32208ViaParser() throws Exception
{
final JsonFactory f = newStreamFactory();
JsonParser p = f.createParser(ObjectReadContext.empty(), DOC);
try {
assertToken(JsonToken.VALUE_STRING, p.nextToken());
String text = p.getString();
fail("Should not have passed; got text with length of: "+text.length());
} catch (JacksonIOException e) {
verifyException(e, "Invalid UTF-32 character ");
}
p.close();
}
// How about through UTF32Reader itself?
@Test
void fuzz32208Direct() throws Exception
{
_testFuzz32208Direct(1);
_testFuzz32208Direct(2);
_testFuzz32208Direct(3);
_testFuzz32208Direct(7);
_testFuzz32208Direct(13);
_testFuzz32208Direct(67);
_testFuzz32208Direct(111);
_testFuzz32208Direct(337);
_testFuzz32208Direct(991);
}
@Test
void fuzz32208DirectSingleByte() throws Exception
{
UTF32Reader r = new UTF32Reader(null, new ByteArrayInputStream(DOC), true,
new byte[500], 0, 0, false);
int count = 0;
try {
int ch;
while ((ch = r.read()) >= 0) {
count += ch;
}
fail("Should have failed, got all "+count+" characters, last 0x"+Integer.toHexString(ch));
} catch (CharConversionException e) {
verifyException(e, "Invalid UTF-32 character ");
}
r.close();
}
private void _testFuzz32208Direct(int readSize) throws Exception
{
InputStream in = new ThrottledInputStream(DOC, readSize);
// apparently input is NOT big-endian so:
UTF32Reader r = new UTF32Reader(null, in, true,
new byte[500], 0, 0, false);
int count = 0;
int ch;
try {
final char[] chunkBuffer = new char[19];
while (true) {
ch = r.read(chunkBuffer);
if (ch == -1) {
break;
}
if (ch == 0) {
fail("Received 0 chars; broken reader");
}
count += ch;
}
fail("Should have failed, got all "+count+" characters, last 0x"+Integer.toHexString(ch));
} catch (CharConversionException e) {
verifyException(e, "Invalid UTF-32 character ");
}
r.close();
}
}
| Fuzz32208UTF32ParseTest |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/symbol/IRDecorations.java | {
"start": 5358,
"end": 5558
} | class ____ extends IRDecoration<Integer> {
public IRDSize(Integer value) {
super(value);
}
}
/** describes the depth of a dup instruction */
public static | IRDSize |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MoreThanOneInjectableConstructorTest.java | {
"start": 3009,
"end": 3129
} | class ____ {
/** Class has a single non-injectable constructor. */
public | MoreThanOneInjectableConstructorNegativeCases |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/TypeTransformation.java | {
"start": 1111,
"end": 1565
} | interface ____ {
/** Transforms the given data type to a different data type. */
DataType transform(DataType typeToTransform);
/**
* Transforms the given data type to a different data type.
*
* <p>This method provides a {@link DataTypeFactory} if available.
*/
default DataType transform(@Nullable DataTypeFactory factory, DataType typeToTransform) {
return transform(typeToTransform);
}
}
| TypeTransformation |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/RegexTest.java | {
"start": 736,
"end": 3342
} | class ____ {
@Test
void testInSelect(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
assertTrue( em.createQuery( "select regexp_like('abcdef', 'ab.*')", Boolean.class ).getSingleResult() );
assertTrue( em.createQuery( "select 'abcdef' like regexp 'ab.*'", Boolean.class ).getSingleResult() );
var builder = (HibernateCriteriaBuilder) em.getCriteriaBuilder();
var query = builder.createQuery( Boolean.class );
query.select( builder.likeRegexp( builder.literal( "abcdef" ), "ab.*" ) );
assertTrue( em.createQuery( query ).getSingleResult() );
} );
scope.inEntityManager( em -> {
assertFalse( em.createQuery( "select not regexp_like('abcdef', 'ab.*')", Boolean.class ).getSingleResult() );
assertFalse( em.createQuery( "select 'abcdef' not like regexp 'ab.*'", Boolean.class ).getSingleResult() );
var builder = (HibernateCriteriaBuilder) em.getCriteriaBuilder();
var query = builder.createQuery( Boolean.class );
query.select( builder.notLikeRegexp( builder.literal( "abcdef" ), "ab.*" ) );
assertFalse( em.createQuery( query ).getSingleResult() );
} );
}
@Test
void testInSelectCaseInsensitive(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
assertTrue( em.createQuery( "select regexp_like('ABCDEF', 'ab.*', 'i')", Boolean.class ).getSingleResult() );
assertTrue( em.createQuery( "select 'abcdef' ilike regexp 'ab.*'", Boolean.class ).getSingleResult() );
var builder = (HibernateCriteriaBuilder) em.getCriteriaBuilder();
var query = builder.createQuery( Boolean.class );
query.select( builder.ilikeRegexp( builder.literal( "ABCDEF" ), "ab.*" ) );
assertTrue( em.createQuery( query ).getSingleResult() );
} );
scope.inEntityManager( em -> {
assertFalse( em.createQuery( "select not regexp_like('ABCDEF', 'ab.*', 'i')", Boolean.class ).getSingleResult() );
assertFalse( em.createQuery( "select 'abcdef' not ilike regexp 'ab.*'", Boolean.class ).getSingleResult() );
var builder = (HibernateCriteriaBuilder) em.getCriteriaBuilder();
var query = builder.createQuery( Boolean.class );
query.select( builder.notIlikeRegexp( builder.literal( "ABCDEF" ), "ab.*" ) );
assertFalse( em.createQuery( query ).getSingleResult() );
} );
}
@Test
void testInWhere(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
assertEquals( 1, em.createQuery( "select 1 where regexp_like('abcdef', 'ab.*')", Integer.class ).getSingleResult() );
assertEquals( 1, em.createQuery( "select 1 where 'abcdef' like regexp 'ab.*'", Integer.class ).getSingleResult() );
} );
}
}
| RegexTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/ScopeAnnotationOnInterfaceOrAbstractClassTest.java | {
"start": 3251,
"end": 3338
} | class ____ has a custom annotation. */
@CustomScope
public | that |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/mapping/JpaPersistentPropertyImplUnitTests.java | {
"start": 8169,
"end": 8377
} | class ____ {
private String id;
public String getId() {
return id;
}
}
@org.springframework.data.annotation.AccessType(Type.FIELD)
@Access(AccessType.PROPERTY)
static | CompetingTypeLevelAnnotations |
java | elastic__elasticsearch | libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java | {
"start": 1692,
"end": 2939
} | class ____ {
private static final Logger logger = LogManager.getLogger(EntitlementInitialization.class);
private static final Module ENTITLEMENTS_MODULE = PolicyManager.class.getModule();
public static InitializeArgs initializeArgs;
private static ElasticsearchEntitlementChecker checker;
private static AtomicReference<RuntimeException> error = new AtomicReference<>();
// Note: referenced by bridge reflectively
public static EntitlementChecker checker() {
return checker;
}
/**
* Return any exception that occurred during initialization
*/
public static RuntimeException getError() {
return error.get();
}
/**
* Initializes the Entitlement system:
* <ol>
* <li>
* Initialize dynamic instrumentation via {@link DynamicInstrumentation#initialize}
* </li>
* <li>
* Creates the {@link PolicyManager}
* </li>
* <li>
* Creates the {@link ElasticsearchEntitlementChecker} instance referenced by the instrumented methods
* </li>
* </ol>
* <p>
* <strong>NOTE:</strong> this method is referenced by the agent reflectively
* </p>
*
* @param inst the JVM instrumentation | EntitlementInitialization |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/enum_interface_type_handler/Mapper.java | {
"start": 814,
"end": 1011
} | interface ____ {
@Select("select * from users where id = #{id}")
User getUser(Integer id);
@Insert("insert into users (id, color) values (#{id}, #{color})")
int insertUser(User user);
}
| Mapper |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scheduling/annotation/AnnotationAsyncExecutionInterceptor.java | {
"start": 1641,
"end": 3065
} | class ____ extends AsyncExecutionInterceptor {
/**
* Create a new {@code AnnotationAsyncExecutionInterceptor} with the given executor
* and a simple {@link AsyncUncaughtExceptionHandler}.
* @param defaultExecutor the executor to be used by default if no more specific
* executor has been qualified at the method level using {@link Async#value()};
* a local executor for this interceptor will be built otherwise
*/
public AnnotationAsyncExecutionInterceptor(@Nullable Executor defaultExecutor) {
super(defaultExecutor);
}
/**
* Create a new {@code AnnotationAsyncExecutionInterceptor} with the given executor.
* @param defaultExecutor the executor to be used by default if no more specific
* executor has been qualified at the method level using {@link Async#value()};
* a local executor for this interceptor will be built otherwise
* @param exceptionHandler the {@link AsyncUncaughtExceptionHandler} to use to
* handle exceptions thrown by asynchronous method executions with {@code void}
* return type
*/
public AnnotationAsyncExecutionInterceptor(@Nullable Executor defaultExecutor, AsyncUncaughtExceptionHandler exceptionHandler) {
super(defaultExecutor, exceptionHandler);
}
/**
* Return the qualifier or bean name of the executor to be used when executing the
* given method, specified via {@link Async#value} at the method or declaring
* | AnnotationAsyncExecutionInterceptor |
java | quarkusio__quarkus | extensions/kubernetes-service-binding/runtime/src/main/java/io/quarkus/kubernetes/service/binding/runtime/ServiceBinding.java | {
"start": 491,
"end": 4913
} | class ____ {
private static final Logger log = Logger.getLogger(ServiceBinding.class);
private static final String PROVIDER = "provider";
private static final String TYPE = "type";
private final String name;
private final String provider;
private final Map<String, String> properties;
private final String type;
private final String bindingDirectory;
public ServiceBinding(Path bindingDirectory) {
this(bindingDirectory.getFileName().toString(), getFilenameToContentMap(bindingDirectory), bindingDirectory);
}
// visible for testing
ServiceBinding(String name, Map<String, String> filenameToContentMap, Path bindingDirectory) {
Map<String, String> properties = new HashMap<>();
String type = null;
String provider = null;
for (Map.Entry<String, String> entry : filenameToContentMap.entrySet()) {
if (TYPE.equals(entry.getKey())) {
type = entry.getValue();
} else if (PROVIDER.equals(entry.getKey())) {
provider = entry.getValue();
} else {
properties.put(entry.getKey(), entry.getValue());
}
}
if (type == null) {
throw new IllegalArgumentException("Directory '" + bindingDirectory
+ "' does not represent a valid Service ServiceBinding directory as it does not specify a type");
}
this.bindingDirectory = bindingDirectory.toString();
this.name = name;
this.type = type;
this.provider = provider;
this.properties = Collections.unmodifiableMap(properties);
}
private static Map<String, String> getFilenameToContentMap(Path directory) {
if (!Files.exists(directory) || !Files.isDirectory(directory)) {
log.warn("File '" + directory + "' is not a proper service binding directory so it will skipped");
return Collections.emptyMap();
}
File[] files = directory.toFile().listFiles(new FileFilter() {
@Override
public boolean accept(File f) {
try {
return !Files.isHidden(f.toPath()) && !Files.isDirectory(f.toPath());
} catch (IOException e) {
throw new IllegalStateException("Unable to determine if file '" + f + "' is a regular file", e);
}
}
});
Map<String, String> result = new HashMap<>();
if (files != null) {
for (File f : files) {
try {
result.put(f.toPath().getFileName().toString(),
Files.readString(f.toPath()).trim());
} catch (IOException e) {
throw new IllegalStateException("Unable to read file '" + f + "'", e);
}
}
}
return result;
}
public String getName() {
return name;
}
public Map<String, String> getProperties() {
return properties;
}
public String getType() {
return type;
}
public String getProvider() {
return provider;
}
public String getBindingDirectory() {
return bindingDirectory;
}
@Override
public String toString() {
return "ServiceBinding{" +
"name='" + name + '\'' +
", provider='" + provider + '\'' +
", type='" + type + '\'' +
'}';
}
public static List<ServiceBinding> matchingByType(String type, List<ServiceBinding> all) {
Objects.requireNonNull(type, "Type must not be null");
List<ServiceBinding> result = new ArrayList<>();
for (ServiceBinding binding : all) {
if (type.equals(binding.getType())) {
result.add(binding);
}
}
return result;
}
public static Optional<ServiceBinding> singleMatchingByType(String type, List<ServiceBinding> all) {
List<ServiceBinding> allMatching = matchingByType(type, all);
if (allMatching.isEmpty()) {
return Optional.empty();
}
ServiceBinding first = allMatching.get(0);
if (allMatching.size() > 1) {
log.warn("More than one ServiceBinding matches type '" + type + "', but only " + first + " will be used");
}
return Optional.of(first);
}
}
| ServiceBinding |
java | apache__camel | components/camel-ref/src/main/java/org/apache/camel/component/ref/RefEndpoint.java | {
"start": 1559,
"end": 3037
} | class ____ extends DefaultEndpoint implements DelegateEndpoint {
private volatile Endpoint endpoint;
@UriPath
@Metadata(required = true)
private String name;
public RefEndpoint(String endpointUri, Component component) {
super(endpointUri, component);
}
@Override
public boolean isRemote() {
return false;
}
public String getName() {
return name;
}
/**
* Name of endpoint to lookup in the registry.
*/
public void setName(String name) {
this.name = name;
}
@Override
public Producer createProducer() throws Exception {
return endpoint.createProducer();
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
return endpoint.createConsumer(processor);
}
@Override
public Endpoint getEndpoint() {
return endpoint;
}
@Override
protected void doInit() throws Exception {
if (endpoint == null) {
// endpoint is mandatory
endpoint = CamelContextHelper.mandatoryLookup(getCamelContext(), name, Endpoint.class);
if (getCamelContext().hasEndpoint(getEndpoint().getEndpointUri()) == null
|| getCamelContext().hasEndpoint(getEndpoint().getEndpointUri()) != endpoint) {
getCamelContext().addEndpoint(getEndpoint().getEndpointUri(), endpoint);
}
}
super.doInit();
}
}
| RefEndpoint |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/ValidationUtils.java | {
"start": 1346,
"end": 11420
} | class ____ {
private static final Log logger = LogFactory.getLog(ValidationUtils.class);
/**
* Invoke the given {@link Validator} for the supplied object and
* {@link Errors} instance.
* @param validator the {@code Validator} to be invoked
* @param target the object to bind the parameters to
* @param errors the {@link Errors} instance that should store the errors
* @throws IllegalArgumentException if either of the {@code Validator} or {@code Errors}
* arguments is {@code null}, or if the supplied {@code Validator} does not
* {@link Validator#supports(Class) support} the validation of the supplied object's type
*/
public static void invokeValidator(Validator validator, Object target, Errors errors) {
invokeValidator(validator, target, errors, (Object[]) null);
}
/**
* Invoke the given {@link Validator}/{@link SmartValidator} for the supplied object and
* {@link Errors} instance.
* @param validator the {@code Validator} to be invoked
* @param target the object to bind the parameters to
* @param errors the {@link Errors} instance that should store the errors
* @param validationHints one or more hint objects to be passed to the validation engine
* @throws IllegalArgumentException if either of the {@code Validator} or {@code Errors}
* arguments is {@code null}, or if the supplied {@code Validator} does not
* {@link Validator#supports(Class) support} the validation of the supplied object's type
*/
public static void invokeValidator(
Validator validator, Object target, Errors errors, Object @Nullable ... validationHints) {
Assert.notNull(validator, "Validator must not be null");
Assert.notNull(target, "Target object must not be null");
Assert.notNull(errors, "Errors object must not be null");
if (logger.isDebugEnabled()) {
logger.debug("Invoking validator [" + validator + "]");
}
if (!validator.supports(target.getClass())) {
throw new IllegalArgumentException(
"Validator [" + validator.getClass() + "] does not support [" + target.getClass() + "]");
}
if (!ObjectUtils.isEmpty(validationHints) && validator instanceof SmartValidator smartValidator) {
smartValidator.validate(target, errors, validationHints);
}
else {
validator.validate(target, errors);
}
if (logger.isDebugEnabled()) {
if (errors.hasErrors()) {
logger.debug("Validator found " + errors.getErrorCount() + " errors");
}
else {
logger.debug("Validator found no errors");
}
}
}
/**
* Reject the given field with the given error code if the value is empty.
* <p>An 'empty' value in this context means either {@code null} or
* the empty string "".
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode the error code, interpretable as message key
*/
public static void rejectIfEmpty(Errors errors, String field, String errorCode) {
rejectIfEmpty(errors, field, errorCode, null, null);
}
/**
* Reject the given field with the given error code and default message
* if the value is empty.
* <p>An 'empty' value in this context means either {@code null} or
* the empty string "".
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode error code, interpretable as message key
* @param defaultMessage fallback default message
*/
public static void rejectIfEmpty(Errors errors, String field, String errorCode, String defaultMessage) {
rejectIfEmpty(errors, field, errorCode, null, defaultMessage);
}
/**
* Reject the given field with the given error code and error arguments
* if the value is empty.
* <p>An 'empty' value in this context means either {@code null} or
* the empty string "".
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode the error code, interpretable as message key
* @param errorArgs the error arguments, for argument binding via MessageFormat
* (can be {@code null})
*/
public static void rejectIfEmpty(Errors errors, String field, String errorCode, Object[] errorArgs) {
rejectIfEmpty(errors, field, errorCode, errorArgs, null);
}
/**
* Reject the given field with the given error code, error arguments
* and default message if the value is empty.
* <p>An 'empty' value in this context means either {@code null} or
* the empty string "".
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode the error code, interpretable as message key
* @param errorArgs the error arguments, for argument binding via MessageFormat
* (can be {@code null})
* @param defaultMessage fallback default message
*/
public static void rejectIfEmpty(Errors errors, String field, String errorCode,
Object @Nullable [] errorArgs, @Nullable String defaultMessage) {
Assert.notNull(errors, "Errors object must not be null");
Object value = errors.getFieldValue(field);
if (value == null || !StringUtils.hasLength(value.toString())) {
errors.rejectValue(field, errorCode, errorArgs, defaultMessage);
}
}
/**
* Reject the given field with the given error code if the value is empty
* or just contains whitespace.
* <p>An 'empty' value in this context means either {@code null},
* the empty string "", or consisting wholly of whitespace.
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode the error code, interpretable as message key
*/
public static void rejectIfEmptyOrWhitespace(Errors errors, String field, String errorCode) {
rejectIfEmptyOrWhitespace(errors, field, errorCode, null, null);
}
/**
* Reject the given field with the given error code and default message
* if the value is empty or just contains whitespace.
* <p>An 'empty' value in this context means either {@code null},
* the empty string "", or consisting wholly of whitespace.
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode the error code, interpretable as message key
* @param defaultMessage fallback default message
*/
public static void rejectIfEmptyOrWhitespace(
Errors errors, String field, String errorCode, String defaultMessage) {
rejectIfEmptyOrWhitespace(errors, field, errorCode, null, defaultMessage);
}
/**
* Reject the given field with the given error code and error arguments
* if the value is empty or just contains whitespace.
* <p>An 'empty' value in this context means either {@code null},
* the empty string "", or consisting wholly of whitespace.
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode the error code, interpretable as message key
* @param errorArgs the error arguments, for argument binding via MessageFormat
* (can be {@code null})
*/
public static void rejectIfEmptyOrWhitespace(
Errors errors, String field, String errorCode, Object @Nullable [] errorArgs) {
rejectIfEmptyOrWhitespace(errors, field, errorCode, errorArgs, null);
}
/**
* Reject the given field with the given error code, error arguments
* and default message if the value is empty or just contains whitespace.
* <p>An 'empty' value in this context means either {@code null},
* the empty string "", or consisting wholly of whitespace.
* <p>The object whose field is being validated does not need to be passed
* in because the {@link Errors} instance can resolve field values by itself
* (it will usually hold an internal reference to the target object).
* @param errors the {@code Errors} instance to register errors on
* @param field the field name to check
* @param errorCode the error code, interpretable as message key
* @param errorArgs the error arguments, for argument binding via MessageFormat
* (can be {@code null})
* @param defaultMessage fallback default message
*/
public static void rejectIfEmptyOrWhitespace(
Errors errors, String field, String errorCode, Object @Nullable [] errorArgs, @Nullable String defaultMessage) {
Assert.notNull(errors, "Errors object must not be null");
Object value = errors.getFieldValue(field);
if (value == null || !StringUtils.hasText(value.toString())) {
errors.rejectValue(field, errorCode, errorArgs, defaultMessage);
}
}
}
| ValidationUtils |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/naming/utils/InitUtils.java | {
"start": 4354,
"end": 4694
} | class ____ be loaded when they are used, which will make deserialize before register.
* </p>
*
* <p>
* 子类实现类中的静态代码串中已经向Jackson进行了注册,但是由于classloader的原因,只有当 该子类被使用的时候,才会加载该类。这可能会导致Jackson先进性反序列化,再注册子类,从而导致 反序列化失败。
* </p>
*/
public static void initSerialization() {
// TODO register in implementation | will |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/resource/beans/container/internal/CdiBeanContainerBuilder.java | {
"start": 629,
"end": 723
} | class ____ building a CDI-based {@link BeanContainer}.
*
* @author Steve Ebersole
*/
public | for |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StringFormatWithLiteralTest.java | {
"start": 11419,
"end": 11584
} | class ____ {
String test() {
return "hello ['world']";
}
}
""")
.doTest();
}
}
| ExampleClass |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/condition/ConditionStateRouter.java | {
"start": 3407,
"end": 14365
} | class ____<T> extends AbstractStateRouter<T> {
public static final String NAME = "condition";
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(AbstractStateRouter.class);
protected static final Pattern ROUTE_PATTERN = Pattern.compile("([&!=,]*)\\s*([^&!=,\\s]+)");
protected Map<String, ConditionMatcher> whenCondition;
protected Map<String, ConditionMatcher> thenCondition;
protected List<ConditionMatcherFactory> matcherFactories;
private final boolean enabled;
public ConditionStateRouter(URL url, String rule, boolean force, boolean enabled) {
super(url);
this.setForce(force);
this.enabled = enabled;
matcherFactories =
moduleModel.getExtensionLoader(ConditionMatcherFactory.class).getActivateExtensions();
if (enabled) {
this.init(rule);
}
}
public ConditionStateRouter(URL url) {
super(url);
this.setUrl(url);
this.setForce(url.getParameter(FORCE_KEY, false));
matcherFactories =
moduleModel.getExtensionLoader(ConditionMatcherFactory.class).getActivateExtensions();
this.enabled = url.getParameter(ENABLED_KEY, true);
if (enabled) {
init(url.getParameterAndDecoded(RULE_KEY));
}
}
public void init(String rule) {
try {
if (rule == null || rule.trim().length() == 0) {
throw new IllegalArgumentException("Illegal route rule!");
}
rule = rule.replace("consumer.", "").replace("provider.", "");
int i = rule.indexOf("=>");
String whenRule = i < 0 ? null : rule.substring(0, i).trim();
String thenRule = i < 0 ? rule.trim() : rule.substring(i + 2).trim();
Map<String, ConditionMatcher> when =
StringUtils.isBlank(whenRule) || "true".equals(whenRule) ? new HashMap<>() : parseRule(whenRule);
Map<String, ConditionMatcher> then =
StringUtils.isBlank(thenRule) || "false".equals(thenRule) ? null : parseRule(thenRule);
// NOTE: It should be determined on the business level whether the `When condition` can be empty or not.
this.whenCondition = when;
this.thenCondition = then;
} catch (ParseException e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
private Map<String, ConditionMatcher> parseRule(String rule) throws ParseException {
Map<String, ConditionMatcher> condition = new HashMap<>();
if (StringUtils.isBlank(rule)) {
return condition;
}
// Key-Value pair, stores both match and mismatch conditions
ConditionMatcher matcherPair = null;
// Multiple values
Set<String> values = null;
final Matcher matcher = ROUTE_PATTERN.matcher(rule);
while (matcher.find()) { // Try to match one by one
String separator = matcher.group(1);
String content = matcher.group(2);
// Start part of the condition expression.
if (StringUtils.isEmpty(separator)) {
matcherPair = this.getMatcher(content);
condition.put(content, matcherPair);
}
// The KV part of the condition expression
else if ("&".equals(separator)) {
if (condition.get(content) == null) {
matcherPair = this.getMatcher(content);
condition.put(content, matcherPair);
} else {
matcherPair = condition.get(content);
}
}
// The Value in the KV part.
else if ("=".equals(separator)) {
if (matcherPair == null) {
throw new ParseException(
"Illegal route rule \""
+ rule + "\", The error char '" + separator
+ "' at index " + matcher.start() + " before \""
+ content + "\".",
matcher.start());
}
values = matcherPair.getMatches();
values.add(content);
}
// The Value in the KV part.
else if ("!=".equals(separator)) {
if (matcherPair == null) {
throw new ParseException(
"Illegal route rule \""
+ rule + "\", The error char '" + separator
+ "' at index " + matcher.start() + " before \""
+ content + "\".",
matcher.start());
}
values = matcherPair.getMismatches();
values.add(content);
}
// The Value in the KV part, if Value have more than one items.
else if (",".equals(separator)) { // Should be separated by ','
if (values == null || values.isEmpty()) {
throw new ParseException(
"Illegal route rule \""
+ rule + "\", The error char '" + separator
+ "' at index " + matcher.start() + " before \""
+ content + "\".",
matcher.start());
}
values.add(content);
} else {
throw new ParseException(
"Illegal route rule \"" + rule
+ "\", The error char '" + separator + "' at index "
+ matcher.start() + " before \"" + content + "\".",
matcher.start());
}
}
return condition;
}
@Override
protected BitList<Invoker<T>> doRoute(
BitList<Invoker<T>> invokers,
URL url,
Invocation invocation,
boolean needToPrintMessage,
Holder<RouterSnapshotNode<T>> nodeHolder,
Holder<String> messageHolder)
throws RpcException {
if (!enabled) {
if (needToPrintMessage) {
messageHolder.set("Directly return. Reason: ConditionRouter disabled.");
}
return invokers;
}
if (CollectionUtils.isEmpty(invokers)) {
if (needToPrintMessage) {
messageHolder.set("Directly return. Reason: Invokers from previous router is empty.");
}
return invokers;
}
try {
if (!matchWhen(url, invocation)) {
if (needToPrintMessage) {
messageHolder.set("Directly return. Reason: WhenCondition not match.");
}
return invokers;
}
if (thenCondition == null) {
logger.warn(
CLUSTER_CONDITIONAL_ROUTE_LIST_EMPTY,
"condition state router thenCondition is empty",
"",
"The current consumer in the service blocklist. consumer: " + NetUtils.getLocalHost()
+ ", service: " + url.getServiceKey());
if (needToPrintMessage) {
messageHolder.set("Empty return. Reason: ThenCondition is empty.");
}
return BitList.emptyList();
}
BitList<Invoker<T>> result = invokers.clone();
result.removeIf(invoker -> !matchThen(invoker.getUrl(), url));
if (!result.isEmpty()) {
if (needToPrintMessage) {
messageHolder.set("Match return.");
}
return result;
} else if (this.isForce()) {
logger.warn(
CLUSTER_CONDITIONAL_ROUTE_LIST_EMPTY,
"execute condition state router result list is empty. and force=true",
"",
"The route result is empty and force execute. consumer: " + NetUtils.getLocalHost()
+ ", service: " + url.getServiceKey() + ", router: "
+ url.getParameterAndDecoded(RULE_KEY));
if (needToPrintMessage) {
messageHolder.set("Empty return. Reason: Empty result from condition and condition is force.");
}
return result;
}
} catch (Throwable t) {
logger.error(
CLUSTER_FAILED_EXEC_CONDITION_ROUTER,
"execute condition state router exception",
"",
"Failed to execute condition router rule: " + getUrl() + ", invokers: " + invokers + ", cause: "
+ t.getMessage(),
t);
}
if (needToPrintMessage) {
messageHolder.set("Directly return. Reason: Error occurred ( or result is empty ).");
}
return invokers;
}
@Override
public boolean isRuntime() {
// We always return true for previously defined Router, that is, old Router doesn't support cache anymore.
// return true;
return this.getUrl().getParameter(RUNTIME_KEY, false);
}
private ConditionMatcher getMatcher(String key) {
for (ConditionMatcherFactory factory : matcherFactories) {
if (factory.shouldMatch(key)) {
return factory.createMatcher(key, moduleModel);
}
}
return moduleModel
.getExtensionLoader(ConditionMatcherFactory.class)
.getExtension("param")
.createMatcher(key, moduleModel);
}
boolean matchWhen(URL url, Invocation invocation) {
if (CollectionUtils.isEmptyMap(whenCondition)) {
return true;
}
return doMatch(url, null, invocation, whenCondition, true);
}
private boolean matchThen(URL url, URL param) {
if (CollectionUtils.isEmptyMap(thenCondition)) {
return false;
}
return doMatch(url, param, null, thenCondition, false);
}
private boolean doMatch(
URL url,
URL param,
Invocation invocation,
Map<String, ConditionMatcher> conditions,
boolean isWhenCondition) {
Map<String, String> sample = url.toOriginalMap();
for (Map.Entry<String, ConditionMatcher> entry : conditions.entrySet()) {
ConditionMatcher matchPair = entry.getValue();
if (!matchPair.isMatch(sample, param, invocation, isWhenCondition)) {
return false;
}
}
return true;
}
}
| ConditionStateRouter |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/internal/JdbcLiteralFormatterTemporal.java | {
"start": 685,
"end": 2227
} | class ____<T> extends BasicJdbcLiteralFormatter<T> {
private final TemporalType precision;
public JdbcLiteralFormatterTemporal(JavaType<T> javaType, TemporalType precision) {
super( javaType );
this.precision = precision;
}
@Override
public void appendJdbcLiteral(SqlAppender appender, T value, Dialect dialect, WrapperOptions options) {
final TimeZone jdbcTimeZone = getJdbcTimeZone( options );
// for performance reasons, avoid conversions if we can
if ( value instanceof java.util.Date date ) {
dialect.appendDateTimeLiteral( appender, date, precision, jdbcTimeZone );
}
else if ( value instanceof java.util.Calendar calendar ) {
dialect.appendDateTimeLiteral( appender, calendar, precision, jdbcTimeZone );
}
else if ( value instanceof TemporalAccessor temporalAccessor ) {
dialect.appendDateTimeLiteral( appender, temporalAccessor, precision, jdbcTimeZone );
}
else {
dialect.appendDateTimeLiteral( appender, unwrap( value, options ), precision, jdbcTimeZone );
}
}
private java.util.Date unwrap(Object value, WrapperOptions options) {
return switch ( precision ) {
case DATE -> unwrap( value, java.sql.Date.class, options );
case TIME -> unwrap( value, java.sql.Time.class, options );
case TIMESTAMP -> unwrap( value, java.util.Date.class, options );
};
}
private static TimeZone getJdbcTimeZone(WrapperOptions options) {
return options == null || options.getJdbcTimeZone() == null
? TimeZone.getDefault()
: options.getJdbcTimeZone();
}
}
| JdbcLiteralFormatterTemporal |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/FunctionalInterfaceClash.java | {
"start": 5811,
"end": 6813
} | class
____ (streamSuperMethods(msym, types).anyMatch(t -> !t.owner.isInterface())
&& clash.stream()
.anyMatch(
methodSymbol ->
streamSuperMethods(methodSymbol, types)
.anyMatch(t -> !t.owner.isInterface()))) {
continue;
}
if (isSuppressed(member, state)) {
continue;
}
String message =
"When passing lambda arguments to this function, callers will need a cast to"
+ " disambiguate with: "
+ clash.stream()
.map(m -> "\n " + Signatures.prettyMethodSignature(origin, m))
.sorted()
.collect(joining(""));
state.reportMatch(buildDescription(member).setMessage(message).build());
}
}
return NO_MATCH;
}
/**
* A string representation of a method descriptor, where all parameters whose type is a functional
* | if |
java | quarkusio__quarkus | extensions/kubernetes-client/runtime/src/main/java/io/quarkus/kubernetes/client/KubernetesConfigCustomizer.java | {
"start": 860,
"end": 937
} | interface ____ {
void customize(Config config);
}
| KubernetesConfigCustomizer |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/saml/SingleSpSamlRealmSettings.java | {
"start": 496,
"end": 1535
} | class ____ {
public static final String TYPE = "saml";
public static final Setting.AffixSetting<String> SP_ENTITY_ID = RealmSettings.simpleString(
TYPE,
"sp.entity_id",
Setting.Property.NodeScope
);
public static final Setting.AffixSetting<String> SP_ACS = RealmSettings.simpleString(TYPE, "sp.acs", Setting.Property.NodeScope);
public static final Setting.AffixSetting<String> SP_LOGOUT = RealmSettings.simpleString(TYPE, "sp.logout", Setting.Property.NodeScope);
public static Set<Setting.AffixSetting<?>> getSettings() {
Set<Setting.AffixSetting<?>> samlSettings = SamlRealmSettings.getSettings(TYPE);
samlSettings.add(SP_ENTITY_ID);
samlSettings.add(SP_ACS);
samlSettings.add(SP_LOGOUT);
return samlSettings;
}
public static <T> String getFullSettingKey(String realmName, Function<String, Setting.AffixSetting<T>> setting) {
return RealmSettings.getFullSettingKey(realmName, setting.apply(TYPE));
}
}
| SingleSpSamlRealmSettings |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/sql/SynchronizedSpaceTests.java | {
"start": 3913,
"end": 10918
} | interface
____ Query nativeQuery = queryProducer.apply( session );
executor.accept( nativeQuery );
}
);
// see if the entries exist based on the expectation
assertThat( cacheSystem.containsEntity( CachedEntity.class, 1 ) ).isEqualTo( shouldExistAfter );
assertThat( cacheSystem.containsEntity( CachedEntity.class, 2 ) ).isEqualTo( shouldExistAfter );
}
@Test
public void testSyncedCachedScenario(SessionFactoryScope scope) {
final String tableName = "cached_entity";
checkUseCase(
scope,
tableName,
query -> ((NativeQuery<?>) query).addSynchronizedQuerySpace( tableName ),
// the 2 CachedEntity entries should not be there
false
);
// and of course, let's make sure the update happened :)
scope.inTransaction(
session -> {
session.createQuery( "from CachedEntity", CachedEntity.class ).list().forEach(
cachedEntity -> assertThat( cachedEntity.name ).isEqualTo( "updated" )
);
}
);
}
@Test
public void testNonSyncedNonCachedScenario(SessionFactoryScope scope) {
// NonCachedEntity updated by native-query without adding query spaces
// - the outcome should be all cached data being invalidated
checkUseCase(
scope,
"non_cached_entity",
query -> {
},
// the 2 CachedEntity entries should not be there
false
);
// and of course, let's make sure the update happened :)
scope.inTransaction(
session -> {
session.createQuery( "from NonCachedEntity", NonCachedEntity.class ).list().forEach(
cachedEntity -> assertThat( cachedEntity.name ).isEqualTo( "updated" )
);
}
);
}
@Test
public void testSyncedNonCachedScenario(SessionFactoryScope scope) {
// NonCachedEntity updated by native-query with query spaces
// - the caches for CachedEntity are not invalidated - they are not affected by the specified query-space
final String tableName = "non_cached_entity";
checkUseCase(
scope,
tableName,
query -> ((NativeQuery<?>) query).addSynchronizedQuerySpace( tableName ),
// the 2 CachedEntity entries should still be there
true
);
// and of course, let's make sure the update happened :)
scope.inTransaction(
session -> {
session.createQuery( "from NonCachedEntity", NonCachedEntity.class ).list().forEach(
cachedEntity -> assertThat( cachedEntity.name ).isEqualTo( "updated" )
);
}
);
}
@Test
public void testSyncedNonCachedScenarioUsingHint(SessionFactoryScope scope) {
// same as `#testSyncedNonCachedScenario`, but here using the hint
final String tableName = "non_cached_entity";
checkUseCase(
scope,
tableName,
query -> query.setHint( HINT_NATIVE_SPACES, tableName ),
// the 2 CachedEntity entries should still be there
true
);
// and of course, let's make sure the update happened :)
scope.inTransaction(
session -> {
session.createQuery( "from NonCachedEntity", NonCachedEntity.class ).list().forEach(
cachedEntity -> assertThat( cachedEntity.name ).isEqualTo( "updated" )
);
}
);
}
@Test
public void testSyncedNonCachedScenarioUsingHintWithCollection(SessionFactoryScope scope) {
// same as `#testSyncedNonCachedScenario`, but here using the hint
final String tableName = "non_cached_entity";
final Set<String> spaces = new HashSet<>();
spaces.add( tableName );
checkUseCase(
scope,
tableName,
query -> query.setHint( HINT_NATIVE_SPACES, spaces ),
// the 2 CachedEntity entries should still be there
true
);
// and of course, let's make sure the update happened :)
scope.inTransaction(
session -> {
session.createQuery( "from NonCachedEntity", NonCachedEntity.class ).list().forEach(
cachedEntity -> assertThat( cachedEntity.name ).isEqualTo( "updated" )
);
}
);
}
@Test
public void testSyncedNonCachedScenarioUsingHintWithArray(SessionFactoryScope scope) {
// same as `#testSyncedNonCachedScenario`, but here using the hint
final String tableName = "non_cached_entity";
final String[] spaces = {tableName};
checkUseCase(
scope,
tableName,
query -> query.setHint( HINT_NATIVE_SPACES, spaces ),
// the 2 CachedEntity entries should still be there
true
);
// and of course, let's make sure the update happened :)
scope.inTransaction(
session -> {
session.createQuery( "from NonCachedEntity", NonCachedEntity.class ).list().forEach(
cachedEntity -> assertThat( cachedEntity.name ).isEqualTo( "updated" )
);
}
);
}
@Test
public void testSyncedNonCachedScenarioUsingAnnotationWithReturnClass(SessionFactoryScope scope) {
checkUseCase(
scope,
(session) -> session.createNamedQuery( "NonCachedEntity_return_class" ),
Query::getResultList,
true
);
}
@Test
public void testSyncedNonCachedScenarioUsingAnnotationWithResultSetMapping(SessionFactoryScope scope) {
checkUseCase(
scope,
(session) -> session.createNamedQuery( "NonCachedEntity_resultset_mapping" ),
Query::getResultList,
true
);
}
@Test
public void testSyncedNonCachedScenarioUsingAnnotationWithSpaces(SessionFactoryScope scope) {
checkUseCase(
scope,
(session) -> session.createNamedQuery( "NonCachedEntity_spaces" ),
Query::getResultList,
true
);
}
@Test
public void testSyncedNonCachedScenarioUsingJpaAnnotationWithNoResultMapping(SessionFactoryScope scope) {
checkUseCase(
scope,
(session) -> session.createNamedQuery( "NonCachedEntity_raw_jpa" ),
Query::getResultList,
true
);
}
@Test
public void testSyncedNonCachedScenarioUsingJpaAnnotationWithHint(SessionFactoryScope scope) {
checkUseCase(
scope,
(session) -> session.createNamedQuery( "NonCachedEntity_hint_jpa" ),
Query::getResultList,
true
);
}
private void loadAll(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.createQuery( "from CachedEntity" ).list();
// this one is not strictly needed since this entity is not cached.
// but it helps my OCD feel better to have it ;)
session.createQuery( "from NonCachedEntity" ).list();
}
);
}
@BeforeEach
public void prepareTest(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist( new CachedEntity( 1, "first cached" ) );
session.persist( new CachedEntity( 2, "second cached" ) );
session.persist( new NonCachedEntity( 1, "first non-cached" ) );
session.persist( new NonCachedEntity( 2, "second non-cached" ) );
}
);
cleanupCache( scope );
}
@AfterEach
public void cleanupTest(SessionFactoryScope scope) {
cleanupCache( scope );
scope.getSessionFactory().getSchemaManager().truncateMappedObjects();
}
private void cleanupCache(SessionFactoryScope scope) {
scope.getSessionFactory().getCache().evictAllRegions();
}
@Entity(name = "CachedEntity")
@Table(name = "cached_entity")
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public static | final |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java | {
"start": 732,
"end": 1210
} | class ____ extends NumberFieldBlockLoaderTestCase<Double> {
public HalfFloatFieldBlockLoaderTests(Params params) {
super(FieldType.HALF_FLOAT, params);
}
@Override
protected Double convert(Number value, Map<String, Object> fieldMapping) {
// All float values are represented as double
return (double) HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(value.floatValue()));
}
}
| HalfFloatFieldBlockLoaderTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CompileTimeConstantCheckerTest.java | {
"start": 24388,
"end": 25073
} | enum ____ {
A("A");
@CompileTimeConstant final String s;
Test(@CompileTimeConstant String s) {
this.s = s;
}
void invokeCTCMethod() {
ctcMethod(s);
}
void ctcMethod(@CompileTimeConstant String s) {}
}
""")
.doTest();
}
@Test
public void nonConstantField_positive() {
compilationHelper
.addSourceLines(
"test/CompileTimeConstantTestCase.java",
"""
package test;
import com.google.errorprone.annotations.CompileTimeConstant;
public abstract | Test |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableReplayEagerTruncateTest.java | {
"start": 24483,
"end": 63454
} | class ____ extends Worker {
private final Disposable mockDisposable;
public boolean unsubscribed;
InprocessWorker(Disposable mockDisposable) {
this.mockDisposable = mockDisposable;
}
@NonNull
@Override
public Disposable schedule(@NonNull Runnable action) {
action.run();
return mockDisposable; // this subscription is returned but discarded
}
@NonNull
@Override
public Disposable schedule(@NonNull Runnable action, long delayTime, @NonNull TimeUnit unit) {
action.run();
return mockDisposable;
}
@Override
public void dispose() {
unsubscribed = true;
}
@Override
public boolean isDisposed() {
return unsubscribed;
}
}
@Test
public void boundedReplayBuffer() {
BoundedReplayBuffer<Integer> buf = new BoundedReplayBuffer<Integer>(false) {
private static final long serialVersionUID = -5182053207244406872L;
@Override
void truncate() {
}
};
buf.addLast(new Node(1));
buf.addLast(new Node(2));
buf.addLast(new Node(3));
buf.addLast(new Node(4));
buf.addLast(new Node(5));
List<Integer> values = new ArrayList<>();
buf.collect(values);
Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5), values);
buf.removeSome(2);
buf.removeFirst();
buf.removeSome(2);
values.clear();
buf.collect(values);
Assert.assertTrue(values.isEmpty());
buf.addLast(new Node(5));
buf.addLast(new Node(6));
buf.collect(values);
Assert.assertEquals(Arrays.asList(5, 6), values);
}
@Test
public void timedAndSizedTruncation() {
TestScheduler test = new TestScheduler();
SizeAndTimeBoundReplayBuffer<Integer> buf = new SizeAndTimeBoundReplayBuffer<>(2, 2000, TimeUnit.MILLISECONDS, test, false);
List<Integer> values = new ArrayList<>();
buf.next(1);
test.advanceTimeBy(1, TimeUnit.SECONDS);
buf.next(2);
test.advanceTimeBy(1, TimeUnit.SECONDS);
buf.collect(values);
Assert.assertEquals(Arrays.asList(2), values);
buf.next(3);
buf.next(4);
values.clear();
buf.collect(values);
Assert.assertEquals(Arrays.asList(3, 4), values);
test.advanceTimeBy(2, TimeUnit.SECONDS);
buf.next(5);
values.clear();
buf.collect(values);
Assert.assertEquals(Arrays.asList(5), values);
Assert.assertFalse(buf.hasCompleted());
test.advanceTimeBy(2, TimeUnit.SECONDS);
buf.complete();
values.clear();
buf.collect(values);
Assert.assertTrue(values.isEmpty());
Assert.assertEquals(1, buf.size);
Assert.assertTrue(buf.hasCompleted());
Assert.assertFalse(buf.hasError());
}
@Test
public void timedAndSizedTruncationError() {
TestScheduler test = new TestScheduler();
SizeAndTimeBoundReplayBuffer<Integer> buf = new SizeAndTimeBoundReplayBuffer<>(2, 2000, TimeUnit.MILLISECONDS, test, false);
Assert.assertFalse(buf.hasCompleted());
Assert.assertFalse(buf.hasError());
List<Integer> values = new ArrayList<>();
buf.next(1);
test.advanceTimeBy(1, TimeUnit.SECONDS);
buf.next(2);
test.advanceTimeBy(1, TimeUnit.SECONDS);
buf.collect(values);
Assert.assertEquals(Arrays.asList(2), values);
buf.next(3);
buf.next(4);
values.clear();
buf.collect(values);
Assert.assertEquals(Arrays.asList(3, 4), values);
test.advanceTimeBy(2, TimeUnit.SECONDS);
buf.next(5);
values.clear();
buf.collect(values);
Assert.assertEquals(Arrays.asList(5), values);
Assert.assertFalse(buf.hasCompleted());
Assert.assertFalse(buf.hasError());
test.advanceTimeBy(2, TimeUnit.SECONDS);
buf.error(new TestException());
values.clear();
buf.collect(values);
Assert.assertTrue(values.isEmpty());
Assert.assertEquals(1, buf.size);
Assert.assertFalse(buf.hasCompleted());
Assert.assertTrue(buf.hasError());
}
@Test
public void sizedTruncation() {
SizeBoundReplayBuffer<Integer> buf = new SizeBoundReplayBuffer<>(2, false);
List<Integer> values = new ArrayList<>();
buf.next(1);
buf.next(2);
buf.collect(values);
Assert.assertEquals(Arrays.asList(1, 2), values);
buf.next(3);
buf.next(4);
values.clear();
buf.collect(values);
Assert.assertEquals(Arrays.asList(3, 4), values);
buf.next(5);
values.clear();
buf.collect(values);
Assert.assertEquals(Arrays.asList(4, 5), values);
Assert.assertFalse(buf.hasCompleted());
buf.complete();
values.clear();
buf.collect(values);
Assert.assertEquals(Arrays.asList(4, 5), values);
Assert.assertEquals(3, buf.size);
Assert.assertTrue(buf.hasCompleted());
Assert.assertFalse(buf.hasError());
}
@Test
public void coldReplayNoBackpressure() {
Observable<Integer> source = Observable.range(0, 1000).replay().autoConnect();
TestObserverEx<Integer> to = new TestObserverEx<>();
source.subscribe(to);
to.assertNoErrors();
to.assertTerminated();
List<Integer> onNextEvents = to.values();
assertEquals(1000, onNextEvents.size());
for (int i = 0; i < 1000; i++) {
assertEquals((Integer)i, onNextEvents.get(i));
}
}
@Test
public void cache() throws InterruptedException {
final AtomicInteger counter = new AtomicInteger();
Observable<String> o = Observable.unsafeCreate(new ObservableSource<String>() {
@Override
public void subscribe(final Observer<? super String> observer) {
observer.onSubscribe(Disposable.empty());
new Thread(new Runnable() {
@Override
public void run() {
counter.incrementAndGet();
System.out.println("published Observable being executed");
observer.onNext("one");
observer.onComplete();
}
}).start();
}
}).replay().autoConnect();
// we then expect the following 2 subscriptions to get that same value
final CountDownLatch latch = new CountDownLatch(2);
// subscribe once
o.subscribe(new Consumer<String>() {
@Override
public void accept(String v) {
assertEquals("one", v);
System.out.println("v: " + v);
latch.countDown();
}
});
// subscribe again
o.subscribe(new Consumer<String>() {
@Override
public void accept(String v) {
assertEquals("one", v);
System.out.println("v: " + v);
latch.countDown();
}
});
if (!latch.await(1000, TimeUnit.MILLISECONDS)) {
fail("subscriptions did not receive values");
}
assertEquals(1, counter.get());
}
@Test
public void unsubscribeSource() throws Throwable {
Action unsubscribe = mock(Action.class);
Observable<Integer> o = Observable.just(1).doOnDispose(unsubscribe).replay().autoConnect();
o.subscribe();
o.subscribe();
o.subscribe();
verify(unsubscribe, never()).run();
}
@Test
public void take() {
TestObserverEx<Integer> to = new TestObserverEx<>();
Observable<Integer> cached = Observable.range(1, 100).replay().autoConnect();
cached.take(10).subscribe(to);
to.assertNoErrors();
to.assertTerminated();
to.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
// FIXME no longer assertable
// ts.assertUnsubscribed();
}
@Test
public void async() {
Observable<Integer> source = Observable.range(1, 10000);
for (int i = 0; i < 100; i++) {
TestObserverEx<Integer> to1 = new TestObserverEx<>();
Observable<Integer> cached = source.replay().autoConnect();
cached.observeOn(Schedulers.computation()).subscribe(to1);
to1.awaitDone(2, TimeUnit.SECONDS);
to1.assertNoErrors();
to1.assertTerminated();
assertEquals(10000, to1.values().size());
TestObserverEx<Integer> to2 = new TestObserverEx<>();
cached.observeOn(Schedulers.computation()).subscribe(to2);
to2.awaitDone(2, TimeUnit.SECONDS);
to2.assertNoErrors();
to2.assertTerminated();
assertEquals(10000, to2.values().size());
}
}
@Test
public void asyncComeAndGo() {
Observable<Long> source = Observable.interval(1, 1, TimeUnit.MILLISECONDS)
.take(1000)
.subscribeOn(Schedulers.io());
Observable<Long> cached = source.replay().autoConnect();
Observable<Long> output = cached.observeOn(Schedulers.computation());
List<TestObserverEx<Long>> list = new ArrayList<>(100);
for (int i = 0; i < 100; i++) {
TestObserverEx<Long> to = new TestObserverEx<>();
list.add(to);
output.skip(i * 10).take(10).subscribe(to);
}
List<Long> expected = new ArrayList<>();
for (int i = 0; i < 10; i++) {
expected.add((long)(i - 10));
}
int j = 0;
for (TestObserverEx<Long> to : list) {
to.awaitDone(3, TimeUnit.SECONDS);
to.assertNoErrors();
to.assertTerminated();
for (int i = j * 10; i < j * 10 + 10; i++) {
expected.set(i - j * 10, (long)i);
}
to.assertValueSequence(expected);
j++;
}
}
@Test
public void noMissingBackpressureException() {
final int m = 4 * 1000 * 1000;
Observable<Integer> firehose = Observable.unsafeCreate(new ObservableSource<Integer>() {
@Override
public void subscribe(Observer<? super Integer> t) {
t.onSubscribe(Disposable.empty());
for (int i = 0; i < m; i++) {
t.onNext(i);
}
t.onComplete();
}
});
TestObserverEx<Integer> to = new TestObserverEx<>();
firehose.replay().autoConnect().observeOn(Schedulers.computation()).takeLast(100).subscribe(to);
to.awaitDone(3, TimeUnit.SECONDS);
to.assertNoErrors();
to.assertTerminated();
assertEquals(100, to.values().size());
}
@Test
public void valuesAndThenError() {
Observable<Integer> source = Observable.range(1, 10)
.concatWith(Observable.<Integer>error(new TestException()))
.replay().autoConnect();
TestObserverEx<Integer> to = new TestObserverEx<>();
source.subscribe(to);
to.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
to.assertNotComplete();
Assert.assertEquals(1, to.errors().size());
TestObserverEx<Integer> to2 = new TestObserverEx<>();
source.subscribe(to2);
to2.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
to2.assertNotComplete();
Assert.assertEquals(1, to2.errors().size());
}
@Test
public void replayTime() {
Observable.just(1).replay(1, TimeUnit.MINUTES, Schedulers.computation(), true)
.autoConnect()
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult(1);
}
@Test
public void replaySizeAndTime() {
Observable.just(1).replay(1, 1, TimeUnit.MILLISECONDS, Schedulers.computation(), true)
.autoConnect()
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult(1);
}
@Test
public void replaySelectorTime() {
Observable.just(1).replay(Functions.<Observable<Integer>>identity(), 1, TimeUnit.MINUTES)
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult(1);
}
@Test
public void replayMaxInt() {
Observable.range(1, 2)
.replay(Integer.MAX_VALUE, true)
.autoConnect()
.test()
.assertResult(1, 2);
}
@Test
public void source() {
Observable<Integer> source = Observable.range(1, 3);
assertSame(source, (((HasUpstreamObservableSource<?>)source.replay())).source());
}
    /**
     * Two threads racing to connect() the same ConnectableObservable must
     * not crash and must result in a single connection.
     */
    @Test
    public void connectRace() {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final ConnectableObservable<Integer> co = Observable.range(1, 3).replay();
            Runnable r = new Runnable() {
                @Override
                public void run() {
                    co.connect();
                }
            };
            // Run the same connect() action concurrently on two threads.
            TestHelper.race(r, r);
        }
    }
    /**
     * Two threads racing to subscribe() different observers to the same
     * replay() must both be registered without losing either.
     */
    @Test
    public void subscribeRace() {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final ConnectableObservable<Integer> co = Observable.range(1, 3).replay();
            final TestObserver<Integer> to1 = new TestObserver<>();
            final TestObserver<Integer> to2 = new TestObserver<>();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    co.subscribe(to1);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    co.subscribe(to2);
                }
            };
            TestHelper.race(r1, r2);
        }
    }
    /**
     * Races disposing an existing observer against subscribing a new one:
     * the internal observer-array management must tolerate concurrent
     * add/remove without corruption.
     */
    @Test
    public void addRemoveRace() {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final ConnectableObservable<Integer> co = Observable.range(1, 3).replay();
            final TestObserver<Integer> to1 = new TestObserver<>();
            final TestObserver<Integer> to2 = new TestObserver<>();
            // to1 is registered up-front so the race is remove-vs-add.
            co.subscribe(to1);
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    to1.dispose();
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    co.subscribe(to2);
                }
            };
            TestHelper.race(r1, r2);
        }
    }
@Test
public void cancelOnArrival() {
Observable.range(1, 2)
.replay(Integer.MAX_VALUE, true)
.autoConnect()
.test(true)
.assertEmpty();
}
    /**
     * Same as {@link #cancelOnArrival()} but with an existing live observer
     * present: the disposed-on-arrival observer must still receive nothing.
     */
    @Test
    public void cancelOnArrival2() {
        ConnectableObservable<Integer> co = PublishSubject.<Integer>create()
        .replay(Integer.MAX_VALUE, true);
        // Keep one live observer subscribed so the buffer state is non-trivial.
        co.test();
        co
        .autoConnect()
        .test(true)
        .assertEmpty();
    }
    /**
     * If the connection Consumer passed to connect(Consumer) throws, the
     * connection must be rolled back: observers see nothing until a later
     * successful connect() delivers the full sequence.
     */
    @Test
    public void connectConsumerThrows() {
        ConnectableObservable<Integer> co = Observable.range(1, 2)
        .replay();
        try {
            co.connect(new Consumer<Disposable>() {
                @Override
                public void accept(Disposable t) throws Exception {
                    throw new TestException();
                }
            });
            fail("Should have thrown");
        } catch (TestException ex) {
            // expected
        }
        // The failed connect must not have started the source.
        co.test().assertEmpty().dispose();
        co.connect();
        co.test().assertResult(1, 2);
    }
    /**
     * A protocol-violating source (events after onError) must deliver only
     * the first error to observers; the excess onError is routed to the
     * RxJavaPlugins undeliverable-error handler.
     */
    @Test
    public void badSource() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new Observable<Integer>() {
                @Override
                protected void subscribeActual(Observer<? super Integer> observer) {
                    observer.onSubscribe(Disposable.empty());
                    observer.onError(new TestException("First"));
                    // Everything after the first onError violates the protocol.
                    observer.onNext(1);
                    observer.onError(new TestException("Second"));
                    observer.onComplete();
                }
            }.replay()
            .autoConnect()
            .to(TestHelper.<Integer>testConsumer())
            .assertFailureAndMessage(TestException.class, "First");
            TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
        } finally {
            // Restore the default plugin handlers for subsequent tests.
            RxJavaPlugins.reset();
        }
    }
    /**
     * Races subscribing a new observer against the upstream emitting many
     * items; the replay buffer handoff must not lose or duplicate state.
     */
    @Test
    public void subscribeOnNextRace() {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishSubject<Integer> ps = PublishSubject.create();
            final ConnectableObservable<Integer> co = ps.replay();
            final TestObserver<Integer> to1 = new TestObserver<>();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    co.subscribe(to1);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    for (int j = 0; j < 1000; j++) {
                        ps.onNext(j);
                    }
                }
            };
            TestHelper.race(r1, r2);
        }
    }
    /**
     * Races disposing an already-subscribed observer against the upstream
     * emitting many items; emission must not crash while the observer is
     * being removed.
     */
    @Test
    public void unsubscribeOnNextRace() {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishSubject<Integer> ps = PublishSubject.create();
            final ConnectableObservable<Integer> co = ps.replay();
            final TestObserver<Integer> to1 = new TestObserver<>();
            co.subscribe(to1);
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    to1.dispose();
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    for (int j = 0; j < 1000; j++) {
                        ps.onNext(j);
                    }
                }
            };
            TestHelper.race(r1, r2);
        }
    }
    /**
     * Races subscribing against disposing the same observer while the
     * (already connected) buffer is replaying 1000 items to it.
     */
    @Test
    public void unsubscribeReplayRace() {
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final ConnectableObservable<Integer> co = Observable.range(1, 1000).replay();
            final TestObserver<Integer> to1 = new TestObserver<>();
            // Connect first so the buffer is already full when the race starts.
            co.connect();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    co.subscribe(to1);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    to1.dispose();
                }
            };
            TestHelper.race(r1, r2);
        }
    }
    /**
     * Reentrant emission: onNext(1) triggers onNext(2) and onComplete from
     * within the observer callback. The unbounded replay must serialize
     * this and deliver 1, 2, complete in order.
     */
    @Test
    public void reentrantOnNext() {
        final PublishSubject<Integer> ps = PublishSubject.create();
        TestObserver<Integer> to = new TestObserver<Integer>() {
            @Override
            public void onNext(Integer t) {
                if (t == 1) {
                    // Re-enters the subject before the current onNext finishes.
                    ps.onNext(2);
                    ps.onComplete();
                }
                super.onNext(t);
            }
        };
        ps.replay().autoConnect().subscribe(to);
        ps.onNext(1);
        to.assertResult(1, 2);
    }
    /**
     * Same reentrancy scenario as above but with a size-bounded,
     * eagerly-truncating replay buffer.
     */
    @Test
    public void reentrantOnNextBound() {
        final PublishSubject<Integer> ps = PublishSubject.create();
        TestObserver<Integer> to = new TestObserver<Integer>() {
            @Override
            public void onNext(Integer t) {
                if (t == 1) {
                    ps.onNext(2);
                    ps.onComplete();
                }
                super.onNext(t);
            }
        };
        ps.replay(10, true).autoConnect().subscribe(to);
        ps.onNext(1);
        to.assertResult(1, 2);
    }
    /**
     * Reentrant cancellation: the observer disposes itself from within
     * onNext(1); the value emitted reentrantly before the dispose must not
     * be delivered afterwards.
     */
    @Test
    public void reentrantOnNextCancel() {
        final PublishSubject<Integer> ps = PublishSubject.create();
        TestObserver<Integer> to = new TestObserver<Integer>() {
            @Override
            public void onNext(Integer t) {
                if (t == 1) {
                    ps.onNext(2);
                    // Dispose mid-callback; 2 must never arrive.
                    dispose();
                }
                super.onNext(t);
            }
        };
        ps.replay().autoConnect().subscribe(to);
        ps.onNext(1);
        to.assertValues(1);
    }
    /**
     * Same reentrant-cancel scenario with a size-bounded, eagerly-truncating
     * replay buffer.
     */
    @Test
    public void reentrantOnNextCancelBounded() {
        final PublishSubject<Integer> ps = PublishSubject.create();
        TestObserver<Integer> to = new TestObserver<Integer>() {
            @Override
            public void onNext(Integer t) {
                if (t == 1) {
                    ps.onNext(2);
                    dispose();
                }
                super.onNext(t);
            }
        };
        ps.replay(10, true).autoConnect().subscribe(to);
        ps.onNext(1);
        to.assertValues(1);
    }
    /**
     * If the connection is disposed before the upstream delivers its
     * Disposable, that late-arriving Disposable must be disposed on arrival.
     */
    @Test
    public void delayedUpstreamOnSubscribe() {
        // Captures the downstream observer so onSubscribe can be delayed.
        final Observer<?>[] sub = { null };
        new Observable<Integer>() {
            @Override
            protected void subscribeActual(Observer<? super Integer> observer) {
                sub[0] = observer;
            }
        }
        .replay()
        .connect()
        .dispose();
        Disposable bs = Disposable.empty();
        // Deliver onSubscribe only after the connection was disposed.
        sub[0].onSubscribe(bs);
        assertTrue(bs.isDisposed());
    }
@Test
public void timedNoOutdatedData() {
TestScheduler scheduler = new TestScheduler();
Observable<Integer> source = Observable.just(1)
.replay(2, TimeUnit.SECONDS, scheduler, true)
.autoConnect();
source.test().assertResult(1);
source.test().assertResult(1);
scheduler.advanceTimeBy(3, TimeUnit.SECONDS);
source.test().assertResult();
}
    /**
     * A selector that returns null must surface a NullPointerException with
     * the documented message instead of crashing elsewhere.
     */
    @Test
    public void replaySelectorReturnsNull() {
        Observable.just(1)
        .replay(new Function<Observable<Integer>, Observable<Object>>() {
            @Override
            public Observable<Object> apply(Observable<Integer> v) throws Exception {
                return null;
            }
        })
        .to(TestHelper.<Object>testConsumer())
        .assertFailureAndMessage(NullPointerException.class, "The selector returned a null ObservableSource");
    }
    /**
     * A connectable factory that returns null must likewise fail with a
     * descriptive NullPointerException.
     */
    @Test
    public void replaySelectorConnectableReturnsNull() {
        ObservableReplay.multicastSelector(Functions.justSupplier((ConnectableObservable<Integer>)null), Functions.justFunction(Observable.just(1)))
        .to(TestHelper.<Integer>testConsumer())
        .assertFailureAndMessage(NullPointerException.class, "The connectableFactory returned a null ConnectableObservable");
    }
    /**
     * After completion, a size-bounded buffer must not retain a value in its
     * head node (the head's value must be null) and trimHead() must be a
     * no-op once the head is already value-free.
     */
    @Test
    public void noHeadRetentionCompleteSize() {
        PublishSubject<Integer> source = PublishSubject.create();
        ObservableReplay<Integer> co = (ObservableReplay<Integer>)source
        .replay(1, true);
        co.connect();
        // Reach into the current connection's buffer to inspect retention.
        BoundedReplayBuffer<Integer> buf = (BoundedReplayBuffer<Integer>)(co.current.get().buffer);
        source.onNext(1);
        source.onNext(2);
        source.onComplete();
        assertNull(buf.get().value);
        Object o = buf.get();
        buf.trimHead();
        // trimHead must not replace an already value-free head node.
        assertSame(o, buf.get());
    }
    /**
     * Same head-retention check as above, terminating with onError.
     */
    @Test
    public void noHeadRetentionErrorSize() {
        PublishSubject<Integer> source = PublishSubject.create();
        ObservableReplay<Integer> co = (ObservableReplay<Integer>)source
        .replay(1, true);
        co.connect();
        BoundedReplayBuffer<Integer> buf = (BoundedReplayBuffer<Integer>)(co.current.get().buffer);
        source.onNext(1);
        source.onNext(2);
        source.onError(new TestException());
        assertNull(buf.get().value);
        Object o = buf.get();
        buf.trimHead();
        assertSame(o, buf.get());
    }
    /**
     * Head-retention check while the source is still live: eager size-bound
     * truncation must already have cleared the head value.
     */
    @Test
    public void noHeadRetentionSize() {
        PublishSubject<Integer> source = PublishSubject.create();
        ObservableReplay<Integer> co = (ObservableReplay<Integer>)source
        .replay(1, true);
        co.connect();
        BoundedReplayBuffer<Integer> buf = (BoundedReplayBuffer<Integer>)(co.current.get().buffer);
        source.onNext(1);
        source.onNext(2);
        assertNull(buf.get().value);
        buf.trimHead();
        assertNull(buf.get().value);
        Object o = buf.get();
        buf.trimHead();
        assertSame(o, buf.get());
    }
    /**
     * Time-bounded variant of the head-retention checks: after completion
     * the head node must hold no value and trimHead() must be idempotent.
     */
    @Test
    public void noHeadRetentionCompleteTime() {
        PublishSubject<Integer> source = PublishSubject.create();
        ObservableReplay<Integer> co = (ObservableReplay<Integer>)source
        .replay(1, TimeUnit.MINUTES, Schedulers.computation(), true);
        co.connect();
        BoundedReplayBuffer<Integer> buf = (BoundedReplayBuffer<Integer>)(co.current.get().buffer);
        source.onNext(1);
        source.onNext(2);
        source.onComplete();
        assertNull(buf.get().value);
        Object o = buf.get();
        buf.trimHead();
        assertSame(o, buf.get());
    }
    /**
     * Same time-bounded head-retention check, terminating with onError.
     */
    @Test
    public void noHeadRetentionErrorTime() {
        PublishSubject<Integer> source = PublishSubject.create();
        ObservableReplay<Integer> co = (ObservableReplay<Integer>)source
        .replay(1, TimeUnit.MINUTES, Schedulers.computation(), true);
        co.connect();
        BoundedReplayBuffer<Integer> buf = (BoundedReplayBuffer<Integer>)(co.current.get().buffer);
        source.onNext(1);
        source.onNext(2);
        source.onError(new TestException());
        assertNull(buf.get().value);
        Object o = buf.get();
        buf.trimHead();
        assertSame(o, buf.get());
    }
    /**
     * Live-source, time-bounded head-retention check using a TestScheduler
     * so the retention window can be expired deterministically.
     */
    @Test
    public void noHeadRetentionTime() {
        TestScheduler sch = new TestScheduler();
        PublishSubject<Integer> source = PublishSubject.create();
        ObservableReplay<Integer> co = (ObservableReplay<Integer>)source
        .replay(1, TimeUnit.MILLISECONDS, sch, true);
        co.connect();
        BoundedReplayBuffer<Integer> buf = (BoundedReplayBuffer<Integer>)(co.current.get().buffer);
        source.onNext(1);
        // Let the 1ms window expire so the first item is evicted.
        sch.advanceTimeBy(2, TimeUnit.MILLISECONDS);
        source.onNext(2);
        assertNull(buf.get().value);
        buf.trimHead();
        assertNull(buf.get().value);
        Object o = buf.get();
        buf.trimHead();
        assertSame(o, buf.get());
    }
    /**
     * Heap-based leak check for the bounded selector replay: 200 x 1 MB
     * payloads flow through a replay(selector, 1) pipeline; if the bounded
     * buffer (or a thread-local) retained them, heap usage after consumption
     * would grow by far more than the 100 MB tolerance.
     * NOTE(review): relies on System.gc() and heap measurements, so it is
     * inherently approximate and timing-sensitive.
     */
    @Test
    public void noBoundedRetentionViaThreadLocal() throws Exception {
        Observable<byte[]> source = Observable.range(1, 200)
        .map(new Function<Integer, byte[]>() {
            @Override
            public byte[] apply(Integer v) throws Exception {
                return new byte[1024 * 1024];
            }
        })
        .replay(new Function<Observable<byte[]>, Observable<byte[]>>() {
            @Override
            public Observable<byte[]> apply(final Observable<byte[]> o) throws Exception {
                return o.take(1)
                .concatMap(new Function<byte[], Observable<byte[]>>() {
                    @Override
                    public Observable<byte[]> apply(byte[] v) throws Exception {
                        return o;
                    }
                });
            }
        }, 1)
        .takeLast(1)
        ;
        System.out.println("Bounded Replay Leak check: Wait before GC");
        Thread.sleep(1000);
        System.out.println("Bounded Replay Leak check: GC");
        System.gc();
        Thread.sleep(500);
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        MemoryUsage memHeap = memoryMXBean.getHeapMemoryUsage();
        // Baseline heap usage before the pipeline runs.
        long initial = memHeap.getUsed();
        System.out.printf("Bounded Replay Leak check: Starting: %.3f MB%n", initial / 1024.0 / 1024.0);
        final AtomicLong after = new AtomicLong();
        source.subscribe(new Consumer<byte[]>() {
            @Override
            public void accept(byte[] v) throws Exception {
                System.out.println("Bounded Replay Leak check: Wait before GC 2");
                Thread.sleep(1000);
                System.out.println("Bounded Replay Leak check: GC 2");
                System.gc();
                Thread.sleep(500);
                // Heap usage measured while still inside the pipeline.
                after.set(memoryMXBean.getHeapMemoryUsage().getUsed());
            }
        });
        System.out.printf("Bounded Replay Leak check: After: %.3f MB%n", after.get() / 1024.0 / 1024.0);
        if (initial + 100 * 1024 * 1024 < after.get()) {
            Assert.fail("Bounded Replay Leak check: Memory leak detected: " + (initial / 1024.0 / 1024.0)
                + " -> " + after.get() / 1024.0 / 1024.0);
        }
    }
    /**
     * Eager-truncation leak check for the size-bounded buffer: after a
     * 400 MB int[] is displaced by a tiny array, the large array must be
     * collectible (heap growth stays under the 100 MB tolerance).
     */
    @Test
    public void sizeBoundEagerTruncate() throws Exception {
        PublishSubject<int[]> ps = PublishSubject.create();
        ConnectableObservable<int[]> co = ps.replay(1, true);
        TestObserver<int[]> to = co.test();
        co.connect();
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        long initial = memoryMXBean.getHeapMemoryUsage().getUsed();
        System.out.printf("Bounded Replay Leak check: Starting: %.3f MB%n", initial / 1024.0 / 1024.0);
        ps.onNext(new int[100 * 1024 * 1024]);
        to.assertValueCount(1);
        // Drop the observer's own reference so only the buffer could retain it.
        to.values().clear();
        // The empty array should evict the large one from the size-1 buffer.
        ps.onNext(new int[0]);
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        long after = memoryMXBean.getHeapMemoryUsage().getUsed();
        to.dispose();
        System.out.printf("Bounded Replay Leak check: After: %.3f MB%n", after / 1024.0 / 1024.0);
        if (initial + 100 * 1024 * 1024 < after) {
            Assert.fail("Bounded Replay Leak check: Memory leak detected: " + (initial / 1024.0 / 1024.0)
                + " -> " + after / 1024.0 / 1024.0);
        }
    }
    /**
     * Eager-truncation leak check for the time-bounded buffer: once the
     * retention window elapses (via TestScheduler) and a new item arrives,
     * the large array must be collectible.
     */
    @Test
    public void timeBoundEagerTruncate() throws Exception {
        PublishSubject<int[]> ps = PublishSubject.create();
        TestScheduler scheduler = new TestScheduler();
        ConnectableObservable<int[]> co = ps.replay(1, TimeUnit.SECONDS, scheduler, true);
        TestObserver<int[]> to = co.test();
        co.connect();
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        long initial = memoryMXBean.getHeapMemoryUsage().getUsed();
        System.out.printf("Bounded Replay Leak check: Starting: %.3f MB%n", initial / 1024.0 / 1024.0);
        ps.onNext(new int[100 * 1024 * 1024]);
        to.assertValueCount(1);
        to.values().clear();
        // Expire the 1-second retention window deterministically.
        scheduler.advanceTimeBy(2, TimeUnit.SECONDS);
        ps.onNext(new int[0]);
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        long after = memoryMXBean.getHeapMemoryUsage().getUsed();
        to.dispose();
        System.out.printf("Bounded Replay Leak check: After: %.3f MB%n", after / 1024.0 / 1024.0);
        if (initial + 100 * 1024 * 1024 < after) {
            Assert.fail("Bounded Replay Leak check: Memory leak detected: " + (initial / 1024.0 / 1024.0)
                + " -> " + after / 1024.0 / 1024.0);
        }
    }
    /**
     * Eager-truncation leak check for the combined size-and-time-bounded
     * buffer, mirroring {@code timeBoundEagerTruncate}.
     */
    @Test
    public void timeAndSizeBoundEagerTruncate() throws Exception {
        PublishSubject<int[]> ps = PublishSubject.create();
        TestScheduler scheduler = new TestScheduler();
        ConnectableObservable<int[]> co = ps.replay(1, 5, TimeUnit.SECONDS, scheduler, true);
        TestObserver<int[]> to = co.test();
        co.connect();
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        long initial = memoryMXBean.getHeapMemoryUsage().getUsed();
        System.out.printf("Bounded Replay Leak check: Starting: %.3f MB%n", initial / 1024.0 / 1024.0);
        ps.onNext(new int[100 * 1024 * 1024]);
        to.assertValueCount(1);
        to.values().clear();
        scheduler.advanceTimeBy(2, TimeUnit.SECONDS);
        // The size bound (1) evicts the large array when the new item arrives.
        ps.onNext(new int[0]);
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        long after = memoryMXBean.getHeapMemoryUsage().getUsed();
        to.dispose();
        System.out.printf("Bounded Replay Leak check: After: %.3f MB%n", after / 1024.0 / 1024.0);
        if (initial + 100 * 1024 * 1024 < after) {
            Assert.fail("Bounded Replay Leak check: Memory leak detected: " + (initial / 1024.0 / 1024.0)
                + " -> " + after / 1024.0 / 1024.0);
        }
    }
    /**
     * Selector-overload variant of the size-bounded eager-truncation leak
     * check (identity selector, no explicit connect needed).
     */
    @Test
    public void sizeBoundSelectorEagerTruncate() throws Exception {
        PublishSubject<int[]> ps = PublishSubject.create();
        Observable<int[]> co = ps.replay(Functions.<Observable<int[]>>identity(), 1, true);
        TestObserver<int[]> to = co.test();
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        long initial = memoryMXBean.getHeapMemoryUsage().getUsed();
        System.out.printf("Bounded Replay Leak check: Starting: %.3f MB%n", initial / 1024.0 / 1024.0);
        ps.onNext(new int[100 * 1024 * 1024]);
        to.assertValueCount(1);
        to.values().clear();
        ps.onNext(new int[0]);
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        long after = memoryMXBean.getHeapMemoryUsage().getUsed();
        to.dispose();
        System.out.printf("Bounded Replay Leak check: After: %.3f MB%n", after / 1024.0 / 1024.0);
        if (initial + 100 * 1024 * 1024 < after) {
            Assert.fail("Bounded Replay Leak check: Memory leak detected: " + (initial / 1024.0 / 1024.0)
                + " -> " + after / 1024.0 / 1024.0);
        }
    }
    /**
     * Selector-overload variant of the time-bounded eager-truncation leak
     * check.
     */
    @Test
    public void timeBoundSelectorEagerTruncate() throws Exception {
        PublishSubject<int[]> ps = PublishSubject.create();
        TestScheduler scheduler = new TestScheduler();
        Observable<int[]> co = ps.replay(Functions.<Observable<int[]>>identity(), 1, TimeUnit.SECONDS, scheduler, true);
        TestObserver<int[]> to = co.test();
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        long initial = memoryMXBean.getHeapMemoryUsage().getUsed();
        System.out.printf("Bounded Replay Leak check: Starting: %.3f MB%n", initial / 1024.0 / 1024.0);
        ps.onNext(new int[100 * 1024 * 1024]);
        to.assertValueCount(1);
        to.values().clear();
        // Expire the retention window before pushing the displacing item.
        scheduler.advanceTimeBy(2, TimeUnit.SECONDS);
        ps.onNext(new int[0]);
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        long after = memoryMXBean.getHeapMemoryUsage().getUsed();
        to.dispose();
        System.out.printf("Bounded Replay Leak check: After: %.3f MB%n", after / 1024.0 / 1024.0);
        if (initial + 100 * 1024 * 1024 < after) {
            Assert.fail("Bounded Replay Leak check: Memory leak detected: " + (initial / 1024.0 / 1024.0)
                + " -> " + after / 1024.0 / 1024.0);
        }
    }
    /**
     * Selector-overload variant of the combined size-and-time-bounded
     * eager-truncation leak check.
     */
    @Test
    public void timeAndSizeSelectorBoundEagerTruncate() throws Exception {
        PublishSubject<int[]> ps = PublishSubject.create();
        TestScheduler scheduler = new TestScheduler();
        Observable<int[]> co = ps.replay(Functions.<Observable<int[]>>identity(), 1, 5, TimeUnit.SECONDS, scheduler, true);
        TestObserver<int[]> to = co.test();
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        long initial = memoryMXBean.getHeapMemoryUsage().getUsed();
        System.out.printf("Bounded Replay Leak check: Starting: %.3f MB%n", initial / 1024.0 / 1024.0);
        ps.onNext(new int[100 * 1024 * 1024]);
        to.assertValueCount(1);
        to.values().clear();
        scheduler.advanceTimeBy(2, TimeUnit.SECONDS);
        ps.onNext(new int[0]);
        Thread.sleep(200);
        System.gc();
        Thread.sleep(200);
        long after = memoryMXBean.getHeapMemoryUsage().getUsed();
        to.dispose();
        System.out.printf("Bounded Replay Leak check: After: %.3f MB%n", after / 1024.0 / 1024.0);
        if (initial + 100 * 1024 * 1024 < after) {
            Assert.fail("Bounded Replay Leak check: Memory leak detected: " + (initial / 1024.0 / 1024.0)
                + " -> " + after / 1024.0 / 1024.0);
        }
    }
    /**
     * With a scheduler whose clock steps on every read, time-based trimming
     * must never truncate the terminal event out of the buffer.
     */
    @Test
    public void timeAndSizeNoTerminalTruncationOnTimechange() {
        Observable.just(1).replay(1, 1, TimeUnit.SECONDS, new TimesteppingScheduler(), true)
        .autoConnect()
        .test()
        .assertComplete()
        .assertNoErrors();
    }
    /**
     * Disposing the connection of a size-bounded replay must reset its
     * state so a fresh connect() starts clean: old items are not replayed
     * and new items flow to new observers.
     */
    @Test
    public void disposeNoNeedForResetSizeBound() {
        PublishSubject<Integer> ps = PublishSubject.create();
        ConnectableObservable<Integer> co = ps.replay(10, true);
        TestObserver<Integer> to = co.test();
        Disposable d = co.connect();
        ps.onNext(1);
        // Kill the first connection; its buffered item must not leak through.
        d.dispose();
        to = co.test();
        to.assertEmpty();
        co.connect();
        to.assertEmpty();
        ps.onNext(2);
        to.assertValuesOnly(2);
    }
    /**
     * Time-bounded variant of the dispose/reconnect reset check: after
     * disposing the connection, a new connect() must not replay the old
     * item.
     */
    @Test
    public void disposeNoNeedForResetTimeBound() {
        PublishSubject<Integer> ps = PublishSubject.create();
        ConnectableObservable<Integer> co = ps.replay(10, TimeUnit.MINUTES, Schedulers.single(), true);
        TestObserver<Integer> to = co.test();
        Disposable d = co.connect();
        ps.onNext(1);
        d.dispose();
        to = co.test();
        to.assertEmpty();
        co.connect();
        to.assertEmpty();
        ps.onNext(2);
        to.assertValuesOnly(2);
    }
    /**
     * Size-and-time-bounded variant of the dispose/reconnect reset check.
     * NOTE(review): method name contains a typo ("SIze"); renaming to
     * disposeNoNeedForResetTimeAndSizeBound would change the reported test
     * name, so it is only flagged here.
     */
    @Test
    public void disposeNoNeedForResetTimeAndSIzeBound() {
        PublishSubject<Integer> ps = PublishSubject.create();
        ConnectableObservable<Integer> co = ps.replay(10, 10, TimeUnit.MINUTES, Schedulers.single(), true);
        TestObserver<Integer> to = co.test();
        Disposable d = co.connect();
        ps.onNext(1);
        d.dispose();
        to = co.test();
        to.assertEmpty();
        co.connect();
        to.assertEmpty();
        ps.onNext(2);
        to.assertValuesOnly(2);
    }
}
| InprocessWorker |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/scheduling/ScheduledTasksEndpoint.java | {
"start": 9826,
"end": 10362
} | class ____ extends IntervalTaskDescriptor {
private FixedDelayTaskDescriptor(ScheduledTask scheduledTask, FixedDelayTask task) {
super(scheduledTask, TaskType.FIXED_DELAY, task);
}
private FixedDelayTaskDescriptor(ScheduledTask scheduledTask, TriggerTask task, PeriodicTrigger trigger) {
super(scheduledTask, TaskType.FIXED_DELAY, task, trigger);
}
}
/**
* Description of a {@link FixedRateTask} or a {@link TriggerTask} with a fixed-rate
* {@link PeriodicTrigger}.
*/
public static final | FixedDelayTaskDescriptor |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/visitors/PluginBuilderAttributeVisitor.java | {
"start": 1586,
"end": 2543
} | class ____ extends AbstractPluginVisitor<PluginBuilderAttribute> {
public PluginBuilderAttributeVisitor() {
super(PluginBuilderAttribute.class);
}
@Override
public Object visit(
final Configuration configuration, final Node node, final LogEvent event, final StringBuilder log) {
final String overridden = this.annotation.value();
final String name = overridden.isEmpty() ? this.member.getName() : overridden;
final Map<String, String> attributes = node.getAttributes();
final String rawValue = removeAttributeValue(attributes, name, this.aliases);
final String replacedValue = this.substitutor.replace(event, rawValue);
final Object value = convert(replacedValue, null);
final Object debugValue = this.annotation.sensitive() ? "*****" : value;
StringBuilders.appendKeyDqValue(log, name, debugValue);
return value;
}
}
| PluginBuilderAttributeVisitor |
java | spring-projects__spring-security | messaging/src/test/java/org/springframework/security/messaging/access/expression/DefaultMessageSecurityExpressionHandlerTests.java | {
"start": 2305,
"end": 5600
} | class ____ {
@Mock(answer = Answers.CALLS_REAL_METHODS)
AuthenticationTrustResolver trustResolver;
@Mock
PermissionEvaluator permissionEvaluator;
DefaultMessageSecurityExpressionHandler<Object> handler;
Message<Object> message;
Authentication authentication;
@BeforeEach
public void setup() {
this.handler = new DefaultMessageSecurityExpressionHandler<>();
this.message = new GenericMessage<>("");
this.authentication = new AnonymousAuthenticationToken("key", "anonymous",
AuthorityUtils.createAuthorityList("ROLE_ANONYMOUS"));
}
// SEC-2705
@Test
public void trustResolverPopulated() {
EvaluationContext context = this.handler.createEvaluationContext(this.authentication, this.message);
Expression expression = this.handler.getExpressionParser().parseExpression("authenticated");
assertThat(ExpressionUtils.evaluateAsBoolean(expression, context)).isFalse();
}
@Test
public void trustResolverNull() {
assertThatIllegalArgumentException().isThrownBy(() -> this.handler.setTrustResolver(null));
}
@Test
public void trustResolverCustom() {
this.handler.setTrustResolver(this.trustResolver);
EvaluationContext context = this.handler.createEvaluationContext(this.authentication, this.message);
Expression expression = this.handler.getExpressionParser().parseExpression("authenticated");
given(this.trustResolver.isAnonymous(this.authentication)).willReturn(false);
assertThat(ExpressionUtils.evaluateAsBoolean(expression, context)).isTrue();
}
@Test
public void roleHierarchy() {
this.authentication = new TestingAuthenticationToken("admin", "pass", "ROLE_ADMIN");
RoleHierarchyImpl roleHierarchy = RoleHierarchyImpl.fromHierarchy("ROLE_ADMIN > ROLE_USER");
this.handler.setRoleHierarchy(roleHierarchy);
EvaluationContext context = this.handler.createEvaluationContext(this.authentication, this.message);
Expression expression = this.handler.getExpressionParser().parseExpression("hasRole('ROLE_USER')");
assertThat(ExpressionUtils.evaluateAsBoolean(expression, context)).isTrue();
}
@Test
public void permissionEvaluator() {
this.handler.setPermissionEvaluator(this.permissionEvaluator);
EvaluationContext context = this.handler.createEvaluationContext(this.authentication, this.message);
Expression expression = this.handler.getExpressionParser().parseExpression("hasPermission(message, 'read')");
given(this.permissionEvaluator.hasPermission(this.authentication, this.message, "read")).willReturn(true);
assertThat(ExpressionUtils.evaluateAsBoolean(expression, context)).isTrue();
}
@Test
public void createEvaluationContextSupplierAuthentication() {
Supplier<Authentication> mockAuthenticationSupplier = mock(Supplier.class);
given(mockAuthenticationSupplier.get()).willReturn(this.authentication);
EvaluationContext context = this.handler.createEvaluationContext(mockAuthenticationSupplier, this.message);
verifyNoInteractions(mockAuthenticationSupplier);
assertThat(context.getRootObject()).extracting(TypedValue::getValue)
.asInstanceOf(InstanceOfAssertFactories.type(MessageSecurityExpressionRoot.class))
.extracting(SecurityExpressionRoot::getAuthentication)
.isEqualTo(this.authentication);
verify(mockAuthenticationSupplier).get();
}
}
| DefaultMessageSecurityExpressionHandlerTests |
java | google__truth | core/src/main/java/com/google/common/truth/MathUtil.java | {
"start": 770,
"end": 2550
} | class ____ {
private MathUtil() {}
/**
* Returns true iff {@code left} and {@code right} are finite values within {@code tolerance} of
* each other. Note that both this method and {@link #notEqualWithinTolerance} return false if
* either {@code left} or {@code right} is infinite or NaN.
*/
public static boolean equalWithinTolerance(double left, double right, double tolerance) {
return abs(left - right) <= abs(tolerance);
}
/**
* Returns true iff {@code left} and {@code right} are finite values within {@code tolerance} of
* each other. Note that both this method and {@link #notEqualWithinTolerance} return false if
* either {@code left} or {@code right} is infinite or NaN.
*/
public static boolean equalWithinTolerance(float left, float right, float tolerance) {
return equalWithinTolerance(left, right, (double) tolerance);
}
/**
* Returns true iff {@code left} and {@code right} are finite values not within {@code tolerance}
* of each other. Note that both this method and {@link #equalWithinTolerance} return false if
* either {@code left} or {@code right} is infinite or NaN.
*/
public static boolean notEqualWithinTolerance(double left, double right, double tolerance) {
return isFinite(left) && isFinite(right) && abs(left - right) > abs(tolerance);
}
/**
* Returns true iff {@code left} and {@code right} are finite values not within {@code tolerance}
* of each other. Note that both this method and {@link #equalWithinTolerance} return false if
* either {@code left} or {@code right} is infinite or NaN.
*/
public static boolean notEqualWithinTolerance(float left, float right, float tolerance) {
return notEqualWithinTolerance(left, right, (double) tolerance);
}
}
| MathUtil |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java | {
"start": 1765,
"end": 2894
} | class ____ extends HadoopTestCase {
public TestJobOutputCommitter() throws IOException {
super(CLUSTER_MR, LOCAL_FS, 1, 1);
}
private static String TEST_ROOT_DIR = new File(System.getProperty(
"test.build.data", "/tmp")
+ "/" + "test-job-output-committer").toString();
private static final String CUSTOM_CLEANUP_FILE_NAME = "_custom_cleanup";
private static final String ABORT_KILLED_FILE_NAME = "_custom_abort_killed";
private static final String ABORT_FAILED_FILE_NAME = "_custom_abort_failed";
private static Path inDir = new Path(TEST_ROOT_DIR, "test-input");
private static int outDirs = 0;
private FileSystem fs;
private Configuration conf = null;
@BeforeEach
public void setUp() throws Exception {
super.setUp();
conf = createJobConf();
fs = getFileSystem();
}
@AfterEach
public void tearDown() throws Exception {
fs.delete(new Path(TEST_ROOT_DIR), true);
super.tearDown();
}
/**
* Committer with deprecated {@link FileOutputCommitter#cleanupJob(JobContext)}
* making a _failed/_killed in the output folder
*/
static | TestJobOutputCommitter |
java | apache__avro | lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/DoubleTest.java | {
"start": 2187,
"end": 2801
} | class ____ extends BasicState {
private double[] testData;
private Encoder encoder;
public TestStateEncode() {
super();
}
/**
* Setup each trial
*
* @throws IOException Could not setup test data
*/
@Setup(Level.Trial)
public void doSetupTrial() throws Exception {
this.encoder = super.newEncoder(false, getNullOutputStream());
this.testData = new double[getBatchSize()];
for (int i = 0; i < testData.length; i++) {
testData[i] = super.getRandom().nextDouble();
}
}
}
@State(Scope.Thread)
public static | TestStateEncode |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/InterpolatorException.java | {
"start": 1147,
"end": 2665
} | class ____ extends MavenException {
/**
* Constructs a new InterpolatorException with {@code null} as its
* detail message. The cause is not initialized, and may subsequently be
* initialized by a call to {@link #initCause}.
*/
public InterpolatorException() {}
/**
* Constructs a new InterpolatorException with the specified detail message.
* The cause is not initialized, and may subsequently be initialized by
* a call to {@link #initCause}.
*
* @param message the detail message. The detail message is saved for
* later retrieval by the {@link #getMessage()} method.
*/
public InterpolatorException(String message) {
super(message);
}
/**
* Constructs a new InterpolatorException with the specified detail message and cause.
*
* <p>Note that the detail message associated with {@code cause} is <i>not</i>
* automatically incorporated in this exception's detail message.</p>
*
* @param message the detail message (which is saved for later retrieval
* by the {@link #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the
* {@link #getCause()} method). A {@code null} value is
* permitted, and indicates that the cause is nonexistent or unknown.
*/
public InterpolatorException(String message, Throwable cause) {
super(message, cause);
}
}
| InterpolatorException |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/util/collections/binary/BytesMapTestBase.java | {
"start": 1218,
"end": 4200
} | class ____ {
protected static final long RANDOM_SEED = 76518743207143L;
protected static final int PAGE_SIZE = 32 * 1024;
protected static final int NUM_ENTRIES = 10000;
protected BinaryRowData[] getRandomizedInputs(int num) {
final Random rnd = new Random(RANDOM_SEED);
return getRandomizedInputs(num, rnd, true);
}
protected BinaryRowData[] getRandomizedInputs(int num, Random rnd, boolean nullable) {
BinaryRowData[] lists = new BinaryRowData[num];
for (int i = 0; i < num; i++) {
int intVal = rnd.nextInt(Integer.MAX_VALUE);
long longVal = -rnd.nextLong();
boolean boolVal = longVal % 2 == 0;
String strVal = nullable && boolVal ? null : getString(intVal, intVal % 1024) + i;
Double doubleVal = rnd.nextDouble();
Short shotVal = (short) intVal;
Float floatVal = nullable && boolVal ? null : rnd.nextFloat();
lists[i] = createRow(intVal, strVal, doubleVal, longVal, boolVal, floatVal, shotVal);
}
return lists;
}
protected BinaryRowData createRow(
Integer f0, String f1, Double f2, Long f3, Boolean f4, Float f5, Short f6) {
BinaryRowData row = new BinaryRowData(7);
BinaryRowWriter writer = new BinaryRowWriter(row);
// int, string, double, long, boolean
if (f0 == null) {
writer.setNullAt(0);
} else {
writer.writeInt(0, f0);
}
if (f1 == null) {
writer.setNullAt(1);
} else {
writer.writeString(1, StringData.fromString(f1));
}
if (f2 == null) {
writer.setNullAt(2);
} else {
writer.writeDouble(2, f2);
}
if (f3 == null) {
writer.setNullAt(3);
} else {
writer.writeLong(3, f3);
}
if (f4 == null) {
writer.setNullAt(4);
} else {
writer.writeBoolean(4, f4);
}
if (f5 == null) {
writer.setNullAt(5);
} else {
writer.writeFloat(5, f5);
}
if (f6 == null) {
writer.setNullAt(6);
} else {
writer.writeShort(6, f6);
}
writer.complete();
return row;
}
protected int needNumMemSegments(int numEntries, int valLen, int keyLen, int pageSize) {
return 2 * (valLen + keyLen + 1024 * 3 + 4 + 8 + 8) * numEntries / pageSize;
}
protected int rowLength(RowType tpe) {
return BinaryRowData.calculateFixPartSizeInBytes(tpe.getFieldCount())
+ BytesHashMap.getVariableLength(tpe.getChildren().toArray(new LogicalType[0]));
}
private String getString(int count, int length) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < length; i++) {
builder.append(count);
}
return builder.toString();
}
}
| BytesMapTestBase |
java | spring-projects__spring-framework | spring-r2dbc/src/test/java/org/springframework/r2dbc/connection/lookup/AbstractRoutingConnectionFactoryTests.java | {
"start": 1352,
"end": 6636
} | class ____ {
private static final String ROUTING_KEY = "routingKey";
final DummyRoutingConnectionFactory connectionFactory = new DummyRoutingConnectionFactory();
@Mock
ConnectionFactory defaultConnectionFactory;
@Mock
ConnectionFactory routedConnectionFactory;
@BeforeEach
void before() {
connectionFactory.setDefaultTargetConnectionFactory(defaultConnectionFactory);
}
@Test
void shouldDetermineRoutedFactory() {
connectionFactory.setTargetConnectionFactories(Map.of("key", routedConnectionFactory));
connectionFactory.setConnectionFactoryLookup(new MapConnectionFactoryLookup());
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.contextWrite(Context.of(ROUTING_KEY, "key"))
.as(StepVerifier::create)
.expectNext(routedConnectionFactory)
.verifyComplete();
}
@Test
void shouldFallbackToDefaultConnectionFactory() {
connectionFactory.setTargetConnectionFactories(Map.of("key", routedConnectionFactory));
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.as(StepVerifier::create)
.expectNext(defaultConnectionFactory)
.verifyComplete();
}
@Test
void initializationShouldFailUnsupportedLookupKey() {
connectionFactory.setTargetConnectionFactories(Map.of("key", new Object()));
assertThatIllegalArgumentException().isThrownBy(connectionFactory::initialize);
}
@Test
void initializationShouldFailUnresolvableKey() {
connectionFactory.setTargetConnectionFactories(Map.of("key", "value"));
connectionFactory.setConnectionFactoryLookup(new MapConnectionFactoryLookup());
assertThatThrownBy(connectionFactory::initialize)
.isInstanceOf(ConnectionFactoryLookupFailureException.class)
.hasMessageContaining("No ConnectionFactory with name 'value' registered");
}
@Test
void unresolvableConnectionFactoryRetrievalShouldFail() {
connectionFactory.setLenientFallback(false);
connectionFactory.setConnectionFactoryLookup(new MapConnectionFactoryLookup());
connectionFactory.setTargetConnectionFactories(Map.of("key", routedConnectionFactory));
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.contextWrite(Context.of(ROUTING_KEY, "unknown"))
.as(StepVerifier::create)
.verifyError(IllegalStateException.class);
}
@Test
void connectionFactoryRetrievalWithUnknownLookupKeyShouldReturnDefaultConnectionFactory() {
connectionFactory.setTargetConnectionFactories(Map.of("key", routedConnectionFactory));
connectionFactory.setDefaultTargetConnectionFactory(defaultConnectionFactory);
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.contextWrite(Context.of(ROUTING_KEY, "unknown"))
.as(StepVerifier::create)
.expectNext(defaultConnectionFactory)
.verifyComplete();
}
@Test
void connectionFactoryRetrievalWithoutLookupKeyShouldReturnDefaultConnectionFactory() {
connectionFactory.setTargetConnectionFactories(Map.of("key", routedConnectionFactory));
connectionFactory.setDefaultTargetConnectionFactory(defaultConnectionFactory);
connectionFactory.setLenientFallback(false);
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.as(StepVerifier::create)
.expectNext(defaultConnectionFactory)
.verifyComplete();
}
@Test
void shouldLookupFromMap() {
MapConnectionFactoryLookup lookup =
new MapConnectionFactoryLookup("lookup-key", routedConnectionFactory);
connectionFactory.setConnectionFactoryLookup(lookup);
connectionFactory.setTargetConnectionFactories(Map.of("my-key", "lookup-key"));
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.contextWrite(Context.of(ROUTING_KEY, "my-key"))
.as(StepVerifier::create)
.expectNext(routedConnectionFactory)
.verifyComplete();
}
@Test
void shouldAllowModificationsAfterInitialization() {
MapConnectionFactoryLookup lookup = new MapConnectionFactoryLookup();
connectionFactory.setConnectionFactoryLookup(lookup);
connectionFactory.setTargetConnectionFactories(lookup.getConnectionFactories());
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.contextWrite(Context.of(ROUTING_KEY, "lookup-key"))
.as(StepVerifier::create)
.expectNext(defaultConnectionFactory)
.verifyComplete();
lookup.addConnectionFactory("lookup-key", routedConnectionFactory);
connectionFactory.afterPropertiesSet();
connectionFactory.determineTargetConnectionFactory()
.contextWrite(Context.of(ROUTING_KEY, "lookup-key"))
.as(StepVerifier::create)
.expectNext(routedConnectionFactory)
.verifyComplete();
}
@Test
void initializeShouldDetermineRoutedFactory() {
connectionFactory.setTargetConnectionFactories(Map.of("key", routedConnectionFactory));
connectionFactory.setConnectionFactoryLookup(new MapConnectionFactoryLookup());
connectionFactory.initialize();
connectionFactory.determineTargetConnectionFactory()
.contextWrite(Context.of(ROUTING_KEY, "key"))
.as(StepVerifier::create)
.expectNext(routedConnectionFactory)
.verifyComplete();
}
static | AbstractRoutingConnectionFactoryTests |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/metrics/Http1xWorkerMetricsTest.java | {
"start": 494,
"end": 634
} | class ____ extends Http1xMetricsTest {
public Http1xWorkerMetricsTest() {
super(ThreadingModel.EVENT_LOOP);
}
}
| Http1xWorkerMetricsTest |
java | netty__netty | codec-http/src/test/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshakerTest.java | {
"start": 2802,
"end": 22926
} | class ____ {
protected abstract WebSocketClientHandshaker newHandshaker(URI uri, String subprotocol, HttpHeaders headers,
boolean absoluteUpgradeUrl,
boolean generateOriginHeader);
protected WebSocketClientHandshaker newHandshaker(URI uri) {
return newHandshaker(uri, null, null, false, true);
}
protected abstract CharSequence getOriginHeaderName();
protected abstract CharSequence getProtocolHeaderName();
protected abstract CharSequence[] getHandshakeRequiredHeaderNames();
@Test
public void hostHeaderWs() {
for (String scheme : new String[]{"ws://", "http://"}) {
for (String host : new String[]{"localhost", "127.0.0.1", "[::1]", "Netty.io"}) {
String enter = scheme + host;
testHostHeader(enter, host);
testHostHeader(enter + '/', host);
testHostHeader(enter + ":80", host);
testHostHeader(enter + ":443", host + ":443");
testHostHeader(enter + ":9999", host + ":9999");
testHostHeader(enter + "/path", host);
testHostHeader(enter + ":80/path", host);
testHostHeader(enter + ":443/path", host + ":443");
testHostHeader(enter + ":9999/path", host + ":9999");
}
}
}
@Test
public void hostHeaderWss() {
for (String scheme : new String[]{"wss://", "https://"}) {
for (String host : new String[]{"localhost", "127.0.0.1", "[::1]", "Netty.io"}) {
String enter = scheme + host;
testHostHeader(enter, host);
testHostHeader(enter + '/', host);
testHostHeader(enter + ":80", host + ":80");
testHostHeader(enter + ":443", host);
testHostHeader(enter + ":9999", host + ":9999");
testHostHeader(enter + "/path", host);
testHostHeader(enter + ":80/path", host + ":80");
testHostHeader(enter + ":443/path", host);
testHostHeader(enter + ":9999/path", host + ":9999");
}
}
}
@Test
public void hostHeaderWithoutScheme() {
testHostHeader("//localhost/", "localhost");
testHostHeader("//localhost/path", "localhost");
testHostHeader("//localhost:80/", "localhost:80");
testHostHeader("//localhost:443/", "localhost:443");
testHostHeader("//localhost:9999/", "localhost:9999");
}
@Test
public void originHeaderWs() {
for (String scheme : new String[]{"ws://", "http://"}) {
for (String host : new String[]{"localhost", "127.0.0.1", "[::1]", "NETTY.IO"}) {
String enter = scheme + host;
String expect = "http://" + host.toLowerCase();
testOriginHeader(enter, expect);
testOriginHeader(enter + '/', expect);
testOriginHeader(enter + ":80", expect);
testOriginHeader(enter + ":443", expect + ":443");
testOriginHeader(enter + ":9999", expect + ":9999");
testOriginHeader(enter + "/path%20with%20ws", expect);
testOriginHeader(enter + ":80/path%20with%20ws", expect);
testOriginHeader(enter + ":443/path%20with%20ws", expect + ":443");
testOriginHeader(enter + ":9999/path%20with%20ws", expect + ":9999");
}
}
}
@Test
public void originHeaderWss() {
for (String scheme : new String[]{"wss://", "https://"}) {
for (String host : new String[]{"localhost", "127.0.0.1", "[::1]", "NETTY.IO"}) {
String enter = scheme + host;
String expect = "https://" + host.toLowerCase();
testOriginHeader(enter, expect);
testOriginHeader(enter + '/', expect);
testOriginHeader(enter + ":80", expect + ":80");
testOriginHeader(enter + ":443", expect);
testOriginHeader(enter + ":9999", expect + ":9999");
testOriginHeader(enter + "/path%20with%20ws", expect);
testOriginHeader(enter + ":80/path%20with%20ws", expect + ":80");
testOriginHeader(enter + ":443/path%20with%20ws", expect);
testOriginHeader(enter + ":9999/path%20with%20ws", expect + ":9999");
}
}
}
@Test
public void originHeaderWithoutScheme() {
testOriginHeader("//localhost/", "http://localhost");
testOriginHeader("//localhost/path", "http://localhost");
// http scheme by port
testOriginHeader("//localhost:80/", "http://localhost");
testOriginHeader("//localhost:80/path", "http://localhost");
// https scheme by port
testOriginHeader("//localhost:443/", "https://localhost");
testOriginHeader("//localhost:443/path", "https://localhost");
// http scheme for non standard port
testOriginHeader("//localhost:9999/", "http://localhost:9999");
testOriginHeader("//localhost:9999/path", "http://localhost:9999");
// convert host to lower case
testOriginHeader("//LOCALHOST/", "http://localhost");
}
@Test
public void testSetOriginFromCustomHeaders() {
HttpHeaders customHeaders = new DefaultHttpHeaders().set(getOriginHeaderName(), "http://example.com");
WebSocketClientHandshaker handshaker = newHandshaker(URI.create("ws://server.example.com/chat"), null,
customHeaders, false, true);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals("http://example.com", request.headers().get(getOriginHeaderName()));
} finally {
request.release();
}
}
@Test
public void testOriginHeaderIsAbsentWhenGeneratingDisable() {
URI uri = URI.create("http://example.com/ws");
WebSocketClientHandshaker handshaker = newHandshaker(uri, null, null, false, false);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertFalse(request.headers().contains(getOriginHeaderName()));
assertEquals("/ws", request.uri());
} finally {
request.release();
}
}
@Test
public void testInvalidHostWhenIncorrectWebSocketURI() {
URI uri = URI.create("/ws");
EmbeddedChannel channel = new EmbeddedChannel(new HttpClientCodec());
final WebSocketClientHandshaker handshaker = newHandshaker(uri, null, null, false, true);
final ChannelFuture handshakeFuture = handshaker.handshake(channel);
assertFalse(handshakeFuture.isSuccess());
assertInstanceOf(IllegalArgumentException.class, handshakeFuture.cause());
assertEquals("Cannot generate the 'host' header value, webSocketURI should contain host" +
" or passed through customHeaders", handshakeFuture.cause().getMessage());
assertFalse(channel.finish());
}
@Test
public void testInvalidOriginWhenIncorrectWebSocketURI() {
URI uri = URI.create("/ws");
EmbeddedChannel channel = new EmbeddedChannel(new HttpClientCodec());
HttpHeaders headers = new DefaultHttpHeaders();
headers.set(HttpHeaderNames.HOST, "localhost:80");
final WebSocketClientHandshaker handshaker = newHandshaker(uri, null, headers, false, true);
final ChannelFuture handshakeFuture = handshaker.handshake(channel);
assertFalse(handshakeFuture.isSuccess());
assertInstanceOf(IllegalArgumentException.class, handshakeFuture.cause());
assertEquals("Cannot generate the '" + getOriginHeaderName() + "' header value," +
" webSocketURI should contain host or disable generateOriginHeader" +
" or pass value through customHeaders", handshakeFuture.cause().getMessage());
assertFalse(channel.finish());
}
private void testHostHeader(String uri, String expected) {
testHeaderDefaultHttp(uri, HttpHeaderNames.HOST, expected);
}
private void testOriginHeader(String uri, String expected) {
testHeaderDefaultHttp(uri, getOriginHeaderName(), expected);
}
protected void testHeaderDefaultHttp(String uri, CharSequence header, String expectedValue) {
WebSocketClientHandshaker handshaker = newHandshaker(URI.create(uri));
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals(expectedValue, request.headers().get(header));
} finally {
request.release();
}
}
@Test
@SuppressWarnings("deprecation")
public void testUpgradeUrl() {
URI uri = URI.create("ws://localhost:9999/path%20with%20ws");
WebSocketClientHandshaker handshaker = newHandshaker(uri);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals("/path%20with%20ws", request.getUri());
} finally {
request.release();
}
}
@Test
public void testUpgradeUrlWithQuery() {
URI uri = URI.create("ws://localhost:9999/path%20with%20ws?a=b%20c");
WebSocketClientHandshaker handshaker = newHandshaker(uri);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals("/path%20with%20ws?a=b%20c", request.uri());
} finally {
request.release();
}
}
@Test
public void testUpgradeUrlWithoutPath() {
URI uri = URI.create("ws://localhost:9999");
WebSocketClientHandshaker handshaker = newHandshaker(uri);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals("/", request.uri());
} finally {
request.release();
}
}
@Test
public void testUpgradeUrlWithoutPathWithQuery() {
URI uri = URI.create("ws://localhost:9999?a=b%20c");
WebSocketClientHandshaker handshaker = newHandshaker(uri);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals("/?a=b%20c", request.uri());
} finally {
request.release();
}
}
@Test
public void testAbsoluteUpgradeUrlWithQuery() {
URI uri = URI.create("ws://localhost:9999/path%20with%20ws?a=b%20c");
WebSocketClientHandshaker handshaker = newHandshaker(uri, null, null, true, true);
FullHttpRequest request = handshaker.newHandshakeRequest();
try {
assertEquals("ws://localhost:9999/path%20with%20ws?a=b%20c", request.uri());
} finally {
request.release();
}
}
@Test
@Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
public void testHttpResponseAndFrameInSameBuffer() {
testHttpResponseAndFrameInSameBuffer(false);
}
@Test
@Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
public void testHttpResponseAndFrameInSameBufferCodec() {
testHttpResponseAndFrameInSameBuffer(true);
}
private void testHttpResponseAndFrameInSameBuffer(boolean codec) {
String url = "ws://localhost:9999/ws";
final WebSocketClientHandshaker shaker = newHandshaker(URI.create(url));
final WebSocketClientHandshaker handshaker = new WebSocketClientHandshaker(
shaker.uri(), shaker.version(), null, EmptyHttpHeaders.INSTANCE, Integer.MAX_VALUE, -1) {
@Override
protected FullHttpRequest newHandshakeRequest() {
return shaker.newHandshakeRequest();
}
@Override
protected void verify(FullHttpResponse response) {
// Not do any verification, so we not need to care sending the correct headers etc in the test,
// which would just make things more complicated.
}
@Override
protected WebSocketFrameDecoder newWebsocketDecoder() {
return shaker.newWebsocketDecoder();
}
@Override
protected WebSocketFrameEncoder newWebSocketEncoder() {
return shaker.newWebSocketEncoder();
}
};
// use randomBytes helper from utils to check that it functions properly
byte[] data = WebSocketUtil.randomBytes(24);
// Create a EmbeddedChannel which we will use to encode a BinaryWebsocketFrame to bytes and so use these
// to test the actual handshaker.
WebSocketServerHandshakerFactory factory = new WebSocketServerHandshakerFactory(url, null, false);
FullHttpRequest request = shaker.newHandshakeRequest();
WebSocketServerHandshaker socketServerHandshaker = factory.newHandshaker(request);
request.release();
EmbeddedChannel websocketChannel = new EmbeddedChannel(socketServerHandshaker.newWebSocketEncoder(),
socketServerHandshaker.newWebsocketDecoder());
assertTrue(websocketChannel.writeOutbound(new BinaryWebSocketFrame(Unpooled.wrappedBuffer(data))));
byte[] bytes = ("HTTP/1.1 101 Switching Protocols\r\nSec-Websocket-Accept: not-verify\r\n" +
"Upgrade: websocket\r\n\r\n").getBytes(CharsetUtil.US_ASCII);
CompositeByteBuf compositeByteBuf = Unpooled.compositeBuffer();
compositeByteBuf.addComponent(true, Unpooled.wrappedBuffer(bytes));
for (;;) {
ByteBuf frameBytes = websocketChannel.readOutbound();
if (frameBytes == null) {
break;
}
compositeByteBuf.addComponent(true, frameBytes);
}
EmbeddedChannel ch = new EmbeddedChannel(new HttpObjectAggregator(Integer.MAX_VALUE),
new SimpleChannelInboundHandler<FullHttpResponse>() {
@Override
protected void channelRead0(ChannelHandlerContext ctx, FullHttpResponse msg) throws Exception {
handshaker.finishHandshake(ctx.channel(), msg);
ctx.pipeline().remove(this);
}
});
if (codec) {
ch.pipeline().addFirst(new HttpClientCodec());
} else {
ch.pipeline().addFirst(new HttpRequestEncoder(), new HttpResponseDecoder());
}
// We need to first write the request as HttpClientCodec will fail if we receive a response before a request
// was written.
shaker.handshake(ch).syncUninterruptibly();
for (;;) {
// Just consume the bytes, we are not interested in these.
ByteBuf buf = ch.readOutbound();
if (buf == null) {
break;
}
buf.release();
}
assertTrue(ch.writeInbound(compositeByteBuf));
assertTrue(ch.finish());
BinaryWebSocketFrame frame = ch.readInbound();
ByteBuf expect = Unpooled.wrappedBuffer(data);
try {
assertEquals(expect, frame.content());
assertTrue(frame.isFinalFragment());
assertEquals(0, frame.rsv());
} finally {
expect.release();
frame.release();
}
}
@Test
public void testDuplicateWebsocketHandshakeHeaders() {
URI uri = URI.create("ws://localhost:9999/foo");
HttpHeaders inputHeaders = new DefaultHttpHeaders();
String bogusSubProtocol = "bogusSubProtocol";
String bogusHeaderValue = "bogusHeaderValue";
// add values for the headers that are reserved for use in the websockets handshake
for (CharSequence header : getHandshakeRequiredHeaderNames()) {
if (!HttpHeaderNames.HOST.equals(header)) {
inputHeaders.add(header, bogusHeaderValue);
}
}
inputHeaders.add(getProtocolHeaderName(), bogusSubProtocol);
String realSubProtocol = "realSubProtocol";
WebSocketClientHandshaker handshaker = newHandshaker(uri, realSubProtocol, inputHeaders, false, true);
FullHttpRequest request = handshaker.newHandshakeRequest();
HttpHeaders outputHeaders = request.headers();
// the header values passed in originally have been replaced with values generated by the Handshaker
for (CharSequence header : getHandshakeRequiredHeaderNames()) {
assertEquals(1, outputHeaders.getAll(header).size());
assertNotEquals(bogusHeaderValue, outputHeaders.get(header));
}
// the subprotocol header value is that of the subprotocol string passed into the Handshaker
assertEquals(1, outputHeaders.getAll(getProtocolHeaderName()).size());
assertEquals(realSubProtocol, outputHeaders.get(getProtocolHeaderName()));
request.release();
}
@Test
public void testWebSocketClientHandshakeException() {
URI uri = URI.create("ws://localhost:9999/exception");
WebSocketClientHandshaker handshaker = newHandshaker(uri, null, null, false, true);
FullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.UNAUTHORIZED);
response.headers().set(HttpHeaderNames.WWW_AUTHENTICATE, "realm = access token required");
try {
handshaker.finishHandshake(null, response);
fail("Expected WebSocketClientHandshakeException");
} catch (WebSocketClientHandshakeException exception) {
assertEquals("Invalid handshake response getStatus: 401 Unauthorized", exception.getMessage());
assertEquals(HttpResponseStatus.UNAUTHORIZED, exception.response().status());
assertTrue(exception.response().headers().contains(HttpHeaderNames.WWW_AUTHENTICATE,
"realm = access token required", false));
} finally {
response.release();
}
}
@Test
public void testHandshakeForHttpResponseWithoutAggregator() {
EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestEncoder(), new HttpResponseDecoder());
URI uri = URI.create("ws://localhost:9999/chat");
WebSocketClientHandshaker clientHandshaker = newHandshaker(uri);
FullHttpRequest handshakeRequest = clientHandshaker.newHandshakeRequest();
handshakeRequest.release();
String accept = "";
if (clientHandshaker.version() != WebSocketVersion.V00) {
String acceptSeed = handshakeRequest.headers().get(HttpHeaderNames.SEC_WEBSOCKET_KEY)
+ WEBSOCKET_13_ACCEPT_GUID;
byte[] sha1 = WebSocketUtil.sha1(acceptSeed.getBytes(CharsetUtil.US_ASCII));
accept = WebSocketUtil.base64(sha1);
}
HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, SWITCHING_PROTOCOLS);
response.headers()
.set(HttpHeaderNames.UPGRADE, HttpHeaderValues.WEBSOCKET)
.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.UPGRADE)
.set(HttpHeaderNames.SEC_WEBSOCKET_ACCEPT, accept);
ChannelFuture handshakeFuture = clientHandshaker.processHandshake(channel, response);
assertFalse(handshakeFuture.isDone());
assertNotNull(channel.pipeline().get("handshaker"));
if (clientHandshaker.version() != WebSocketVersion.V00) {
assertNull(channel.pipeline().get("httpAggregator"));
channel.writeInbound(LastHttpContent.EMPTY_LAST_CONTENT);
} else {
assertNotNull(channel.pipeline().get("httpAggregator"));
channel.writeInbound(new DefaultLastHttpContent(
Unpooled.copiedBuffer("8jKS'y:G*Co,Wxa-", CharsetUtil.US_ASCII)));
}
assertTrue(handshakeFuture.isDone());
assertNull(channel.pipeline().get("handshaker"));
assertFalse(channel.finish());
}
}
| WebSocketClientHandshakerTest |
java | elastic__elasticsearch | libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/DiagnosticTrustManager.java | {
"start": 1199,
"end": 1351
} | class ____ to log
* diagnostic messages, so it must be provided with a function by which it can do that.
*/
@FunctionalInterface
public | is |
java | apache__dubbo | dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/utils/MockReferenceConfig.java | {
"start": 1003,
"end": 1810
} | class ____ extends ReferenceConfig<FooService> {
static AtomicLong counter = new AtomicLong();
FooService value;
boolean destroyMethodRun = false;
public static void setCounter(long c) {
counter.set(c);
}
public boolean isGetMethodRun() {
return value != null;
}
public boolean isDestroyMethodRun() {
return destroyMethodRun;
}
@Override
public synchronized FooService get(boolean check) {
if (value != null) return value;
counter.getAndIncrement();
value = super.get(check);
return value;
}
public long getCounter() {
return counter.get();
}
@Override
public synchronized void destroy() {
super.destroy();
destroyMethodRun = true;
}
}
| MockReferenceConfig |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsTests.java | {
"start": 141863,
"end": 141958
} | interface ____ {
}
@DefaultOverrideAliasImplicitMeta1
static | DefaultOverrideImplicitAliasMeta2 |
java | micronaut-projects__micronaut-core | discovery-core/src/main/java/io/micronaut/discovery/ServiceInstanceList.java | {
"start": 812,
"end": 1263
} | interface ____ {
/**
* @return The service ID
*/
String getID();
/**
* Returns the current list of services. Note: This method should NEVER block.
*
* @return The instances
*/
List<ServiceInstance> getInstances();
/**
* @return The context path to use for requests to the service.
*/
default Optional<String> getContextPath() {
return Optional.empty();
}
}
| ServiceInstanceList |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/GlobalMethodSecurityConfigurationTests.java | {
"start": 13986,
"end": 14310
} | class ____ {
@Bean
PermissionEvaluator permissionEvaluator() {
return mock(PermissionEvaluator.class);
}
@Bean
MethodSecurityService service() {
return new MethodSecurityServiceImpl();
}
}
@Configuration
@EnableGlobalMethodSecurity(prePostEnabled = true)
public static | AutowirePermissionEvaluatorConfig |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/lucene/analysis/miscellaneous/DuplicateByteSequenceSpotter.java | {
"start": 3361,
"end": 6519
} | class ____ to containing "this"
// (profiler suggested this was a cost)
static final long TREE_NODE_OBJECT_SIZE = RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.NUM_BYTES_OBJECT_REF;
// A TreeNode specialization with an array ref (dynamically allocated and
// fixed-size)
static final long ROOT_TREE_NODE_OBJECT_SIZE = TREE_NODE_OBJECT_SIZE + RamUsageEstimator.NUM_BYTES_OBJECT_REF;
// A KeyedTreeNode specialization with an array ref (dynamically allocated
// and grown)
static final long LIGHTWEIGHT_TREE_NODE_OBJECT_SIZE = TREE_NODE_OBJECT_SIZE + RamUsageEstimator.NUM_BYTES_OBJECT_REF;
// A KeyedTreeNode specialization with a short-based hit count and a
// sequence of bytes encoded as an int
static final long LEAF_NODE_OBJECT_SIZE = TREE_NODE_OBJECT_SIZE + Short.BYTES + Integer.BYTES;
public DuplicateByteSequenceSpotter() {
this.nodesAllocatedByDepth = new int[4];
this.bytesAllocated = 0;
root = new RootTreeNode((byte) 1, null, 0);
}
/**
* Reset the sequence detection logic to avoid any continuation of the
* immediately previous bytes. A minimum of dupSequenceSize bytes need to be
* added before any new duplicate sequences will be reported.
* Hit counts are not reset by calling this method.
*/
public void startNewSequence() {
sequenceBufferFilled = false;
nextFreePos = 0;
}
/**
* Add a byte to the sequence.
* @param b
* the next byte in a sequence
* @return number of times this byte and the preceding 6 bytes have been
* seen before as a sequence (only counts up to 255)
*
*/
public short addByte(byte b) {
// Add latest byte to circular buffer
sequenceBuffer[nextFreePos] = b;
nextFreePos++;
if (nextFreePos >= sequenceBuffer.length) {
nextFreePos = 0;
sequenceBufferFilled = true;
}
if (sequenceBufferFilled == false) {
return 0;
}
TreeNode node = root;
// replay updated sequence of bytes represented in the circular
// buffer starting from the tail
int p = nextFreePos;
// The first tier of nodes are addressed using individual bytes from the
// sequence
node = node.add(sequenceBuffer[p], 0);
p = nextBufferPos(p);
node = node.add(sequenceBuffer[p], 1);
p = nextBufferPos(p);
node = node.add(sequenceBuffer[p], 2);
// The final 3 bytes in the sequence are represented in an int
// where the 4th byte will contain a hit count.
p = nextBufferPos(p);
int sequence = 0xFF & sequenceBuffer[p];
p = nextBufferPos(p);
sequence = sequence << 8 | (0xFF & sequenceBuffer[p]);
p = nextBufferPos(p);
sequence = sequence << 8 | (0xFF & sequenceBuffer[p]);
return (short) (node.add(sequence << 8) - 1);
}
private int nextBufferPos(int p) {
p++;
if (p >= sequenceBuffer.length) {
p = 0;
}
return p;
}
/**
* Base | reference |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/TextFileBasedIdentityHandler.java | {
"start": 2291,
"end": 7379
} | class ____ implements IdentityHandler {
private static final Logger LOG = LoggerFactory.getLogger(TextFileBasedIdentityHandler.class);
/**
* Expected no of fields in the user mapping file.
*/
private static final int NO_OF_FIELDS_USER_MAPPING = 6;
/**
* Expected no of fields in the group mapping file.
*/
private static final int NO_OF_FIELDS_GROUP_MAPPING = 4;
/**
* Array index for the local username.
* Example:
* a2b27aec-77bd-46dd-8c8c-39611a333331:user1:11000:21000:spi-user1:abcf86e9-5a5b-49e2-a253-f5c9e2afd4ec
*/
private static final int ARRAY_INDEX_FOR_LOCAL_USER_NAME = 1;
/**
* Array index for the security group name.
* Example:
* 1d23024d-957c-4456-aac1-a57f9e2de914:group1:21000:sgp-group1
*/
private static final int ARRAY_INDEX_FOR_LOCAL_GROUP_NAME = 1;
/**
* Array index for the AAD Service Principal's Object ID.
*/
private static final int ARRAY_INDEX_FOR_AAD_SP_OBJECT_ID = 0;
/**
* Array index for the AAD Security Group's Object ID.
*/
private static final int ARRAY_INDEX_FOR_AAD_SG_OBJECT_ID = 0;
private String userMappingFileLocation;
private String groupMappingFileLocation;
private HashMap<String, String> userMap;
private HashMap<String, String> groupMap;
public TextFileBasedIdentityHandler(String userMappingFilePath, String groupMappingFilePath) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(userMappingFilePath),
"Local User to Service Principal mapping filePath cannot by Null or Empty");
Preconditions.checkArgument(!Strings.isNullOrEmpty(groupMappingFilePath),
"Local Group to Security Group mapping filePath cannot by Null or Empty");
this.userMappingFileLocation = userMappingFilePath;
this.groupMappingFileLocation = groupMappingFilePath;
//Lazy Loading
this.userMap = new HashMap<>();
this.groupMap = new HashMap<>();
}
/**
* Perform lookup from Service Principal's Object ID to Local Username.
* @param originalIdentity AAD object ID.
* @return Local User name, if no name found or on exception, returns empty string.
* */
public synchronized String lookupForLocalUserIdentity(String originalIdentity) throws IOException {
if(Strings.isNullOrEmpty(originalIdentity)) {
return EMPTY_STRING;
}
if (userMap.size() == 0) {
loadMap(userMap, userMappingFileLocation, NO_OF_FIELDS_USER_MAPPING, ARRAY_INDEX_FOR_AAD_SP_OBJECT_ID);
}
try {
String username = !Strings.isNullOrEmpty(userMap.get(originalIdentity))
? userMap.get(originalIdentity).split(COLON)[ARRAY_INDEX_FOR_LOCAL_USER_NAME] : EMPTY_STRING;
return username;
} catch (ArrayIndexOutOfBoundsException e) {
LOG.error("Error while parsing the line, returning empty string", e);
return EMPTY_STRING;
}
}
/**
* Perform lookup from Security Group's Object ID to Local Security Group name.
* @param originalIdentity AAD object ID.
* @return Local Security group name, if no name found or on exception, returns empty string.
* */
public synchronized String lookupForLocalGroupIdentity(String originalIdentity) throws IOException {
if(Strings.isNullOrEmpty(originalIdentity)) {
return EMPTY_STRING;
}
if (groupMap.size() == 0) {
loadMap(groupMap, groupMappingFileLocation, NO_OF_FIELDS_GROUP_MAPPING,
ARRAY_INDEX_FOR_AAD_SG_OBJECT_ID);
}
try {
String groupname =
!Strings.isNullOrEmpty(groupMap.get(originalIdentity))
? groupMap.get(originalIdentity).split(COLON)[ARRAY_INDEX_FOR_LOCAL_GROUP_NAME] : EMPTY_STRING;
return groupname;
} catch (ArrayIndexOutOfBoundsException e) {
LOG.error("Error while parsing the line, returning empty string", e);
return EMPTY_STRING;
}
}
/**
* Creates the map from the file using the key index.
* @param cache Instance of cache object to store the data.
* @param fileLocation Location of the file to be loaded.
* @param keyIndex Index of the key from the data loaded from the key.
*/
private static void loadMap(HashMap<String, String> cache, String fileLocation, int noOfFields, int keyIndex)
throws IOException {
LOG.debug("Loading identity map from file {}", fileLocation);
int errorRecord = 0;
File file = new File(fileLocation);
LineIterator it = null;
try {
it = FileUtils.lineIterator(file, "UTF-8");
while (it.hasNext()) {
String line = it.nextLine();
if (!Strings.isNullOrEmpty(line.trim()) && !line.startsWith(HASH)) {
if (line.split(COLON).length != noOfFields) {
errorRecord += 1;
continue;
}
cache.put(line.split(COLON)[keyIndex], line);
}
}
LOG.debug("Loaded map stats - File: {}, Loaded: {}, Error: {} ", fileLocation, cache.size(), errorRecord);
} catch (ArrayIndexOutOfBoundsException e) {
LOG.error("Error while parsing mapping file", e);
} finally {
IOUtils.cleanupWithLogger(LOG, it);
}
}
}
| TextFileBasedIdentityHandler |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralModel.java | {
"start": 1142,
"end": 2295
} | class ____ extends RateLimitGroupingModel {
protected URI uri;
protected RateLimitSettings rateLimitSettings;
protected MistralModel(ModelConfigurations configurations, ModelSecrets secrets) {
super(configurations, secrets);
}
protected MistralModel(RateLimitGroupingModel model, ServiceSettings serviceSettings) {
super(model, serviceSettings);
}
public URI uri() {
return this.uri;
}
@Override
public RateLimitSettings rateLimitSettings() {
return this.rateLimitSettings;
}
@Override
public int rateLimitGroupingHash() {
return Objects.hash(getServiceSettings().modelId(), getSecretSettings().apiKey());
}
// Needed for testing only
public void setURI(String newUri) {
try {
this.uri = new URI(newUri);
} catch (URISyntaxException e) {
// swallow any error
}
}
@Override
public DefaultSecretSettings getSecretSettings() {
return (DefaultSecretSettings) super.getSecretSettings();
}
public abstract ExecutableAction accept(MistralActionVisitor creator);
}
| MistralModel |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/SystemUtils.java | {
"start": 59163,
"end": 59680
} | class ____ loaded.
* </p>
*
* @since 3.12.0
*/
public static final boolean IS_OS_MAC_OSX_CATALINA = getOsMatches("Mac OS X", "10.15");
/**
* The constant {@code true} if this is macOS X Big Sur.
* <p>
* The value depends on the value of the {@link #OS_NAME} and {@link #OS_VERSION} constants.
* </p>
* <p>
* The value is {@code false} if {@link #OS_NAME} or {@link #OS_VERSION} is {@code null}.
* </p>
* <p>
* This value is initialized when the | is |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/providers/FileTestCase.java | {
"start": 626,
"end": 4361
} | class ____ {
private static final String FILE = "src/test/resources/lorem.txt";
@RegisterExtension
static final ResteasyReactiveUnitTest config = new ResteasyReactiveUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(FileResource.class, WithWriterInterceptor.class, WriterInterceptor.class));
@Test
public void testFiles() throws Exception {
String content = Files.readString(Path.of(FILE));
String contentLength = String.valueOf(content.length());
RestAssured.get("/providers/file/file")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, contentLength)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/file-partial")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.body(Matchers.equalTo(content.substring(20, 30)));
RestAssured.get("/providers/file/path")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, contentLength)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/path-partial")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.body(Matchers.equalTo(content.substring(20, 30)));
RestAssured.get("/providers/file/async-file")
.then()
.header(HttpHeaders.CONTENT_LENGTH, Matchers.nullValue())
.statusCode(200)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/mutiny-async-file")
.then()
.header(HttpHeaders.CONTENT_LENGTH, Matchers.nullValue())
.statusCode(200)
.body(Matchers.equalTo(content));
RestAssured.get("/providers/file/async-file-partial")
.then()
.statusCode(200)
.header(HttpHeaders.CONTENT_LENGTH, "10")
.body(Matchers.equalTo(content.substring(20, 30)));
}
@Test
public void testChecks() throws IOException {
// creation-time checks
Path path = Paths.get(FILE);
// works
new PathPart(path, 10, 10);
new PathPart(path, 0, Files.size(path));
// fails
try {
new PathPart(path, -1, 10);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new PathPart(path, 0, -1);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new PathPart(path, 0, 1000);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new PathPart(path, 250, 250);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
File file = new File(FILE);
// works
new FilePart(file, 10, 10);
new FilePart(file, 0, file.length());
// fails
try {
new FilePart(file, -1, 10);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new FilePart(file, 0, -1);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new FilePart(file, 0, 1000);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
try {
new FilePart(file, 250, 250);
Assertions.fail();
} catch (IllegalArgumentException x) {
}
}
}
| FileTestCase |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/MasterTriggerRestoreHook.java | {
"start": 8114,
"end": 8288
} | interface ____ extends java.io.Serializable {
/** Instantiates the {@code MasterTriggerRestoreHook}. */
<V> MasterTriggerRestoreHook<V> create();
}
}
| Factory |
java | quarkusio__quarkus | extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMeters.java | {
"start": 5065,
"end": 5347
} | enum ____ implements KeyName {
/**
* Type of cache pages for this cache.
*/
CACHE_TYPE {
@Override
public String asString() {
return "cache.type";
}
}
}
| AllocatorPooledCacheKeyNames |
java | google__dagger | javatests/artifacts/hilt-android/simple/app/src/sharedTest/java/dagger/hilt/android/simple/AliasOfMultipleScopesTest.java | {
"start": 3238,
"end": 3625
} | interface ____ {
@Provides
@CustomScoped
static CustomScopedDep customScopedDep() {
return new CustomScopedDep() {};
}
@Provides
@AliasScoped
static AliasScopedDep aliasScopedDep() {
return new AliasScopedDep() {};
}
}
/** An activity to test injection. */
@AndroidEntryPoint(ComponentActivity.class)
public static final | CustomTestModule |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java | {
"start": 8290,
"end": 9141
} | class ____ extends Plugin {
@SuppressWarnings("unused")
public MultiplePublicConstructorsPlugin() {
}
@SuppressWarnings("unused")
public MultiplePublicConstructorsPlugin(final Settings settings) {
}
}
final Path home = createTempDir();
final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
final IllegalStateException e = expectThrows(
IllegalStateException.class,
() -> PluginsService.loadPlugin(MultiplePublicConstructorsPlugin.class, settings, home)
);
assertThat(e, hasToString(containsString("no unique public constructor")));
}
public void testLoadPluginWithNoPublicConstructorOfCorrectSignature() {
| MultiplePublicConstructorsPlugin |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/JsonCompleteFileAppenderTest.java | {
"start": 1954,
"end": 4508
} | class ____ {
public JsonCompleteFileAppenderTest(final Class<ContextSelector> contextSelector) {
this.loggerContextRule = new LoggerContextRule("JsonCompleteFileAppenderTest.xml", contextSelector);
this.cleanFiles = new CleanFiles(logFile);
this.ruleChain = RuleChain.outerRule(cleanFiles).around(loggerContextRule);
}
@BeforeClass
public static void beforeClass() {
System.setProperty(ClockFactory.PROPERTY_NAME, Log4jLogEventTest.FixedTimeClock.class.getName());
}
@AfterClass
public static void afterClass() {
System.clearProperty(ClockFactory.PROPERTY_NAME);
}
@Parameters(name = "{0}")
public static Class<?>[] getParameters() {
return CoreContextSelectors.CLASSES;
}
private final File logFile = new File("target", "JsonCompleteFileAppenderTest.log");
private final LoggerContextRule loggerContextRule;
private final CleanFiles cleanFiles;
@Rule
public RuleChain ruleChain;
@Test
public void testFlushAtEndOfBatch() throws Exception {
final Logger logger = this.loggerContextRule.getLogger("com.foo.Bar");
final String logMsg = "Message flushed with immediate flush=true";
logger.info(logMsg);
logger.error(logMsg, new IllegalArgumentException("badarg"));
this.loggerContextRule.getLoggerContext().stop(); // stops async thread
final List<String> lines = Files.readAllLines(logFile.toPath(), StandardCharsets.UTF_8);
final String[] expected = {
"[", // equals
"{", // equals
" \"instant\" : {", //
" \"epochSecond\" : 1234567,", //
" \"nanoOfSecond\" : 890000000", //
" },", //
" \"thread\" : \"main\",", //
" \"level\" : \"INFO\",", //
" \"loggerName\" : \"com.foo.Bar\",", //
" \"message\" : \"Message flushed with immediate flush=true\",", //
" \"endOfBatch\" : false,", //
" \"loggerFqcn\" : \"org.apache.logging.log4j.spi.AbstractLogger\",", //
};
for (int i = 0; i < expected.length; i++) {
final String line = lines.get(i);
assertTrue(
"line " + i + " incorrect: [" + line + "], does not contain: [" + expected[i] + ']',
line.contains(expected[i]));
}
final String location = "testFlushAtEndOfBatch";
assertFalse("no location", lines.get(0).contains(location));
}
}
| JsonCompleteFileAppenderTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ReservationListResponsePBImpl.java | {
"start": 1656,
"end": 4801
} | class ____ extends
ReservationListResponse {
private ReservationListResponseProto proto = ReservationListResponseProto
.getDefaultInstance();
private ReservationListResponseProto.Builder builder = null;
private boolean viaProto = false;
private List<ReservationAllocationState> reservations;
public ReservationListResponsePBImpl() {
builder = ReservationListResponseProto.newBuilder();
}
public ReservationListResponsePBImpl(
ReservationListResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public ReservationListResponseProto getProto() {
if (viaProto) {
mergeLocalToProto();
} else {
proto = builder.build();
}
viaProto = true;
return proto;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = ReservationListResponseProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public List<ReservationAllocationState> getReservationAllocationState() {
initReservations();
mergeLocalToProto();
return this.reservations;
}
@Override
public void setReservationAllocationState(List<ReservationAllocationState>
newReservations) {
if (newReservations == null) {
builder.clearReservations();
return;
}
reservations = newReservations;
mergeLocalToProto();
}
private void mergeLocalToBuilder() {
if (this.reservations != null) {
int size = reservations.size();
builder.clearReservations();
for (int i = 0; i < size; i++) {
builder.addReservations(i, convertToProtoFormat(
reservations.get(i)
));
}
}
}
private void mergeLocalToProto() {
if (viaProto) {
maybeInitBuilder();
}
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private ReservationAllocationStatePBImpl convertFromProtoFormat(
ReservationAllocationStateProto p) {
return new ReservationAllocationStatePBImpl(p);
}
private ReservationAllocationStateProto convertToProtoFormat(
ReservationAllocationState r) {
return ((ReservationAllocationStatePBImpl)r).getProto();
}
private void initReservations() {
if (this.reservations != null) {
return;
}
ReservationListResponseProtoOrBuilder p = viaProto ? proto : builder;
List<ReservationAllocationStateProto> reservationProtos =
p.getReservationsList();
reservations = new ArrayList<>();
for (ReservationAllocationStateProto r : reservationProtos) {
reservations.add(convertFromProtoFormat(r));
}
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
}
| ReservationListResponsePBImpl |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/builder/HashCodeBuilder.java | {
"start": 6067,
"end": 31972
} | class ____ append details of
* @param builder
* the builder to append to
* @param useTransients
* whether to use transient fields
* @param excludeFields
* Collection of String field names to exclude from use in calculation of hash code
*/
private static void reflectionAppend(final Object object, final Class<?> clazz, final HashCodeBuilder builder, final boolean useTransients,
final String[] excludeFields) {
if (isRegistered(object)) {
return;
}
try {
register(object);
// The elements in the returned array are not sorted and are not in any particular order.
final Field[] fields = ArraySorter.sort(clazz.getDeclaredFields(), Comparator.comparing(Field::getName));
AccessibleObject.setAccessible(fields, true);
for (final Field field : fields) {
if (!ArrayUtils.contains(excludeFields, field.getName())
&& !field.getName().contains("$")
&& (useTransients || !Modifier.isTransient(field.getModifiers()))
&& !Modifier.isStatic(field.getModifiers())
&& !field.isAnnotationPresent(HashCodeExclude.class)) {
builder.append(Reflection.getUnchecked(field, object));
}
}
} finally {
unregister(object);
}
}
/**
* Uses reflection to build a valid hash code from the fields of {@code object}.
*
* <p>
* It uses {@code AccessibleObject.setAccessible} to gain access to private fields. This means that it will
* throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
* also not as efficient as testing explicitly.
* </p>
*
* <p>
* Transient members will be not be used, as they are likely derived fields, and not part of the value of the
* {@link Object}.
* </p>
*
* <p>
* Static fields will not be tested. Superclass fields will be included.
* </p>
*
* <p>
* Two randomly chosen, non-zero, odd numbers must be passed in. Ideally these should be different for each class,
* however this is not vital. Prime numbers are preferred, especially for the multiplier.
* </p>
*
* @param initialNonZeroOddNumber
* a non-zero, odd number used as the initial value. This will be the returned
* value if no fields are found to include in the hash code
* @param multiplierNonZeroOddNumber
* a non-zero, odd number used as the multiplier
* @param object
* the Object to create a {@code hashCode} for
* @return int hash code
* @throws NullPointerException
* if the Object is {@code null}
* @throws IllegalArgumentException
* if the number is zero or even
*
* @see HashCodeExclude
*/
public static int reflectionHashCode(final int initialNonZeroOddNumber, final int multiplierNonZeroOddNumber, final Object object) {
return reflectionHashCode(initialNonZeroOddNumber, multiplierNonZeroOddNumber, object, false, null);
}
/**
* Uses reflection to build a valid hash code from the fields of {@code object}.
*
* <p>
* It uses {@code AccessibleObject.setAccessible} to gain access to private fields. This means that it will
* throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
* also not as efficient as testing explicitly.
* </p>
*
* <p>
* If the TestTransients parameter is set to {@code true}, transient members will be tested, otherwise they
* are ignored, as they are likely derived fields, and not part of the value of the {@link Object}.
* </p>
*
* <p>
* Static fields will not be tested. Superclass fields will be included.
* </p>
*
* <p>
* Two randomly chosen, non-zero, odd numbers must be passed in. Ideally these should be different for each class,
* however this is not vital. Prime numbers are preferred, especially for the multiplier.
* </p>
*
* @param initialNonZeroOddNumber
* a non-zero, odd number used as the initial value. This will be the returned
* value if no fields are found to include in the hash code
* @param multiplierNonZeroOddNumber
* a non-zero, odd number used as the multiplier
* @param object
* the Object to create a {@code hashCode} for
* @param testTransients
* whether to include transient fields
* @return int hash code
* @throws NullPointerException
* if the Object is {@code null}
* @throws IllegalArgumentException
* if the number is zero or even
*
* @see HashCodeExclude
*/
public static int reflectionHashCode(final int initialNonZeroOddNumber, final int multiplierNonZeroOddNumber, final Object object,
final boolean testTransients) {
return reflectionHashCode(initialNonZeroOddNumber, multiplierNonZeroOddNumber, object, testTransients, null);
}
/**
* Uses reflection to build a valid hash code from the fields of {@code object}.
*
* <p>
* It uses {@code AccessibleObject.setAccessible} to gain access to private fields. This means that it will
* throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
* also not as efficient as testing explicitly.
* </p>
*
* <p>
* If the TestTransients parameter is set to {@code true}, transient members will be tested, otherwise they
* are ignored, as they are likely derived fields, and not part of the value of the {@link Object}.
* </p>
*
* <p>
* Static fields will not be included. Superclass fields will be included up to and including the specified
* superclass. A null superclass is treated as java.lang.Object.
* </p>
*
* <p>
* Two randomly chosen, non-zero, odd numbers must be passed in. Ideally these should be different for each class,
* however this is not vital. Prime numbers are preferred, especially for the multiplier.
* </p>
*
* @param <T>
* the type of the object involved
* @param initialNonZeroOddNumber
* a non-zero, odd number used as the initial value. This will be the returned
* value if no fields are found to include in the hash code
* @param multiplierNonZeroOddNumber
* a non-zero, odd number used as the multiplier
* @param object
* the Object to create a {@code hashCode} for
* @param testTransients
* whether to include transient fields
* @param reflectUpToClass
* the superclass to reflect up to (inclusive), may be {@code null}
* @param excludeFields
* array of field names to exclude from use in calculation of hash code
* @return int hash code
* @throws NullPointerException
* if the Object is {@code null}
* @throws IllegalArgumentException
* if the number is zero or even
*
* @see HashCodeExclude
* @since 2.0
*/
public static <T> int reflectionHashCode(final int initialNonZeroOddNumber, final int multiplierNonZeroOddNumber, final T object,
final boolean testTransients, final Class<? super T> reflectUpToClass, final String... excludeFields) {
Objects.requireNonNull(object, "object");
final HashCodeBuilder builder = new HashCodeBuilder(initialNonZeroOddNumber, multiplierNonZeroOddNumber);
Class<?> clazz = object.getClass();
reflectionAppend(object, clazz, builder, testTransients, excludeFields);
while (clazz.getSuperclass() != null && clazz != reflectUpToClass) {
clazz = clazz.getSuperclass();
reflectionAppend(object, clazz, builder, testTransients, excludeFields);
}
return builder.toHashCode();
}
/**
* Uses reflection to build a valid hash code from the fields of {@code object}.
*
* <p>
* This constructor uses two hard coded choices for the constants needed to build a hash code.
* </p>
*
* <p>
* It uses {@code AccessibleObject.setAccessible} to gain access to private fields. This means that it will
* throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
* also not as efficient as testing explicitly.
* </p>
*
* <p>
* If the TestTransients parameter is set to {@code true}, transient members will be tested, otherwise they
* are ignored, as they are likely derived fields, and not part of the value of the {@link Object}.
* </p>
*
* <p>
* Static fields will not be tested. Superclass fields will be included. If no fields are found to include
* in the hash code, the result of this method will be constant.
* </p>
*
* @param object
* the Object to create a {@code hashCode} for
* @param testTransients
* whether to include transient fields
* @return int hash code
* @throws NullPointerException
* if the object is {@code null}
*
* @see HashCodeExclude
*/
public static int reflectionHashCode(final Object object, final boolean testTransients) {
return reflectionHashCode(DEFAULT_INITIAL_VALUE, DEFAULT_MULTIPLIER_VALUE, object,
testTransients, null);
}
/**
* Uses reflection to build a valid hash code from the fields of {@code object}.
*
* <p>
* This constructor uses two hard coded choices for the constants needed to build a hash code.
* </p>
*
* <p>
* It uses {@code AccessibleObject.setAccessible} to gain access to private fields. This means that it will
* throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
* also not as efficient as testing explicitly.
* </p>
*
* <p>
* Transient members will be not be used, as they are likely derived fields, and not part of the value of the
* {@link Object}.
* </p>
*
* <p>
* Static fields will not be tested. Superclass fields will be included. If no fields are found to include
* in the hash code, the result of this method will be constant.
* </p>
*
* @param object
* the Object to create a {@code hashCode} for
* @param excludeFields
* Collection of String field names to exclude from use in calculation of hash code
* @return int hash code
* @throws NullPointerException
* if the object is {@code null}
*
* @see HashCodeExclude
*/
public static int reflectionHashCode(final Object object, final Collection<String> excludeFields) {
return reflectionHashCode(object, ReflectionToStringBuilder.toNoNullStringArray(excludeFields));
}
/**
* Uses reflection to build a valid hash code from the fields of {@code object}.
*
* <p>
* This constructor uses two hard coded choices for the constants needed to build a hash code.
* </p>
*
* <p>
* It uses {@code AccessibleObject.setAccessible} to gain access to private fields. This means that it will
* throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
* also not as efficient as testing explicitly.
* </p>
*
* <p>
* Transient members will be not be used, as they are likely derived fields, and not part of the value of the
* {@link Object}.
* </p>
*
* <p>
* Static fields will not be tested. Superclass fields will be included. If no fields are found to include
* in the hash code, the result of this method will be constant.
* </p>
*
* @param object
* the Object to create a {@code hashCode} for
* @param excludeFields
* array of field names to exclude from use in calculation of hash code
* @return int hash code
* @throws NullPointerException
* if the object is {@code null}
*
* @see HashCodeExclude
*/
public static int reflectionHashCode(final Object object, final String... excludeFields) {
return reflectionHashCode(DEFAULT_INITIAL_VALUE, DEFAULT_MULTIPLIER_VALUE, object, false,
null, excludeFields);
}
/**
* Registers the given object. Used by the reflection methods to avoid infinite loops.
*
* @param value
* The object to register.
*/
private static void register(final Object value) {
getRegistry().add(new IDKey(value));
}
/**
* Unregisters the given object.
*
* <p>
* Used by the reflection methods to avoid infinite loops.
* </p>
*
* @param value
* The object to unregister.
* @since 2.3
*/
private static void unregister(final Object value) {
final Set<IDKey> registry = getRegistry();
registry.remove(new IDKey(value));
if (registry.isEmpty()) {
REGISTRY.remove();
}
}
/**
* Constant to use in building the hashCode.
*/
private final int iConstant;
/**
* Running total of the hashCode.
*/
private int iTotal;
/**
* Uses two hard coded choices for the constants needed to build a {@code hashCode}.
*/
public HashCodeBuilder() {
iConstant = 37;
iTotal = 17;
}
/**
* Two randomly chosen, odd numbers must be passed in. Ideally these should be different for each class,
* however this is not vital.
*
* <p>
* Prime numbers are preferred, especially for the multiplier.
* </p>
*
* @param initialOddNumber
* an odd number used as the initial value
* @param multiplierOddNumber
* an odd number used as the multiplier
* @throws IllegalArgumentException
* if the number is even
*/
public HashCodeBuilder(final int initialOddNumber, final int multiplierOddNumber) {
Validate.isTrue(initialOddNumber % 2 != 0, "HashCodeBuilder requires an odd initial value");
Validate.isTrue(multiplierOddNumber % 2 != 0, "HashCodeBuilder requires an odd multiplier");
iConstant = multiplierOddNumber;
iTotal = initialOddNumber;
}
/**
* Append a {@code hashCode} for a {@code boolean}.
*
* <p>
* This adds {@code 1} when true, and {@code 0} when false to the {@code hashCode}.
* </p>
* <p>
* This is in contrast to the standard {@link Boolean#hashCode()} handling, which computes
* a {@code hashCode} value of {@code 1231} for {@link Boolean} instances
* that represent {@code true} or {@code 1237} for {@link Boolean} instances
* that represent {@code false}.
* </p>
* <p>
* This is in accordance with the <em>Effective Java</em> design.
* </p>
*
* @param value
* the boolean to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final boolean value) {
iTotal = iTotal * iConstant + (value ? 0 : 1);
return this;
}
/**
* Append a {@code hashCode} for a {@code boolean} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final boolean[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final boolean element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for a {@code byte}.
*
* @param value
* the byte to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final byte value) {
iTotal = iTotal * iConstant + value;
return this;
}
/**
* Append a {@code hashCode} for a {@code byte} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final byte[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final byte element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for a {@code char}.
*
* @param value
* the char to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final char value) {
iTotal = iTotal * iConstant + value;
return this;
}
/**
* Append a {@code hashCode} for a {@code char} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final char[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final char element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for a {@code double}.
*
* @param value
* the double to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final double value) {
return append(Double.doubleToLongBits(value));
}
/**
* Append a {@code hashCode} for a {@code double} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final double[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final double element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for a {@code float}.
*
* @param value
* the float to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final float value) {
iTotal = iTotal * iConstant + Float.floatToIntBits(value);
return this;
}
/**
* Append a {@code hashCode} for a {@code float} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final float[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final float element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for an {@code int}.
*
* @param value
* the int to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final int value) {
iTotal = iTotal * iConstant + value;
return this;
}
/**
* Append a {@code hashCode} for an {@code int} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final int[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final int element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for a {@code long}.
*
* @param value
* the long to add to the {@code hashCode}
* @return {@code this} instance.
*/
// NOTE: This method uses >> and not >>> as Effective Java and
// Long.hashCode do. Ideally we should switch to >>> at
// some stage. There are backwards compat issues, so
// that will have to wait for the time being. cf LANG-342.
public HashCodeBuilder append(final long value) {
iTotal = iTotal * iConstant + (int) (value ^ value >> 32);
return this;
}
/**
* Append a {@code hashCode} for a {@code long} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final long[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final long element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for an {@link Object}.
*
* @param object
* the Object to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final Object object) {
if (object == null) {
iTotal = iTotal * iConstant;
} else if (ObjectUtils.isArray(object)) {
// factor out array case in order to keep method small enough
// to be inlined
appendArray(object);
} else {
iTotal = iTotal * iConstant + object.hashCode();
}
return this;
}
/**
* Append a {@code hashCode} for an {@link Object} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final Object[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final Object element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for a {@code short}.
*
* @param value
* the short to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final short value) {
iTotal = iTotal * iConstant + value;
return this;
}
/**
* Append a {@code hashCode} for a {@code short} array.
*
* @param array
* the array to add to the {@code hashCode}
* @return {@code this} instance.
*/
public HashCodeBuilder append(final short[] array) {
if (array == null) {
iTotal = iTotal * iConstant;
} else {
for (final short element : array) {
append(element);
}
}
return this;
}
/**
* Append a {@code hashCode} for an array.
*
* @param object
* the array to add to the {@code hashCode}
*/
private void appendArray(final Object object) {
// 'Switch' on type of array, to dispatch to the correct handler
// This handles multidimensional arrays
if (object instanceof long[]) {
append((long[]) object);
} else if (object instanceof int[]) {
append((int[]) object);
} else if (object instanceof short[]) {
append((short[]) object);
} else if (object instanceof char[]) {
append((char[]) object);
} else if (object instanceof byte[]) {
append((byte[]) object);
} else if (object instanceof double[]) {
append((double[]) object);
} else if (object instanceof float[]) {
append((float[]) object);
} else if (object instanceof boolean[]) {
append((boolean[]) object);
} else {
// Not an array of primitives
append((Object[]) object);
}
}
/**
* Adds the result of super.hashCode() to this builder.
*
* @param superHashCode
* the result of calling {@code super.hashCode()}
* @return {@code this} instance.
* @since 2.0
*/
public HashCodeBuilder appendSuper(final int superHashCode) {
iTotal = iTotal * iConstant + superHashCode;
return this;
}
/**
* Returns the computed {@code hashCode}.
*
* @return {@code hashCode} based on the fields appended
* @since 3.0
*/
@Override
public Integer build() {
return Integer.valueOf(toHashCode());
}
/**
* Implements equals using the hash code.
*
* @since 3.13.0
*/
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof HashCodeBuilder)) {
return false;
}
final HashCodeBuilder other = (HashCodeBuilder) obj;
return iTotal == other.iTotal;
}
/**
* The computed {@code hashCode} from toHashCode() is returned due to the likelihood
* of bugs in mis-calling toHashCode() and the unlikeliness of it mattering what the hashCode for
* HashCodeBuilder itself is.
*
* @return {@code hashCode} based on the fields appended
* @since 2.5
*/
@Override
public int hashCode() {
return toHashCode();
}
/**
* Returns the computed {@code hashCode}.
*
* @return {@code hashCode} based on the fields appended
*/
public int toHashCode() {
return iTotal;
}
}
| to |
java | quarkusio__quarkus | integration-tests/redis-devservices/src/test/java/io/quarkus/redis/devservices/it/PlainQuarkusTest.java | {
"start": 287,
"end": 459
} | class ____ {
@Inject
RedisDataSource redisClient;
@Test
public void shouldStartRedisContainer() {
assertNotNull(redisClient);
}
}
| PlainQuarkusTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/SqmTreatedSingularJoin.java | {
"start": 931,
"end": 4826
} | class ____<O,T, S extends T>
extends SqmSingularJoin<O,S>
implements SqmTreatedAttributeJoin<O,T,S> {
private final SqmSingularJoin<O,T> wrappedPath;
private final SqmTreatableDomainType<S> treatTarget;
public SqmTreatedSingularJoin(
SqmSingularJoin<O,T> wrappedPath,
SqmTreatableDomainType<S> treatTarget,
@Nullable String alias) {
this( wrappedPath, treatTarget, alias, false );
}
public SqmTreatedSingularJoin(
SqmSingularJoin<O,T> wrappedPath,
SqmTreatableDomainType<S> treatTarget,
@Nullable String alias,
boolean fetched) {
//noinspection unchecked
super(
wrappedPath.getLhs(),
wrappedPath.getNavigablePath()
.treatAs( treatTarget.getTypeName(), alias ),
(SqmSingularPersistentAttribute<O, S>)
wrappedPath.getAttribute(),
alias,
wrappedPath.getSqmJoinType(),
fetched,
wrappedPath.nodeBuilder()
);
this.treatTarget = treatTarget;
this.wrappedPath = wrappedPath;
}
private SqmTreatedSingularJoin(
NavigablePath navigablePath,
SqmSingularJoin<O,T> wrappedPath,
SqmTreatableDomainType<S> treatTarget,
@Nullable String alias,
boolean fetched) {
//noinspection unchecked
super(
wrappedPath.getLhs(),
navigablePath,
(SqmSingularPersistentAttribute<O, S>)
wrappedPath.getAttribute(),
alias,
wrappedPath.getSqmJoinType(),
fetched,
wrappedPath.nodeBuilder()
);
this.treatTarget = treatTarget;
this.wrappedPath = wrappedPath;
}
@Override
public SqmTreatedSingularJoin<O, T, S> copy(SqmCopyContext context) {
final SqmTreatedSingularJoin<O, T, S> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
final SqmTreatedSingularJoin<O, T, S> path = context.registerCopy(
this,
new SqmTreatedSingularJoin<>(
getNavigablePath(),
wrappedPath.copy( context ),
treatTarget,
getExplicitAlias(),
isFetched()
)
);
copyTo( path, context );
return path;
}
@Override
public SqmSingularJoin<O,T> getWrappedPath() {
return wrappedPath;
}
@Override
public TreatableDomainType<S> getTreatTarget() {
return treatTarget;
}
@Override
public @NonNull SqmBindableType<S> getNodeType() {
return treatTarget;
}
@Override
public SqmPathSource<S> getReferencedPathSource() {
return treatTarget;
}
@Override
public SqmPathSource<S> getResolvedModel() {
return treatTarget;
}
@Override
public void appendHqlString(StringBuilder hql, SqmRenderContext context) {
hql.append( "treat(" );
wrappedPath.appendHqlString( hql, context );
hql.append( " as " );
hql.append( treatTarget.getTypeName() );
hql.append( ')' );
}
@Override
public <S1 extends S> SqmTreatedSingularJoin<O, S, S1> treatAs(EntityDomainType<S1> treatTarget, @Nullable String alias, boolean fetch) {
//noinspection unchecked
return (SqmTreatedSingularJoin<O, S, S1>) wrappedPath.treatAs( treatTarget, alias, fetch );
}
@Override
public <S1 extends S> SqmTreatedSingularJoin<O, S, S1> treatAs(Class<S1> treatJavaType, @Nullable String alias, boolean fetch) {
//noinspection unchecked
return (SqmTreatedSingularJoin<O, S, S1>) wrappedPath.treatAs( treatJavaType, alias, fetch );
}
@Override
public SqmTreatedSingularJoin<O,T,S> on(@Nullable JpaExpression<Boolean> restriction) {
return (SqmTreatedSingularJoin<O, T, S>) super.on( restriction );
}
@Override
public SqmTreatedSingularJoin<O,T,S> on(JpaPredicate @Nullable... restrictions) {
return (SqmTreatedSingularJoin<O, T, S>) super.on( restrictions );
}
@Override
public SqmTreatedSingularJoin<O,T,S> on(@Nullable Expression<Boolean> restriction) {
return (SqmTreatedSingularJoin<O, T, S>) super.on( restriction );
}
@Override
public SqmTreatedSingularJoin<O,T,S> on(Predicate @Nullable... restrictions) {
return (SqmTreatedSingularJoin<O, T, S>) super.on( restrictions );
}
}
| SqmTreatedSingularJoin |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/AbstractAnnotationBeanPostProcessor.java | {
"start": 14824,
"end": 16192
} | class ____ extends InjectionMetadata {
private Class<?> targetClass;
private final Collection<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> fieldElements;
private final Collection<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> methodElements;
public AnnotatedInjectionMetadata(
Class<?> targetClass,
Collection<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> fieldElements,
Collection<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> methodElements) {
super(targetClass, combine(fieldElements, methodElements));
this.targetClass = targetClass;
this.fieldElements = fieldElements;
this.methodElements = methodElements;
}
public Collection<AbstractAnnotationBeanPostProcessor.AnnotatedFieldElement> getFieldElements() {
return fieldElements;
}
public Collection<AbstractAnnotationBeanPostProcessor.AnnotatedMethodElement> getMethodElements() {
return methodElements;
}
// @Override // since Spring 5.2.4
protected boolean needsRefresh(Class<?> clazz) {
if (this.targetClass == clazz) {
return false;
}
// IGNORE Spring CGLIB enhanced | AnnotatedInjectionMetadata |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 28027,
"end": 28526
} | class ____ {
// BUG: Diagnostic contains: instantiated
final X<?> x = null;
}
""")
.doTest();
}
@Test
public void mutableWildcardInstantiation_immutableTypeParameter() {
compilationHelper
.addSourceLines(
"A.java",
"""
import com.google.errorprone.annotations.Immutable;
import com.google.errorprone.annotations.ImmutableTypeParameter;
@Immutable
| Test |
java | quarkusio__quarkus | integration-tests/spring-web/src/main/java/io/quarkus/it/spring/cache/Greeting.java | {
"start": 44,
"end": 376
} | class ____ {
private final String message;
private final Integer count;
public Greeting(String message, Integer count) {
this.message = message;
this.count = count;
}
public String getMessage() {
return message;
}
public Integer getCount() {
return count;
}
}
| Greeting |
java | quarkusio__quarkus | extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/MessageBundleProcessor.java | {
"start": 48796,
"end": 54585
} | enum ____ of %s: %s", constant, maybeEnum, key));
}
}
}
}
return false;
}
private void constructLine(StringBuilder builder, Iterator<String> it) {
if (it.hasNext()) {
String nextLine = adaptLine(it.next());
if (nextLine.endsWith("\\")) {
builder.append(nextLine.substring(0, nextLine.length() - 1));
constructLine(builder, it);
} else {
builder.append(nextLine);
}
}
}
private String adaptLine(String line) {
return line.stripLeading().replace("\\n", "\n");
}
private boolean hasMessageBundleMethod(ClassInfo bundleInterface, String name) {
return messageBundleMethod(bundleInterface, name) != null;
}
private MethodInfo messageBundleMethod(ClassInfo bundleInterface, String name) {
for (MethodInfo method : bundleInterface.methods()) {
if (method.name().equals(name)) {
return method;
}
}
return null;
}
private String generateImplementation(MessageBundleBuildItem bundle, ClassInfo defaultBundleInterface,
String defaultBundleImpl, ClassInfoWrapper bundleInterfaceWrapper, ClassOutput classOutput,
BuildProducer<MessageBundleMethodBuildItem> messageTemplateMethods,
Map<String, String> messageTemplates, String locale, IndexView index) {
ClassInfo bundleInterface = bundleInterfaceWrapper.getClassInfo();
LOG.debugf("Generate bundle implementation for %s", bundleInterface);
AnnotationInstance bundleAnnotation = defaultBundleInterface != null
? defaultBundleInterface.declaredAnnotation(Names.BUNDLE)
: bundleInterface.declaredAnnotation(Names.BUNDLE);
String bundleName = bundle.getName();
AnnotationValue defaultKeyValue = bundleAnnotation.value(BUNDLE_DEFAULT_KEY);
String baseName;
if (bundleInterface.enclosingClass() != null) {
baseName = DotNames.simpleName(bundleInterface.enclosingClass()) + ValueResolverGenerator.NESTED_SEPARATOR
+ DotNames.simpleName(bundleInterface);
} else {
baseName = DotNames.simpleName(bundleInterface);
}
if (locale != null) {
baseName = baseName + "_" + locale;
}
String generatedClassName = bundleInterface.name()
+ (locale != null ? "_" + locale : "")
+ SUFFIX;
String resolveMethodPrefix = baseName + SUFFIX;
Gizmo gizmo = Gizmo.create(classOutput)
.withDebugInfo(false)
.withParameters(false);
gizmo.class_(generatedClassName, cc -> {
// MyMessages_Bundle implements MyMessages, Resolver
cc.implements_(classDescOf(bundleInterface));
cc.implements_(Resolver.class);
if (defaultBundleImpl != null) {
cc.extends_(ClassDesc.of(defaultBundleImpl));
}
cc.defaultConstructor();
// key -> method
Map<String, MessageMethod> keyMap = new LinkedHashMap<>();
List<MethodInfo> methods = new ArrayList<>(bundleInterfaceWrapper.methods());
// Sort methods
methods.sort(Comparator.comparing(MethodInfo::name).thenComparing(Comparator.comparing(MethodInfo::toString)));
for (MethodInfo method : methods) {
cc.method(methodDescOf(method), mc -> {
List<ParamVar> params = new ArrayList<>(method.parametersCount());
for (int i = 0; i < method.parametersCount(); i++) {
String paramName = method.parameterName(i);
params.add(mc.parameter(paramName != null ? paramName : "arg" + i, i));
}
if (!method.returnType().name().equals(DotNames.STRING)) {
throw new MessageBundleException(
String.format("A message bundle method must return java.lang.String: %s#%s",
bundleInterface, method.name()));
}
LOG.debugf("Found message bundle method %s on %s", method, bundleInterface);
AnnotationInstance messageAnnotation;
if (defaultBundleInterface != null) {
MethodInfo defaultBundleMethod = bundleInterfaceWrapper.method(method.name(),
method.parameterTypes().toArray(new Type[] {}));
if (defaultBundleMethod == null) {
throw new MessageBundleException(
String.format("Default bundle method not found on %s: %s", bundleInterface, method));
}
messageAnnotation = defaultBundleMethod.annotation(Names.MESSAGE);
} else {
messageAnnotation = method.annotation(Names.MESSAGE);
}
if (messageAnnotation == null) {
LOG.debugf("@Message not declared on %s#%s - using the default key/value", bundleInterface, method);
messageAnnotation = AnnotationInstance.builder(Names.MESSAGE).value(Message.DEFAULT_VALUE)
.add("name", Message.DEFAULT_NAME).build();
}
String key = getKey(method, messageAnnotation, defaultKeyValue);
if (key.equals(MESSAGE)) {
throw new MessageBundleException(String.format(
"A message bundle | constant |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/jdbc/MergedSqlConfigTests.java | {
"start": 17054,
"end": 17358
} | class ____ {
@Sql
public void globalConfigMethod() {
}
@Sql(config = @SqlConfig(encoding = "local", separator = "@@", commentPrefix = "#", errorMode = CONTINUE_ON_ERROR))
public void globalConfigWithLocalOverridesMethod() {
}
}
}
@SqlConfig(commentPrefix = "`")
public static | Nested |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java | {
"start": 750,
"end": 2083
} | class ____ extends BucketMetricsPipelineAggregator {
private final double sigma;
private double sum = 0;
private long count = 0;
private double min = Double.POSITIVE_INFINITY;
private double max = Double.NEGATIVE_INFINITY;
private double sumOfSqrs = 1;
ExtendedStatsBucketPipelineAggregator(
String name,
String[] bucketsPaths,
double sigma,
GapPolicy gapPolicy,
DocValueFormat formatter,
Map<String, Object> metadata
) {
super(name, bucketsPaths, gapPolicy, formatter, metadata);
this.sigma = sigma;
}
@Override
protected void preCollection() {
sum = 0;
count = 0;
min = Double.POSITIVE_INFINITY;
max = Double.NEGATIVE_INFINITY;
sumOfSqrs = 0;
}
@Override
protected void collectBucketValue(String bucketKey, Double bucketValue) {
sum += bucketValue;
min = Math.min(min, bucketValue);
max = Math.max(max, bucketValue);
count += 1;
sumOfSqrs += bucketValue * bucketValue;
}
@Override
protected InternalAggregation buildAggregation(Map<String, Object> metadata) {
return new InternalExtendedStatsBucket(name(), count, sum, min, max, sumOfSqrs, sigma, format, metadata);
}
}
| ExtendedStatsBucketPipelineAggregator |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/HdfsManifestToResourcesPlugin.java | {
"start": 2389,
"end": 2663
} | class ____ a plugin for the
* {@link org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.RuncContainerRuntime}
* that maps runC image manifests into their associated config and
* layers that are located in HDFS.
*/
@InterfaceStability.Unstable
public | is |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/query/internal/impl/AbstractAuditAssociationQuery.java | {
"start": 1686,
"end": 1868
} | class ____ all {@link AuditAssociationQuery} implementations.
*
* @author Felix Feisst (feisst dot felix at gmail dot com)
* @author Chris Cranford
*/
@Incubating
public abstract | for |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/partial/ThriftMetadata.java | {
"start": 5489,
"end": 6647
} | class ____ extends ThriftObject {
ThriftPrimitive(ThriftObject parent, TFieldIdEnum fieldId, FieldMetaData data) {
super(parent, fieldId, data);
}
public boolean isBinary() {
return this.data.valueMetaData.isBinary();
}
@Override
protected void toPrettyString(StringBuilder sb, int level) {
String fieldType = this.getTypeName();
this.append(sb, "%s%s %s;\n", this.getIndent(level), fieldType, this.getName());
}
private String getTypeName() {
byte fieldType = this.data.valueMetaData.type;
switch (fieldType) {
case TType.BOOL:
return "bool";
case TType.BYTE:
return "byte";
case TType.I16:
return "i16";
case TType.I32:
return "i32";
case TType.I64:
return "i64";
case TType.DOUBLE:
return "double";
case TType.STRING:
if (this.isBinary()) {
return "binary";
} else {
return "string";
}
default:
throw unsupportedFieldTypeException(fieldType);
}
}
}
public static | ThriftPrimitive |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/db2/ast/DB2Statement.java | {
"start": 719,
"end": 778
} | interface ____ extends SQLStatement, DB2Object {
}
| DB2Statement |
java | apache__camel | components/camel-twilio/src/generated/java/org/apache/camel/component/twilio/SipDomainCredentialListMappingEndpointConfigurationConfigurer.java | {
"start": 753,
"end": 4666
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("ApiName", org.apache.camel.component.twilio.internal.TwilioApiName.class);
map.put("CredentialListSid", java.lang.String.class);
map.put("MethodName", java.lang.String.class);
map.put("PathAccountSid", java.lang.String.class);
map.put("PathDomainSid", java.lang.String.class);
map.put("PathSid", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.twilio.SipDomainCredentialListMappingEndpointConfiguration target = (org.apache.camel.component.twilio.SipDomainCredentialListMappingEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.twilio.internal.TwilioApiName.class, value)); return true;
case "credentiallistsid":
case "credentialListSid": target.setCredentialListSid(property(camelContext, java.lang.String.class, value)); return true;
case "methodname":
case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true;
case "pathaccountsid":
case "pathAccountSid": target.setPathAccountSid(property(camelContext, java.lang.String.class, value)); return true;
case "pathdomainsid":
case "pathDomainSid": target.setPathDomainSid(property(camelContext, java.lang.String.class, value)); return true;
case "pathsid":
case "pathSid": target.setPathSid(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": return org.apache.camel.component.twilio.internal.TwilioApiName.class;
case "credentiallistsid":
case "credentialListSid": return java.lang.String.class;
case "methodname":
case "methodName": return java.lang.String.class;
case "pathaccountsid":
case "pathAccountSid": return java.lang.String.class;
case "pathdomainsid":
case "pathDomainSid": return java.lang.String.class;
case "pathsid":
case "pathSid": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.twilio.SipDomainCredentialListMappingEndpointConfiguration target = (org.apache.camel.component.twilio.SipDomainCredentialListMappingEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "apiname":
case "apiName": return target.getApiName();
case "credentiallistsid":
case "credentialListSid": return target.getCredentialListSid();
case "methodname":
case "methodName": return target.getMethodName();
case "pathaccountsid":
case "pathAccountSid": return target.getPathAccountSid();
case "pathdomainsid":
case "pathDomainSid": return target.getPathDomainSid();
case "pathsid":
case "pathSid": return target.getPathSid();
default: return null;
}
}
}
| SipDomainCredentialListMappingEndpointConfigurationConfigurer |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/extension-codestart/deployment/src/main/java/io/quarkiverse/custom/deployment/CustomProcessor.java | {
"start": 153,
"end": 333
} | class ____ {
private static final String FEATURE = "custom";
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(FEATURE);
}
}
| CustomProcessor |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ExportException.java | {
"start": 633,
"end": 2266
} | class ____ extends ElasticsearchException implements Iterable<ExportException> {
private final List<ExportException> exceptions = new ArrayList<>();
public ExportException(Throwable throwable) {
super(throwable);
}
public ExportException(String msg, Object... args) {
super(msg, args);
}
public ExportException(String msg, Throwable throwable, Object... args) {
super(msg, throwable, args);
}
public ExportException(StreamInput in) throws IOException {
super(in);
for (int i = in.readVInt(); i > 0; i--) {
exceptions.add(new ExportException(in));
}
}
public boolean addExportException(ExportException e) {
return exceptions.add(e);
}
public boolean hasExportExceptions() {
return exceptions.size() > 0;
}
@Override
public Iterator<ExportException> iterator() {
return exceptions.iterator();
}
@Override
protected void writeTo(StreamOutput out, Writer<Throwable> nestedExceptionsWriter) throws IOException {
super.writeTo(out, nestedExceptionsWriter);
out.writeCollection(exceptions);
}
@Override
protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException {
if (hasExportExceptions()) {
builder.startArray("exceptions");
for (ExportException exception : exceptions) {
builder.startObject();
exception.toXContent(builder, params);
builder.endObject();
}
builder.endArray();
}
}
}
| ExportException |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/CorsConfigurerTests.java | {
"start": 10232,
"end": 11056
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((requests) -> requests
.anyRequest().authenticated())
.cors(withDefaults());
return http.build();
// @formatter:on
}
@Bean
CorsConfigurationSource corsConfigurationSource() {
UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
CorsConfiguration corsConfiguration = new CorsConfiguration();
corsConfiguration.setAllowedOrigins(Collections.singletonList("*"));
corsConfiguration.setAllowedMethods(Arrays.asList(RequestMethod.GET.name(), RequestMethod.POST.name()));
source.registerCorsConfiguration("/**", corsConfiguration);
return source;
}
}
@Configuration
@EnableWebSecurity
static | ConfigSourceConfig |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/image/node/AclsImageByIdNode.java | {
"start": 1040,
"end": 1947
} | class ____ implements MetadataNode {
/**
* The name of this node.
*/
public static final String NAME = "byId";
/**
* The ACLs image.
*/
private final AclsImage image;
public AclsImageByIdNode(AclsImage image) {
this.image = image;
}
@Override
public Collection<String> childNames() {
ArrayList<String> childNames = new ArrayList<>();
for (Uuid uuid : image.acls().keySet()) {
childNames.add(uuid.toString());
}
return childNames;
}
@Override
public MetadataNode child(String name) {
Uuid uuid;
try {
uuid = Uuid.fromString(name);
} catch (Exception e) {
return null;
}
StandardAcl acl = image.acls().get(uuid);
if (acl == null) return null;
return new MetadataLeafNode(acl.toString());
}
}
| AclsImageByIdNode |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/invoker/mvnsh/CommonsCliShellOptions.java | {
"start": 1917,
"end": 2113
} | class ____ extends CommonsCliOptions.CLIManager {
@Override
protected String commandLineSyntax(String command) {
return command + " [options]";
}
}
}
| CLIManager |
java | apache__rocketmq | openmessaging/src/test/java/io/openmessaging/rocketmq/utils/BeanUtilsTest.java | {
"start": 1099,
"end": 1193
} | class ____ {
private KeyValue properties = OMS.newKeyValue();
public static | BeanUtilsTest |
java | apache__camel | components/camel-jmx/src/test/java/org/apache/camel/component/jmx/CamelJmxConsumerTest.java | {
"start": 1203,
"end": 2421
} | class ____ extends CamelTestSupport {
@Override
protected boolean useJmx() {
return true;
}
@Test
public void testJmxConsumer() throws Exception {
getMockEndpoint("mock:result").expectedMinimumMessageCount(1);
getMockEndpoint("mock:result").message(0).body().contains("<newValue>true</newValue>");
// change the attribute so JMX triggers
ManagedRouteMBean mr
= context.getCamelContextExtension().getContextPlugin(ManagedCamelContext.class).getManagedRoute("foo");
mr.setTracing(true);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String id = getContext().getName();
fromF("jmx:platform?objectDomain=org.apache.camel&key.context=%s&key.type=routes&key.name=\"foo\"", id)
.routeId("jmxRoute")
.to("log:jmx")
.to("mock:result");
from("direct:foo").routeId("foo").to("log:foo", "mock:foo");
}
};
}
}
| CamelJmxConsumerTest |
java | apache__logging-log4j2 | log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/junit/LoggerContextRule.java | {
"start": 9634,
"end": 12660
} | class ____ this LoggerContext.
*
* @param clazz
* The Class whose name should be used as the Logger name. If null it will default to the calling class.
* @return the named Logger.
*/
public Logger getLogger(final Class<?> clazz) {
return loggerContext.getLogger(clazz.getName());
}
/**
* Gets a named Logger in this LoggerContext.
*
* @param name
* the name of the Logger to look up or create.
* @return the named Logger.
*/
public Logger getLogger(final String name) {
return loggerContext.getLogger(name);
}
/**
* Gets a named Appender or throws an exception for this LoggerContext.
*
* @param name
* the name of the Appender to look up.
* @return the named Appender.
* @throws AssertionError
* if the Appender doesn't exist.
*/
public Appender getRequiredAppender(final String name) {
final Appender appender = getAppender(name);
assertNotNull("Appender named " + name + " was null.", appender);
return appender;
}
/**
* Gets a named Appender or throws an exception for this LoggerContext.
*
* @param <T>
* The target Appender class
* @param name
* the name of the Appender to look up.
* @param cls
* The target Appender class
* @return the named Appender.
* @throws AssertionError
* if the Appender doesn't exist.
*/
public <T extends Appender> T getRequiredAppender(final String name, final Class<T> cls) {
final T appender = getAppender(name, cls);
assertNotNull("Appender named " + name + " was null in logger context " + loggerContext, appender);
return appender;
}
/**
* Gets the root logger.
*
* @return the root logger.
*/
public Logger getRootLogger() {
return loggerContext.getRootLogger();
}
public void reconfigure() {
loggerContext.reconfigure();
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder();
builder.append("LoggerContextRule [configLocation=");
builder.append(configurationLocation);
builder.append(", contextSelectorClass=");
builder.append(contextSelectorClass);
builder.append("]");
return builder.toString();
}
public RuleChain withCleanFilesRule(final String... files) {
return RuleChain.outerRule(new CleanFiles(files)).around(this);
}
public RuleChain withCleanFoldersRule(
final boolean before, final boolean after, final int maxTries, final String... folders) {
return RuleChain.outerRule(new CleanFolders(before, after, maxTries, folders))
.around(this);
}
public RuleChain withCleanFoldersRule(final String... folders) {
return RuleChain.outerRule(new CleanFolders(folders)).around(this);
}
}
| in |
java | apache__kafka | clients/src/test/java/org/apache/kafka/test/MockProducerInterceptor.java | {
"start": 1356,
"end": 5523
} | class ____ implements ClusterResourceListener, ProducerInterceptor<String, String> {
public static final AtomicInteger INIT_COUNT = new AtomicInteger(0);
public static final AtomicInteger CLOSE_COUNT = new AtomicInteger(0);
public static final AtomicInteger ONSEND_COUNT = new AtomicInteger(0);
public static final AtomicInteger CONFIG_COUNT = new AtomicInteger(0);
public static final AtomicInteger THROW_CONFIG_EXCEPTION = new AtomicInteger(0);
public static final AtomicInteger THROW_ON_CONFIG_EXCEPTION_THRESHOLD = new AtomicInteger(0);
public static final AtomicInteger ON_SUCCESS_COUNT = new AtomicInteger(0);
public static final AtomicInteger ON_ERROR_COUNT = new AtomicInteger(0);
public static final AtomicInteger ON_ERROR_WITH_METADATA_COUNT = new AtomicInteger(0);
public static final AtomicInteger ON_ACKNOWLEDGEMENT_COUNT = new AtomicInteger(0);
public static final AtomicReference<ClusterResource> CLUSTER_META = new AtomicReference<>();
public static final ClusterResource NO_CLUSTER_ID = new ClusterResource("no_cluster_id");
public static final AtomicReference<ClusterResource> CLUSTER_ID_BEFORE_ON_ACKNOWLEDGEMENT = new AtomicReference<>(NO_CLUSTER_ID);
public static final String APPEND_STRING_PROP = "mock.interceptor.append";
private String appendStr;
public MockProducerInterceptor() {
INIT_COUNT.incrementAndGet();
}
@Override
public void configure(Map<String, ?> configs) {
// ensure this method is called and expected configs are passed in
Object o = configs.get(APPEND_STRING_PROP);
if (o == null)
throw new ConfigException("Mock producer interceptor expects configuration " + APPEND_STRING_PROP);
if (o instanceof String)
appendStr = (String) o;
// clientId also must be in configs
Object clientIdValue = configs.get(ProducerConfig.CLIENT_ID_CONFIG);
if (clientIdValue == null)
throw new ConfigException("Mock producer interceptor expects configuration " + ProducerConfig.CLIENT_ID_CONFIG);
CONFIG_COUNT.incrementAndGet();
if (CONFIG_COUNT.get() == THROW_ON_CONFIG_EXCEPTION_THRESHOLD.get()) {
throw new ConfigException("Failed to instantiate interceptor. Reached configuration exception threshold.");
}
}
@Override
public ProducerRecord<String, String> onSend(ProducerRecord<String, String> record) {
ONSEND_COUNT.incrementAndGet();
return new ProducerRecord<>(
record.topic(), record.partition(), record.key(), record.value().concat(appendStr));
}
@Override
public void onAcknowledgement(RecordMetadata metadata, Exception exception) {
ON_ACKNOWLEDGEMENT_COUNT.incrementAndGet();
// This will ensure that we get the cluster metadata when onAcknowledgement is called for the first time
// as subsequent compareAndSet operations will fail.
CLUSTER_ID_BEFORE_ON_ACKNOWLEDGEMENT.compareAndSet(NO_CLUSTER_ID, CLUSTER_META.get());
if (exception != null) {
ON_ERROR_COUNT.incrementAndGet();
if (metadata != null) {
ON_ERROR_WITH_METADATA_COUNT.incrementAndGet();
}
} else if (metadata != null)
ON_SUCCESS_COUNT.incrementAndGet();
}
@Override
public void close() {
CLOSE_COUNT.incrementAndGet();
}
public static void setThrowOnConfigExceptionThreshold(int value) {
THROW_ON_CONFIG_EXCEPTION_THRESHOLD.set(value);
}
public static void resetCounters() {
INIT_COUNT.set(0);
CLOSE_COUNT.set(0);
ONSEND_COUNT.set(0);
CONFIG_COUNT.set(0);
THROW_CONFIG_EXCEPTION.set(0);
ON_SUCCESS_COUNT.set(0);
ON_ERROR_COUNT.set(0);
ON_ERROR_WITH_METADATA_COUNT.set(0);
THROW_ON_CONFIG_EXCEPTION_THRESHOLD.set(0);
CLUSTER_META.set(null);
CLUSTER_ID_BEFORE_ON_ACKNOWLEDGEMENT.set(NO_CLUSTER_ID);
}
@Override
public void onUpdate(ClusterResource clusterResource) {
CLUSTER_META.set(clusterResource);
}
} | MockProducerInterceptor |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobgraph/JobTaskVertexTest.java | {
"start": 9859,
"end": 10715
} | class ____
implements OutputFormat<Object>, InitializeOnMaster {
private final SharedReference<AtomicInteger> globalParallelism;
private TestInitializeOutputFormat(SharedReference<AtomicInteger> globalParallelism) {
this.globalParallelism = globalParallelism;
}
@Override
public void configure(Configuration parameters) {}
@Override
public void open(InitializationContext context) throws IOException {}
@Override
public void writeRecord(Object record) throws IOException {}
@Override
public void close() throws IOException {}
@Override
public void initializeGlobal(int parallelism) throws IOException {
globalParallelism.get().set(parallelism);
}
}
private static final | TestInitializeOutputFormat |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/spi/AbstractDelegatingMetadataBuildingOptions.java | {
"start": 874,
"end": 1060
} | class ____ custom implementors of {@link MetadataBuildingOptions} using delegation.
*
* @author Gunnar Morling
* @author Steve Ebersole
*/
@SuppressWarnings("unused")
public abstract | for |
java | grpc__grpc-java | okhttp/src/test/java/io/grpc/okhttp/OkHttpProtocolNegotiatorTest.java | {
"start": 5909,
"end": 6509
} | class ____ extends FakeAndroidSslSocket {
@Override
public byte[] getAlpnSelectedProtocol() {
return "h2".getBytes(UTF_8);
}
}
@Test
public void getSelectedProtocol_alpn() throws Exception {
when(platform.getTlsExtensionType()).thenReturn(TlsExtensionType.ALPN_AND_NPN);
AndroidNegotiator negotiator = new AndroidNegotiator(platform);
FakeAndroidSslSocket androidSock = new FakeAndroidSslSocketAlpn();
String actual = negotiator.getSelectedProtocol(androidSock);
assertEquals("h2", actual);
}
@VisibleForTesting
public static | FakeAndroidSslSocketAlpn |
java | google__truth | extensions/proto/src/main/java/com/google/common/truth/extensions/proto/FieldScopeLogic.java | {
"start": 7137,
"end": 8233
} | class ____ extends FieldScopeLogic {
private static final PartialScopeLogic EMPTY = new PartialScopeLogic(FieldNumberTree.empty());
private final FieldNumberTree fieldNumberTree;
PartialScopeLogic(FieldNumberTree fieldNumberTree) {
this.fieldNumberTree = fieldNumberTree;
}
@Override
public String toString() {
return String.format("PartialScopeLogic(%s)", fieldNumberTree);
}
@Override
final FieldScopeResult policyFor(Descriptor rootDescriptor, SubScopeId subScopeId) {
return fieldNumberTree.hasChild(subScopeId)
? FieldScopeResult.INCLUDED_NONRECURSIVELY
: FieldScopeResult.EXCLUDED_RECURSIVELY;
}
@Override
final FieldScopeLogic subScopeImpl(Descriptor rootDescriptor, SubScopeId subScopeId) {
return newPartialScopeLogic(fieldNumberTree.child(subScopeId));
}
private static PartialScopeLogic newPartialScopeLogic(FieldNumberTree fieldNumberTree) {
return fieldNumberTree.isEmpty() ? EMPTY : new PartialScopeLogic(fieldNumberTree);
}
}
private static final | PartialScopeLogic |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_33.java | {
"start": 970,
"end": 3657
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select COUNT() AS count,\n" +
"DATE_FORMAT(DATE(reg_time), '%Y-%m-%d') AS date,\n" +
"(HOUR(reg_time) DIV 2) as intervalTime\n" +
"FROM USER_RECOMMEND_INFO WHERE 1=1\n" +
"and reg_time >=\"2016-12-01 00:00:00\"\n" +
"and reg_time <='2016-12-01 23:59:59'\n" +
"group by DATE(reg_time),HOUR(reg_time) DIV 2";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
// assertEquals(1, visitor.getTables().size());
// assertEquals(1, visitor.getColumns().size());
// assertEquals(0, visitor.getConditions().size());
// assertEquals(0, visitor.getOrderByColumns().size());
{
String output = SQLUtils.toMySqlString(stmt);
assertEquals("SELECT COUNT() AS count, DATE_FORMAT(DATE(reg_time), '%Y-%m-%d') AS date\n" +
"\t, (HOUR(reg_time) DIV 2) AS intervalTime\n" +
"FROM USER_RECOMMEND_INFO\n" +
"WHERE 1 = 1\n" +
"\tAND reg_time >= '2016-12-01 00:00:00'\n" +
"\tAND reg_time <= '2016-12-01 23:59:59'\n" +
"GROUP BY DATE(reg_time), HOUR(reg_time) DIV 2", //
output);
}
{
String output = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("select count() as count, DATE_FORMAT(DATE(reg_time), '%Y-%m-%d') as date\n" +
"\t, (HOUR(reg_time) div 2) as intervalTime\n" +
"from USER_RECOMMEND_INFO\n" +
"where 1 = 1\n" +
"\tand reg_time >= '2016-12-01 00:00:00'\n" +
"\tand reg_time <= '2016-12-01 23:59:59'\n" +
"group by DATE(reg_time), HOUR(reg_time) div 2", //
output);
}
}
}
| MySqlSelectTest_33 |
java | google__guice | core/test/com/google/inject/spi/ElementSourceTest.java | {
"start": 538,
"end": 1430
} | class ____ extends TestCase {
private static final StackTraceElement BINDER_INSTALL =
new StackTraceElement(
"com.google.inject.spi.Elements$RecordingBinder",
"install",
"Unknown Source",
234 /* line number*/);
public void testGetCallStack_IntegrationTest() throws Exception {
List<Element> elements = Elements.getElements(new A());
for (Element element : elements) {
if (element instanceof Binding) {
Binding<?> binding = (Binding<?>) element;
Class<? extends Annotation> annotationType = binding.getKey().getAnnotationType();
if (annotationType != null && annotationType.equals(SampleAnnotation.class)) {
ElementSource elementSource = (ElementSource) binding.getSource();
List<String> moduleClassNames = elementSource.getModuleClassNames();
// Check module | ElementSourceTest |
java | bumptech__glide | annotation/compiler/src/main/java/com/bumptech/glide/annotation/compiler/IndexerGenerator.java | {
"start": 1641,
"end": 4683
} | class ____ {
private static final String INDEXER_NAME_PREFIX = "GlideIndexer_";
private static final int MAXIMUM_FILE_NAME_LENGTH = 255;
private final ProcessorUtil processorUtil;
IndexerGenerator(ProcessorUtil processorUtil) {
this.processorUtil = processorUtil;
}
TypeSpec generate(List<TypeElement> types) {
List<TypeElement> modules = new ArrayList<>();
List<TypeElement> extensions = new ArrayList<>();
for (TypeElement element : types) {
if (processorUtil.isExtension(element)) {
extensions.add(element);
} else if (processorUtil.isLibraryGlideModule(element)) {
modules.add(element);
} else {
throw new IllegalArgumentException("Unrecognized type: " + element);
}
}
if (!modules.isEmpty() && !extensions.isEmpty()) {
throw new IllegalArgumentException(
"Given both modules and extensions, expected one or the "
+ "other. Modules: "
+ modules
+ " Extensions: "
+ extensions);
}
if (!modules.isEmpty()) {
return generate(types, GlideModule.class);
} else {
return generate(types, GlideExtension.class);
}
}
private TypeSpec generate(
List<TypeElement> libraryModules, Class<? extends Annotation> annotation) {
AnnotationSpec.Builder annotationBuilder = AnnotationSpec.builder(Index.class);
String value = getAnnotationValue(annotation);
for (TypeElement childModule : libraryModules) {
annotationBuilder.addMember(value, "$S", ClassName.get(childModule).toString());
}
StringBuilder indexerNameBuilder =
new StringBuilder(INDEXER_NAME_PREFIX + annotation.getSimpleName() + "_");
for (TypeElement element : libraryModules) {
indexerNameBuilder.append(element.getQualifiedName().toString().replace(".", "_"));
indexerNameBuilder.append("_");
}
indexerNameBuilder =
new StringBuilder(indexerNameBuilder.substring(0, indexerNameBuilder.length() - 1));
String indexerName = indexerNameBuilder.toString();
// If the indexer name has too many packages/modules, it can exceed the file name length
// allowed by the file system, which can break compilation. To avoid that, fall back to a
// deterministic UUID.
if (indexerName.length() >= (MAXIMUM_FILE_NAME_LENGTH - INDEXER_NAME_PREFIX.length())) {
indexerName =
INDEXER_NAME_PREFIX
+ UUID.nameUUIDFromBytes(indexerName.getBytes()).toString().replace("-", "_");
}
return TypeSpec.classBuilder(indexerName)
.addAnnotation(annotationBuilder.build())
.addModifiers(Modifier.PUBLIC)
.build();
}
private static String getAnnotationValue(Class<? extends Annotation> annotation) {
if (annotation == GlideModule.class) {
return "modules";
} else if (annotation == GlideExtension.class) {
return "extensions";
} else {
throw new IllegalArgumentException("Unrecognized annotation: " + annotation);
}
}
}
| IndexerGenerator |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/logging/StandardStackTracePrinter.java | {
"start": 14538,
"end": 15297
} | class ____ {
private static final String ELLIPSIS = "...";
private final Appendable out;
private int remaining;
Output(Appendable out) {
this.out = out;
this.remaining = StandardStackTracePrinter.this.maximumLength - ELLIPSIS.length();
}
void println(String indent, String string) throws IOException {
if (this.remaining > 0) {
String line = indent + string + StandardStackTracePrinter.this.lineSeparator;
if (line.length() > this.remaining) {
line = line.substring(0, this.remaining) + ELLIPSIS;
}
this.out.append(line);
this.remaining -= line.length();
}
}
}
/**
* Holds the stacktrace for a specific throwable and caches things that are expensive
* to calculate.
*/
private static final | Output |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/domain/misc/RichType.java | {
"start": 797,
"end": 1671
} | class ____ {
private RichType richType;
// Required for test
private String richField;
private String richProperty;
private Map richMap = new HashMap<>();
private List richList = new ArrayList<>() {
private static final long serialVersionUID = 1L;
{
add("bar");
}
};
public RichType getRichType() {
return richType;
}
public void setRichType(RichType richType) {
this.richType = richType;
}
public String getRichProperty() {
return richProperty;
}
public void setRichProperty(String richProperty) {
this.richProperty = richProperty;
}
public List getRichList() {
return richList;
}
public void setRichList(List richList) {
this.richList = richList;
}
public Map getRichMap() {
return richMap;
}
public void setRichMap(Map richMap) {
this.richMap = richMap;
}
}
| RichType |
java | elastic__elasticsearch | x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java | {
"start": 2357,
"end": 3404
} | class ____ extends Plugin implements RepositoryPlugin {
public static final String FAKE_VERSIONS_TYPE = "fakeversionsrepo";
@Override
public Map<String, Repository.Factory> getRepositories(
Environment env,
NamedXContentRegistry namedXContentRegistry,
ClusterService clusterService,
BigArrays bigArrays,
RecoverySettings recoverySettings,
RepositoriesMetrics repositoriesMetrics,
SnapshotMetrics snapshotMetrics
) {
return Map.of(
FAKE_VERSIONS_TYPE,
(projectId, metadata) -> new FakeVersionsRepo(
projectId,
metadata,
env,
namedXContentRegistry,
clusterService,
bigArrays,
recoverySettings
)
);
}
// fakes an old index version format to activate license checks
private static | TestRepositoryPlugin |
java | google__guice | core/src/com/google/inject/Binder.java | {
"start": 21000,
"end": 22083
} | class ____ bound using {@link LinkedBindingBuilder#toConstructor}, Guice will still
* inject that constructor regardless of annotations.
*
* @since 4.0
*/
void requireAtInjectOnConstructors();
/**
* Requires that Guice finds an exactly matching binding annotation. This disables the error-prone
* feature in Guice where it can substitute a binding for <code>{@literal @}Named Foo</code> when
* attempting to inject <code>{@literal @}Named("foo") Foo</code>.
*
* @since 4.0
*/
void requireExactBindingAnnotations();
/**
* Adds a scanner that will look in all installed modules for annotations the scanner can parse,
* and binds them like {@literal @}Provides methods. Scanners apply to all modules installed in
* the injector. Scanners installed in child injectors or private modules do not impact modules in
* siblings or parents, however scanners installed in parents do apply to all child injectors and
* private modules.
*
* @since 4.0
*/
void scanModulesForAnnotatedMethods(ModuleAnnotatedMethodScanner scanner);
}
| is |
java | elastic__elasticsearch | qa/packaging/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java | {
"start": 1466,
"end": 10962
} | class ____ extends PackagingTestCase {
private static final String DEFAULT_ID = "elasticsearch-service-x64";
private static final String DEFAULT_DISPLAY_NAME = "Elasticsearch " + FileUtils.getCurrentVersion() + " (elasticsearch-service-x64)";
private static String serviceScript;
@BeforeClass
public static void ensureWindows() {
assumeTrue(Platforms.WINDOWS);
assumeTrue(distribution().hasJdk);
}
@After
public void uninstallService() {
sh.runIgnoreExitCode(deleteCommand(DEFAULT_ID));
}
private static String startCommand(String serviceId) {
return "\"sc.exe start " + serviceId + "\"";
}
private static String stopCommand(String serviceId) {
return "\"sc.exe stop " + serviceId + "\"";
}
private static String deleteCommand(String serviceId) {
return "\"sc.exe delete " + serviceId + "\"";
}
private void assertService(String id, String status) {
Result result = sh.run("Get-Service " + id + " | Format-List -Property Name, Status, DisplayName");
assertThat(result.stdout(), containsString("Name : " + id));
assertThat(result.stdout(), containsString("Status : " + status));
}
private void waitForStop(String id, Duration timeout) {
var stopped = false;
var start = System.currentTimeMillis();
while (stopped == false) {
Result result = sh.run("(Get-Service " + id + " ).\"Status\"");
if (result.exitCode() != 0) {
logger.warn("Cannot get status for {}: stdout:[{}], stderr:[{}]", id, result.stdout(), result.stderr());
break;
}
stopped = "Stopped".equalsIgnoreCase(result.stdout());
Duration elapsed = Duration.ofMillis(System.currentTimeMillis() - start);
if (elapsed.compareTo(timeout) > 0) {
logger.warn("Timeout waiting for stop {}: stdout:[{}], stderr:[{}]", id, result.stdout(), result.stderr());
break;
}
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
break;
}
}
}
// runs the service command, dumping all log files on failure
private Result assertCommand(String script) {
Result result = sh.runIgnoreExitCode(script);
assertExit(result, script, 0);
return result;
}
private Result assertFailure(String script, int exitCode) {
Result result = sh.runIgnoreExitCode(script);
assertExit(result, script, exitCode);
return result;
}
@Override
protected void dumpDebug() {
super.dumpDebug();
dumpServiceLogs();
}
private void dumpServiceLogs() {
logger.warn("\n");
try (var logsDir = Files.list(installation.logs)) {
for (Path logFile : logsDir.toList()) {
String filename = logFile.getFileName().toString();
if (filename.startsWith("elasticsearch-service-x64")) {
logger.warn(filename + "\n" + FileUtils.slurp(logFile));
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private void assertExit(Result result, String script, int exitCode) {
if (result.exitCode() != exitCode) {
logger.error("---- Unexpected exit code (expected " + exitCode + ", got " + result.exitCode() + ") for script: " + script);
logger.error(result);
logger.error("Dumping log files\n");
dumpDebug();
fail();
} else {
logger.info("\nscript: " + script + "\nstdout: " + result.stdout() + "\nstderr: " + result.stderr());
}
}
public void test10InstallArchive() throws Exception {
installation = installArchive(sh, distribution());
verifyArchiveInstallation(installation, distribution());
setFileSuperuser("test_superuser", "test_superuser_password");
serviceScript = installation.bin("elasticsearch-service.bat").toString();
}
public void test12InstallService() {
sh.run(serviceScript + " install");
assertService(DEFAULT_ID, "Stopped");
sh.run(deleteCommand(DEFAULT_ID));
}
public void test15RemoveNotInstalled() {
Result result = assertFailure(deleteCommand(DEFAULT_ID), 1);
assertThat(result.stdout(), containsString("The specified service does not exist as an installed service"));
}
public void test16InstallSpecialCharactersInJdkPath() throws IOException {
assumeTrue("Only run this test when we know where the JDK is.", distribution().hasJdk);
final Path relocatedJdk = installation.bundledJdk.getParent().resolve("a (special) jdk");
sh.getEnv().put("ES_JAVA_HOME", relocatedJdk.toString());
try {
mv(installation.bundledJdk, relocatedJdk);
Result result = sh.run(serviceScript + " install");
assertThat(result.stdout(), containsString("The service 'elasticsearch-service-x64' has been installed"));
} finally {
sh.runIgnoreExitCode(deleteCommand(DEFAULT_ID));
mv(relocatedJdk, installation.bundledJdk);
}
}
public void test20CustomizeServiceId() {
String serviceId = "my-es-service";
try {
sh.getEnv().put("SERVICE_ID", serviceId);
sh.run(serviceScript + " install");
assertService(serviceId, "Stopped");
} finally {
sh.run(deleteCommand(serviceId));
}
}
public void test21CustomizeServiceDisplayName() {
String displayName = "my es service display name";
sh.getEnv().put("SERVICE_DISPLAY_NAME", displayName);
sh.run(serviceScript + " install");
assertService(DEFAULT_ID, "Stopped");
sh.run(deleteCommand(DEFAULT_ID));
}
// NOTE: service description is not attainable through any powershell api, so checking it is not possible...
public void assertStartedAndStop() throws Exception {
ServerUtils.waitForElasticsearch(installation);
runElasticsearchTests();
assertCommand(stopCommand(DEFAULT_ID));
waitForStop(DEFAULT_ID, Duration.ofMinutes(1));
assertService(DEFAULT_ID, "Stopped");
// the process is stopped async, and can become a zombie process, so we poll for the process actually being gone
assertCommand(
"$p = Get-Service -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;"
+ "$i = 0;"
+ "do {"
+ " $p = Get-Process -Name \"elasticsearch-service-x64\" -ErrorAction SilentlyContinue;"
+ " echo \"$p\";"
+ " if ($p -eq $Null) {"
+ " Write-Host \"exited after $i seconds\";"
+ " exit 0;"
+ " }"
+ " Start-Sleep -Seconds 1;"
+ " $i += 1;"
+ "} while ($i -lt 300);"
+ "exit 9;"
);
}
public void test30StartStop() throws Exception {
sh.run(serviceScript + " install");
assertCommand(startCommand(DEFAULT_ID));
assertStartedAndStop();
}
public void test31StartNotInstalled() throws IOException {
Result result = sh.runIgnoreExitCode(startCommand(DEFAULT_ID));
assertThat(result.stderr(), result.exitCode(), equalTo(1));
dumpServiceLogs();
assertThat(result.stdout(), containsString("The specified service does not exist as an installed service"));
}
public void test32StopNotStarted() throws IOException {
sh.run(serviceScript + " install");
Result result = sh.runIgnoreExitCode(stopCommand(DEFAULT_ID));
assertThat(result.stdout(), containsString("The service has not been started"));
}
public void test33JavaChanged() throws Exception {
final Path alternateJdk = installation.bundledJdk.getParent().resolve("jdk.copy");
try {
copyDirectory(installation.bundledJdk, alternateJdk);
sh.getEnv().put("ES_JAVA_HOME", alternateJdk.toString());
assertCommand(serviceScript + " install");
sh.getEnv().remove("ES_JAVA_HOME");
assertCommand(startCommand(DEFAULT_ID));
assertStartedAndStop();
} finally {
FileUtils.rm(alternateJdk);
}
}
public void test80JavaOptsInEnvVar() throws Exception {
sh.getEnv().put("ES_JAVA_OPTS", "-Xmx2g -Xms2g");
sh.run(serviceScript + " install");
assertCommand(startCommand(DEFAULT_ID));
assertStartedAndStop();
sh.getEnv().remove("ES_JAVA_OPTS");
}
public void test81JavaOptsInJvmOptions() throws Exception {
withCustomConfig(tempConf -> {
append(tempConf.resolve("jvm.options"), "-Xmx2g" + System.lineSeparator());
append(tempConf.resolve("jvm.options"), "-Xms2g" + System.lineSeparator());
sh.run(serviceScript + " install");
assertCommand(startCommand(DEFAULT_ID));
assertStartedAndStop();
});
}
// TODO:
// custom SERVICE_USERNAME/SERVICE_PASSWORD
// custom SERVICE_LOG_DIR
// custom LOG_OPTS (looks like it currently conflicts with setting custom log dir)
// install and run java opts Xmx/s (each data size type)
}
| WindowsServiceTests |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/OrderBy.java | {
"start": 139,
"end": 392
} | class ____ {
String fieldName;
boolean descending;
boolean ignoreCase;
public OrderBy(String fieldName, boolean descending, boolean ignoreCase) {
this.fieldName = fieldName;
this.descending = descending;
this.ignoreCase = ignoreCase;
}
}
| OrderBy |
java | quarkusio__quarkus | extensions/spring-cache/deployment/src/test/java/io/quarkus/cache/test/runtime/UnsupportedAnnotationValueTest.java | {
"start": 419,
"end": 901
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClass(CachedService.class))
.setExpectedException(IllegalArgumentException.class);
@Test
public void testApplicationShouldNotStart() {
fail("Application should not start when an unsupported annotation value is set");
}
@Singleton
static | UnsupportedAnnotationValueTest |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/PartitionReader.java | {
"start": 1598,
"end": 2304
} | interface ____<T> extends Closeable {
/**
* Proceed to next record, returns false if there is no more records.
*
* @throws IOException if failure happens during disk/network IO like reading files.
*/
boolean next() throws IOException;
/**
* Return the current record. This method should return same value until `next` is called.
*/
T get();
/**
* Returns an array of custom task metrics. By default it returns empty array. Note that it is
* not recommended to put heavy logic in this method as it may affect reading performance.
*/
default CustomTaskMetric[] currentMetricsValues() {
CustomTaskMetric[] NO_METRICS = {};
return NO_METRICS;
}
}
| PartitionReader |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/type/java/LocaleJavaTypeDescriptorTest.java | {
"start": 559,
"end": 4329
} | class ____ extends AbstractDescriptorTest<Locale> {
final Locale original = toLocale( "de", "DE", null, null );
final Locale copy = toLocale( "de", "DE", null, null );
final Locale different = toLocale( "de", null, null, null );
public LocaleJavaTypeDescriptorTest() {
super( LocaleJavaType.INSTANCE );
}
@Override
protected Data<Locale> getTestData() {
return new Data<>( original, copy, different );
}
@Override
protected boolean shouldBeMutable() {
return false;
}
@Override
protected boolean isIdentityDifferentFromEquality() {
return false;
}
@Test
public void testConversionFromString() {
assertLocaleString( toLocale( "de", null, null, null ), "de", "de" );
assertLocaleString( toLocale( "de", "DE", null, null ), "de_DE", "de-DE" );
assertLocaleString( toLocale( null, "DE", null, null ), "_DE", "und-DE" );
assertLocaleString( toLocale( null, "DE", "ch123", null ), "_DE_ch123", "und-DE-ch123" );
assertLocaleString( toLocale( "de", null, "ch123", null ), "de__ch123", "de-ch123" );
assertLocaleString( toLocale( "de", "DE", "ch123", null ), "de_DE_ch123", "de-DE-ch123" );
assertLocaleString( toLocale( "zh", "HK", null, "Hant" ), "zh_HK_#Hant", "zh-Hant-HK" );
assertLocaleString( toLocale( "ja", null, null, null, "u-nu-japanese" ), "ja__#u-nu-japanese", "ja-u-nu-japanese" );
assertLocaleString( toLocale( "ja", null, null, null, "u-nu-japanese", "x-linux" ), "ja__#u-nu-japanese-x-linux", "ja-u-nu-japanese-x-linux" );
assertLocaleString( toLocale( "ja", "JP", null, null, "u-nu-japanese" ), "ja_JP_#u-nu-japanese", "ja-JP-u-nu-japanese" );
assertLocaleString( toLocale( "ja", "JP", null, null, "u-nu-japanese", "x-linux" ), "ja_JP_#u-nu-japanese-x-linux", "ja-JP-u-nu-japanese-x-linux" );
assertLocaleString( toLocale( "ja", "JP", null, "Jpan", "u-nu-japanese" ), "ja_JP_#Jpan_u-nu-japanese", "ja-Jpan-JP-u-nu-japanese" );
assertLocaleString( toLocale( "ja", "JP", null, "Jpan", "u-nu-japanese", "x-linux" ), "ja_JP_#Jpan_u-nu-japanese-x-linux", "ja-Jpan-JP-u-nu-japanese-x-linux" );
// Note that these Locale objects make no sense, since Locale#toString requires at least a language or region
// to produce a non-empty string, but we test parsing that anyway, especially since the language tag now produces a value
assertLocaleString( toLocale( null, null, "ch123", null ), "__ch123", "und-ch123" );
assertLocaleString( toLocale( "", "", "", null ), "", "und" );
assertLocaleString( toLocale( null, null, null, "Hant" ), "___#Hant", "und-Hant" );
assertLocaleString( Locale.ROOT, "", "und" );
}
private void assertLocaleString(Locale expectedLocale, String string, String languageTag) {
assertEquals( expectedLocale, LocaleJavaType.INSTANCE.fromString( string ) );
assertEquals( expectedLocale, LocaleJavaType.INSTANCE.fromString( languageTag ) );
assertEquals( expectedLocale.toLanguageTag(), languageTag );
if ( !isEmpty( expectedLocale.getLanguage() ) || !isEmpty( expectedLocale.getCountry() ) ) {
assertEquals( expectedLocale.toString(), string );
}
}
private static Locale toLocale(String lang, String region, String variant, String script, String... extensions) {
final Locale.Builder builder = new Locale.Builder();
if ( StringHelper.isNotEmpty( lang ) ) {
builder.setLanguage( lang );
}
if ( StringHelper.isNotEmpty( region ) ) {
builder.setRegion( region );
}
if ( StringHelper.isNotEmpty( variant ) ) {
builder.setVariant( variant );
}
if ( StringHelper.isNotEmpty( script ) ) {
builder.setScript( script );
}
for ( String extension : extensions ) {
assert extension.charAt( 1 ) == '-';
builder.setExtension( extension.charAt( 0 ), extension.substring( 2 ) );
}
return builder.build();
}
}
| LocaleJavaTypeDescriptorTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.