language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | integration-tests/micrometer-prometheus/src/main/java/io/quarkus/it/micrometer/prometheus/SubResource.java | {
"start": 139,
"end": 412
} | class ____ {
private final String value;
public SubResource(String value) {
this.value = value;
}
@GET
@Path("/{subParam}")
public String get(@PathParam("subParam") String subParam) {
return value + ":" + subParam;
}
}
| SubResource |
java | apache__camel | components/camel-as2/camel-as2-component/src/test/java/org/apache/camel/component/as2/AbstractAS2ITSupport.java | {
"start": 1190,
"end": 1280
} | class ____ AS2 Integration tests generated by Camel API component maven plugin.
*/
public | for |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/bean/BeanAsArrayBuilderDeserializer.java | {
"start": 389,
"end": 14224
} | class ____
extends BeanDeserializerBase
{
/**
* Deserializer we delegate operations that we cannot handle.
*/
final protected BeanDeserializerBase _delegate;
/**
* Properties in order expected to be found in JSON array.
*/
final protected SettableBeanProperty[] _orderedProperties;
final protected AnnotatedMethod _buildMethod;
/**
* Type that the builder will produce, target type; as opposed to
* `handledType()` which refers to Builder class.
*/
protected final JavaType _targetType;
/*
/**********************************************************************
/* Life-cycle, construction, initialization
/**********************************************************************
*/
/**
* Main constructor used both for creating new instances (by
* {@link BeanDeserializer#asArrayDeserializer}) and for
* creating copies with different delegate.
*/
public BeanAsArrayBuilderDeserializer(BeanDeserializerBase delegate,
JavaType targetType,
SettableBeanProperty[] ordered,
AnnotatedMethod buildMethod)
{
super(delegate);
_delegate = delegate;
_targetType = targetType;
_orderedProperties = ordered;
_buildMethod = buildMethod;
}
@Override
public ValueDeserializer<Object> unwrappingDeserializer(DeserializationContext ctxt,
NameTransformer unwrapper)
{
// We can't do much about this; could either replace _delegate with unwrapping instance,
// or just replace this one. Latter seems more sensible.
return _delegate.unwrappingDeserializer(ctxt, unwrapper);
}
@Override
public BeanDeserializerBase withObjectIdReader(ObjectIdReader oir) {
return new BeanAsArrayBuilderDeserializer(_delegate.withObjectIdReader(oir),
_targetType, _orderedProperties, _buildMethod);
}
@Override
public BeanDeserializerBase withByNameInclusion(Set<String> ignorableProps,
Set<String> includableProps) {
return new BeanAsArrayBuilderDeserializer(_delegate.withByNameInclusion(ignorableProps, includableProps),
_targetType, _orderedProperties, _buildMethod);
}
@Override
public BeanDeserializerBase withIgnoreAllUnknown(boolean ignoreUnknown) {
return new BeanAsArrayBuilderDeserializer(_delegate.withIgnoreAllUnknown(ignoreUnknown),
_targetType, _orderedProperties, _buildMethod);
}
@Override
public BeanDeserializerBase withBeanProperties(BeanPropertyMap props) {
return new BeanAsArrayBuilderDeserializer(_delegate.withBeanProperties(props),
_targetType, _orderedProperties, _buildMethod);
}
@Override
protected BeanDeserializerBase asArrayDeserializer() {
return this;
}
@Override
protected void initNameMatcher(DeserializationContext ctxt) { }
/*
/**********************************************************************
/* Overrides
/**********************************************************************
*/
@Override
public Boolean supportsUpdate(DeserializationConfig config) {
// 26-Oct-2016, tatu: No, we can't merge Builder-based POJOs as of now
return Boolean.FALSE;
}
/*
/**********************************************************************
/* ValueDeserializer implementation
/**********************************************************************
*/
protected final Object finishBuild(DeserializationContext ctxt, Object builder)
throws JacksonException
{
try {
return _buildMethod.getMember().invoke(builder, (Object[]) null);
} catch (Exception e) {
return wrapInstantiationProblem(ctxt, e);
}
}
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
// Let's delegate just in case we got a JSON Object (could error out, alternatively?)
if (!p.isExpectedStartArrayToken()) {
return finishBuild(ctxt, _deserializeFromNonArray(p, ctxt));
}
if (!_vanillaProcessing) {
return finishBuild(ctxt, _deserializeNonVanilla(p, ctxt));
}
Object builder = _valueInstantiator.createUsingDefault(ctxt);
final SettableBeanProperty[] props = _orderedProperties;
int i = 0;
final int propCount = props.length;
while (true) {
if (p.nextToken() == JsonToken.END_ARRAY) {
return finishBuild(ctxt, builder);
}
if (i == propCount) {
break;
}
SettableBeanProperty prop = props[i];
if (prop != null) { // normal case
try {
builder = prop.deserializeSetAndReturn(p, ctxt, builder);
} catch (Exception e) {
throw wrapAndThrow(e, builder, prop.getName(), ctxt);
}
} else { // just skip?
p.skipChildren();
}
++i;
}
// 09-Nov-2016, tatu: Should call `handleUnknownProperty()` in Context, but it'd give
// non-optimal exception message so...
if (!_ignoreAllUnknown && ctxt.isEnabled(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)) {
ctxt.reportInputMismatch(handledType(),
"Unexpected JSON values; expected at most %d properties (in JSON Array)",
propCount);
// fall through
}
// otherwise, skip until end
while (p.nextToken() != JsonToken.END_ARRAY) {
p.skipChildren();
}
return finishBuild(ctxt, builder);
}
@Override
public Object deserialize(JsonParser p, DeserializationContext ctxt, Object value)
throws JacksonException
{
// 26-Oct-2016, tatu: Will fail, but let the original deserializer provide message
return _delegate.deserialize(p, ctxt, value);
}
@Override
public Object deserializeFromObject(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
return _deserializeFromNonArray(p, ctxt);
}
/*
/**********************************************************************
/* Helper methods, non-standard creation
/**********************************************************************
*/
/**
* Alternate deserialization method that has to check many more configuration
* aspects than the "vanilla" processing.
* Note: should NOT resolve builder; caller will do that
*
* @return Builder object in use.
*/
protected Object _deserializeNonVanilla(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
if (_nonStandardCreation) {
return deserializeFromObjectUsingNonDefault(p, ctxt);
}
Object builder = _valueInstantiator.createUsingDefault(ctxt);
if (_injectables != null) {
injectValues(ctxt, builder);
}
Class<?> activeView = _needViewProcesing ? ctxt.getActiveView() : null;
final SettableBeanProperty[] props = _orderedProperties;
int i = 0;
final int propCount = props.length;
while (true) {
if (p.nextToken() == JsonToken.END_ARRAY) {
return builder;
}
if (i == propCount) {
break;
}
SettableBeanProperty prop = props[i];
++i;
if (prop != null) { // normal case
if (activeView == null || prop.visibleInView(activeView)) {
try {
prop.deserializeSetAndReturn(p, ctxt, builder);
} catch (Exception e) {
throw wrapAndThrow(e, builder, prop.getName(), ctxt);
}
continue;
}
}
// otherwise, skip it (view-filtered, no prop etc)
p.skipChildren();
}
// Ok; extra fields? Let's fail, unless ignoring extra props is fine
if (!_ignoreAllUnknown && ctxt.isEnabled(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)) {
ctxt.reportWrongTokenException(this, JsonToken.END_ARRAY,
"Unexpected JSON value(s); expected at most %d properties (in JSON Array)",
propCount);
// will never reach here as exception has been thrown
}
// otherwise, skip until end
while (p.nextToken() != JsonToken.END_ARRAY) {
p.skipChildren();
}
return builder;
}
/**
* Method called to deserialize bean using "property-based creator":
* this means that a non-default constructor or factory method is
* called, and then possibly other setters. The trick is that
* values for creator method need to be buffered, first; and
* due to non-guaranteed ordering possibly some other properties
* as well.
*/
@Override
protected final Object _deserializeUsingPropertyBased(final JsonParser p,
final DeserializationContext ctxt)
throws JacksonException
{
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
final SettableBeanProperty[] props = _orderedProperties;
final int propCount = props.length;
final Class<?> activeView = _needViewProcesing ? ctxt.getActiveView() : null;
int i = 0;
Object builder = null;
for (; p.nextToken() != JsonToken.END_ARRAY; ++i) {
SettableBeanProperty prop = (i < propCount) ? props[i] : null;
if (prop == null) { // we get null if there are extra elements; maybe otherwise too?
p.skipChildren();
continue;
}
if ((activeView != null) && !prop.visibleInView(activeView)) {
p.skipChildren();
continue;
}
// if we have already constructed POJO, things are simple:
if (builder != null) {
try {
builder = prop.deserializeSetAndReturn(p, ctxt, builder);
} catch (Exception e) {
throw wrapAndThrow(e, builder, prop.getName(), ctxt);
}
continue;
}
final String propName = prop.getName();
// if not yet, maybe we got a creator property?
final SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
// Object Id property?
if (buffer.readIdProperty(propName) && creatorProp == null) {
continue;
}
if (creatorProp != null) {
// [databind#1381]: if useInput=FALSE, skip deserialization from input
if (creatorProp.isInjectionOnly()) {
// Skip the input value, will be injected later in PropertyValueBuffer
p.skipChildren();
continue;
}
// Last creator property to set?
if (buffer.assignParameter(creatorProp, creatorProp.deserialize(p, ctxt))) {
try {
builder = creator.build(ctxt, buffer);
} catch (Exception e) {
throw wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
}
// polymorphic?
if (builder.getClass() != _beanType.getRawClass()) {
/* 23-Jul-2012, tatu: Not sure if these could ever be properly
* supported (since ordering of elements may not be guaranteed);
* but make explicitly non-supported for now.
*/
return ctxt.reportBadDefinition(_beanType, String.format(
"Cannot support implicit polymorphic deserialization for POJOs-as-Arrays style: nominal type %s, actual type %s",
ClassUtil.getTypeDescription(_beanType),
builder.getClass().getName()));
}
}
continue;
}
// regular property? needs buffering
buffer.bufferProperty(prop, prop.deserialize(p, ctxt));
}
// In case we didn't quite get all the creator properties, we may have to do this:
if (builder == null) {
try {
builder = creator.build(ctxt, buffer);
} catch (Exception e) {
return wrapInstantiationProblem(ctxt, e);
}
}
return builder;
}
/*
/**********************************************************************
/* Helper methods, error reporting
/**********************************************************************
*/
protected Object _deserializeFromNonArray(JsonParser p, DeserializationContext ctxt)
throws JacksonException
{
// Let's start with failure
return ctxt.handleUnexpectedToken(getValueType(ctxt), p.currentToken(), p,
"Cannot deserialize a POJO (of type %s) from non-Array representation (token: %s): "
+"type/property designed to be serialized as JSON Array",
_beanType.getRawClass().getName(),
p.currentToken());
// in future, may allow use of "standard" POJO serialization as well; if so, do:
//return _delegate.deserialize(p, ctxt);
}
}
| BeanAsArrayBuilderDeserializer |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java | {
"start": 6188,
"end": 69571
} | class ____ {
public static final Set<String> MODELS_STORED_AS_RESOURCE = Collections.singleton("lang_ident_model_1");
private static final ToXContent.Params FOR_INTERNAL_STORAGE_PARAMS = new ToXContent.MapParams(
Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true")
);
private static final String MODEL_RESOURCE_PATH = "/org/elasticsearch/xpack/ml/inference/persistence/";
private static final String MODEL_RESOURCE_FILE_EXT = ".json";
private static final int COMPRESSED_MODEL_CHUNK_SIZE = 16 * 1024 * 1024;
private static final int MAX_NUM_DEFINITION_DOCS = 100;
private static final int MAX_COMPRESSED_MODEL_SIZE = COMPRESSED_MODEL_CHUNK_SIZE * MAX_NUM_DEFINITION_DOCS;
private static final Logger logger = LogManager.getLogger(TrainedModelProvider.class);
private final Client client;
private final NamedXContentRegistry xContentRegistry;
private final TrainedModelCacheMetadataService modelCacheMetadataService;
public TrainedModelProvider(
Client client,
TrainedModelCacheMetadataService modelCacheMetadataService,
NamedXContentRegistry xContentRegistry
) {
this.client = client;
this.modelCacheMetadataService = modelCacheMetadataService;
this.xContentRegistry = xContentRegistry;
}
public void storeTrainedModel(TrainedModelConfig trainedModelConfig, ActionListener<Boolean> listener) {
storeTrainedModel(trainedModelConfig, listener, false);
}
public void storeTrainedModel(TrainedModelConfig trainedModelConfig, ActionListener<Boolean> listener, boolean allowOverwriting) {
if (MODELS_STORED_AS_RESOURCE.contains(trainedModelConfig.getModelId())) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_EXISTS, trainedModelConfig.getModelId())
)
);
return;
}
BytesReference definition;
try {
definition = trainedModelConfig.getCompressedDefinition();
} catch (IOException ex) {
listener.onFailure(
ExceptionsHelper.serverError(
"Unexpected IOException while serializing definition for storage for model [{}]",
ex,
trainedModelConfig.getModelId()
)
);
return;
}
TrainedModelLocation location = trainedModelConfig.getLocation();
if (definition == null && location == null) {
listener.onFailure(
ExceptionsHelper.badRequestException(
"Unable to store [{}]. [{}] or [{}] is required",
trainedModelConfig.getModelId(),
TrainedModelConfig.DEFINITION.getPreferredName(),
TrainedModelConfig.LOCATION.getPreferredName()
)
);
return;
}
if (definition != null) {
storeTrainedModelAndDefinition(trainedModelConfig, listener, allowOverwriting);
} else {
storeTrainedModelConfig(trainedModelConfig, listener, allowOverwriting);
}
}
public void storeTrainedModelConfig(TrainedModelConfig trainedModelConfig, ActionListener<Boolean> listener) {
storeTrainedModelConfig(trainedModelConfig, listener, false);
}
public void storeTrainedModelConfig(TrainedModelConfig trainedModelConfig, ActionListener<Boolean> listener, boolean allowOverwriting) {
if (MODELS_STORED_AS_RESOURCE.contains(trainedModelConfig.getModelId())) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_EXISTS, trainedModelConfig.getModelId())
)
);
return;
}
assert trainedModelConfig.getModelDefinition() == null;
IndexRequest request = createRequest(
trainedModelConfig.getModelId(),
InferenceIndexConstants.LATEST_INDEX_NAME,
trainedModelConfig,
allowOverwriting
);
request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportIndexAction.TYPE,
request,
ActionListener.wrap(indexResponse -> refreshCacheVersion(listener), e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_EXISTS, trainedModelConfig.getModelId())
)
);
} else {
listener.onFailure(
new ElasticsearchStatusException(
Messages.getMessage(Messages.INFERENCE_FAILED_TO_STORE_MODEL, trainedModelConfig.getModelId()),
RestStatus.INTERNAL_SERVER_ERROR,
e
)
);
}
})
);
}
public void storeTrainedModelDefinitionDoc(TrainedModelDefinitionDoc trainedModelDefinitionDoc, ActionListener<Void> listener) {
storeTrainedModelDefinitionDoc(trainedModelDefinitionDoc, InferenceIndexConstants.LATEST_INDEX_NAME, listener);
}
public void storeTrainedModelVocabulary(
String modelId,
VocabularyConfig vocabularyConfig,
Vocabulary vocabulary,
ActionListener<Void> listener
) {
storeTrainedModelVocabulary(modelId, vocabularyConfig, vocabulary, listener, false);
}
public void storeTrainedModelVocabulary(
String modelId,
VocabularyConfig vocabularyConfig,
Vocabulary vocabulary,
ActionListener<Void> listener,
boolean allowOverwriting
) {
if (MODELS_STORED_AS_RESOURCE.contains(modelId)) {
listener.onFailure(new ResourceAlreadyExistsException(Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_EXISTS, modelId)));
return;
}
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportIndexAction.TYPE,
createRequest(VocabularyConfig.docId(modelId), vocabularyConfig.getIndex(), vocabulary, allowOverwriting).setRefreshPolicy(
WriteRequest.RefreshPolicy.IMMEDIATE
),
ActionListener.wrap(indexResponse -> listener.onResponse(null), e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) {
listener.onFailure(
new ResourceAlreadyExistsException(Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_VOCAB_EXISTS, modelId))
);
} else {
listener.onFailure(
new ElasticsearchStatusException(
Messages.getMessage(Messages.INFERENCE_FAILED_TO_STORE_MODEL_VOCAB, modelId),
RestStatus.INTERNAL_SERVER_ERROR,
e
)
);
}
})
);
}
public void storeTrainedModelDefinitionDoc(
TrainedModelDefinitionDoc trainedModelDefinitionDoc,
String index,
ActionListener<Void> listener
) {
storeTrainedModelDefinitionDoc(trainedModelDefinitionDoc, index, listener, false);
}
public void storeTrainedModelDefinitionDoc(
TrainedModelDefinitionDoc trainedModelDefinitionDoc,
String index,
ActionListener<Void> listener,
boolean allowOverwriting
) {
if (MODELS_STORED_AS_RESOURCE.contains(trainedModelDefinitionDoc.getModelId())) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_EXISTS, trainedModelDefinitionDoc.getModelId())
)
);
return;
}
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportIndexAction.TYPE,
createRequest(trainedModelDefinitionDoc.getDocId(), index, trainedModelDefinitionDoc, allowOverwriting),
ActionListener.wrap(indexResponse -> listener.onResponse(null), e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(
Messages.INFERENCE_TRAINED_MODEL_DOC_EXISTS,
trainedModelDefinitionDoc.getModelId(),
trainedModelDefinitionDoc.getDocNum()
)
)
);
} else {
listener.onFailure(
new ElasticsearchStatusException(
Messages.getMessage(
Messages.INFERENCE_FAILED_TO_STORE_MODEL_DEFINITION,
trainedModelDefinitionDoc.getModelId(),
trainedModelDefinitionDoc.getDocNum()
),
RestStatus.INTERNAL_SERVER_ERROR,
e
)
);
}
})
);
}
public void storeTrainedModelMetadata(TrainedModelMetadata trainedModelMetadata, ActionListener<Void> listener) {
storeTrainedModelMetadata(trainedModelMetadata, listener, false);
}
public void storeTrainedModelMetadata(
TrainedModelMetadata trainedModelMetadata,
ActionListener<Void> listener,
boolean allowOverwriting
) {
if (MODELS_STORED_AS_RESOURCE.contains(trainedModelMetadata.getModelId())) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_EXISTS, trainedModelMetadata.getModelId())
)
);
return;
}
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportIndexAction.TYPE,
createRequest(
trainedModelMetadata.getDocId(),
InferenceIndexConstants.LATEST_INDEX_NAME,
trainedModelMetadata,
allowOverwriting
),
ActionListener.wrap(indexResponse -> listener.onResponse(null), e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_METADATA_EXISTS, trainedModelMetadata.getModelId())
)
);
} else {
listener.onFailure(
new ElasticsearchStatusException(
Messages.getMessage(Messages.INFERENCE_FAILED_TO_STORE_MODEL_METADATA, trainedModelMetadata.getModelId()),
RestStatus.INTERNAL_SERVER_ERROR,
e
)
);
}
})
);
}
public void getTrainedModelMetadata(
Collection<String> modelIds,
@Nullable TaskId parentTaskId,
ActionListener<Map<String, TrainedModelMetadata>> listener
) {
SearchRequest searchRequest = client.prepareSearch(InferenceIndexConstants.INDEX_PATTERN)
.setQuery(
QueryBuilders.constantScoreQuery(
QueryBuilders.boolQuery()
.filter(QueryBuilders.termsQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelIds))
.filter(QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelMetadata.NAME))
)
)
.setSize(10_000)
// First find the latest index
.addSort("_index", SortOrder.DESC)
.request();
if (parentTaskId != null) {
searchRequest.setParentTask(parentTaskId);
}
executeAsyncWithOrigin(client, ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> {
if (searchResponse.getHits().getHits().length == 0) {
listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_METADATA_NOT_FOUND, modelIds)));
return;
}
HashMap<String, TrainedModelMetadata> map = new HashMap<>();
for (SearchHit hit : searchResponse.getHits().getHits()) {
String modelId = TrainedModelMetadata.modelId(Objects.requireNonNull(hit.getId()));
map.putIfAbsent(modelId, parseMetadataLenientlyFromSource(hit.getSourceRef(), modelId));
}
listener.onResponse(map);
}, e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_METADATA_NOT_FOUND, modelIds)));
return;
}
listener.onFailure(e);
}));
}
public void refreshInferenceIndex(ActionListener<BroadcastResponse> listener) {
executeAsyncWithOrigin(
client,
ML_ORIGIN,
RefreshAction.INSTANCE,
new RefreshRequest(InferenceIndexConstants.INDEX_PATTERN),
listener
);
}
private void storeTrainedModelAndDefinition(
TrainedModelConfig trainedModelConfig,
ActionListener<Boolean> listener,
boolean allowOverwriting
) {
List<TrainedModelDefinitionDoc> trainedModelDefinitionDocs = new ArrayList<>();
try {
BytesReference compressedDefinition = trainedModelConfig.getCompressedDefinition();
if (compressedDefinition.length() > MAX_COMPRESSED_MODEL_SIZE) {
listener.onFailure(
ExceptionsHelper.badRequestException(
"Unable to store model as compressed definition of size [{}] bytes the limit is [{}] bytes",
compressedDefinition.length(),
MAX_COMPRESSED_MODEL_SIZE
)
);
return;
}
List<BytesReference> chunkedDefinition = chunkDefinitionWithSize(compressedDefinition, COMPRESSED_MODEL_CHUNK_SIZE);
for (int i = 0; i < chunkedDefinition.size(); ++i) {
trainedModelDefinitionDocs.add(
new TrainedModelDefinitionDoc.Builder().setDocNum(i)
.setModelId(trainedModelConfig.getModelId())
.setBinaryData(chunkedDefinition.get(i))
.setCompressionVersion(TrainedModelConfig.CURRENT_DEFINITION_COMPRESSION_VERSION)
.setDefinitionLength(chunkedDefinition.get(i).length())
// If it is the last doc, it is the EOS
.setEos(i == chunkedDefinition.size() - 1)
.build()
);
}
} catch (IOException ex) {
listener.onFailure(
ExceptionsHelper.serverError(
"Unexpected IOException while serializing definition for storage for model [{}]",
ex,
trainedModelConfig.getModelId()
)
);
return;
}
BulkRequestBuilder bulkRequest = client.prepareBulk(InferenceIndexConstants.LATEST_INDEX_NAME)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(createRequest(trainedModelConfig.getModelId(), trainedModelConfig, allowOverwriting));
trainedModelDefinitionDocs.forEach(defDoc -> bulkRequest.add(createRequest(defDoc.getDocId(), defDoc, allowOverwriting)));
ActionListener<Boolean> wrappedListener = ActionListener.wrap(listener::onResponse, e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) {
listener.onFailure(
new ResourceAlreadyExistsException(
Messages.getMessage(Messages.INFERENCE_TRAINED_MODEL_EXISTS, trainedModelConfig.getModelId())
)
);
} else {
listener.onFailure(
new ElasticsearchStatusException(
Messages.getMessage(Messages.INFERENCE_FAILED_TO_STORE_MODEL, trainedModelConfig.getModelId()),
RestStatus.INTERNAL_SERVER_ERROR,
e
)
);
}
});
ActionListener<BulkResponse> bulkResponseActionListener = ActionListener.wrap(r -> {
assert r.getItems().length == trainedModelDefinitionDocs.size() + 1;
if (r.getItems()[0].isFailed()) {
logger.error(
() -> "[" + trainedModelConfig.getModelId() + "] failed to store trained model config for inference",
r.getItems()[0].getFailure().getCause()
);
wrappedListener.onFailure(r.getItems()[0].getFailure().getCause());
return;
}
if (r.hasFailures()) {
Exception firstFailure = Arrays.stream(r.getItems())
.filter(BulkItemResponse::isFailed)
.map(BulkItemResponse::getFailure)
.map(BulkItemResponse.Failure::getCause)
.findFirst()
.orElse(new Exception("unknown failure"));
logger.error(
() -> format("[%s] failed to store trained model definition for inference", trainedModelConfig.getModelId()),
firstFailure
);
wrappedListener.onFailure(firstFailure);
return;
}
refreshCacheVersion(wrappedListener);
}, wrappedListener::onFailure);
executeAsyncWithOrigin(client, ML_ORIGIN, TransportBulkAction.TYPE, bulkRequest.request(), bulkResponseActionListener);
}
/**
* Get the model definition for inference.
*
* The caller should ensure the requested model has an InferenceDefinition,
* some models such as {@code org.elasticsearch.xpack.core.ml.inference.trainedmodel.pytorch.PyTorchModel}
* do not.
*
* @param modelId The model tp get
* @param unsafe when true, the compressed bytes size is not checked and the circuit breaker is solely responsible for
* preventing OOMs
* @param listener The listener
*/
public void getTrainedModelForInference(final String modelId, boolean unsafe, final ActionListener<InferenceDefinition> listener) {
// TODO Change this when we get more than just langIdent stored
if (MODELS_STORED_AS_RESOURCE.contains(modelId)) {
try {
TrainedModelConfig config = loadModelFromResource(modelId, false).build().ensureParsedDefinitionUnsafe(xContentRegistry);
assert config.getModelDefinition().getTrainedModel() instanceof LangIdentNeuralNetwork;
assert config.getModelType() == TrainedModelType.LANG_IDENT;
listener.onResponse(
InferenceDefinition.builder()
.setPreProcessors(config.getModelDefinition().getPreProcessors())
.setTrainedModel((LangIdentNeuralNetwork) config.getModelDefinition().getTrainedModel())
.build()
);
return;
} catch (ElasticsearchException | IOException ex) {
listener.onFailure(ex);
return;
}
}
List<TrainedModelDefinitionDoc> docs = new ArrayList<>();
ChunkedTrainedModelRestorer modelRestorer = new ChunkedTrainedModelRestorer(
modelId,
client,
client.threadPool().executor(MachineLearning.UTILITY_THREAD_POOL_NAME),
xContentRegistry
);
// TODO how could we stream in the model definition WHILE parsing it?
// This would reduce the overall memory usage as we won't have to load the whole compressed string
// XContentParser supports streams.
modelRestorer.restoreModelDefinition(docs::add, success -> {
try {
BytesReference compressedData = getDefinitionFromDocs(docs, modelId);
InferenceDefinition inferenceDefinition = unsafe
? InferenceToXContentCompressor.inflateUnsafe(compressedData, InferenceDefinition::fromXContent, xContentRegistry)
: InferenceToXContentCompressor.inflate(compressedData, InferenceDefinition::fromXContent, xContentRegistry);
listener.onResponse(inferenceDefinition);
} catch (Exception e) {
listener.onFailure(e);
}
}, e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)));
}
listener.onFailure(e);
});
}
public void getTrainedModel(
final String modelId,
final GetTrainedModelsAction.Includes includes,
@Nullable TaskId parentTaskId,
final ActionListener<TrainedModelConfig> finalListener
) {
getTrainedModel(modelId, Collections.emptySet(), includes, parentTaskId, finalListener);
}
public void getTrainedModel(
final String modelId,
final Set<String> modelAliases,
final GetTrainedModelsAction.Includes includes,
@Nullable TaskId parentTaskId,
final ActionListener<TrainedModelConfig> finalListener
) {
if (MODELS_STORED_AS_RESOURCE.contains(modelId)) {
try {
finalListener.onResponse(loadModelFromResource(modelId, includes.isIncludeModelDefinition() == false).build());
return;
} catch (ElasticsearchException ex) {
finalListener.onFailure(ex);
return;
}
}
ActionListener<TrainedModelConfig.Builder> getTrainedModelListener = ActionListener.wrap(modelBuilder -> {
modelBuilder.setModelAliases(modelAliases);
if ((includes.isIncludeFeatureImportanceBaseline()
|| includes.isIncludeTotalFeatureImportance()
|| includes.isIncludeHyperparameters()) == false) {
finalListener.onResponse(modelBuilder.build());
return;
}
this.getTrainedModelMetadata(Collections.singletonList(modelId), parentTaskId, ActionListener.wrap(metadata -> {
TrainedModelMetadata modelMetadata = metadata.get(modelId);
if (modelMetadata != null) {
if (includes.isIncludeTotalFeatureImportance()) {
modelBuilder.setFeatureImportance(modelMetadata.getTotalFeatureImportances());
}
if (includes.isIncludeFeatureImportanceBaseline()) {
modelBuilder.setBaselineFeatureImportance(modelMetadata.getFeatureImportanceBaselines());
}
if (includes.isIncludeHyperparameters()) {
modelBuilder.setHyperparameters(modelMetadata.getHyperparameters());
}
}
finalListener.onResponse(modelBuilder.build());
}, failure -> {
// total feature importance is not necessary for a model to be valid
// we shouldn't fail if it is not found
if (ExceptionsHelper.unwrapCause(failure) instanceof ResourceNotFoundException) {
finalListener.onResponse(modelBuilder.build());
return;
}
finalListener.onFailure(failure);
}));
}, finalListener::onFailure);
QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds(modelId));
SearchRequest trainedModelConfigSearch = client.prepareSearch(InferenceIndexConstants.INDEX_PATTERN)
.setQuery(queryBuilder)
// use sort to get the last
.addSort("_index", SortOrder.DESC)
.setSize(1)
.request();
if (parentTaskId != null) {
trainedModelConfigSearch.setParentTask(parentTaskId);
}
ActionListener<SearchResponse> trainedModelSearchHandler = ActionListener.wrap(modelSearchResponse -> {
TrainedModelConfig.Builder builder;
try {
builder = handleHits(modelSearchResponse.getHits(), modelId, this::parseModelConfigLenientlyFromSource).get(0);
} catch (ResourceNotFoundException ex) {
getTrainedModelListener.onFailure(
new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId))
);
return;
} catch (Exception ex) {
getTrainedModelListener.onFailure(ex);
return;
}
if (includes.isIncludeModelDefinition() == false) {
getTrainedModelListener.onResponse(builder);
return;
}
if (builder.getModelType() == TrainedModelType.PYTORCH && includes.isIncludeModelDefinition()) {
finalListener.onFailure(
ExceptionsHelper.badRequestException(
"[{}] is type [{}] and does not support retrieving the definition",
modelId,
builder.getModelType()
)
);
return;
}
executeAsyncWithOrigin(
client,
ML_ORIGIN,
TransportSearchAction.TYPE,
ChunkedTrainedModelRestorer.buildSearch(
client,
modelId,
InferenceIndexConstants.INDEX_PATTERN,
MAX_NUM_DEFINITION_DOCS,
parentTaskId
),
ActionListener.wrap(definitionSearchResponse -> {
try {
List<TrainedModelDefinitionDoc> docs = handleHits(
definitionSearchResponse.getHits(),
modelId,
(bytes, resourceId) -> ChunkedTrainedModelRestorer.parseModelDefinitionDocLenientlyFromSource(
bytes,
resourceId,
xContentRegistry
)
);
try {
BytesReference compressedData = getDefinitionFromDocs(docs, modelId);
builder.setDefinitionFromBytes(compressedData);
} catch (ElasticsearchException elasticsearchException) {
getTrainedModelListener.onFailure(elasticsearchException);
return;
}
} catch (ResourceNotFoundException ex) {
getTrainedModelListener.onFailure(
new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId))
);
return;
} catch (Exception ex) {
getTrainedModelListener.onFailure(ex);
return;
}
getTrainedModelListener.onResponse(builder);
}, getTrainedModelListener::onFailure)
);
}, getTrainedModelListener::onFailure);
executeAsyncWithOrigin(client, ML_ORIGIN, TransportSearchAction.TYPE, trainedModelConfigSearch, trainedModelSearchHandler);
}
/**
 * Fetches the trained model configs for the given model ids.
 * No model alias information is available on this path, so every id is
 * associated with an empty alias set before delegating to the map-based overload.
 */
public void getTrainedModels(
    Set<String> modelIds,
    GetTrainedModelsAction.Includes includes,
    boolean allowNoResources,
    @Nullable TaskId parentTaskId,
    final ActionListener<List<TrainedModelConfig>> finalListener
) {
    // Map each id to an empty alias set; aliases are only populated by callers that know them.
    Map<String, Set<String>> idsWithoutAliases = modelIds.stream()
        .collect(Collectors.toMap(id -> id, id -> Collections.emptySet()));
    getTrainedModels(idsWithoutAliases, includes, allowNoResources, parentTaskId, finalListener);
}
/**
 * Gets all the provided trained model config objects.
 *
 * NOTE:
 * This performs no wildcard expansion on the ids.
 * It assumes that fewer than 10,000 ids are provided.
 */
public void getTrainedModels(
    Map<String, Set<String>> modelIds,
    GetTrainedModelsAction.Includes includes,
    boolean allowNoResources,
    @Nullable TaskId parentTaskId,
    final ActionListener<List<TrainedModelConfig>> finalListener
) {
    // Exact-id lookup over the inference index; sorted by model id, preferring the newest backing index.
    QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(
        QueryBuilders.idsQuery().addIds(modelIds.keySet().toArray(new String[0]))
    );
    SearchRequest searchRequest = client.prepareSearch(InferenceIndexConstants.INDEX_PATTERN)
        .addSort(TrainedModelConfig.MODEL_ID.getPreferredName(), SortOrder.ASC)
        .addSort("_index", SortOrder.DESC)
        .setQuery(queryBuilder)
        .setSize(modelIds.size())
        .request();
    if (parentTaskId != null) {
        searchRequest.setParentTask(parentTaskId);
    }
    List<TrainedModelConfig.Builder> configs = new ArrayList<>(modelIds.size());
    // Split the requested ids into those stored in the index and those shipped as bundled resources.
    Set<String> modelsInIndex = Sets.difference(modelIds.keySet(), MODELS_STORED_AS_RESOURCE);
    Set<String> modelsAsResource = Sets.intersection(MODELS_STORED_AS_RESOURCE, modelIds.keySet());
    // Resource-backed models are loaded synchronously (definition nulled out).
    for (String modelId : modelsAsResource) {
        try {
            configs.add(loadModelFromResource(modelId, true));
        } catch (ElasticsearchException ex) {
            finalListener.onFailure(ex);
            return;
        }
    }
    // If nothing needs to be fetched from the index, respond immediately (sorted by model id).
    if (modelsInIndex.isEmpty()) {
        finalListener.onResponse(
            configs.stream()
                .map(TrainedModelConfig.Builder::build)
                .sorted(Comparator.comparing(TrainedModelConfig::getModelId))
                .collect(Collectors.toList())
        );
        return;
    }
    // Second stage: once the configs are gathered, optionally enrich them with model metadata
    // (feature importance, baselines, hyperparameters) before responding.
    ActionListener<List<TrainedModelConfig.Builder>> getTrainedModelListener = ActionListener.wrap(modelBuilders -> {
        if ((includes.isIncludeFeatureImportanceBaseline()
            || includes.isIncludeTotalFeatureImportance()
            || includes.isIncludeHyperparameters()) == false) {
            // No metadata requested: attach aliases and respond.
            finalListener.onResponse(
                modelBuilders.stream()
                    .map(b -> b.setModelAliases(modelIds.get(b.getModelId())).build())
                    .sorted(Comparator.comparing(TrainedModelConfig::getModelId))
                    .collect(Collectors.toList())
            );
            return;
        }
        this.getTrainedModelMetadata(
            modelIds.keySet(),
            parentTaskId,
            ActionListener.wrap(metadata -> finalListener.onResponse(modelBuilders.stream().map(builder -> {
                TrainedModelMetadata modelMetadata = metadata.get(builder.getModelId());
                if (modelMetadata != null) {
                    if (includes.isIncludeTotalFeatureImportance()) {
                        builder.setFeatureImportance(modelMetadata.getTotalFeatureImportances());
                    }
                    if (includes.isIncludeFeatureImportanceBaseline()) {
                        builder.setBaselineFeatureImportance(modelMetadata.getFeatureImportanceBaselines());
                    }
                    if (includes.isIncludeHyperparameters()) {
                        builder.setHyperparameters(modelMetadata.getHyperparameters());
                    }
                }
                return builder.setModelAliases(modelIds.get(builder.getModelId())).build();
            }).sorted(Comparator.comparing(TrainedModelConfig::getModelId)).collect(Collectors.toList())), failure -> {
                // total feature importance is not necessary for a model to be valid
                // we shouldn't fail if it is not found
                if (ExceptionsHelper.unwrapCause(failure) instanceof ResourceNotFoundException) {
                    finalListener.onResponse(
                        modelBuilders.stream()
                            .map(TrainedModelConfig.Builder::build)
                            .sorted(Comparator.comparing(TrainedModelConfig::getModelId))
                            .collect(Collectors.toList())
                    );
                    return;
                }
                finalListener.onFailure(failure);
            })
        );
    }, finalListener::onFailure);
    // First stage: parse the index search hits into config builders, de-duplicating across
    // backing indices (the newest index wins because of the _index DESC sort above).
    ActionListener<SearchResponse> configSearchHandler = ActionListener.wrap(searchResponse -> {
        Set<String> observedIds = new HashSet<>(searchResponse.getHits().getHits().length + modelsAsResource.size(), 1.0f);
        observedIds.addAll(modelsAsResource);
        for (SearchHit searchHit : searchResponse.getHits().getHits()) {
            try {
                if (observedIds.contains(searchHit.getId()) == false) {
                    configs.add(parseModelConfigLenientlyFromSource(searchHit.getSourceRef(), searchHit.getId()));
                    observedIds.add(searchHit.getId());
                }
            } catch (IOException ex) {
                getTrainedModelListener.onFailure(ExceptionsHelper.serverError(INFERENCE_FAILED_TO_DESERIALIZE, ex, searchHit.getId()));
                return;
            }
        }
        // We previously expanded the IDs.
        // If the config has gone missing between then and now we should throw if allowNoResources is false
        // Otherwise, treat it as if it was never expanded to begin with.
        Set<String> missingConfigs = Sets.difference(modelIds.keySet(), observedIds);
        if (missingConfigs.isEmpty() == false && allowNoResources == false) {
            getTrainedModelListener.onFailure(
                new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND_MULTIPLE, missingConfigs))
            );
            return;
        }
        // Ensure sorted even with the injection of locally resourced models
        getTrainedModelListener.onResponse(configs);
    }, getTrainedModelListener::onFailure);
    executeAsyncWithOrigin(client, ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, configSearchHandler);
}
/**
 * Deletes the stored docs (config, definition chunks, metadata) and stats for a model,
 * then refreshes the cache version. Fails with a bad-request error for built-in
 * (resource-backed) models and with not-found when nothing was deleted.
 */
public void deleteTrainedModel(String modelId, ActionListener<Boolean> listener) {
    // Models shipped as bundled resources are ML-managed and cannot be deleted.
    if (MODELS_STORED_AS_RESOURCE.contains(modelId)) {
        listener.onFailure(
            ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_CANNOT_DELETE_ML_MANAGED_MODEL, modelId))
        );
        return;
    }
    // Delete every doc carrying this model id from both the inference index and the stats index.
    DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false);
    request.indices(InferenceIndexConstants.INDEX_PATTERN, MlStatsIndex.indexPattern());
    QueryBuilder query = QueryBuilders.termQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelId);
    request.setQuery(query);
    request.setRefresh(true);
    executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, ActionListener.wrap(deleteResponse -> {
        if (deleteResponse.getDeleted() == 0) {
            // Nothing matched: the model does not exist (or was already removed).
            listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId)));
            return;
        }
        refreshCacheVersion(listener);
    }, e -> {
        // NOTE(review): exact-class comparison excludes subclasses of IndexNotFoundException;
        // confirm this should not be an instanceof check on the unwrapped cause.
        if (e.getClass() == IndexNotFoundException.class) {
            listener.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId)));
        } else {
            listener.onFailure(e);
        }
    }));
}
/**
* Returns a Tuple of
* - hit count: the number of matching model Ids
* - Map model id -> aliases: All matched model Ids and
* the list of aliases that reference the model Id
*
* @param idExpression The expression to expand
* @param allowNoResources When wildcard expressions are used allow
* no matches (don't error)
* @param pageParams paging
* @param tags Tags the model must contain
* @param modelAliasMetadata Aliases
* @param parentTaskId Optional parent task Id
* @param previouslyMatchedIds Ids that have already been matched (e.g. deployment Id).
* It is not an error if these Ids are not matched in the query
* @param idsListener The listener
*/
public void expandIds(
    String idExpression,
    boolean allowNoResources,
    PageParams pageParams,
    Set<String> tags,
    ModelAliasMetadata modelAliasMetadata,
    @Nullable TaskId parentTaskId,
    Set<String> previouslyMatchedIds,
    ActionListener<Tuple<Long, Map<String, Set<String>>>> idsListener
) {
    String[] tokens = Strings.tokenizeToStringArray(idExpression, ",");
    // Translate any tokens that are (or wildcard-match) model aliases into their concrete model ids,
    // so the index query below can match on real ids.
    Set<String> expandedIdsFromAliases = new HashSet<>();
    if (Strings.isAllOrWildcard(tokens) == false) {
        for (String token : tokens) {
            if (Regex.isSimpleMatchPattern(token)) {
                for (String modelAlias : modelAliasMetadata.modelAliases().keySet()) {
                    if (Regex.simpleMatch(token, modelAlias)) {
                        expandedIdsFromAliases.add(modelAliasMetadata.getModelId(modelAlias));
                    }
                }
            } else if (modelAliasMetadata.getModelId(token) != null) {
                expandedIdsFromAliases.add(modelAliasMetadata.getModelId(token));
            }
        }
    }
    // Models shipped as bundled resources are matched locally and optionally filtered by tags.
    Set<String> matchedResourceIds = matchedResourceIds(tokens);
    Set<String> foundResourceIds;
    if (tags.isEmpty()) {
        foundResourceIds = matchedResourceIds;
    } else {
        foundResourceIds = new HashSet<>();
        for (String resourceId : matchedResourceIds) {
            // Does the model as a resource have all the tags?
            if (Sets.newHashSet(loadModelFromResource(resourceId, true).build().getTags()).containsAll(tags)) {
                foundResourceIds.add(resourceId);
            }
        }
    }
    expandedIdsFromAliases.addAll(Arrays.asList(tokens));
    // We need to include the translated model alias, and ANY tokens that were not translated
    String[] tokensForQuery = expandedIdsFromAliases.toArray(new String[0]);
    SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().sort(
        SortBuilders.fieldSort(TrainedModelConfig.MODEL_ID.getPreferredName())
            // If there are no resources, there might be no mapping for the id field.
            // This makes sure we don't get an error if that happens.
            .unmappedType("long")
    )
        .query(buildExpandIdsQuery(tokensForQuery, tags))
        // We "buffer" the from and size to take into account models stored as resources.
        // This is so we handle the edge cases when the model that is stored as a resource is at the start/end of
        // a page.
        .from(Math.max(0, pageParams.getFrom() - foundResourceIds.size()))
        .size(Math.min(10_000, pageParams.getSize() + foundResourceIds.size()));
    sourceBuilder.trackTotalHits(true)
        // we only care about the item id's
        .fetchSource(TrainedModelConfig.MODEL_ID.getPreferredName(), null);
    // Tolerate unavailable indices (e.g. the inference index not yet created).
    IndicesOptions indicesOptions = SearchRequest.DEFAULT_INDICES_OPTIONS;
    SearchRequest searchRequest = new SearchRequest(InferenceIndexConstants.INDEX_PATTERN).indicesOptions(
        IndicesOptions.fromOptions(
            true,
            indicesOptions.allowNoIndices(),
            indicesOptions.expandWildcardsOpen(),
            indicesOptions.expandWildcardsClosed(),
            indicesOptions
        )
    ).source(sourceBuilder);
    if (parentTaskId != null) {
        searchRequest.setParentTask(parentTaskId);
    }
    executeAsyncWithOrigin(
        client.threadPool().getThreadContext(),
        ML_ORIGIN,
        searchRequest,
        ActionListener.<SearchResponse>wrap(response -> {
            // Total count includes both indexed models and locally matched resource models.
            long totalHitCount = response.getHits().getTotalHits().value() + foundResourceIds.size();
            Set<String> foundFromDocs = new HashSet<>();
            for (SearchHit hit : response.getHits().getHits()) {
                Map<String, Object> docSource = hit.getSourceAsMap();
                if (docSource == null) {
                    continue;
                }
                Object idValue = docSource.get(TrainedModelConfig.MODEL_ID.getPreferredName());
                if (idValue instanceof String) {
                    foundFromDocs.add(idValue.toString());
                }
            }
            // Merge doc-backed and resource-backed ids, correcting for the from/size buffering above.
            Map<String, Set<String>> allFoundIds = collectIds(pageParams, foundResourceIds, foundFromDocs).stream()
                .collect(Collectors.toMap(Function.identity(), k -> new HashSet<>()));
            // We technically have matched on model tokens and any reversed referenced aliases
            // We may end up with "over matching" on the aliases (matching on an alias that was not provided)
            // But the expanded ID matcher does not care.
            Set<String> matchedTokens = new HashSet<>(allFoundIds.keySet());
            // We should gather ALL model aliases referenced by the given model IDs
            // This way the callers have access to them
            modelAliasMetadata.modelAliases().forEach((alias, modelIdEntry) -> {
                final String modelId = modelIdEntry.getModelId();
                if (allFoundIds.containsKey(modelId)) {
                    allFoundIds.get(modelId).add(alias);
                    matchedTokens.add(alias);
                }
            });
            // Reverse lookup to see what model aliases were matched by their found trained model IDs
            ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(tokens, allowNoResources);
            requiredMatches.filterMatchedIds(matchedTokens);
            requiredMatches.filterMatchedIds(previouslyMatchedIds);
            if (requiredMatches.hasUnmatchedIds()) {
                idsListener.onFailure(ExceptionsHelper.missingTrainedModel(requiredMatches.unmatchedIdsString()));
            } else {
                idsListener.onResponse(Tuple.tuple(totalHitCount, allFoundIds));
            }
        }, idsListener::onFailure),
        client::search
    );
}
/**
 * Collects the aggregated inference stats for each of the given model ids.
 * Waits (briefly) for the stats index to be available, then issues one stats search per
 * model id in a single multi-search. Models whose stats docs are missing are simply
 * omitted from the result.
 */
public void getInferenceStats(String[] modelIds, @Nullable TaskId parentTaskId, ActionListener<List<InferenceStats>> listener) {
    SubscribableListener.<ClusterHealthResponse>newForked((delegate) -> {
        // first wait for the index to be available
        executeAsyncWithOrigin(
            client.threadPool().getThreadContext(),
            ML_ORIGIN,
            new ClusterHealthRequest(new TimeValue(2, TimeUnit.SECONDS), MlStatsIndex.indexPattern()).waitForYellowStatus(),
            delegate,
            client.admin().cluster()::health
        );
    })
        .<List<InferenceStats>>andThen(
            client.threadPool().executor(MachineLearning.UTILITY_THREAD_POOL_NAME),
            client.threadPool().getThreadContext(),
            (delegate, clusterHealthResponse) -> {
                if (clusterHealthResponse.isTimedOut()) {
                    // Continue anyway; the search below will surface any real failure.
                    logger.error(
                        "getInferenceStats Timed out waiting for index [{}] to be available, "
                            + "this will probably cause the request to fail",
                        MlStatsIndex.indexPattern()
                    );
                }
                // One aggregation search per model id, bundled into a single multi-search.
                MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
                Arrays.stream(modelIds).map(TrainedModelProvider::buildStatsSearchRequest).forEach(multiSearchRequest::add);
                if (multiSearchRequest.requests().isEmpty()) {
                    delegate.onResponse(Collections.emptyList());
                    return;
                }
                if (parentTaskId != null) {
                    multiSearchRequest.setParentTask(parentTaskId);
                }
                executeAsyncWithOrigin(
                    client.threadPool().getThreadContext(),
                    ML_ORIGIN,
                    multiSearchRequest,
                    ActionListener.<MultiSearchResponse>wrap(responses -> {
                        List<InferenceStats> allStats = new ArrayList<>(modelIds.length);
                        // Responses are positionally aligned with modelIds; track the index manually.
                        int modelIndex = 0;
                        assert responses.getResponses().length == modelIds.length
                            : "mismatch between search response size and models requested";
                        for (MultiSearchResponse.Item response : responses.getResponses()) {
                            if (response.isFailure()) {
                                // A missing stats index/doc is not an error; just skip this model.
                                if (ExceptionsHelper.unwrapCause(response.getFailure()) instanceof ResourceNotFoundException) {
                                    modelIndex++;
                                    continue;
                                }
                                logger.error(
                                    () -> "[" + Strings.arrayToCommaDelimitedString(modelIds) + "] search failed for models",
                                    response.getFailure()
                                );
                                delegate.onFailure(
                                    ExceptionsHelper.serverError(
                                        "Searching for stats for models [{}] failed",
                                        response.getFailure(),
                                        Strings.arrayToCommaDelimitedString(modelIds)
                                    )
                                );
                                return;
                            }
                            try {
                                InferenceStats inferenceStats = handleMultiNodeStatsResponse(
                                    response.getResponse(),
                                    modelIds[modelIndex++]
                                );
                                if (inferenceStats != null) {
                                    allStats.add(inferenceStats);
                                }
                            } catch (Exception e) {
                                delegate.onFailure(e);
                                return;
                            }
                        }
                        delegate.onResponse(allStats);
                    }, e -> {
                        // Whole multi-search failed with not-found: report no stats rather than an error.
                        Throwable unwrapped = ExceptionsHelper.unwrapCause(e);
                        if (unwrapped instanceof ResourceNotFoundException) {
                            delegate.onResponse(Collections.emptyList());
                            return;
                        }
                        delegate.onFailure((Exception) unwrapped);
                    }),
                    client::multiSearch
                );
            }
        )
        .addListener(
            listener,
            client.threadPool().executor(MachineLearning.UTILITY_THREAD_POOL_NAME),
            client.threadPool().getThreadContext()
        );
}
/**
 * Builds a size-0 aggregation search over the stats index that sums the per-node
 * inference stats docs for a single model (failure/missing/inference/cache-miss counts)
 * and takes the most recent timestamp.
 */
private static SearchRequest buildStatsSearchRequest(String modelId) {
    // Match only inference-stats docs belonging to this model.
    BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery()
        .filter(QueryBuilders.termQuery(InferenceStats.MODEL_ID.getPreferredName(), modelId))
        .filter(QueryBuilders.termQuery(InferenceStats.TYPE.getPreferredName(), InferenceStats.NAME));
    return new SearchRequest(MlStatsIndex.indexPattern()).indicesOptions(IndicesOptions.lenientExpandOpen())
        .allowPartialSearchResults(false)
        .source(
            // size(0): only the aggregations are consumed, not the hits themselves.
            SearchSourceBuilder.searchSource()
                .size(0)
                .aggregation(
                    AggregationBuilders.sum(InferenceStats.FAILURE_COUNT.getPreferredName())
                        .field(InferenceStats.FAILURE_COUNT.getPreferredName())
                )
                .aggregation(
                    AggregationBuilders.sum(InferenceStats.MISSING_ALL_FIELDS_COUNT.getPreferredName())
                        .field(InferenceStats.MISSING_ALL_FIELDS_COUNT.getPreferredName())
                )
                .aggregation(
                    AggregationBuilders.sum(InferenceStats.INFERENCE_COUNT.getPreferredName())
                        .field(InferenceStats.INFERENCE_COUNT.getPreferredName())
                )
                .aggregation(
                    AggregationBuilders.sum(InferenceStats.CACHE_MISS_COUNT.getPreferredName())
                        .field(InferenceStats.CACHE_MISS_COUNT.getPreferredName())
                )
                .aggregation(
                    AggregationBuilders.max(InferenceStats.TIMESTAMP.getPreferredName())
                        .field(InferenceStats.TIMESTAMP.getPreferredName())
                )
                .query(queryBuilder)
        );
}
/**
 * Converts the aggregation response produced by {@code buildStatsSearchRequest} into an
 * {@link InferenceStats}, or returns {@code null} when no stats were previously stored.
 */
private static InferenceStats handleMultiNodeStatsResponse(SearchResponse response, String modelId) {
    if (response.getAggregations() == null) {
        logger.trace(() -> "[" + modelId + "] no previously stored stats found");
        return null;
    }
    Sum failures = response.getAggregations().get(InferenceStats.FAILURE_COUNT.getPreferredName());
    Sum missing = response.getAggregations().get(InferenceStats.MISSING_ALL_FIELDS_COUNT.getPreferredName());
    Sum cacheMiss = response.getAggregations().get(InferenceStats.CACHE_MISS_COUNT.getPreferredName());
    Sum count = response.getAggregations().get(InferenceStats.INFERENCE_COUNT.getPreferredName());
    Max timeStamp = response.getAggregations().get(InferenceStats.TIMESTAMP.getPreferredName());
    // Sum/Max aggregation values are doubles; truncate each back to a long count.
    return new InferenceStats(
        missing == null ? 0L : Double.valueOf(missing.value()).longValue(),
        count == null ? 0L : Double.valueOf(count.value()).longValue(),
        failures == null ? 0L : Double.valueOf(failures.value()).longValue(),
        cacheMiss == null ? 0L : Double.valueOf(cacheMiss.value()).longValue(),
        modelId,
        null,
        // A max over zero docs yields an invalid value; fall back to "now" in that case.
        timeStamp == null || (Numbers.isValidDouble(timeStamp.value()) == false)
            ? Instant.now()
            : Instant.ofEpochMilli(Double.valueOf(timeStamp.value()).longValue())
    );
}
/**
 * Merges ids found in the index with ids of models stored as bundled resources,
 * re-applying the page's from/size after the search buffering done in expandIds.
 *
 * @param pageParams the requested page
 * @param foundFromResources matched resource-backed model ids
 * @param foundFromDocs matched ids from the (buffered) index search
 * @return the ids belonging to the requested page, in natural order when merging occurred
 */
static Set<String> collectIds(PageParams pageParams, Set<String> foundFromResources, Set<String> foundFromDocs) {
    // With no resource models there was no buffering, so the doc ids are already paginated.
    if (foundFromResources.isEmpty()) {
        return foundFromDocs;
    }
    TreeSet<String> combined = new TreeSet<>(foundFromDocs);
    combined.addAll(foundFromResources);
    // Later pages carry extra leading results introduced by the buffered "from"; drop them.
    int trimFromFront = Math.min(foundFromResources.size(), pageParams.getFrom());
    while (trimFromFront-- > 0) {
        combined.pollFirst();
    }
    // Drop trailing overflow until the requested page size is honored.
    while (combined.size() > pageParams.getSize()) {
        combined.pollLast();
    }
    return combined;
}
/**
 * Builds the (non-scoring) query used by expandIds: the id expression filter
 * plus one term filter per required tag.
 */
static QueryBuilder buildExpandIdsQuery(String[] tokens, Collection<String> tags) {
    BoolQueryBuilder idAndTagFilters = QueryBuilders.boolQuery()
        .filter(buildQueryIdExpressionQuery(tokens, TrainedModelConfig.MODEL_ID.getPreferredName()));
    // Every tag must be present on the model, hence filter (AND) rather than should (OR).
    tags.forEach(tag -> idAndTagFilters.filter(QueryBuilders.termQuery(TrainedModelConfig.TAGS.getPreferredName(), tag)));
    return QueryBuilders.constantScoreQuery(idAndTagFilters);
}
/**
 * Loads a built-in model's config from a bundled classpath resource.
 *
 * @param modelId the resource-backed model id (expected to be in MODELS_STORED_AS_RESOURCE)
 * @param nullOutDefinition when true, the (large) model definition is cleared from the builder
 * @return the parsed config builder
 * @throws ResourceNotFoundException if the resource file is missing
 * @throws ElasticsearchException if the resource cannot be parsed
 */
TrainedModelConfig.Builder loadModelFromResource(String modelId, boolean nullOutDefinition) {
    URL resource = getClass().getResource(MODEL_RESOURCE_PATH + modelId + MODEL_RESOURCE_FILE_EXT);
    if (resource == null) {
        // The id claimed to be resource-backed, so a missing file indicates a packaging problem.
        logger.error("[{}] presumed stored as a resource but not found", modelId);
        throw new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId));
    }
    try (
        // Lenient parsing against this provider's registry; deprecations are logged, not fatal.
        XContentParser parser = JsonXContent.jsonXContent.createParser(
            LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG.withRegistry(xContentRegistry),
            getClass().getResourceAsStream(MODEL_RESOURCE_PATH + modelId + MODEL_RESOURCE_FILE_EXT)
        )
    ) {
        TrainedModelConfig.Builder builder = TrainedModelConfig.fromXContent(parser, true);
        if (nullOutDefinition) {
            builder.clearDefinition();
        }
        return builder;
    } catch (IOException ioEx) {
        logger.error(() -> "[" + modelId + "] failed to parse model definition", ioEx);
        throw ExceptionsHelper.serverError(INFERENCE_FAILED_TO_DESERIALIZE, ioEx, modelId);
    }
}
/**
 * Builds the id-expression part of the expand query against {@code resourceIdField}.
 * Always filters to trained-model-config docs; when the expression is not "_all"/"*",
 * additionally requires the id to match one of the exact ids or wildcard patterns.
 */
private static QueryBuilder buildQueryIdExpressionQuery(String[] tokens, String resourceIdField) {
    BoolQueryBuilder docTypeFilter = QueryBuilders.boolQuery()
        .filter(QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelConfig.NAME));
    if (Strings.isAllOrWildcard(tokens)) {
        return docTypeFilter;
    }
    // The expression may be a comma-delimited mix of exact ids and wildcard patterns, e.g. id1,id2*,id3.
    BoolQueryBuilder idMatches = new BoolQueryBuilder();
    List<String> exactIds = new ArrayList<>();
    for (String token : tokens) {
        if (Regex.isSimpleMatchPattern(token)) {
            idMatches.should(QueryBuilders.wildcardQuery(resourceIdField, token));
        } else {
            exactIds.add(token);
        }
    }
    // All exact ids go into a single terms query appended after the wildcard clauses.
    if (exactIds.isEmpty() == false) {
        idMatches.should(QueryBuilders.termsQuery(resourceIdField, exactIds));
    }
    if (idMatches.should().isEmpty() == false) {
        docTypeFilter.filter(idMatches);
    }
    return docTypeFilter;
}
/**
 * Returns the ids of resource-backed (built-in) models matched by the given id tokens.
 * "_all"/"*" matches every resource model; other tokens match exactly or by wildcard.
 */
private static Set<String> matchedResourceIds(String[] tokens) {
    if (Strings.isAllOrWildcard(tokens)) {
        return MODELS_STORED_AS_RESOURCE;
    }
    Set<String> matched = new HashSet<>();
    for (String token : tokens) {
        if (Regex.isSimpleMatchPattern(token) == false) {
            // Plain token: an exact membership check suffices.
            if (MODELS_STORED_AS_RESOURCE.contains(token)) {
                matched.add(token);
            }
            continue;
        }
        // Wildcard token: test it against every resource-backed model id.
        MODELS_STORED_AS_RESOURCE.stream().filter(modelId -> Regex.simpleMatch(token, modelId)).forEach(matched::add);
    }
    return Collections.unmodifiableSet(matched);
}
/**
 * Parses each search hit into a {@code T} using the supplied lenient parser.
 * Only hits from the same backing index as the first hit are parsed, so a result
 * never mixes documents from multiple indices.
 *
 * @throws ResourceNotFoundException if there are no hits at all
 */
private static <T> List<T> handleHits(
    SearchHits hits,
    String resourceId,
    CheckedBiFunction<BytesReference, String, T, Exception> parseLeniently
) throws Exception {
    SearchHit[] hitArray = hits.getHits();
    if (hitArray.length == 0) {
        throw new ResourceNotFoundException(resourceId);
    }
    String firstIndex = hitArray[0].getIndex();
    List<T> parsed = new ArrayList<>(hitArray.length);
    for (SearchHit hit : hitArray) {
        // We don't want to spread across multiple backing indices
        if (firstIndex.equals(hit.getIndex())) {
            parsed.add(parseLeniently.apply(hit.getSourceRef(), resourceId));
        }
    }
    return parsed;
}
/**
 * Reassembles the full compressed model definition from its ordered chunk docs,
 * validating that no chunk is missing or truncated.
 *
 * @param docs the definition chunk docs, assumed ordered by doc number and non-empty
 * @param modelId the model id, used only for error messages
 * @return the concatenated definition bytes
 * @throws ElasticsearchException if the definition is incomplete or truncated
 */
static BytesReference getDefinitionFromDocs(List<TrainedModelDefinitionDoc> docs, String modelId) throws ElasticsearchException {
    // If the user requested the compressed data string, we need access to the underlying bytes.
    // BytesArray gives us that access.
    BytesReference bytes = docs.size() == 1
        ? docs.get(0).getBinaryData()
        : new BytesArray(
            CompositeBytesReference.of(docs.stream().map(TrainedModelDefinitionDoc::getBinaryData).toArray(BytesReference[]::new))
                .toBytesRef()
        );
    // When the first doc records the expected total length, the reassembled bytes must match it exactly.
    if (docs.get(0).getTotalDefinitionLength() != null) {
        if (bytes.length() != docs.get(0).getTotalDefinitionLength()) {
            throw ExceptionsHelper.serverError(Messages.getMessage(Messages.MODEL_DEFINITION_TRUNCATED, modelId));
        }
    }
    TrainedModelDefinitionDoc lastDoc = docs.get(docs.size() - 1);
    // Either we are missing the last doc, or some previous doc
    if (lastDoc.isEos() == false || lastDoc.getDocNum() != docs.size() - 1) {
        throw ExceptionsHelper.serverError(Messages.getMessage(Messages.MODEL_DEFINITION_TRUNCATED, modelId));
    }
    return bytes;
}
/**
 * Splits a model definition into consecutive slices of at most {@code chunkSize} bytes.
 * The final chunk may be smaller when the definition length is not a multiple of the chunk size;
 * concatenating the chunks in order reproduces {@code definition}.
 *
 * @param definition the compressed model definition bytes
 * @param chunkSize the maximum size of each chunk; must be positive
 * @return the ordered list of chunks (empty for an empty definition)
 * @throws IllegalArgumentException if {@code chunkSize} is not positive
 */
public static List<BytesReference> chunkDefinitionWithSize(BytesReference definition, int chunkSize) {
    // Guard against an infinite loop (chunkSize == 0 never advances the cursor) and
    // a pathological initial capacity from the ceil division below.
    if (chunkSize <= 0) {
        throw new IllegalArgumentException("chunkSize must be positive but was [" + chunkSize + "]");
    }
    List<BytesReference> chunks = new ArrayList<>((int) Math.ceil(definition.length() / (double) chunkSize));
    for (int i = 0; i < definition.length(); i += chunkSize) {
        // slice() is a view over the original bytes, so no copying happens here.
        BytesReference chunk = definition.slice(i, Math.min(chunkSize, definition.length() - i));
        chunks.add(chunk);
    }
    return chunks;
}
/**
 * Leniently parses a trained model config doc, defaulting the model type of
 * legacy docs (written before the type field existed) to tree ensemble.
 *
 * @throws IOException if the source cannot be parsed (logged before rethrowing)
 */
private TrainedModelConfig.Builder parseModelConfigLenientlyFromSource(BytesReference source, String modelId) throws IOException {
    try (XContentParser parser = createParser(source)) {
        TrainedModelConfig.Builder builder = TrainedModelConfig.fromXContent(parser, true);
        if (builder.getModelType() == null) {
            // before TrainedModelConfig::modelType was added tree ensembles and the
            // lang ident model were the only models supported. Models created after
            // VERSION_3RD_PARTY_CONFIG_ADDED must have modelType set, if not set modelType
            // is a tree ensemble
            builder.setModelType(TrainedModelType.TREE_ENSEMBLE);
        }
        return builder;
    } catch (IOException e) {
        logger.error(() -> "[" + modelId + "] failed to parse model", e);
        throw e;
    }
}
/**
 * Leniently parses a trained model metadata doc, logging (with the model id)
 * before rethrowing any parse failure.
 */
private TrainedModelMetadata parseMetadataLenientlyFromSource(BytesReference source, String modelId) throws IOException {
    try (XContentParser metadataParser = createParser(source)) {
        return TrainedModelMetadata.fromXContent(metadataParser, true);
    } catch (IOException ioException) {
        logger.error(() -> "[" + modelId + "] failed to parse model metadata", ioException);
        throw ioException;
    }
}
/**
 * Creates a JSON parser over uncompressed source bytes, resolving named objects
 * against this provider's registry and logging (not failing on) deprecated fields.
 */
private XContentParser createParser(BytesReference source) throws IOException {
    var parserConfig = LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG.withRegistry(xContentRegistry);
    return XContentHelper.createParserNotCompressed(parserConfig, source, XContentType.JSON);
}
/**
 * Builds an index request for {@code body} targeting an explicit index.
 */
private static IndexRequest createRequest(String docId, String index, ToXContentObject body, boolean allowOverwriting) {
    IndexRequest indexRequest = new IndexRequest(index);
    return createRequest(indexRequest, docId, body, allowOverwriting);
}
/**
 * Builds an index request for {@code body} with no index set; the caller
 * (e.g. a bulk request) is expected to supply the target index.
 */
private static IndexRequest createRequest(String docId, ToXContentObject body, boolean allowOverwriting) {
    IndexRequest indexRequest = new IndexRequest();
    return createRequest(indexRequest, docId, body, allowOverwriting);
}
/**
 * Serializes {@code body} (with the internal-storage params) into the given request,
 * using op type CREATE unless overwriting is allowed.
 */
private static IndexRequest createRequest(IndexRequest request, String docId, ToXContentObject body, boolean allowOverwriting) {
    try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
        XContentBuilder source = body.toXContent(builder, FOR_INTERNAL_STORAGE_PARAMS);
        DocWriteRequest.OpType opType = allowOverwriting ? DocWriteRequest.OpType.INDEX : DocWriteRequest.OpType.CREATE;
        return request.id(docId).source(source).opType(opType);
    } catch (IOException ex) {
        // This should never happen. If we were able to deserialize the object (from Native or REST) and then fail to serialize it again
        // that is not the users fault. We did something wrong and should throw.
        throw ExceptionsHelper.serverError("Unexpected serialization exception for [" + docId + "]", ex);
    }
}
/**
 * Bumps the model cache version so other nodes invalidate their cached model state,
 * then reports success (true) to the listener.
 */
private void refreshCacheVersion(ActionListener<Boolean> listener) {
    modelCacheMetadataService.updateCacheVersion(ActionListener.wrap(resp -> {
        // Checking the response is always AcknowledgedResponse.TRUE because AcknowledgedResponse.FALSE does not make sense.
        // Errors should be reported through the onFailure method of the listener.
        assert resp.equals(AcknowledgedResponse.TRUE);
        listener.onResponse(true);
    }, listener::onFailure));
}
}
| TrainedModelProvider |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestClientThrottlingAnalyzer.java | {
"start": 1014,
"end": 6229
} | class ____ extends AbstractWasbTestWithTimeout {
private static final int ANALYSIS_PERIOD = 1000;
private static final int ANALYSIS_PERIOD_PLUS_10_PERCENT = ANALYSIS_PERIOD
+ ANALYSIS_PERIOD / 10;
private static final long MEGABYTE = 1024 * 1024;
private static final int MAX_ACCEPTABLE_PERCENT_DIFFERENCE = 20;
private void sleep(long milliseconds) {
try {
Thread.sleep(milliseconds);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
private void fuzzyValidate(long expected, long actual, double percentage) {
final double lowerBound = Math.max(expected - percentage / 100 * expected, 0);
final double upperBound = expected + percentage / 100 * expected;
assertTrue(actual >= lowerBound && actual <= upperBound, String.format(
"The actual value %1$d is not within the expected range: "
+ "[%2$.2f, %3$.2f].",
actual,
lowerBound,
upperBound));
}
private void validate(long expected, long actual) {
assertEquals(expected, actual,
String.format("The actual value %1$d is not the expected value %2$d.", actual, expected));
}
private void validateLessThanOrEqual(long maxExpected, long actual) {
assertTrue(actual < maxExpected, String.format(
"The actual value %1$d is not less than or equal to the maximum"
+ " expected value %2$d.",
actual,
maxExpected));
}
/**
* Ensure that there is no waiting (sleepDuration = 0) if the metrics have
* never been updated. This validates proper initialization of
* ClientThrottlingAnalyzer.
*/
@Test
public void testNoMetricUpdatesThenNoWaiting() {
ClientThrottlingAnalyzer analyzer = new ClientThrottlingAnalyzer(
"test",
ANALYSIS_PERIOD);
validate(0, analyzer.getSleepDuration());
sleep(ANALYSIS_PERIOD_PLUS_10_PERCENT);
validate(0, analyzer.getSleepDuration());
}
/**
* Ensure that there is no waiting (sleepDuration = 0) if the metrics have
* only been updated with successful requests.
*/
@Test
public void testOnlySuccessThenNoWaiting() {
ClientThrottlingAnalyzer analyzer = new ClientThrottlingAnalyzer(
"test",
ANALYSIS_PERIOD);
analyzer.addBytesTransferred(8 * MEGABYTE, false);
validate(0, analyzer.getSleepDuration());
sleep(ANALYSIS_PERIOD_PLUS_10_PERCENT);
validate(0, analyzer.getSleepDuration());
}
/**
* Ensure that there is waiting (sleepDuration != 0) if the metrics have
* only been updated with failed requests. Also ensure that the
* sleepDuration decreases over time.
*/
@Test
public void testOnlyErrorsAndWaiting() {
ClientThrottlingAnalyzer analyzer = new ClientThrottlingAnalyzer(
"test",
ANALYSIS_PERIOD);
validate(0, analyzer.getSleepDuration());
analyzer.addBytesTransferred(4 * MEGABYTE, true);
sleep(ANALYSIS_PERIOD_PLUS_10_PERCENT);
final int expectedSleepDuration1 = 1100;
validateLessThanOrEqual(expectedSleepDuration1, analyzer.getSleepDuration());
sleep(10 * ANALYSIS_PERIOD);
final int expectedSleepDuration2 = 900;
validateLessThanOrEqual(expectedSleepDuration2, analyzer.getSleepDuration());
}
/**
* Ensure that there is waiting (sleepDuration != 0) if the metrics have
* only been updated with both successful and failed requests. Also ensure
* that the sleepDuration decreases over time.
*/
@Test
public void testSuccessAndErrorsAndWaiting() {
ClientThrottlingAnalyzer analyzer = new ClientThrottlingAnalyzer(
"test",
ANALYSIS_PERIOD);
validate(0, analyzer.getSleepDuration());
analyzer.addBytesTransferred(8 * MEGABYTE, false);
analyzer.addBytesTransferred(2 * MEGABYTE, true);
sleep(ANALYSIS_PERIOD_PLUS_10_PERCENT);
NanoTimer timer = new NanoTimer();
analyzer.suspendIfNecessary();
final int expectedElapsedTime = 126;
fuzzyValidate(expectedElapsedTime,
timer.elapsedTimeMs(),
MAX_ACCEPTABLE_PERCENT_DIFFERENCE);
sleep(10 * ANALYSIS_PERIOD);
final int expectedSleepDuration = 110;
validateLessThanOrEqual(expectedSleepDuration, analyzer.getSleepDuration());
}
/**
* Ensure that there is waiting (sleepDuration != 0) if the metrics have
* only been updated with many successful and failed requests. Also ensure
* that the sleepDuration decreases to zero over time.
*/
@Test
public void testManySuccessAndErrorsAndWaiting() {
ClientThrottlingAnalyzer analyzer = new ClientThrottlingAnalyzer(
"test",
ANALYSIS_PERIOD);
validate(0, analyzer.getSleepDuration());
final int numberOfRequests = 20;
for (int i = 0; i < numberOfRequests; i++) {
analyzer.addBytesTransferred(8 * MEGABYTE, false);
analyzer.addBytesTransferred(2 * MEGABYTE, true);
}
sleep(ANALYSIS_PERIOD_PLUS_10_PERCENT);
NanoTimer timer = new NanoTimer();
analyzer.suspendIfNecessary();
fuzzyValidate(7,
timer.elapsedTimeMs(),
MAX_ACCEPTABLE_PERCENT_DIFFERENCE);
sleep(10 * ANALYSIS_PERIOD);
validate(0, analyzer.getSleepDuration());
}
}
| TestClientThrottlingAnalyzer |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/example/SpringXPathFilterTest.java | {
"start": 1068,
"end": 1616
} | class ____ extends XPathFilterTest {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Override
// The API is deprecated, we can remove warnings safely as the tests will disappear when removing this component.
@SuppressWarnings("deprecation")
protected CamelContext createCamelContext() throws Exception {
return SpringCamelContext.springCamelContext(
new ClassPathXmlApplicationContext("org/apache/camel/spring/example/xpathFilter.xml"), true);
}
}
| SpringXPathFilterTest |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/FirstValueWithRetractAggFunctionWithoutOrderTest.java | {
"start": 4820,
"end": 5390
} | class ____
extends NumberFirstValueWithRetractAggFunctionWithoutOrderTestBase<Float> {
@Override
protected Float getValue(String v) {
return Float.valueOf(v);
}
@Override
protected AggregateFunction<Float, FirstValueWithRetractAccumulator<Float>>
getAggregator() {
return new FirstValueWithRetractAggFunction<>(DataTypes.FLOAT().getLogicalType());
}
}
/** Test for {@link DoubleType}. */
@Nested
final | FloatFirstValueWithRetractAggFunctionWithoutOrderTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/sorted/state/BatchExecutionStateBackendTest.java | {
"start": 53715,
"end": 54279
} | class ____
implements AggregateFunction<Long, Long, Long> {
@Override
public Long createAccumulator() {
return 0L;
}
@Override
public Long add(Long value, Long accumulator) {
return accumulator += value;
}
@Override
public Long getResult(Long accumulator) {
return accumulator;
}
@Override
public Long merge(Long a, Long b) {
return a + b;
}
}
private static final | ImmutableAggregatingAddingFunction |
java | apache__rocketmq | common/src/test/java/org/apache/rocketmq/common/chain/HandlerChainTest.java | {
"start": 990,
"end": 2141
} | class ____ {
private HandlerChain<Integer, String> handlerChain;
private Handler<Integer, String> handler1;
private Handler<Integer, String> handler2;
@Before
public void setUp() {
handlerChain = HandlerChain.create();
handler1 = (t, chain) -> "Handler1";
handler2 = (t, chain) -> null;
}
@Test
public void testHandle_withEmptyChain() {
handlerChain.addNext(handler1);
handlerChain.handle(1);
assertNull("Expected null since the handler chain is empty", handlerChain.handle(2));
}
@Test
public void testHandle_withNonEmptyChain() {
handlerChain.addNext(handler1);
String result = handlerChain.handle(1);
assertEquals("Handler1", result);
}
@Test
public void testHandle_withMultipleHandlers() {
handlerChain.addNext(handler1);
handlerChain.addNext(handler2);
String result1 = handlerChain.handle(1);
String result2 = handlerChain.handle(2);
assertEquals("Handler1", result1);
assertNull("Expected null since there are no more handlers", result2);
}
}
| HandlerChainTest |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredSessionStoreTest.java | {
"start": 24317,
"end": 30385
} | interface ____ extends SessionStore<Bytes, byte[]>, CachedStateStore<byte[], byte[]> { }
@SuppressWarnings("unchecked")
@Test
public void shouldSetFlushListenerOnWrappedCachingStore() {
setUpWithoutContext();
final CachedSessionStore cachedSessionStore = mock(CachedSessionStore.class);
when(cachedSessionStore.setFlushListener(any(CacheFlushListener.class), eq(false))).thenReturn(true);
store = new MeteredSessionStore<>(
cachedSessionStore,
STORE_TYPE,
Serdes.String(),
Serdes.String(),
new MockTime());
assertTrue(store.setFlushListener(null, false));
}
@Test
public void shouldNotSetFlushListenerOnWrappedNoneCachingStore() {
setUpWithoutContext();
assertFalse(store.setFlushListener(null, false));
}
@Test
public void shouldRemoveMetricsOnClose() {
setUp();
doNothing().when(innerStore).close();
init(); // replays "inner"
// There's always a "count" metric registered
assertThat(storeMetrics(), not(empty()));
store.close();
assertThat(storeMetrics(), empty());
}
@Test
public void shouldRemoveMetricsEvenIfWrappedStoreThrowsOnClose() {
setUp();
doThrow(new RuntimeException("Oops!")).when(innerStore).close();
init(); // replays "inner"
assertThat(storeMetrics(), not(empty()));
assertThrows(RuntimeException.class, store::close);
assertThat(storeMetrics(), empty());
}
@SuppressWarnings("unused")
@Test
public void shouldTrackOpenIteratorsMetric() {
setUp();
when(innerStore.backwardFetch(KEY_BYTES)).thenReturn(KeyValueIterators.emptyIterator());
init();
final KafkaMetric openIteratorsMetric = metric("num-open-iterators");
assertThat(openIteratorsMetric, not(nullValue()));
assertThat((Long) openIteratorsMetric.metricValue(), equalTo(0L));
try (final KeyValueIterator<Windowed<String>, String> unused = store.backwardFetch(KEY)) {
assertThat((Long) openIteratorsMetric.metricValue(), equalTo(1L));
}
assertThat((Long) openIteratorsMetric.metricValue(), equalTo(0L));
}
@SuppressWarnings("unused")
@Test
public void shouldTimeIteratorDuration() {
setUp();
when(innerStore.backwardFetch(KEY_BYTES)).thenReturn(KeyValueIterators.emptyIterator());
init();
final KafkaMetric iteratorDurationAvgMetric = metric("iterator-duration-avg");
final KafkaMetric iteratorDurationMaxMetric = metric("iterator-duration-max");
assertThat(iteratorDurationAvgMetric, not(nullValue()));
assertThat(iteratorDurationMaxMetric, not(nullValue()));
assertThat((Double) iteratorDurationAvgMetric.metricValue(), equalTo(Double.NaN));
assertThat((Double) iteratorDurationMaxMetric.metricValue(), equalTo(Double.NaN));
try (final KeyValueIterator<Windowed<String>, String> unused = store.backwardFetch(KEY)) {
// nothing to do, just close immediately
mockTime.sleep(2);
}
assertThat((double) iteratorDurationAvgMetric.metricValue(), equalTo(2.0 * TimeUnit.MILLISECONDS.toNanos(1)));
assertThat((double) iteratorDurationMaxMetric.metricValue(), equalTo(2.0 * TimeUnit.MILLISECONDS.toNanos(1)));
try (final KeyValueIterator<Windowed<String>, String> iterator = store.backwardFetch(KEY)) {
// nothing to do, just close immediately
mockTime.sleep(3);
}
assertThat((double) iteratorDurationAvgMetric.metricValue(), equalTo(2.5 * TimeUnit.MILLISECONDS.toNanos(1)));
assertThat((double) iteratorDurationMaxMetric.metricValue(), equalTo(3.0 * TimeUnit.MILLISECONDS.toNanos(1)));
}
@SuppressWarnings("unused")
@Test
public void shouldTrackOldestOpenIteratorTimestamp() {
setUp();
when(innerStore.backwardFetch(KEY_BYTES)).thenReturn(KeyValueIterators.emptyIterator());
init();
final KafkaMetric oldestIteratorTimestampMetric = metric("oldest-iterator-open-since-ms");
assertThat(oldestIteratorTimestampMetric, not(nullValue()));
assertThat(oldestIteratorTimestampMetric.metricValue(), nullValue());
KeyValueIterator<Windowed<String>, String> second = null;
final long secondTimestamp;
try {
try (final KeyValueIterator<Windowed<String>, String> unused = store.backwardFetch(KEY)) {
final long oldestTimestamp = mockTime.milliseconds();
assertThat((Long) oldestIteratorTimestampMetric.metricValue(), equalTo(oldestTimestamp));
mockTime.sleep(100);
// open a second iterator before closing the first to test that we still produce the first iterator's timestamp
second = store.backwardFetch(KEY);
secondTimestamp = mockTime.milliseconds();
assertThat((Long) oldestIteratorTimestampMetric.metricValue(), equalTo(oldestTimestamp));
mockTime.sleep(100);
}
// now that the first iterator is closed, check that the timestamp has advanced to the still open second iterator
assertThat((Long) oldestIteratorTimestampMetric.metricValue(), equalTo(secondTimestamp));
} finally {
if (second != null) {
second.close();
}
}
assertThat((Integer) oldestIteratorTimestampMetric.metricValue(), nullValue());
}
private KafkaMetric metric(final String name) {
return this.metrics.metric(new MetricName(name, STORE_LEVEL_GROUP, "", this.tags));
}
private List<MetricName> storeMetrics() {
return metrics.metrics()
.keySet()
.stream()
.filter(name -> name.group().equals(STORE_LEVEL_GROUP) && name.tags().equals(tags))
.collect(Collectors.toList());
}
}
| CachedSessionStore |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java | {
"start": 1094,
"end": 6353
} | class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("sum", ElementType.LONG),
new IntermediateStateDesc("seen", ElementType.BOOLEAN) );
private final DriverContext driverContext;
private final LongState state;
private final List<Integer> channels;
public SumIntAggregatorFunction(DriverContext driverContext, List<Integer> channels,
LongState state) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
}
public static SumIntAggregatorFunction create(DriverContext driverContext,
List<Integer> channels) {
return new SumIntAggregatorFunction(driverContext, channels, new LongState(SumIntAggregator.init()));
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
IntBlock vBlock = page.getBlock(channels.get(0));
IntVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock, mask);
return;
}
addRawVector(vVector, mask);
}
private void addRawInputNotMasked(Page page) {
IntBlock vBlock = page.getBlock(channels.get(0));
IntVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock);
return;
}
addRawVector(vVector);
}
private void addRawVector(IntVector vVector) {
state.seen(true);
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
int vValue = vVector.getInt(valuesPosition);
state.longValue(SumIntAggregator.combine(state.longValue(), vValue));
}
}
private void addRawVector(IntVector vVector, BooleanVector mask) {
state.seen(true);
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
int vValue = vVector.getInt(valuesPosition);
state.longValue(SumIntAggregator.combine(state.longValue(), vValue));
}
}
private void addRawBlock(IntBlock vBlock) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
state.seen(true);
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
int vValue = vBlock.getInt(vOffset);
state.longValue(SumIntAggregator.combine(state.longValue(), vValue));
}
}
}
private void addRawBlock(IntBlock vBlock, BooleanVector mask) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
state.seen(true);
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
int vValue = vBlock.getInt(vOffset);
state.longValue(SumIntAggregator.combine(state.longValue(), vValue));
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block sumUncast = page.getBlock(channels.get(0));
if (sumUncast.areAllValuesNull()) {
return;
}
LongVector sum = ((LongBlock) sumUncast).asVector();
assert sum.getPositionCount() == 1;
Block seenUncast = page.getBlock(channels.get(1));
if (seenUncast.areAllValuesNull()) {
return;
}
BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
assert seen.getPositionCount() == 1;
if (seen.getBoolean(0)) {
state.longValue(SumIntAggregator.combine(state.longValue(), sum.getLong(0)));
state.seen(true);
}
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
if (state.seen() == false) {
blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1);
return;
}
blocks[offset] = driverContext.blockFactory().newConstantLongBlockWith(state.longValue(), 1);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| SumIntAggregatorFunction |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/hql/internal/QualifiedJoinPathConsumer.java | {
"start": 7741,
"end": 7971
} | interface ____ {
void consumeIdentifier(String identifier, boolean isTerminal, boolean allowReuse);
void consumeTreat(String typeName, boolean isTerminal);
SemanticPathPart getConsumedPart();
}
private static | ConsumerDelegate |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java | {
"start": 10436,
"end": 23242
} | enum ____ {
/**
* Creates the target file only if no file exists at that path already. Does not overwrite
* existing files and directories.
*/
NO_OVERWRITE,
/**
* Creates a new target file regardless of any existing files or directories. Existing files
* and directories will be deleted (recursively) automatically before creating the new file.
*/
OVERWRITE
}
/** Logger for all FileSystem work. */
private static final Logger LOG = LoggerFactory.getLogger(FileSystem.class);
/**
* This lock guards the methods {@link #initOutPathLocalFS(Path, WriteMode, boolean)} and {@link
* #initOutPathDistFS(Path, WriteMode, boolean)} which are otherwise susceptible to races.
*/
private static final ReentrantLock OUTPUT_DIRECTORY_INIT_LOCK = new ReentrantLock(true);
/** Object used to protect calls to specific methods. */
private static final ReentrantLock LOCK = new ReentrantLock(true);
/** Cache for file systems, by scheme + authority. */
private static final HashMap<FSKey, FileSystem> CACHE = new HashMap<>();
/**
* Mapping of file system schemes to the corresponding factories, populated in {@link
* FileSystem#initialize(Configuration, PluginManager)}.
*/
private static final HashMap<String, FileSystemFactory> FS_FACTORIES = new HashMap<>();
/** The default factory that is used when no scheme matches. */
private static final FileSystemFactory FALLBACK_FACTORY = loadHadoopFsFactory();
/** All known plugins for a given scheme, do not fallback for those. */
private static final Multimap<String, String> DIRECTLY_SUPPORTED_FILESYSTEM =
ImmutableMultimap.<String, String>builder()
.put("wasb", "flink-fs-azure-hadoop")
.put("wasbs", "flink-fs-azure-hadoop")
.put("abfs", "flink-fs-azure-hadoop")
.put("abfss", "flink-fs-azure-hadoop")
.put("oss", "flink-oss-fs-hadoop")
.put("s3", "flink-s3-fs-hadoop")
.put("s3", "flink-s3-fs-presto")
.put("s3a", "flink-s3-fs-hadoop")
.put("s3p", "flink-s3-fs-presto")
.put("gs", "flink-gs-fs-hadoop")
.build();
/** Exceptions for DIRECTLY_SUPPORTED_FILESYSTEM. */
private static final Set<String> ALLOWED_FALLBACK_FILESYSTEMS = new HashSet<>();
/**
* The default filesystem scheme to be used, configured during process-wide initialization. This
* value defaults to the local file systems scheme {@code 'file:///'} or {@code 'file:/'}.
*/
private static URI defaultScheme;
// ------------------------------------------------------------------------
// Initialization
// ------------------------------------------------------------------------
/**
* Initializes the shared file system settings.
*
* <p>The given configuration is passed to each file system factory to initialize the respective
* file systems. Because the configuration of file systems may be different subsequent to the
* call of this method, this method clears the file system instance cache.
*
* <p>This method also reads the default file system URI from the configuration key {@link
* CoreOptions#DEFAULT_FILESYSTEM_SCHEME}. All calls to {@link FileSystem#get(URI)} where the
* URI has no scheme will be interpreted as relative to that URI. As an example, assume the
* default file system URI is set to {@code 'hdfs://localhost:9000/'}. A file path of {@code
* '/user/USERNAME/in.txt'} is interpreted as {@code
* 'hdfs://localhost:9000/user/USERNAME/in.txt'}.
*
* @deprecated use {@link #initialize(Configuration, PluginManager)} instead.
* @param config the configuration from where to fetch the parameter.
*/
@Deprecated
public static void initialize(Configuration config) throws IllegalConfigurationException {
initializeWithoutPlugins(config);
}
private static void initializeWithoutPlugins(Configuration config)
throws IllegalConfigurationException {
initialize(config, null);
}
/**
* Initializes the shared file system settings.
*
* <p>The given configuration is passed to each file system factory to initialize the respective
* file systems. Because the configuration of file systems may be different subsequent to the
* call of this method, this method clears the file system instance cache.
*
* <p>This method also reads the default file system URI from the configuration key {@link
* CoreOptions#DEFAULT_FILESYSTEM_SCHEME}. All calls to {@link FileSystem#get(URI)} where the
* URI has no scheme will be interpreted as relative to that URI. As an example, assume the
* default file system URI is set to {@code 'hdfs://localhost:9000/'}. A file path of {@code
* '/user/USERNAME/in.txt'} is interpreted as {@code
* 'hdfs://localhost:9000/user/USERNAME/in.txt'}.
*
* @param config the configuration from where to fetch the parameter.
* @param pluginManager optional plugin manager that is used to initialized filesystems provided
* as plugins.
*/
public static void initialize(Configuration config, @Nullable PluginManager pluginManager)
throws IllegalConfigurationException {
LOCK.lock();
try {
// make sure file systems are re-instantiated after re-configuration
CACHE.clear();
FS_FACTORIES.clear();
Collection<Supplier<Iterator<FileSystemFactory>>> factorySuppliers = new ArrayList<>(2);
factorySuppliers.add(() -> ServiceLoader.load(FileSystemFactory.class).iterator());
if (pluginManager != null) {
factorySuppliers.add(
() ->
Iterators.transform(
pluginManager.load(FileSystemFactory.class),
PluginFileSystemFactory::of));
}
final List<FileSystemFactory> fileSystemFactories =
loadFileSystemFactories(factorySuppliers);
// configure all file system factories
for (FileSystemFactory factory : fileSystemFactories) {
factory.configure(config);
String scheme = factory.getScheme();
FileSystemFactory fsf =
ConnectionLimitingFactory.decorateIfLimited(factory, scheme, config);
FS_FACTORIES.put(scheme, fsf);
}
// configure the default (fallback) factory
FALLBACK_FACTORY.configure(config);
// also read the default file system scheme
final String stringifiedUri = config.get(CoreOptions.DEFAULT_FILESYSTEM_SCHEME, null);
if (stringifiedUri == null) {
defaultScheme = null;
} else {
try {
defaultScheme = new URI(stringifiedUri);
} catch (URISyntaxException e) {
throw new IllegalConfigurationException(
"The default file system scheme ('"
+ CoreOptions.DEFAULT_FILESYSTEM_SCHEME
+ "') is invalid: "
+ stringifiedUri,
e);
}
}
ALLOWED_FALLBACK_FILESYSTEMS.clear();
final Iterable<String> allowedFallbackFilesystems =
Splitter.on(';')
.omitEmptyStrings()
.trimResults()
.split(config.get(CoreOptions.ALLOWED_FALLBACK_FILESYSTEMS));
allowedFallbackFilesystems.forEach(ALLOWED_FALLBACK_FILESYSTEMS::add);
} finally {
LOCK.unlock();
}
}
// ------------------------------------------------------------------------
// Obtaining File System Instances
// ------------------------------------------------------------------------
/**
* Returns a reference to the {@link FileSystem} instance for accessing the local file system.
*
* @return a reference to the {@link FileSystem} instance for accessing the local file system.
*/
public static FileSystem getLocalFileSystem() {
return FileSystemSafetyNet.wrapWithSafetyNetWhenActivated(
LocalFileSystem.getSharedInstance());
}
/**
* Returns a reference to the {@link FileSystem} instance for accessing the file system
* identified by the given {@link URI}.
*
* @param uri the {@link URI} identifying the file system
* @return a reference to the {@link FileSystem} instance for accessing the file system
* identified by the given {@link URI}.
* @throws IOException thrown if a reference to the file system instance could not be obtained
*/
public static FileSystem get(URI uri) throws IOException {
return FileSystemSafetyNet.wrapWithSafetyNetWhenActivated(getUnguardedFileSystem(uri));
}
@Internal
public static FileSystem getUnguardedFileSystem(final URI fsUri) throws IOException {
checkNotNull(fsUri, "file system URI");
LOCK.lock();
try {
final URI uri;
if (fsUri.getScheme() != null) {
uri = fsUri;
} else {
// Apply the default fs scheme
final URI defaultUri = getDefaultFsUri();
URI rewrittenUri = null;
try {
rewrittenUri =
new URI(
defaultUri.getScheme(),
null,
defaultUri.getHost(),
defaultUri.getPort(),
fsUri.getPath(),
null,
null);
} catch (URISyntaxException e) {
// for local URIs, we make one more try to repair the path by making it absolute
if (defaultUri.getScheme().equals("file")) {
try {
rewrittenUri =
new URI(
"file",
null,
new Path(new File(fsUri.getPath()).getAbsolutePath())
.toUri()
.getPath(),
null);
} catch (URISyntaxException ignored) {
// could not help it...
}
}
}
if (rewrittenUri != null) {
uri = rewrittenUri;
} else {
throw new IOException(
"The file system URI '"
+ fsUri
+ "' declares no scheme and cannot be interpreted relative to the default file system URI ("
+ defaultUri
+ ").");
}
}
// print a helpful pointer for malformed local URIs (happens a lot to new users)
if (uri.getScheme().equals("file")
&& uri.getAuthority() != null
&& !uri.getAuthority().isEmpty()) {
String supposedUri = "file:///" + uri.getAuthority() + uri.getPath();
throw new IOException(
"Found local file path with authority '"
+ uri.getAuthority()
+ "' in path '"
+ uri.toString()
+ "'. Hint: Did you forget a slash? (correct path would be '"
+ supposedUri
+ "')");
}
final FSKey key = new FSKey(uri.getScheme(), uri.getAuthority());
// See if there is a file system object in the cache
{
FileSystem cached = CACHE.get(key);
if (cached != null) {
return cached;
}
}
// this "default" initialization makes sure that the FileSystem | WriteMode |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JdbcEndpointBuilderFactory.java | {
"start": 14856,
"end": 21846
} | interface ____
extends
EndpointProducerBuilder {
default JdbcEndpointBuilder basic() {
return (JdbcEndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* To use a custom org.apache.camel.component.jdbc.BeanRowMapper when
* using outputClass. The default implementation will lower case the row
* names and skip underscores, and dashes. For example CUST_ID is mapped
* as custId.
*
* The option is a:
* <code>org.apache.camel.component.jdbc.BeanRowMapper</code> type.
*
* Group: advanced
*
* @param beanRowMapper the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder beanRowMapper(org.apache.camel.component.jdbc.BeanRowMapper beanRowMapper) {
doSetProperty("beanRowMapper", beanRowMapper);
return this;
}
/**
* To use a custom org.apache.camel.component.jdbc.BeanRowMapper when
* using outputClass. The default implementation will lower case the row
* names and skip underscores, and dashes. For example CUST_ID is mapped
* as custId.
*
* The option will be converted to a
* <code>org.apache.camel.component.jdbc.BeanRowMapper</code> type.
*
* Group: advanced
*
* @param beanRowMapper the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder beanRowMapper(String beanRowMapper) {
doSetProperty("beanRowMapper", beanRowMapper);
return this;
}
/**
* To use a custom strategy for working with connections. Do not use a
* custom strategy when using the spring-jdbc component because a
* special Spring ConnectionStrategy is used by default to support
* Spring Transactions.
*
* The option is a:
* <code>org.apache.camel.component.jdbc.ConnectionStrategy</code> type.
*
* Group: advanced
*
* @param connectionStrategy the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder connectionStrategy(org.apache.camel.component.jdbc.ConnectionStrategy connectionStrategy) {
doSetProperty("connectionStrategy", connectionStrategy);
return this;
}
/**
* To use a custom strategy for working with connections. Do not use a
* custom strategy when using the spring-jdbc component because a
* special Spring ConnectionStrategy is used by default to support
* Spring Transactions.
*
* The option will be converted to a
* <code>org.apache.camel.component.jdbc.ConnectionStrategy</code> type.
*
* Group: advanced
*
* @param connectionStrategy the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder connectionStrategy(String connectionStrategy) {
doSetProperty("connectionStrategy", connectionStrategy);
return this;
}
/**
* Allows the plugin to use a custom
* org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy to
* control preparation of the query and prepared statement.
*
* The option is a:
* <code>org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy</code> type.
*
* Group: advanced
*
* @param prepareStatementStrategy the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder prepareStatementStrategy(org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy prepareStatementStrategy) {
doSetProperty("prepareStatementStrategy", prepareStatementStrategy);
return this;
}
/**
* Allows the plugin to use a custom
* org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy to
* control preparation of the query and prepared statement.
*
* The option will be converted to a
* <code>org.apache.camel.component.jdbc.JdbcPrepareStatementStrategy</code> type.
*
* Group: advanced
*
* @param prepareStatementStrategy the value to set
* @return the dsl builder
*/
default AdvancedJdbcEndpointBuilder prepareStatementStrategy(String prepareStatementStrategy) {
doSetProperty("prepareStatementStrategy", prepareStatementStrategy);
return this;
}
}
public | AdvancedJdbcEndpointBuilder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 3092,
"end": 3353
} | interface ____ {}
""")
.addSourceLines(
"MyTest.java",
"""
import com.google.errorprone.annotations.Immutable;
import java.lang.annotation.Annotation;
@Immutable
final | Test |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DropboxEndpointBuilderFactory.java | {
"start": 30313,
"end": 32792
} | interface ____ {
/**
* Dropbox (camel-dropbox)
* Upload, download and manage files, folders, groups, collaborations,
* etc on Dropbox.
*
* Category: cloud,file,api
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-dropbox
*
* @return the dsl builder for the headers' name.
*/
default DropboxHeaderNameBuilder dropbox() {
return DropboxHeaderNameBuilder.INSTANCE;
}
/**
* Dropbox (camel-dropbox)
* Upload, download and manage files, folders, groups, collaborations,
* etc on Dropbox.
*
* Category: cloud,file,api
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-dropbox
*
* Syntax: <code>dropbox:operation</code>
*
* Path parameter: operation (required)
* The specific action (typically is a CRUD action) to perform on
* Dropbox remote folder.
* There are 5 enums and the value can be one of: put, del, search, get,
* move
*
* @param path operation
* @return the dsl builder
*/
default DropboxEndpointBuilder dropbox(String path) {
return DropboxEndpointBuilderFactory.endpointBuilder("dropbox", path);
}
/**
* Dropbox (camel-dropbox)
* Upload, download and manage files, folders, groups, collaborations,
* etc on Dropbox.
*
* Category: cloud,file,api
* Since: 2.14
* Maven coordinates: org.apache.camel:camel-dropbox
*
* Syntax: <code>dropbox:operation</code>
*
* Path parameter: operation (required)
* The specific action (typically is a CRUD action) to perform on
* Dropbox remote folder.
* There are 5 enums and the value can be one of: put, del, search, get,
* move
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path operation
* @return the dsl builder
*/
default DropboxEndpointBuilder dropbox(String componentName, String path) {
return DropboxEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Dropbox component.
*/
public static | DropboxBuilders |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ClassUtils.java | {
"start": 57027,
"end": 58130
} | class ____ to a subclass-declared interface:
// see, for example, HashMap.HashIterator.hasNext)
result = findInterfaceMethodIfPossible(
methodName, parameterTypes, targetClass, declaringClass, requirePublicInterface);
}
return (result != null ? result : method);
}
private static @Nullable Method findInterfaceMethodIfPossible(String methodName, Class<?>[] parameterTypes,
Class<?> startClass, Class<?> endClass, boolean requirePublicInterface) {
Class<?> current = startClass;
while (current != null && current != endClass) {
for (Class<?> ifc : current.getInterfaces()) {
try {
if (!requirePublicInterface || Modifier.isPublic(ifc.getModifiers())) {
return ifc.getMethod(methodName, parameterTypes);
}
}
catch (NoSuchMethodException ex) {
// ignore
}
}
current = current.getSuperclass();
}
return null;
}
/**
* Get the closest publicly accessible method in the supplied method's type hierarchy that
* has a method signature equivalent to the supplied method, if possible.
* <p>This method recursively searches the | method |
java | resilience4j__resilience4j | resilience4j-rxjava2/src/main/java/io/github/resilience4j/ratelimiter/operator/FlowableRateLimiter.java | {
"start": 2189,
"end": 2655
} | class ____ extends AbstractSubscriber<T> {
RateLimiterSubscriber(Subscriber<? super T> downstreamSubscriber) {
super(downstreamSubscriber);
}
@Override
public void hookOnError(Throwable t) {
// NoOp
}
@Override
public void hookOnComplete() {
// NoOp
}
@Override
public void hookOnCancel() {
// NoOp
}
}
} | RateLimiterSubscriber |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/CommonQueryContract.java | {
"start": 7261,
"end": 23538
} | interface ____ of its subtypes. For example,
* {@link SelectionQuery#setCacheRegion} is preferred over
* {@link org.hibernate.jpa.HibernateHints#HINT_CACHE_REGION}.
*/
CommonQueryContract setHint(String hintName, Object value);
/**
* Get the {@link ParameterMetadata} object representing the parameters
* of this query, and providing access to the {@link QueryParameter}s.
*
* @since 7.0
*/
ParameterMetadata getParameterMetadata();
/**
* Bind the given argument to a named query parameter.
* <p>
* If the type of the parameter cannot be inferred from the context in
* which it occurs, use one of the overloads which accepts a "type",
* or pass a {@link TypedParameterValue}.
*
* @see #setParameter(String, Object, Class)
* @see #setParameter(String, Object, Type)
*
* @see TypedParameterValue
*/
CommonQueryContract setParameter(String parameter, Object value);
/**
* Bind the given argument to a named query parameter using the given
* {@link Class} reference to attempt to infer the {@link Type}.
* If unable to infer an appropriate {@link Type}, fall back to
* {@link #setParameter(String, Object)}.
*
* @see #setParameter(String, Object, Type)
*/
<P> CommonQueryContract setParameter(String parameter, P value, Class<P> type);
/**
* Bind the given argument to a named query parameter using the given
* {@link Type}.
*/
<P> CommonQueryContract setParameter(String parameter, P value, Type<P> type);
/**
* Bind an {@link Instant} to the named query parameter using just the
* portion indicated by the given {@link TemporalType}.
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(String parameter, Instant value, TemporalType temporalType);
/**
* @see jakarta.persistence.Query#setParameter(String, Calendar, TemporalType)
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(String parameter, Calendar value, TemporalType temporalType);
/**
* @see jakarta.persistence.Query#setParameter(String, Date, TemporalType)
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(String parameter, Date value, TemporalType temporalType);
/**
* Bind the given argument to an ordinal query parameter.
* <p>
* If the type of the parameter cannot be inferred from the context in
* which it occurs, use one of the overloads which accepts a "type",
* or pass a {@link TypedParameterValue}.
*
* @see #setParameter(int, Object, Class)
* @see #setParameter(int, Object, Type)
*
* @see TypedParameterValue
*/
CommonQueryContract setParameter(int parameter, Object value);
/**
* Bind the given argument to an ordinal query parameter using the given
* {@link Class} reference to attempt to infer the {@link Type}.
* If unable to infer an appropriate {@link Type}, fall back to
* {@link #setParameter(int, Object)}.
*
* @see #setParameter(int, Object, Type)
*/
<P> CommonQueryContract setParameter(int parameter, P value, Class<P> type);
/**
* Bind the given argument to an ordinal query parameter using the given
* {@link Type}.
*/
<P> CommonQueryContract setParameter(int parameter, P value, Type<P> type);
/**
* Bind an {@link Instant} to an ordinal query parameter using just the
* portion indicated by the given {@link TemporalType}.
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(int parameter, Instant value, TemporalType temporalType);
/**
* @see jakarta.persistence.Query#setParameter(int, Date, TemporalType)
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(int parameter, Date value, TemporalType temporalType);
/**
* @see jakarta.persistence.Query#setParameter(int, Calendar, TemporalType)
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(int parameter, Calendar value, TemporalType temporalType);
/**
* Bind an argument to the query parameter represented by the given
* {@link QueryParameter}.
* <p>
* If the type of the parameter cannot be inferred from the context in
* which it occurs, use one of the overloads which accepts a "type".
*
* @see #setParameter(QueryParameter, Object, Type)
*
* @param parameter the query parameter memento
* @param value the argument, which might be null
*
* @return {@code this}, for method chaining
*/
<T> CommonQueryContract setParameter(QueryParameter<T> parameter, T value);
/**
* Bind an argument to the query parameter represented by the given
* {@link QueryParameter}, using the given {@link Class} reference to attempt
* to infer the {@link Type} to use. If unable to infer an appropriate
* {@link Type}, fall back to {@link #setParameter(QueryParameter, Object)}.
*
* @param parameter the query parameter memento
* @param value the argument, which might be null
* @param type a {@link Type} representing the type of the parameter
*
* @return {@code this}, for method chaining
*
* @see #setParameter(QueryParameter, Object, Type)
*/
<P> CommonQueryContract setParameter(QueryParameter<P> parameter, P value, Class<P> type);
/**
* Bind an argument to the query parameter represented by the given
* {@link QueryParameter}, using the given {@link Type}.
*
* @param parameter the query parameter memento
* @param val the argument, which might be null
* @param type a {@link Type} representing the type of the parameter
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameter(QueryParameter<P> parameter, P val, Type<P> type);
/**
* @see jakarta.persistence.Query#setParameter(Parameter, Object)
*/
<T> CommonQueryContract setParameter(Parameter<T> param, T value);
/**
* @see jakarta.persistence.Query#setParameter(Parameter, Calendar, TemporalType)
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(Parameter<Calendar> param, Calendar value, TemporalType temporalType);
/**
* @see jakarta.persistence.Query#setParameter(Parameter, Date, TemporalType)
*
* @deprecated since {@link TemporalType} is deprecated
*/
@Deprecated(since = "7")
CommonQueryContract setParameter(Parameter<Date> param, Date value, TemporalType temporalType);
/**
* Bind multiple arguments to a named query parameter.
* <p>
* The "type mapping" for the binding is inferred from the type of
* the first collection element.
*
* @see #setParameterList(java.lang.String, java.util.Collection, Type)
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
CommonQueryContract setParameterList(String parameter, @SuppressWarnings("rawtypes") Collection values);
/**
* Bind multiple arguments to a named query parameter using the given
* {@link Class} reference to attempt to infer the {@link Type}
* If unable to infer an appropriate {@link Type}, fall back to
* {@link #setParameterList(String, Collection)}.
*
* @see #setParameterList(java.lang.String, java.util.Collection, Type)
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(String parameter, Collection<? extends P> values, Class<P> javaType);
/**
* Bind multiple arguments to a named query parameter using the given
* {@link Type}.
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(String parameter, Collection<? extends P> values, Type<P> type);
/**
* Bind multiple arguments to a named query parameter.
* <p>
* The "type mapping" for the binding is inferred from the type of
* the first collection element
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
CommonQueryContract setParameterList(String parameter, Object[] values);
/**
* Bind multiple arguments to a named query parameter using the given
* Class reference to attempt to determine the {@link Type}
* to use. If unable to determine an appropriate {@link Type},
* {@link #setParameterList(String, Collection)} is used
*
* @see #setParameterList(java.lang.String, Object[], Type)
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(String parameter, P[] values, Class<P> javaType);
/**
* Bind multiple arguments to a named query parameter using the given
* {@link Type}.
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(String parameter, P[] values, Type<P> type);
/**
* Bind multiple arguments to an ordinal query parameter.
* <p>
* The "type mapping" for the binding is inferred from the type of
* the first collection element
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
CommonQueryContract setParameterList(int parameter, @SuppressWarnings("rawtypes") Collection values);
/**
* Bind multiple arguments to an ordinal query parameter using the given
* {@link Class} reference to attempt to infer the {@link Type}.
* If unable to infer an appropriate {@link Type}, fall back to
* {@link #setParameterList(String, Collection)}.
*
* @see #setParameterList(int, Collection, Type)
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(int parameter, Collection<? extends P> values, Class<P> javaType);
/**
* Bind multiple arguments to an ordinal query parameter using the given
* {@link Type}.
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(int parameter, Collection<? extends P> values, Type<P> type);
/**
* Bind multiple arguments to an ordinal query parameter.
* <p>
* The "type mapping" for the binding is inferred from the type of
* the first collection element
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
CommonQueryContract setParameterList(int parameter, Object[] values);
/**
* Bind multiple arguments to an ordinal query parameter using the given
* {@link Class} reference to attempt to infer the {@link Type}.
* If unable to infer an appropriate {@link Type}, fall back to
* {@link #setParameterList(String, Collection)}.
*
* @see #setParameterList(int, Object[], Type)
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(int parameter, P[] values, Class<P> javaType);
/**
* Bind multiple arguments to an ordinal query parameter using the given
* {@link Type}.
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(int parameter, P[] values, Type<P> type);
/**
* Bind multiple arguments to the query parameter represented by the
* given {@link QueryParameter}.
* <p>
* The type of the parameter is inferred from the context in which it
* occurs, and from the type of the first given argument.
*
* @param parameter the parameter memento
* @param values a collection of arguments
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(QueryParameter<P> parameter, Collection<? extends P> values);
/**
* Bind multiple arguments to the query parameter represented by the
* given {@link QueryParameter} using the given {@link Class} reference
* to attempt to infer the {@link Type} to use. If unable to
* infer an appropriate {@link Type}, fall back to using
* {@link #setParameterList(String, Collection)}.
*
* @see #setParameterList(QueryParameter, java.util.Collection, Type)
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Class<P> javaType);
/**
* Bind multiple arguments to the query parameter represented by the
* given {@link QueryParameter}, using the given {@link Type}.
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Type<P> type);
/**
* Bind multiple arguments to the query parameter represented by the
* given {@link QueryParameter}.
* <p>
* The type of the parameter is inferred between the context in which it
* occurs, the type associated with the {@code QueryParameter} and the
* type of the first given argument.
*
* @param parameter the parameter memento
* @param values a collection of arguments
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(QueryParameter<P> parameter, P[] values);
/**
* Bind multiple arguments to the query parameter represented by the
* given {@link QueryParameter} using the given {@link Class} reference
* to attempt to infer the {@link Type} to use. If unable to
* infer an appropriate {@link Type}, fall back to using
* {@link #setParameterList(String, Collection)}.
*
* @see #setParameterList(QueryParameter, Object[], Type)
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(QueryParameter<P> parameter, P[] values, Class<P> javaType);
/**
* Bind multiple arguments to the query parameter represented by the
* given {@link QueryParameter}, using the given the {@link Type}.
*
* @apiNote This is used for binding a list of values to an expression
* such as {@code entity.field in (:values)}.
*
* @return {@code this}, for method chaining
*/
<P> CommonQueryContract setParameterList(QueryParameter<P> parameter, P[] values, Type<P> type);
/**
* Bind the property values of the given bean to named parameters of
* the query, matching property names with parameter names and mapping
* property types to Hibernate types using heuristics.
*
* @param bean any JavaBean or POJO
*
* @return {@code this}, for method chaining
*/
CommonQueryContract setProperties(Object bean);
/**
* Bind the values of the given {@code Map} to named parameters of the
* query, matching key names with parameter names and mapping value types
* to Hibernate types using heuristics.
*
* @param bean a {@link Map} of names to arguments
*
* @return {@code this}, for method chaining
*/
CommonQueryContract setProperties(@SuppressWarnings("rawtypes") Map bean);
}
| and |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerRequiredModifiersTest.java | {
"start": 8541,
"end": 8976
} | class ____ {
private final FluentLogger logger = FluentLogger.forEnclosingClass();
}
""")
.doTest();
}
@Test
public void positive_replacesInheritedLogger() {
refactoringHelper()
.addInputLines(
"in/Parent.java",
"""
import com.google.common.flogger.FluentLogger;
@SuppressWarnings("FloggerRequiredModifiers")
| Test |
java | grpc__grpc-java | okhttp/src/test/java/io/grpc/okhttp/HeadersTest.java | {
"start": 976,
"end": 2505
} | class ____ {
@Test
public void createRequestHeaders_sanitizes() {
Metadata metaData = new Metadata();
// Intentionally being explicit here rather than relying on any pre-defined lists of headers,
// since the goal of this test is to validate the correctness of such lists in the first place.
metaData.put(GrpcUtil.CONTENT_TYPE_KEY, "to-be-removed");
metaData.put(GrpcUtil.USER_AGENT_KEY, "to-be-removed");
metaData.put(GrpcUtil.TE_HEADER, "to-be-removed");
Metadata.Key<String> userKey = Metadata.Key.of("user-key", Metadata.ASCII_STRING_MARSHALLER);
String userValue = "user-value";
metaData.put(userKey, userValue);
String path = "//testServerice/test";
String authority = "localhost";
String userAgent = "useragent";
List<Header> headers = Headers.createRequestHeaders(
metaData,
path,
authority,
userAgent,
false,
false);
// 7 reserved headers, 1 user header
assertEquals(7 + 1, headers.size());
// Check the 3 reserved headers that are non pseudo
// Users can not create pseudo headers keys so no need to check for them here
assertThat(headers).contains(Headers.CONTENT_TYPE_HEADER);
assertThat(headers).contains(new Header(GrpcUtil.USER_AGENT_KEY.name(), userAgent));
assertThat(headers).contains(new Header(GrpcUtil.TE_HEADER.name(), GrpcUtil.TE_TRAILERS));
// Check the user header is in tact
assertThat(headers).contains(new Header(userKey.name(), userValue));
}
}
| HeadersTest |
java | netty__netty | transport/src/main/java/io/netty/channel/ChannelInboundHandler.java | {
"start": 804,
"end": 2862
} | interface ____ extends ChannelHandler {
/**
* The {@link Channel} of the {@link ChannelHandlerContext} was registered with its {@link EventLoop}
*/
void channelRegistered(ChannelHandlerContext ctx) throws Exception;
/**
* The {@link Channel} of the {@link ChannelHandlerContext} was unregistered from its {@link EventLoop}
*/
void channelUnregistered(ChannelHandlerContext ctx) throws Exception;
/**
* The {@link Channel} of the {@link ChannelHandlerContext} is now active
*/
void channelActive(ChannelHandlerContext ctx) throws Exception;
/**
* The {@link Channel} of the {@link ChannelHandlerContext} was registered is now inactive and reached its
* end of lifetime.
*/
void channelInactive(ChannelHandlerContext ctx) throws Exception;
/**
* Invoked when the current {@link Channel} has read a message from the peer.
*/
void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception;
/**
* Invoked when the last message read by the current read operation has been consumed by
* {@link #channelRead(ChannelHandlerContext, Object)}. If {@link ChannelOption#AUTO_READ} is off, no further
* attempt to read an inbound data from the current {@link Channel} will be made until
* {@link ChannelHandlerContext#read()} is called.
*/
void channelReadComplete(ChannelHandlerContext ctx) throws Exception;
/**
* Gets called if an user event was triggered.
*/
void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception;
/**
* Gets called once the writable state of a {@link Channel} changed. You can check the state with
* {@link Channel#isWritable()}.
*/
void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception;
/**
* Gets called if a {@link Throwable} was thrown.
*/
@Override
@SuppressWarnings("deprecation")
void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception;
}
| ChannelInboundHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ProtocolBufferOrdinalTest.java | {
"start": 1505,
"end": 1866
} | class ____ {
public static void checkCallOnOrdinal() {
// BUG: Diagnostic contains: ProtocolBufferOrdinal
TestEnum.TEST_ENUM_VAL.ordinal();
// BUG: Diagnostic contains: ProtocolBufferOrdinal
ProtoLiteEnum.FOO.ordinal();
}
| ProtocolBufferOrdinalPositiveCases |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/TomcatJdbcConnectionDetailsBeanPostProcessorTests.java | {
"start": 1130,
"end": 2049
} | class ____ {
@Test
@SuppressWarnings("unchecked")
void setUsernamePasswordUrlAndDriverClassName() {
DataSource dataSource = new DataSource();
dataSource.setUrl("will-be-overwritten");
dataSource.setUsername("will-be-overwritten");
dataSource.setPassword("will-be-overwritten");
dataSource.setDriverClassName("will-be-overwritten");
new TomcatJdbcConnectionDetailsBeanPostProcessor(mock(ObjectProvider.class)).processDataSource(dataSource,
new TestJdbcConnectionDetails());
assertThat(dataSource.getUrl()).isEqualTo("jdbc:customdb://customdb.example.com:12345/database-1");
assertThat(dataSource.getUsername()).isEqualTo("user-1");
assertThat(dataSource.getPoolProperties().getPassword()).isEqualTo("password-1");
assertThat(dataSource.getPoolProperties().getDriverClassName())
.isEqualTo(DatabaseDriver.POSTGRESQL.getDriverClassName());
}
}
| TomcatJdbcConnectionDetailsBeanPostProcessorTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/sps/StoragePolicySatisfier.java | {
"start": 4428,
"end": 34257
} | enum ____ {
// Represents that, the analysis skipped due to some conditions. A such
// condition is if block collection is in incomplete state.
ANALYSIS_SKIPPED_FOR_RETRY,
// Represents that few or all blocks found respective target to do
// the storage movement.
BLOCKS_TARGETS_PAIRED,
// Represents that none of the blocks found respective target to do
// the storage movement.
NO_BLOCKS_TARGETS_PAIRED,
// Represents that, none of the blocks found for block storage movements.
BLOCKS_ALREADY_SATISFIED,
// Represents that, the analysis skipped due to some conditions.
// Example conditions are if no blocks really exists in block collection
// or
// if analysis is not required on ec files with unsuitable storage
// policies
BLOCKS_TARGET_PAIRING_SKIPPED,
// Represents that, All the reported blocks are satisfied the policy but
// some of the blocks are low redundant.
FEW_LOW_REDUNDANCY_BLOCKS,
// Represents that, movement failures due to unexpected errors.
BLOCKS_FAILED_TO_MOVE
}
private Status status = null;
private Map<Block, Set<StorageTypeNodePair>> assignedBlocks = null;
BlocksMovingAnalysis(Status status,
Map<Block, Set<StorageTypeNodePair>> assignedBlocks) {
this.status = status;
this.assignedBlocks = assignedBlocks;
}
}
public void init(final Context context) {
this.ctxt = context;
this.storageMovementNeeded = new BlockStorageMovementNeeded(context);
this.storageMovementsMonitor = new BlockStorageMovementAttemptedItems(
this, storageMovementNeeded, context);
this.spsWorkMultiplier = getSPSWorkMultiplier(getConf());
this.blockMovementMaxRetry = getConf().getInt(
DFSConfigKeys.DFS_STORAGE_POLICY_SATISFIER_MAX_RETRY_ATTEMPTS_KEY,
DFSConfigKeys.DFS_STORAGE_POLICY_SATISFIER_MAX_RETRY_ATTEMPTS_DEFAULT);
}
/**
* Start storage policy satisfier demon thread. Also start block storage
* movements monitor for retry the attempts if needed.
*/
@Override
public synchronized void start(StoragePolicySatisfierMode serviceMode) {
if (serviceMode == StoragePolicySatisfierMode.NONE) {
LOG.error("Can't start StoragePolicySatisfier for the given mode:{}",
serviceMode);
return;
}
LOG.info("Starting {} StoragePolicySatisfier.",
StringUtils.toLowerCase(serviceMode.toString()));
isRunning = true;
storagePolicySatisfierThread = new Daemon(this);
storagePolicySatisfierThread.setName("StoragePolicySatisfier");
storagePolicySatisfierThread.start();
this.storageMovementsMonitor.start();
this.storageMovementNeeded.activate();
dnCacheMgr = new DatanodeCacheManager(conf);
}
@Override
public synchronized void stop(boolean forceStop) {
isRunning = false;
if (storagePolicySatisfierThread == null) {
return;
}
storageMovementNeeded.close();
storagePolicySatisfierThread.interrupt();
this.storageMovementsMonitor.stop();
if (forceStop) {
storageMovementNeeded.clearQueuesWithNotification();
} else {
LOG.info("Stopping StoragePolicySatisfier.");
}
}
@Override
public synchronized void stopGracefully() {
if (isRunning) {
stop(false);
}
if (this.storageMovementsMonitor != null) {
this.storageMovementsMonitor.stopGracefully();
}
if (storagePolicySatisfierThread != null) {
try {
storagePolicySatisfierThread.join(3000);
} catch (InterruptedException ie) {
if (LOG.isDebugEnabled()) {
LOG.debug("Interrupted Exception while waiting to join sps thread,"
+ " ignoring it", ie);
}
}
}
}
@Override
public boolean isRunning() {
return isRunning;
}
@Override
public void run() {
while (isRunning) {
// Check if dependent service is running
if (!ctxt.isRunning()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Upstream service is down, skipping the sps work.");
}
continue;
}
ItemInfo itemInfo = null;
try {
boolean retryItem = false;
if (!ctxt.isInSafeMode()) {
itemInfo = storageMovementNeeded.get();
if (itemInfo != null) {
if(itemInfo.getRetryCount() >= blockMovementMaxRetry){
LOG.info("Failed to satisfy the policy after "
+ blockMovementMaxRetry + " retries. Removing inode "
+ itemInfo.getFile() + " from the queue");
storageMovementNeeded.removeItemTrackInfo(itemInfo, false);
continue;
}
long trackId = itemInfo.getFile();
BlocksMovingAnalysis status = null;
BlockStoragePolicy existingStoragePolicy;
// TODO: presently, context internally acquire the lock
// and returns the result. Need to discuss to move the lock outside?
HdfsFileStatus fileStatus = ctxt.getFileInfo(trackId);
// Check path existence.
if (fileStatus == null || fileStatus.isDir()) {
// File doesn't exists (maybe got deleted) or its a directory,
// just remove trackId from the queue
storageMovementNeeded.removeItemTrackInfo(itemInfo, true);
} else {
byte existingStoragePolicyID = fileStatus.getStoragePolicy();
existingStoragePolicy = ctxt
.getStoragePolicy(existingStoragePolicyID);
HdfsLocatedFileStatus file = (HdfsLocatedFileStatus) fileStatus;
status = analyseBlocksStorageMovementsAndAssignToDN(file,
existingStoragePolicy);
switch (status.status) {
// Just add to monitor, so it will be retried after timeout
case ANALYSIS_SKIPPED_FOR_RETRY:
// Just add to monitor, so it will be tracked for report and
// be removed on storage movement attempt finished report.
case BLOCKS_TARGETS_PAIRED:
if (LOG.isDebugEnabled()) {
LOG.debug("Block analysis status:{} for the file id:{}."
+ " Adding to attempt monitor queue for the storage "
+ "movement attempt finished report",
status.status, fileStatus.getFileId());
}
this.storageMovementsMonitor.add(itemInfo.getStartPath(),
itemInfo.getFile(), monotonicNow(), status.assignedBlocks,
itemInfo.getRetryCount());
break;
case NO_BLOCKS_TARGETS_PAIRED:
if (LOG.isDebugEnabled()) {
LOG.debug("Adding trackID:{} for the file id:{} back to"
+ " retry queue as none of the blocks found its eligible"
+ " targets.", trackId, fileStatus.getFileId());
}
retryItem = true;
break;
case FEW_LOW_REDUNDANCY_BLOCKS:
if (LOG.isDebugEnabled()) {
LOG.debug("Adding trackID:{} for the file id:{} back to "
+ "retry queue as some of the blocks are low redundant.",
trackId, fileStatus.getFileId());
}
retryItem = true;
break;
case BLOCKS_FAILED_TO_MOVE:
if (LOG.isDebugEnabled()) {
LOG.debug("Adding trackID:{} for the file id:{} back to "
+ "retry queue as some of the blocks movement failed.",
trackId, fileStatus.getFileId());
}
retryItem = true;
break;
// Just clean Xattrs
case BLOCKS_TARGET_PAIRING_SKIPPED:
case BLOCKS_ALREADY_SATISFIED:
default:
LOG.info("Block analysis status:{} for the file id:{}."
+ " So, Cleaning up the Xattrs.", status.status,
fileStatus.getFileId());
storageMovementNeeded.removeItemTrackInfo(itemInfo, true);
break;
}
}
}
} else {
LOG.info("Namenode is in safemode. It will retry again.");
Thread.sleep(3000);
}
int numLiveDn = ctxt.getNumLiveDataNodes();
if (storageMovementNeeded.size() == 0
|| blockCount > (numLiveDn * spsWorkMultiplier)) {
Thread.sleep(3000);
blockCount = 0L;
}
if (retryItem) {
this.storageMovementNeeded.add(itemInfo);
}
} catch (IOException e) {
LOG.error("Exception during StoragePolicySatisfier execution - "
+ "will continue next cycle", e);
// Since it could not finish this item in previous iteration due to IOE,
// just try again.
this.storageMovementNeeded.add(itemInfo);
} catch (Throwable t) {
synchronized (this) {
if (isRunning) {
isRunning = false;
if (t instanceof InterruptedException) {
LOG.info("Stopping StoragePolicySatisfier.", t);
} else {
LOG.error("StoragePolicySatisfier thread received "
+ "runtime exception.", t);
}
// Stopping monitor thread and clearing queues as well
this.clearQueues();
this.storageMovementsMonitor.stopGracefully();
}
}
}
}
}
private BlocksMovingAnalysis analyseBlocksStorageMovementsAndAssignToDN(
HdfsLocatedFileStatus fileInfo,
BlockStoragePolicy existingStoragePolicy) throws IOException {
BlocksMovingAnalysis.Status status =
BlocksMovingAnalysis.Status.BLOCKS_ALREADY_SATISFIED;
final ErasureCodingPolicy ecPolicy = fileInfo.getErasureCodingPolicy();
final LocatedBlocks locatedBlocks = fileInfo.getLocatedBlocks();
final boolean lastBlkComplete = locatedBlocks.isLastBlockComplete();
if (!lastBlkComplete) {
// Postpone, currently file is under construction
LOG.info("File: {} is under construction. So, postpone"
+ " this to the next retry iteration", fileInfo.getFileId());
return new BlocksMovingAnalysis(
BlocksMovingAnalysis.Status.ANALYSIS_SKIPPED_FOR_RETRY,
new HashMap<>());
}
List<LocatedBlock> blocks = locatedBlocks.getLocatedBlocks();
if (blocks.size() == 0) {
LOG.info("File: {} is not having any blocks."
+ " So, skipping the analysis.", fileInfo.getFileId());
return new BlocksMovingAnalysis(
BlocksMovingAnalysis.Status.BLOCKS_TARGET_PAIRING_SKIPPED,
new HashMap<>());
}
List<BlockMovingInfo> blockMovingInfos = new ArrayList<BlockMovingInfo>();
boolean hasLowRedundancyBlocks = false;
int replication = fileInfo.getReplication();
DatanodeMap liveDns = dnCacheMgr.getLiveDatanodeStorageReport(ctxt);
for (int i = 0; i < blocks.size(); i++) {
LocatedBlock blockInfo = blocks.get(i);
// Block is considered as low redundancy when the block locations array
// length is less than expected replication factor. If any of the block is
// low redundant, then hasLowRedundancyBlocks will be marked as true.
hasLowRedundancyBlocks |= isLowRedundancyBlock(blockInfo, replication,
ecPolicy);
List<StorageType> expectedStorageTypes;
if (blockInfo.isStriped()) {
if (ErasureCodingPolicyManager
.checkStoragePolicySuitableForECStripedMode(
existingStoragePolicy.getId())) {
expectedStorageTypes = existingStoragePolicy
.chooseStorageTypes((short) blockInfo.getLocations().length);
} else {
// Currently we support only limited policies (HOT, COLD, ALLSSD)
// for EC striped mode files. SPS will ignore to move the blocks if
// the storage policy is not in EC Striped mode supported policies
LOG.warn("The storage policy " + existingStoragePolicy.getName()
+ " is not suitable for Striped EC files. "
+ "So, ignoring to move the blocks");
return new BlocksMovingAnalysis(
BlocksMovingAnalysis.Status.BLOCKS_TARGET_PAIRING_SKIPPED,
new HashMap<>());
}
} else {
expectedStorageTypes = existingStoragePolicy
.chooseStorageTypes(fileInfo.getReplication());
}
List<StorageType> existing = new LinkedList<StorageType>(
Arrays.asList(blockInfo.getStorageTypes()));
if (!removeOverlapBetweenStorageTypes(expectedStorageTypes,
existing, true)) {
boolean blocksPaired = computeBlockMovingInfos(blockMovingInfos,
blockInfo, expectedStorageTypes, existing, blockInfo.getLocations(),
liveDns, ecPolicy);
if (blocksPaired) {
status = BlocksMovingAnalysis.Status.BLOCKS_TARGETS_PAIRED;
} else if (status !=
BlocksMovingAnalysis.Status.BLOCKS_TARGETS_PAIRED) {
// Check if the previous block was successfully paired. Here the
// status will set to NO_BLOCKS_TARGETS_PAIRED only when none of the
// blocks of a file found its eligible targets to satisfy the storage
// policy.
status = BlocksMovingAnalysis.Status.NO_BLOCKS_TARGETS_PAIRED;
}
}
}
// If there is no block paired and few blocks are low redundant, so marking
// the status as FEW_LOW_REDUNDANCY_BLOCKS.
if (hasLowRedundancyBlocks
&& status != BlocksMovingAnalysis.Status.BLOCKS_TARGETS_PAIRED) {
status = BlocksMovingAnalysis.Status.FEW_LOW_REDUNDANCY_BLOCKS;
}
Map<Block, Set<StorageTypeNodePair>> assignedBlocks = new HashMap<>();
for (BlockMovingInfo blkMovingInfo : blockMovingInfos) {
// Check for at least one block storage movement has been chosen
try {
ctxt.submitMoveTask(blkMovingInfo);
LOG.debug("BlockMovingInfo: {}", blkMovingInfo);
StorageTypeNodePair nodeStorage = new StorageTypeNodePair(
blkMovingInfo.getTargetStorageType(), blkMovingInfo.getTarget());
Set<StorageTypeNodePair> nodesWithStorage = assignedBlocks
.get(blkMovingInfo.getBlock());
if (nodesWithStorage == null) {
nodesWithStorage = new HashSet<>();
assignedBlocks.put(blkMovingInfo.getBlock(), nodesWithStorage);
}
nodesWithStorage.add(nodeStorage);
blockCount++;
} catch (IOException e) {
LOG.warn("Exception while scheduling movement task", e);
// failed to move the block.
status = BlocksMovingAnalysis.Status.BLOCKS_FAILED_TO_MOVE;
}
}
return new BlocksMovingAnalysis(status, assignedBlocks);
}
/**
 * The given block is considered as low redundancy when the block locations
 * length is less than expected replication factor. For EC blocks, redundancy
 * is the summation of data + parity blocks.
 *
 * @param blockInfo
 *          block
 * @param replication
 *          replication factor of the given file block
 * @param ecPolicy
 *          erasure coding policy of the given file block
 * @return true if the given block is low redundant.
 */
private boolean isLowRedundancyBlock(LocatedBlock blockInfo, int replication,
    ErasureCodingPolicy ecPolicy) {
  if (blockInfo.isStriped()) {
    // For EC blocks, redundancy is the summation of data + parity blocks.
    replication = ecPolicy.getNumDataUnits() + ecPolicy.getNumParityUnits();
  }
  // The block is low redundant when it has fewer reported locations than the
  // expected replication factor. (The former `cond ? true : false` ternary
  // and dead pre-initialization were removed; behavior is unchanged.)
  return blockInfo.getLocations().length < replication;
}
/**
 * Compute the list of block moving information corresponding to the given
 * blockId. This will check that each block location of the given block is
 * satisfying the expected storage policy. If block location is not satisfied
 * the policy then find out the target node with the expected storage type to
 * satisfy the storage policy.
 *
 * @param blockMovingInfos
 *          - list of block source and target node pair
 * @param blockInfo
 *          - block details
 * @param expectedStorageTypes
 *          - list of expected storage type to satisfy the storage policy
 * @param existing
 *          - list to get existing storage types
 * @param storages
 *          - available storages
 * @param liveDns
 *          - live datanodes which can be used as targets
 * @param ecPolicy
 *          - ec policy of sps invoked file
 * @return false if some of the block locations failed to find target node to
 *         satisfy the storage policy, true otherwise
 */
private boolean computeBlockMovingInfos(
    List<BlockMovingInfo> blockMovingInfos, LocatedBlock blockInfo,
    List<StorageType> expectedStorageTypes, List<StorageType> existing,
    DatanodeInfo[] storages, DatanodeMap liveDns,
    ErasureCodingPolicy ecPolicy) {
  // True when the policy is already satisfied (nothing left after removing
  // the overlap) or when every remaining expected type found a target.
  boolean foundMatchingTargetNodesForBlock = true;
  if (!removeOverlapBetweenStorageTypes(expectedStorageTypes,
      existing, true)) {
    List<StorageTypeNodePair> sourceWithStorageMap =
        new ArrayList<StorageTypeNodePair>();
    List<DatanodeInfo> existingBlockStorages = new ArrayList<DatanodeInfo>(
        Arrays.asList(storages));
    // Add existing storages into exclude nodes to avoid choosing this as
    // remote target later.
    List<DatanodeInfo> excludeNodes = new ArrayList<>(existingBlockStorages);
    // if expected type exists in source node already, local movement would be
    // possible, so lets find such sources first.
    Iterator<DatanodeInfo> iterator = existingBlockStorages.iterator();
    while (iterator.hasNext()) {
      DatanodeInfoWithStorage dnInfo = (DatanodeInfoWithStorage) iterator
          .next();
      if (checkSourceAndTargetTypeExists(dnInfo, existing,
          expectedStorageTypes, liveDns)) {
        sourceWithStorageMap
            .add(new StorageTypeNodePair(dnInfo.getStorageType(), dnInfo));
        iterator.remove();
        existing.remove(dnInfo.getStorageType());
      }
    }
    // Let's find sources for existing types left.
    for (StorageType existingType : existing) {
      iterator = existingBlockStorages.iterator();
      while (iterator.hasNext()) {
        DatanodeInfoWithStorage dnStorageInfo =
            (DatanodeInfoWithStorage) iterator.next();
        StorageType storageType = dnStorageInfo.getStorageType();
        if (storageType == existingType) {
          iterator.remove();
          sourceWithStorageMap.add(new StorageTypeNodePair(storageType,
              dnStorageInfo));
          break;
        }
      }
    }
    EnumMap<StorageType, List<DatanodeWithStorage.StorageDetails>> targetDns =
        findTargetsForExpectedStorageTypes(expectedStorageTypes, liveDns);
    // BUGFIX: this previously used "|=". Since the flag starts as true,
    // "|=" could never turn it false, so the method always reported success
    // even when findSourceAndTargetToMove() failed to pair any targets.
    // Direct assignment makes the return value honour the documented
    // contract ("false if some of the block locations failed...").
    foundMatchingTargetNodesForBlock = findSourceAndTargetToMove(
        blockMovingInfos, blockInfo, sourceWithStorageMap,
        expectedStorageTypes, targetDns,
        ecPolicy, excludeNodes);
  }
  return foundMatchingTargetNodesForBlock;
}
/**
 * Find the good target node for each source node for which block storages was
 * misplaced.
 *
 * @param blockMovingInfos
 *          - list of block source and target node pair
 * @param blockInfo
 *          - Block
 * @param sourceWithStorageList
 *          - Source Datanode with storages list
 * @param expectedTypes
 *          - Expecting storages to move
 * @param targetDns
 *          - Available DNs for expected storage types
 * @param ecPolicy
 *          - erasure coding policy of sps invoked file
 * @param excludeNodes
 *          - existing source nodes, which has replica copy
 * @return false if some of the block locations failed to find target node to
 *         satisfy the storage policy
 */
private boolean findSourceAndTargetToMove(
    List<BlockMovingInfo> blockMovingInfos, LocatedBlock blockInfo,
    List<StorageTypeNodePair> sourceWithStorageList,
    List<StorageType> expectedTypes,
    EnumMap<StorageType, List<DatanodeWithStorage.StorageDetails>> targetDns,
    ErasureCodingPolicy ecPolicy, List<DatanodeInfo> excludeNodes) {
  boolean foundMatchingTargetNodesForBlock = true;
  // Looping over all the source node locations and choose the target
  // storage within same node if possible. This is done separately to
  // avoid choosing a target which already has this block.
  for (int i = 0; i < sourceWithStorageList.size(); i++) {
    StorageTypeNodePair existingTypeNodePair = sourceWithStorageList.get(i);
    // Check whether the block replica is already placed in the expected
    // storage type in this source datanode.
    if (!expectedTypes.contains(existingTypeNodePair.storageType)) {
      StorageTypeNodePair chosenTarget = chooseTargetTypeInSameNode(blockInfo,
          existingTypeNodePair.dn, targetDns, expectedTypes);
      if (chosenTarget != null) {
        if (blockInfo.isStriped()) {
          buildStripedBlockMovingInfos(blockInfo, existingTypeNodePair.dn,
              existingTypeNodePair.storageType, chosenTarget.dn,
              chosenTarget.storageType, blockMovingInfos,
              ecPolicy);
        } else {
          buildContinuousBlockMovingInfos(blockInfo, existingTypeNodePair.dn,
              existingTypeNodePair.storageType, chosenTarget.dn,
              chosenTarget.storageType, blockMovingInfos);
        }
        expectedTypes.remove(chosenTarget.storageType);
      }
    }
  }
  // If all the sources and targets are paired within same node, then simply
  // return.
  if (expectedTypes.isEmpty()) {
    return foundMatchingTargetNodesForBlock;
  }
  // Looping over all the source node locations. Choose a remote target
  // storage node if it was not found out within same node.
  for (int i = 0; i < sourceWithStorageList.size(); i++) {
    StorageTypeNodePair existingTypeNodePair = sourceWithStorageList.get(i);
    StorageTypeNodePair chosenTarget = null;
    // Chosen the target storage within same datanode. So just skipping this
    // source node.
    if (checkIfAlreadyChosen(blockMovingInfos, existingTypeNodePair.dn)) {
      continue;
    }
    // First, try nodes in the same node group (only meaningful on
    // node-group-aware topologies). NOTE: the former redundant
    // "chosenTarget == null &&" guard was dropped here because chosenTarget
    // is always null at this point (assigned just above).
    if (dnCacheMgr.getCluster().isNodeGroupAware()) {
      chosenTarget = chooseTarget(blockInfo, existingTypeNodePair.dn,
          expectedTypes, Matcher.SAME_NODE_GROUP, targetDns,
          excludeNodes);
    }
    // Then, match nodes on the same rack
    if (chosenTarget == null) {
      chosenTarget =
          chooseTarget(blockInfo, existingTypeNodePair.dn, expectedTypes,
              Matcher.SAME_RACK, targetDns, excludeNodes);
    }
    // Finally, fall back to any other node in the cluster.
    if (chosenTarget == null) {
      chosenTarget =
          chooseTarget(blockInfo, existingTypeNodePair.dn, expectedTypes,
              Matcher.ANY_OTHER, targetDns, excludeNodes);
    }
    if (chosenTarget != null) {
      if (blockInfo.isStriped()) {
        buildStripedBlockMovingInfos(blockInfo, existingTypeNodePair.dn,
            existingTypeNodePair.storageType, chosenTarget.dn,
            chosenTarget.storageType, blockMovingInfos, ecPolicy);
      } else {
        buildContinuousBlockMovingInfos(blockInfo, existingTypeNodePair.dn,
            existingTypeNodePair.storageType, chosenTarget.dn,
            chosenTarget.storageType, blockMovingInfos);
      }
      expectedTypes.remove(chosenTarget.storageType);
      // Prevent the same target from being picked twice for this block.
      excludeNodes.add(chosenTarget.dn);
    } else {
      LOG.warn(
          "Failed to choose target datanode for the required"
              + " storage types {}, block:{}, existing storage type:{}",
          expectedTypes, blockInfo, existingTypeNodePair.storageType);
    }
  }
  // Any expected type left over means at least one location stayed unpaired.
  if (!expectedTypes.isEmpty()) {
    foundMatchingTargetNodesForBlock = false;
  }
  return foundMatchingTargetNodesForBlock;
}
/**
 * Returns true when the given datanode is already recorded as the source of
 * one of the scheduled block moves, i.e. a target was chosen for it earlier.
 *
 * @param blockMovingInfos scheduled moves for the current block
 * @param dn source datanode to look up
 * @return true if a move sourced from {@code dn} already exists
 */
private boolean checkIfAlreadyChosen(List<BlockMovingInfo> blockMovingInfos,
    DatanodeInfo dn) {
  return blockMovingInfos.stream()
      .anyMatch(movingInfo -> movingInfo.getSource().equals(dn));
}
/**
 * Appends a single move record for a contiguous (non-striped) block replica:
 * move the local block from the source node/storage to the target
 * node/storage.
 */
private void buildContinuousBlockMovingInfos(LocatedBlock blockInfo,
    DatanodeInfo sourceNode, StorageType sourceStorageType,
    DatanodeInfo targetNode, StorageType targetStorageType,
    List<BlockMovingInfo> blkMovingInfos) {
  // A contiguous replica maps to exactly one move of the underlying block.
  Block localBlock = ExtendedBlock.getLocalBlock(blockInfo.getBlock());
  blkMovingInfos.add(new BlockMovingInfo(localBlock, sourceNode, targetNode,
      sourceStorageType, targetStorageType));
}
/**
 * Appends move records for the internal blocks of a striped (EC) block group
 * that live on the given source node. For each block index hosted by
 * {@code sourceNode}, the corresponding internal block is reconstructed
 * (id = group id + index, length per the EC layout) and scheduled for a move
 * to the chosen target node/storage.
 */
private void buildStripedBlockMovingInfos(LocatedBlock blockInfo,
    DatanodeInfo sourceNode, StorageType sourceStorageType,
    DatanodeInfo targetNode, StorageType targetStorageType,
    List<BlockMovingInfo> blkMovingInfos, ErasureCodingPolicy ecPolicy) {
  // For a striped block, it needs to construct internal block at the given
  // index of a block group. Here it is iterating over all the block indices
  // and construct internal blocks which can be then considered for block
  // movement.
  LocatedStripedBlock stripedBlock = (LocatedStripedBlock) blockInfo;
  byte[] blockIndices = stripedBlock.getBlockIndices();
  DatanodeInfo[] blockLocations = stripedBlock.getLocations();
  for (int idx = 0; idx < blockIndices.length; idx++) {
    byte blockIndex = blockIndices[idx];
    // Skip invalid indices and replicas hosted on other nodes; only the
    // given source node's internal blocks are scheduled here.
    if (blockIndex < 0 || !sourceNode.equals(blockLocations[idx])) {
      continue;
    }
    // construct internal block
    ExtendedBlock groupBlock = stripedBlock.getBlock();
    long internalLength = StripedBlockUtil.getInternalBlockLength(
        groupBlock.getNumBytes(), ecPolicy, blockIndex);
    Block internalBlock = new Block(ExtendedBlock.getLocalBlock(groupBlock));
    internalBlock.setBlockId(internalBlock.getBlockId() + blockIndex);
    internalBlock.setNumBytes(internalLength);
    blkMovingInfos.add(new BlockMovingInfo(internalBlock, sourceNode,
        targetNode, sourceStorageType, targetStorageType));
  }
}
/**
 * Choose the target storage within same datanode if possible.
 *
 * @param blockInfo
 *          - block info
 * @param source
 *          - source datanode
 * @param targetDns
 *          - set of target datanodes with its respective storage type
 * @param targetTypes
 *          - list of target storage types
 * @return a (storage type, node) pair on the source node with enough room for
 *         the block, or null when no local storage qualifies
 */
private StorageTypeNodePair chooseTargetTypeInSameNode(LocatedBlock blockInfo,
    DatanodeInfo source,
    EnumMap<StorageType, List<DatanodeWithStorage.StorageDetails>> targetDns,
    List<StorageType> targetTypes) {
  for (StorageType candidateType : targetTypes) {
    List<DatanodeWithStorage.StorageDetails> storagesOfType =
        targetDns.get(candidateType);
    if (storagesOfType == null) {
      continue;
    }
    for (DatanodeWithStorage.StorageDetails storage : storagesOfType) {
      // Only storages hosted on the source node itself are candidates here.
      if (!storage.getDatanodeInfo().equals(source)) {
        continue;
      }
      // Good target with enough space to write the given block size.
      if (storage.hasSpaceForScheduling(blockInfo.getBlockSize())) {
        storage.incScheduledSize(blockInfo.getBlockSize());
        return new StorageTypeNodePair(candidateType, source);
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug("Datanode:{} storage type:{} doesn't have sufficient "
                + "space:{} to move the target block size:{}",
            source, candidateType, storage, blockInfo.getBlockSize());
      }
    }
  }
  return null;
}
/**
 * Chooses a remote target (node + storage type) for the given block among the
 * datanodes that offer one of the required storage types. Candidates are
 * shuffled for spread, must not be in {@code excludeNodes}, must satisfy the
 * topology {@code matcher} relative to {@code source}, and must have room for
 * the block; the first qualifying storage has the block size scheduled
 * against it.
 *
 * @return the chosen (storage type, node) pair, or null when nothing matches
 */
private StorageTypeNodePair chooseTarget(LocatedBlock block,
    DatanodeInfo source, List<StorageType> targetTypes, Matcher matcher,
    EnumMap<StorageType, List<DatanodeWithStorage.StorageDetails>>
    locsForExpectedStorageTypes, List<DatanodeInfo> excludeNodes) {
  for (StorageType candidateType : targetTypes) {
    List<DatanodeWithStorage.StorageDetails> candidateStorages =
        locsForExpectedStorageTypes.get(candidateType);
    if (candidateStorages == null || candidateStorages.isEmpty()) {
      continue; // no target nodes with the required storage type.
    }
    // Randomize candidate order so load spreads across equivalent targets.
    Collections.shuffle(candidateStorages);
    for (DatanodeWithStorage.StorageDetails storage : candidateStorages) {
      DatanodeInfo candidateNode = storage.getDatanodeInfo();
      if (excludeNodes.contains(candidateNode)
          || !matcher.match(dnCacheMgr.getCluster(), source, candidateNode)) {
        continue;
      }
      // Good target with enough space to write the given block size.
      if (storage.hasSpaceForScheduling(block.getBlockSize())) {
        storage.incScheduledSize(block.getBlockSize());
        return new StorageTypeNodePair(candidateType, candidateNode);
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug("Datanode:{} storage type:{} doesn't have sufficient "
                + "space:{} to move the target block size:{}",
            candidateNode, candidateType, storage, block.getBlockSize());
      }
    }
  }
  return null;
}
/**
* Keeps datanode with its respective storage type.
*/
static final | Status |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/test/java/org/springframework/boot/buildpack/platform/socket/FileDescriptorTests.java | {
"start": 935,
"end": 2887
} | class ____ {
private final int sourceHandle = 123;
private int closedHandle;
@Test
void acquireReturnsHandle() throws Exception {
FileDescriptor descriptor = new FileDescriptor(this.sourceHandle, this::close);
try (Handle handle = descriptor.acquire()) {
assertThat(handle.intValue()).isEqualTo(this.sourceHandle);
assertThat(handle.isClosed()).isFalse();
}
}
@Test
void acquireWhenClosedReturnsClosedHandle() throws Exception {
FileDescriptor descriptor = new FileDescriptor(this.sourceHandle, this::close);
descriptor.close();
try (Handle handle = descriptor.acquire()) {
assertThat(handle.intValue()).isEqualTo(-1);
assertThat(handle.isClosed()).isTrue();
}
}
@Test
void acquireWhenPendingCloseReturnsClosedHandle() throws Exception {
FileDescriptor descriptor = new FileDescriptor(this.sourceHandle, this::close);
try (Handle handle1 = descriptor.acquire()) {
descriptor.close();
try (Handle handle2 = descriptor.acquire()) {
assertThat(handle2.intValue()).isEqualTo(-1);
assertThat(handle2.isClosed()).isTrue();
}
}
}
@Test
void finalizeTriggersClose() {
FileDescriptor descriptor = new FileDescriptor(this.sourceHandle, this::close);
descriptor.close();
assertThat(this.closedHandle).isEqualTo(this.sourceHandle);
}
@Test
void closeWhenHandleAcquiredClosesOnRelease() throws Exception {
FileDescriptor descriptor = new FileDescriptor(this.sourceHandle, this::close);
try (Handle handle = descriptor.acquire()) {
descriptor.close();
assertThat(this.closedHandle).isZero();
}
assertThat(this.closedHandle).isEqualTo(this.sourceHandle);
}
@Test
void closeWhenHandleNotAcquiredClosesImmediately() {
FileDescriptor descriptor = new FileDescriptor(this.sourceHandle, this::close);
descriptor.close();
assertThat(this.closedHandle).isEqualTo(this.sourceHandle);
}
private void close(int handle) {
this.closedHandle = handle;
}
}
| FileDescriptorTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestHdfsManifestToResourcesPlugin.java | {
"start": 2037,
"end": 2191
} | class ____ the hdfs manifest to resources plugin used by the
* RuncContainerRuntime to map an image manifest into a list of local resources.
*/
public | tests |
java | apache__camel | components/camel-wasm/src/main/java/org/apache/camel/wasm/WasmSupport.java | {
"start": 1093,
"end": 2107
} | class ____ {
public static final ObjectMapper MAPPER = JsonMapper.builder().build();
private WasmSupport() {
}
public static byte[] serialize(Exchange exchange) throws Exception {
Wrapper env = new Wrapper();
env.body = exchange.getMessage().getBody(byte[].class);
for (String headerName : exchange.getMessage().getHeaders().keySet()) {
env.headers.put(headerName, exchange.getMessage().getHeader(headerName, String.class));
}
return MAPPER.writeValueAsBytes(env);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public static void deserialize(byte[] in, Exchange out) throws Exception {
// cleanup
out.getMessage().getHeaders().clear();
out.getMessage().setBody(null);
Wrapper w = MAPPER.readValue(in, Wrapper.class);
out.getMessage().setBody(w.body);
if (w.headers != null) {
out.getMessage().setHeaders((Map) w.headers);
}
}
public static | WasmSupport |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/highavailability/nonha/embedded/EmbeddedLeaderService.java | {
"start": 17258,
"end": 18761
} | class ____ implements LeaderElection {
final String componentId;
volatile LeaderContender contender;
volatile boolean isLeader;
volatile boolean running;
EmbeddedLeaderElection(String componentId) {
this.componentId = componentId;
}
@Override
public void startLeaderElection(LeaderContender contender) throws Exception {
checkNotNull(contender);
addContender(this, contender);
}
@Override
public void close() {
removeContender(this);
}
@Override
public CompletableFuture<Void> confirmLeadershipAsync(
UUID leaderSessionID, String leaderAddress) {
checkNotNull(leaderSessionID);
checkNotNull(leaderAddress);
return confirmLeader(this, leaderSessionID, leaderAddress);
}
@Override
public CompletableFuture<Boolean> hasLeadershipAsync(UUID leaderSessionId) {
return CompletableFuture.completedFuture(
isLeader && leaderSessionId.equals(currentLeaderSessionId));
}
void shutdown(Exception cause) {
if (running) {
running = false;
isLeader = false;
contender.revokeLeadership();
contender = null;
}
}
}
// ------------------------------------------------------------------------
private | EmbeddedLeaderElection |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/ClusterAllocationSimulationTests.java | {
"start": 24956,
"end": 27582
} | class ____ implements ClusterInfoService {
private final ClusterService clusterService;
private final Map<String, Long> nodeSizes = new HashMap<>();
private final Map<String, Long> indexSizes = new HashMap<>();
private TestClusterInfoService(ClusterService clusterService) {
this.clusterService = clusterService;
}
public void addNode(String nodeId, long size) {
nodeSizes.put(nodeId, size);
}
public void addIndex(String indexName, long size) {
indexSizes.put(indexName, size);
}
@Override
public ClusterInfo getClusterInfo() {
var state = clusterService.state();
var diskSpaceUsage = new HashMap<String, DiskUsage>();
for (DiscoveryNode node : state.nodes()) {
var nodeSize = nodeSizes.getOrDefault(node.getId(), 0L);
diskSpaceUsage.put(node.getId(), new DiskUsage(node.getId(), node.getName(), "/data", nodeSize, nodeSize));
}
var shardSizes = new HashMap<String, Long>();
var dataPath = new HashMap<ClusterInfo.NodeAndShard, String>();
for (IndexRoutingTable indexRoutingTable : state.getRoutingTable()) {
var shardRouting = indexRoutingTable.shard(0).primaryShard();
var shardSize = indexSizes.get(shardRouting.shardId().getIndexName());
if (shardSize == null) {
logger.error("Failed to find index [{}]", shardRouting.shardId().getIndexName());
continue;
}
if (shardRouting.unassigned()) {
continue;
}
diskSpaceUsage.compute(shardRouting.currentNodeId(), (k, currentUsage) -> {
if (currentUsage == null) {
logger.error("Failed to find node [{}]", k);
return null;
}
return currentUsage.copyWithFreeBytes(currentUsage.freeBytes() - shardSize);
});
shardSizes.put(ClusterInfo.shardIdentifierFromRouting(shardRouting), shardSize);
dataPath.put(new ClusterInfo.NodeAndShard(shardRouting.currentNodeId(), shardRouting.shardId()), "/data");
}
return ClusterInfo.builder()
.leastAvailableSpaceUsage(diskSpaceUsage)
.mostAvailableSpaceUsage(diskSpaceUsage)
.shardSizes(shardSizes)
.dataPath(dataPath)
.build();
}
}
}
| TestClusterInfoService |
java | apache__camel | components/camel-mllp/src/test/java/org/apache/camel/component/mllp/MllpTcpServerConsumerRequiredEndOfDataWithoutValidationTest.java | {
"start": 1161,
"end": 3461
} | class ____
extends TcpServerConsumerEndOfDataAndValidationTestSupport {
@Override
boolean validatePayload() {
return false;
}
@Override
boolean requireEndOfData() {
return true;
}
@Override
@Test
public void testInvalidMessage() {
assertDoesNotThrow(() -> runNthInvalidMessage());
}
@Override
@Test
public void testNthInvalidMessage() {
assertDoesNotThrow(() -> runNthInvalidMessage());
}
@Override
@Test
public void testMessageContainingEmbeddedStartOfBlock() {
expectedCompleteCount = 1;
assertDoesNotThrow(() -> runMessageContainingEmbeddedStartOfBlock());
}
@Override
@Test
public void testNthMessageContainingEmbeddedStartOfBlock() {
assertDoesNotThrow(() -> runNthMessageContainingEmbeddedStartOfBlock());
}
@Override
@Test
public void testMessageContainingEmbeddedEndOfBlock() {
setExpectedCounts();
NotifyBuilder done = new NotifyBuilder(context()).whenDone(1).create();
mllpClient.sendFramedData(
Hl7TestMessageGenerator.generateMessage().replaceFirst("PID", "PID" + MllpProtocolConstants.END_OF_BLOCK));
assertFalse(done.matches(5, TimeUnit.SECONDS), "Exchange should not have completed");
}
@Override
@Test
public void testInvalidMessageContainingEmbeddedEndOfBlock() {
expectedInvalidCount = 1;
assertDoesNotThrow(() -> runInvalidMessageContainingEmbeddedEndOfBlock());
}
@Override
@Test
public void testNthMessageContainingEmbeddedEndOfBlock() {
expectedInvalidCount = 1;
assertDoesNotThrow(() -> runNthMessageContainingEmbeddedEndOfBlock());
}
@Override
@Test
public void testInitialMessageWithoutEndOfDataByte() {
setExpectedCounts();
mllpClient.setSendEndOfData(false);
assertDoesNotThrow(() -> mllpClient.sendFramedData(Hl7TestMessageGenerator.generateMessage()));
}
@Override
@Test
public void testMessageWithoutEndOfDataByte() {
expectedCompleteCount = 1;
expectedInvalidCount = 1;
assertDoesNotThrow(() -> runMessageWithoutEndOfDataByte());
}
}
| MllpTcpServerConsumerRequiredEndOfDataWithoutValidationTest |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/impala/ast/ImpalaSQLPartitionValue.java | {
"start": 153,
"end": 1994
} | class ____ extends SQLPartitionValue {
private Integer leftBound;
private Integer rightBound;
private Operator leftOperator;
private Operator rightOperator;
public ImpalaSQLPartitionValue() {
super();
}
public void setOperator(Operator operator) {
this.operator = operator;
}
public Integer getLeftBound() {
return leftBound;
}
public void setLeftBound(Integer leftBound) {
this.leftBound = leftBound;
}
public Integer getRightBound() {
return rightBound;
}
public void setRightBound(Integer rightBound) {
this.rightBound = rightBound;
}
public String constructPartitionName() {
StringBuilder sb = new StringBuilder();
sb.append("partition_").append(leftBound != null ? leftBound.toString() : "")
.append("_").append(rightBound != null ? rightBound.toString() : "");
return sb.toString();
}
public Operator getLeftOperator() {
return leftOperator;
}
public void setLeftOperator(Operator leftOperator) {
this.leftOperator = leftOperator;
}
public Operator getRightOperator() {
return rightOperator;
}
public void setRightOperator(Operator rightOperator) {
this.rightOperator = rightOperator;
}
@Override
public ImpalaSQLPartitionValue clone() {
ImpalaSQLPartitionValue x = new ImpalaSQLPartitionValue();
x.setOperator(operator);
x.setLeftBound(leftBound);
x.setRightBound(rightBound);
x.setLeftOperator(leftOperator);
x.setRightOperator(rightOperator);
for (SQLExpr item : items) {
SQLExpr item2 = item.clone();
item2.setParent(x);
x.items.add(item2);
}
return x;
}
}
| ImpalaSQLPartitionValue |
java | apache__camel | components/camel-ai/camel-langchain4j-web-search/src/main/java/org/apache/camel/component/langchain4j/web/search/LangChain4jWebSearchProducer.java | {
"start": 1270,
"end": 5167
} | class ____ extends DefaultProducer {
private WebSearchEngine webSearchEngine;
public LangChain4jWebSearchProducer(LangChain4jWebSearchEndpoint endpoint) {
super(endpoint);
}
@Override
public LangChain4jWebSearchEndpoint getEndpoint() {
return (LangChain4jWebSearchEndpoint) super.getEndpoint();
}
@Override
public void process(Exchange exchange) throws Exception {
// check if there's a custom WebSearchRequest -- advanced
WebSearchRequest webSearchRequest = getEndpoint().getConfiguration().getWebSearchRequest();
// build a Web Search Request
if (webSearchRequest == null) {
final String searchTerms = exchange.getMessage().getMandatoryBody(String.class);
webSearchRequest = WebSearchRequest.builder()
.searchTerms(searchTerms)
.maxResults(getEndpoint().getConfiguration().getMaxResults())
.language(getEndpoint().getConfiguration().getLanguage())
.geoLocation(getEndpoint().getConfiguration().getGeoLocation())
.startPage(getEndpoint().getConfiguration().getStartPage())
.startIndex(getEndpoint().getConfiguration().getStartIndex())
.additionalParams(getEndpoint().getConfiguration().getAdditionalParams())
.build();
}
// perform the request
WebSearchResults webSearchResults = webSearchEngine.search(webSearchRequest);
// exrtact the list
List<WebSearchOrganicResult> resultList = webSearchResults.results();
// compute the response
computeResponse(resultList, exchange, webSearchRequest.maxResults());
}
@Override
protected void doStart() throws Exception {
super.doStart();
this.webSearchEngine = getEndpoint().getConfiguration().getWebSearchEngine();
ObjectHelper.notNull(webSearchEngine, "webSearchEngine");
}
/**
* Computes the response of the web search based on the configuration and input results.
*
* @param webSearchOrganicResults the list of WebSearchOrganicResult objects to process
* @param exchange the Apache Camel Exchange object
* @param maxResults maxResults
*/
private void computeResponse(List<WebSearchOrganicResult> webSearchOrganicResults, Exchange exchange, Integer maxResults) {
// Check if the input list is null or empty and handle it gracefully
if (webSearchOrganicResults == null || webSearchOrganicResults.isEmpty()) {
exchange.getIn().setBody(null);
return;
}
// return a single object as a response
if (maxResults == 1) {
switch (getEndpoint().getConfiguration().getResultType()) {
case LANGCHAIN4J_WEB_SEARCH_ORGANIC_RESULT -> exchange.getIn().setBody(webSearchOrganicResults.get(0));
case CONTENT -> exchange.getIn().setBody(webSearchOrganicResults.get(0).content());
case SNIPPET -> exchange.getIn().setBody(webSearchOrganicResults.get(0).snippet());
}
} else { // return a List of Objects as a response
switch (getEndpoint().getConfiguration().getResultType()) {
case LANGCHAIN4J_WEB_SEARCH_ORGANIC_RESULT -> exchange.getIn().setBody(webSearchOrganicResults);
case CONTENT -> exchange.getIn()
.setBody(webSearchOrganicResults.stream().map(WebSearchOrganicResult::content)
.collect(Collectors.toList()));
case SNIPPET -> exchange.getIn()
.setBody(webSearchOrganicResults.stream().map(WebSearchOrganicResult::snippet)
.collect(Collectors.toList()));
}
}
}
}
| LangChain4jWebSearchProducer |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/core/AuthenticatedPrincipal.java | {
"start": 1512,
"end": 1740
} | interface ____ {
/**
* Returns the name of the authenticated <code>Principal</code>. Never
* <code>null</code>.
* @return the name of the authenticated <code>Principal</code>
*/
String getName();
}
| AuthenticatedPrincipal |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationRowKey.java | {
"start": 3869,
"end": 9580
} | class ____ implements
KeyConverter<ApplicationRowKey>, KeyConverterToString<ApplicationRowKey> {
private final KeyConverter<String> appIDKeyConverter =
new AppIdKeyConverter();
/**
* Intended for use in ApplicationRowKey only.
*/
private ApplicationRowKeyConverter() {
}
/**
* Application row key is of the form
* clusterId!userName!flowName!flowRunId!appId with each segment separated
* by !. The sizes below indicate sizes of each one of these segements in
* sequence. clusterId, userName and flowName are strings. flowrunId is a
* long hence 8 bytes in size. app id is represented as 12 bytes with
* cluster timestamp part of appid takes 8 bytes(long) and seq id takes 4
* bytes(int). Strings are variable in size (i.e. end whenever separator is
* encountered). This is used while decoding and helps in determining where
* to split.
*/
private static final int[] SEGMENT_SIZES = {Separator.VARIABLE_SIZE,
Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
AppIdKeyConverter.getKeySize() };
/*
* (non-Javadoc)
*
* Encodes ApplicationRowKey object into a byte array with each
* component/field in ApplicationRowKey separated by Separator#QUALIFIERS.
* This leads to an application table row key of the form
* clusterId!userName!flowName!flowRunId!appId If flowRunId in passed
* ApplicationRowKey object is null (and the fields preceding it i.e.
* clusterId, userId and flowName are not null), this returns a row key
* prefix of the form clusterId!userName!flowName! and if appId in
* ApplicationRowKey is null (other 4 components all are not null), this
* returns a row key prefix of the form
* clusterId!userName!flowName!flowRunId! flowRunId is inverted while
* encoding as it helps maintain a descending order for row keys in the
* application table.
*
* @see
* org.apache.hadoop.yarn.server.timelineservice.storage.common
* .KeyConverter#encode(java.lang.Object)
*/
@Override
public byte[] encode(ApplicationRowKey rowKey) {
byte[] cluster =
Separator.encode(rowKey.getClusterId(), Separator.SPACE,
Separator.TAB, Separator.QUALIFIERS);
byte[] user =
Separator.encode(rowKey.getUserId(), Separator.SPACE, Separator.TAB,
Separator.QUALIFIERS);
byte[] flow =
Separator.encode(rowKey.getFlowName(), Separator.SPACE,
Separator.TAB, Separator.QUALIFIERS);
byte[] first = Separator.QUALIFIERS.join(cluster, user, flow);
// Note that flowRunId is a long, so we can't encode them all at the same
// time.
if (rowKey.getFlowRunId() == null) {
return Separator.QUALIFIERS.join(first, Separator.EMPTY_BYTES);
}
byte[] second =
Bytes.toBytes(LongConverter.invertLong(
rowKey.getFlowRunId()));
if (rowKey.getAppId() == null || rowKey.getAppId().isEmpty()) {
return Separator.QUALIFIERS.join(first, second, Separator.EMPTY_BYTES);
}
byte[] third = appIDKeyConverter.encode(rowKey.getAppId());
return Separator.QUALIFIERS.join(first, second, third);
}
/*
* (non-Javadoc)
*
* Decodes an application row key of the form
* clusterId!userName!flowName!flowRunId!appId represented in byte format
* and converts it into an ApplicationRowKey object.flowRunId is inverted
* while decoding as it was inverted while encoding.
*
* @see
* org.apache.hadoop.yarn.server.timelineservice.storage.common
* .KeyConverter#decode(byte[])
*/
@Override
public ApplicationRowKey decode(byte[] rowKey) {
byte[][] rowKeyComponents =
Separator.QUALIFIERS.split(rowKey, SEGMENT_SIZES);
if (rowKeyComponents.length != 5) {
throw new IllegalArgumentException("the row key is not valid for "
+ "an application");
}
String clusterId =
Separator.decode(Bytes.toString(rowKeyComponents[0]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
String userId =
Separator.decode(Bytes.toString(rowKeyComponents[1]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
String flowName =
Separator.decode(Bytes.toString(rowKeyComponents[2]),
Separator.QUALIFIERS, Separator.TAB, Separator.SPACE);
Long flowRunId =
LongConverter.invertLong(Bytes.toLong(rowKeyComponents[3]));
String appId = appIDKeyConverter.decode(rowKeyComponents[4]);
return new ApplicationRowKey(clusterId, userId, flowName, flowRunId,
appId);
}
@Override
public String encodeAsString(ApplicationRowKey key) {
if (key.clusterId == null || key.userId == null || key.flowName == null
|| key.flowRunId == null || key.appId == null) {
throw new IllegalArgumentException();
}
return TimelineReaderUtils
.joinAndEscapeStrings(new String[] {key.clusterId, key.userId,
key.flowName, key.flowRunId.toString(), key.appId});
}
@Override
public ApplicationRowKey decodeFromString(String encodedRowKey) {
List<String> split = TimelineReaderUtils.split(encodedRowKey);
if (split == null || split.size() != 5) {
throw new IllegalArgumentException(
"Invalid row key for application table.");
}
Long flowRunId = Long.valueOf(split.get(3));
return new ApplicationRowKey(split.get(0), split.get(1), split.get(2),
flowRunId, split.get(4));
}
}
}
| ApplicationRowKeyConverter |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/typeRef/TypeReferenceTest.java | {
"start": 2713,
"end": 3237
} | class ____ {
private int id;
private String name;
public Bean(){
}
public Bean(int id, String name){
this.id = id;
this.name = name;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
| Bean |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/Input.java | {
"start": 1754,
"end": 4064
} | interface ____<IN> {
/**
* Processes one element that arrived on this input of the {@link MultipleInputStreamOperator}.
* This method is guaranteed to not be called concurrently with other methods of the operator.
*/
void processElement(StreamRecord<IN> element) throws Exception;
/**
* Processes a {@link Watermark} that arrived on the first input of this two-input operator.
* This method is guaranteed to not be called concurrently with other methods of the operator.
*
* @see org.apache.flink.streaming.api.watermark.Watermark
*/
void processWatermark(Watermark mark) throws Exception;
/**
* Processes a {@link WatermarkStatus} that arrived on this input of the {@link
* MultipleInputStreamOperator}. This method is guaranteed to not be called concurrently with
* other methods of the operator.
*
* @see WatermarkStatus
*/
void processWatermarkStatus(WatermarkStatus watermarkStatus) throws Exception;
/**
* Processes a {@link LatencyMarker} that arrived on the first input of this two-input operator.
* This method is guaranteed to not be called concurrently with other methods of the operator.
*
* @see org.apache.flink.streaming.runtime.streamrecord.LatencyMarker
*/
void processLatencyMarker(LatencyMarker latencyMarker) throws Exception;
/**
* Set the correct key context before processing the {@code record}. Used for example to extract
* key from the {@code record} and pass that key to the state backends. This method is
* guaranteed to not be called concurrently with other methods of the operator.
*/
void setKeyContextElement(StreamRecord<IN> record) throws Exception;
/**
* Processes a {@link RecordAttributes} that arrived at this input. This method is guaranteed to
* not be called concurrently with other methods of the operator.
*/
@Experimental
default void processRecordAttributes(RecordAttributes recordAttributes) throws Exception {}
/**
* Processes a {@link org.apache.flink.api.common.watermark.Watermark} that arrived at this
* input, wrapped in a {@link WatermarkEvent}.
*/
@Experimental
default void processWatermark(WatermarkEvent watermark) throws Exception {}
}
| Input |
java | alibaba__fastjson | src/test/java/com/alibaba/fastjson/deserializer/issues3796/bean/ObjectU1.java | {
"start": 95,
"end": 1591
} | class ____ {
private int a;
private String b = "";
private String c = "";
private String d = "";
private List<ObjectU1_A> e;
private List<ObjectU1_B> f;
private long g;
private long h;
private long i;
private long j;
private long k;
private long l;
private List<ObjectV1_A> m;
public int getA() {
return a;
}
public void setA(int a) {
this.a = a;
}
public String getB() {
return b;
}
public void setB(String b) {
this.b = b;
}
public String getC() {
return c;
}
public void setC(String c) {
this.c = c;
}
public String getD() {
return d;
}
public void setD(String d) {
this.d = d;
}
public List<ObjectU1_A> getE() {
return e;
}
public void setE(List<ObjectU1_A> e) {
this.e = e;
}
public List<ObjectU1_B> getF() {
return f;
}
public void setF(List<ObjectU1_B> f) {
this.f = f;
}
public long getG() {
return g;
}
public void setG(long g) {
this.g = g;
}
public long getH() {
return h;
}
public void setH(long h) {
this.h = h;
}
public long getI() {
return i;
}
public void setI(long i) {
this.i = i;
}
public long getJ() {
return j;
}
public void setJ(long j) {
this.j = j;
}
public long getK() {
return k;
}
public void setK(long k) {
this.k = k;
}
public long getL() {
return l;
}
public void setL(long l) {
this.l = l;
}
public List<ObjectV1_A> getM() {
return m;
}
public void setM(List<ObjectV1_A> m) {
this.m = m;
}
}
| ObjectU1 |
java | alibaba__fastjson | src/test/java/com/alibaba/fastjson/deserializer/issue3050/beans/Person.java | {
"start": 130,
"end": 1020
} | class ____ {
private String name;
private String address;
private String id;
private int age;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
@Override
public String toString() {
return "Person{" +
"name='" + name + '\'' +
", address='" + address + '\'' +
", id='" + id + '\'' +
", age=" + age +
'}';
}
}
| Person |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/rerank/JinaAIRerankTaskSettings.java | {
"start": 1147,
"end": 5822
} | class ____ implements TaskSettings {
public static final String NAME = "jinaai_rerank_task_settings";
public static final String RETURN_DOCUMENTS = "return_documents";
public static final String TOP_N_DOCS_ONLY = "top_n";
public static final JinaAIRerankTaskSettings EMPTY_SETTINGS = new JinaAIRerankTaskSettings(null, null);
public static JinaAIRerankTaskSettings fromMap(Map<String, Object> map) {
ValidationException validationException = new ValidationException();
if (map == null || map.isEmpty()) {
return EMPTY_SETTINGS;
}
Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException);
Integer topNDocumentsOnly = extractOptionalPositiveInteger(
map,
TOP_N_DOCS_ONLY,
ModelConfigurations.TASK_SETTINGS,
validationException
);
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
return of(topNDocumentsOnly, returnDocuments);
}
/**
* Creates a new {@link JinaAIRerankTaskSettings} by preferring non-null fields from the request settings over the original settings.
*
* @param originalSettings the settings stored as part of the inference entity configuration
* @param requestTaskSettings the settings passed in within the task_settings field of the request
* @return a constructed {@link JinaAIRerankTaskSettings}
*/
public static JinaAIRerankTaskSettings of(JinaAIRerankTaskSettings originalSettings, JinaAIRerankTaskSettings requestTaskSettings) {
return new JinaAIRerankTaskSettings(
requestTaskSettings.getTopNDocumentsOnly() != null
? requestTaskSettings.getTopNDocumentsOnly()
: originalSettings.getTopNDocumentsOnly(),
requestTaskSettings.getReturnDocuments() != null
? requestTaskSettings.getReturnDocuments()
: originalSettings.getReturnDocuments()
);
}
public static JinaAIRerankTaskSettings of(Integer topNDocumentsOnly, Boolean returnDocuments) {
return new JinaAIRerankTaskSettings(topNDocumentsOnly, returnDocuments);
}
private final Integer topNDocumentsOnly;
private final Boolean returnDocuments;
public JinaAIRerankTaskSettings(StreamInput in) throws IOException {
this(in.readOptionalInt(), in.readOptionalBoolean());
}
public JinaAIRerankTaskSettings(@Nullable Integer topNDocumentsOnly, @Nullable Boolean doReturnDocuments) {
this.topNDocumentsOnly = topNDocumentsOnly;
this.returnDocuments = doReturnDocuments;
}
@Override
public boolean isEmpty() {
return topNDocumentsOnly == null && returnDocuments == null;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (topNDocumentsOnly != null) {
builder.field(TOP_N_DOCS_ONLY, topNDocumentsOnly);
}
if (returnDocuments != null) {
builder.field(RETURN_DOCUMENTS, returnDocuments);
}
builder.endObject();
return builder;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersions.V_8_18_0;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalInt(topNDocumentsOnly);
out.writeOptionalBoolean(returnDocuments);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
JinaAIRerankTaskSettings that = (JinaAIRerankTaskSettings) o;
return Objects.equals(returnDocuments, that.returnDocuments) && Objects.equals(topNDocumentsOnly, that.topNDocumentsOnly);
}
@Override
public int hashCode() {
return Objects.hash(returnDocuments, topNDocumentsOnly);
}
public Boolean getDoesReturnDocuments() {
return returnDocuments;
}
public Integer getTopNDocumentsOnly() {
return topNDocumentsOnly;
}
public Boolean getReturnDocuments() {
return returnDocuments;
}
@Override
public TaskSettings updatedTaskSettings(Map<String, Object> newSettings) {
JinaAIRerankTaskSettings updatedSettings = JinaAIRerankTaskSettings.fromMap(new HashMap<>(newSettings));
return JinaAIRerankTaskSettings.of(this, updatedSettings);
}
}
| JinaAIRerankTaskSettings |
java | apache__spark | core/src/test/java/org/apache/spark/util/collection/TestTimSort.java | {
"start": 1085,
"end": 4524
} | class ____ {
private static final int MIN_MERGE = 32;
/**
* Returns an array of integers that demonstrate the bug in TimSort
*/
public static int[] getTimSortBugTestSet(int length) {
int minRun = minRunLength(length);
List<Long> runs = runsJDKWorstCase(minRun, length);
return createArray(runs, length);
}
private static int minRunLength(int n) {
int r = 0; // Becomes 1 if any 1 bits are shifted off
while (n >= MIN_MERGE) {
r |= (n & 1);
n >>= 1;
}
return n + r;
}
private static int[] createArray(List<Long> runs, int length) {
int[] a = new int[length];
Arrays.fill(a, 0);
int endRun = -1;
for (long len : runs) {
a[endRun += len] = 1;
}
a[length - 1] = 0;
return a;
}
/**
* Fills <code>runs</code> with a sequence of run lengths of the form<br>
* Y_n x_{n,1} x_{n,2} ... x_{n,l_n} <br>
* Y_{n-1} x_{n-1,1} x_{n-1,2} ... x_{n-1,l_{n-1}} <br>
* ... <br>
* Y_1 x_{1,1} x_{1,2} ... x_{1,l_1}<br>
* The Y_i's are chosen to satisfy the invariant throughout execution,
* but the x_{i,j}'s are merged (by <code>TimSort.mergeCollapse</code>)
* into an X_i that violates the invariant.
*
* @param length The sum of all run lengths that will be added to <code>runs</code>.
*/
private static List<Long> runsJDKWorstCase(int minRun, int length) {
List<Long> runs = new ArrayList<>();
long runningTotal = 0, Y = minRun + 4, X = minRun;
while (runningTotal + Y + X <= length) {
runningTotal += X + Y;
generateJDKWrongElem(runs, minRun, X);
runs.add(0, Y);
// X_{i+1} = Y_i + x_{i,1} + 1, since runs.get(1) = x_{i,1}
X = Y + runs.get(1) + 1;
// Y_{i+1} = X_{i+1} + Y_i + 1
Y += X + 1;
}
if (runningTotal + X <= length) {
runningTotal += X;
generateJDKWrongElem(runs, minRun, X);
}
runs.add(length - runningTotal);
return runs;
}
/**
* Adds a sequence x_1, ..., x_n of run lengths to <code>runs</code> such that:<br>
* 1. X = x_1 + ... + x_n <br>
* 2. x_j >= minRun for all j <br>
* 3. x_1 + ... + x_{j-2} < x_j < x_1 + ... + x_{j-1} for all j <br>
* These conditions guarantee that TimSort merges all x_j's one by one
* (resulting in X) using only merges on the second-to-last element.
*
* @param X The sum of the sequence that should be added to runs.
*/
private static void generateJDKWrongElem(List<Long> runs, int minRun, long X) {
for (long newTotal; X >= 2L * minRun + 1; X = newTotal) {
//Default strategy
newTotal = X / 2 + 1;
//Specialized strategies
if (3L * minRun + 3 <= X && X <= 4L * minRun + 1) {
// add x_1=MIN+1, x_2=MIN, x_3=X-newTotal to runs
newTotal = 2L * minRun + 1;
} else if (5L * minRun + 5 <= X && X <= 6L * minRun + 5) {
// add x_1=MIN+1, x_2=MIN, x_3=MIN+2, x_4=X-newTotal to runs
newTotal = 3L * minRun + 3;
} else if (8L * minRun + 9 <= X && X <= 10L * minRun + 9) {
// add x_1=MIN+1, x_2=MIN, x_3=MIN+2, x_4=2MIN+2, x_5=X-newTotal to runs
newTotal = 5L * minRun + 5;
} else if (13L * minRun + 15 <= X && X <= 16L * minRun + 17) {
// add x_1=MIN+1, x_2=MIN, x_3=MIN+2, x_4=2MIN+2, x_5=3MIN+4, x_6=X-newTotal to runs
newTotal = 8L * minRun + 9;
}
runs.add(0, X - newTotal);
}
runs.add(0, X);
}
}
| TestTimSort |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/orphan/User.java | {
"start": 215,
"end": 924
} | class ____ {
private Integer id;
private String userid;
private Set<Mail> mails = new HashSet();
public User() {
}
public User(String userid) {
this.userid = userid;
}
public Integer getId() {
return id;
}
protected void setId(Integer id) {
this.id = id;
}
public String getUserid() {
return userid;
}
public void setUserid(String userid) {
this.userid = userid;
}
public Set<Mail> getMails() {
return mails;
}
private void setMails(Set<Mail> mails) {
this.mails = mails;
}
public Mail addMail(String alias) {
Mail mail = new Mail( alias, this );
getMails().add( mail );
return mail;
}
public void removeMail(Mail mail) {
getMails().remove( mail );
}
}
| User |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerRequiredModifiersTest.java | {
"start": 6286,
"end": 6738
} | class ____ {
private Private() {}
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
}
}
""")
.doTest();
}
@Test
public void positive_extractsHiddenLoggersForInterfaces() {
refactoringHelper()
.addInputLines(
"in/Test.java",
"""
import com.google.common.flogger.FluentLogger;
| Private |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/executor/ReuseExecutor.java | {
"start": 1338,
"end": 4081
} | class ____ extends BaseExecutor {
private final Map<String, Statement> statementMap = new HashMap<>();
public ReuseExecutor(Configuration configuration, Transaction transaction) {
super(configuration, transaction);
}
@Override
public int doUpdate(MappedStatement ms, Object parameter) throws SQLException {
Configuration configuration = ms.getConfiguration();
StatementHandler handler = configuration.newStatementHandler(this, ms, parameter, RowBounds.DEFAULT, null, null);
Statement stmt = prepareStatement(handler, ms.getStatementLog());
return handler.update(stmt);
}
@Override
public <E> List<E> doQuery(MappedStatement ms, Object parameter, RowBounds rowBounds, ResultHandler resultHandler,
BoundSql boundSql) throws SQLException {
Configuration configuration = ms.getConfiguration();
StatementHandler handler = configuration.newStatementHandler(wrapper, ms, parameter, rowBounds, resultHandler,
boundSql);
Statement stmt = prepareStatement(handler, ms.getStatementLog());
return handler.query(stmt, resultHandler);
}
@Override
protected <E> Cursor<E> doQueryCursor(MappedStatement ms, Object parameter, RowBounds rowBounds, BoundSql boundSql)
throws SQLException {
Configuration configuration = ms.getConfiguration();
StatementHandler handler = configuration.newStatementHandler(wrapper, ms, parameter, rowBounds, null, boundSql);
Statement stmt = prepareStatement(handler, ms.getStatementLog());
return handler.queryCursor(stmt);
}
@Override
public List<BatchResult> doFlushStatements(boolean isRollback) {
for (Statement stmt : statementMap.values()) {
closeStatement(stmt);
}
statementMap.clear();
return Collections.emptyList();
}
private Statement prepareStatement(StatementHandler handler, Log statementLog) throws SQLException {
Statement stmt;
BoundSql boundSql = handler.getBoundSql();
String sql = boundSql.getSql();
if (hasStatementFor(sql)) {
stmt = getStatement(sql);
applyTransactionTimeout(stmt);
} else {
Connection connection = getConnection(statementLog);
stmt = handler.prepare(connection, transaction.getTimeout());
putStatement(sql, stmt);
}
handler.parameterize(stmt);
return stmt;
}
private boolean hasStatementFor(String sql) {
try {
Statement statement = statementMap.get(sql);
return statement != null && !statement.getConnection().isClosed();
} catch (SQLException e) {
return false;
}
}
private Statement getStatement(String s) {
return statementMap.get(s);
}
private void putStatement(String sql, Statement stmt) {
statementMap.put(sql, stmt);
}
}
| ReuseExecutor |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java | {
"start": 1020,
"end": 2856
} | class ____ extends BlockHash {
private final int channel;
private boolean seenNull = false;
NullBlockHash(int channel, BlockFactory blockFactory) {
super(blockFactory);
this.channel = channel;
}
@Override
public void add(Page page, GroupingAggregatorFunction.AddInput addInput) {
var block = page.getBlock(channel);
if (block.areAllValuesNull()) {
seenNull = true;
try (IntVector groupIds = blockFactory.newConstantIntVector(0, block.getPositionCount())) {
addInput.add(0, groupIds);
}
} else {
throw new IllegalArgumentException("can't use NullBlockHash for non-null blocks");
}
}
@Override
public ReleasableIterator<IntBlock> lookup(Page page, ByteSizeValue targetBlockSize) {
Block block = page.getBlock(channel);
if (block.areAllValuesNull()) {
return ReleasableIterator.single(blockFactory.newConstantIntVector(0, block.getPositionCount()).asBlock());
}
throw new IllegalArgumentException("can't use NullBlockHash for non-null blocks");
}
@Override
public Block[] getKeys() {
return new Block[] { blockFactory.newConstantNullBlock(seenNull ? 1 : 0) };
}
@Override
public IntVector nonEmpty() {
return blockFactory.newConstantIntVector(0, seenNull ? 1 : 0);
}
@Override
public BitArray seenGroupIds(BigArrays bigArrays) {
BitArray seen = new BitArray(1, bigArrays);
if (seenNull) {
seen.set(0);
}
return seen;
}
@Override
public void close() {
// Nothing to close
}
@Override
public String toString() {
return "NullBlockHash{channel=" + channel + ", seenNull=" + seenNull + '}';
}
}
| NullBlockHash |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java | {
"start": 5349,
"end": 5431
} | class ____ the execution of a file system command.
*/
abstract private static | for |
java | micronaut-projects__micronaut-core | function/src/main/java/io/micronaut/function/BinaryTypeConfiguration.java | {
"start": 1209,
"end": 3468
} | class ____ {
static final String PREFIX = "micronaut.function.binary-types";
private static final Set<String> DEFAULT_BINARY_TYPES = Set.of(
MediaType.APPLICATION_OCTET_STREAM,
MediaType.IMAGE_JPEG,
MediaType.IMAGE_PNG,
MediaType.IMAGE_GIF,
"application/zip"
);
private boolean useDefaultBinaryTypes = true;
@NonNull
private List<String> additionalTypes = new ArrayList<>();
/**
* If this is false then calls to {@link #isMediaTypeBinary(String)} will only check the additional types, and ignore the defaults.
* The defaults are:
* {@value MediaType#APPLICATION_OCTET_STREAM},
* {@value MediaType#IMAGE_JPEG},
* {@value MediaType#IMAGE_PNG},
* {@value MediaType#IMAGE_GIF},
* "application/zip"
*
* @return Whether to use the default binary types
*/
public boolean isUseDefaults() {
return useDefaultBinaryTypes;
}
/**
* Sets whether to use the default binary types.
*
* @param useDefaults True if they should be used
*/
public void setUseDefaults(boolean useDefaults) {
this.useDefaultBinaryTypes = useDefaults;
}
/**
* The additional media types to consider binary.
*
* @return A lists of {@link MediaType} objects
*/
public @NonNull List<String> getAdditionalTypes() {
return additionalTypes;
}
/**
* Sets the additional media types to consider binary.
*
* @param additionalTypes The media types
*/
public void setAdditionalTypes(@NonNull List<String> additionalTypes) {
ArgumentUtils.requireNonNull("additionalTypes", additionalTypes);
this.additionalTypes = additionalTypes;
}
/**
* Checks whether the given media type is considered binary.
*
* @param mediaType The media type
* @return Whether the media type is considered binary
*/
public boolean isMediaTypeBinary(String mediaType) {
if (mediaType == null) {
return false;
}
if (useDefaultBinaryTypes && DEFAULT_BINARY_TYPES.contains(mediaType)) {
return true;
}
return additionalTypes.contains(mediaType);
}
}
| BinaryTypeConfiguration |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/replication/ReplicationSplitHelper.java | {
"start": 1617,
"end": 1975
} | class ____ the coordination logic during a split. If documents are only routed to the source then it will be a normal
* primary action. If documents are only routed to the target it will be delegated to the target. If documents are routed to both then
* the request will be split into two and executed locally and delegated to the target.
*/
public | implements |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java | {
"start": 27676,
"end": 32788
} | class ____ extends Configured implements Tool {
private static final String USAGE = "Usage: hdfs mover "
+ "[-p <files/dirs> | -f <local file>]"
+ "\n\t-p <files/dirs>\ta space separated list of HDFS files/dirs to migrate."
+ "\n\t-f <local file>\ta local file containing a list of HDFS files/dirs to migrate.";
private static Options buildCliOptions() {
Options opts = new Options();
Option file = Option.builder("f").argName("pathsFile").hasArg()
.desc("a local file containing files/dirs to migrate")
.build();
Option paths = Option.builder("p").argName("paths").hasArgs()
.desc("specify space separated files/dirs to migrate")
.build();
OptionGroup group = new OptionGroup();
group.addOption(file);
group.addOption(paths);
opts.addOptionGroup(group);
return opts;
}
private static String[] readPathFile(String file) throws IOException {
List<String> list = Lists.newArrayList();
BufferedReader reader = new BufferedReader(
new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
try {
String line;
while ((line = reader.readLine()) != null) {
if (!line.trim().isEmpty()) {
list.add(line);
}
}
} finally {
IOUtils.cleanupWithLogger(LOG, reader);
}
return list.toArray(new String[list.size()]);
}
private static Map<URI, List<Path>> getNameNodePaths(CommandLine line,
Configuration conf) throws Exception {
Map<URI, List<Path>> map = Maps.newHashMap();
String[] paths = null;
if (line.hasOption("f")) {
paths = readPathFile(line.getOptionValue("f"));
} else if (line.hasOption("p")) {
paths = line.getOptionValues("p");
}
Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
if (paths == null || paths.length == 0) {
for (URI namenode : namenodes) {
map.put(namenode, null);
}
return map;
}
final URI singleNs = namenodes.size() == 1 ?
namenodes.iterator().next() : null;
for (String path : paths) {
Path target = new Path(path);
if (!target.isUriPathAbsolute()) {
throw new IllegalArgumentException("The path " + target
+ " is not absolute");
}
URI targetUri = target.toUri();
if ((targetUri.getAuthority() == null || targetUri.getScheme() ==
null) && singleNs == null) {
// each path must contains both scheme and authority information
// unless there is only one name service specified in the
// configuration
throw new IllegalArgumentException("The path " + target
+ " does not contain scheme and authority thus cannot identify"
+ " its name service");
}
URI key = singleNs;
if (singleNs == null) {
key = new URI(targetUri.getScheme(), targetUri.getAuthority(),
null, null, null);
if (!namenodes.contains(key)) {
throw new IllegalArgumentException("Cannot resolve the path " +
target + ". The namenode services specified in the " +
"configuration: " + namenodes);
}
}
List<Path> targets = map.get(key);
if (targets == null) {
targets = Lists.newArrayList();
map.put(key, targets);
}
targets.add(Path.getPathWithoutSchemeAndAuthority(target));
}
return map;
}
@VisibleForTesting
static Map<URI, List<Path>> getNameNodePathsToMove(Configuration conf,
String... args) throws Exception {
final Options opts = buildCliOptions();
CommandLineParser parser = new GnuParser();
CommandLine commandLine = parser.parse(opts, args, true);
return getNameNodePaths(commandLine, conf);
}
@Override
public int run(String[] args) throws Exception {
final long startTime = Time.monotonicNow();
final Configuration conf = getConf();
try {
final Map<URI, List<Path>> map = getNameNodePathsToMove(conf, args);
return Mover.run(map, conf);
} catch (IOException e) {
System.out.println(e + ". Exiting ...");
return ExitStatus.IO_EXCEPTION.getExitCode();
} catch (InterruptedException e) {
System.out.println(e + ". Exiting ...");
return ExitStatus.INTERRUPTED.getExitCode();
} catch (ParseException e) {
System.out.println(e + ". Exiting ...");
return ExitStatus.ILLEGAL_ARGUMENTS.getExitCode();
} catch (IllegalArgumentException e) {
System.out.println(e + ". Exiting ...");
return ExitStatus.ILLEGAL_ARGUMENTS.getExitCode();
} finally {
DefaultMetricsSystem.shutdown();
System.out.format("%-24s ", DateFormat.getDateTimeInstance().format(new Date()));
System.out.println("Mover took " + StringUtils.formatTime(Time.monotonicNow()-startTime));
}
}
}
private static | Cli |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/jmx/export/CustomEditorConfigurerTests.java | {
"start": 971,
"end": 2067
} | class ____ extends AbstractJmxTests {
private final SimpleDateFormat df = new SimpleDateFormat("yyyy/MM/dd");
@Override
protected String getApplicationContextPath() {
return "org/springframework/jmx/export/customConfigurer.xml";
}
@Test
void datesInApplicationContext() throws Exception {
DateRange dr = getContext().getBean("dateRange", DateRange.class);
assertThat(dr.getStartDate()).as("startDate").isEqualTo(getStartDate());
assertThat(dr.getEndDate()).as("endDate").isEqualTo(getEndDate());
}
@Test
void datesInJmx() throws Exception {
ObjectName oname = new ObjectName("bean:name=dateRange");
Date startJmx = (Date) getServer().getAttribute(oname, "StartDate");
Date endJmx = (Date) getServer().getAttribute(oname, "EndDate");
assertThat(startJmx).as("startDate").isEqualTo(getStartDate());
assertThat(endJmx).as("endDate").isEqualTo(getEndDate());
}
private Date getStartDate() throws ParseException {
return df.parse("2004/10/12");
}
private Date getEndDate() throws ParseException {
return df.parse("2004/11/13");
}
}
| CustomEditorConfigurerTests |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/constructor/SubclassConstructorGuardTest.java | {
"start": 565,
"end": 963
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Simple.class, SimpleBean.class,
SimpleInterceptor.class);
@Test
public void testConstructorGuard() throws IOException {
assertEquals("foo::bar", Arc.container().instance(SimpleBean.class).get().foo());
}
@Simple
@Singleton
static | SubclassConstructorGuardTest |
java | google__gson | gson/src/test/java/com/google/gson/internal/GsonTypesTest.java | {
"start": 1656,
"end": 2823
} | class
____ type = GsonTypes.newParameterizedTypeWithOwner(null, List.class, A.class);
assertThat(type.getOwnerType()).isNull();
assertThat(type.getRawType()).isEqualTo(List.class);
assertThat(type.getActualTypeArguments()).asList().containsExactly(A.class);
// A<B>. A is a static inner class.
type = GsonTypes.newParameterizedTypeWithOwner(null, A.class, B.class);
assertThat(getFirstTypeArgument(type)).isEqualTo(B.class);
IllegalArgumentException e =
assertThrows(
IllegalArgumentException.class,
// NonStaticInner<A> is not allowed without owner
() -> GsonTypes.newParameterizedTypeWithOwner(null, NonStaticInner.class, A.class));
assertThat(e).hasMessageThat().isEqualTo("Must specify owner type for " + NonStaticInner.class);
type =
GsonTypes.newParameterizedTypeWithOwner(GsonTypesTest.class, NonStaticInner.class, A.class);
assertThat(type.getOwnerType()).isEqualTo(GsonTypesTest.class);
assertThat(type.getRawType()).isEqualTo(NonStaticInner.class);
assertThat(type.getActualTypeArguments()).asList().containsExactly(A.class);
final | ParameterizedType |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/MappingTest_insert.java | {
"start": 256,
"end": 1927
} | class ____ extends TestCase {
private String sql = "insert into user (id, name) values (123, 'abc')";
Map<String, String> mapping = Collections.singletonMap("user", "user_01");
public void test_mapping() throws Exception {
String result = SQLUtils.refactor(sql, null, mapping);
assertEquals("INSERT INTO user_01 (id, name)\n" +
"VALUES (123, 'abc')", result);
}
public void test_mapping_mysql() throws Exception {
String result = SQLUtils.refactor(sql, JdbcConstants.MYSQL, mapping);
assertEquals("INSERT INTO user_01 (id, name)\n" +
"VALUES (123, 'abc')", result);
}
public void test_mapping_pg() throws Exception {
String result = SQLUtils.refactor(sql, JdbcConstants.POSTGRESQL, mapping);
assertEquals("INSERT INTO user_01 (id, name)\n" +
"VALUES (123, 'abc')", result);
}
public void test_mapping_oracle() throws Exception {
String result = SQLUtils.refactor(sql, JdbcConstants.ORACLE, mapping);
assertEquals("INSERT INTO user_01 (id, name)\n" +
"VALUES (123, 'abc')", result);
}
public void test_mapping_sqlserver() throws Exception {
String result = SQLUtils.refactor(sql, JdbcConstants.SQL_SERVER, mapping);
assertEquals("INSERT INTO user_01 (id, name)\n" +
"VALUES (123, 'abc')", result);
}
public void test_mapping_db2() throws Exception {
String result = SQLUtils.refactor(sql, JdbcConstants.DB2, mapping);
assertEquals("INSERT INTO user_01 (id, name)\n" +
"VALUES (123, 'abc')", result);
}
}
| MappingTest_insert |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/instrument/classloading/ReflectiveLoadTimeWeaver.java | {
"start": 2235,
"end": 2649
} | class ____ which is not visible to the
* web application). There is no direct API dependency between this LoadTimeWeaver
* adapter and the underlying ClassLoader, just a 'loose' method contract.
*
* @author Costin Leau
* @author Juergen Hoeller
* @since 2.0
* @see #addTransformer(java.lang.instrument.ClassFileTransformer)
* @see #getThrowawayClassLoader()
* @see SimpleThrowawayClassLoader
*/
public | loader |
java | google__dagger | javatests/dagger/internal/codegen/DependencyCycleValidationTest.java | {
"start": 3573,
"end": 5548
} | interface ____");
});
}
@Test
public void cyclicDependencyWithModuleBindingValidation() {
// Cycle errors should not show a dependency trace to an entry point when doing full binding
// graph validation. So ensure that the message doesn't end with "test.Outer.C is requested at
// test.Outer.CComponent.getC()", as the previous test's message does.
CompilerTests.daggerCompiler(SIMPLE_CYCLIC_DEPENDENCY)
.withProcessingOptions(
ImmutableMap.<String, String>builder()
.put("dagger.fullBindingGraphValidation", "ERROR")
.putAll(compilerMode.processorOptions())
.buildOrThrow())
.compile(
subject -> {
subject.hasErrorCount(2);
subject
.hasErrorContaining(
String.join(
"\n",
"Found a dependency cycle:",
" Outer.C is injected at",
" [Outer.MModule] Outer.A(cParam)",
" Outer.A is injected at",
" [Outer.MModule] Outer.B(aParam)",
" Outer.B is injected at",
" [Outer.MModule] Outer.C(bParam)",
" Outer.C is injected at",
" [Outer.MModule] Outer.A(cParam)",
" ...",
"",
"======================",
"Full classname legend:",
"======================",
"Outer: test.Outer",
"========================",
"End of classname legend:",
"========================"))
.onSource(SIMPLE_CYCLIC_DEPENDENCY)
.onLineContaining(" | CComponent |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java | {
"start": 44394,
"end": 45161
} | class ____ false for dangling
* links so we can get a FileNotFoundException for links that exist.
* It's also possible that we raced with a delete of the link. Use
* the readBasicFileAttributes method in java.nio.file.attributes
* when available.
*/
if (!target.isEmpty()) {
return new FileStatus(0, false, 0, 0, 0, 0, FsPermission.getDefault(),
"", "", new Path(target), f);
}
// f refers to a file or directory that does not exist
throw e;
}
}
/**
* Calls out to platform's native stat(1) implementation to get file metadata
* (permissions, user, group, atime, mtime, etc). This works around the lack
* of lstat(2) in Java 6.
*
* Currently, the {@link Stat} | returns |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scripting/groovy/TestService.java | {
"start": 680,
"end": 727
} | interface ____ {
String sayHello();
}
| TestService |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/ValueLiteralExpression.java | {
"start": 2311,
"end": 2864
} | class ____ take any value described by a {@link DataType}. However, it is
* recommended to use instances with default conversion (see {@link DataType#getConversionClass()}.
*
* <p>Equals/hashCode support of this expression depends on the equals/hashCode support of the
* value.
*
* <p>The data type can be extracted automatically from non-null values using value-based extraction
* (see {@link ValueDataTypeConverter}).
*
* <p>Symbols (enums extending from {@link TableSymbol}) are considered as literal values.
*/
@PublicEvolving
public final | can |
java | resilience4j__resilience4j | resilience4j-spring6/src/main/java/io/github/resilience4j/spring6/fallback/configure/FallbackConfiguration.java | {
"start": 1540,
"end": 2697
} | class ____ {
@Bean
@Conditional(value = {RxJava2OnClasspathCondition.class})
public RxJava2FallbackDecorator rxJava2FallbackDecorator() {
return new RxJava2FallbackDecorator();
}
@Bean
@Conditional(value = {RxJava3OnClasspathCondition.class})
public RxJava3FallbackDecorator rxJava3FallbackDecorator() {
return new RxJava3FallbackDecorator();
}
@Bean
@Conditional(value = {ReactorOnClasspathCondition.class})
public ReactorFallbackDecorator reactorFallbackDecorator() {
return new ReactorFallbackDecorator();
}
@Bean
public CompletionStageFallbackDecorator completionStageFallbackDecorator() {
return new CompletionStageFallbackDecorator();
}
@Bean
public FallbackDecorators fallbackDecorators(@Autowired(required = false) List<FallbackDecorator> fallbackDecorator) {
return new FallbackDecorators(fallbackDecorator);
}
@Bean
public FallbackExecutor fallbackExecutor(SpelResolver spelResolver, FallbackDecorators fallbackDecorators) {
return new FallbackExecutor(spelResolver, fallbackDecorators);
}
}
| FallbackConfiguration |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ClientUpgradeCodec.java | {
"start": 1690,
"end": 7273
} | class ____ implements HttpClientUpgradeHandler.UpgradeCodec {
private static final List<CharSequence> UPGRADE_HEADERS = Collections.singletonList(HTTP_UPGRADE_SETTINGS_HEADER);
private final String handlerName;
private final Http2ConnectionHandler connectionHandler;
private final ChannelHandler upgradeToHandler;
private final ChannelHandler http2MultiplexHandler;
public Http2ClientUpgradeCodec(Http2FrameCodec frameCodec, ChannelHandler upgradeToHandler) {
this(null, frameCodec, upgradeToHandler);
}
public Http2ClientUpgradeCodec(String handlerName, Http2FrameCodec frameCodec, ChannelHandler upgradeToHandler) {
this(handlerName, (Http2ConnectionHandler) frameCodec, upgradeToHandler, null);
}
/**
* Creates the codec using a default name for the connection handler when adding to the
* pipeline.
*
* @param connectionHandler the HTTP/2 connection handler
*/
public Http2ClientUpgradeCodec(Http2ConnectionHandler connectionHandler) {
this((String) null, connectionHandler);
}
/**
* Creates the codec using a default name for the connection handler when adding to the
* pipeline.
*
* @param connectionHandler the HTTP/2 connection handler
* @param http2MultiplexHandler the Http2 Multiplexer handler to work with Http2FrameCodec
*/
public Http2ClientUpgradeCodec(Http2ConnectionHandler connectionHandler,
Http2MultiplexHandler http2MultiplexHandler) {
this((String) null, connectionHandler, http2MultiplexHandler);
}
/**
* Creates the codec providing an upgrade to the given handler for HTTP/2.
*
* @param handlerName the name of the HTTP/2 connection handler to be used in the pipeline,
* or {@code null} to auto-generate the name
* @param connectionHandler the HTTP/2 connection handler
*/
public Http2ClientUpgradeCodec(String handlerName, Http2ConnectionHandler connectionHandler) {
this(handlerName, connectionHandler, connectionHandler, null);
}
/**
* Creates the codec providing an upgrade to the given handler for HTTP/2.
*
* @param handlerName the name of the HTTP/2 connection handler to be used in the pipeline,
* or {@code null} to auto-generate the name
* @param connectionHandler the HTTP/2 connection handler
*/
public Http2ClientUpgradeCodec(String handlerName, Http2ConnectionHandler connectionHandler,
Http2MultiplexHandler http2MultiplexHandler) {
this(handlerName, connectionHandler, connectionHandler, http2MultiplexHandler);
}
private Http2ClientUpgradeCodec(String handlerName, Http2ConnectionHandler connectionHandler, ChannelHandler
upgradeToHandler, Http2MultiplexHandler http2MultiplexHandler) {
this.handlerName = handlerName;
this.connectionHandler = checkNotNull(connectionHandler, "connectionHandler");
this.upgradeToHandler = checkNotNull(upgradeToHandler, "upgradeToHandler");
this.http2MultiplexHandler = http2MultiplexHandler;
}
@Override
public CharSequence protocol() {
return HTTP_UPGRADE_PROTOCOL_NAME;
}
@Override
public Collection<CharSequence> setUpgradeHeaders(ChannelHandlerContext ctx,
HttpRequest upgradeRequest) {
CharSequence settingsValue = getSettingsHeaderValue(ctx);
upgradeRequest.headers().set(HTTP_UPGRADE_SETTINGS_HEADER, settingsValue);
return UPGRADE_HEADERS;
}
@Override
public void upgradeTo(ChannelHandlerContext ctx, FullHttpResponse upgradeResponse)
throws Exception {
try {
// Add the handler to the pipeline.
ctx.pipeline().addAfter(ctx.name(), handlerName, upgradeToHandler);
// Add the Http2 Multiplex handler as this handler handle events produced by the connectionHandler.
// See https://github.com/netty/netty/issues/9495
if (http2MultiplexHandler != null) {
final String name = ctx.pipeline().context(connectionHandler).name();
ctx.pipeline().addAfter(name, null, http2MultiplexHandler);
}
// Reserve local stream 1 for the response.
connectionHandler.onHttpClientUpgrade();
} catch (Http2Exception e) {
ctx.fireExceptionCaught(e);
ctx.close();
}
}
/**
* Converts the current settings for the handler to the Base64-encoded representation used in
* the HTTP2-Settings upgrade header.
*/
private CharSequence getSettingsHeaderValue(ChannelHandlerContext ctx) {
ByteBuf buf = null;
ByteBuf encodedBuf = null;
try {
// Get the local settings for the handler.
Http2Settings settings = connectionHandler.decoder().localSettings();
// Serialize the payload of the SETTINGS frame.
int payloadLength = SETTING_ENTRY_LENGTH * settings.size();
buf = ctx.alloc().buffer(payloadLength);
for (CharObjectMap.PrimitiveEntry<Long> entry : settings.entries()) {
buf.writeChar(entry.key());
buf.writeInt(entry.value().intValue());
}
// Base64 encode the payload and then convert to a string for the header.
encodedBuf = Base64.encode(buf, URL_SAFE);
return encodedBuf.toString(UTF_8);
} finally {
release(buf);
release(encodedBuf);
}
}
}
| Http2ClientUpgradeCodec |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/serialization/Serializer.java | {
"start": 1029,
"end": 1106
} | interface ____ expected to have a constructor with no parameter.
*
* <p>This | is |
java | square__retrofit | retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java | {
"start": 43860,
"end": 44354
} | class ____ {
@GET("/foo/bar/") //
Call<ResponseBody> method(@QueryName String ping) {
return null;
}
}
Request request = buildRequest(Example.class, "pong");
assertThat(request.method()).isEqualTo("GET");
assertThat(request.headers().size()).isEqualTo(0);
assertThat(request.url().toString()).isEqualTo("http://example.com/foo/bar/?pong");
assertThat(request.body()).isNull();
}
@Test
public void getWithEncodedQueryNameParam() {
| Example |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/DefaultScopeTest.java | {
"start": 1425,
"end": 1862
} | class ____ {
@Inject
Bean<NoScopeButResource> bean;
public Bean<NoScopeButResource> getBean() {
return bean;
}
@GET
@Produces(MediaType.TEXT_PLAIN)
public String hello() {
return "hello";
}
@Incoming("source")
public String toUpperCase(String payload) {
return payload.toUpperCase();
}
}
}
| NoScopeButResource |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/insert/OracleInsertTest19.java | {
"start": 1062,
"end": 3207
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = "insert into TB_DUOBAO_PARTICIPATE_NUMBER ( PARTICIPATE_NUMBER, PERIOD_ID, PRODUCT_ID,number_index)"
+ " (SELECT ?,?,?,? FROM DUAL)"
+ " union all "
+ " (SELECT ?,?,?,? FROM DUAL)";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("INSERT INTO TB_DUOBAO_PARTICIPATE_NUMBER (PARTICIPATE_NUMBER, PERIOD_ID, PRODUCT_ID, number_index)"
+ "\n(SELECT ?, ?, ?, ?"
+ "\nFROM DUAL)"
+ "\nUNION ALL"
+ "\n(SELECT ?, ?, ?, ?"
+ "\nFROM DUAL)",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("relationships : " + visitor.getRelationships());
assertEquals(1, visitor.getTables().size());
assertEquals(4, visitor.getColumns().size());
assertTrue(visitor.getTables().containsKey(new TableStat.Name("TB_DUOBAO_PARTICIPATE_NUMBER")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("TB_DUOBAO_PARTICIPATE_NUMBER", "PARTICIPATE_NUMBER")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("TB_DUOBAO_PARTICIPATE_NUMBER", "PERIOD_ID")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("TB_DUOBAO_PARTICIPATE_NUMBER", "PRODUCT_ID")));
assertTrue(visitor.getColumns().contains(new TableStat.Column("TB_DUOBAO_PARTICIPATE_NUMBER", "number_index")));
}
}
| OracleInsertTest19 |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_189.java | {
"start": 303,
"end": 822
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "select 10 / ( 2 * 2) / 2;";
// System.out.println(sql);
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT 10 / (2 * 2) / 2;", stmt.toString());
}
}
| MySqlSelectTest_189 |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxDistinctFuseable.java | {
"start": 1491,
"end": 2883
} | class ____<T, K, C>
extends InternalFluxOperator<T, T> implements Fuseable {
final Function<? super T, ? extends K> keyExtractor;
final Supplier<C> collectionSupplier;
final BiPredicate<C, K> distinctPredicate;
final Consumer<C> cleanupCallback;
FluxDistinctFuseable(Flux<? extends T> source,
Function<? super T, ? extends K> keyExtractor, Supplier<C> collectionSupplier,
BiPredicate<C, K> distinctPredicate, Consumer<C> cleanupCallback) {
super(source);
this.keyExtractor = Objects.requireNonNull(keyExtractor, "keyExtractor");
this.collectionSupplier = Objects.requireNonNull(collectionSupplier, "collectionSupplier");
this.distinctPredicate = Objects.requireNonNull(distinctPredicate, "distinctPredicate");
this.cleanupCallback = Objects.requireNonNull(cleanupCallback, "cleanupCallback");
}
@Override
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super T> actual) {
C collection = Objects.requireNonNull(collectionSupplier.get(),
"The collectionSupplier returned a null collection");
return new DistinctFuseableSubscriber<>(actual, collection, keyExtractor,
distinctPredicate, cleanupCallback);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
}
| FluxDistinctFuseable |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/filters/Filter.java | {
"start": 375,
"end": 1105
} | interface ____ {
/**
* The handler called on HTTP request or failure.
* It's important that the handler call {@link RoutingContext#next()} to invoke the next filter or the user routes.
*
* @return the handler
*/
Handler<RoutingContext> getHandler();
/**
* @return the priority of the filter.
*/
int getPriority();
/**
* Whether to add {@link #getHandler()} as HTTP request handler (via {@link Route#handler(Handler)}) or
* as failure handler (via {@link Route#failureHandler(Handler)}).
*
* @return true if filter should be applied on failures rather than HTTP requests
*/
default boolean isFailureHandler() {
return false;
}
}
| Filter |
java | spring-projects__spring-boot | module/spring-boot-http-client/src/test/java/org/springframework/boot/http/client/reactive/AbstractClientHttpConnectorBuilderTests.java | {
"start": 2744,
"end": 9615
} | class ____<T extends ClientHttpConnector> {
private static final Function<HttpMethod, HttpStatus> ALWAYS_FOUND = (method) -> HttpStatus.FOUND;
private final Class<T> connectorType;
private final ClientHttpConnectorBuilder<T> builder;
AbstractClientHttpConnectorBuilderTests(Class<T> connectorType, ClientHttpConnectorBuilder<T> builder) {
this.connectorType = connectorType;
this.builder = builder;
}
@Test
void buildReturnsConnectorOfExpectedType() {
T connector = this.builder.build();
assertThat(connector).isInstanceOf(this.connectorType);
}
@Test
void buildWhenHasConnectTimeout() {
HttpClientSettings settings = HttpClientSettings.defaults().withConnectTimeout(Duration.ofSeconds(60));
T connector = this.builder.build(settings);
assertThat(connectTimeout(connector)).isEqualTo(Duration.ofSeconds(60).toMillis());
}
@Test
void buildWhenHadReadTimeout() {
HttpClientSettings settings = HttpClientSettings.defaults().withReadTimeout(Duration.ofSeconds(120));
T connector = this.builder.build(settings);
assertThat(readTimeout(connector)).isEqualTo(Duration.ofSeconds(120).toMillis());
}
@ParameterizedTest
@WithPackageResources("test.jks")
@ValueSource(strings = { "GET", "POST" })
void connectWithSslBundle(String httpMethod) throws Exception {
TomcatServletWebServerFactory webServerFactory = new TomcatServletWebServerFactory(0);
webServerFactory.setSsl(ssl());
WebServer webServer = webServerFactory
.getWebServer((context) -> context.addServlet("test", TestServlet.class).addMapping("/"));
try {
webServer.start();
int port = webServer.getPort();
URI uri = new URI("https://localhost:%s".formatted(port));
ClientHttpConnector insecureConnector = this.builder.build();
ClientRequest insecureRequest = createRequest(httpMethod, uri);
assertThatExceptionOfType(WebClientRequestException.class)
.isThrownBy(() -> getResponse(insecureConnector, insecureRequest))
.withCauseInstanceOf(SSLHandshakeException.class);
ClientHttpConnector secureConnector = this.builder.build(HttpClientSettings.ofSslBundle(sslBundle()));
ClientRequest secureRequest = createRequest(httpMethod, uri);
ClientResponse secureResponse = getResponse(secureConnector, secureRequest);
assertThat(secureResponse.bodyToMono(String.class).block())
.contains("Received " + httpMethod + " request to /");
}
finally {
webServer.stop();
}
}
@ParameterizedTest
@WithPackageResources("test.jks")
@ValueSource(strings = { "GET", "POST" })
void connectWithSslBundleAndOptionsMismatch(String httpMethod) throws Exception {
TomcatServletWebServerFactory webServerFactory = new TomcatServletWebServerFactory(0);
webServerFactory.setSsl(ssl("TLS_AES_128_GCM_SHA256"));
WebServer webServer = webServerFactory
.getWebServer((context) -> context.addServlet("test", TestServlet.class).addMapping("/"));
try {
webServer.start();
int port = webServer.getPort();
URI uri = new URI("https://localhost:%s".formatted(port));
ClientHttpConnector secureConnector = this.builder.build(
HttpClientSettings.ofSslBundle(sslBundle(SslOptions.of(Set.of("TLS_AES_256_GCM_SHA384"), null))));
ClientRequest secureRequest = createRequest(httpMethod, uri);
assertThatExceptionOfType(WebClientRequestException.class)
.isThrownBy(() -> getResponse(secureConnector, secureRequest))
.withCauseInstanceOf(SSLHandshakeException.class);
}
finally {
webServer.stop();
}
}
@ParameterizedTest
@ValueSource(strings = { "GET", "POST", "PUT", "PATCH", "DELETE" })
void redirectDefault(String httpMethod) throws Exception {
testRedirect(null, HttpMethod.valueOf(httpMethod), this::getExpectedRedirect);
}
@ParameterizedTest
@ValueSource(strings = { "GET", "POST", "PUT", "PATCH", "DELETE" })
void redirectFollow(String httpMethod) throws Exception {
HttpClientSettings settings = HttpClientSettings.defaults().withRedirects(HttpRedirects.FOLLOW);
testRedirect(settings, HttpMethod.valueOf(httpMethod), this::getExpectedRedirect);
}
@ParameterizedTest
@ValueSource(strings = { "GET", "POST", "PUT", "PATCH", "DELETE" })
void redirectDontFollow(String httpMethod) throws Exception {
HttpClientSettings settings = HttpClientSettings.defaults().withRedirects(HttpRedirects.DONT_FOLLOW);
testRedirect(settings, HttpMethod.valueOf(httpMethod), ALWAYS_FOUND);
}
protected final void testRedirect(@Nullable HttpClientSettings settings, HttpMethod httpMethod,
Function<HttpMethod, HttpStatus> expectedStatusForMethod) throws URISyntaxException {
HttpStatus expectedStatus = expectedStatusForMethod.apply(httpMethod);
TomcatServletWebServerFactory webServerFactory = new TomcatServletWebServerFactory(0);
WebServer webServer = webServerFactory
.getWebServer((context) -> context.addServlet("test", TestServlet.class).addMapping("/"));
try {
webServer.start();
int port = webServer.getPort();
URI uri = new URI("http://localhost:%s".formatted(port) + "/redirect");
ClientHttpConnector connector = this.builder.build(settings);
ClientRequest request = createRequest(httpMethod, uri);
ClientResponse response = getResponse(connector, request);
assertThat(response.statusCode()).isEqualTo(expectedStatus);
if (expectedStatus == HttpStatus.OK) {
assertThat(response.bodyToMono(String.class).block()).contains("request to /redirected");
}
}
finally {
webServer.stop();
}
}
private ClientRequest createRequest(String httpMethod, URI uri) {
return createRequest(HttpMethod.valueOf(httpMethod), uri);
}
private ClientRequest createRequest(HttpMethod httpMethod, URI uri) {
return ClientRequest.create(httpMethod, uri).build();
}
private ClientResponse getResponse(ClientHttpConnector connector, ClientRequest request) {
ClientResponse response = ExchangeFunctions.create(connector).exchange(request).block();
assertThat(response).isNotNull();
return response;
}
private Ssl ssl(String... ciphers) {
Ssl ssl = new Ssl();
ssl.setClientAuth(ClientAuth.NEED);
ssl.setKeyPassword("password");
ssl.setKeyStore("classpath:test.jks");
ssl.setTrustStore("classpath:test.jks");
if (ciphers.length > 0) {
ssl.setCiphers(ciphers);
}
return ssl;
}
protected final SslBundle sslBundle() {
return sslBundle(SslOptions.NONE);
}
protected final SslBundle sslBundle(SslOptions sslOptions) {
JksSslStoreDetails storeDetails = JksSslStoreDetails.forLocation("classpath:test.jks");
JksSslStoreBundle stores = new JksSslStoreBundle(storeDetails, storeDetails);
return SslBundle.of(stores, SslBundleKey.of("password"), sslOptions);
}
protected HttpStatus getExpectedRedirect(HttpMethod httpMethod) {
return HttpStatus.OK;
}
protected abstract long connectTimeout(T connector);
protected abstract long readTimeout(T connector);
public static | AbstractClientHttpConnectorBuilderTests |
java | apache__flink | flink-test-utils-parent/flink-test-utils-junit/src/main/java/org/apache/flink/core/testutils/FilteredClassLoader.java | {
"start": 1282,
"end": 1463
} | class ____ for the filtered classes. */
private final HashSet<String> filteredClassNames;
/**
* Creates a new filtered classloader.
*
* @param delegate The | names |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 128235,
"end": 128411
} | class ____ extends ProcessTableFunction<Integer> {
public void eval(int i) {}
public void eval(String i) {}
}
private static | MultiEvalProcessTableFunction |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/constructor/nestedsource/source/Studio.java | {
"start": 253,
"end": 555
} | class ____ {
private final String name;
private final String city;
public Studio(String name, String city) {
this.name = name;
this.city = city;
}
public String getName() {
return name;
}
public String getCity() {
return city;
}
}
| Studio |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/aspectj/annotation/InstantiationModelAwarePointcutAdvisorImpl.java | {
"start": 8137,
"end": 10477
} | class ____ extends DynamicMethodMatcherPointcut {
private final AspectJExpressionPointcut declaredPointcut;
private final Pointcut preInstantiationPointcut;
private @Nullable LazySingletonAspectInstanceFactoryDecorator aspectInstanceFactory;
public PerTargetInstantiationModelPointcut(AspectJExpressionPointcut declaredPointcut,
Pointcut preInstantiationPointcut, MetadataAwareAspectInstanceFactory aspectInstanceFactory) {
this.declaredPointcut = declaredPointcut;
this.preInstantiationPointcut = preInstantiationPointcut;
if (aspectInstanceFactory instanceof LazySingletonAspectInstanceFactoryDecorator lazyFactory) {
this.aspectInstanceFactory = lazyFactory;
}
}
@Override
public boolean matches(Method method, Class<?> targetClass) {
// We're either instantiated and matching on declared pointcut,
// or uninstantiated matching on either pointcut...
return (isAspectMaterialized() && this.declaredPointcut.matches(method, targetClass)) ||
this.preInstantiationPointcut.getMethodMatcher().matches(method, targetClass);
}
@Override
public boolean matches(Method method, Class<?> targetClass, @Nullable Object... args) {
// This can match only on declared pointcut.
return (isAspectMaterialized() && this.declaredPointcut.matches(method, targetClass, args));
}
private boolean isAspectMaterialized() {
return (this.aspectInstanceFactory == null || this.aspectInstanceFactory.isMaterialized());
}
@Override
public boolean equals(@Nullable Object other) {
// For equivalence, we only need to compare the preInstantiationPointcut fields since
// they include the declaredPointcut fields. In addition, we should not compare the
// aspectInstanceFactory fields since LazySingletonAspectInstanceFactoryDecorator does
// not implement equals().
return (this == other || (other instanceof PerTargetInstantiationModelPointcut that &&
ObjectUtils.nullSafeEquals(this.preInstantiationPointcut, that.preInstantiationPointcut)));
}
@Override
public int hashCode() {
return ObjectUtils.nullSafeHashCode(this.declaredPointcut.getExpression());
}
@Override
public String toString() {
return PerTargetInstantiationModelPointcut.class.getName() + ": " + this.declaredPointcut.getExpression();
}
}
}
| PerTargetInstantiationModelPointcut |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-server/hadoop-yarn-server-timelineservice-hbase-server-2/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineServerUtils.java | {
"start": 1660,
"end": 7511
} | class ____ {
private HBaseTimelineServerUtils() {
}
/**
* Creates a {@link Tag} from the input attribute.
*
* @param attribute Attribute from which tag has to be fetched.
* @return a HBase Tag.
*/
public static Tag getTagFromAttribute(Map.Entry<String, byte[]> attribute) {
// attribute could be either an Aggregation Operation or
// an Aggregation Dimension
// Get the Tag type from either
AggregationOperation aggOp = AggregationOperation
.getAggregationOperation(attribute.getKey());
if (aggOp != null) {
Tag t = createTag(aggOp.getTagType(), attribute.getValue());
return t;
}
AggregationCompactionDimension aggCompactDim =
AggregationCompactionDimension.getAggregationCompactionDimension(
attribute.getKey());
if (aggCompactDim != null) {
Tag t = createTag(aggCompactDim.getTagType(), attribute.getValue());
return t;
}
return null;
}
/**
* creates a new cell based on the input cell but with the new value.
*
* @param origCell Original cell
* @param newValue new cell value
* @return cell
* @throws IOException while creating new cell.
*/
public static Cell createNewCell(Cell origCell, byte[] newValue)
throws IOException {
return CellUtil.createCell(CellUtil.cloneRow(origCell),
CellUtil.cloneFamily(origCell), CellUtil.cloneQualifier(origCell),
origCell.getTimestamp(), KeyValue.Type.Put.getCode(), newValue);
}
/**
* creates a cell with the given inputs.
*
* @param row row of the cell to be created
* @param family column family name of the new cell
* @param qualifier qualifier for the new cell
* @param ts timestamp of the new cell
* @param newValue value of the new cell
* @param tags tags in the new cell
* @return cell
* @throws IOException while creating the cell.
*/
public static Cell createNewCell(byte[] row, byte[] family, byte[] qualifier,
long ts, byte[] newValue, byte[] tags) throws IOException {
return CellUtil.createCell(row, family, qualifier, ts, KeyValue.Type.Put,
newValue, tags);
}
/**
* Create a Tag.
* @param tagType tag type
* @param tag the content of the tag in byte array.
* @return an instance of Tag
*/
public static Tag createTag(byte tagType, byte[] tag) {
return new ArrayBackedTag(tagType, tag);
}
/**
* Create a Tag.
* @param tagType tag type
* @param tag the content of the tag in String.
* @return an instance of Tag
*/
public static Tag createTag(byte tagType, String tag) {
return createTag(tagType, Bytes.toBytes(tag));
}
/**
* Convert a cell to a list of tags.
* @param cell the cell to convert
* @return a list of tags
*/
public static List<Tag> convertCellAsTagList(Cell cell) {
return TagUtil.asList(
cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
}
/**
* Convert a list of tags to a byte array.
* @param tags the list of tags to convert
* @return byte array representation of the list of tags
*/
public static byte[] convertTagListToByteArray(List<Tag> tags) {
return TagUtil.fromList(tags);
}
/**
* returns app id from the list of tags.
*
* @param tags cell tags to be looked into
* @return App Id as the AggregationCompactionDimension
*/
public static String getAggregationCompactionDimension(List<Tag> tags) {
String appId = null;
for (Tag t : tags) {
if (AggregationCompactionDimension.APPLICATION_ID.getTagType() == t
.getType()) {
appId = Bytes.toString(Tag.cloneValue(t));
return appId;
}
}
return appId;
}
/**
* Returns the first seen aggregation operation as seen in the list of input
* tags or null otherwise.
*
* @param tags list of HBase tags.
* @return AggregationOperation
*/
public static AggregationOperation getAggregationOperationFromTagsList(
List<Tag> tags) {
for (AggregationOperation aggOp : AggregationOperation.values()) {
for (Tag tag : tags) {
if (tag.getType() == aggOp.getTagType()) {
return aggOp;
}
}
}
return null;
}
// flush and compact all the regions of the primary table
/**
* Flush and compact all regions of a table.
* @param server region server
* @param table the table to flush and compact
* @throws IOException any IOE raised, or translated exception.
* @return the number of regions flushed and compacted
*/
public static int flushCompactTableRegions(HRegionServer server,
TableName table) throws IOException {
List<HRegion> regions = server.getRegions(table);
for (HRegion region : regions) {
region.flush(true);
region.compact(true);
}
return regions.size();
}
/**
* Check the existence of FlowRunCoprocessor in a table.
* @param server region server
* @param table table to check
* @param existenceExpected true if the FlowRunCoprocessor is expected
* to be loaded in the table, false otherwise
* @throws Exception Exception if any.
*/
public static void validateFlowRunCoprocessor(HRegionServer server,
TableName table, boolean existenceExpected) throws Exception {
List<HRegion> regions = server.getRegions(table);
for (HRegion region : regions) {
boolean found = false;
Set<String> coprocs = region.getCoprocessorHost().getCoprocessors();
for (String coprocName : coprocs) {
if (coprocName.contains("FlowRunCoprocessor")) {
found = true;
}
}
if (found != existenceExpected) {
throw new Exception("FlowRunCoprocessor is" +
(existenceExpected ? " not " : " ") + "loaded in table " + table);
}
}
}
}
| HBaseTimelineServerUtils |
java | apache__avro | lang/java/tools/src/test/compiler/output/OptionalGettersAllFieldsTest.java | {
"start": 2306,
"end": 9177
} | class ____ by the given SchemaStore
*/
public static BinaryMessageDecoder<OptionalGettersAllFieldsTest> createDecoder(SchemaStore resolver) {
return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
}
/**
* Serializes this OptionalGettersAllFieldsTest to a ByteBuffer.
* @return a buffer holding the serialized data for this instance
* @throws java.io.IOException if this instance could not be serialized
*/
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/**
* Deserializes a OptionalGettersAllFieldsTest from a ByteBuffer.
* @param b a byte buffer holding serialized data for an instance of this class
* @return a OptionalGettersAllFieldsTest instance decoded from the given buffer
* @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class
*/
public static OptionalGettersAllFieldsTest fromByteBuffer(
java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
private java.lang.CharSequence name;
private java.lang.CharSequence nullable_name;
private java.lang.Object favorite_number;
private java.lang.Integer nullable_favorite_number;
/**
* Default constructor. Note that this does not initialize fields
* to their default values from the schema. If that is desired then
* one should use <code>newBuilder()</code>.
*/
public OptionalGettersAllFieldsTest() {}
/**
* All-args constructor.
* @param name The new value for name
* @param nullable_name The new value for nullable_name
* @param favorite_number The new value for favorite_number
* @param nullable_favorite_number The new value for nullable_favorite_number
*/
public OptionalGettersAllFieldsTest(java.lang.CharSequence name, java.lang.CharSequence nullable_name, java.lang.Object favorite_number, java.lang.Integer nullable_favorite_number) {
this.name = name;
this.nullable_name = nullable_name;
this.favorite_number = favorite_number;
this.nullable_favorite_number = nullable_favorite_number;
}
@Override
public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; }
@Override
public org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
@Override
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return name;
case 1: return nullable_name;
case 2: return favorite_number;
case 3: return nullable_favorite_number;
default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
}
}
// Used by DatumReader. Applications should not call.
@Override
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: name = (java.lang.CharSequence)value$; break;
case 1: nullable_name = (java.lang.CharSequence)value$; break;
case 2: favorite_number = value$; break;
case 3: nullable_favorite_number = (java.lang.Integer)value$; break;
default: throw new IndexOutOfBoundsException("Invalid index: " + field$);
}
}
/**
* Gets the value of the 'name' field as an Optional<java.lang.CharSequence>.
* @return The value wrapped in an Optional<java.lang.CharSequence>.
*/
public Optional<java.lang.CharSequence> getName() {
return Optional.<java.lang.CharSequence>ofNullable(name);
}
/**
* Sets the value of the 'name' field.
* @param value the value to set.
*/
public void setName(java.lang.CharSequence value) {
this.name = value;
}
/**
* Gets the value of the 'nullable_name' field as an Optional<java.lang.CharSequence>.
* @return The value wrapped in an Optional<java.lang.CharSequence>.
*/
public Optional<java.lang.CharSequence> getNullableName() {
return Optional.<java.lang.CharSequence>ofNullable(nullable_name);
}
/**
* Sets the value of the 'nullable_name' field.
* @param value the value to set.
*/
public void setNullableName(java.lang.CharSequence value) {
this.nullable_name = value;
}
/**
* Gets the value of the 'favorite_number' field as an Optional<java.lang.Object>.
* @return The value wrapped in an Optional<java.lang.Object>.
*/
public Optional<java.lang.Object> getFavoriteNumber() {
return Optional.<java.lang.Object>ofNullable(favorite_number);
}
/**
* Sets the value of the 'favorite_number' field.
* @param value the value to set.
*/
public void setFavoriteNumber(java.lang.Object value) {
this.favorite_number = value;
}
/**
* Gets the value of the 'nullable_favorite_number' field as an Optional<java.lang.Integer>.
* @return The value wrapped in an Optional<java.lang.Integer>.
*/
public Optional<java.lang.Integer> getNullableFavoriteNumber() {
return Optional.<java.lang.Integer>ofNullable(nullable_favorite_number);
}
/**
* Sets the value of the 'nullable_favorite_number' field.
* @param value the value to set.
*/
public void setNullableFavoriteNumber(java.lang.Integer value) {
this.nullable_favorite_number = value;
}
/**
* Creates a new OptionalGettersAllFieldsTest RecordBuilder.
* @return A new OptionalGettersAllFieldsTest RecordBuilder
*/
public static avro.examples.baseball.OptionalGettersAllFieldsTest.Builder newBuilder() {
return new avro.examples.baseball.OptionalGettersAllFieldsTest.Builder();
}
/**
* Creates a new OptionalGettersAllFieldsTest RecordBuilder by copying an existing Builder.
* @param other The existing builder to copy.
* @return A new OptionalGettersAllFieldsTest RecordBuilder
*/
public static avro.examples.baseball.OptionalGettersAllFieldsTest.Builder newBuilder(avro.examples.baseball.OptionalGettersAllFieldsTest.Builder other) {
if (other == null) {
return new avro.examples.baseball.OptionalGettersAllFieldsTest.Builder();
} else {
return new avro.examples.baseball.OptionalGettersAllFieldsTest.Builder(other);
}
}
/**
* Creates a new OptionalGettersAllFieldsTest RecordBuilder by copying an existing OptionalGettersAllFieldsTest instance.
* @param other The existing instance to copy.
* @return A new OptionalGettersAllFieldsTest RecordBuilder
*/
public static avro.examples.baseball.OptionalGettersAllFieldsTest.Builder newBuilder(avro.examples.baseball.OptionalGettersAllFieldsTest other) {
if (other == null) {
return new avro.examples.baseball.OptionalGettersAllFieldsTest.Builder();
} else {
return new avro.examples.baseball.OptionalGettersAllFieldsTest.Builder(other);
}
}
/**
* RecordBuilder for OptionalGettersAllFieldsTest instances.
*/
@org.apache.avro.specific.AvroGenerated
public static | backed |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/internals/TopicPartitionMetadataTest.java | {
"start": 1049,
"end": 4214
} | class ____ {
@Test
public void shouldGetPartitionTimeAndProcessorMeta() {
final ProcessorMetadata metadata = new ProcessorMetadata();
final String key = "some_key";
final long value = 100L;
metadata.put(key, value);
final TopicPartitionMetadata topicMeta = new TopicPartitionMetadata(100L, metadata);
assertThat(topicMeta.partitionTime(), is(100L));
assertThat(topicMeta.processorMetadata(), is(metadata));
}
@Test
public void shouldDecodeVersionOne() {
final byte[] serialized = ByteBuffer.allocate(Byte.BYTES + Long.BYTES)
.put((byte) 1)
.putLong(100L)
.array();
final String serializedString = Base64.getEncoder().encodeToString(serialized);
final TopicPartitionMetadata topicMeta = TopicPartitionMetadata.decode(serializedString);
assertThat(topicMeta.partitionTime(), is(100L));
assertThat(topicMeta.processorMetadata(), is(new ProcessorMetadata()));
}
@Test
public void shouldEncodeDecodeVersionTwo() {
final ProcessorMetadata metadata = new ProcessorMetadata();
final String key = "some_key";
final long value = 100L;
metadata.put(key, value);
final TopicPartitionMetadata expected = new TopicPartitionMetadata(100L, metadata);
final String serializedString = expected.encode();
final TopicPartitionMetadata topicMeta = TopicPartitionMetadata.decode(serializedString);
assertThat(topicMeta, is(expected));
}
@Test
public void shouldEncodeDecodeEmptyMetaVersionTwo() {
final TopicPartitionMetadata expected = new TopicPartitionMetadata(100L, new ProcessorMetadata());
final String serializedString = expected.encode();
final TopicPartitionMetadata topicMeta = TopicPartitionMetadata.decode(serializedString);
assertThat(topicMeta, is(expected));
}
@Test
public void shouldDecodeEmptyStringVersionTwo() {
final TopicPartitionMetadata expected = new TopicPartitionMetadata(RecordQueue.UNKNOWN, new ProcessorMetadata());
final TopicPartitionMetadata topicMeta = TopicPartitionMetadata.decode("");
assertThat(topicMeta, is(expected));
}
@Test
public void shouldReturnUnknownTimestampIfUnknownVersion() {
final byte[] emptyMessage = {TopicPartitionMetadata.LATEST_MAGIC_BYTE + 1};
final String encodedString = Base64.getEncoder().encodeToString(emptyMessage);
final TopicPartitionMetadata decoded = TopicPartitionMetadata.decode(encodedString);
assertThat(decoded.partitionTime(), is(RecordQueue.UNKNOWN));
assertThat(decoded.processorMetadata(), is(new ProcessorMetadata()));
}
@Test
public void shouldReturnUnknownTimestampIfInvalidMetadata() {
final String invalidBase64String = "{}";
final TopicPartitionMetadata decoded = TopicPartitionMetadata.decode(invalidBase64String);
assertThat(decoded.partitionTime(), is(RecordQueue.UNKNOWN));
assertThat(decoded.processorMetadata(), is(new ProcessorMetadata()));
}
}
| TopicPartitionMetadataTest |
java | greenrobot__greendao | DaoGenerator/src/org/greenrobot/greendao/generator/DaoUtil.java | {
"start": 1047,
"end": 3828
} | class ____ {
public static String dbName(String javaName) {
StringBuilder builder = new StringBuilder(javaName);
for (int i = 1; i < builder.length(); i++) {
boolean lastWasUpper = Character.isUpperCase(builder.charAt(i - 1));
boolean isUpper = Character.isUpperCase(builder.charAt(i));
if (isUpper && !lastWasUpper) {
builder.insert(i, '_');
i++;
}
}
return builder.toString().toUpperCase();
}
public static String getClassnameFromFullyQualified(String clazz) {
int index = clazz.lastIndexOf('.');
if (index != -1) {
return clazz.substring(index + 1);
} else {
return clazz;
}
}
public static String capFirst(String string) {
return Character.toUpperCase(string.charAt(0)) + (string.length() > 1 ? string.substring(1) : "");
}
public static String getPackageFromFullyQualified(String clazz) {
int index = clazz.lastIndexOf('.');
if (index != -1) {
return clazz.substring(0, index);
} else {
return null;
}
}
public static byte[] readAllBytes(InputStream in) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
copyAllBytes(in, out);
return out.toByteArray();
}
public static byte[] readAllBytes(File file) throws IOException {
FileInputStream is = new FileInputStream(file);
try {
return DaoUtil.readAllBytes(is);
} finally {
is.close();
}
}
public static byte[] readAllBytes(String filename) throws IOException {
FileInputStream is = new FileInputStream(filename);
try {
return DaoUtil.readAllBytes(is);
} finally {
is.close();
}
}
/**
* Copies all available data from in to out without closing any stream.
*
* @return number of bytes copied
*/
public static int copyAllBytes(InputStream in, OutputStream out) throws IOException {
int byteCount = 0;
byte[] buffer = new byte[4096];
while (true) {
int read = in.read(buffer);
if (read == -1) {
break;
}
out.write(buffer, 0, read);
byteCount += read;
}
return byteCount;
}
public static String checkConvertToJavaDoc(String javaDoc, String indent) {
if (javaDoc != null && !javaDoc.trim().startsWith("/**")) {
javaDoc = javaDoc.replace("\n", "\n" + indent + " * ");
javaDoc = indent + "/**\n" + indent + " * " + javaDoc + "\n" + indent + " */";
}
return javaDoc;
}
}
| DaoUtil |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/get/ShardMultiGetFromTranslogRequestSerializationTests.java | {
"start": 815,
"end": 1919
} | class ____ extends AbstractWireSerializingTestCase<Request> {
@Override
protected Writeable.Reader<Request> instanceReader() {
return Request::new;
}
@Override
protected Request createTestInstance() {
return new Request(randomMultiGetShardRequest(), randomShardId());
}
@Override
protected Request mutateInstance(Request instance) throws IOException {
return randomBoolean()
? new Request(instance.getMultiGetShardRequest(), randomValueOtherThan(instance.getShardId(), this::randomShardId))
: new Request(
randomValueOtherThan(instance.getMultiGetShardRequest(), this::randomMultiGetShardRequest),
instance.getShardId()
);
}
private ShardId randomShardId() {
return new ShardId(randomAlphaOfLength(10), UUID.randomUUID().toString(), randomIntBetween(0, 5));
}
private MultiGetShardRequest randomMultiGetShardRequest() {
return MultiGetShardRequestTests.createTestInstance(randomBoolean());
}
}
| ShardMultiGetFromTranslogRequestSerializationTests |
java | spring-projects__spring-security | ldap/src/test/java/org/springframework/security/ldap/authentication/LdapAuthenticationProviderTests.java | {
"start": 2214,
"end": 8575
} | class ____ {
@Test
public void testSupportsUsernamePasswordAuthenticationToken() {
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(new MockAuthenticator(),
new MockAuthoritiesPopulator());
assertThat(ldapProvider.supports(UsernamePasswordAuthenticationToken.class)).isTrue();
}
@Test
public void testDefaultMapperIsSet() {
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(new MockAuthenticator(),
new MockAuthoritiesPopulator());
assertThat(ldapProvider.getUserDetailsContextMapper() instanceof LdapUserDetailsMapper).isTrue();
}
@Test
public void testEmptyOrNullUserNameThrowsException() {
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(new MockAuthenticator(),
new MockAuthoritiesPopulator());
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(
() -> ldapProvider.authenticate(UsernamePasswordAuthenticationToken.unauthenticated(null, "password")));
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> ldapProvider
.authenticate(UsernamePasswordAuthenticationToken.unauthenticated("", "bobspassword")));
}
@Test
public void usernameNotFoundExceptionIsHiddenByDefault() {
final LdapAuthenticator authenticator = mock(LdapAuthenticator.class);
final UsernamePasswordAuthenticationToken joe = UsernamePasswordAuthenticationToken.unauthenticated("joe",
"password");
given(authenticator.authenticate(joe)).willThrow(new UsernameNotFoundException("nobody"));
LdapAuthenticationProvider provider = new LdapAuthenticationProvider(authenticator);
assertThatExceptionOfType(BadCredentialsException.class).isThrownBy(() -> provider.authenticate(joe));
}
@Test
public void usernameNotFoundExceptionIsNotHiddenIfConfigured() {
final LdapAuthenticator authenticator = mock(LdapAuthenticator.class);
final UsernamePasswordAuthenticationToken joe = UsernamePasswordAuthenticationToken.unauthenticated("joe",
"password");
given(authenticator.authenticate(joe)).willThrow(new UsernameNotFoundException("nobody"));
LdapAuthenticationProvider provider = new LdapAuthenticationProvider(authenticator);
provider.setHideUserNotFoundExceptions(false);
assertThatExceptionOfType(UsernameNotFoundException.class).isThrownBy(() -> provider.authenticate(joe));
}
@Test
public void normalUsage() {
MockAuthoritiesPopulator populator = new MockAuthoritiesPopulator();
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(new MockAuthenticator(), populator);
LdapUserDetailsMapper userMapper = new LdapUserDetailsMapper();
userMapper.setRoleAttributes(new String[] { "ou" });
ldapProvider.setUserDetailsContextMapper(userMapper);
assertThat(ldapProvider.getAuthoritiesPopulator()).isNotNull();
UsernamePasswordAuthenticationToken authRequest = UsernamePasswordAuthenticationToken.unauthenticated("ben",
"benspassword");
Object authDetails = new Object();
authRequest.setDetails(authDetails);
Authentication authResult = ldapProvider.authenticate(authRequest);
assertThat(authResult.getCredentials()).isEqualTo("benspassword");
assertThat(authResult.getDetails()).isSameAs(authDetails);
UserDetails user = (UserDetails) authResult.getPrincipal();
assertThat(user.getAuthorities()).hasSize(2);
assertThat(user.getPassword()).isEqualTo("{SHA}nFCebWjxfaLbHHG1Qk5UU4trbvQ=");
assertThat(user.getUsername()).isEqualTo("ben");
assertThat(populator.getRequestedUsername()).isEqualTo("ben");
assertThat(AuthorityUtils.authorityListToSet(user.getAuthorities())).contains("ROLE_FROM_ENTRY");
assertThat(AuthorityUtils.authorityListToSet(user.getAuthorities())).contains("ROLE_FROM_POPULATOR");
}
@Test
public void passwordIsSetFromUserDataIfUseAuthenticationRequestCredentialsIsFalse() {
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(new MockAuthenticator(),
new MockAuthoritiesPopulator());
ldapProvider.setUseAuthenticationRequestCredentials(false);
UsernamePasswordAuthenticationToken authRequest = UsernamePasswordAuthenticationToken.unauthenticated("ben",
"benspassword");
Authentication authResult = ldapProvider.authenticate(authRequest);
assertThat(authResult.getCredentials()).isEqualTo("{SHA}nFCebWjxfaLbHHG1Qk5UU4trbvQ=");
}
@Test
public void useWithNullAuthoritiesPopulatorReturnsCorrectRole() {
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(new MockAuthenticator());
LdapUserDetailsMapper userMapper = new LdapUserDetailsMapper();
userMapper.setRoleAttributes(new String[] { "ou" });
ldapProvider.setUserDetailsContextMapper(userMapper);
UsernamePasswordAuthenticationToken authRequest = UsernamePasswordAuthenticationToken.unauthenticated("ben",
"benspassword");
UserDetails user = (UserDetails) ldapProvider.authenticate(authRequest).getPrincipal();
assertThat(user.getAuthorities()).hasSize(1);
assertThat(AuthorityUtils.authorityListToSet(user.getAuthorities())).contains("ROLE_FROM_ENTRY");
}
@Test
public void authenticateWithNamingException() {
UsernamePasswordAuthenticationToken authRequest = UsernamePasswordAuthenticationToken.unauthenticated("ben",
"benspassword");
LdapAuthenticator mockAuthenticator = mock(LdapAuthenticator.class);
CommunicationException expectedCause = new CommunicationException(new javax.naming.CommunicationException());
given(mockAuthenticator.authenticate(authRequest)).willThrow(expectedCause);
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(mockAuthenticator);
assertThatExceptionOfType(InternalAuthenticationServiceException.class)
.isThrownBy(() -> ldapProvider.authenticate(authRequest))
.havingCause()
.isSameAs(expectedCause);
}
@Test
void authenticateWhenSuccessThenIssuesFactor() {
MockAuthenticator authenticator = new MockAuthenticator();
MockAuthoritiesPopulator populator = new MockAuthoritiesPopulator();
LdapAuthenticationProvider ldapProvider = new LdapAuthenticationProvider(authenticator, populator);
UsernamePasswordAuthenticationToken request = new UsernamePasswordAuthenticationToken("ben", "benspassword");
Authentication result = ldapProvider.authenticate(request);
SecurityAssertions.assertThat(result).hasAuthority(FactorGrantedAuthority.PASSWORD_AUTHORITY);
}
| LdapAuthenticationProviderTests |
java | google__dagger | javatests/dagger/functional/producers/binds/SimpleBindingModule.java | {
"start": 1484,
"end": 3404
} | class ____ {
@Binds
abstract Object bindObject(FooOfStrings impl);
@Binds
abstract Foo<String> bindFooOfStrings(FooOfStrings impl);
@Binds
abstract Foo<? extends Number> bindFooOfNumbers(Foo<Integer> fooOfIntegers);
@Binds
@SomeQualifier
abstract Foo<String> bindQualifiedFooOfStrings(FooOfStrings impl);
@Produces
static FooOfStrings produceFooOfStrings() {
return new FooOfStrings();
}
@Produces
static Foo<Integer> produceFooOfIntegers() {
return new Foo<Integer>() {};
}
@Produces
static Foo<Double> produceFooOfDoubles() {
return new Foo<Double>() {};
}
@Binds
@IntoSet
abstract Foo<? extends Number> bindFooOfIntegersIntoSet(Foo<Integer> fooOfIntegers);
@Binds
@IntoSet
abstract Foo<? extends Number> bindFooExtendsNumberIntoSet(Foo<Double> fooOfDoubles);
@Binds
@ElementsIntoSet
abstract Set<Object> bindSetOfFooNumbersToObjects(Set<Foo<? extends Number>> setOfFooNumbers);
@Binds
@IntoSet
abstract Object bindFooOfStringsIntoSetOfObjects(FooOfStrings impl);
@Produces
static HashSet<String> produceStringHashSet() {
return new HashSet<>(Arrays.asList("hash-string1", "hash-string2"));
}
@Produces
static TreeSet<CharSequence> produceCharSequenceTreeSet() {
return new TreeSet<CharSequence>(Arrays.asList("tree-charSequence1", "tree-charSequence2"));
}
@Produces
static Collection<CharSequence> produceCharSequenceCollection() {
return Arrays.<CharSequence>asList("list-charSequence");
}
@Binds
@ElementsIntoSet
abstract Set<CharSequence> bindHashSetOfStrings(HashSet<String> set);
@Binds
@ElementsIntoSet
abstract Set<CharSequence> bindTreeSetOfCharSequences(TreeSet<CharSequence> set);
@Binds
@ElementsIntoSet
abstract Set<CharSequence> bindCollectionOfCharSequences(Collection<CharSequence> collection);
@Qualifier
@Retention(RetentionPolicy.RUNTIME)
@ | SimpleBindingModule |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxCreate.java | {
"start": 17551,
"end": 18054
} | class ____<T> extends BaseSink<T> {
NoOverflowBaseAsyncSink(CoreSubscriber<? super T> actual) {
super(actual);
}
@Override
public final FluxSink<T> next(T t) {
if (isTerminated()) {
Operators.onNextDropped(t, ctx);
return this;
}
if (requestedFromDownstream() != 0) {
actual.onNext(t);
produced(this, 1);
}
else {
onOverflow();
Operators.onDiscard(t, ctx);
}
return this;
}
abstract void onOverflow();
}
static final | NoOverflowBaseAsyncSink |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java | {
"start": 38052,
"end": 40119
} | class ____ extends AbstractService
implements ContainerAllocator, RMHeartbeatHandler {
private final ClientService clientService;
private final AppContext context;
private ContainerAllocator containerAllocator;
ContainerAllocatorRouter(ClientService clientService,
AppContext context) {
super(ContainerAllocatorRouter.class.getName());
this.clientService = clientService;
this.context = context;
}
@Override
protected void serviceStart() throws Exception {
if (job.isUber()) {
MRApps.setupDistributedCacheLocal(getConfig());
this.containerAllocator = new LocalContainerAllocator(
this.clientService, this.context, nmHost, nmPort, nmHttpPort
, containerID);
} else {
this.containerAllocator = new RMContainerAllocator(
this.clientService, this.context, preemptionPolicy);
}
((Service)this.containerAllocator).init(getConfig());
((Service)this.containerAllocator).start();
super.serviceStart();
}
@Override
protected void serviceStop() throws Exception {
ServiceOperations.stop((Service) this.containerAllocator);
super.serviceStop();
}
@Override
public void handle(ContainerAllocatorEvent event) {
this.containerAllocator.handle(event);
}
public void setSignalled(boolean isSignalled) {
((RMCommunicator) containerAllocator).setSignalled(isSignalled);
}
public void setShouldUnregister(boolean shouldUnregister) {
((RMCommunicator) containerAllocator).setShouldUnregister(shouldUnregister);
}
@Override
public long getLastHeartbeatTime() {
return ((RMCommunicator) containerAllocator).getLastHeartbeatTime();
}
@Override
public void runOnNextHeartbeat(Runnable callback) {
((RMCommunicator) containerAllocator).runOnNextHeartbeat(callback);
}
}
/**
* By the time life-cycle of this router starts, job-init would have already
* happened.
*/
private final | ContainerAllocatorRouter |
java | apache__flink | flink-formats/flink-avro/src/test/java/org/apache/flink/formats/avro/AvroStreamingFileSinkITCase.java | {
"start": 2471,
"end": 7302
} | class ____ extends AbstractTestBaseJUnit4 {
@Rule public final Timeout timeoutPerTest = Timeout.seconds(20);
@Test
public void testWriteAvroSpecific() throws Exception {
File folder = TEMPORARY_FOLDER.newFolder();
List<Address> data =
Arrays.asList(
new Address(1, "a", "b", "c", "12345"),
new Address(2, "p", "q", "r", "12345"),
new Address(3, "x", "y", "z", "12345"));
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
env.enableCheckpointing(100);
AvroWriterFactory<Address> avroWriterFactory = AvroWriters.forSpecificRecord(Address.class);
DataStream<Address> stream =
env.fromSource(
TestDataGenerators.fromDataWithSnapshotsLatch(
data, TypeInformation.of(Address.class)),
WatermarkStrategy.noWatermarks(),
"Test Source");
stream.addSink(
StreamingFileSink.forBulkFormat(Path.fromLocalFile(folder), avroWriterFactory)
.withBucketAssigner(new UniqueBucketAssigner<>("test"))
.build());
env.execute();
validateResults(folder, new SpecificDatumReader<>(Address.class), data);
}
@Test
public void testWriteAvroGeneric() throws Exception {
File folder = TEMPORARY_FOLDER.newFolder();
Schema schema = Address.getClassSchema();
Collection<GenericRecord> data = new GenericTestDataCollection();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
env.enableCheckpointing(100);
AvroWriterFactory<GenericRecord> avroWriterFactory = AvroWriters.forGenericRecord(schema);
DataStream<GenericRecord> stream =
env.fromSource(
TestDataGenerators.fromDataWithSnapshotsLatch(
data, new GenericRecordAvroTypeInfo(schema)),
WatermarkStrategy.noWatermarks(),
"Test Source");
stream.addSink(
StreamingFileSink.forBulkFormat(Path.fromLocalFile(folder), avroWriterFactory)
.withBucketAssigner(new UniqueBucketAssigner<>("test"))
.build());
env.execute();
validateResults(folder, new GenericDatumReader<>(schema), new ArrayList<>(data));
}
@Test
public void testWriteAvroReflect() throws Exception {
File folder = TEMPORARY_FOLDER.newFolder();
List<Datum> data = Arrays.asList(new Datum("a", 1), new Datum("b", 2), new Datum("c", 3));
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
env.enableCheckpointing(100);
AvroWriterFactory<Datum> avroWriterFactory = AvroWriters.forReflectRecord(Datum.class);
DataStream<Datum> stream =
env.fromSource(
TestDataGenerators.fromDataWithSnapshotsLatch(
data, TypeInformation.of(Datum.class)),
WatermarkStrategy.noWatermarks(),
"Test Source");
stream.addSink(
StreamingFileSink.forBulkFormat(Path.fromLocalFile(folder), avroWriterFactory)
.withBucketAssigner(new UniqueBucketAssigner<>("test"))
.build());
env.execute();
validateResults(folder, new ReflectDatumReader<>(Datum.class), data);
}
// ------------------------------------------------------------------------
private static <T> void validateResults(
File folder, DatumReader<T> datumReader, List<T> expected) throws Exception {
File[] buckets = folder.listFiles();
assertThat(buckets).hasSize(1);
File[] partFiles = buckets[0].listFiles();
assertThat(partFiles).hasSize(2);
for (File partFile : partFiles) {
assertThat(partFile).isNotEmpty();
final List<T> fileContent = readAvroFile(partFile, datumReader);
assertThat(fileContent).isEqualTo(expected);
}
}
private static <T> List<T> readAvroFile(File file, DatumReader<T> datumReader)
throws IOException {
ArrayList<T> results = new ArrayList<>();
try (DataFileReader<T> dataFileReader = new DataFileReader<>(file, datumReader)) {
while (dataFileReader.hasNext()) {
results.add(dataFileReader.next());
}
}
return results;
}
private static | AvroStreamingFileSinkITCase |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateProcessMessage.java | {
"start": 728,
"end": 2312
} | class ____ {
@Nullable
private final ModelPlotConfig modelPlotConfig;
@Nullable
private final PerPartitionCategorizationConfig perPartitionCategorizationConfig;
@Nullable
private final List<JobUpdate.DetectorUpdate> detectorUpdates;
@Nullable
private final List<MlFilter> filters;
@Nullable
private final List<ScheduledEvent> scheduledEvents;
private UpdateProcessMessage(
@Nullable ModelPlotConfig modelPlotConfig,
@Nullable PerPartitionCategorizationConfig perPartitionCategorizationConfig,
@Nullable List<JobUpdate.DetectorUpdate> detectorUpdates,
@Nullable List<MlFilter> filters,
List<ScheduledEvent> scheduledEvents
) {
this.modelPlotConfig = modelPlotConfig;
this.perPartitionCategorizationConfig = perPartitionCategorizationConfig;
this.detectorUpdates = detectorUpdates;
this.filters = filters;
this.scheduledEvents = scheduledEvents;
}
@Nullable
public ModelPlotConfig getModelPlotConfig() {
return modelPlotConfig;
}
@Nullable
public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() {
return perPartitionCategorizationConfig;
}
@Nullable
public List<JobUpdate.DetectorUpdate> getDetectorUpdates() {
return detectorUpdates;
}
@Nullable
public List<MlFilter> getFilters() {
return filters;
}
@Nullable
public List<ScheduledEvent> getScheduledEvents() {
return scheduledEvents;
}
public static | UpdateProcessMessage |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/cfg/BaseSettings.java | {
"start": 3180,
"end": 15858
} | class ____ instantiate. This is typically
* used to do additional configuration (with dependency injection, for example)
* beyond simply construction of instances; or to use alternative constructors.
*/
protected final HandlerInstantiator _handlerInstantiator;
/**
* Default {@link java.util.Locale} used with serialization formats.
* Default value is {@link Locale#getDefault()}.
*/
protected final Locale _locale;
/**
* Default {@link java.util.TimeZone} used with serialization formats,
* if (and only if!) explicitly set by use; otherwise `null` to indicate
* "use default", which means "UTC" (from Jackson 2.7); earlier versions
* (up to 2.6) used "GMT".
*<p>
* Note that if a new value is set, timezone is also assigned to
* {@link #_dateFormat} of this object.
*/
protected final TimeZone _timeZone;
/**
* Explicitly default {@link Base64Variant} to use for handling
* binary data (<code>byte[]</code>), used with data formats
* that use base64 encoding (like JSON, CSV).
*/
protected final Base64Variant _defaultBase64;
/**
* Used to provide custom cache implementation in downstream components.
*/
protected final CacheProvider _cacheProvider;
/**
* Factory used for constructing {@link tools.jackson.databind.JsonNode} instances.
*/
protected final JsonNodeFactory _nodeFactory;
/**
* Handler that specifies some aspects of Constructor auto-detection.
*/
protected final ConstructorDetector _ctorDetector;
/*
/**********************************************************************
/* Construction
/**********************************************************************
*/
public BaseSettings(AnnotationIntrospector ai,
PropertyNamingStrategy pns, EnumNamingStrategy ens,
AccessorNamingStrategy.Provider accNaming,
TypeResolverBuilder<?> defaultTyper, PolymorphicTypeValidator ptv,
DateFormat dateFormat, HandlerInstantiator hi,
Locale locale, TimeZone tz, Base64Variant defaultBase64,
CacheProvider cacheProvider, JsonNodeFactory nodeFactory,
ConstructorDetector ctorDetector)
{
_annotationIntrospector = ai;
_propertyNamingStrategy = pns;
_enumNamingStrategy = ens;
_accessorNaming = accNaming;
_defaultTyper = defaultTyper;
_typeValidator = ptv;
_dateFormat = dateFormat;
_handlerInstantiator = hi;
_locale = locale;
_timeZone = tz;
_defaultBase64 = defaultBase64;
_cacheProvider = cacheProvider;
_nodeFactory = nodeFactory;
_ctorDetector = ctorDetector;
}
/*
/**********************************************************************
/* Factory methods
/**********************************************************************
*/
public BaseSettings withAnnotationIntrospector(AnnotationIntrospector ai) {
if (_annotationIntrospector == ai) {
return this;
}
return new BaseSettings(ai, _propertyNamingStrategy, _enumNamingStrategy, _accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings withInsertedAnnotationIntrospector(AnnotationIntrospector ai) {
return withAnnotationIntrospector(AnnotationIntrospectorPair.create(ai, _annotationIntrospector));
}
public BaseSettings withAppendedAnnotationIntrospector(AnnotationIntrospector ai) {
return withAnnotationIntrospector(AnnotationIntrospectorPair.create(_annotationIntrospector, ai));
}
public BaseSettings with(PropertyNamingStrategy pns) {
if (_propertyNamingStrategy == pns) {
return this;
}
return new BaseSettings(_annotationIntrospector, pns, _enumNamingStrategy, _accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(EnumNamingStrategy ens) {
if (_enumNamingStrategy == ens) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, ens, _accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(AccessorNamingStrategy.Provider p) {
if (_accessorNaming == p) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy, p,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(TypeResolverBuilder<?> typer) {
if (_defaultTyper == typer) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
typer, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(PolymorphicTypeValidator ptv) {
if (_typeValidator == ptv) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, ptv, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(DateFormat df) {
if (_dateFormat == df) {
return this;
}
// 26-Sep-2015, tatu: Related to [databind#939], let's try to force TimeZone if
// (but only if!) it has been set explicitly.
if ((df != null) && hasExplicitTimeZone()) {
df = _force(df, _timeZone);
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, df, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(HandlerInstantiator hi) {
if (_handlerInstantiator == hi) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, hi, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(Locale l) {
if (_locale == l) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, l,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
/**
* Fluent factory for constructing a new instance that uses specified TimeZone.
* Note that timezone used with also be assigned to configured {@link DateFormat},
* changing time formatting defaults.
*/
public BaseSettings with(TimeZone tz)
{
if (tz == _timeZone) {
return this;
}
DateFormat df = _force(_dateFormat, (tz == null) ? DEFAULT_TIMEZONE : tz);
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, df, _handlerInstantiator, _locale,
tz, _defaultBase64, _cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(Base64Variant base64) {
if (base64 == _defaultBase64) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, base64, _cacheProvider, _nodeFactory, _ctorDetector);
}
/**
* Fluent factory for constructing a new instance with provided {@link CacheProvider}.
*
* @return a new instance with provided {@link CacheProvider}.
*/
public BaseSettings with(CacheProvider cacheProvider) {
if (cacheProvider == _cacheProvider) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, cacheProvider, _nodeFactory, _ctorDetector);
}
public BaseSettings with(JsonNodeFactory nodeFactory) {
if (nodeFactory == _nodeFactory) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, nodeFactory, _ctorDetector);
}
public BaseSettings with(ConstructorDetector ctorDetector) {
if (ctorDetector == _ctorDetector) {
return this;
}
return new BaseSettings(_annotationIntrospector, _propertyNamingStrategy, _enumNamingStrategy,
_accessorNaming,
_defaultTyper, _typeValidator, _dateFormat, _handlerInstantiator, _locale,
_timeZone, _defaultBase64, _cacheProvider, _nodeFactory, ctorDetector);
}
/*
/**********************************************************************
/* API
/**********************************************************************
*/
public AnnotationIntrospector getAnnotationIntrospector() {
return _annotationIntrospector;
}
public PropertyNamingStrategy getPropertyNamingStrategy() {
return _propertyNamingStrategy;
}
public EnumNamingStrategy getEnumNamingStrategy() {
return _enumNamingStrategy;
}
public AccessorNamingStrategy.Provider getAccessorNaming() {
return _accessorNaming;
}
public TypeResolverBuilder<?> getDefaultTyper() {
return _defaultTyper;
}
public PolymorphicTypeValidator getPolymorphicTypeValidator() {
return _typeValidator;
}
public DateFormat getDateFormat() {
return _dateFormat;
}
public HandlerInstantiator getHandlerInstantiator() {
return _handlerInstantiator;
}
public Locale getLocale() {
return _locale;
}
public TimeZone getTimeZone() {
TimeZone tz = _timeZone;
return (tz == null) ? DEFAULT_TIMEZONE : tz;
}
/**
* Accessor that may be called to determine whether this settings object
* has been explicitly configured with a TimeZone (true), or is still
* relying on the default settings (false).
*/
public boolean hasExplicitTimeZone() {
return (_timeZone != null);
}
public Base64Variant getBase64Variant() {
return _defaultBase64;
}
public CacheProvider getCacheProvider() {
return _cacheProvider;
}
public JsonNodeFactory getNodeFactory() {
return _nodeFactory;
}
public ConstructorDetector getConstructorDetector() {
return (_ctorDetector == null) ? ConstructorDetector.DEFAULT : _ctorDetector;
}
/*
/**********************************************************************
/* Helper methods
/**********************************************************************
*/
private DateFormat _force(DateFormat df, TimeZone tz)
{
if (df instanceof StdDateFormat sdf) {
return sdf.withTimeZone(tz);
}
// we don't know if original format might be shared; better create a clone:
df = (DateFormat) df.clone();
df.setTimeZone(tz);
return df;
}
}
| to |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/internal/BasicTypeImpl.java | {
"start": 476,
"end": 1981
} | class ____<J> extends AbstractSingleColumnStandardBasicType<J> implements AdjustableBasicType<J> {
public static final String EXTERNALIZED_PREFIX = "basicType";
public static final String[] NO_REG_KEYS = ArrayHelper.EMPTY_STRING_ARRAY;
private static int count;
private final String name;
public BasicTypeImpl(JavaType<J> jtd, JdbcType std) {
super( std, jtd );
name = String.format(
Locale.ROOT,
"%s@%s(%s,%s)",
EXTERNALIZED_PREFIX,
++count,
jtd.getJavaTypeClass().getName(),
std.getDefaultSqlTypeCode()
);
}
@Override
public String[] getRegistrationKeys() {
// irrelevant - these are created on-the-fly
return NO_REG_KEYS;
}
/**
* BasicTypeImpl produces a name whose sole purpose is to
* be used as part of interpreting Envers-produced mappings.
* We want to use the same exact BasicTypeImpl *instance* in
* the audit mapping (Envers) as is used in the audited (ORM)
* mapping.
*
* The name is in the form {@code `basicType@${u}(${o},${r})`}, where<ol>
* <li>${u} is a unique number</li>
* <li>${o} is the mapped Java type</li>
* <li>${r} is the mapped SQL type (JDBC type code)</li>
* </ol>
*
* {@code `basicType@${u}`} is enough to uniquely identify this type instance;
* the Java Type and JDBC type code are informational
*
* E.g. {@code `basicType@321(java.lang.String,12)`}
*/
@Override
public String getName() {
return name;
}
@Override
public String toString() {
return name;
}
}
| BasicTypeImpl |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/decorators/validation/MultipleDelegateInjectionPointsTest.java | {
"start": 459,
"end": 1058
} | class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(Converter.class, DecoratorWithMultipleDelegateInjetionPoints.class).shouldFail().build();
@Test
public void testFailure() {
assertNotNull(container.getFailure());
assertTrue(
container.getFailure().getMessage()
.contains("DecoratorWithMultipleDelegateInjetionPoints has multiple @Delegate injection points"),
container.getFailure().getMessage());
}
| MultipleDelegateInjectionPointsTest |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/opensaml5Main/java/org/springframework/security/saml2/provider/service/authentication/OpenSaml5AuthenticationProvider.java | {
"start": 21288,
"end": 22300
} | class ____ implements Converter<ResponseToken, Saml2ResponseValidatorResult> {
@Override
@NonNull
public Saml2ResponseValidatorResult convert(ResponseToken responseToken) {
Response response = responseToken.getResponse();
Saml2AuthenticationToken token = responseToken.getToken();
String destination = response.getDestination();
String location = token.getRelyingPartyRegistration().getAssertionConsumerServiceLocation();
if (StringUtils.hasText(destination) && !destination.equals(location)) {
String message = "Invalid destination [" + destination + "] for SAML response [" + response.getID()
+ "]";
return Saml2ResponseValidatorResult
.failure(new Saml2Error(Saml2ErrorCodes.INVALID_DESTINATION, message));
}
return Saml2ResponseValidatorResult.success();
}
}
/**
* A response validator that compares the {@code Issuer} value to the configured
* {@link AssertingPartyMetadata#getEntityId()}
*
* @since 6.5
*/
public static final | DestinationValidator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/callbacks/xml/EntityListenerViaXmlTest.java | {
"start": 567,
"end": 1034
} | class ____ {
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
@JiraKey(value = "HHH-9771")
public void testUsage(EntityManagerFactoryScope scope) {
JournalingListener.reset();
scope.inTransaction(
entityManager -> entityManager.persist( new MyEntity( 1, "steve" ) )
);
assertEquals( 1, JournalingListener.getPrePersistCount() );
}
}
| EntityListenerViaXmlTest |
java | google__guice | extensions/servlet/src/com/google/inject/servlet/InternalServletModule.java | {
"start": 1860,
"end": 5014
} | class ____ implements Provider<ServletContext> {
private ServletContext injectedServletContext;
@Inject
BackwardsCompatibleServletContextProvider() {}
// This setter is called by the GuiceServletContextListener
void set(ServletContext injectedServletContext) {
this.injectedServletContext = injectedServletContext;
}
@Override
public ServletContext get() {
if (null != injectedServletContext) {
return injectedServletContext;
}
Logger.getLogger(InternalServletModule.class.getName())
.warning(
"You are attempting to use a deprecated API (specifically,"
+ " attempting to @Inject ServletContext inside an eagerly created"
+ " singleton. While we allow this for backwards compatibility, be"
+ " warned that this MAY have unexpected behavior if you have more"
+ " than one injector (with ServletModule) running in the same JVM."
+ " Please consult the Guice documentation at"
+ " https://github.com/google/guice/wiki/Servlets for more"
+ " information.");
return GuiceFilter.getServletContext();
}
}
@Override
protected void configure() {
bindScope(RequestScoped.class, REQUEST);
bindScope(SessionScoped.class, SESSION);
bind(ServletRequest.class).to(HttpServletRequest.class);
bind(ServletResponse.class).to(HttpServletResponse.class);
// inject the pipeline into GuiceFilter so it can route requests correctly
// Unfortunate staticness... =(
// NOTE(user): This is maintained for legacy purposes.
requestStaticInjection(GuiceFilter.class);
bind(ManagedFilterPipeline.class);
bind(ManagedServletPipeline.class);
bind(FilterPipeline.class).to(ManagedFilterPipeline.class).asEagerSingleton();
bind(ServletContext.class).toProvider(BackwardsCompatibleServletContextProvider.class);
bind(BackwardsCompatibleServletContextProvider.class);
}
@Provides
@Singleton
@ScopingOnly
GuiceFilter provideScopingOnlyGuiceFilter() {
return new GuiceFilter(new DefaultFilterPipeline());
}
@Provides
@RequestScoped
HttpServletRequest provideHttpServletRequest() {
return GuiceFilter.getRequest(Key.get(HttpServletRequest.class));
}
@Provides
@RequestScoped
HttpServletResponse provideHttpServletResponse() {
return GuiceFilter.getResponse(Key.get(HttpServletResponse.class));
}
@Provides
HttpSession provideHttpSession() {
return GuiceFilter.getRequest(Key.get(HttpSession.class)).getSession();
}
@SuppressWarnings("unchecked") // defined by getParameterMap()
@Provides
@RequestScoped
@RequestParameters
Map<String, String[]> provideRequestParameters(ServletRequest req) {
return req.getParameterMap();
}
@Override
public boolean equals(Object o) {
// Is only ever installed internally, so we don't need to check state.
return o instanceof InternalServletModule;
}
@Override
public int hashCode() {
return InternalServletModule.class.hashCode();
}
}
| BackwardsCompatibleServletContextProvider |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/ai/model/mcp/SecuritySchemeTest.java | {
"start": 1002,
"end": 2633
} | class ____ extends BasicRequestTest {
@Test
void testSerialize() throws JsonProcessingException {
SecurityScheme securityScheme = new SecurityScheme();
securityScheme.setId("sec-1");
securityScheme.setType("apiKey");
securityScheme.setScheme("bearer");
securityScheme.setIn("header");
securityScheme.setName("Authorization");
securityScheme.setDefaultCredential("default-token");
String json = mapper.writeValueAsString(securityScheme);
assertTrue(json.contains("\"id\":\"sec-1\""));
assertTrue(json.contains("\"type\":\"apiKey\""));
assertTrue(json.contains("\"scheme\":\"bearer\""));
assertTrue(json.contains("\"in\":\"header\""));
assertTrue(json.contains("\"name\":\"Authorization\""));
assertTrue(json.contains("\"defaultCredential\":\"default-token\""));
}
@Test
void testDeserialize() throws JsonProcessingException {
String json = "{\"id\":\"sec-1\",\"type\":\"apiKey\",\"scheme\":\"bearer\",\"in\":\"header\","
+ "\"name\":\"Authorization\",\"defaultCredential\":\"default-token\"}";
SecurityScheme result = mapper.readValue(json, SecurityScheme.class);
assertNotNull(result);
assertEquals("sec-1", result.getId());
assertEquals("apiKey", result.getType());
assertEquals("bearer", result.getScheme());
assertEquals("header", result.getIn());
assertEquals("Authorization", result.getName());
assertEquals("default-token", result.getDefaultCredential());
}
} | SecuritySchemeTest |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/testutil/AsyncReaderWrapper.java | {
"start": 322,
"end": 2613
} | class ____
implements AutoCloseable
{
protected final JsonParser _streamReader;
protected AsyncReaderWrapper(JsonParser sr) {
_streamReader = sr;
}
public JsonToken currentToken() {
return _streamReader.currentToken();
}
public String currentText() {
return _streamReader.getString();
}
public String currentTextViaCharacters()
{
char[] ch = _streamReader.getStringCharacters();
int start = _streamReader.getStringOffset();
int len = _streamReader.getStringLength();
return new String(ch, start, len);
}
public String currentTextViaWriter()
{
StringWriter sw = new StringWriter();
int len = _streamReader.getString(sw);
String str = sw.toString();
if (len != str.length()) {
throw new IllegalStateException(String.format(
"Reader.getText(Writer) returned %d, but wrote %d chars",
len, str.length()));
}
return str;
}
public String currentName() {
return _streamReader.currentName();
}
public JsonParser parser() { return _streamReader; }
public abstract JsonToken nextToken();
public TokenStreamContext getParsingContext() {
return _streamReader.streamReadContext();
}
public int getIntValue() { return _streamReader.getIntValue(); }
public long getLongValue() { return _streamReader.getLongValue(); }
public float getFloatValue() { return _streamReader.getFloatValue(); }
public double getDoubleValue() { return _streamReader.getDoubleValue(); }
public BigInteger getBigIntegerValue() { return _streamReader.getBigIntegerValue(); }
public BigDecimal getDecimalValue() { return _streamReader.getDecimalValue(); }
public byte[] getBinaryValue() { return _streamReader.getBinaryValue(); }
public Number getNumberValue() { return _streamReader.getNumberValue(); }
public Object getNumberValueDeferred() { return _streamReader.getNumberValueDeferred(); }
public NumberType getNumberType() { return _streamReader.getNumberType(); }
@Override
public void close() { _streamReader.close(); }
public boolean isClosed() {
return _streamReader.isClosed();
}
}
| AsyncReaderWrapper |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/network/Selectable.java | {
"start": 1041,
"end": 1106
} | interface ____ asynchronous, multi-channel network I/O
*/
public | for |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging-kafka/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/kafka/deployment/DefaultSerdeConfigTest.java | {
"start": 3268,
"end": 17116
} | class ____ {
private static void doTest(Tuple[] expectations, Class<?>... classesToIndex) {
doTest(null, expectations, classesToIndex);
}
private static void doTest(Config customConfig, Tuple[] expectations, Class<?>... classesToIndex) {
doTest(customConfig, expectations, Collections.emptyList(), Collections.emptyList(), classesToIndex);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private static void doTest(Config customConfig, Tuple[] expectations,
List<Function<String, Assert>> generatedNames,
List<Function<String, Assert>> reflectiveNames, Class<?>... classesToIndex) {
List<RunTimeConfigurationDefaultBuildItem> configs = new ArrayList<>();
List<GeneratedClassBuildItem> generated = new ArrayList<>();
List<ReflectiveClassBuildItem> reflective = new ArrayList<>();
List<Class<?>> classes = new ArrayList<>(Arrays.asList(classesToIndex));
classes.add(Incoming.class);
classes.add(Outgoing.class);
classes.add(Serializer.class);
classes.add(Deserializer.class);
classes.add(io.quarkus.kafka.client.serialization.ObjectMapperDeserializer.class);
classes.add(io.quarkus.kafka.client.serialization.ObjectMapperSerializer.class);
classes.add(io.quarkus.kafka.client.serialization.JsonbSerializer.class);
classes.add(io.quarkus.kafka.client.serialization.JsonbDeserializer.class);
DefaultSerdeDiscoveryState discovery = new DefaultSerdeDiscoveryState(index(classes)) {
@Override
Config getConfig() {
return customConfig != null ? customConfig : super.getConfig();
}
@Override
boolean isKafkaConnector(List<ConnectorManagedChannelBuildItem> list, boolean incoming, String channelName) {
return true;
}
};
try {
new SmallRyeReactiveMessagingKafkaProcessor().discoverDefaultSerdeConfig(discovery, Collections.emptyList(),
configs::add,
(generatedNames == null) ? null : generated::add,
(reflectiveNames == null) ? null : reflective::add);
assertThat(configs)
.extracting(RunTimeConfigurationDefaultBuildItem::getKey, RunTimeConfigurationDefaultBuildItem::getValue)
.hasSize(expectations.length)
.allSatisfy(tuple -> {
Object[] e = tuple.toArray();
String key = (String) e[0];
String value = (String) e[1];
assertThat(Arrays.stream(expectations).filter(t -> key.equals(t.toArray()[0])))
.hasSize(1)
.satisfiesOnlyOnce(t -> {
Object o = t.toArray()[1];
if (o instanceof String) {
assertThat(value).isEqualTo((String) o);
} else {
((Function<String, Assert>) o).apply(value);
}
});
});
assertThat(generated)
.extracting(GeneratedClassBuildItem::internalName)
.allSatisfy(s -> assertThat(generatedNames).satisfiesOnlyOnce(c -> c.apply(s)));
assertThat(reflective)
.flatExtracting(ReflectiveClassBuildItem::getClassNames)
.extracting(n -> n.replace('/', '.'))
.allSatisfy(s -> assertThat(reflectiveNames).satisfiesOnlyOnce(c -> c.apply(s)));
} finally {
// must not leak the lazily-initialized Config instance associated to the system classloader
if (customConfig == null) {
QuarkusConfigFactory.setConfig(null);
}
}
}
Function<String, Assert> assertMatches(String regex) {
return s -> assertThat(s).matches(regex);
}
Function<String, Assert> assertStartsWith(String starts) {
return s -> assertThat(s).startsWith(starts);
}
private static IndexView index(List<Class<?>> classes) {
Indexer indexer = new Indexer();
for (Class<?> clazz : classes) {
final String resourceName = ClassLoaderHelper.fromClassNameToResourceName(clazz.getName());
try {
try (InputStream stream = DefaultSerdeConfigTest.class.getClassLoader()
.getResourceAsStream(resourceName)) {
indexer.index(stream);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return indexer.complete();
}
// ---
@Test
public void stringInLongOut() {
// @formatter:off
Tuple[] expectations = {
tuple("mp.messaging.outgoing.channel1.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel2.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel3.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel4.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel5.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel6.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel7.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel8.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel9.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel10.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel11.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.outgoing.channel12.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel13.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel14.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel15.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel16.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel17.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel18.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel19.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel20.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel21.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.incoming.channel22.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel23.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel24.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel25.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel26.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel27.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel28.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel29.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel30.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel31.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel32.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel33.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel34.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel35.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel36.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel37.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel38.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel39.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel40.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel41.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel42.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel43.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel44.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel45.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel46.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel47.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel48.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel49.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel50.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel51.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel52.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel53.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel54.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel55.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel56.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel57.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel58.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel59.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel60.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel61.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel62.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel63.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
tuple("mp.messaging.incoming.channel64.value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"),
tuple("mp.messaging.outgoing.channel65.value.serializer", "org.apache.kafka.common.serialization.LongSerializer"),
};
// @formatter:on
doTest(expectations, StringInLongOut.class);
}
private static | DefaultSerdeConfigTest |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/log/LogSqlFormatSqlFalseTest.java | {
"start": 531,
"end": 1339
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyEntity.class)
.addAsResource("application-log-sql-format-sql-false.properties", "application.properties"));
@Inject
EntityManager em;
@BeforeEach
public void activateRequestContext() {
Arc.container().requestContext().activate();
}
@Test
public void testFormattedValue() {
Map<String, Object> properties = em.getEntityManagerFactory().getProperties();
assertNull(properties.get(AvailableSettings.FORMAT_SQL));
}
@AfterEach
public void terminateRequestContext() {
Arc.container().requestContext().terminate();
}
}
| LogSqlFormatSqlFalseTest |
java | google__gson | gson/src/test/java/com/google/gson/stream/JsonReaderTest.java | {
"start": 1507,
"end": 82897
} | class ____ {
@Test
public void testDefaultStrictness() {
JsonReader reader = new JsonReader(reader("{}"));
assertThat(reader.getStrictness()).isEqualTo(Strictness.LEGACY_STRICT);
}
@SuppressWarnings("deprecation") // for JsonReader.setLenient
@Test
public void testSetLenientTrue() {
JsonReader reader = new JsonReader(reader("{}"));
reader.setLenient(true);
assertThat(reader.getStrictness()).isEqualTo(Strictness.LENIENT);
}
@SuppressWarnings("deprecation") // for JsonReader.setLenient
@Test
public void testSetLenientFalse() {
JsonReader reader = new JsonReader(reader("{}"));
reader.setLenient(false);
assertThat(reader.getStrictness()).isEqualTo(Strictness.LEGACY_STRICT);
}
@Test
public void testSetStrictness() {
JsonReader reader = new JsonReader(reader("{}"));
reader.setStrictness(Strictness.STRICT);
assertThat(reader.getStrictness()).isEqualTo(Strictness.STRICT);
}
@Test
public void testSetStrictnessNull() {
JsonReader reader = new JsonReader(reader("{}"));
assertThrows(NullPointerException.class, () -> reader.setStrictness(null));
}
@Test
public void testEscapedNewlineNotAllowedInStrictMode() {
String json = "\"\\\n\"";
JsonReader reader = new JsonReader(reader(json));
reader.setStrictness(Strictness.STRICT);
IOException expected = assertThrows(IOException.class, reader::nextString);
assertThat(expected)
.hasMessageThat()
.startsWith("Cannot escape a newline character in strict mode");
}
@Test
public void testEscapedNewlineAllowedInDefaultMode() throws IOException {
String json = "\"\\\n\"";
JsonReader reader = new JsonReader(reader(json));
assertThat(reader.nextString()).isEqualTo("\n");
}
@Test
public void testStrictModeFailsToParseUnescapedControlCharacter() {
String json = "\"\0\"";
JsonReader reader = new JsonReader(reader(json));
reader.setStrictness(Strictness.STRICT);
IOException expected = assertThrows(IOException.class, reader::nextString);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Unescaped control characters (\\u0000-\\u001F) are not allowed in strict mode");
json = "\"\t\"";
reader = new JsonReader(reader(json));
reader.setStrictness(Strictness.STRICT);
expected = assertThrows(IOException.class, reader::nextString);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Unescaped control characters (\\u0000-\\u001F) are not allowed in strict mode");
json = "\"\u001F\"";
reader = new JsonReader(reader(json));
reader.setStrictness(Strictness.STRICT);
expected = assertThrows(IOException.class, reader::nextString);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Unescaped control characters (\\u0000-\\u001F) are not allowed in strict mode");
}
@Test
public void testStrictModeAllowsOtherControlCharacters() throws IOException {
// JSON specification only forbids control characters U+0000 - U+001F, other control characters
// should be allowed
String json = "\"\u007F\u009F\"";
JsonReader reader = new JsonReader(reader(json));
reader.setStrictness(Strictness.STRICT);
assertThat(reader.nextString()).isEqualTo("\u007F\u009F");
}
@Test
public void testNonStrictModeParsesUnescapedControlCharacter() throws IOException {
String json = "\"\t\"";
JsonReader reader = new JsonReader(reader(json));
assertThat(reader.nextString()).isEqualTo("\t");
}
@Test
public void testCapitalizedTrueFailWhenStrict() {
JsonReader reader = new JsonReader(reader("TRUE"));
reader.setStrictness(Strictness.STRICT);
IOException expected = assertThrows(IOException.class, reader::nextBoolean);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
+ " at line 1 column 1 path $\n");
reader = new JsonReader(reader("True"));
reader.setStrictness(Strictness.STRICT);
expected = assertThrows(IOException.class, reader::nextBoolean);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
+ " at line 1 column 1 path $\n");
}
@Test
public void testCapitalizedFalseFailWhenStrict() {
JsonReader reader = new JsonReader(reader("FALSE"));
reader.setStrictness(Strictness.STRICT);
IOException expected = assertThrows(IOException.class, reader::nextBoolean);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
+ " at line 1 column 1 path $\n");
reader = new JsonReader(reader("FaLse"));
reader.setStrictness(Strictness.STRICT);
expected = assertThrows(IOException.class, reader::nextBoolean);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
+ " at line 1 column 1 path $\n");
}
@Test
public void testCapitalizedNullFailWhenStrict() {
JsonReader reader = new JsonReader(reader("NULL"));
reader.setStrictness(Strictness.STRICT);
IOException expected = assertThrows(IOException.class, reader::nextNull);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
+ " at line 1 column 1 path $\n");
reader = new JsonReader(reader("nulL"));
reader.setStrictness(Strictness.STRICT);
expected = assertThrows(IOException.class, reader::nextNull);
assertThat(expected)
.hasMessageThat()
.startsWith(
"Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON"
+ " at line 1 column 1 path $\n");
}
  // Basic happy path: a two-element boolean array is read token by token
  // and the reader ends at END_DOCUMENT.
  @Test
  public void testReadArray() throws IOException {
    JsonReader reader = new JsonReader(reader("[true, true]"));
    reader.beginArray();
    assertThat(reader.nextBoolean()).isTrue();
    assertThat(reader.nextBoolean()).isTrue();
    reader.endArray();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // An empty array reports hasNext() == false immediately after beginArray().
  @Test
  public void testReadEmptyArray() throws IOException {
    JsonReader reader = new JsonReader(reader("[]"));
    reader.beginArray();
    assertThat(reader.hasNext()).isFalse();
    reader.endArray();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // Basic happy path for objects: names and string values alternate in document order.
  @Test
  public void testReadObject() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\": \"android\", \"b\": \"banana\"}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    assertThat(reader.nextString()).isEqualTo("android");
    assertThat(reader.nextName()).isEqualTo("b");
    assertThat(reader.nextString()).isEqualTo("banana");
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // An empty object reports hasNext() == false immediately after beginObject().
  @Test
  public void testReadEmptyObject() throws IOException {
    JsonReader reader = new JsonReader(reader("{}"));
    reader.beginObject();
    assertThat(reader.hasNext()).isFalse();
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // hasNext() at the top level returns false once the document has been fully consumed.
  @Test
  public void testHasNextEndOfDocument() throws IOException {
    JsonReader reader = new JsonReader(reader("{}"));
    reader.beginObject();
    reader.endObject();
    assertThat(reader.hasNext()).isFalse();
  }

  // skipValue() on an array value consumes the entire nested array, leaving
  // the reader positioned at the next property name.
  @Test
  public void testSkipArray() throws IOException {
    JsonReader reader =
        new JsonReader(reader("{\"a\": [\"one\", \"two\", \"three\"], \"b\": 123}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    reader.skipValue();
    assertThat(reader.nextName()).isEqualTo("b");
    assertThat(reader.nextInt()).isEqualTo(123);
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // Same as testSkipArray, but a peek() happens first; skipValue() must still
  // skip the whole (already-peeked) array.
  @Test
  public void testSkipArrayAfterPeek() throws Exception {
    JsonReader reader =
        new JsonReader(reader("{\"a\": [\"one\", \"two\", \"three\"], \"b\": 123}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    assertThat(reader.peek()).isEqualTo(BEGIN_ARRAY);
    reader.skipValue();
    assertThat(reader.nextName()).isEqualTo("b");
    assertThat(reader.nextInt()).isEqualTo(123);
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // skipValue() invoked on the whole top-level object skips the entire document.
  @Test
  public void testSkipTopLevelObject() throws Exception {
    JsonReader reader =
        new JsonReader(reader("{\"a\": [\"one\", \"two\", \"three\"], \"b\": 123}"));
    reader.skipValue();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // skipValue() on an object value consumes the whole nested object, including
  // arrays and objects nested inside it.
  @Test
  public void testSkipObject() throws IOException {
    JsonReader reader =
        new JsonReader(
            reader("{\"a\": { \"c\": [], \"d\": [true, true, {}] }, \"b\": \"banana\"}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    reader.skipValue();
    assertThat(reader.nextName()).isEqualTo("b");
    reader.skipValue();
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }
  // skipValue() after peek() must skip each peeked nested object correctly,
  // repeatedly, without desynchronizing the reader.
  @Test
  public void testSkipObjectAfterPeek() throws Exception {
    String json =
        "{"
            + "  \"one\": { \"num\": 1 }"
            + ", \"two\": { \"num\": 2 }"
            + ", \"three\": { \"num\": 3 }"
            + "}";

    JsonReader reader = new JsonReader(reader(json));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("one");
    assertThat(reader.peek()).isEqualTo(BEGIN_OBJECT);
    reader.skipValue();
    assertThat(reader.nextName()).isEqualTo("two");
    assertThat(reader.peek()).isEqualTo(BEGIN_OBJECT);
    reader.skipValue();
    assertThat(reader.nextName()).isEqualTo("three");
    reader.skipValue();
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // skipValue() positioned on a property NAME skips just the name; the skipped
  // name is reported in the JSONPath as the placeholder "<skipped>".
  @Test
  public void testSkipObjectName() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\": 1}"));
    reader.beginObject();
    reader.skipValue();
    assertThat(reader.peek()).isEqualTo(JsonToken.NUMBER);
    assertThat(reader.getPath()).isEqualTo("$.<skipped>");
    assertThat(reader.nextInt()).isEqualTo(1);
  }

  // Same as above for a single-quoted name (lenient-only syntax).
  @Test
  public void testSkipObjectNameSingleQuoted() throws IOException {
    JsonReader reader = new JsonReader(reader("{'a': 1}"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginObject();
    reader.skipValue();
    assertThat(reader.peek()).isEqualTo(JsonToken.NUMBER);
    assertThat(reader.getPath()).isEqualTo("$.<skipped>");
    assertThat(reader.nextInt()).isEqualTo(1);
  }

  // Same as above for an unquoted name (lenient-only syntax).
  @Test
  public void testSkipObjectNameUnquoted() throws IOException {
    JsonReader reader = new JsonReader(reader("{a: 1}"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginObject();
    reader.skipValue();
    assertThat(reader.peek()).isEqualTo(JsonToken.NUMBER);
    assertThat(reader.getPath()).isEqualTo("$.<skipped>");
    assertThat(reader.nextInt()).isEqualTo(1);
  }

  // skipValue() skips integer values (positive and negative) without parsing them.
  @Test
  public void testSkipInteger() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\":123456789,\"b\":-123456789}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    reader.skipValue();
    assertThat(reader.nextName()).isEqualTo("b");
    reader.skipValue();
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // skipValue() skips doubles, including one whose exponent (-123.456e-789) would
  // not be representable as a finite double if actually parsed.
  @Test
  public void testSkipDouble() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\":-123.456e-789,\"b\":123456789.0}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    reader.skipValue();
    assertThat(reader.nextName()).isEqualTo("b");
    reader.skipValue();
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // skipValue() at END_DOCUMENT is a no-op: token and path are unchanged.
  @Test
  public void testSkipValueAfterEndOfDocument() throws IOException {
    JsonReader reader = new JsonReader(reader("{}"));
    reader.beginObject();
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);

    assertThat(reader.getPath()).isEqualTo("$");
    reader.skipValue();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
    assertThat(reader.getPath()).isEqualTo("$");
  }

  // skipValue() at the closing bracket of an array consumes the END_ARRAY token.
  @Test
  public void testSkipValueAtArrayEnd() throws IOException {
    JsonReader reader = new JsonReader(reader("[]"));
    reader.beginArray();
    reader.skipValue();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
    assertThat(reader.getPath()).isEqualTo("$");
  }

  // skipValue() at the closing brace of an object consumes the END_OBJECT token.
  @Test
  public void testSkipValueAtObjectEnd() throws IOException {
    JsonReader reader = new JsonReader(reader("{}"));
    reader.beginObject();
    reader.skipValue();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
    assertThat(reader.getPath()).isEqualTo("$");
  }
  // End-to-end read of a small multi-line document mixing booleans, names and
  // a nested array.
  @Test
  public void testHelloWorld() throws IOException {
    String json =
        "{\n" //
            + "   \"hello\": true,\n" //
            + "   \"foo\": [\"world\"]\n" //
            + "}";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("hello");
    assertThat(reader.nextBoolean()).isTrue();
    assertThat(reader.nextName()).isEqualTo("foo");
    reader.beginArray();
    assertThat(reader.nextString()).isEqualTo("world");
    reader.endArray();
    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // An invalid escape sequence (\e) in a property name fails with a
  // MalformedJsonException whose message carries line/column/path plus a
  // troubleshooting link.
  @Test
  public void testInvalidJsonInput() throws IOException {
    String json =
        "{\n" //
            + "   \"h\\ello\": true,\n" //
            + "   \"foo\": [\"world\"]\n" //
            + "}";

    JsonReader reader = new JsonReader(reader(json));
    reader.beginObject();
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextName());
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "Invalid escape sequence at line 2 column 8 path $.\n"
                + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
  }

  // The JsonReader constructor is null-hostile.
  @SuppressWarnings("unused")
  @Test
  public void testNulls() {
    assertThrows(NullPointerException.class, () -> new JsonReader(null));
  }

  // Completely empty input fails with EOFException at the first structural call.
  @Test
  public void testEmptyString() {
    assertThrows(EOFException.class, () -> new JsonReader(reader("")).beginArray());
    assertThrows(EOFException.class, () -> new JsonReader(reader("")).beginObject());
  }
  // Exercises every JSON string escape form plus bare structural characters
  // inside strings; each array element maps to one expected unescaped value.
  @Test
  public void testCharacterUnescaping() throws IOException {
    String json =
        "[\"a\","
            + "\"a\\\"\","
            + "\"\\\"\","
            + "\":\","
            + "\",\","
            + "\"\\b\","
            + "\"\\f\","
            + "\"\\n\","
            + "\"\\r\","
            + "\"\\t\","
            + "\" \","
            + "\"\\\\\","
            + "\"{\","
            + "\"}\","
            + "\"[\","
            + "\"]\","
            + "\"\\u0000\","
            + "\"\\u0019\","
            + "\"\\u20AC\""
            + "]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    assertThat(reader.nextString()).isEqualTo("a");
    assertThat(reader.nextString()).isEqualTo("a\"");
    assertThat(reader.nextString()).isEqualTo("\"");
    assertThat(reader.nextString()).isEqualTo(":");
    assertThat(reader.nextString()).isEqualTo(",");
    assertThat(reader.nextString()).isEqualTo("\b");
    assertThat(reader.nextString()).isEqualTo("\f");
    assertThat(reader.nextString()).isEqualTo("\n");
    assertThat(reader.nextString()).isEqualTo("\r");
    assertThat(reader.nextString()).isEqualTo("\t");
    assertThat(reader.nextString()).isEqualTo(" ");
    assertThat(reader.nextString()).isEqualTo("\\");
    assertThat(reader.nextString()).isEqualTo("{");
    assertThat(reader.nextString()).isEqualTo("}");
    assertThat(reader.nextString()).isEqualTo("[");
    assertThat(reader.nextString()).isEqualTo("]");
    assertThat(reader.nextString()).isEqualTo("\0");
    assertThat(reader.nextString()).isEqualTo("\u0019");
    assertThat(reader.nextString()).isEqualTo("\u20AC");
    reader.endArray();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // U+2028 (LINE SEPARATOR) and U+2029 (PARAGRAPH SEPARATOR) are NOT treated as
  // newlines: bare between tokens they are malformed, but inside a string they
  // are valid even in STRICT mode (they are not in the U+0000–U+001F range).
  @Test
  public void testReaderDoesNotTreatU2028U2029AsNewline() throws IOException {
    // This test shows that the JSON string [\n"whatever"] is seen as valid
    // And the JSON string [\u2028"whatever"] is not.
    String jsonInvalid2028 = "[\u2028\"whatever\"]";
    JsonReader readerInvalid2028 = new JsonReader(reader(jsonInvalid2028));
    readerInvalid2028.beginArray();
    assertThrows(IOException.class, readerInvalid2028::nextString);

    String jsonInvalid2029 = "[\u2029\"whatever\"]";
    JsonReader readerInvalid2029 = new JsonReader(reader(jsonInvalid2029));
    readerInvalid2029.beginArray();
    assertThrows(IOException.class, readerInvalid2029::nextString);

    String jsonValid = "[\n\"whatever\"]";
    JsonReader readerValid = new JsonReader(reader(jsonValid));
    readerValid.beginArray();
    assertThat(readerValid.nextString()).isEqualTo("whatever");

    // And even in STRICT mode U+2028 and U+2029 are not considered control characters
    // and can appear unescaped in JSON string
    String jsonValid2028And2029 = "\"whatever\u2028\u2029\"";
    JsonReader readerValid2028And2029 = new JsonReader(reader(jsonValid2028And2029));
    readerValid2028And2029.setStrictness(Strictness.STRICT);
    assertThat(readerValid2028And2029.nextString()).isEqualTo("whatever\u2028\u2029");
  }

  // The non-standard escape \' is rejected in STRICT mode.
  @Test
  public void testEscapeCharacterQuoteInStrictMode() {
    String json = "\"\\'\"";
    JsonReader reader = new JsonReader(reader(json));
    reader.setStrictness(Strictness.STRICT);

    IOException expected = assertThrows(IOException.class, reader::nextString);
    assertThat(expected)
        .hasMessageThat()
        .startsWith("Invalid escaped character \"'\" in strict mode");
  }

  // ... but in the default mode \' unescapes to a plain apostrophe.
  @Test
  public void testEscapeCharacterQuoteWithoutStrictMode() throws IOException {
    String json = "\"\\'\"";
    JsonReader reader = new JsonReader(reader(json));
    assertThat(reader.nextString()).isEqualTo("'");
  }

  // A Unicode escape with a non-hex digit (\u000g) is malformed.
  @Test
  public void testUnescapingInvalidCharacters() throws IOException {
    String json = "[\"\\u000g\"]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextString());
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "Malformed Unicode escape \\u000g at line 1 column 5 path $[0]\n"
                + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
  }

  // A Unicode escape truncated by end of input fails as an unterminated escape.
  @Test
  public void testUnescapingTruncatedCharacters() throws IOException {
    String json = "[\"\\u000";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextString());
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "Unterminated escape sequence at line 1 column 5 path $[0]\n"
                + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
  }

  // A bare backslash at end of input fails the same way.
  @Test
  public void testUnescapingTruncatedSequence() throws IOException {
    String json = "[\"\\";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextString());
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "Unterminated escape sequence at line 1 column 4 path $[0]\n"
                + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
  }
  // Numbers written with a fractional part of .0 can still be read as
  // double, int and long.
  @Test
  public void testIntegersWithFractionalPartSpecified() throws IOException {
    JsonReader reader = new JsonReader(reader("[1.0,1.0,1.0]"));
    reader.beginArray();
    assertThat(reader.nextDouble()).isEqualTo(1.0);
    assertThat(reader.nextInt()).isEqualTo(1);
    assertThat(reader.nextLong()).isEqualTo(1L);
  }

  // Double parsing across extreme magnitudes, signed zero, and various
  // exponent spellings (e0, e+0, e-0, leading zeros in the exponent).
  @Test
  public void testDoubles() throws IOException {
    String json =
        "[-0.0,"
            + "1.0,"
            + "1.7976931348623157E308,"
            + "4.9E-324,"
            + "0.0,"
            + "0.00,"
            + "-0.5,"
            + "2.2250738585072014E-308,"
            + "3.141592653589793,"
            + "2.718281828459045,"
            + "0,"
            + "0.01,"
            + "0e0,"
            + "1e+0,"
            + "1e-0,"
            + "1e0000," // leading 0 is allowed for exponent
            + "1e00001,"
            + "1e+1]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    assertThat(reader.nextDouble()).isEqualTo(-0.0);
    assertThat(reader.nextDouble()).isEqualTo(1.0);
    assertThat(reader.nextDouble()).isEqualTo(1.7976931348623157E308);
    assertThat(reader.nextDouble()).isEqualTo(4.9E-324);
    assertThat(reader.nextDouble()).isEqualTo(0.0);
    assertThat(reader.nextDouble()).isEqualTo(0.0);
    assertThat(reader.nextDouble()).isEqualTo(-0.5);
    assertThat(reader.nextDouble()).isEqualTo(2.2250738585072014E-308);
    assertThat(reader.nextDouble()).isEqualTo(3.141592653589793);
    assertThat(reader.nextDouble()).isEqualTo(2.718281828459045);
    assertThat(reader.nextDouble()).isEqualTo(0.0);
    assertThat(reader.nextDouble()).isEqualTo(0.01);
    assertThat(reader.nextDouble()).isEqualTo(0.0);
    assertThat(reader.nextDouble()).isEqualTo(1.0);
    assertThat(reader.nextDouble()).isEqualTo(1.0);
    assertThat(reader.nextDouble()).isEqualTo(1.0);
    assertThat(reader.nextDouble()).isEqualTo(10.0);
    assertThat(reader.nextDouble()).isEqualTo(10.0);
    reader.endArray();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // Bare NaN is malformed JSON in the default (non-lenient) mode.
  @Test
  public void testStrictNonFiniteDoubles() throws IOException {
    String json = "[NaN]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextDouble());
    assertStrictError(e, "line 1 column 2 path $[0]");
  }

  // Even quoted, "NaN" cannot be read as a double without lenient mode.
  @Test
  public void testStrictQuotedNonFiniteDoubles() throws IOException {
    String json = "[\"NaN\"]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextDouble());
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "JSON forbids NaN and infinities: NaN at line 1 column 7 path $[0]\n"
                + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
  }

  // LENIENT mode accepts bare NaN / -Infinity / Infinity literals.
  @Test
  public void testLenientNonFiniteDoubles() throws IOException {
    String json = "[NaN, -Infinity, Infinity]";
    JsonReader reader = new JsonReader(reader(json));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.nextDouble()).isNaN();
    assertThat(reader.nextDouble()).isEqualTo(Double.NEGATIVE_INFINITY);
    assertThat(reader.nextDouble()).isEqualTo(Double.POSITIVE_INFINITY);
    reader.endArray();
  }

  // LENIENT mode also accepts the quoted string forms of the non-finite values.
  @Test
  public void testLenientQuotedNonFiniteDoubles() throws IOException {
    String json = "[\"NaN\", \"-Infinity\", \"Infinity\"]";
    JsonReader reader = new JsonReader(reader(json));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.nextDouble()).isNaN();
    assertThat(reader.nextDouble()).isEqualTo(Double.NEGATIVE_INFINITY);
    assertThat(reader.nextDouble()).isEqualTo(Double.POSITIVE_INFINITY);
    reader.endArray();
  }

  // skipValue() does not relax syntax checks: bare NaN still fails.
  @Test
  public void testStrictNonFiniteDoublesWithSkipValue() throws IOException {
    String json = "[NaN]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
    assertStrictError(e, "line 1 column 2 path $[0]");
  }

  // Long parsing including Long.MIN_VALUE / Long.MAX_VALUE; reading those
  // extremes as int overflows and throws NumberFormatException without
  // advancing the reader.
  @Test
  public void testLongs() throws IOException {
    String json =
        "[0,0,0," + "1,1,1," + "-1,-1,-1," + "-9223372036854775808," + "9223372036854775807]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    assertThat(reader.nextLong()).isEqualTo(0L);
    assertThat(reader.nextInt()).isEqualTo(0);
    assertThat(reader.nextDouble()).isEqualTo(0.0);
    assertThat(reader.nextLong()).isEqualTo(1L);
    assertThat(reader.nextInt()).isEqualTo(1);
    assertThat(reader.nextDouble()).isEqualTo(1.0);
    assertThat(reader.nextLong()).isEqualTo(-1L);
    assertThat(reader.nextInt()).isEqualTo(-1);
    assertThat(reader.nextDouble()).isEqualTo(-1.0);
    assertThrows(NumberFormatException.class, () -> reader.nextInt());
    assertThat(reader.nextLong()).isEqualTo(Long.MIN_VALUE);
    assertThrows(NumberFormatException.class, () -> reader.nextInt());
    assertThat(reader.nextLong()).isEqualTo(Long.MAX_VALUE);
    reader.endArray();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // A leading zero ("01", octal-style) is malformed for every typed accessor
  // as well as for peek().
  @Test
  public void testNumberWithOctalPrefix() throws IOException {
    String number = "01";
    String expectedLocation = "line 1 column 1 path $";
    var e = assertThrows(MalformedJsonException.class, () -> new JsonReader(reader(number)).peek());
    assertStrictError(e, expectedLocation);

    e = assertThrows(MalformedJsonException.class, () -> new JsonReader(reader(number)).nextInt());
    assertStrictError(e, expectedLocation);

    e = assertThrows(MalformedJsonException.class, () -> new JsonReader(reader(number)).nextLong());
    assertStrictError(e, expectedLocation);

    e =
        assertThrows(
            MalformedJsonException.class, () -> new JsonReader(reader(number)).nextDouble());
    assertStrictError(e, expectedLocation);

    e =
        assertThrows(
            MalformedJsonException.class, () -> new JsonReader(reader(number)).nextString());
    assertStrictError(e, expectedLocation);
  }
  // Plain true/false literals.
  @Test
  public void testBooleans() throws IOException {
    JsonReader reader = new JsonReader(reader("[true,false]"));
    reader.beginArray();
    assertThat(reader.nextBoolean()).isTrue();
    assertThat(reader.nextBoolean()).isFalse();
    reader.endArray();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // In lenient mode "truey" peeks as STRING, not BOOLEAN; nextBoolean() fails
  // without advancing, then nextString() returns the unquoted token.
  @Test
  public void testPeekingUnquotedStringsPrefixedWithBooleans() throws IOException {
    JsonReader reader = new JsonReader(reader("[truey]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(STRING);
    var e = assertThrows(IllegalStateException.class, () -> reader.nextBoolean());
    assertUnexpectedStructureError(e, "a boolean", "STRING", "line 1 column 2 path $[0]");
    assertThat(reader.nextString()).isEqualTo("truey");
    reader.endArray();
  }

  // Catalog of syntactically invalid number spellings; each must NOT be
  // classified as NUMBER (see assertNotANumber below).
  @Test
  public void testMalformedNumbers() throws IOException {
    assertNotANumber("-");
    assertNotANumber(".");

    // plus sign is not allowed for integer part
    assertNotANumber("+1");

    // leading 0 is not allowed for integer part
    assertNotANumber("00");
    assertNotANumber("01");

    // exponent lacks digit
    assertNotANumber("e");
    assertNotANumber("0e");
    assertNotANumber(".e");
    assertNotANumber("0.e");
    assertNotANumber("-.0e");

    // no integer
    assertNotANumber("e1");
    assertNotANumber(".e1");
    assertNotANumber("-e1");

    // trailing characters
    assertNotANumber("1x");
    assertNotANumber("1.1x");
    assertNotANumber("1e1x");
    assertNotANumber("1ex");
    assertNotANumber("1.1ex");
    assertNotANumber("1.1e1x");

    // fraction has no digit
    assertNotANumber("0.");
    assertNotANumber("-0.");
    assertNotANumber("0.e1");
    assertNotANumber("-0.e1");

    // no leading digit
    assertNotANumber(".0");
    assertNotANumber("-.0");
    assertNotANumber(".0e1");
    assertNotANumber("-.0e1");
  }

  // Helper: asserts that in lenient mode `s` is read back as an unquoted STRING
  // token (not a NUMBER), and that in the default mode nextDouble() rejects it
  // as malformed JSON.
  private static void assertNotANumber(String s) throws IOException {
    JsonReader reader = new JsonReader(reader(s));
    reader.setStrictness(Strictness.LENIENT);
    assertThat(reader.peek()).isEqualTo(JsonToken.STRING);
    assertThat(reader.nextString()).isEqualTo(s);

    JsonReader strictReader = new JsonReader(reader(s));
    var e =
        assertThrows(
            "Should have failed reading " + s + " as double",
            MalformedJsonException.class,
            () -> strictReader.nextDouble());
    assertThat(e)
        .hasMessageThat()
        .startsWith("Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON");
  }

  // "12.34e5x" looks numeric at first but has trailing junk: peeks as STRING,
  // nextInt() fails, nextString() returns the raw token.
  @Test
  public void testPeekingUnquotedStringsPrefixedWithIntegers() throws IOException {
    JsonReader reader = new JsonReader(reader("[12.34e5x]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(STRING);
    assertThrows(NumberFormatException.class, () -> reader.nextInt());
    assertThat(reader.nextString()).isEqualTo("12.34e5x");
  }

  // Long.MIN_VALUE peeks as NUMBER and round-trips exactly.
  @Test
  public void testPeekLongMinValue() throws IOException {
    JsonReader reader = new JsonReader(reader("[-9223372036854775808]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThat(reader.nextLong()).isEqualTo(-9223372036854775808L);
  }

  // Long.MAX_VALUE peeks as NUMBER and round-trips exactly.
  @Test
  public void testPeekLongMaxValue() throws IOException {
    JsonReader reader = new JsonReader(reader("[9223372036854775807]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThat(reader.nextLong()).isEqualTo(9223372036854775807L);
  }

  // A value beyond Long.MAX_VALUE must throw NumberFormatException rather than
  // silently wrapping around.
  @Test
  public void testLongLargerThanMaxLongThatWrapsAround() throws IOException {
    JsonReader reader = new JsonReader(reader("[22233720368547758070]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThrows(NumberFormatException.class, () -> reader.nextLong());
  }

  // Same for a value below Long.MIN_VALUE.
  @Test
  public void testLongLargerThanMinLongThatWrapsAround() throws IOException {
    JsonReader reader = new JsonReader(reader("[-22233720368547758070]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThrows(NumberFormatException.class, () -> reader.nextLong());
  }
  /** Issue 1053, negative zero: "-0" peeks as NUMBER and is preserved as the string "-0". */
  @Test
  public void testNegativeZero() throws Exception {
    JsonReader reader = new JsonReader(reader("[-0]"));
    reader.setStrictness(Strictness.LEGACY_STRICT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThat(reader.nextString()).isEqualTo("-0");
  }

  /**
   * This test fails because there's no double for 9223372036854775808, and our long parsing uses
   * Double.parseDouble() for fractional values.
   */
  @Test
  @Ignore
  public void testPeekLargerThanLongMaxValue() throws IOException {
    JsonReader reader = new JsonReader(reader("[9223372036854775808]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThrows(NumberFormatException.class, () -> reader.nextLong());
  }

  /**
   * This test fails because there's no double for -9223372036854775809, and our long parsing uses
   * Double.parseDouble() for fractional values.
   */
  @Test
  @Ignore
  public void testPeekLargerThanLongMinValue() throws IOException {
    @SuppressWarnings("FloatingPointLiteralPrecision")
    double d = -9223372036854775809d;
    JsonReader reader = new JsonReader(reader("[-9223372036854775809]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThrows(NumberFormatException.class, () -> reader.nextLong());
    assertThat(reader.nextDouble()).isEqualTo(d);
  }

  /**
   * This test fails because there's no double for 9223372036854775806, and our long parsing uses
   * Double.parseDouble() for fractional values.
   */
  @Test
  @Ignore
  public void testHighPrecisionLong() throws IOException {
    String json = "[9223372036854775806.000]";
    JsonReader reader = new JsonReader(reader(json));
    reader.beginArray();
    assertThat(reader.nextLong()).isEqualTo(9223372036854775806L);
    reader.endArray();
  }

  // A value far outside long range falls back cleanly: nextLong() throws but
  // nextDouble() then returns the nearest double.
  @Test
  public void testPeekMuchLargerThanLongMinValue() throws IOException {
    @SuppressWarnings("FloatingPointLiteralPrecision")
    double d = -92233720368547758080d;
    JsonReader reader = new JsonReader(reader("[-92233720368547758080]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(NUMBER);
    assertThrows(NumberFormatException.class, () -> reader.nextLong());
    assertThat(reader.nextDouble()).isEqualTo(d);
  }

  // A quoted number containing a Unicode escape ("12\u00334" -> "1234") still
  // parses via nextInt() in lenient mode.
  @Test
  public void testQuotedNumberWithEscape() throws IOException {
    JsonReader reader = new JsonReader(reader("[\"12\\u00334\"]"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginArray();
    assertThat(reader.peek()).isEqualTo(STRING);
    assertThat(reader.nextInt()).isEqualTo(1234);
  }
  // Mixed-case literals (True, FALSE, nulL, ...) are accepted here without
  // setting LENIENT — presumably the default legacy-strict mode tolerates
  // case-insensitive keywords; TODO confirm against Strictness.LEGACY_STRICT docs.
  @Test
  public void testMixedCaseLiterals() throws IOException {
    JsonReader reader = new JsonReader(reader("[True,TruE,False,FALSE,NULL,nulL]"));
    reader.beginArray();
    assertThat(reader.nextBoolean()).isTrue();
    assertThat(reader.nextBoolean()).isTrue();
    assertThat(reader.nextBoolean()).isFalse();
    assertThat(reader.nextBoolean()).isFalse();
    reader.nextNull();
    reader.nextNull();
    reader.endArray();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
  }

  // A name with no value after the colon fails with "Expected value".
  @Test
  public void testMissingValue() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\":}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextString());
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "Expected value at line 1 column 6 path $.a\n"
                + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
  }

  // Input truncated after a trailing comma fails with EOFException when the
  // next name is requested.
  @Test
  public void testPrematureEndOfInput() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\":true,"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    assertThat(reader.nextBoolean()).isTrue();
    assertThrows(EOFException.class, () -> reader.nextName());
  }

  // Every accessor on a closed reader throws IllegalStateException("JsonReader is closed"),
  // regardless of how far reading had progressed before close().
  @Test
  public void testPrematurelyClosed() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\":[]}"));
    reader.beginObject();
    reader.close();
    var e = assertThrows(IllegalStateException.class, () -> reader.nextName());
    assertThat(e).hasMessageThat().isEqualTo("JsonReader is closed");

    JsonReader reader2 = new JsonReader(reader("{\"a\":[]}"));
    reader2.close();
    e = assertThrows(IllegalStateException.class, () -> reader2.beginObject());
    assertThat(e).hasMessageThat().isEqualTo("JsonReader is closed");

    JsonReader reader3 = new JsonReader(reader("{\"a\":true}"));
    reader3.beginObject();
    String unused1 = reader3.nextName();
    JsonToken unused2 = reader3.peek();
    reader3.close();
    e = assertThrows(IllegalStateException.class, () -> reader3.nextBoolean());
    assertThat(e).hasMessageThat().isEqualTo("JsonReader is closed");
  }

  // Calling the wrong typed accessor for the current token throws without
  // consuming the token, so the correct accessor still works afterwards.
  // NOTE(review): the "$." path (trailing dot) when positioned on a NAME is the
  // exact string Gson produces — keep it verbatim.
  @Test
  public void testNextFailuresDoNotAdvance() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\":true}"));
    reader.beginObject();
    var e = assertThrows(IllegalStateException.class, () -> reader.nextString());
    assertUnexpectedStructureError(e, "a string", "NAME", "line 1 column 3 path $.");

    assertThat(reader.nextName()).isEqualTo("a");
    e = assertThrows(IllegalStateException.class, () -> reader.nextName());
    assertUnexpectedStructureError(e, "a name", "BOOLEAN", "line 1 column 10 path $.a");

    e = assertThrows(IllegalStateException.class, () -> reader.beginArray());
    assertUnexpectedStructureError(e, "BEGIN_ARRAY", "BOOLEAN", "line 1 column 10 path $.a");

    e = assertThrows(IllegalStateException.class, () -> reader.endArray());
    assertUnexpectedStructureError(e, "END_ARRAY", "BOOLEAN", "line 1 column 10 path $.a");

    e = assertThrows(IllegalStateException.class, () -> reader.beginObject());
    assertUnexpectedStructureError(e, "BEGIN_OBJECT", "BOOLEAN", "line 1 column 10 path $.a");

    e = assertThrows(IllegalStateException.class, () -> reader.endObject());
    assertUnexpectedStructureError(e, "END_OBJECT", "BOOLEAN", "line 1 column 10 path $.a");

    assertThat(reader.nextBoolean()).isTrue();
    e = assertThrows(IllegalStateException.class, () -> reader.nextString());
    assertUnexpectedStructureError(e, "a string", "END_OBJECT", "line 1 column 11 path $.a");

    e = assertThrows(IllegalStateException.class, () -> reader.nextName());
    assertUnexpectedStructureError(e, "a name", "END_OBJECT", "line 1 column 11 path $.a");

    e = assertThrows(IllegalStateException.class, () -> reader.beginArray());
    assertUnexpectedStructureError(e, "BEGIN_ARRAY", "END_OBJECT", "line 1 column 11 path $.a");

    e = assertThrows(IllegalStateException.class, () -> reader.endArray());
    assertUnexpectedStructureError(e, "END_ARRAY", "END_OBJECT", "line 1 column 11 path $.a");

    reader.endObject();
    assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
    reader.close();
  }
  // A failed nextInt() on "1.5" does not consume the token; nextDouble() then succeeds.
  @Test
  public void testIntegerMismatchFailuresDoNotAdvance() throws IOException {
    JsonReader reader = new JsonReader(reader("[1.5]"));
    reader.beginArray();
    assertThrows(NumberFormatException.class, () -> reader.nextInt());
    assertThat(reader.nextDouble()).isEqualTo(1.5d);
    reader.endArray();
  }

  // The string "null" (quoted) is a STRING token, not a NULL token.
  @Test
  public void testStringNullIsNotNull() throws IOException {
    JsonReader reader = new JsonReader(reader("[\"null\"]"));
    reader.beginArray();
    var e = assertThrows(IllegalStateException.class, () -> reader.nextNull());
    assertUnexpectedStructureError(e, "null", "STRING", "line 1 column 3 path $[0]");
  }

  // Conversely, a bare null literal is a NULL token, not a STRING.
  @Test
  public void testNullLiteralIsNotAString() throws IOException {
    JsonReader reader = new JsonReader(reader("[null]"));
    reader.beginArray();
    var e = assertThrows(IllegalStateException.class, () -> reader.nextString());
    assertUnexpectedStructureError(e, "a string", "NULL", "line 1 column 6 path $[0]");
  }

  // The non-standard name/value separators "=" and "=>" are rejected by default.
  @Test
  public void testStrictNameValueSeparator() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\"=true}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextBoolean());
    assertStrictError(e, "line 1 column 6 path $.a");

    JsonReader reader2 = new JsonReader(reader("{\"a\"=>true}"));
    reader2.beginObject();
    assertThat(reader2.nextName()).isEqualTo("a");
    e = assertThrows(MalformedJsonException.class, () -> reader2.nextBoolean());
    assertStrictError(e, "line 1 column 6 path $.a");
  }

  // ... but LENIENT mode accepts both "=" and "=>" as separators.
  @Test
  public void testLenientNameValueSeparator() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\"=true}"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    assertThat(reader.nextBoolean()).isTrue();

    reader = new JsonReader(reader("{\"a\"=>true}"));
    reader.setStrictness(Strictness.LENIENT);
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    assertThat(reader.nextBoolean()).isTrue();
  }

  // skipValue() performs the same separator validation as the typed accessors.
  @Test
  public void testStrictNameValueSeparatorWithSkipValue() throws IOException {
    JsonReader reader = new JsonReader(reader("{\"a\"=true}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
    assertStrictError(e, "line 1 column 6 path $.a");

    JsonReader reader2 = new JsonReader(reader("{\"a\"=>true}"));
    reader2.beginObject();
    assertThat(reader2.nextName()).isEqualTo("a");
    e = assertThrows(MalformedJsonException.class, () -> reader2.skipValue());
    assertStrictError(e, "line 1 column 6 path $.a");
  }

  // Comment markers ("//", "#", "/*") inside string values are plain text,
  // not comments, and must survive unchanged.
  @Test
  public void testCommentsInStringValue() throws Exception {
    JsonReader reader = new JsonReader(reader("[\"// comment\"]"));
    reader.beginArray();
    assertThat(reader.nextString()).isEqualTo("// comment");
    reader.endArray();

    reader = new JsonReader(reader("{\"a\":\"#someComment\"}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("a");
    assertThat(reader.nextString()).isEqualTo("#someComment");
    reader.endObject();

    reader = new JsonReader(reader("{\"#//a\":\"#some //Comment\"}"));
    reader.beginObject();
    assertThat(reader.nextName()).isEqualTo("#//a");
    assertThat(reader.nextString()).isEqualTo("#some //Comment");
    reader.endObject();
  }

  // All three comment styles (//, #, /* */) between tokens are malformed
  // outside lenient mode.
  @Test
  public void testStrictComments() throws IOException {
    JsonReader reader = new JsonReader(reader("[// comment \n true]"));
    reader.beginArray();
    var e = assertThrows(MalformedJsonException.class, () -> reader.nextBoolean());
    assertStrictError(e, "line 1 column 3 path $[0]");

    JsonReader reader2 = new JsonReader(reader("[# comment \n true]"));
    reader2.beginArray();
    e = assertThrows(MalformedJsonException.class, () -> reader2.nextBoolean());
    assertStrictError(e, "line 1 column 3 path $[0]");

    JsonReader reader3 = new JsonReader(reader("[/* comment */ true]"));
    reader3.beginArray();
    e = assertThrows(MalformedJsonException.class, () -> reader3.nextBoolean());
    assertStrictError(e, "line 1 column 3 path $[0]");
  }
@Test
public void testLenientComments() throws IOException {
JsonReader reader = new JsonReader(reader("[// comment \n true]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
reader = new JsonReader(reader("[# comment \n true]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
reader = new JsonReader(reader("[/* comment */ true]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
}
@Test
public void testStrictCommentsWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("[// comment \n true]"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 3 path $[0]");
JsonReader reader2 = new JsonReader(reader("[# comment \n true]"));
reader2.beginArray();
e = assertThrows(MalformedJsonException.class, () -> reader2.skipValue());
assertStrictError(e, "line 1 column 3 path $[0]");
JsonReader reader3 = new JsonReader(reader("[/* comment */ true]"));
reader3.beginArray();
e = assertThrows(MalformedJsonException.class, () -> reader3.skipValue());
assertStrictError(e, "line 1 column 3 path $[0]");
}
@Test
public void testStrictUnquotedNames() throws IOException {
JsonReader reader = new JsonReader(reader("{a:true}"));
reader.beginObject();
var e = assertThrows(MalformedJsonException.class, () -> reader.nextName());
assertStrictError(e, "line 1 column 3 path $.");
}
@Test
public void testLenientUnquotedNames() throws IOException {
JsonReader reader = new JsonReader(reader("{a:true}"));
reader.setStrictness(Strictness.LENIENT);
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
}
@Test
public void testStrictUnquotedNamesWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("{a:true}"));
reader.beginObject();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 3 path $.");
}
@Test
public void testStrictSingleQuotedNames() throws IOException {
JsonReader reader = new JsonReader(reader("{'a':true}"));
reader.beginObject();
var e = assertThrows(MalformedJsonException.class, () -> reader.nextName());
assertStrictError(e, "line 1 column 3 path $.");
}
@Test
public void testLenientSingleQuotedNames() throws IOException {
JsonReader reader = new JsonReader(reader("{'a':true}"));
reader.setStrictness(Strictness.LENIENT);
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
}
@Test
public void testStrictSingleQuotedNamesWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("{'a':true}"));
reader.beginObject();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 3 path $.");
}
// Unquoted string values: strict-mode error, lenient-mode accepted (also via skipValue).
@Test
public void testStrictUnquotedStrings() throws IOException {
JsonReader reader = new JsonReader(reader("[a]"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.nextString());
assertStrictError(e, "line 1 column 2 path $[0]");
}
@Test
public void testStrictUnquotedStringsWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("[a]"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 2 path $[0]");
}
@Test
public void testLenientUnquotedStrings() throws IOException {
JsonReader reader = new JsonReader(reader("[a]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextString()).isEqualTo("a");
}
// Single-quoted string values: strict-mode error, lenient-mode accepted.
@Test
public void testStrictSingleQuotedStrings() throws IOException {
JsonReader reader = new JsonReader(reader("['a']"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.nextString());
assertStrictError(e, "line 1 column 3 path $[0]");
}
@Test
public void testLenientSingleQuotedStrings() throws IOException {
JsonReader reader = new JsonReader(reader("['a']"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextString()).isEqualTo("a");
}
@Test
public void testStrictSingleQuotedStringsWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("['a']"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 3 path $[0]");
}
// ';' as an array-element separator: strict-mode error, lenient-mode accepted.
@Test
public void testStrictSemicolonDelimitedArray() throws IOException {
JsonReader reader = new JsonReader(reader("[true;true]"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.nextBoolean());
assertStrictError(e, "line 1 column 2 path $[0]");
}
@Test
public void testLenientSemicolonDelimitedArray() throws IOException {
JsonReader reader = new JsonReader(reader("[true;true]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
assertThat(reader.nextBoolean()).isTrue();
}
@Test
public void testStrictSemicolonDelimitedArrayWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("[true;true]"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 2 path $[0]");
}
// ';' as an object-member separator: strict-mode error, lenient-mode accepted.
@Test
public void testStrictSemicolonDelimitedNameValuePair() throws IOException {
JsonReader reader = new JsonReader(reader("{\"a\":true;\"b\":true}"));
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
var e = assertThrows(MalformedJsonException.class, () -> reader.nextBoolean());
assertStrictError(e, "line 1 column 6 path $.a");
}
@Test
public void testLenientSemicolonDelimitedNameValuePair() throws IOException {
JsonReader reader = new JsonReader(reader("{\"a\":true;\"b\":true}"));
reader.setStrictness(Strictness.LENIENT);
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
assertThat(reader.nextBoolean()).isTrue();
assertThat(reader.nextName()).isEqualTo("b");
}
@Test
public void testStrictSemicolonDelimitedNameValuePairWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("{\"a\":true;\"b\":true}"));
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 6 path $.a");
}
@Test
public void testStrictUnnecessaryArraySeparators() throws IOException {
// The following calls `nextNull()` because a lenient JsonReader would treat redundant array
// separators as implicit JSON null
JsonReader reader = new JsonReader(reader("[true,,true]"));
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
var e = assertThrows(MalformedJsonException.class, () -> reader.nextNull());
assertStrictError(e, "line 1 column 8 path $[1]");
JsonReader reader2 = new JsonReader(reader("[,true]"));
reader2.beginArray();
e = assertThrows(MalformedJsonException.class, () -> reader2.nextNull());
assertStrictError(e, "line 1 column 3 path $[0]");
JsonReader reader3 = new JsonReader(reader("[true,]"));
reader3.beginArray();
assertThat(reader3.nextBoolean()).isTrue();
e = assertThrows(MalformedJsonException.class, () -> reader3.nextNull());
assertStrictError(e, "line 1 column 8 path $[1]");
JsonReader reader4 = new JsonReader(reader("[,]"));
reader4.beginArray();
e = assertThrows(MalformedJsonException.class, () -> reader4.nextNull());
assertStrictError(e, "line 1 column 3 path $[0]");
}
// In lenient mode each redundant separator reads as an implicit null element.
@Test
public void testLenientUnnecessaryArraySeparators() throws IOException {
JsonReader reader = new JsonReader(reader("[true,,true]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
// Redundant array separators are treated as implicit JSON null
reader.nextNull();
assertThat(reader.nextBoolean()).isTrue();
reader.endArray();
reader = new JsonReader(reader("[,true]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
reader.nextNull();
assertThat(reader.nextBoolean()).isTrue();
reader.endArray();
reader = new JsonReader(reader("[true,]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
reader.nextNull();
reader.endArray();
reader = new JsonReader(reader("[,]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
reader.nextNull();
reader.nextNull();
reader.endArray();
}
@Test
public void testStrictUnnecessaryArraySeparatorsWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("[true,,true]"));
reader.beginArray();
assertThat(reader.nextBoolean()).isTrue();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 8 path $[1]");
JsonReader reader2 = new JsonReader(reader("[,true]"));
reader2.beginArray();
e = assertThrows(MalformedJsonException.class, () -> reader2.skipValue());
assertStrictError(e, "line 1 column 3 path $[0]");
JsonReader reader3 = new JsonReader(reader("[true,]"));
reader3.beginArray();
assertThat(reader3.nextBoolean()).isTrue();
e = assertThrows(MalformedJsonException.class, () -> reader3.skipValue());
assertStrictError(e, "line 1 column 8 path $[1]");
JsonReader reader4 = new JsonReader(reader("[,]"));
reader4.beginArray();
e = assertThrows(MalformedJsonException.class, () -> reader4.skipValue());
assertStrictError(e, "line 1 column 3 path $[0]");
}
// Multiple top-level values: strict-mode error after the first document, lenient-mode accepted.
@Test
public void testStrictMultipleTopLevelValues() throws IOException {
JsonReader reader = new JsonReader(reader("[] []"));
reader.beginArray();
reader.endArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertStrictError(e, "line 1 column 5 path $");
}
@Test
public void testLenientMultipleTopLevelValues() throws IOException {
JsonReader reader = new JsonReader(reader("[] true {}"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
reader.endArray();
assertThat(reader.nextBoolean()).isTrue();
reader.beginObject();
reader.endObject();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
@Test
public void testStrictMultipleTopLevelValuesWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("[] []"));
reader.beginArray();
reader.endArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 5 path $");
}
// Every primitive JSON token type is valid as a lone top-level value.
@Test
public void testTopLevelValueTypes() throws IOException {
JsonReader reader1 = new JsonReader(reader("true"));
assertThat(reader1.nextBoolean()).isTrue();
assertThat(reader1.peek()).isEqualTo(JsonToken.END_DOCUMENT);
JsonReader reader2 = new JsonReader(reader("false"));
assertThat(reader2.nextBoolean()).isFalse();
assertThat(reader2.peek()).isEqualTo(JsonToken.END_DOCUMENT);
JsonReader reader3 = new JsonReader(reader("null"));
assertThat(reader3.peek()).isEqualTo(JsonToken.NULL);
reader3.nextNull();
assertThat(reader3.peek()).isEqualTo(JsonToken.END_DOCUMENT);
JsonReader reader4 = new JsonReader(reader("123"));
assertThat(reader4.nextInt()).isEqualTo(123);
assertThat(reader4.peek()).isEqualTo(JsonToken.END_DOCUMENT);
JsonReader reader5 = new JsonReader(reader("123.4"));
assertThat(reader5.nextDouble()).isEqualTo(123.4);
assertThat(reader5.peek()).isEqualTo(JsonToken.END_DOCUMENT);
JsonReader reader6 = new JsonReader(reader("\"a\""));
assertThat(reader6.nextString()).isEqualTo("a");
assertThat(reader6.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
@Test
public void testTopLevelValueTypeWithSkipValue() throws IOException {
JsonReader reader = new JsonReader(reader("true"));
reader.skipValue();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
// The anti-XSSI prefix ")]}'\n" is only consumed in lenient mode.
@Test
public void testStrictNonExecutePrefix() {
JsonReader reader = new JsonReader(reader(")]}'\n []"));
var e = assertThrows(MalformedJsonException.class, () -> reader.beginArray());
assertStrictError(e, "line 1 column 1 path $");
}
@Test
public void testStrictNonExecutePrefixWithSkipValue() {
JsonReader reader = new JsonReader(reader(")]}'\n []"));
var e = assertThrows(MalformedJsonException.class, () -> reader.skipValue());
assertStrictError(e, "line 1 column 1 path $")
;
}
@Test
public void testLenientNonExecutePrefix() throws IOException {
JsonReader reader = new JsonReader(reader(")]}'\n []"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
reader.endArray();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
@Test
public void testLenientNonExecutePrefixWithLeadingWhitespace() throws IOException {
JsonReader reader = new JsonReader(reader("\r\n \t)]}'\n []"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
reader.endArray();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
// A prefix missing its trailing newline is not recognized; ')' parses as an unquoted string.
@Test
public void testLenientPartialNonExecutePrefix() throws IOException {
JsonReader reader = new JsonReader(reader(")]}' []"));
reader.setStrictness(Strictness.LENIENT);
assertThat(reader.nextString()).isEqualTo(")");
var e = assertThrows(MalformedJsonException.class, () -> reader.nextString());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Unexpected value at line 1 column 3 path $\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
// A byte-order mark is ignored only as the very first character of the document.
@Test
public void testBomIgnoredAsFirstCharacterOfDocument() throws IOException {
JsonReader reader = new JsonReader(reader("\ufeff[]"));
reader.beginArray();
reader.endArray();
}
@Test
public void testBomForbiddenAsOtherCharacterInDocument() throws IOException {
JsonReader reader = new JsonReader(reader("[\ufeff]"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.endArray());
assertStrictError(e, "line 1 column 2 path $[0]");
}
// The tests below verify that error messages report the correct line/column/path
// across buffer refills, comments, strings, and a leading BOM.
@SuppressWarnings("UngroupedOverloads")
@Test
public void testFailWithPosition() throws IOException {
testFailWithPosition("Expected value at line 6 column 5 path $[1]", "[\n\n\n\n\n\"a\",}]");
}
@Test
public void testFailWithPositionGreaterThanBufferSize() throws IOException {
String spaces = repeat(' ', 8192);
testFailWithPosition(
"Expected value at line 6 column 5 path $[1]", "[\n\n" + spaces + "\n\n\n\"a\",}]");
}
@Test
public void testFailWithPositionOverSlashSlashEndOfLineComment() throws IOException {
testFailWithPosition(
"Expected value at line 5 column 6 path $[1]", "\n// foo\n\n//bar\r\n[\"a\",}");
}
@Test
public void testFailWithPositionOverHashEndOfLineComment() throws IOException {
testFailWithPosition(
"Expected value at line 5 column 6 path $[1]", "\n# foo\n\n#bar\r\n[\"a\",}");
}
@Test
public void testFailWithPositionOverCStyleComment() throws IOException {
testFailWithPosition(
"Expected value at line 6 column 12 path $[1]", "\n\n/* foo\n*\n*\r\nbar */[\"a\",}");
}
@Test
public void testFailWithPositionOverQuotedString() throws IOException {
testFailWithPosition(
"Expected value at line 5 column 3 path $[1]", "[\"foo\nbar\r\nbaz\n\",\n }");
}
@Test
public void testFailWithPositionOverUnquotedString() throws IOException {
testFailWithPosition("Expected value at line 5 column 2 path $[1]", "[\n\nabcd\n\n,}");
}
@Test
public void testFailWithEscapedNewlineCharacter() throws IOException {
testFailWithPosition("Expected value at line 5 column 3 path $[1]", "[\n\n\"\\\n\n\",}");
}
@Test
public void testFailWithPositionIsOffsetByBom() throws IOException {
testFailWithPosition("Expected value at line 1 column 6 path $[1]", "\ufeff[\"a\",}]");
}
// Shared driver: parses {@code json} leniently and asserts the failure message,
// once reading normally and once skipping the first element.
private static void testFailWithPosition(String message, String json) throws IOException {
// Validate that it works reading the string normally.
JsonReader reader1 = new JsonReader(reader(json));
reader1.setStrictness(Strictness.LENIENT);
reader1.beginArray();
String unused1 = reader1.nextString();
var e = assertThrows(MalformedJsonException.class, () -> reader1.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
message
+ "\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
// Also validate that it works when skipping.
JsonReader reader2 = new JsonReader(reader(json));
reader2.setStrictness(Strictness.LENIENT);
reader2.beginArray();
reader2.skipValue();
e = assertThrows(MalformedJsonException.class, () -> reader2.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
message
+ "\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
// Error paths include nested array/object segments, e.g. $[1].a[2].
@Test
public void testFailWithPositionDeepPath() throws IOException {
JsonReader reader = new JsonReader(reader("[1,{\"a\":[2,3,}"));
reader.beginArray();
int unused1 = reader.nextInt();
reader.beginObject();
String unused2 = reader.nextName();
reader.beginArray();
int unused3 = reader.nextInt();
int unused4 = reader.nextInt();
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Expected value at line 1 column 14 path $[1].a[2]\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
// A number too long to fit the internal buffer is rejected in strict mode...
@Test
public void testStrictVeryLongNumber() throws IOException {
JsonReader reader = new JsonReader(reader("[0." + repeat('9', 8192) + "]"));
reader.beginArray();
var e = assertThrows(MalformedJsonException.class, () -> reader.nextDouble());
assertStrictError(e, "line 1 column 2 path $[0]");
}
// ...but in lenient mode it is read back as a STRING and parsed on demand.
@Test
public void testLenientVeryLongNumber() throws IOException {
JsonReader reader = new JsonReader(reader("[0." + repeat('9', 8192) + "]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.peek()).isEqualTo(JsonToken.STRING);
assertThat(reader.nextDouble()).isEqualTo(1d);
reader.endArray();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
// Unquoted literals longer than the buffer must still round-trip intact.
@Test
public void testVeryLongUnquotedLiteral() throws IOException {
String literal = "a" + repeat('b', 8192) + "c";
JsonReader reader = new JsonReader(reader("[" + literal + "]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextString()).isEqualTo(literal);
reader.endArray();
}
@Test
public void testDeeplyNestedArrays() throws IOException {
// this is nested 40 levels deep; Gson is tuned for nesting is 30 levels deep or fewer
JsonReader reader =
new JsonReader(
reader(
"[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]"));
for (int i = 0; i < 40; i++) {
reader.beginArray();
}
assertThat(reader.getPath())
.isEqualTo(
"$[0][0][0][0][0][0][0][0][0][0][0][0][0][0][0]"
+ "[0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0]");
for (int i = 0; i < 40; i++) {
reader.endArray();
}
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
@Test
public void testDeeplyNestedObjects() throws IOException {
// Build a JSON document structured like {"a":{"a":{"a":{"a":true}}}}, but 40 levels deep
String json = "true";
for (int i = 0; i < 40; i++) {
json = String.format("{\"a\":%s}", json);
}
JsonReader reader = new JsonReader(reader(json));
for (int i = 0; i < 40; i++) {
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
}
assertThat(reader.getPath())
.isEqualTo(
"$.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a"
+ ".a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a.a");
assertThat(reader.nextBoolean()).isTrue();
for (int i = 0; i < 40; i++) {
reader.endObject();
}
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
// Exceeding the default nesting limit by one level must fail with a descriptive message.
@Test
public void testNestingLimitDefault() throws IOException {
int defaultLimit = JsonReader.DEFAULT_NESTING_LIMIT;
String json = repeat('[', defaultLimit + 1);
JsonReader reader = new JsonReader(reader(json));
assertThat(reader.getNestingLimit()).isEqualTo(defaultLimit);
for (int i = 0; i < defaultLimit; i++) {
reader.beginArray();
}
MalformedJsonException e =
assertThrows(MalformedJsonException.class, () -> reader.beginArray());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Nesting limit "
+ defaultLimit
+ " reached at line 1 column "
+ (defaultLimit + 2)
+ " path $"
+ "[0]".repeat(defaultLimit));
}
// Note: The column number reported in the expected exception messages is slightly off and points
// behind instead of directly at the '[' or '{'
@Test
public void testNestingLimit() throws IOException {
JsonReader reader = new JsonReader(reader("[{\"a\":1}]"));
reader.setNestingLimit(2);
assertThat(reader.getNestingLimit()).isEqualTo(2);
reader.beginArray();
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
assertThat(reader.nextInt()).isEqualTo(1);
reader.endObject();
reader.endArray();
JsonReader reader2 = new JsonReader(reader("[{\"a\":[]}]"));
reader2.setNestingLimit(2);
reader2.beginArray();
reader2.beginObject();
assertThat(reader2.nextName()).isEqualTo("a");
MalformedJsonException e =
assertThrows(MalformedJsonException.class, () -> reader2.beginArray());
assertThat(e)
.hasMessageThat()
.isEqualTo("Nesting limit 2 reached at line 1 column 8 path $[0].a");
JsonReader reader3 = new JsonReader(reader("[]"));
reader3.setNestingLimit(0);
e = assertThrows(MalformedJsonException.class, () -> reader3.beginArray());
assertThat(e).hasMessageThat().isEqualTo("Nesting limit 0 reached at line 1 column 2 path $");
JsonReader reader4 = new JsonReader(reader("[]"));
reader4.setNestingLimit(0);
// Currently also checked when skipping values
e = assertThrows(MalformedJsonException.class, () -> reader4.skipValue());
assertThat(e).hasMessageThat().isEqualTo("Nesting limit 0 reached at line 1 column 2 path $")
;
JsonReader reader5 = new JsonReader(reader("1"));
reader5.setNestingLimit(0);
// Reading value other than array or object should be allowed
assertThat(reader5.nextInt()).isEqualTo(1);
// Test multiple top-level arrays
JsonReader reader6 = new JsonReader(reader("[] [[]]"));
reader6.setStrictness(Strictness.LENIENT);
reader6.setNestingLimit(1);
reader6.beginArray();
reader6.endArray();
reader6.beginArray();
e = assertThrows(MalformedJsonException.class, () -> reader6.beginArray());
assertThat(e)
.hasMessageThat()
.isEqualTo("Nesting limit 1 reached at line 1 column 6 path $[0]");
JsonReader reader7 = new JsonReader(reader("[]"));
IllegalArgumentException argException =
assertThrows(IllegalArgumentException.class, () -> reader7.setNestingLimit(-1));
assertThat(argException).hasMessageThat().isEqualTo("Invalid nesting limit: -1");
}
// http://code.google.com/p/google-gson/issues/detail?id=409
// A lone '/' (start of a comment with nothing after it) must not crash the reader.
@Test
public void testStringEndingInSlash() {
JsonReader reader = new JsonReader(reader("/"));
reader.setStrictness(Strictness.LENIENT);
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Expected value at line 1 column 1 path $\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
@Test
public void testDocumentWithCommentEndingInSlash() {
JsonReader reader = new JsonReader(reader("/* foo *//"));
reader.setStrictness(Strictness.LENIENT);
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Expected value at line 1 column 10 path $\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
@Test
public void testStringWithLeadingSlash() {
JsonReader reader = new JsonReader(reader("/x"));
reader.setStrictness(Strictness.LENIENT);
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Expected value at line 1 column 1 path $\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
// Trailing garbage after the last member of an unterminated object is reported as such.
@Test
public void testUnterminatedObject() throws IOException {
JsonReader reader = new JsonReader(reader("{\"a\":\"android\"x"));
reader.setStrictness(Strictness.LENIENT);
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
assertThat(reader.nextString()).isEqualTo("android");
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Unterminated object at line 1 column 16 path $.a\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
// 16 KiB strings exceed the internal buffer and must round-trip in all quoting styles.
@Test
public void testVeryLongQuotedString() throws IOException {
char[] stringChars = new char[1024 * 16];
Arrays.fill(stringChars, 'x');
String string = new String(stringChars);
String json = "[\"" + string + "\"]";
JsonReader reader = new JsonReader(reader(json));
reader.beginArray();
assertThat(reader.nextString()).isEqualTo(string);
reader.endArray();
}
@Test
public void testVeryLongUnquotedString() throws IOException {
char[] stringChars = new char[1024 * 16];
Arrays.fill(stringChars, 'x');
String string = new String(stringChars);
String json = "[" + string + "]";
JsonReader reader = new JsonReader(reader(json));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextString()).isEqualTo(string);
reader.endArray();
}
// An unterminated unquoted string is returned in full; EOF only surfaces on the next peek.
@Test
public void testVeryLongUnterminatedString() throws IOException {
char[] stringChars = new char[1024 * 16];
Arrays.fill(stringChars, 'x');
String string = new String(stringChars);
String json = "[" + string;
JsonReader reader = new JsonReader(reader(json));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.nextString()).isEqualTo(string);
assertThrows(EOFException.class, () -> reader.peek());
}
// skipValue() must also traverse strings longer than the buffer, quoted or not,
// nested or at the top level.
@Test
public void testSkipVeryLongUnquotedString() throws IOException {
JsonReader reader = new JsonReader(reader("[" + repeat('x', 8192) + "]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
reader.skipValue();
reader.endArray();
}
@Test
public void testSkipTopLevelUnquotedString() throws IOException {
JsonReader reader = new JsonReader(reader(repeat('x', 8192)));
reader.setStrictness(Strictness.LENIENT);
reader.skipValue();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
@Test
public void testSkipVeryLongQuotedString() throws IOException {
JsonReader reader = new JsonReader(reader("[\"" + repeat('x', 8192) + "\"]"));
reader.beginArray();
reader.skipValue();
reader.endArray();
}
@Test
public void testSkipTopLevelQuotedString() throws IOException {
JsonReader reader = new JsonReader(reader("\"" + repeat('x', 8192) + "\""));
reader.setStrictness(Strictness.LENIENT);
reader.skipValue();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
// Number-like tokens with malformed exponents fall back to the STRING token type.
@Test
public void testStringAsNumberWithTruncatedExponent() throws IOException {
JsonReader reader = new JsonReader(reader("[123e]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.peek()).isEqualTo(STRING);
}
@Test
public void testStringAsNumberWithDigitAndNonDigitExponent() throws IOException {
JsonReader reader = new JsonReader(reader("[123e4b]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.peek()).isEqualTo(STRING);
}
@Test
public void testStringAsNumberWithNonDigitExponent() throws IOException {
JsonReader reader = new JsonReader(reader("[123eb]"));
reader.setStrictness(Strictness.LENIENT);
reader.beginArray();
assertThat(reader.peek()).isEqualTo(STRING);
}
// The empty string is a legal property name.
@Test
public void testEmptyStringName() throws IOException {
JsonReader reader = new JsonReader(reader("{\"\":true}"));
reader.setStrictness(Strictness.LENIENT);
assertThat(reader.peek()).isEqualTo(BEGIN_OBJECT);
reader.beginObject();
assertThat(reader.peek()).isEqualTo(NAME);
assertThat(reader.nextName()).isEqualTo("")
;
assertThat(reader.peek()).isEqualTo(JsonToken.BOOLEAN);
assertThat(reader.nextBoolean()).isTrue();
assertThat(reader.peek()).isEqualTo(JsonToken.END_OBJECT);
reader.endObject();
assertThat(reader.peek()).isEqualTo(JsonToken.END_DOCUMENT);
}
// A trailing comma in an object is rejected even in lenient mode (unlike arrays).
@Test
public void testStrictExtraCommasInMaps() throws IOException {
JsonReader reader = new JsonReader(reader("{\"a\":\"b\",}"));
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
assertThat(reader.nextString()).isEqualTo("b");
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Expected name at line 1 column 11 path $.a\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
@Test
public void testLenientExtraCommasInMaps() throws IOException {
JsonReader reader = new JsonReader(reader("{\"a\":\"b\",}"));
reader.setStrictness(Strictness.LENIENT);
reader.beginObject();
assertThat(reader.nextName()).isEqualTo("a");
assertThat(reader.nextString()).isEqualTo("b");
var e = assertThrows(MalformedJsonException.class, () -> reader.peek());
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Expected name at line 1 column 11 path $.a\n"
+ "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json");
}
/**
 * Returns a string consisting of {@code count} copies of {@code c}.
 *
 * <p>Delegates to {@link String#repeat(int)} (already used elsewhere in this file,
 * e.g. {@code "[0]".repeat(...)}) instead of hand-filling a char array.
 *
 * @param c the character to repeat
 * @param count number of repetitions; {@code 0} yields the empty string
 */
private static String repeat(char c, int count) {
  return String.valueOf(c).repeat(count);
}
/**
 * Exercises a catalogue of malformed documents, checking both the tokens that can still
 * be read before failure and the exception type that terminates reading.
 *
 * <p>Note: the original listed the {@code "{\"name\","} case three times; the duplicate
 * assertions have been removed — each input is now checked exactly once.
 */
@Test
public void testMalformedDocuments() throws IOException {
  // Broken object starts.
  assertDocument("{]", BEGIN_OBJECT, MalformedJsonException.class);
  assertDocument("{,", BEGIN_OBJECT, MalformedJsonException.class);
  assertDocument("{{", BEGIN_OBJECT, MalformedJsonException.class);
  assertDocument("{[", BEGIN_OBJECT, MalformedJsonException.class);
  assertDocument("{:", BEGIN_OBJECT, MalformedJsonException.class);
  // Broken or unusual name-value separators.
  assertDocument("{\"name\",", BEGIN_OBJECT, NAME, MalformedJsonException.class);
  assertDocument("{\"name\":}", BEGIN_OBJECT, NAME, MalformedJsonException.class);
  assertDocument("{\"name\"::", BEGIN_OBJECT, NAME, MalformedJsonException.class);
  assertDocument("{\"name\":,", BEGIN_OBJECT, NAME, MalformedJsonException.class);
  assertDocument("{\"name\"=}", BEGIN_OBJECT, NAME, MalformedJsonException.class);
  assertDocument("{\"name\"=>}", BEGIN_OBJECT, NAME, MalformedJsonException.class);
  assertDocument(
      "{\"name\"=>\"string\":", BEGIN_OBJECT, NAME, STRING, MalformedJsonException.class);
  assertDocument(
      "{\"name\"=>\"string\"=", BEGIN_OBJECT, NAME, STRING, MalformedJsonException.class);
  assertDocument(
      "{\"name\"=>\"string\"=>", BEGIN_OBJECT, NAME, STRING, MalformedJsonException.class);
  assertDocument("{\"name\"=>\"string\",", BEGIN_OBJECT, NAME, STRING, EOFException.class);
  assertDocument("{\"name\"=>\"string\",\"name\"", BEGIN_OBJECT, NAME, STRING, NAME);
  // Broken arrays; a lone ',' reads as two implicit nulls in lenient mode.
  assertDocument("[}", BEGIN_ARRAY, MalformedJsonException.class);
  assertDocument("[,]", BEGIN_ARRAY, NULL, NULL, END_ARRAY);
  // Truncated documents end in EOFException (or MalformedJsonException after a bad comma).
  assertDocument("{", BEGIN_OBJECT, EOFException.class);
  assertDocument("{\"name\"", BEGIN_OBJECT, NAME, EOFException.class);
  assertDocument("{'name'", BEGIN_OBJECT, NAME, EOFException.class);
  assertDocument("{'name',", BEGIN_OBJECT, NAME, MalformedJsonException.class);
  assertDocument("{name", BEGIN_OBJECT, NAME, EOFException.class);
  assertDocument("[", BEGIN_ARRAY, EOFException.class);
  assertDocument("[string", BEGIN_ARRAY, STRING, EOFException.class);
  assertDocument("[\"string\"", BEGIN_ARRAY, STRING, EOFException.class);
  assertDocument("['string'", BEGIN_ARRAY, STRING, EOFException.class);
  assertDocument("[123", BEGIN_ARRAY, NUMBER, EOFException.class);
  assertDocument("[123,", BEGIN_ARRAY, NUMBER, EOFException.class);
  assertDocument("{\"name\":123", BEGIN_OBJECT, NAME, NUMBER, EOFException.class);
  assertDocument("{\"name\":123,", BEGIN_OBJECT, NAME, NUMBER, EOFException.class);
  assertDocument("{\"name\":\"string\"", BEGIN_OBJECT, NAME, STRING, EOFException.class);
  assertDocument("{\"name\":\"string\",", BEGIN_OBJECT, NAME, STRING, EOFException.class);
  assertDocument("{\"name\":'string'", BEGIN_OBJECT, NAME, STRING, EOFException.class);
  assertDocument("{\"name\":'string',", BEGIN_OBJECT, NAME, STRING, EOFException.class);
  assertDocument("{\"name\":false", BEGIN_OBJECT, NAME, BOOLEAN, EOFException.class);
  assertDocument("{\"name\":false,,", BEGIN_OBJECT, NAME, BOOLEAN, MalformedJsonException.class);
}
/**
 * This test behaves slightly differently in Gson 2.2 and earlier. It fails during peek rather
 * than during nextString().
 */
@Test
public void testUnterminatedStringFailure() throws IOException {
  JsonReader jsonReader = new JsonReader(reader("[\"string"));
  jsonReader.setStrictness(Strictness.LENIENT);
  jsonReader.beginArray();
  assertThat(jsonReader.peek()).isEqualTo(JsonToken.STRING);
  // The unterminated string is only detected once the value itself is consumed.
  var thrown = assertThrows(MalformedJsonException.class, jsonReader::nextString);
  String expectedMessage =
      "Unterminated string at line 1 column 9 path $[0]\n"
          + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json";
  assertThat(thrown).hasMessageThat().isEqualTo(expectedMessage);
}
/** Regression test for an issue with buffer filling and consumeNonExecutePrefix. */
@Test
public void testReadAcrossBuffers() throws IOException {
  // Build a document whose non-execute prefix ")]}'" straddles the reader's buffer boundary:
  // a '#' comment padded with spaces so the prefix begins near the end of the first buffer fill.
  StringBuilder document = new StringBuilder("#");
  int padding = JsonReader.BUFFER_SIZE - 3;
  while (padding-- > 0) {
    document.append(' ');
  }
  document.append("\n)]}'\n3");
  JsonReader jsonReader = new JsonReader(reader(document.toString()));
  jsonReader.setStrictness(Strictness.LENIENT);
  assertThat(jsonReader.peek()).isEqualTo(JsonToken.NUMBER);
}
/** Asserts that {@code exception} carries the standard strict-mode error message for {@code expectedLocation}. */
private static void assertStrictError(MalformedJsonException exception, String expectedLocation) {
  String expectedMessage =
      "Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON at "
          + expectedLocation
          + "\n"
          + "See https://github.com/google/gson/blob/main/Troubleshooting.md#malformed-json";
  assertThat(exception).hasMessageThat().isEqualTo(expectedMessage);
}
/**
 * Asserts that {@code exception} reports an unexpected JSON structure ("Expected X but was Y"),
 * including the troubleshooting link appropriate for the actual token.
 */
private static void assertUnexpectedStructureError(
    IllegalStateException exception,
    String expectedToken,
    String actualToken,
    String expectedLocation) {
  // NULL tokens get a dedicated troubleshooting entry about null-unsafe adapters.
  String troubleshootingId =
      actualToken.equals("NULL") ? "adapter-not-null-safe" : "unexpected-json-structure";
  String expectedMessage =
      "Expected "
          + expectedToken
          + " but was "
          + actualToken
          + " at "
          + expectedLocation
          + "\nSee https://github.com/google/gson/blob/main/Troubleshooting.md#"
          + troubleshootingId;
  assertThat(exception).hasMessageThat().isEqualTo(expectedMessage);
}
/**
 * Reads {@code document} leniently and checks that it yields the given sequence of tokens.
 * Each expectation is either one of the sentinel token markers (compared by identity) or an
 * exception class that {@code peek()} is expected to throw at that point.
 */
private static void assertDocument(String document, Object... expectations) throws IOException {
  JsonReader jsonReader = new JsonReader(reader(document));
  jsonReader.setStrictness(Strictness.LENIENT);
  for (Object expected : expectations) {
    if (expected == BEGIN_OBJECT) {
      jsonReader.beginObject();
    } else if (expected == BEGIN_ARRAY) {
      jsonReader.beginArray();
    } else if (expected == END_OBJECT) {
      jsonReader.endObject();
    } else if (expected == END_ARRAY) {
      jsonReader.endArray();
    } else if (expected == NAME) {
      assertThat(jsonReader.nextName()).isEqualTo("name");
    } else if (expected == BOOLEAN) {
      assertThat(jsonReader.nextBoolean()).isFalse();
    } else if (expected == STRING) {
      assertThat(jsonReader.nextString()).isEqualTo("string");
    } else if (expected == NUMBER) {
      assertThat(jsonReader.nextInt()).isEqualTo(123);
    } else if (expected == NULL) {
      jsonReader.nextNull();
    } else if (expected instanceof Class
        && Exception.class.isAssignableFrom((Class<?>) expected)) {
      var thrown = assertThrows(Exception.class, () -> jsonReader.peek());
      assertThat(thrown.getClass()).isEqualTo((Class<?>) expected);
    } else {
      throw new AssertionError("Unsupported expectation value: " + expected);
    }
  }
}
/**
 * Wraps {@code s} in a {@link Reader}.
 *
 * <p>Historically this returned a reader that produced one character per {@code read} call to
 * stress incremental buffer refills; it now simply returns a {@link StringReader} over the
 * whole string. (The old javadoc and the commented-out single-character implementation were
 * stale and have been removed.)
 */
private static Reader reader(String s) {
  return new StringReader(s);
}
}
| JsonReaderTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/AuxiliaryDatabaseObjectBinder.java | {
"start": 547,
"end": 2343
} | class ____ {
/**
* Handling for a {@code <database-object/>} declaration.
*
* @param context Access to information relative to the mapping document containing this binding
* @param auxDbObjectMapping The {@code <database-object/>} binding
*/
public static void processAuxiliaryDatabaseObject(
HbmLocalMetadataBuildingContext context,
JaxbHbmAuxiliaryDatabaseObjectType auxDbObjectMapping) {
final AuxiliaryDatabaseObject auxDbObject;
if ( auxDbObjectMapping.getDefinition() != null ) {
final String auxDbObjectImplClass = auxDbObjectMapping.getDefinition().getClazz();
try {
auxDbObject = (AuxiliaryDatabaseObject)
context.getBootstrapContext().getClassLoaderService()
.classForName( auxDbObjectImplClass )
.newInstance();
}
catch (ClassLoadingException cle) {
throw cle;
}
catch (Exception e) {
throw new org.hibernate.boot.MappingException(
String.format(
"Unable to instantiate custom AuxiliaryDatabaseObject class [%s]",
auxDbObjectImplClass
),
context.getOrigin()
);
}
}
else {
auxDbObject = new SimpleAuxiliaryDatabaseObject(
context.getMetadataCollector().getDatabase().getDefaultNamespace(),
auxDbObjectMapping.getCreate(),
auxDbObjectMapping.getDrop(),
null
);
}
if ( !auxDbObjectMapping.getDialectScope().isEmpty() ) {
if ( auxDbObject instanceof AuxiliaryDatabaseObject.Expandable expandable ) {
for ( JaxbHbmDialectScopeType dialectScopeBinding : auxDbObjectMapping.getDialectScope() ) {
expandable.addDialectScope( dialectScopeBinding.getName() );
}
}
else {
// error? warn?
}
}
context.getMetadataCollector().getDatabase().addAuxiliaryDatabaseObject( auxDbObject );
}
}
| AuxiliaryDatabaseObjectBinder |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/util/StreamRecordMatchers.java | {
"start": 5730,
"end": 6778
} | class ____<T> extends TypeSafeMatcher<StreamRecord<? extends T>> {
private Matcher<? super T> valueMatcher;
private Matcher<? super Long> timestampMatcher;
private StreamRecordMatcher(
Matcher<? super T> valueMatcher, Matcher<? super Long> timestampMatcher) {
this.valueMatcher = valueMatcher;
this.timestampMatcher = timestampMatcher;
}
@Override
public void describeTo(Description description) {
description
.appendText("a StreamRecordValue(")
.appendValue(valueMatcher)
.appendText(", ")
.appendValue(timestampMatcher)
.appendText(")");
}
@Override
protected boolean matchesSafely(StreamRecord<? extends T> streamRecord) {
return valueMatcher.matches(streamRecord.getValue())
&& timestampMatcher.matches(streamRecord.getTimestamp());
}
}
private static | StreamRecordMatcher |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/name/NameResolutionTest.java | {
"start": 1894,
"end": 2034
} | class ____ {
@Inject
@Named
String producedBing;
@Inject
@Named
Integer bongo;
}
}
| Consumer |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/named_constructor_args/InvalidNamedConstructorArgsTest.java | {
"start": 2073,
"end": 2826
} | interface ____ {
@ConstructorArgs({ @Arg(column = "id", name = "noSuchConstructorArg"), })
@Select("select * from users ")
User select();
}
@Test
void noMatchingConstructorArgName() {
Configuration configuration = sqlSessionFactory.getConfiguration();
when(() -> configuration.addMapper(NoMatchingConstructorMapper.class));
then(caughtException()).isInstanceOf(BuilderException.class).hasMessageContaining(
"'org.apache.ibatis.submitted.named_constructor_args.InvalidNamedConstructorArgsTest$NoMatchingConstructorMapper.select-void'")
.hasMessageContaining("'org.apache.ibatis.submitted.named_constructor_args.User'")
.hasMessageContaining("[noSuchConstructorArg]");
}
| NoMatchingConstructorMapper |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/messages/GenericMessageTester.java | {
"start": 8850,
"end": 9045
} | class ____ implements Instantiator<JobID> {
@Override
public JobID instantiate(Random rnd) {
return randomJobId(rnd);
}
}
public static | JobIdInstantiator |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.