language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__dubbo | dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/model/OpenAPI.java | {
"start": 1363,
"end": 8268
} | class ____ extends Node<OpenAPI> {
private String openapi;
private Info info;
private List<Server> servers;
private Map<String, PathItem> paths;
private Components components;
private List<SecurityRequirement> security;
private List<Tag> tags;
private ExternalDocs externalDocs;
private String group;
private int priority;
private transient OpenAPIConfig globalConfig;
private transient OpenAPIConfig config;
private transient ServiceMeta meta;
public String getOpenapi() {
return openapi;
}
public OpenAPI setOpenapi(String openapi) {
this.openapi = openapi;
return this;
}
public Info getInfo() {
return info;
}
public OpenAPI setInfo(Info info) {
this.info = info;
return this;
}
public List<Server> getServers() {
return servers;
}
public OpenAPI setServers(List<Server> servers) {
this.servers = servers;
return this;
}
public OpenAPI addServer(Server server) {
List<Server> thisServers = servers;
if (thisServers == null) {
servers = thisServers = new ArrayList<>();
} else {
for (int i = 0, size = thisServers.size(); i < size; i++) {
if (thisServers.get(i).getUrl().equals(server.getUrl())) {
return this;
}
}
}
thisServers.add(server);
return this;
}
public OpenAPI removeServer(Server server) {
if (servers != null) {
servers.remove(server);
}
return this;
}
public Map<String, PathItem> getPaths() {
return paths;
}
public PathItem getPath(String path) {
return paths == null ? null : paths.get(path);
}
public PathItem getOrAddPath(String path) {
if (paths == null) {
paths = new LinkedHashMap<>();
}
return paths.computeIfAbsent(path, k -> new PathItem());
}
public OpenAPI setPaths(Map<String, PathItem> paths) {
this.paths = paths;
return this;
}
public OpenAPI addPath(String path, PathItem pathItem) {
if (paths == null) {
paths = new LinkedHashMap<>();
}
paths.put(path, pathItem);
return this;
}
public OpenAPI removePath(String path) {
if (paths != null) {
paths.remove(path);
}
return this;
}
public Components getComponents() {
return components;
}
public OpenAPI setComponents(Components components) {
this.components = components;
return this;
}
public List<SecurityRequirement> getSecurity() {
return security;
}
public OpenAPI setSecurity(List<SecurityRequirement> security) {
this.security = security;
return this;
}
public OpenAPI addSecurity(SecurityRequirement securityRequirement) {
if (security == null) {
security = new ArrayList<>();
}
security.add(securityRequirement);
return this;
}
public List<Tag> getTags() {
return tags;
}
public OpenAPI setTags(List<Tag> tags) {
this.tags = tags;
return this;
}
public OpenAPI addTag(Tag tag) {
List<Tag> thisTags = tags;
if (thisTags == null) {
tags = thisTags = new ArrayList<>();
} else {
for (int i = 0, size = thisTags.size(); i < size; i++) {
if (thisTags.get(i).getName().equals(tag.getName())) {
return this;
}
}
}
thisTags.add(tag);
return this;
}
public OpenAPI removeTag(Tag tag) {
if (tags != null) {
tags.remove(tag);
}
return this;
}
public ExternalDocs getExternalDocs() {
return externalDocs;
}
public OpenAPI setExternalDocs(ExternalDocs externalDocs) {
this.externalDocs = externalDocs;
return this;
}
public String getGroup() {
return group;
}
public OpenAPI setGroup(String group) {
this.group = group;
return this;
}
public int getPriority() {
return priority;
}
public OpenAPI setPriority(int priority) {
this.priority = priority;
return this;
}
public OpenAPIConfig getGlobalConfig() {
return globalConfig;
}
public OpenAPI setGlobalConfig(OpenAPIConfig globalConfig) {
this.globalConfig = globalConfig;
return this;
}
public OpenAPIConfig getConfig() {
return config;
}
public OpenAPI setConfig(OpenAPIConfig config) {
this.config = config;
return this;
}
public <T> T getConfigValue(Function<OpenAPIConfig, T> fn) {
if (config != null) {
T value = fn.apply(config);
if (value != null) {
return value;
}
}
return globalConfig == null ? null : fn.apply(globalConfig);
}
public String getConfigSetting(String key) {
return getConfigValue(config -> config == null ? null : config.getSetting(key));
}
public void walkOperations(Consumer<Operation> consumer) {
Map<String, PathItem> paths = this.paths;
if (paths == null) {
return;
}
for (PathItem pathItem : paths.values()) {
Map<HttpMethods, Operation> operations = pathItem.getOperations();
if (operations != null) {
for (Operation operation : operations.values()) {
consumer.accept(operation);
}
}
}
}
public ServiceMeta getMeta() {
return meta;
}
public OpenAPI setMeta(ServiceMeta meta) {
this.meta = meta;
return this;
}
@Override
public OpenAPI clone() {
OpenAPI clone = super.clone();
clone.info = clone(info);
clone.servers = clone(servers);
clone.paths = clone(paths);
clone.components = clone(components);
clone.security = clone(security);
clone.tags = clone(tags);
clone.externalDocs = clone(externalDocs);
return clone;
}
@Override
public Map<String, Object> writeTo(Map<String, Object> node, Context context) {
node.put("openapi", openapi == null ? Constants.VERSION_30 : openapi);
write(node, "info", info, context);
write(node, "servers", servers, context);
write(node, "paths", paths, context);
write(node, "components", components, context);
write(node, "security", security, context);
write(node, "tags", tags, context);
write(node, "externalDocs", externalDocs, context);
writeExtensions(node);
return node;
}
}
| OpenAPI |
java | apache__avro | lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java | {
"start": 2255,
"end": 2426
} | class ____ {
protected static final int REPEATING = -1;
public static int ackCount;
private static boolean throwUndeclaredError;
public static | TestProtocolSpecific |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChunkTests.java | {
"start": 2035,
"end": 9737
} | class ____ extends AbstractScalarFunctionTestCase {
private static String PARAGRAPH_INPUT = """
The Adirondacks, a vast mountain region in northern New York, offer a breathtaking mix of rugged wilderness, serene lakes,
and charming small towns. Spanning over six million acres, the Adirondack Park is larger than Yellowstone, Yosemite, and the
Grand Canyon combined, yet it’s dotted with communities where people live, work, and play amidst nature. Visitors come year-round
to hike High Peaks trails, paddle across mirror-like waters, or ski through snow-covered forests. The area’s pristine beauty,
rich history, and commitment to conservation create a unique balance between wild preservation and human presence, making
the Adirondacks a timeless escape into natural tranquility.
""";
public ChunkTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
private static String randomWordsBetween(int min, int max) {
return IntStream.range(0, randomIntBetween(min, max))
.mapToObj(i -> randomAlphaOfLengthBetween(1, 10))
.collect(Collectors.joining(" "));
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
return parameterSuppliersFromTypedDataWithDefaultChecks(
true,
List.of(new TestCaseSupplier("Chunk with defaults", List.of(DataType.KEYWORD), () -> {
String text = randomWordsBetween(25, 50);
ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(Chunk.DEFAULT_CHUNK_SIZE, 0);
List<String> chunks = Chunk.chunkText(text, chunkingSettings);
Object expectedResult = chunks.size() == 1
? new BytesRef(chunks.get(0).trim())
: chunks.stream().map(s -> new BytesRef(s.trim())).toList();
return new TestCaseSupplier.TestCase(
List.of(new TestCaseSupplier.TypedData(new BytesRef(text), DataType.KEYWORD, "str")),
"ChunkBytesRefEvaluator[str=Attribute[channel=0], "
+ "chunkingSettings={\"strategy\":\"sentence\",\"max_chunk_size\":300,\"sentence_overlap\":0}]",
DataType.KEYWORD,
equalTo(expectedResult)
);
}), new TestCaseSupplier("Chunk with defaults text input", List.of(DataType.TEXT), () -> {
String text = randomWordsBetween(25, 50);
ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(Chunk.DEFAULT_CHUNK_SIZE, 0);
List<String> chunks = Chunk.chunkText(text, chunkingSettings);
Object expectedResult = chunks.size() == 1
? new BytesRef(chunks.get(0).trim())
: chunks.stream().map(s -> new BytesRef(s.trim())).toList();
return new TestCaseSupplier.TestCase(
List.of(new TestCaseSupplier.TypedData(new BytesRef(text), DataType.TEXT, "str")),
"ChunkBytesRefEvaluator[str=Attribute[channel=0], "
+ "chunkingSettings={\"strategy\":\"sentence\",\"max_chunk_size\":300,\"sentence_overlap\":0}]",
DataType.KEYWORD,
equalTo(expectedResult)
);
}))
);
}
private static MapExpression createChunkingSettings(ChunkingSettings chunkingSettings) {
List<Expression> chunkingSettingsMap = new ArrayList<>();
if (Objects.nonNull(chunkingSettings)) {
chunkingSettings.asMap().forEach((k, v) -> {
chunkingSettingsMap.add(Literal.keyword(Source.EMPTY, k));
DataType dataType = ALLOWED_CHUNKING_SETTING_OPTIONS.get(k);
Object value = v;
if (dataType == DataType.KEYWORD) {
if (v instanceof List<?> list) {
value = list.stream().map(item -> BytesRefs.toBytesRef(item)).toList();
} else if (v instanceof String str) {
value = BytesRefs.toBytesRef(str);
}
}
chunkingSettingsMap.add(new Literal(Source.EMPTY, value, dataType));
});
}
return new MapExpression(Source.EMPTY, chunkingSettingsMap);
}
@Override
protected Expression build(Source source, List<Expression> args) {
// With MapParam, args contains: field, options_map
Expression options = args.size() < 2 ? null : args.get(1);
return new Chunk(source, args.get(0), options);
}
public void testDefaults() {
// Default of 300 is huge, only one chunk returned in this case
verifyChunks(null, 1);
}
public void testDefaultChunkingSettings() {
verifyChunks(null, 1);
}
public void testSpecifiedChunkingSettings() {
// We can't randomize here, because we're testing on specifically specified chunk size that's variable.
int chunkSize = 25;
int expectedNumChunks = 6;
ChunkingSettings chunkingSettings = new SentenceBoundaryChunkingSettings(chunkSize, 0);
verifyChunks(chunkingSettings, expectedNumChunks);
}
public void testRandomChunkingSettings() {
ChunkingSettings chunkingSettings = createRandomChunkingSettings();
List<String> result = process(PARAGRAPH_INPUT, chunkingSettings);
assertNotNull(result);
assertFalse(result.isEmpty());
// Actual results depend on chunking settings passed in
}
// Paranoia check, this test will fail if we add new chunking settings options without updating the Chunk function
public void testChunkDefinesAllAllowedChunkingSettingsOptions() {
Set<String> allowedOptions = ALLOWED_CHUNKING_SETTING_OPTIONS.keySet();
Set<String> allOptions = Arrays.stream(ChunkingSettingsOptions.values())
.map(ChunkingSettingsOptions::toString)
.collect(Collectors.toSet());
assertEquals(allOptions, allowedOptions);
}
private void verifyChunks(ChunkingSettings chunkingSettings, int expectedNumChunksReturned) {
ChunkingSettings chunkingSettingsOrDefault = chunkingSettings != null ? chunkingSettings : DEFAULT_CHUNKING_SETTINGS;
List<String> expected = Chunk.chunkText(PARAGRAPH_INPUT, chunkingSettingsOrDefault).stream().map(String::trim).toList();
List<String> result = process(PARAGRAPH_INPUT, chunkingSettingsOrDefault);
assertThat(result.size(), equalTo(expectedNumChunksReturned));
assertThat(result, equalTo(expected));
}
private List<String> process(String str, ChunkingSettings chunkingSettings) {
MapExpression optionsMap = chunkingSettings == null ? null : createChunkingSettings(chunkingSettings);
try (
EvalOperator.ExpressionEvaluator eval = evaluator(new Chunk(Source.EMPTY, field("str", DataType.KEYWORD), optionsMap)).get(
driverContext()
);
Block block = eval.eval(row(List.of(new BytesRef(str))))
) {
if (block.isNull(0)) {
return null;
}
Object result = toJavaObject(block, 0);
if (result instanceof BytesRef bytesRef) {
return List.of(bytesRef.utf8ToString());
} else {
@SuppressWarnings("unchecked")
List<BytesRef> list = (List<BytesRef>) result;
return list.stream().map(BytesRef::utf8ToString).toList();
}
}
}
}
| ChunkTests |
java | apache__camel | components/camel-jcr/src/main/java/org/apache/camel/component/jcr/JcrEndpoint.java | {
"start": 1813,
"end": 9714
} | class ____ extends DefaultEndpoint {
private Credentials credentials;
private Repository repository;
@UriPath
@Metadata(required = true)
private String host;
@UriPath
private String base;
@UriParam
private String username;
@UriParam
private String password;
@UriParam
private int eventTypes;
@UriParam
private boolean deep;
@UriParam
private String uuids;
@UriParam
private String nodeTypeNames;
@UriParam
private boolean noLocal;
@UriParam(defaultValue = "3000", javaType = "java.time.Duration")
private long sessionLiveCheckIntervalOnStart = 3000L;
@UriParam(defaultValue = "60000", javaType = "java.time.Duration")
private long sessionLiveCheckInterval = 60000L;
@UriParam
private String workspaceName;
protected JcrEndpoint(String endpointUri, JcrComponent component) {
super(endpointUri, component);
try {
URI uri = new URI(endpointUri);
if (uri.getUserInfo() != null) {
String[] creds = uri.getUserInfo().split(":");
this.username = creds[0];
this.password = creds.length > 1 ? creds[1] : "";
}
this.host = uri.getHost();
this.base = uri.getPath().replaceAll("^/", "");
} catch (URISyntaxException e) {
throw new IllegalArgumentException("Invalid URI: " + endpointUri, e);
}
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
JcrConsumer answer = new JcrConsumer(this, processor);
configureConsumer(answer);
return answer;
}
@Override
public Producer createProducer() throws Exception {
return new JcrProducer(this);
}
@Override
protected void doStart() throws Exception {
super.doStart();
StringHelper.notEmpty(host, "host", this);
this.repository = getCamelContext().getRegistry().lookupByNameAndType(host, Repository.class);
if (repository == null) {
throw new RuntimeCamelException("No JCR repository defined under '" + host + "'");
}
if (username != null && password != null) {
this.credentials = new SimpleCredentials(username, password.toCharArray());
}
}
public String getHost() {
return host;
}
/**
* Name of the {@link javax.jcr.Repository} to lookup from the Camel registry to be used.
*/
public void setHost(String host) {
this.host = host;
}
/**
* Get the {@link Repository}
*
* @return the repository
*/
protected Repository getRepository() {
return repository;
}
/**
* Get the {@link Credentials} for establishing the JCR repository connection
*
* @return the credentials
*/
protected Credentials getCredentials() {
return credentials;
}
/**
* Get the base node when accessing the repository
*
* @return the base node
*/
protected String getBase() {
return base;
}
public void setBase(String base) {
this.base = base;
}
public String getUsername() {
return username;
}
/**
* Username for login
*/
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
/**
* Password for login
*/
public void setPassword(String password) {
this.password = password;
}
/**
* <code>eventTypes</code> (a combination of one or more event types encoded as a bit mask value such as
* javax.jcr.observation.Event.NODE_ADDED, javax.jcr.observation.Event.NODE_REMOVED, etc.).
*
* @return eventTypes
* @see {@link javax.jcr.observation.Event}
* @see {@link javax.jcr.observation.ObservationManager#addEventListener(javax.jcr.observation.EventListener, int, String, boolean, String[], String[], boolean)}
*/
public int getEventTypes() {
return eventTypes;
}
public void setEventTypes(int eventTypes) {
this.eventTypes = eventTypes;
}
/**
* When <code>isDeep</code> is true, events whose associated parent node is at <code>absPath</code> or within its
* subgraph are received.
*
* @return deep
*/
public boolean isDeep() {
return deep;
}
public void setDeep(boolean deep) {
this.deep = deep;
}
/**
* When a comma separated uuid list string is set, only events whose associated parent node has one of the
* identifiers in the comma separated uuid list will be received.
*
* @return comma separated uuid list string
*/
public String getUuids() {
return uuids;
}
public void setUuids(String uuids) {
this.uuids = uuids;
}
/**
* When a comma separated <code>nodeTypeName</code> list string is set, only events whose associated parent node has
* one of the node types (or a subtype of one of the node types) in this list will be received.
*/
public String getNodeTypeNames() {
return nodeTypeNames;
}
public void setNodeTypeNames(String nodeTypeNames) {
this.nodeTypeNames = nodeTypeNames;
}
/**
* If <code>noLocal</code> is <code>true</code>, then events generated by the session through which the listener was
* registered are ignored. Otherwise, they are not ignored.
*
* @return noLocal
*/
public boolean isNoLocal() {
return noLocal;
}
public void setNoLocal(boolean noLocal) {
this.noLocal = noLocal;
}
/**
* Interval in milliseconds to wait before the first session live checking. The default value is 3000 ms.
*
* @return sessionLiveCheckIntervalOnStart
*/
public long getSessionLiveCheckIntervalOnStart() {
return sessionLiveCheckIntervalOnStart;
}
public void setSessionLiveCheckIntervalOnStart(long sessionLiveCheckIntervalOnStart) {
if (sessionLiveCheckIntervalOnStart <= 0) {
throw new IllegalArgumentException("sessionLiveCheckIntervalOnStart must be positive value");
}
this.sessionLiveCheckIntervalOnStart = sessionLiveCheckIntervalOnStart;
}
/**
* Interval in milliseconds to wait before each session live checking The default value is 60000 ms.
*/
public long getSessionLiveCheckInterval() {
return sessionLiveCheckInterval;
}
public void setSessionLiveCheckInterval(long sessionLiveCheckInterval) {
if (sessionLiveCheckInterval <= 0) {
throw new IllegalArgumentException("sessionLiveCheckInterval must be positive value");
}
this.sessionLiveCheckInterval = sessionLiveCheckInterval;
}
/**
* The workspace to access. If it's not specified then the default one will be used
*/
public String getWorkspaceName() {
return workspaceName;
}
public void setWorkspaceName(String workspaceName) {
this.workspaceName = workspaceName;
}
/**
* Gets the destination name which was configured from the endpoint uri.
*
* @return the destination name resolved from the endpoint uri
*/
public String getEndpointConfiguredDestinationName() {
String remainder = StringHelper.after(getEndpointKey(), "//");
if (remainder != null && remainder.contains("@")) {
remainder = remainder.substring(remainder.indexOf('@'));
}
if (remainder != null && remainder.contains("?")) {
// remove parameters
remainder = StringHelper.before(remainder, "?");
}
if (ObjectHelper.isEmpty(remainder)) {
return remainder;
}
return remainder;
}
}
| JcrEndpoint |
java | apache__camel | components/camel-sql/src/main/java/org/apache/camel/component/sql/stored/template/ast/InParameter.java | {
"start": 1087,
"end": 2809
} | class ____ {
private final String name;
private final String typeName;
private final int sqlType;
private final Integer scale;
private ValueExtractor valueExtractor;
public InParameter(String name, int sqlType, Token valueSrcToken, Integer scale, String typeName) {
this.name = name;
this.sqlType = sqlType;
parseValueExpression(valueSrcToken);
this.scale = scale;
this.typeName = typeName;
if (this.scale != null && this.typeName != null) {
throw new ParseRuntimeException(
String.format("Both scale=%s and typeName=%s cannot be set", this.scale, this.typeName));
}
}
private void parseValueExpression(Token valueSrcToken) {
if (SSPTParserConstants.SIMPLE_EXP_TOKEN == valueSrcToken.kind) {
this.valueExtractor = (exchange, container) -> {
Expression exp = exchange.getContext().resolveLanguage("simple").createExpression(valueSrcToken.toString());
return exp.evaluate(exchange, Object.class);
};
} else if (SSPTParserConstants.PARAMETER_POS_TOKEN == valueSrcToken.kind) {
//remove leading :#
final String mapKey = valueSrcToken.toString().substring(2);
this.valueExtractor = (exchange, container) -> ((Map) container).get(mapKey);
}
}
public Integer getScale() {
return scale;
}
public String getName() {
return name;
}
public String getTypeName() {
return typeName;
}
public int getSqlType() {
return sqlType;
}
public ValueExtractor getValueExtractor() {
return valueExtractor;
}
}
| InParameter |
java | google__dagger | javatests/dagger/internal/codegen/SubcomponentCreatorValidationTest.java | {
"start": 33758,
"end": 34866
} | interface ____ {",
" Supertype build();",
" }",
"}");
CompilerTests.daggerCompiler(foo, bar, supertype, subcomponent)
.compile(
subject -> {
subject.hasErrorCount(0);
subject.hasWarningCount(1);
subject.hasWarningContaining(
process(
"test.HasSupertype.Builder.build() returns test.Supertype, but "
+ "test.HasSupertype declares additional component method(s): bar(). "
+ "In order to provide type-safe access to these methods, override "
+ "build() to return test.HasSupertype"))
.onSource(subcomponent)
.onLine(11);
});
}
@Test
public void covariantFactoryMethodReturnType_hasNewMethod_buildMethodInherited() {
Source foo =
CompilerTests.javaSource(
"test.Foo",
"package test;",
"",
"import javax.inject.Inject;",
"",
" | Builder |
java | apache__camel | components/camel-wal/src/main/java/org/apache/camel/component/wal/LogWriter.java | {
"start": 1314,
"end": 10552
} | class ____ implements AutoCloseable {
/**
* The default buffer capacity: 512 KiB
*/
public static final int DEFAULT_CAPACITY = 1024 * 512;
private static final Logger LOG = LoggerFactory.getLogger(LogWriter.class);
private final Lock lock = new ReentrantLock();
private final FileChannel fileChannel;
private final LogSupervisor flushPolicy;
private final TransactionLog transactionLog;
private long startOfRecords;
/**
* Constructs a new log writer with the default capacity {@link LogWriter#DEFAULT_CAPACITY} (512 KiB). If the file
* already exists, it will be truncated.
*
* @param logFile the transaction log file
* @param logSupervisor the log supervisor {@link LogSupervisor} for the writer
* @throws IOException in case of I/O errors
*/
public LogWriter(File logFile, LogSupervisor logSupervisor) throws IOException {
this(logFile, logSupervisor, DEFAULT_CAPACITY);
}
/**
* Constructs a new log writer with the default capacity {@link LogWriter#DEFAULT_CAPACITY} (512 KiB). If the file
* already exists, it will be truncated.
*
* @param logFile the transaction log file
* @param logSupervisor the log supervisor {@link LogSupervisor} for the writer
* @param maxRecordCount the maximum number of records to keep in the file. Beyond this count, entries will be
* rolled-over.
* @throws IOException in case of I/O errors
*/
LogWriter(File logFile, LogSupervisor logSupervisor, int maxRecordCount) throws IOException {
this.fileChannel = FileChannel.open(logFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING);
final Header header = Header.WA_DEFAULT_V1;
writeHeader(header);
this.flushPolicy = logSupervisor;
this.transactionLog = new TransactionLog(maxRecordCount);
this.flushPolicy.start(this::tryFlush);
}
/**
* Flushes the data to disk
*
* @throws IOException in case of I/O errors
*/
void flush() throws IOException {
fileChannel.force(true);
}
private void tryFlush() {
lock.lock();
try {
flush();
} catch (IOException e) {
LOG.error("Unable to save record: {}", e.getMessage(), e);
throw new RuntimeException(e);
} finally {
lock.unlock();
}
}
public void reset() throws IOException {
fileChannel.truncate(startOfRecords);
fileChannel.position(startOfRecords);
}
@Override
public void close() {
try {
flushPolicy.stop();
flush();
fileChannel.close();
} catch (IOException e) {
LOG.error(e.getMessage(), e);
}
}
private void writeHeader(final Header header) throws IOException {
ByteBuffer headerBuffer = ByteBuffer.allocate(Header.BYTES);
headerBuffer.put(header.getFormatName().getBytes());
headerBuffer.putInt(header.getFileVersion());
IOUtil.write(fileChannel, headerBuffer);
startOfRecords = fileChannel.position();
}
/**
* Appends an entry to the transaction log file
*
* @param entry the entry to write to the transaction log
* @return An entry info instance with the metadata for the appended log entry
* @throws IOException for lower-level I/O errors
*/
public EntryInfo.CachedEntryInfo append(LogEntry entry) throws IOException {
final TransactionLog.LayerInfo layerInfo = transactionLog.add(entry);
if (layerInfo.getLayer() == 0) {
return persist(layerInfo, entry);
}
if (layerInfo.isRollingOver()) {
reset();
}
LOG.trace("Writing at position {}", fileChannel.position());
EntryInfo.CachedEntryInfo spear = persist(layerInfo, entry);
final List<EntryInfo> collect = transactionLog.stream()
.filter(c -> c != null && c.layerInfo.getLayer() != transactionLog.currentLayer())
.map(e -> tryPersist(layerInfo, e.logEntry)).collect(Collectors.toList());
if (!collect.isEmpty()) {
final EntryInfo lastOnLayer = collect.get(0);
LOG.trace("Current pos is: {}", fileChannel.position());
LOG.trace("Next pos should be: {}", lastOnLayer.getPosition());
fileChannel.position(lastOnLayer.getPosition());
LOG.trace("Current pos now is: {}", fileChannel.position());
}
return spear;
}
/**
* Persists an entry to the log
*
* @param layerInfo the in-memory layer information about the record being persisted
* @param entry the entry to persist
* @param position the position in the channel where the entry will be persisted
* @throws IOException in case of lower-level I/O errors
*/
private void persist(TransactionLog.LayerInfo layerInfo, LogEntry entry, long position) throws IOException {
ByteBuffer updateBuffer = ByteBuffer.allocate(entry.size());
IOUtil.serialize(updateBuffer, entry);
if (LOG.isTraceEnabled()) {
LOG.trace("Position: {} for record {} with key {}", position, layerInfo, new String(entry.getKey()));
}
long size = IOUtil.write(fileChannel, updateBuffer, position);
if (size == 0) {
LOG.warn("No bytes written for the given record!");
}
}
/**
* Persists an entry to the log
*
* @param layerInfo the in-memory layer information about the record being persisted
* @param entry the entry to persist
* @return an {@link EntryInfo} instance with details of the entry that was just persisted
* @throws IOException in case of lower-level I/O errors
*/
private EntryInfo.CachedEntryInfo persist(TransactionLog.LayerInfo layerInfo, LogEntry entry) throws IOException {
final byte[] key = entry.getKey();
final byte[] value = entry.getValue();
ByteBuffer writeBuffer = ByteBuffer.allocate(LogEntry.size(key, value));
IOUtil.serialize(writeBuffer, entry);
long recordPosition = fileChannel.position();
IOUtil.write(fileChannel, writeBuffer);
return EntryInfo.createForCached(recordPosition, layerInfo);
}
/**
* A wrapper for {@link LogWriter#persist(TransactionLog.LayerInfo, LogEntry)} that throws runtime errors on failure
*
* @param layerInfo the in-memory layer information about the record being persisted
* @param entry the entry to persist
* @return an {@link EntryInfo} instance with details of the entry that was just persisted
*/
private EntryInfo tryPersist(TransactionLog.LayerInfo layerInfo, LogEntry entry) {
try {
return persist(layerInfo, entry);
} catch (IOException e) {
throw new RuntimeCamelException(e);
}
}
/**
* Updates the state of af entry (i.e.: to mark them after they have seen successfully processed)
*
* @param entryInfo the entry information about the entry being updated
* @param state the state to update the entry to
* @throws IOException in case of lower-level I/O errors
*/
public void updateState(EntryInfo.CachedEntryInfo entryInfo, LogEntry.EntryState state) throws IOException {
final TransactionLog.LayerInfo layerInfo = entryInfo.getLayerInfo();
/*
If it has layer information, then it's a hot record kept in the cache. In this case, just
update the cache and let the LogSupervisor flush to disk.
Trying to update a persisted entry here is not acceptable
*/
assert layerInfo != null;
final LogEntry logEntry = transactionLog.update(layerInfo, state);
if (logEntry != null) {
persist(layerInfo, logEntry, entryInfo.getPosition());
}
}
/**
* Updates the state of af entry that has been already persisted to disk. Wraps any lower-level I/O errors in
* runtime exceptions
*
* @param entry the entry to update
* @param state the state to update the entry to
* @throws IOException if the buffer is too small for the entry or in case of lower-level I/O errors
*/
public void updateState(PersistedLogEntry entry, LogEntry.EntryState state) throws IOException {
ByteBuffer updateBuffer = ByteBuffer.allocate(entry.size());
IOUtil.serialize(updateBuffer, state.getCode(), entry.getKeyMetadata(), entry.getKey(), entry.getValueMetadata(),
entry.getValue());
final EntryInfo entryInfo = entry.getEntryInfo();
if (LOG.isTraceEnabled()) {
LOG.trace("Position: {} with key {}", entryInfo.getPosition(), new String(entry.getKey()));
}
long size = IOUtil.write(fileChannel, updateBuffer, entryInfo.getPosition());
if (size == 0) {
LOG.warn("No bytes written for the given record!");
}
}
}
| LogWriter |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 32652,
"end": 32831
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return definesSetReturningFunction( dialect, "unnest" );
}
}
public static | SupportsUnnest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorFactory.java | {
"start": 2199,
"end": 2687
} | class ____ extends NonCollectingAggregator {
Unmapped(String name, AggregationContext context, Aggregator parent, AggregatorFactories factories, Map<String, Object> metadata)
throws IOException {
super(name, context, parent, factories, metadata);
}
@Override
public InternalAggregation buildEmptyAggregation() {
return new InternalReverseNested(name, 0, buildEmptySubAggregations(), metadata());
}
}
}
| Unmapped |
java | spring-projects__spring-boot | core/spring-boot-test/src/main/java/org/springframework/boot/test/context/PropertyMapping.java | {
"start": 2507,
"end": 2745
} | enum ____ {
/**
* Skip mapping the property.
*/
YES,
/**
* Skip mapping the property when the default attribute value is specified.
*/
ON_DEFAULT_VALUE,
/**
* Don't skip mapping the property.
*/
NO
}
}
| Skip |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/node/PluginComponentBinding.java | {
"start": 2213,
"end": 2313
} | class ____ isolation strategies nor visibility of the provided classes.
* </p>
* @param <I> The | loader |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/OpenApiResourceSecuredAtMethodLevel.java | {
"start": 638,
"end": 2497
} | class ____ {
@SuppressWarnings("unused")
private ResourceBean resourceBean;
@GET
@Path("/test-security/naked")
@RolesAllowed("admin")
public String secureEndpointWithoutSecurityAnnotation() {
return "secret";
}
@GET
@Path("/test-security/annotated")
@RolesAllowed("admin")
@SecurityRequirement(name = "JWTCompanyAuthentication")
public String secureEndpointWithSecurityAnnotation() {
return "secret";
}
@GET
@Path("/test-security/methodLevel/1")
@RolesAllowed("user1")
public String secureEndpoint1() {
return "secret";
}
@GET
@Path("/test-security/methodLevel/2")
@RolesAllowed("user2")
public String secureEndpoint2() {
return "secret";
}
@GET
@Path("/test-security/methodLevel/public")
public String publicEndpoint() {
return "boo";
}
@APIResponses({
@APIResponse(responseCode = "401", description = "Who are you?"),
@APIResponse(responseCode = "403", description = "You cannot do that.")
})
@GET
@Path("/test-security/annotated/documented")
@RolesAllowed("admin")
@SecurityRequirement(name = "JWTCompanyAuthentication")
public String secureEndpointWithSecurityAnnotationAndDocument() {
return "secret";
}
@APIResponses({
@APIResponse(responseCode = "401", description = "Who are you?"),
@APIResponse(responseCode = "403", description = "You cannot do that.")
})
@GET
@Path("/test-security/methodLevel/3")
@RolesAllowed("admin")
public String secureEndpoint3() {
return "secret";
}
@GET
@Path("/test-security/methodLevel/4")
@PermissionsAllowed("secure:read")
public String secureEndpoint5() {
return "secret";
}
}
| OpenApiResourceSecuredAtMethodLevel |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/pretty/MessageHelper.java | {
"start": 947,
"end": 11754
} | class ____ {
private MessageHelper() {
}
// entities ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Generate an info message string relating to a particular entity,
* based on the given entityName and id.
*
* @param entityName The defined entity name.
* @param id The entity id value.
* @return An info string, in the form [FooBar#1].
*/
public static String infoString(@Nullable String entityName, @Nullable Object id) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if ( entityName == null ) {
info.append( "unknown entity name" );
}
else {
info.append( entityName );
}
if ( id == null ) {
info.append( " with null id" );
}
else {
info.append( " with id '" ).append( id ).append( "'" );
}
info.append( ']' );
return info.toString();
}
/**
* Generate an info message string relating to a particular entity.
*
* @param persister The persister for the entity
* @param id The entity id value
* @param factory The session factory - Could be null!
* @return An info string, in the form [FooBar#1]
*/
public static String infoString(
@Nullable EntityPersister persister,
@Nullable Object id,
@Nullable SessionFactoryImplementor factory) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
Type idType;
if( persister == null ) {
info.append( "unknown entity" );
idType = null;
}
else {
info.append( persister.getEntityName() );
idType = persister.getIdentifierType();
}
if ( id == null ) {
info.append( " with null id" );
}
else {
info.append( " with id '" );
if ( idType == null ) {
info.append( id );
}
else if ( factory != null ) {
info.append( idType.toLoggableString( id, factory ) );
}
else {
info.append( "<not loggable>" );
}
info.append( "'" );
}
info.append( ']' );
return info.toString();
}
/**
* Generate an info message string relating to a particular entity,.
*
* @param persister The persister for the entity
* @param id The entity id value
* @param identifierType The entity identifier type mapping
* @param factory The session factory
* @return An info string, in the form [FooBar#1]
*/
public static String infoString(
@Nullable EntityPersister persister,
@Nullable Object id,
Type identifierType,
SessionFactoryImplementor factory) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if( persister == null ) {
info.append( "unknown entity" );
}
else {
info.append( persister.getEntityName() );
}
if ( id == null ) {
info.append( " with null id" );
}
else {
info.append( " with id '" ).append( identifierType.toLoggableString( id, factory ) ).append( "'" );
}
info.append( ']' );
return info.toString();
}
/**
* Generate an info message string relating to a series of entities.
*
* @param persister The persister for the entities
* @param ids The entity id values
* @param factory The session factory
* @return An info string, in the form [FooBar#<1,2,3>]
*/
public static String infoString(
@Nullable EntityPersister persister,
Object[] ids,
SessionFactoryImplementor factory) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if ( persister == null ) {
info.append( "unknown entity" );
}
else {
info.append( persister.getEntityName() );
info.append( " with ids " );
for ( int i=0; i<ids.length; i++ ) {
info.append( "'" )
.append( persister.getIdentifierType().toLoggableString( ids[i], factory ) )
.append( "'" );
if ( i < ids.length-1 ) {
info.append( ", " );
}
}
}
info.append( ']' );
return info.toString();
}
/**
* Generate an info message string relating to given entity persister.
*
* @param persister The persister.
* @return An info string, in the form [FooBar]
*/
public static String infoString(@Nullable EntityPersister persister) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if ( persister == null ) {
info.append( "unknown entity" );
}
else {
info.append( persister.getEntityName() );
}
info.append( ']' );
return info.toString();
}
/**
* Generate an info message string relating to a given property value
* for an entity.
*
* @param entityName The entity name
* @param propertyName The name of the property
* @param key The property value.
* @return An info string, in the form [Foo.bars#1]
*/
public static String infoString(String entityName, String propertyName, @Nullable Object key) {
final StringBuilder info = new StringBuilder()
.append( '[' )
.append( entityName )
.append( '.' )
.append( propertyName );
if ( key == null ) {
info.append( " with null owner id" );
}
else {
info.append( " with owner id '" ).append( key ).append( "'" );
}
info.append( ']' );
return info.toString();
}
// collections ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* Generate an info message string relating to a particular managed
* collection. Attempts to intelligently handle property-refs issues
* where the collection key is not the same as the owner key.
*
* @param persister The persister for the collection
* @param collection The collection itself
* @param collectionKey The collection key
* @param session The session
* @return An info string, in the form [Foo.bars#1]
*/
public static String collectionInfoString(
@Nullable CollectionPersister persister,
@Nullable PersistentCollection<?> collection,
Object collectionKey,
SharedSessionContractImplementor session ) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if ( persister == null ) {
info.append( "unreferenced collection" );
}
else {
info.append( persister.getRole() );
final Type ownerIdentifierType =
persister.getOwnerEntityPersister().getIdentifierType();
final Object ownerKey;
// TODO: Is it redundant to attempt to use the collectionKey,
// or is always using the owner id sufficient?
if ( collectionKey.getClass().isAssignableFrom(
ownerIdentifierType.getReturnedClass() ) ) {
ownerKey = collectionKey;
}
else {
final Object collectionOwner = collection == null ? null
: collection.getOwner();
final EntityEntry entry = collectionOwner == null ? null
: session.getPersistenceContextInternal().getEntry( collectionOwner );
ownerKey = entry == null ? null : entry.getId();
}
info.append( " with owner id '" )
.append( ownerIdentifierType.toLoggableString( ownerKey, session.getFactory() ) )
.append( "'" );
}
info.append( ']' );
return info.toString();
}
/**
* Generate an info message string relating to a series of managed
* collections.
*
* @param persister The persister for the collections
* @param ids The id values of the owners
* @param factory The session factory
* @return An info string, in the form [Foo.bars#<1,2,3>]
*/
public static String collectionInfoString(
@Nullable CollectionPersister persister,
Object[] ids,
SessionFactoryImplementor factory) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if ( persister == null ) {
info.append( "unreferenced collection" );
}
else {
info.append( persister.getRole() );
info.append( " with owner ids " );
for ( int i = 0; i < ids.length; i++ ) {
info.append( "'" );
addIdToCollectionInfoString( persister, ids[i], factory, info );
info.append( "'" );
if ( i < ids.length-1 ) {
info.append( ", " );
}
}
}
info.append( ']' );
return info.toString();
}
/**
* Generate an info message string relating to a particular managed
* collection.
*
* @param persister The persister for the collection
* @param id The id value of the owner
* @param factory The session factory
* @return An info string, in the form [Foo.bars#1]
*/
public static String collectionInfoString(
@Nullable CollectionPersister persister,
@Nullable Object id,
SessionFactoryImplementor factory) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if ( persister == null ) {
info.append( "unreferenced collection" );
}
else {
info.append( persister.getRole() );
if ( id == null ) {
info.append( " with null owner id" );
}
else {
info.append( " with owner id '" );
addIdToCollectionInfoString( persister, id, factory, info );
info.append( "'" );
}
}
info.append( ']' );
return info.toString();
}
private static void addIdToCollectionInfoString(
CollectionPersister persister,
Object id,
SessionFactoryImplementor factory,
StringBuilder s ) {
// Need to use the identifier type of the collection owner
// since the incoming value is actually the owner's id.
// Using the collection's key type causes problems with
// property-ref keys.
// Also need to check that the expected identifier type matches
// the given ID. Due to property-ref keys, the collection key
// may not be the owner key.
final Type ownerIdentifierType =
persister.getOwnerEntityPersister().getIdentifierType();
if ( id.getClass().isAssignableFrom(
ownerIdentifierType.getReturnedClass() ) ) {
s.append( ownerIdentifierType.toLoggableString( id, factory ) );
}
else {
// TODO: This is a crappy backup if a property-ref is used.
// If the reference is an object w/o toString(), this isn't going to work.
s.append( id );
}
}
/**
* Generate an info message string relating to a particular managed
* collection.
*
* @param role The role-name of the collection
* @param id The id value of the owner
* @return An info string, in the form [Foo.bars#1]
*/
public static String collectionInfoString(@Nullable String role, @Nullable Object id) {
final StringBuilder info = new StringBuilder();
info.append( '[' );
if( role == null ) {
info.append( "unreferenced collection" );
}
else {
info.append( role );
if ( id == null ) {
info.append( " with null owner id" );
}
else {
info.append( " with owner id '" ).append( id ).append( "'" );
}
}
info.append( ']' );
return info.toString();
}
public static String collectionInfoString(PluralAttributeMapping loadable, Object key) {
final CollectionPersister collectionDescriptor = loadable.getCollectionDescriptor();
return collectionInfoString( collectionDescriptor, key, collectionDescriptor.getFactory() );
}
public static String infoString(EntityMappingType loadable, Object id) {
final EntityPersister persister = loadable.getEntityPersister();
return infoString( persister, id, persister.getFactory() );
}
}
| MessageHelper |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/interceptor/TransactionScopedInterceptorTest.java | {
"start": 3738,
"end": 3983
} | class ____ {
@Id
public Integer id;
public MyEntity() {
}
public MyEntity(int id) {
this.id = id;
}
}
@PersistenceUnitExtension
@TransactionScoped
public static | MyEntity |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/SimplePropertyRowMapper.java | {
"start": 4213,
"end": 8361
} | class ____ each row should be mapped to
* @param conversionService a {@link ConversionService} for binding
* JDBC values to bean properties
*/
public SimplePropertyRowMapper(Class<T> mappedClass, ConversionService conversionService) {
Assert.notNull(mappedClass, "Mapped Class must not be null");
Assert.notNull(conversionService, "ConversionService must not be null");
this.mappedClass = mappedClass;
this.conversionService = conversionService;
this.mappedConstructor = BeanUtils.getResolvableConstructor(mappedClass);
int paramCount = this.mappedConstructor.getParameterCount();
this.constructorParameterNames = (paramCount > 0 ?
BeanUtils.getParameterNames(this.mappedConstructor) : new String[0]);
this.constructorParameterTypes = new TypeDescriptor[paramCount];
for (int i = 0; i < paramCount; i++) {
this.constructorParameterTypes[i] = new TypeDescriptor(new MethodParameter(this.mappedConstructor, i));
}
}
@Override
public T mapRow(ResultSet rs, int rowNumber) throws SQLException {
@Nullable Object[] args = new Object[this.constructorParameterNames.length];
Set<Integer> usedIndex = new HashSet<>();
for (int i = 0; i < args.length; i++) {
String name = this.constructorParameterNames[i];
int index;
try {
// Try direct name match first
index = rs.findColumn(name);
}
catch (SQLException ex) {
// Try underscored name match instead
index = rs.findColumn(JdbcUtils.convertPropertyNameToUnderscoreName(name));
}
TypeDescriptor td = this.constructorParameterTypes[i];
Object value = JdbcUtils.getResultSetValue(rs, index, td.getType());
usedIndex.add(index);
args[i] = this.conversionService.convert(value, td);
}
T mappedObject = BeanUtils.instantiateClass(this.mappedConstructor, args);
ResultSetMetaData rsmd = rs.getMetaData();
int columnCount = rsmd.getColumnCount();
for (int index = 1; index <= columnCount; index++) {
if (!usedIndex.contains(index)) {
Object desc = getDescriptor(JdbcUtils.lookupColumnName(rsmd, index));
if (desc instanceof MethodParameter mp) {
Method method = mp.getMethod();
if (method != null) {
Object value = JdbcUtils.getResultSetValue(rs, index, mp.getParameterType());
value = this.conversionService.convert(value, new TypeDescriptor(mp));
ReflectionUtils.makeAccessible(method);
ReflectionUtils.invokeMethod(method, mappedObject, value);
}
}
else if (desc instanceof Field field) {
Object value = JdbcUtils.getResultSetValue(rs, index, field.getType());
value = this.conversionService.convert(value, new TypeDescriptor(field));
ReflectionUtils.makeAccessible(field);
ReflectionUtils.setField(field, mappedObject, value);
}
}
}
return mappedObject;
}
private Object getDescriptor(String column) {
return this.propertyDescriptors.computeIfAbsent(column, name -> {
// Try direct match first
PropertyDescriptor pd = BeanUtils.getPropertyDescriptor(this.mappedClass, name);
if (pd != null && pd.getWriteMethod() != null) {
return BeanUtils.getWriteMethodParameter(pd);
}
Field field = ReflectionUtils.findField(this.mappedClass, name);
if (field != null) {
return field;
}
// Try de-underscored match instead
String adaptedName = JdbcUtils.convertUnderscoreNameToPropertyName(name);
if (!adaptedName.equals(name)) {
pd = BeanUtils.getPropertyDescriptor(this.mappedClass, adaptedName);
if (pd != null && pd.getWriteMethod() != null) {
return BeanUtils.getWriteMethodParameter(pd);
}
field = ReflectionUtils.findField(this.mappedClass, adaptedName);
if (field != null) {
return field;
}
}
// Fallback: case-insensitive match
PropertyDescriptor[] pds = BeanUtils.getPropertyDescriptors(this.mappedClass);
for (PropertyDescriptor candidate : pds) {
if (name.equalsIgnoreCase(candidate.getName())) {
return BeanUtils.getWriteMethodParameter(candidate);
}
}
field = ReflectionUtils.findFieldIgnoreCase(this.mappedClass, name);
if (field != null) {
return field;
}
return NO_DESCRIPTOR;
});
}
}
| that |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/providers/serialisers/ServerFormUrlEncodedProvider.java | {
"start": 1159,
"end": 2433
} | class ____ extends MapAsFormUrlEncodedProvider
implements ServerMessageBodyReader<MultivaluedMap>, ServerMessageBodyWriter<MultivaluedMap> {
@Override
public boolean isReadable(Class<?> type, Type genericType, ResteasyReactiveResourceInfo lazyMethod, MediaType mediaType) {
return MultivaluedMap.class.equals(type);
}
@Override
public MultivaluedMap readFrom(Class<MultivaluedMap> type, Type genericType, MediaType mediaType,
ServerRequestContext context) throws WebApplicationException, IOException {
return doReadFrom(mediaType, context.getInputStream());
}
@Override
public boolean isWriteable(Class<?> type, Type genericType, ResteasyReactiveResourceInfo target, MediaType mediaType) {
return MultivaluedMap.class.isAssignableFrom(type);
}
@Override
public void writeResponse(MultivaluedMap o, Type genericType, ServerRequestContext context) throws WebApplicationException {
try {
// FIXME: use response encoding
context.serverResponse().end(multiValuedMapToString(o, MessageReaderUtil.UTF8_CHARSET));
} catch (UnsupportedEncodingException e) {
throw new WebApplicationException(e);
}
}
}
| ServerFormUrlEncodedProvider |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForBoolean.java | {
"start": 3909,
"end": 4755
} | class ____ extends KeyExtractorForBoolean {
private final BooleanBlock block;
MinFromUnorderedBlock(TopNEncoder encoder, byte nul, byte nonNul, BooleanBlock block) {
super(encoder, nul, nonNul);
this.block = block;
}
@Override
public int writeKey(BreakingBytesRefBuilder key, int position) {
int size = block.getValueCount(position);
if (size == 0) {
return nul(key);
}
int start = block.getFirstValueIndex(position);
int end = start + size;
for (int i = start; i < end; i++) {
if (block.getBoolean(i) == false) {
return nonNul(key, false);
}
}
return nonNul(key, true);
}
}
static | MinFromUnorderedBlock |
java | google__guava | android/guava/src/com/google/common/collect/AbstractSequentialIterator.java | {
"start": 1422,
"end": 2383
} | class ____<T> extends UnmodifiableIterator<T> {
private @Nullable T nextOrNull;
/**
* Creates a new iterator with the given first element, or, if {@code firstOrNull} is null,
* creates a new empty iterator.
*/
protected AbstractSequentialIterator(@Nullable T firstOrNull) {
this.nextOrNull = firstOrNull;
}
/**
* Returns the element that follows {@code previous}, or returns {@code null} if no elements
* remain. This method is invoked during each call to {@link #next()} in order to compute the
* result of a <i>future</i> call to {@code next()}.
*/
protected abstract @Nullable T computeNext(T previous);
@Override
public final boolean hasNext() {
return nextOrNull != null;
}
@Override
public final T next() {
if (nextOrNull == null) {
throw new NoSuchElementException();
}
T oldNext = nextOrNull;
nextOrNull = computeNext(oldNext);
return oldNext;
}
}
| AbstractSequentialIterator |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/uri/DefaultFormUrlEncodedDecoder.java | {
"start": 985,
"end": 1782
} | class ____ implements FormUrlEncodedDecoder {
private final FormConfiguration formConfiguration;
DefaultFormUrlEncodedDecoder(FormConfiguration formConfiguration) {
this.formConfiguration = formConfiguration;
}
@Override
@NonNull
public Map<String, Object> decode(@NonNull String formUrlEncodedString,
@NonNull Charset charset) {
QueryStringDecoder decoder = new QueryStringDecoder(formUrlEncodedString, charset, false,
formConfiguration.getMaxDecodedKeyValueParameters(),
formConfiguration.isSemicolonIsNormalChar());
return flatten(decoder.parameters());
}
}
| DefaultFormUrlEncodedDecoder |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/core/context/TransientSecurityContext.java | {
"start": 1094,
"end": 1373
} | class ____ extends SecurityContextImpl {
@Serial
private static final long serialVersionUID = -7925492364422193347L;
public TransientSecurityContext() {
}
public TransientSecurityContext(Authentication authentication) {
super(authentication);
}
}
| TransientSecurityContext |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/internal/Double2DArrays.java | {
"start": 847,
"end": 6185
} | class ____ {
private static final Double2DArrays INSTANCE = new Double2DArrays();
/**
* Returns the singleton instance of this class.
*
* @return the singleton instance of this class.
*/
public static Double2DArrays instance() {
return INSTANCE;
}
private Arrays2D arrays = Arrays2D.instance();
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
Failures failures = Failures.instance();
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
public void setArrays(Arrays2D arrays) {
this.arrays = arrays;
}
/**
* Asserts that the given array is {@code null} or empty.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @throws AssertionError if the given array is not {@code null} *and* contains one or more elements.
*/
public void assertNullOrEmpty(AssertionInfo info, double[][] actual) {
arrays.assertNullOrEmpty(info, failures, actual);
}
/**
* Asserts that the given array is empty.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array is not empty.
*/
public void assertEmpty(AssertionInfo info, double[][] actual) {
arrays.assertEmpty(info, failures, actual);
}
/**
* Asserts that the given array is not empty.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given array is empty.
*/
public void assertNotEmpty(AssertionInfo info, double[][] actual) {
arrays.assertNotEmpty(info, failures, actual);
}
/**
* Asserts that the number of elements in the given array is equal to the expected one.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param expectedFirstDimension the expected first dimension size of {@code actual}.
* @param expectedSecondDimension the expected second dimension size of {@code actual}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the actual array's dimensions are not equal to the given ones.
*/
public void assertHasDimensions(AssertionInfo info, double[][] actual, int expectedFirstDimension,
int expectedSecondDimension) {
arrays.assertHasDimensions(info, failures, actual, expectedFirstDimension, expectedSecondDimension);
}
/**
* Assert that the actual array has the same dimensions as the other array.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param other the group to compare
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the other group is {@code null}.
* @throws AssertionError if the actual group does not have the same dimension.
*/
public void assertHasSameDimensionsAs(AssertionInfo info, double[][] actual, Object other) {
arrays.assertHasSameDimensionsAs(info, actual, other);
}
/**
* Asserts that the number of rows in the given array is equal to the expected one.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param expectedNumberOfRows the expected first dimension size of {@code actual}.
*/
public void assertNumberOfRows(AssertionInfo info, double[][] actual, int expectedNumberOfRows) {
arrays.assertNumberOfRows(info, failures, actual, expectedNumberOfRows);
}
/**
* Verifies that the given array contains the given value at the given index.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param value the value to look for.
* @param index the index where the value should be stored in the given array.
* @throws AssertionError if the given array is {@code null} or empty.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws IndexOutOfBoundsException if the value of the given {@code Index} is equal to or greater than the size of
* the given array.
* @throws AssertionError if the given array does not contain the given value at the given index.
*/
public void assertContains(AssertionInfo info, double[][] actual, double[] value, Index index) {
arrays.assertContains(info, failures, actual, value, index);
}
/**
* Verifies that the given array does not contain the given value at the given index.
*
* @param info contains information about the assertion.
* @param actual the given array.
* @param value the value to look for.
* @param index the index where the value should be stored in the given array.
* @throws AssertionError if the given array is {@code null}.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws AssertionError if the given array contains the given value at the given index.
*/
public void assertDoesNotContain(AssertionInfo info, double[][] actual, double[] value, Index index) {
arrays.assertDoesNotContain(info, failures, actual, value, index);
}
}
| Double2DArrays |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/AsyncExecutionsImpl.java | {
"start": 589,
"end": 11004
} | class ____<T> implements AsyncExecutions<T> {
@SuppressWarnings({ "unchecked", "rawtypes" })
private static final AtomicReferenceFieldUpdater<AsyncExecutionsImpl<?>, CompletionStage> UPDATER = (AtomicReferenceFieldUpdater) AtomicReferenceFieldUpdater
.newUpdater(AsyncExecutionsImpl.class, CompletionStage.class, "publicStage");
private final Map<RedisClusterNode, CompletableFuture<T>> executions;
private volatile CompletionStage<List<T>> publicStage;
@SuppressWarnings("unchecked")
public AsyncExecutionsImpl(Map<RedisClusterNode, CompletionStage<? extends T>> executions) {
Map<RedisClusterNode, CompletionStage<? extends T>> map = new HashMap<>(executions);
this.executions = Collections.unmodifiableMap((Map) map);
}
@Override
public Map<RedisClusterNode, CompletableFuture<T>> asMap() {
return executions;
}
@Override
public Iterator<CompletableFuture<T>> iterator() {
return asMap().values().iterator();
}
@Override
public Collection<RedisClusterNode> nodes() {
return executions.keySet();
}
@Override
public CompletableFuture<T> get(RedisClusterNode redisClusterNode) {
return executions.get(redisClusterNode);
}
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public CompletableFuture<T>[] futures() {
return executions.values().toArray(new CompletableFuture[0]);
}
@Override
public <R, A> CompletionStage<R> thenCollect(Collector<? super T, A, R> collector) {
return publicStage().thenApply(items -> {
A container = collector.supplier().get();
BiConsumer<A, ? super T> accumulator = collector.accumulator();
items.forEach(item -> accumulator.accept(container, item));
if (collector.characteristics().contains(Collector.Characteristics.IDENTITY_FINISH)) {
return (R) container;
}
return collector.finisher().apply(container);
});
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private CompletionStage<List<T>> publicStage() {
CompletionStage stage = UPDATER.get(this);
if (stage == null) {
stage = createPublicStage(this.executions);
UPDATER.compareAndSet(this, null, stage);
}
return stage;
}
@SuppressWarnings("rawtypes")
private CompletableFuture<List<T>> createPublicStage(Map<RedisClusterNode, CompletableFuture<T>> map) {
return CompletableFuture.allOf(map.values().toArray(new CompletableFuture[0])).thenApply(ignore -> {
List<T> results = new ArrayList<>(map.size());
for (CompletionStage<? extends T> value : map.values()) {
results.add(value.toCompletableFuture().join());
}
return results;
});
}
// --------------------------------
// delegate methods.
// --------------------------------
@Override
public <U> CompletionStage<U> thenApply(Function<? super List<T>, ? extends U> fn) {
return publicStage().thenApply(fn);
}
@Override
public <U> CompletionStage<U> thenApplyAsync(Function<? super List<T>, ? extends U> fn) {
return publicStage().thenApplyAsync(fn);
}
@Override
public <U> CompletionStage<U> thenApplyAsync(Function<? super List<T>, ? extends U> fn, Executor executor) {
return publicStage().thenApplyAsync(fn, executor);
}
@Override
public CompletionStage<Void> thenAccept(Consumer<? super List<T>> action) {
return publicStage().thenAccept(action);
}
@Override
public CompletionStage<Void> thenAcceptAsync(Consumer<? super List<T>> action) {
return publicStage().thenAcceptAsync(action);
}
@Override
public CompletionStage<Void> thenAcceptAsync(Consumer<? super List<T>> action, Executor executor) {
return publicStage().thenAcceptAsync(action, executor);
}
@Override
public CompletionStage<Void> thenRun(Runnable action) {
return publicStage().thenRun(action);
}
@Override
public CompletionStage<Void> thenRunAsync(Runnable action) {
return publicStage().thenRunAsync(action);
}
@Override
public CompletionStage<Void> thenRunAsync(Runnable action, Executor executor) {
return publicStage().thenRunAsync(action, executor);
}
@Override
public <U, V> CompletionStage<V> thenCombine(CompletionStage<? extends U> other,
BiFunction<? super List<T>, ? super U, ? extends V> fn) {
return publicStage().thenCombine(other, fn);
}
@Override
public <U, V> CompletionStage<V> thenCombineAsync(CompletionStage<? extends U> other,
BiFunction<? super List<T>, ? super U, ? extends V> fn) {
return publicStage().thenCombineAsync(other, fn);
}
@Override
public <U, V> CompletionStage<V> thenCombineAsync(CompletionStage<? extends U> other,
BiFunction<? super List<T>, ? super U, ? extends V> fn, Executor executor) {
return publicStage().thenCombineAsync(other, fn, executor);
}
@Override
public <U> CompletionStage<Void> thenAcceptBoth(CompletionStage<? extends U> other,
BiConsumer<? super List<T>, ? super U> action) {
return publicStage().thenAcceptBoth(other, action);
}
@Override
public <U> CompletionStage<Void> thenAcceptBothAsync(CompletionStage<? extends U> other,
BiConsumer<? super List<T>, ? super U> action) {
return publicStage().thenAcceptBothAsync(other, action);
}
@Override
public <U> CompletionStage<Void> thenAcceptBothAsync(CompletionStage<? extends U> other,
BiConsumer<? super List<T>, ? super U> action, Executor executor) {
return publicStage().thenAcceptBothAsync(other, action, executor);
}
@Override
public CompletionStage<Void> runAfterBoth(CompletionStage<?> other, Runnable action) {
return publicStage().runAfterBoth(other, action);
}
@Override
public CompletionStage<Void> runAfterBothAsync(CompletionStage<?> other, Runnable action) {
return publicStage().runAfterBothAsync(other, action);
}
@Override
    // ------------------------------------------------------------------
    // CompletionStage<List<T>> delegation.
    // Every method in this section is a pure pass-through to the stage
    // returned by publicStage(): all combinator semantics come from that
    // delegate, and this class layers no behavior of its own. The
    // forwarding exists only so this type can expose the full
    // CompletionStage API while keeping the backing stage private.
    // ------------------------------------------------------------------
    public CompletionStage<Void> runAfterBothAsync(CompletionStage<?> other, Runnable action, Executor executor) {
        return publicStage().runAfterBothAsync(other, action, executor);
    }
    @Override
    public <U> CompletionStage<U> applyToEither(CompletionStage<? extends List<T>> other, Function<? super List<T>, U> fn) {
        return publicStage().applyToEither(other, fn);
    }
    @Override
    public <U> CompletionStage<U> applyToEitherAsync(CompletionStage<? extends List<T>> other,
            Function<? super List<T>, U> fn) {
        return publicStage().applyToEitherAsync(other, fn);
    }
    @Override
    public <U> CompletionStage<U> applyToEitherAsync(CompletionStage<? extends List<T>> other, Function<? super List<T>, U> fn,
            Executor executor) {
        return publicStage().applyToEitherAsync(other, fn, executor);
    }
    @Override
    public CompletionStage<Void> acceptEither(CompletionStage<? extends List<T>> other, Consumer<? super List<T>> action) {
        return publicStage().acceptEither(other, action);
    }
    @Override
    public CompletionStage<Void> acceptEitherAsync(CompletionStage<? extends List<T>> other, Consumer<? super List<T>> action) {
        return publicStage().acceptEitherAsync(other, action);
    }
    @Override
    public CompletionStage<Void> acceptEitherAsync(CompletionStage<? extends List<T>> other, Consumer<? super List<T>> action,
            Executor executor) {
        return publicStage().acceptEitherAsync(other, action, executor);
    }
    @Override
    public CompletionStage<Void> runAfterEither(CompletionStage<?> other, Runnable action) {
        return publicStage().runAfterEither(other, action);
    }
    @Override
    public CompletionStage<Void> runAfterEitherAsync(CompletionStage<?> other, Runnable action) {
        return publicStage().runAfterEitherAsync(other, action);
    }
    @Override
    public CompletionStage<Void> runAfterEitherAsync(CompletionStage<?> other, Runnable action, Executor executor) {
        return publicStage().runAfterEitherAsync(other, action, executor);
    }
    @Override
    public <U> CompletionStage<U> thenCompose(Function<? super List<T>, ? extends CompletionStage<U>> fn) {
        return publicStage().thenCompose(fn);
    }
    @Override
    public <U> CompletionStage<U> thenComposeAsync(Function<? super List<T>, ? extends CompletionStage<U>> fn) {
        return publicStage().thenComposeAsync(fn);
    }
    @Override
    public <U> CompletionStage<U> thenComposeAsync(Function<? super List<T>, ? extends CompletionStage<U>> fn,
            Executor executor) {
        return publicStage().thenComposeAsync(fn, executor);
    }
    // Completion-observation operators: same delegation pattern as above.
    @Override
    public CompletionStage<List<T>> exceptionally(Function<Throwable, ? extends List<T>> fn) {
        return publicStage().exceptionally(fn);
    }
    @Override
    public CompletionStage<List<T>> whenComplete(BiConsumer<? super List<T>, ? super Throwable> action) {
        return publicStage().whenComplete(action);
    }
    @Override
    public CompletionStage<List<T>> whenCompleteAsync(BiConsumer<? super List<T>, ? super Throwable> action) {
        return publicStage().whenCompleteAsync(action);
    }
    @Override
    public CompletionStage<List<T>> whenCompleteAsync(BiConsumer<? super List<T>, ? super Throwable> action,
            Executor executor) {
        return publicStage().whenCompleteAsync(action, executor);
    }
    @Override
    public <U> CompletionStage<U> handle(BiFunction<? super List<T>, Throwable, ? extends U> fn) {
        return publicStage().handle(fn);
    }
    @Override
    public <U> CompletionStage<U> handleAsync(BiFunction<? super List<T>, Throwable, ? extends U> fn) {
        return publicStage().handleAsync(fn);
    }
    @Override
    public <U> CompletionStage<U> handleAsync(BiFunction<? super List<T>, Throwable, ? extends U> fn, Executor executor) {
        return publicStage().handleAsync(fn, executor);
    }
    @Override
    public CompletableFuture<List<T>> toCompletableFuture() {
        // Exposes the delegate directly as a CompletableFuture, as the
        // CompletionStage contract requires.
        return publicStage().toCompletableFuture();
    }
}
| AsyncExecutionsImpl |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/interop/JDKProxyTest.java | {
"start": 2414,
"end": 3016
} | class ____ implements InvocationHandler {
Object _obj;
public ProxyUtil(Object o) {
_obj = o;
}
@Override
public Object invoke(Object proxy, Method m, Object[] args) throws Throwable {
Object result = null;
result = m.invoke(_obj, args);
return result;
}
}
@SuppressWarnings("unchecked")
T proxy = (T) Proxy.newProxyInstance(type.getClassLoader(), new Class[] { type },
new ProxyUtil(obj));
return proxy;
}
}
| ProxyUtil |
java | processing__processing4 | core/src/processing/core/PVector.java | {
"start": 3001,
"end": 24060
} | class ____ implements Serializable {
/**
*
* The x component of the vector. This field (variable) can be used to both
* get and set the value (see above example.)
*
*
* @webref pvector:field
* @usage web_application
* @webBrief The x component of the vector
*/
public float x;
/**
*
* The y component of the vector. This field (variable) can be used to both
* get and set the value (see above example.)
*
*
* @webref pvector:field
* @usage web_application
* @webBrief The y component of the vector
*/
public float y;
/**
*
* The z component of the vector. This field (variable) can be used to both
* get and set the value (see above example.)
*
*
* @webref pvector:field
* @usage web_application
* @webBrief The z component of the vector
*/
public float z;
/** Array so that this can be temporarily used in an array context */
transient protected float[] array;
/**
* Constructor for an empty vector: x, y, and z are set to 0.
*/
public PVector() {
}
/**
* Constructor for a 3D vector.
*
* @param x the x coordinate.
* @param y the y coordinate.
* @param z the z coordinate.
*/
public PVector(float x, float y, float z) {
this.x = x;
this.y = y;
this.z = z;
}
/**
* Constructor for a 2D vector: z coordinate is set to 0.
*/
public PVector(float x, float y) {
this.x = x;
this.y = y;
}
/**
*
* Sets the x, y, and z component of the vector using two or three separate
* variables, the data from a <b>PVector</b>, or the values from a float array.
*
*
* @webref pvector:method
* @param x the x component of the vector
* @param y the y component of the vector
* @param z the z component of the vector
* @webBrief Set the components of the vector
*/
public PVector set(float x, float y, float z) {
this.x = x;
this.y = y;
this.z = z;
return this;
}
/**
* @param x the x component of the vector
* @param y the y component of the vector
*/
public PVector set(float x, float y) {
this.x = x;
this.y = y;
this.z = 0;
return this;
}
/**
* @param v any variable of type PVector
*/
public PVector set(PVector v) {
x = v.x;
y = v.y;
z = v.z;
return this;
}
/**
* Set the x, y (and maybe z) coordinates using a float[] array as the source.
* @param source array to copy from
*/
public PVector set(float[] source) {
if (source.length >= 2) {
x = source[0];
y = source[1];
}
if (source.length >= 3) {
z = source[2];
} else {
z = 0;
}
return this;
}
/**
*
* Returns a new 2D unit vector with a random direction. If you pass in
* <b>this</b> as an argument, it will use the PApplet's random number
* generator.
*
* @webref pvector:method
* @usage web_application
* @return the random PVector
* @webBrief Make a new 2D unit vector with a random direction
* @see PVector#random3D()
*/
static public PVector random2D() {
return random2D(null, null);
}
/**
* Make a new 2D unit vector with a random direction
* using Processing's current random number generator
* @param parent current PApplet instance
* @return the random PVector
*/
static public PVector random2D(PApplet parent) {
return random2D(null, parent);
}
/**
* Set a 2D vector to a random unit vector with a random direction
* @param target the target vector (if null, a new vector will be created)
* @return the random PVector
*/
static public PVector random2D(PVector target) {
return random2D(target, null);
}
/**
* Make a new 2D unit vector with a random direction. Pass in the parent
* PApplet if you want randomSeed() to work (and be predictable). Or leave
* it null and be... random.
* @return the random PVector
*/
static public PVector random2D(PVector target, PApplet parent) {
return (parent == null) ?
fromAngle((float) (Math.random() * Math.PI*2), target) :
fromAngle(parent.random(PConstants.TAU), target);
}
/**
*
* Returns a new 3D unit vector with a random direction. If you pass in
* <b>this</b> as an argument, it will use the PApplet's random number
* generator.
*
* @webref pvector:method
* @usage web_application
* @return the random PVector
* @webBrief Make a new 3D unit vector with a random direction
* @see PVector#random2D()
*/
static public PVector random3D() {
return random3D(null, null);
}
/**
* Make a new 3D unit vector with a random direction
* using Processing's current random number generator
* @param parent current PApplet instance
* @return the random PVector
*/
static public PVector random3D(PApplet parent) {
return random3D(null, parent);
}
/**
* Set a 3D vector to a random unit vector with a random direction
* @param target the target vector (if null, a new vector will be created)
* @return the random PVector
*/
static public PVector random3D(PVector target) {
return random3D(target, null);
}
/**
* Make a new 3D unit vector with a random direction
* @return the random PVector
*/
static public PVector random3D(PVector target, PApplet parent) {
float angle;
float vz;
if (parent == null) {
angle = (float) (Math.random()*Math.PI*2);
vz = (float) (Math.random()*2-1);
} else {
angle = parent.random(PConstants.TWO_PI);
vz = parent.random(-1,1);
}
float vx = (float) (Math.sqrt(1-vz*vz)*Math.cos(angle));
float vy = (float) (Math.sqrt(1-vz*vz)*Math.sin(angle));
if (target == null) {
target = new PVector(vx, vy, vz);
//target.normalize(); // Should be unnecessary
} else {
target.set(vx,vy,vz);
}
return target;
}
/**
*
* Calculates and returns a new 2D unit vector from the specified angle value
* (in radians).
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Make a new 2D unit vector from an angle
* @param angle the angle in radians
* @return the new unit PVector
*/
static public PVector fromAngle(float angle) {
return fromAngle(angle,null);
}
/**
* Make a new 2D unit vector from an angle
*
* @param target the target vector (if null, a new vector will be created)
* @return the PVector
*/
static public PVector fromAngle(float angle, PVector target) {
if (target == null) {
target = new PVector((float)Math.cos(angle),(float)Math.sin(angle),0);
} else {
target.set((float)Math.cos(angle),(float)Math.sin(angle),0);
}
return target;
}
/**
*
* Copies the components of the vector and returns the result as a <b>PVector</b>.
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Get a copy of the vector
*/
public PVector copy() {
return new PVector(x, y, z);
}
@Deprecated
public PVector get() {
return copy();
}
/**
* @param target
*/
public float[] get(float[] target) {
if (target == null) {
return new float[] { x, y, z };
}
if (target.length >= 2) {
target[0] = x;
target[1] = y;
}
if (target.length >= 3) {
target[2] = z;
}
return target;
}
/**
*
* Calculates the magnitude (length) of the vector and returns the result
* as a float (this is simply the equation <em>sqrt(x*x + y*y + z*z)</em>.)
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Calculate the magnitude of the vector
* @return magnitude (length) of the vector
* @see PVector#magSq()
*/
public float mag() {
return (float) Math.sqrt(x*x + y*y + z*z);
}
/**
*
* Calculates the magnitude (length) of the vector, squared. This method is
* often used to improve performance since, unlike <b>mag()</b>, it does not
* require a <b>sqrt()</b> operation.
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Calculate the magnitude of the vector, squared
* @return squared magnitude of the vector
* @see PVector#mag()
*/
public float magSq() {
return (x*x + y*y + z*z);
}
/**
*
* Adds x, y, and z components to a vector, adds one vector to another, or adds
* two independent vectors together. The version of the method that adds two
* vectors together is a static method and returns a new <b>PVector</b>, the others act
* directly on the vector itself. See the examples for more context.
*
*
* @webref pvector:method
* @usage web_application
* @param v the vector to be added
* @webBrief Adds x, y, and z components to a vector, one vector to another, or
* two independent vectors
*/
public PVector add(PVector v) {
x += v.x;
y += v.y;
z += v.z;
return this;
}
/**
* @param x x component of the vector
* @param y y component of the vector
*/
public PVector add(float x, float y) {
this.x += x;
this.y += y;
return this;
}
/**
* @param z z component of the vector
*/
public PVector add(float x, float y, float z) {
this.x += x;
this.y += y;
this.z += z;
return this;
}
/**
* Add two vectors
* @param v1 a vector
* @param v2 another vector
*/
static public PVector add(PVector v1, PVector v2) {
return add(v1, v2, null);
}
/**
* Add two vectors into a target vector
* @param target the target vector (if null, a new vector will be created)
*/
static public PVector add(PVector v1, PVector v2, PVector target) {
if (target == null) {
target = new PVector(v1.x + v2.x,v1.y + v2.y, v1.z + v2.z);
} else {
target.set(v1.x + v2.x, v1.y + v2.y, v1.z + v2.z);
}
return target;
}
/**
*
* Subtracts x, y, and z components from a vector, subtracts one vector from
* another, or subtracts two independent vectors. The version of the method that
* substracts two vectors is a static method and returns a <b>PVector</b>, the others
* act directly on the vector. See the examples for more context. In all cases,
* the second vector (v2) is subtracted from the first (v1), resulting in v1-v2.
*
*
* @webref pvector:method
* @usage web_application
* @param v any variable of type PVector
* @webBrief Subtract x, y, and z components from a vector, one vector from
* another, or two independent vectors
*/
public PVector sub(PVector v) {
x -= v.x;
y -= v.y;
z -= v.z;
return this;
}
/**
* @param x the x component of the vector
* @param y the y component of the vector
*/
public PVector sub(float x, float y) {
this.x -= x;
this.y -= y;
return this;
}
/**
* @param z the z component of the vector
*/
public PVector sub(float x, float y, float z) {
this.x -= x;
this.y -= y;
this.z -= z;
return this;
}
/**
* Subtract one vector from another
* @param v1 the x, y, and z components of a PVector object
* @param v2 the x, y, and z components of a PVector object
*/
static public PVector sub(PVector v1, PVector v2) {
return sub(v1, v2, null);
}
/**
* Subtract one vector from another and store in another vector
* @param target PVector in which to store the result
*/
static public PVector sub(PVector v1, PVector v2, PVector target) {
if (target == null) {
target = new PVector(v1.x - v2.x, v1.y - v2.y, v1.z - v2.z);
} else {
target.set(v1.x - v2.x, v1.y - v2.y, v1.z - v2.z);
}
return target;
}
/**
*
* Multiplies a vector by a scalar. The version of the method that uses a float
* acts directly on the vector upon which it is called (as in the first example
* above). The versions that receive both a <b>PVector</b> and a float as arguments are
* static methods, and each returns a new <b>PVector</b> that is the result of the
* multiplication operation. Both examples above produce the same visual output.
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Multiply a vector by a scalar
* @param n the number to multiply with the vector
*/
public PVector mult(float n) {
x *= n;
y *= n;
z *= n;
return this;
}
/**
* @param v the vector to multiply by the scalar
*/
static public PVector mult(PVector v, float n) {
return mult(v, n, null);
}
/**
* Multiply a vector by a scalar, and write the result into a target PVector.
* @param target PVector in which to store the result
*/
static public PVector mult(PVector v, float n, PVector target) {
if (target == null) {
target = new PVector(v.x*n, v.y*n, v.z*n);
} else {
target.set(v.x*n, v.y*n, v.z*n);
}
return target;
}
/**
*
* Divides a vector by a scalar. The version of the method that uses a float
* acts directly on the vector upon which it is called (as in the first example
* above). The version that receives both a <b>PVector</b> and a <b>float</b> as arguments is
* a static methods, and returns a new <b>PVector</b> that is the result of the
* division operation. Both examples above produce the same visual output.
*
* @webref pvector:method
* @usage web_application
* @webBrief Divide a vector by a scalar
* @param n the number by which to divide the vector
*/
public PVector div(float n) {
x /= n;
y /= n;
z /= n;
return this;
}
/**
* Divide a vector by a scalar and return the result in a new vector.
* @param v the vector to divide by the scalar
* @return a new vector that is v1 / n
*/
static public PVector div(PVector v, float n) {
return div(v, n, null);
}
/**
* Divide a vector by a scalar and store the result in another vector.
* @param target PVector in which to store the result
*/
static public PVector div(PVector v, float n, PVector target) {
if (target == null) {
target = new PVector(v.x/n, v.y/n, v.z/n);
} else {
target.set(v.x/n, v.y/n, v.z/n);
}
return target;
}
/**
*
* Calculates the Euclidean distance between two points (considering a
* point as a vector object).
*
*
* @webref pvector:method
* @usage web_application
* @param v the x, y, and z coordinates of a PVector
* @webBrief Calculate the distance between two points
*/
public float dist(PVector v) {
float dx = x - v.x;
float dy = y - v.y;
float dz = z - v.z;
return (float) Math.sqrt(dx*dx + dy*dy + dz*dz);
}
/**
* @param v1 any variable of type PVector
* @param v2 any variable of type PVector
* @return the Euclidean distance between v1 and v2
*/
static public float dist(PVector v1, PVector v2) {
float dx = v1.x - v2.x;
float dy = v1.y - v2.y;
float dz = v1.z - v2.z;
return (float) Math.sqrt(dx*dx + dy*dy + dz*dz);
}
/**
*
* Calculates the dot product of two vectors.
*
*
* @webref pvector:method
* @usage web_application
* @param v any variable of type PVector
* @return the dot product
* @webBrief Calculate the dot product of two vectors
*/
public float dot(PVector v) {
return x*v.x + y*v.y + z*v.z;
}
/**
* @param x x component of the vector
* @param y y component of the vector
* @param z z component of the vector
*/
public float dot(float x, float y, float z) {
return this.x*x + this.y*y + this.z*z;
}
/**
* @param v1 any variable of type PVector
* @param v2 any variable of type PVector
*/
static public float dot(PVector v1, PVector v2) {
return v1.x*v2.x + v1.y*v2.y + v1.z*v2.z;
}
/**
*
* Calculates and returns a vector composed of the cross product between
* two vectors.
*
*
* @webref pvector:method
* @param v the vector to calculate the cross product
* @webBrief Calculate and return the cross product
*/
public PVector cross(PVector v) {
return cross(v, null);
}
/**
* @param v any variable of type PVector
* @param target PVector to store the result
*/
public PVector cross(PVector v, PVector target) {
float crossX = y * v.z - v.y * z;
float crossY = z * v.x - v.z * x;
float crossZ = x * v.y - v.x * y;
if (target == null) {
target = new PVector(crossX, crossY, crossZ);
} else {
target.set(crossX, crossY, crossZ);
}
return target;
}
/**
* @param v1 any variable of type PVector
* @param v2 any variable of type PVector
* @param target PVector to store the result
*/
static public PVector cross(PVector v1, PVector v2, PVector target) {
float crossX = v1.y * v2.z - v2.y * v1.z;
float crossY = v1.z * v2.x - v2.z * v1.x;
float crossZ = v1.x * v2.y - v2.x * v1.y;
if (target == null) {
target = new PVector(crossX, crossY, crossZ);
} else {
target.set(crossX, crossY, crossZ);
}
return target;
}
/**
*
* Normalize the vector to length 1 (make it a unit vector).
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Normalize the vector to a length of 1
*/
public PVector normalize() {
float m = mag();
if (m != 0 && m != 1) {
div(m);
}
return this;
}
/**
* @param target Set to null to create a new vector
* @return a new vector (if target was null), or target
*/
public PVector normalize(PVector target) {
if (target == null) {
target = new PVector();
}
float m = mag();
if (m > 0) {
target.set(x/m, y/m, z/m);
} else {
target.set(x, y, z);
}
return target;
}
/**
*
* Limit the magnitude of this vector to the value used for the <b>max</b> parameter.
*
*
* @webref pvector:method
* @usage web_application
* @param max the maximum magnitude for the vector
* @webBrief Limit the magnitude of the vector
*/
public PVector limit(float max) {
if (magSq() > max*max) {
normalize();
mult(max);
}
return this;
}
/**
*
* Set the magnitude of this vector to the value used for the <b>len</b> parameter.
*
*
* @webref pvector:method
* @usage web_application
* @param len the new length for this vector
* @webBrief Set the magnitude of the vector
*/
public PVector setMag(float len) {
normalize();
mult(len);
return this;
}
/**
* Sets the magnitude of this vector, storing the result in another vector.
* @param target Set to null to create a new vector
* @param len the new length for the new vector
* @return a new vector (if target was null), or target
*/
public PVector setMag(PVector target, float len) {
target = normalize(target);
target.mult(len);
return target;
}
/**
*
* Calculate the vector's direction, that is, the angle this vector makes
* with the positive X axis (only 2D vectors)
*
*
* @webref pvector:method
* @usage web_application
* @return the direction of the vector (angle with positive X axis)
* @webBrief Calculate the angle of rotation for this vector
* @see PVector#setHeading()
*/
public float heading() {
float angle = (float) Math.atan2(y, x);
return angle;
}
@Deprecated
public float heading2D() {
return heading();
}
/**
*
* Sets the angle this vector makes with the positive X axis (only 2D vectors)
* This is equivalent to changing the vector's direction to the given value.
*
* @webref pvector:method
* @usage web_application
* @param angle the direction of the resultant vector
* @return this vector, rotated to have the given direction
* @webBrief Set the direction for this vector
* @see PVector#heading()
*/
public PVector setHeading(float angle) {
float m = mag();
x = (float) (m * Math.cos(angle));
y = (float) (m * Math.sin(angle));
return this;
}
/**
*
* Rotate the vector by an angle (only 2D vectors), magnitude remains the same
*
*
* @webref pvector:method
* @usage web_application
* @webBrief Rotate the vector by an angle (2D only)
* @param theta the angle of rotation
*/
public PVector rotate(float theta) {
float temp = x;
// Might need to check for rounding errors like with angleBetween function?
x = x*PApplet.cos(theta) - y*PApplet.sin(theta);
y = temp*PApplet.sin(theta) + y*PApplet.cos(theta);
return this;
}
/**
*
* Calculates linear interpolation from one vector to another vector. (Just like
* regular <b>lerp()</b>, but for vectors.)<br />
* <br />
* Note that there is one <em>static</em> version of this method, and two
* <em>non-static</em> versions. The static version, <b>lerp(v1, v2, amt)</b> is
* given the two vectors to interpolate and returns a new PVector object. The
* static version is used by referencing the PVector | PVector |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/test/java/io/github/resilience4j/retry/RetryAutoConfigurationRxJavaTest.java | {
"start": 1807,
"end": 5651
class ____ {
    @Autowired
    RetryRegistry retryRegistry;
    @Autowired
    RetryProperties retryProperties;
    @Autowired
    RetryAspect retryAspect;
    @Autowired
    ReactiveRetryDummyService retryDummyService;
    @Autowired
    private TestRestTemplate restTemplate;
    /**
     * The test verifies that a Retry instance is created and configured properly when the
     * RetryReactiveDummyService is invoked and that the Retry logic is properly handled.
     * Expected values (max attempts, backoff intervals, aspect order) come from the
     * test's application.yml, which is not visible here.
     */
    @Test
    public void testRetryAutoConfigurationRxJava2() throws IOException {
        assertThat(retryRegistry).isNotNull();
        assertThat(retryProperties).isNotNull();
        // Snapshot event lists and metrics BEFORE exercising the service, so the
        // assertions below can be phrased as deltas (the shared context may already
        // contain events/metrics from other tests).
        RetryEventsEndpointResponse retryEventListBefore = getRetryEventsBody(
            "/actuator/retryevents");
        RetryEventsEndpointResponse retryEventsListOfCBefore = getRetryEventsBody(
            "/actuator/retryevents/" + BACKEND_C);
        Retry retry = retryRegistry.retry(BACKEND_C);
        assertThat(retry).isNotNull();
        Retry.Metrics metricsBefore = retry.getMetrics();
        // Presumably 'true' makes the dummy service fail, driving the retry path;
        // errors are only printed so the blocking subscribe does not throw.
        // TODO confirm against ReactiveRetryDummyService.
        retryDummyService.doSomethingFlowable(true).blockingSubscribe(String::toUpperCase,
            throwable -> System.out.println("Exception received:" + throwable.getMessage()));
        // Second call ('false') is the success path. (Original note mentioned
        // CircuitBreaker, but this test exercises Retry — likely a copy-paste.)
        retryDummyService.doSomethingFlowable(false).blockingSubscribe(String::toUpperCase,
            throwable -> System.out.println("Exception received:" + throwable.getMessage()));
        // expect retry is configured as defined in application.yml
        assertThat(retry.getRetryConfig().getMaxAttempts()).isEqualTo(3);
        assertThat(retry.getName()).isEqualTo(BACKEND_C);
        assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IOException())).isTrue();
        // expect retry-event actuator endpoint recorded both events:
        // the failing call contributes 3 events (retries + final failure).
        RetryEventsEndpointResponse retryEventList = getRetryEventsBody("/actuator/retryevents");
        assertThat(retryEventList.getRetryEvents())
            .hasSize(retryEventListBefore.getRetryEvents().size() + 3);
        retryEventList = getRetryEventsBody("/actuator/retryevents/" + BACKEND_C);
        assertThat(retryEventList.getRetryEvents())
            .hasSize(retryEventsListOfCBefore.getRetryEvents().size() + 3);
        assertThat(
            retry.getRetryConfig().getExceptionPredicate().test(new IllegalArgumentException()))
            .isTrue();
        assertThat(retry.getRetryConfig().getExceptionPredicate().test(new IgnoredException()))
            .isFalse();
        // Backoff intervals for attempts 1..3 — values (100, 200, 222 ms) are
        // whatever the yml configures; TODO confirm they match application.yml.
        IntervalBiFunction<?> exponentialBackoff = retry.getRetryConfig().getIntervalBiFunction();
        assertThat(exponentialBackoff.apply(1,null)).isEqualTo(100);
        assertThat(exponentialBackoff.apply(2,null)).isEqualTo(200);
        assertThat(exponentialBackoff.apply(3,null)).isEqualTo(222);
        assertThat(retryAspect.getOrder()).isEqualTo(399);
        // Metrics deltas: only retried-then-failed and first-try successes should move.
        assertThat(retry.getMetrics().getNumberOfFailedCallsWithoutRetryAttempt())
            .isEqualTo(metricsBefore.getNumberOfFailedCallsWithoutRetryAttempt());
        assertThat(retry.getMetrics().getNumberOfFailedCallsWithRetryAttempt())
            .isGreaterThanOrEqualTo(metricsBefore.getNumberOfFailedCallsWithRetryAttempt());
        assertThat(retry.getMetrics().getNumberOfSuccessfulCallsWithoutRetryAttempt())
            .isGreaterThanOrEqualTo(metricsBefore.getNumberOfSuccessfulCallsWithoutRetryAttempt());
        assertThat(retry.getMetrics().getNumberOfSuccessfulCallsWithRetryAttempt())
            .isEqualTo(metricsBefore.getNumberOfSuccessfulCallsWithRetryAttempt());
    }
    // Convenience: GET the given actuator path and deserialize the event list.
    private RetryEventsEndpointResponse getRetryEventsBody(String s) {
        return restTemplate.getForEntity(s, RetryEventsEndpointResponse.class).getBody();
    }
}
| RetryAutoConfigurationRxJavaTest |
java | apache__camel | components/camel-aws/camel-aws2-step-functions/src/test/java/org/apache/camel/component/aws2/stepfunctions/StepFunctions2ClientFactoryTest.java | {
"start": 1556,
"end": 3570
} | class ____ {
@Test
public void getStandardSfnClientDefault() {
StepFunctions2Configuration sfn2Configuration = new StepFunctions2Configuration();
StepFunctions2InternalClient sfnClient = StepFunctions2ClientFactory.getSfnClient(sfn2Configuration);
assertTrue(sfnClient instanceof StepFunctions2ClientStandardImpl);
}
@Test
public void getStandardSfnClient() {
StepFunctions2Configuration sfn2Configuration = new StepFunctions2Configuration();
sfn2Configuration.setUseDefaultCredentialsProvider(false);
StepFunctions2InternalClient sfnClient = StepFunctions2ClientFactory.getSfnClient(sfn2Configuration);
assertTrue(sfnClient instanceof StepFunctions2ClientStandardImpl);
}
@Test
public void getIAMOptimizedSfnClient() {
StepFunctions2Configuration sfn2Configuration = new StepFunctions2Configuration();
sfn2Configuration.setUseDefaultCredentialsProvider(true);
StepFunctions2InternalClient sfnClient = StepFunctions2ClientFactory.getSfnClient(sfn2Configuration);
assertTrue(sfnClient instanceof StepFunctions2ClientIAMOptimizedImpl);
}
@Test
public void getIAMProfileOptimizedSfnClient() {
StepFunctions2Configuration sfn2Configuration = new StepFunctions2Configuration();
sfn2Configuration.setUseProfileCredentialsProvider(true);
StepFunctions2InternalClient sfnClient = StepFunctions2ClientFactory.getSfnClient(sfn2Configuration);
assertTrue(sfnClient instanceof StepFunctions2ClientIAMProfileOptimizedImpl);
}
@Test
public void getSessionTokenSfnClient() {
StepFunctions2Configuration sfn2Configuration = new StepFunctions2Configuration();
sfn2Configuration.setUseSessionCredentials(true);
StepFunctions2InternalClient sfnClient = StepFunctions2ClientFactory.getSfnClient(sfn2Configuration);
assertTrue(sfnClient instanceof StepFunctions2ClientSessionTokenImpl);
}
}
| StepFunctions2ClientFactoryTest |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoFilterWhenTest.java | {
"start": 1183,
"end": 11416
} | class ____ {
@Test
public void normalFiltered() {
StepVerifier.withVirtualTime(() -> Mono.just(1)
.filterWhen(v -> Mono.just(v % 2 == 0)
.delayElement(Duration.ofMillis(100))))
.expectSubscription()
.expectNoEvent(Duration.ofMillis(100))
.verifyComplete();
}
@Test
public void normalNotFiltered() {
StepVerifier.withVirtualTime(() -> Mono.just(2)
.filterWhen(v -> Mono.just(v % 2 == 0)
.delayElement(Duration.ofMillis(100))))
.expectSubscription()
.expectNoEvent(Duration.ofMillis(100))
.expectNext(2)
.verifyComplete();
}
@Test
public void normalSyncFiltered() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> Mono.just(v % 2 == 0).hide()))
.verifyComplete();
}
@Test
public void normalSyncNotFiltered() {
StepVerifier.create(Mono.just(2)
.filterWhen(v -> Mono.just(v % 2 == 0).hide()))
.expectNext(2)
.verifyComplete();
}
@Test
public void normalSyncFusedFiltered() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> Mono.just(v % 2 == 0)))
.verifyComplete();
}
@Test
public void normalSyncFusedNotFiltered() {
StepVerifier.create(Mono.just(2)
.filterWhen(v -> Mono.just(v % 2 == 0)))
.expectNext(2)
.verifyComplete();
}
@Test
public void allEmpty() {
StepVerifier.create(Mono.just(2)
.filterWhen(v -> Mono.<Boolean>empty().hide()))
.verifyComplete();
}
@Test
public void allEmptyFused() {
StepVerifier.create(Mono.just(2)
.filterWhen(v -> Mono.empty()))
.verifyComplete();
}
@Test
public void empty() {
StepVerifier.create(Mono.<Integer>empty()
.filterWhen(v -> Mono.just(true)))
.verifyComplete();
}
@Test
public void emptyBackpressured() {
StepVerifier.create(Mono.<Integer>empty()
.filterWhen(v -> Mono.just(true)), 0L)
.verifyComplete();
}
@Test
public void error() {
StepVerifier.create(Mono.<Integer>error(new IllegalStateException())
.filterWhen(v -> Mono.just(true)))
.verifyError(IllegalStateException.class);
}
@Test
public void errorBackpressured() {
StepVerifier.create(Mono.<Integer>error(new IllegalStateException())
.filterWhen(v -> Mono.just(true)), 0L)
.verifyError(IllegalStateException.class);
}
@Test
public void backpressureExactlyOne() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> Mono.just(true)), 1L)
.expectNext(1)
.verifyComplete();
}
@Test
public void oneAndErrorInner() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> s -> {
s.onSubscribe(Operators.emptySubscription());
s.onNext(true);
s.onError(new IllegalStateException());
}))
.expectNext(1)
.expectComplete()
.verifyThenAssertThat()
.hasDroppedErrorsSatisfying(
c -> assertThat(c)
.hasSize(1)
.element(0).isInstanceOf(IllegalStateException.class)
);
}
@Test
public void predicateThrows() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> { throw new IllegalStateException(); }))
.verifyError(IllegalStateException.class);
}
@Test
public void predicateNull() {
StepVerifier.create(Mono.just(1).filterWhen(v -> null))
.verifyError(NullPointerException.class);
}
@Test
public void predicateError() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> Mono.<Boolean>error(new IllegalStateException()).hide()))
.verifyError(IllegalStateException.class);
}
@Test
public void predicateErrorFused() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> Mono.fromCallable(() -> { throw new IllegalStateException(); })))
.verifyError(IllegalStateException.class);
}
@Test
public void take1Cancel() {
AtomicLong onNextCount = new AtomicLong();
AtomicReference<SignalType> endSignal = new AtomicReference<>();
BaseSubscriber<Object> bs = new BaseSubscriber<Object>() {
@Override
protected void hookOnSubscribe(Subscription subscription) {
requestUnbounded();
}
@Override
public void hookOnNext(Object t) {
onNextCount.incrementAndGet();
cancel();
onComplete();
}
@Override
protected void hookFinally(SignalType type) {
endSignal.set(type);
}
};
Mono.just(1)
.filterWhen(v -> Mono.just(true).hide())
.subscribe(bs);
assertThat(onNextCount).hasValue(1);
assertThat(endSignal).hasValue(SignalType.CANCEL);
}
@Test
public void take1CancelBackpressured() {
AtomicLong onNextCount = new AtomicLong();
AtomicReference<SignalType> endSignal = new AtomicReference<>();
BaseSubscriber<Object> bs = new BaseSubscriber<Object>() {
@Override
protected void hookOnSubscribe(Subscription subscription) {
request(1);
}
@Override
public void hookOnNext(Object t) {
onNextCount.incrementAndGet();
cancel();
onComplete();
}
@Override
protected void hookFinally(SignalType type) {
endSignal.set(type);
}
};
Mono.just(1)
.filterWhen(v -> Mono.just(true).hide())
.subscribe(bs);
assertThat(onNextCount).hasValue(1);
assertThat(endSignal).hasValue(SignalType.CANCEL);
}
@Test
public void cancel() {
final Sinks.Many<Boolean> pp = Sinks.many().multicast().onBackpressureBuffer();
StepVerifier.create(Mono.just(1)
.filterWhen(v -> pp.asFlux()))
.thenCancel();
assertThat(pp.currentSubscriberCount()).as("pp has subscriber").isZero();
}
@Test
public void innerFluxCancelled() {
AtomicInteger cancelCount = new AtomicInteger();
StepVerifier.create(Mono.just(1)
.filterWhen(v -> Flux.just(true, false, false)
.doOnCancel(cancelCount::incrementAndGet)))
.expectNext(1)
.verifyComplete();
assertThat(cancelCount).hasValue(1);
}
@Test
public void innerFluxOnlyConsidersFirstValue() {
StepVerifier.create(Mono.just(1)
.filterWhen(v -> Flux.just(false, true, true)))
.verifyComplete();
}
@Test
public void innerMonoNotCancelled() {
AtomicInteger cancelCount = new AtomicInteger();
StepVerifier.create(Mono.just(3)
.filterWhen(v -> Mono.just(true)
.doOnCancel(cancelCount::incrementAndGet)))
.expectNext(3)
.verifyComplete();
assertThat(cancelCount).hasValue(0);
}
@Test
public void scanTerminatedOnlyTrueIfFilterTerminated() {
AtomicReference<Subscriber> subscriber = new AtomicReference<>();
TestPublisher<Boolean> filter = TestPublisher.create();
new MonoFilterWhen<>(new Mono<Integer>() {
@Override
public void subscribe(CoreSubscriber<? super Integer> actual) {
subscriber.set(actual);
//NON-EMPTY SOURCE WILL TRIGGER FILTER SUBSCRIPTION
actual.onNext(2);
actual.onComplete();
}
}, w -> filter)
.subscribe();
assertThat(subscriber.get()).isNotNull()
.isInstanceOf(Scannable.class);
Boolean terminated = ((Scannable) subscriber.get()).scan(Scannable.Attr.TERMINATED);
assertThat(terminated).isFalse();
filter.emit(Boolean.TRUE);
terminated = ((Scannable) subscriber.get()).scan(Scannable.Attr.TERMINATED);
assertThat(terminated).isTrue();
}
@Test
public void scanOperator(){
MonoFilterWhen<Integer> test = new MonoFilterWhen<>(Mono.just(1), null);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanSubscriber() {
CoreSubscriber<String> actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
MonoFilterWhen.MonoFilterWhenMain<String>
test = new MonoFilterWhen.MonoFilterWhenMain<>(
actual, s -> Mono.just(false));
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(0);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
//TERMINATED IS COVERED BY TEST ABOVE
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
@Test
public void scanFilterWhenInner() {
CoreSubscriber<String> actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
MonoFilterWhen.MonoFilterWhenMain<String>
main = new MonoFilterWhen.MonoFilterWhenMain<>(
actual, s -> Mono.just(false));
MonoFilterWhen.FilterWhenInner<String> test =
new MonoFilterWhen.FilterWhenInner<>(main, true, null);
Subscription innerSubscription = Operators.emptySubscription();
test.onSubscribe(innerSubscription);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(main);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(innerSubscription);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(0);
assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(1L);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.onError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.REQUESTED_FROM_DOWNSTREAM)).isEqualTo(0L);
}
}
| MonoFilterWhenTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/models/stream/StreamEntryDeletionResult.java | {
"start": 583,
"end": 2642
} | enum ____ {
UNKNOWN(-2),
/**
* The stream entry ID was not found in the stream.
*/
NOT_FOUND(-1),
/**
* The entry was successfully deleted from the stream.
*/
DELETED(1),
/**
* The entry was not deleted due to one of the following reasons:
* <ul>
* <li>For XDELEX: The entry was not acknowledged by any consumer group</li>
* <li>For XACKDEL: The entry still has pending references in other consumer groups</li>
* </ul>
*/
NOT_DELETED_UNACKNOWLEDGED_OR_STILL_REFERENCED(2);
private final int code;
StreamEntryDeletionResult(int code) {
this.code = code;
}
/**
* Returns the numeric code associated with this result.
*
* @return the numeric code.
*/
public int getCode() {
return code;
}
/**
* Create a {@link StreamEntryDeletionResult} from its numeric code.
*
* @param code the numeric code.
* @return the {@link StreamEntryDeletionResult}.
* @throws IllegalArgumentException if the code is unknown.
*/
public static StreamEntryDeletionResult fromCode(int code) {
switch (code) {
case -1:
return NOT_FOUND;
case 1:
return DELETED;
case 2:
return NOT_DELETED_UNACKNOWLEDGED_OR_STILL_REFERENCED;
default:
return UNKNOWN;
}
}
/**
* Create a {@link StreamEntryDeletionResult} from a {@link Long} value.
*
* @param value the Long value, may be {@code null}.
* @return the {@link StreamEntryDeletionResult}, or {@code null} if the input is {@code null}.
* @throws IllegalArgumentException if the code is unknown.
*/
public static StreamEntryDeletionResult fromLong(Long value) {
if (value == null) {
return null;
}
return fromCode(value.intValue());
}
@Override
public String toString() {
return name() + "(" + code + ")";
}
}
| StreamEntryDeletionResult |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/tofix/ParsingContextExtTypeId2747Test.java | {
"start": 1075,
"end": 2167
} | class ____ extends ValueDeserializer<Location>
{
@Override
public Location deserialize(JsonParser p, DeserializationContext ctxt)
{
p.skipChildren();
return new Location(getCurrentLocationAsString(p));
}
}
static String getCurrentLocationAsString(JsonParser p)
{
// This suffices to give actual path
return p.streamReadContext().pathAsPointer().toString();
}
// [databind#2747]
@JacksonTestFailureExpected
@Test
void locationAccessWithExtTypeId() throws Exception {
ObjectReader objectReader = newJsonMapper().readerFor(Wrapper.class);
Wrapper wrapper = objectReader.readValue("{" +
"\"type\":\"location\"," +
"\"wrapped\": 1" +
"}");
// expecting wrapper.wrapped.value == "wrapped" but is "wrapped[1]"
// due to way `ExternalTypeHandler` exposes value as if "wrapper-array" was used for
// type id, value
assertEquals("/wrapped", ((Location) wrapper.wrapped).value);
}
}
| LocationDeserializer |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleCalendarEndpointBuilderFactory.java | {
"start": 40951,
"end": 43568
} | interface ____ extends EndpointProducerBuilder {
default GoogleCalendarEndpointProducerBuilder basic() {
return (GoogleCalendarEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedGoogleCalendarEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedGoogleCalendarEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
/**
* Builder for endpoint for the Google Calendar component.
*/
public | AdvancedGoogleCalendarEndpointProducerBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/orphan/one2one/fk/bidirectional/multilevelcascade/DeleteMultiLevelOrphansTest.java | {
"start": 934,
"end": 8783
} | class ____ {
@BeforeEach
public void createData(SessionFactoryScope scope) {
Preisregelung preisregelung = new Preisregelung();
Tranchenmodell tranchenmodell = new Tranchenmodell();
X x = new X();
Tranche tranche1 = new Tranche();
Y y = new Y();
Tranche tranche2 = new Tranche();
preisregelung.setTranchenmodell( tranchenmodell );
tranchenmodell.setPreisregelung( preisregelung );
tranchenmodell.setX( x );
x.setTranchenmodell( tranchenmodell );
tranchenmodell.getTranchen().add( tranche1 );
tranche1.setTranchenmodell( tranchenmodell );
tranchenmodell.getTranchen().add( tranche2 );
tranche2.setTranchenmodell( tranchenmodell );
tranche1.setY( y );
y.setTranche( tranche1 );
scope.inTransaction(
session ->
session.persist( preisregelung )
);
}
@AfterEach
public void cleanupData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
@JiraKey(value = "HHH-9091")
public void testDirectAssociationOrphanedWhileManaged(SessionFactoryScope scope) {
Preisregelung p = scope.fromTransaction(
session -> {
List results = session.createQuery( "from Tranchenmodell" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Preisregelung" ).list();
assertEquals( 1, results.size() );
Preisregelung preisregelung = (Preisregelung) results.get( 0 );
Tranchenmodell tranchenmodell = preisregelung.getTranchenmodell();
assertNotNull( tranchenmodell );
assertNotNull( tranchenmodell.getX() );
assertEquals( 2, tranchenmodell.getTranchen().size() );
assertNotNull( tranchenmodell.getTranchen().get( 0 ).getY() );
preisregelung.setTranchenmodell( null );
return preisregelung;
}
);
scope.inTransaction(
session -> {
Preisregelung preisregelung = session.get( Preisregelung.class, p.getId() );
assertNull( preisregelung.getTranchenmodell() );
List results = session.createQuery( "from Tranchenmodell" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Tranche" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from X" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Y" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Preisregelung" ).list();
assertEquals( 1, results.size() );
}
);
}
@Test
@JiraKey(value = "HHH-9091")
public void testReplacedDirectAssociationWhileManaged(SessionFactoryScope scope) {
// Create a new Tranchenmodell with new direct and nested associations
Tranchenmodell tranchenmodellNew = new Tranchenmodell();
X xNew = new X();
tranchenmodellNew.setX( xNew );
xNew.setTranchenmodell( tranchenmodellNew );
Tranche trancheNew = new Tranche();
tranchenmodellNew.getTranchen().add( trancheNew );
trancheNew.setTranchenmodell( tranchenmodellNew );
Y yNew = new Y();
trancheNew.setY( yNew );
yNew.setTranche( trancheNew );
scope.inTransaction(
session -> {
List results = session.createQuery( "from Tranchenmodell" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Preisregelung" ).list();
assertEquals( 1, results.size() );
Preisregelung preisregelung = (Preisregelung) results.get( 0 );
Tranchenmodell tranchenmodell = preisregelung.getTranchenmodell();
assertNotNull( tranchenmodell );
assertNotNull( tranchenmodell.getX() );
assertEquals( 2, tranchenmodell.getTranchen().size() );
assertNotNull( tranchenmodell.getTranchen().get( 0 ).getY() );
// Replace with a new Tranchenmodell instance containing new direct and nested associations
preisregelung.setTranchenmodell( tranchenmodellNew );
tranchenmodellNew.setPreisregelung( preisregelung );
}
);
Tranchenmodell t = scope.fromTransaction(
session -> {
List results = session.createQuery( "from Tranche" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Tranchenmodell" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from X" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Y" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Preisregelung" ).list();
assertEquals( 1, results.size() );
Preisregelung preisregelung = (Preisregelung) results.get( 0 );
Tranchenmodell tranchenmodell = preisregelung.getTranchenmodell();
assertNotNull( tranchenmodell );
assertEquals( tranchenmodellNew.getId(), tranchenmodell.getId() );
assertNotNull( tranchenmodell.getX() );
assertEquals( xNew.getId(), tranchenmodell.getX().getId() );
assertEquals( 1, tranchenmodell.getTranchen().size() );
assertEquals( trancheNew.getId(), tranchenmodell.getTranchen().get( 0 ).getId() );
assertEquals( yNew.getId(), tranchenmodell.getTranchen().get( 0 ).getY().getId() );
// Replace with a new Tranchenmodell instance with no associations
Tranchenmodell tr = new Tranchenmodell();
preisregelung.setTranchenmodell( tr );
tr.setPreisregelung( preisregelung );
return tr;
}
);
scope.inTransaction(
session -> {
List results = session.createQuery( "from Tranchenmodell" ).list();
assertEquals( 1, results.size() );
Tranchenmodell tranchenmodell = (Tranchenmodell) results.get( 0 );
assertEquals( t.getId(), tranchenmodell.getId() );
results = session.createQuery( "from Preisregelung" ).list();
assertEquals( 1, results.size() );
Preisregelung preisregelung = (Preisregelung) results.get( 0 );
assertEquals( tranchenmodell, preisregelung.getTranchenmodell() );
results = session.createQuery( "from Tranche" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from X" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Y" ).list();
assertEquals( 0, results.size() );
}
);
}
@Test
@JiraKey(value = "HHH-9091")
public void testDirectAndNestedAssociationsOrphanedWhileManaged(SessionFactoryScope scope) {
Preisregelung p = scope.fromTransaction(
session -> {
List results = session.createQuery( "from Tranchenmodell" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Preisregelung" ).list();
assertEquals( 1, results.size() );
Preisregelung preisregelung = (Preisregelung) results.get( 0 );
Tranchenmodell tranchenmodell = preisregelung.getTranchenmodell();
assertNotNull( tranchenmodell );
assertNotNull( tranchenmodell.getX() );
assertEquals( 2, tranchenmodell.getTranchen().size() );
assertNotNull( tranchenmodell.getTranchen().get( 0 ).getY() );
preisregelung.setTranchenmodell( null );
tranchenmodell.setX( null );
tranchenmodell.getTranchen().get( 0 ).setY( null );
return preisregelung;
}
);
scope.inTransaction(
session -> {
Preisregelung preisregelung = (Preisregelung) session.get(
Preisregelung.class,
p.getId()
);
assertNull( preisregelung.getTranchenmodell() );
List results = session.createQuery( "from Tranchenmodell" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Tranche" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from X" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Y" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Preisregelung" ).list();
assertEquals( 1, results.size() );
}
);
}
}
| DeleteMultiLevelOrphansTest |
java | elastic__elasticsearch | x-pack/plugin/gpu/src/main/java/org/elasticsearch/xpack/gpu/GPUPlugin.java | {
"start": 1285,
"end": 1382
} | enum ____ the tri-state value of the `index.vectors.indexing.use_gpu` setting.
*/
public | for |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/TestSubjectGenerator.java | {
"start": 1024,
"end": 1112
} | interface ____<T extends @Nullable Object> {
T createTestSubject();
}
| TestSubjectGenerator |
java | square__retrofit | retrofit-adapters/guava/src/test/java/retrofit2/adapter/guava/GuavaCallAdapterFactoryTest.java | {
"start": 1139,
"end": 4272
} | class ____ {
private static final Annotation[] NO_ANNOTATIONS = new Annotation[0];
@Rule public final MockWebServer server = new MockWebServer();
private final CallAdapter.Factory factory = GuavaCallAdapterFactory.create();
private Retrofit retrofit;
@Before
public void setUp() {
retrofit =
new Retrofit.Builder()
.baseUrl(server.url("/"))
.addConverterFactory(new StringConverterFactory())
.addCallAdapterFactory(factory)
.build();
}
@Test
public void responseType() {
Type bodyClass = new TypeToken<ListenableFuture<String>>() {}.getType();
assertThat(factory.get(bodyClass, NO_ANNOTATIONS, retrofit).responseType())
.isEqualTo(String.class);
Type bodyWildcard = new TypeToken<ListenableFuture<? extends String>>() {}.getType();
assertThat(factory.get(bodyWildcard, NO_ANNOTATIONS, retrofit).responseType())
.isEqualTo(String.class);
Type bodyGeneric = new TypeToken<ListenableFuture<List<String>>>() {}.getType();
assertThat(factory.get(bodyGeneric, NO_ANNOTATIONS, retrofit).responseType())
.isEqualTo(new TypeToken<List<String>>() {}.getType());
Type responseClass = new TypeToken<ListenableFuture<Response<String>>>() {}.getType();
assertThat(factory.get(responseClass, NO_ANNOTATIONS, retrofit).responseType())
.isEqualTo(String.class);
Type responseWildcard =
new TypeToken<ListenableFuture<Response<? extends String>>>() {}.getType();
assertThat(factory.get(responseWildcard, NO_ANNOTATIONS, retrofit).responseType())
.isEqualTo(String.class);
Type resultClass = new TypeToken<ListenableFuture<Response<String>>>() {}.getType();
assertThat(factory.get(resultClass, NO_ANNOTATIONS, retrofit).responseType())
.isEqualTo(String.class);
Type resultWildcard =
new TypeToken<ListenableFuture<Response<? extends String>>>() {}.getType();
assertThat(factory.get(resultWildcard, NO_ANNOTATIONS, retrofit).responseType())
.isEqualTo(String.class);
}
@Test
public void nonListenableFutureReturnsNull() {
CallAdapter<?, ?> adapter = factory.get(String.class, NO_ANNOTATIONS, retrofit);
assertThat(adapter).isNull();
}
@Test
public void rawTypeThrows() {
Type observableType = new TypeToken<ListenableFuture>() {}.getType();
try {
factory.get(observableType, NO_ANNOTATIONS, retrofit);
fail();
} catch (IllegalStateException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo(
"ListenableFuture return type must be parameterized as ListenableFuture<Foo> or ListenableFuture<? extends Foo>");
}
}
@Test
public void rawResponseTypeThrows() {
Type observableType = new TypeToken<ListenableFuture<Response>>() {}.getType();
try {
factory.get(observableType, NO_ANNOTATIONS, retrofit);
fail();
} catch (IllegalStateException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo("Response must be parameterized as Response<Foo> or Response<? extends Foo>");
}
}
}
| GuavaCallAdapterFactoryTest |
java | apache__camel | components/camel-json-validator/src/generated/java/org/apache/camel/component/jsonvalidator/JsonValidatorComponentConfigurer.java | {
"start": 740,
"end": 3373
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
JsonValidatorComponent target = (JsonValidatorComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "contentcache":
case "contentCache": target.setContentCache(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "objectmapper":
case "objectMapper": target.setObjectMapper(property(camelContext, java.lang.String.class, value)); return true;
case "usedefaultobjectmapper":
case "useDefaultObjectMapper": target.setUseDefaultObjectMapper(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "contentcache":
case "contentCache": return boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "objectmapper":
case "objectMapper": return java.lang.String.class;
case "usedefaultobjectmapper":
case "useDefaultObjectMapper": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
JsonValidatorComponent target = (JsonValidatorComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "contentcache":
case "contentCache": return target.isContentCache();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "objectmapper":
case "objectMapper": return target.getObjectMapper();
case "usedefaultobjectmapper":
case "useDefaultObjectMapper": return target.isUseDefaultObjectMapper();
default: return null;
}
}
}
| JsonValidatorComponentConfigurer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/JodaConstructorsTest.java | {
"start": 5788,
"end": 6138
} | class ____ {
private static final Duration ONE_MILLI = new Duration(Long.valueOf(1L));
}
""")
.doTest();
}
@Test
public void instantConstructor() {
helper
.addSourceLines(
"TestClass.java",
"""
import org.joda.time.Instant;
public | TestClass |
java | apache__avro | lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/JSpecifyNullSafeAnnotationsFieldsTest.java | {
"start": 9569,
"end": 21751
} | class ____ extends org.apache.avro.specific.SpecificRecordBuilderBase<JSpecifyNullSafeAnnotationsFieldsTest>
implements org.apache.avro.data.RecordBuilder<JSpecifyNullSafeAnnotationsFieldsTest> {
private java.lang.String name;
private java.lang.String nullable_name;
private int favorite_number;
private java.lang.Integer nullable_favorite_number;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$, MODEL$);
}
/**
* Creates a Builder by copying an existing Builder.
* @param other The existing Builder to copy.
*/
private Builder(avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder other) {
super(other);
if (isValidValue(fields()[0], other.name)) {
this.name = data().deepCopy(fields()[0].schema(), other.name);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.nullable_name)) {
this.nullable_name = data().deepCopy(fields()[1].schema(), other.nullable_name);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.favorite_number)) {
this.favorite_number = data().deepCopy(fields()[2].schema(), other.favorite_number);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.nullable_favorite_number)) {
this.nullable_favorite_number = data().deepCopy(fields()[3].schema(), other.nullable_favorite_number);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
}
/**
* Creates a Builder by copying an existing JSpecifyNullSafeAnnotationsFieldsTest instance
* @param other The existing instance to copy.
*/
private Builder(avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest other) {
super(SCHEMA$, MODEL$);
if (isValidValue(fields()[0], other.name)) {
this.name = data().deepCopy(fields()[0].schema(), other.name);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.nullable_name)) {
this.nullable_name = data().deepCopy(fields()[1].schema(), other.nullable_name);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.favorite_number)) {
this.favorite_number = data().deepCopy(fields()[2].schema(), other.favorite_number);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.nullable_favorite_number)) {
this.nullable_favorite_number = data().deepCopy(fields()[3].schema(), other.nullable_favorite_number);
fieldSetFlags()[3] = true;
}
}
/**
* Gets the value of the 'name' field.
* @return The value.
*/
public java.lang.String getName() {
return name;
}
/**
* Sets the value of the 'name' field.
* @param value The value of 'name'.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder setName(@org.jspecify.annotations.NonNull java.lang.String value) {
validate(fields()[0], value);
this.name = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'name' field has been set.
* @return True if the 'name' field has been set, false otherwise.
*/
public boolean hasName() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'name' field.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder clearName() {
name = null;
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'nullable_name' field.
* @return The value.
*/
public java.lang.String getNullableName() {
return nullable_name;
}
/**
* Sets the value of the 'nullable_name' field.
* @param value The value of 'nullable_name'.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder setNullableName(@org.jspecify.annotations.Nullable java.lang.String value) {
validate(fields()[1], value);
this.nullable_name = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'nullable_name' field has been set.
* @return True if the 'nullable_name' field has been set, false otherwise.
*/
public boolean hasNullableName() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'nullable_name' field.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder clearNullableName() {
nullable_name = null;
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'favorite_number' field.
* @return The value.
*/
public int getFavoriteNumber() {
return favorite_number;
}
/**
* Sets the value of the 'favorite_number' field.
* @param value The value of 'favorite_number'.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder setFavoriteNumber(@org.jspecify.annotations.NonNull int value) {
validate(fields()[2], value);
this.favorite_number = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'favorite_number' field has been set.
* @return True if the 'favorite_number' field has been set, false otherwise.
*/
public boolean hasFavoriteNumber() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'favorite_number' field.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder clearFavoriteNumber() {
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'nullable_favorite_number' field.
* @return The value.
*/
public java.lang.Integer getNullableFavoriteNumber() {
return nullable_favorite_number;
}
/**
* Sets the value of the 'nullable_favorite_number' field.
* @param value The value of 'nullable_favorite_number'.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder setNullableFavoriteNumber(@org.jspecify.annotations.Nullable java.lang.Integer value) {
validate(fields()[3], value);
this.nullable_favorite_number = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'nullable_favorite_number' field has been set.
* @return True if the 'nullable_favorite_number' field has been set, false otherwise.
*/
public boolean hasNullableFavoriteNumber() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'nullable_favorite_number' field.
* @return This builder.
*/
public avro.examples.baseball.JSpecifyNullSafeAnnotationsFieldsTest.Builder clearNullableFavoriteNumber() {
nullable_favorite_number = null;
fieldSetFlags()[3] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public JSpecifyNullSafeAnnotationsFieldsTest build() {
try {
JSpecifyNullSafeAnnotationsFieldsTest record = new JSpecifyNullSafeAnnotationsFieldsTest();
record.name = fieldSetFlags()[0] ? this.name : (java.lang.String) defaultValue(fields()[0]);
record.nullable_name = fieldSetFlags()[1] ? this.nullable_name : (java.lang.String) defaultValue(fields()[1]);
record.favorite_number = fieldSetFlags()[2] ? this.favorite_number : (java.lang.Integer) defaultValue(fields()[2]);
record.nullable_favorite_number = fieldSetFlags()[3] ? this.nullable_favorite_number : (java.lang.Integer) defaultValue(fields()[3]);
return record;
} catch (org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumWriter<JSpecifyNullSafeAnnotationsFieldsTest>
WRITER$ = (org.apache.avro.io.DatumWriter<JSpecifyNullSafeAnnotationsFieldsTest>)MODEL$.createDatumWriter(SCHEMA$);
@Override public void writeExternal(java.io.ObjectOutput out)
throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumReader<JSpecifyNullSafeAnnotationsFieldsTest>
READER$ = (org.apache.avro.io.DatumReader<JSpecifyNullSafeAnnotationsFieldsTest>)MODEL$.createDatumReader(SCHEMA$);
@Override public void readExternal(java.io.ObjectInput in)
throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
@Override protected boolean hasCustomCoders() { return true; }
@Override public void customEncode(org.apache.avro.io.Encoder out)
throws java.io.IOException
{
out.writeString(this.name);
if (this.nullable_name == null) {
out.writeIndex(1);
out.writeNull();
} else {
out.writeIndex(0);
out.writeString(this.nullable_name);
}
out.writeInt(this.favorite_number);
if (this.nullable_favorite_number == null) {
out.writeIndex(1);
out.writeNull();
} else {
out.writeIndex(0);
out.writeInt(this.nullable_favorite_number);
}
}
@Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
throws java.io.IOException
{
org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
if (fieldOrder == null) {
this.name = in.readString();
if (in.readIndex() != 0) {
in.readNull();
this.nullable_name = null;
} else {
this.nullable_name = in.readString();
}
this.favorite_number = in.readInt();
if (in.readIndex() != 0) {
in.readNull();
this.nullable_favorite_number = null;
} else {
this.nullable_favorite_number = in.readInt();
}
} else {
for (int i = 0; i < 4; i++) {
switch (fieldOrder[i].pos()) {
case 0:
this.name = in.readString();
break;
case 1:
if (in.readIndex() != 0) {
in.readNull();
this.nullable_name = null;
} else {
this.nullable_name = in.readString();
}
break;
case 2:
this.favorite_number = in.readInt();
break;
case 3:
if (in.readIndex() != 0) {
in.readNull();
this.nullable_favorite_number = null;
} else {
this.nullable_favorite_number = in.readInt();
}
break;
default:
throw new java.io.IOException("Corrupt ResolvingDecoder.");
}
}
}
}
@Override
public int hashCode() {
int result = 1;
result = 31 * result + (this.name == null ? 0 : this.name.hashCode());
result = 31 * result + (this.nullable_name == null ? 0 : this.nullable_name.hashCode());
result = 31 * result + Integer.hashCode(this.favorite_number);
result = 31 * result + (this.nullable_favorite_number == null ? 0 : this.nullable_favorite_number.hashCode());
return result;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof JSpecifyNullSafeAnnotationsFieldsTest)) {
return false;
}
JSpecifyNullSafeAnnotationsFieldsTest other = (JSpecifyNullSafeAnnotationsFieldsTest) o;
if (!java.util.Objects.equals(this.name, other.name)) {
return false;
}
if (!java.util.Objects.equals(this.nullable_name, other.nullable_name)) {
return false;
}
if (this.favorite_number != other.favorite_number) {
return false;
}
if (!java.util.Objects.equals(this.nullable_favorite_number, other.nullable_favorite_number)) {
return false;
}
return true;
}
}
| Builder |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/net/Severity.java | {
"start": 1892,
"end": 3584
} | enum ____ {
/** System is unusable. */
EMERG(0),
/** Action must be taken immediately. */
ALERT(1),
/** Critical conditions. */
CRITICAL(2),
/** Error conditions. */
ERROR(3),
/** Warning conditions. */
WARNING(4),
/** Normal but significant conditions. */
NOTICE(5),
/** Informational messages. */
INFO(6),
/** Debug level messages. */
DEBUG(7);
private final int code;
Severity(final int code) {
this.code = code;
}
/**
* Returns the severity code.
* @return The numeric value associated with the Severity.
*/
public int getCode() {
return this.code;
}
/**
* Determine if the name matches this Severity.
* @param name the name to match.
* @return true if the name matches, false otherwise.
*/
public boolean isEqual(final String name) {
return this.name().equalsIgnoreCase(name);
}
/**
* Returns the Severity for the specified Level.
* @param level The Level.
* @return The matching Severity, or DEBUG if there is no match.
*/
public static Severity getSeverity(final Level level) {
switch (level.getStandardLevel()) {
case ALL:
return DEBUG;
case TRACE:
return DEBUG;
case DEBUG:
return DEBUG;
case INFO:
return INFO;
case WARN:
return WARNING;
case ERROR:
return ERROR;
case FATAL:
return ALERT;
case OFF:
return EMERG;
}
return DEBUG;
}
}
| Severity |
java | elastic__elasticsearch | x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java | {
"start": 2744,
"end": 13042
} | class ____ extends PersistentTasksExecutor<DownsampleShardTaskParams> {
private static final Logger LOGGER = LogManager.getLogger(DownsampleShardPersistentTaskExecutor.class);
private final Client client;
private final boolean isStateless;
public DownsampleShardPersistentTaskExecutor(final Client client, final String taskName, Settings settings, final Executor executor) {
super(taskName, executor);
this.client = Objects.requireNonNull(client);
this.isStateless = DiscoveryNode.isStateless(settings);
}
@Override
protected void nodeOperation(
final AllocatedPersistentTask task,
final DownsampleShardTaskParams params,
final PersistentTaskState state
) {
// NOTE: query the downsampling target index so that we can start the downsampling task from the latest indexed tsid.
final SearchRequest searchRequest = new SearchRequest(params.downsampleIndex());
searchRequest.source().sort(TimeSeriesIdFieldMapper.NAME, SortOrder.DESC).size(1);
searchRequest.preference("_shards:" + params.shardId().id());
client.search(searchRequest, ActionListener.wrap(searchResponse -> {
delegate(task, params, extractTsId(searchResponse.getHits().getHits()));
}, e -> delegate(task, params, null)));
}
private static BytesRef extractTsId(SearchHit[] lastDownsampleTsidHits) {
if (lastDownsampleTsidHits.length == 0) {
return null;
} else {
var searchHit = Arrays.stream(lastDownsampleTsidHits).findFirst().get();
var field = searchHit.field("_tsid");
return field != null ? field.getValue() : null;
}
}
@Override
protected AllocatedPersistentTask createTask(
long id,
final String type,
final String action,
final TaskId parentTaskId,
final PersistentTasksCustomMetadata.PersistentTask<DownsampleShardTaskParams> taskInProgress,
final Map<String, String> headers
) {
final DownsampleShardTaskParams params = taskInProgress.getParams();
return new DownsampleShardTask(
id,
type,
action,
parentTaskId,
params.downsampleIndex(),
params.indexStartTimeMillis(),
params.indexEndTimeMillis(),
params.downsampleConfig(),
headers,
params.shardId()
);
}
@Override
public void validate(DownsampleShardTaskParams params, ClusterState clusterState, @Nullable ProjectId projectId) {
// This is just a pre-check, but doesn't prevent from avoiding from aborting the task when source index disappeared
// after initial creation of the persistent task.
var indexShardRouting = findShardRoutingTable(params.shardId(), clusterState);
if (indexShardRouting == null) {
throw new ShardNotFoundException(params.shardId());
}
}
@Override
protected PersistentTasksCustomMetadata.Assignment doGetAssignment(
final DownsampleShardTaskParams params,
final Collection<DiscoveryNode> candidateNodes,
final ClusterState clusterState,
@Nullable final ProjectId projectId
) {
// NOTE: downsampling works by running a task per each shard of the source index.
// Here we make sure we assign the task to the actual node holding the shard identified by
// the downsampling task shard id.
final ShardId shardId = params.shardId();
// If during re-assignment the source index was deleted, then we need to break out.
// Returning NO_NODE_FOUND just keeps the persistent task until the source index appears again (which would never happen)
// So let's return a node and then in the node operation we would just fail and stop this persistent task
var indexShardRouting = findShardRoutingTable(shardId, clusterState);
if (indexShardRouting == null) {
var node = selectLeastLoadedNode(clusterState, candidateNodes, DiscoveryNode::canContainData);
return new PersistentTasksCustomMetadata.Assignment(node.getId(), "a node to fail and stop this persistent task");
}
// We find the nodes that hold the eligible shards.
// If the current node of such a shard is a candidate node, then we assign the task there.
// This code is inefficient, but we are relying on the laziness of the intermediate operations
// and the assumption that the first shard we examine has high chances of being assigned to a candidate node.
return indexShardRouting.activeShards()
.stream()
.filter(this::isEligible)
.map(ShardRouting::currentNodeId)
.filter(nodeId -> isCandidateNode(candidateNodes, nodeId))
.findAny()
.map(nodeId -> new PersistentTasksCustomMetadata.Assignment(nodeId, "downsampling using node holding shard [" + shardId + "]"))
.orElse(NO_NODE_FOUND);
}
/**
* Only shards that can be searched can be used as the source of a downsampling task.
* For simplicity, in non-stateless deployments we use the primary shard.
*/
private boolean isEligible(ShardRouting shardRouting) {
return shardRouting.started() && (isStateless ? shardRouting.isSearchable() : shardRouting.primary());
}
private boolean isCandidateNode(Collection<DiscoveryNode> candidateNodes, String nodeId) {
for (DiscoveryNode candidateNode : candidateNodes) {
if (candidateNode.getId().equals(nodeId)) {
return true;
}
}
return false;
}
@Override
public Executor getExecutor() {
// The delegate action forks to the a downsample thread:
return EsExecutors.DIRECT_EXECUTOR_SERVICE;
}
private void delegate(final AllocatedPersistentTask task, final DownsampleShardTaskParams params, final BytesRef lastDownsampleTsid) {
DownsampleShardTask downsampleShardTask = (DownsampleShardTask) task;
client.execute(
DelegatingAction.INSTANCE,
new DelegatingAction.Request(downsampleShardTask, lastDownsampleTsid, params),
ActionListener.wrap(empty -> {}, e -> {
LOGGER.error("error while delegating", e);
markAsFailed(downsampleShardTask, e);
})
);
}
private static IndexShardRoutingTable findShardRoutingTable(ShardId shardId, ClusterState clusterState) {
var indexRoutingTable = clusterState.globalRoutingTable().indexRouting(clusterState.metadata(), shardId.getIndex());
return indexRoutingTable.map(routingTable -> routingTable.shard(shardId.getId())).orElse(null);
}
static void realNodeOperation(
Client client,
IndicesService indicesService,
DownsampleMetrics downsampleMetrics,
DownsampleShardTask task,
DownsampleShardTaskParams params,
BytesRef lastDownsampledTsid
) {
client.threadPool().executor(Downsample.DOWNSAMPLE_TASK_THREAD_POOL_NAME).execute(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
markAsFailed(task, e);
}
@Override
protected void doRun() throws Exception {
final var initialState = new DownsampleShardPersistentTaskState(
DownsampleShardIndexerStatus.INITIALIZED,
lastDownsampledTsid
);
try {
final var downsampleShardIndexer = new DownsampleShardIndexer(
task,
client,
indicesService.indexServiceSafe(params.shardId().getIndex()),
downsampleMetrics,
params.shardId(),
params.downsampleIndex(),
params.downsampleConfig(),
params.metrics(),
params.labels(),
params.dimensions(),
initialState
);
downsampleShardIndexer.execute();
task.markAsCompleted();
} catch (final DownsampleShardIndexerException e) {
if (e.isRetriable()) {
LOGGER.warn("Downsampling task [" + task.getPersistentTaskId() + " retriable failure [" + e.getMessage() + "]");
task.markAsLocallyAborted(e.getMessage());
} else {
LOGGER.error(
"Downsampling task [" + task.getPersistentTaskId() + " non retriable failure [" + e.getMessage() + "]"
);
markAsFailed(task, e);
}
} catch (IndexNotFoundException e) {
LOGGER.error("Downsampling task [" + task.getPersistentTaskId() + " failing because source index not assigned");
markAsFailed(task, e);
} catch (final Exception e) {
LOGGER.error("Downsampling task [" + task.getPersistentTaskId() + " non-retriable failure [" + e.getMessage() + "]");
markAsFailed(task, e);
}
}
});
}
private static void markAsFailed(DownsampleShardTask task, Exception e) {
task.setDownsampleShardIndexerStatus(DownsampleShardIndexerStatus.FAILED);
task.updatePersistentTaskState(
new DownsampleShardPersistentTaskState(DownsampleShardIndexerStatus.FAILED, null),
ActionListener.running(() -> task.markAsFailed(e))
);
}
// This is needed for FLS/DLS to work correctly. The _indices_permissions in the thread local aren't set if an searcher is acquired
// directly from this persistent task executor. By delegating to this action (with a request that implements IndicesRequest) the
// security thread local will be setup correctly so that we avoid this error:
// org.elasticsearch.ElasticsearchSecurityException: no indices permissions found
public static | DownsampleShardPersistentTaskExecutor |
java | quarkusio__quarkus | integration-tests/cache/src/test/java/io/quarkus/it/cache/RestClientTestCase.java | {
"start": 356,
"end": 1920
} | class ____ {
private static final String CITY = "Toulouse";
private static final String TODAY = "2020-12-20";
@Test
public void test() {
assertInvocations("0");
getSunriseTimeInvocations();
assertInvocations("1");
getSunriseTimeInvocations();
assertInvocations("1");
invalidate();
getSunriseTimeInvocations();
assertInvocations("2");
invalidateAll();
getSunriseTimeInvocations();
assertInvocations("3");
}
private void assertInvocations(String expectedInvocations) {
given()
.when()
.get("/rest-client/invocations")
.then()
.statusCode(200)
.body(equalTo(expectedInvocations));
}
private void getSunriseTimeInvocations() {
given()
.queryParam("date", TODAY)
.when()
.get("/rest-client/time/{city}", CITY)
.then()
.statusCode(200);
}
private void invalidate() {
given()
.queryParam("date", TODAY)
.queryParam("notPartOfTheCacheKey", "notPartOfTheCacheKey")
.when()
.delete("/rest-client/invalidate/{city}", CITY)
.then()
.statusCode(204);
}
private void invalidateAll() {
given()
.when()
.delete("/rest-client/invalidate")
.then()
.statusCode(204);
}
}
| RestClientTestCase |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/concurrent/CircuitBreakingException.java | {
"start": 870,
"end": 980
} | class ____ for reporting runtime error conditions related to
* circuit breakers.
*
* @since 3.5
*/
public | used |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/ClientThrottlingIntercept.java | {
"start": 6957,
"end": 7586
} | class ____
extends StorageEvent<SendingRequestEvent> {
/**
* Called before the Azure Storage SDK sends a request. Client-side
* throttling uses this to suspend the request, if necessary, to minimize
* errors and maximize throughput.
*
* @param event The connection, operation, and request state.
*/
@Override
public void eventOccurred(SendingRequestEvent event) {
singleton.sendingRequest(event);
}
}
/**
* The ResponseReceivedEvent is fired after the Azure Storage SDK receives a
* response.
*/
@InterfaceAudience.Private
static | SendingRequestEventHandler |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/function/server/support/RouterFunctionMappingVersionTests.java | {
"start": 3561,
"end": 4525
} | class ____ implements WebFluxConfigurer {
@Override
public void configureApiVersioning(ApiVersionConfigurer configurer) {
StandardApiVersionDeprecationHandler handler = new StandardApiVersionDeprecationHandler();
handler.configureVersion("1").setDeprecationLink(URI.create("https://example.org/deprecation"));
configurer.useRequestHeader("API-Version")
.addSupportedVersions("1", "1.1", "1.3")
.setDeprecationHandler(handler);
}
@Bean
RouterFunction<?> routerFunction() {
return RouterFunctions.route()
.path("/", builder -> builder
.GET(version("1.5"), new TestHandler("1.5"))
.GET(version("1.2+"), new TestHandler("1.2"))
.GET(new TestHandler("none")))
.build();
}
}
private record TestHandler(String body) implements HandlerFunction<ServerResponse> {
@Override
public Mono<ServerResponse> handle(ServerRequest request) {
return ServerResponse.ok().bodyValue(body);
}
}
}
| WebConfig |
java | spring-projects__spring-boot | integration-test/spring-boot-actuator-integration-tests/src/test/java/org/springframework/boot/actuate/metrics/export/prometheus/PrometheusScrapeEndpointIntegrationTests.java | {
"start": 4855,
"end": 5561
} | class ____ {
@Bean
PrometheusScrapeEndpoint prometheusScrapeEndpoint(PrometheusRegistry prometheusRegistry) {
return new PrometheusScrapeEndpoint(prometheusRegistry, new Properties());
}
@Bean
PrometheusRegistry prometheusRegistry() {
return new PrometheusRegistry();
}
@Bean
MeterRegistry registry(PrometheusRegistry prometheusRegistry) {
PrometheusMeterRegistry meterRegistry = new PrometheusMeterRegistry((k) -> null, prometheusRegistry,
Clock.SYSTEM);
Counter.builder("counter1").register(meterRegistry);
Counter.builder("counter2").register(meterRegistry);
Counter.builder("counter3").register(meterRegistry);
return meterRegistry;
}
}
}
| TestConfiguration |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/lookup/StrSubstitutor.java | {
"start": 6231,
"end": 40579
} | class ____ implements ConfigurationAware {
/**
* Constant for the default escape character.
*/
public static final char DEFAULT_ESCAPE = '$';
/**
* Constant for the default variable prefix.
*/
public static final StrMatcher DEFAULT_PREFIX = StrMatcher.stringMatcher(DEFAULT_ESCAPE + "{");
/**
* Constant for the default variable suffix.
*/
public static final StrMatcher DEFAULT_SUFFIX = StrMatcher.stringMatcher("}");
/**
* Constant for the default value delimiter of a variable.
*/
public static final String DEFAULT_VALUE_DELIMITER_STRING = ":-";
public static final StrMatcher DEFAULT_VALUE_DELIMITER = StrMatcher.stringMatcher(DEFAULT_VALUE_DELIMITER_STRING);
public static final String ESCAPE_DELIMITER_STRING = ":\\-";
public static final StrMatcher DEFAULT_VALUE_ESCAPE_DELIMITER = StrMatcher.stringMatcher(ESCAPE_DELIMITER_STRING);
private static final int BUF_SIZE = 256;
/**
* Stores the escape character.
*/
private char escapeChar;
/**
* Stores the variable prefix.
*/
private StrMatcher prefixMatcher;
/**
* Stores the variable suffix.
*/
private StrMatcher suffixMatcher;
/**
* Stores the default variable value delimiter
*/
private String valueDelimiterString;
private StrMatcher valueDelimiterMatcher;
/**
* Escape string to avoid matching the value delimiter matcher;
*/
private StrMatcher valueEscapeDelimiterMatcher;
/**
* Variable resolution is delegated to an implementer of VariableResolver.
*/
private StrLookup variableResolver;
/**
* The flag whether substitution in variable names is enabled.
*/
private boolean enableSubstitutionInVariables = true;
/**
* The currently active Configuration for use by ConfigurationAware StrLookup implementations.
*/
private Configuration configuration;
// -----------------------------------------------------------------------
/**
* Creates a new instance with defaults for variable prefix and suffix
* and the escaping character.
*/
public StrSubstitutor() {
this(null, DEFAULT_PREFIX, DEFAULT_SUFFIX, DEFAULT_ESCAPE);
}
/**
* Creates a new instance and initializes it. Uses defaults for variable
* prefix and suffix and the escaping character.
*
* @param valueMap the map with the variables' values, may be null
*/
public StrSubstitutor(final Map<String, String> valueMap) {
this(new PropertiesLookup(valueMap), DEFAULT_PREFIX, DEFAULT_SUFFIX, DEFAULT_ESCAPE);
}
/**
* Creates a new instance and initializes it. Uses a default escaping character.
*
* @param valueMap the map with the variables' values, may be null
* @param prefix the prefix for variables, not null
* @param suffix the suffix for variables, not null
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(final Map<String, String> valueMap, final String prefix, final String suffix) {
this(new PropertiesLookup(valueMap), prefix, suffix, DEFAULT_ESCAPE);
}
/**
* Creates a new instance and initializes it.
*
* @param valueMap the map with the variables' values, may be null
* @param prefix the prefix for variables, not null
* @param suffix the suffix for variables, not null
* @param escape the escape character
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(
final Map<String, String> valueMap, final String prefix, final String suffix, final char escape) {
this(new PropertiesLookup(valueMap), prefix, suffix, escape);
}
/**
* Creates a new instance and initializes it.
*
* @param valueMap the map with the variables' values, may be null
* @param prefix the prefix for variables, not null
* @param suffix the suffix for variables, not null
* @param escape the escape character
* @param valueDelimiter the variable default value delimiter, may be null
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(
final Map<String, String> valueMap,
final String prefix,
final String suffix,
final char escape,
final String valueDelimiter) {
this(new PropertiesLookup(valueMap), prefix, suffix, escape, valueDelimiter);
}
/**
* Creates a new instance and initializes it. Uses defaults for variable
* prefix and suffix and the escaping character.
*
* @param properties the map with the variables' values, may be null
*/
public StrSubstitutor(final Properties properties) {
this(toTypeSafeMap(properties));
}
/**
* Creates a new instance and initializes it.
*
* @param variableResolver the variable resolver, may be null
*/
public StrSubstitutor(final StrLookup variableResolver) {
this(variableResolver, DEFAULT_PREFIX, DEFAULT_SUFFIX, DEFAULT_ESCAPE);
}
/**
* Creates a new instance and initializes it.
*
* @param variableResolver the variable resolver, may be null
* @param prefix the prefix for variables, not null
* @param suffix the suffix for variables, not null
* @param escape the escape character
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(
final StrLookup variableResolver, final String prefix, final String suffix, final char escape) {
this.setVariableResolver(variableResolver);
this.setVariablePrefix(prefix);
this.setVariableSuffix(suffix);
this.setEscapeChar(escape);
}
/**
* Creates a new instance and initializes it.
*
* @param variableResolver the variable resolver, may be null
* @param prefix the prefix for variables, not null
* @param suffix the suffix for variables, not null
* @param escape the escape character
* @param valueDelimiter the variable default value delimiter string, may be null
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(
final StrLookup variableResolver,
final String prefix,
final String suffix,
final char escape,
final String valueDelimiter) {
this.setVariableResolver(variableResolver);
this.setVariablePrefix(prefix);
this.setVariableSuffix(suffix);
this.setEscapeChar(escape);
this.setValueDelimiter(valueDelimiter);
}
/**
* Creates a new instance and initializes it.
*
* @param variableResolver the variable resolver, may be null
* @param prefixMatcher the prefix for variables, not null
* @param suffixMatcher the suffix for variables, not null
* @param escape the escape character
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(
final StrLookup variableResolver,
final StrMatcher prefixMatcher,
final StrMatcher suffixMatcher,
final char escape) {
this(
variableResolver,
prefixMatcher,
suffixMatcher,
escape,
DEFAULT_VALUE_DELIMITER,
DEFAULT_VALUE_ESCAPE_DELIMITER);
this.valueDelimiterString = DEFAULT_VALUE_DELIMITER_STRING;
}
/**
* Creates a new instance and initializes it.
*
* @param variableResolver the variable resolver, may be null
* @param prefixMatcher the prefix for variables, not null
* @param suffixMatcher the suffix for variables, not null
* @param escape the escape character
* @param valueDelimiterMatcher the variable default value delimiter matcher, may be null
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(
final StrLookup variableResolver,
final StrMatcher prefixMatcher,
final StrMatcher suffixMatcher,
final char escape,
final StrMatcher valueDelimiterMatcher) {
this.setVariableResolver(variableResolver);
this.setVariablePrefixMatcher(prefixMatcher);
this.setVariableSuffixMatcher(suffixMatcher);
this.setEscapeChar(escape);
this.setValueDelimiterMatcher(valueDelimiterMatcher);
}
/**
* Creates a new instance and initializes it.
*
* @param variableResolver the variable resolver, may be null
* @param prefixMatcher the prefix for variables, not null
* @param suffixMatcher the suffix for variables, not null
* @param escape the escape character
* @param valueDelimiterMatcher the variable default value delimiter matcher, may be null
* @param valueEscapeMatcher the matcher to escape defaulting, may be null.
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public StrSubstitutor(
final StrLookup variableResolver,
final StrMatcher prefixMatcher,
final StrMatcher suffixMatcher,
final char escape,
final StrMatcher valueDelimiterMatcher,
final StrMatcher valueEscapeMatcher) {
this.setVariableResolver(variableResolver);
this.setVariablePrefixMatcher(prefixMatcher);
this.setVariableSuffixMatcher(suffixMatcher);
this.setEscapeChar(escape);
this.setValueDelimiterMatcher(valueDelimiterMatcher);
valueEscapeDelimiterMatcher = valueEscapeMatcher;
}
StrSubstitutor(final StrSubstitutor other) {
Objects.requireNonNull(other, "other");
this.setVariableResolver(other.getVariableResolver());
this.setVariablePrefixMatcher(other.getVariablePrefixMatcher());
this.setVariableSuffixMatcher(other.getVariableSuffixMatcher());
this.setEscapeChar(other.getEscapeChar());
this.setValueDelimiterMatcher(other.valueDelimiterMatcher);
this.valueEscapeDelimiterMatcher = other.valueEscapeDelimiterMatcher;
this.configuration = other.configuration;
this.enableSubstitutionInVariables = other.isEnableSubstitutionInVariables();
this.valueDelimiterString = other.valueDelimiterString;
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables in the given source object with
* their matching values from the map.
*
* @param source the source text containing the variables to substitute, null returns null
* @param valueMap the map with the values, may be null
* @return the result of the replace operation
*/
public static String replace(final Object source, final Map<String, String> valueMap) {
return new StrSubstitutor(valueMap).replace(source);
}
/**
* Replaces all the occurrences of variables in the given source object with
* their matching values from the map. This method allows to specify a
* custom variable prefix and suffix
*
* @param source the source text containing the variables to substitute, null returns null
* @param valueMap the map with the values, may be null
* @param prefix the prefix of variables, not null
* @param suffix the suffix of variables, not null
* @return the result of the replace operation
* @throws IllegalArgumentException if the prefix or suffix is null
*/
public static String replace(
final Object source, final Map<String, String> valueMap, final String prefix, final String suffix) {
return new StrSubstitutor(valueMap, prefix, suffix).replace(source);
}
/**
* Replaces all the occurrences of variables in the given source object with their matching
* values from the properties.
*
* @param source the source text containing the variables to substitute, null returns null
* @param valueProperties the properties with values, may be null
* @return the result of the replace operation
*/
public static String replace(final Object source, final Properties valueProperties) {
if (valueProperties == null) {
return Objects.toString(source, null);
}
final Map<String, String> valueMap = new HashMap<>();
final Enumeration<?> propNames = valueProperties.propertyNames();
while (propNames.hasMoreElements()) {
final String propName = (String) propNames.nextElement();
final String propValue = valueProperties.getProperty(propName);
valueMap.put(propName, propValue);
}
return StrSubstitutor.replace(source, valueMap);
}
private static Map<String, String> toTypeSafeMap(final Properties properties) {
final Map<String, String> map = new HashMap<>(properties.size());
for (final String name : properties.stringPropertyNames()) {
map.put(name, properties.getProperty(name));
}
return map;
}
private static String handleFailedReplacement(final String input, final Throwable throwable) {
StatusLogger.getLogger().error("Replacement failed on {}", input, throwable);
return input;
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source string as a template.
*
* @param source the string to replace in, null returns null
* @return the result of the replace operation
*/
public String replace(final String source) {
return replace(null, source);
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source string as a template.
*
* @param event The current LogEvent if there is one.
* @param source the string to replace in, null returns null
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final String source) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(source);
try {
if (!substitute(event, buf, 0, source.length())) {
return source;
}
} catch (Throwable t) {
return handleFailedReplacement(source, t);
}
return buf.toString();
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source string as a template.
* <p>
* Only the specified portion of the string will be processed.
* The rest of the string is not processed, and is not returned.
* </p>
*
* @param source the string to replace in, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final String source, final int offset, final int length) {
return replace(null, source, offset, length);
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source string as a template.
* <p>
* Only the specified portion of the string will be processed.
* The rest of the string is not processed, and is not returned.
* </p>
*
* @param event the current LogEvent, if one exists.
* @param source the string to replace in, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final String source, final int offset, final int length) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(length).append(source, offset, length);
try {
if (!substitute(event, buf, 0, length)) {
return source.substring(offset, offset + length);
}
} catch (Throwable t) {
return handleFailedReplacement(source, t);
}
return buf.toString();
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source array as a template.
* The array is not altered by this method.
*
* @param source the character array to replace in, not altered, null returns null
* @return the result of the replace operation
*/
public String replace(final char[] source) {
return replace(null, source);
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source array as a template.
* The array is not altered by this method.
*
* @param event the current LogEvent, if one exists.
* @param source the character array to replace in, not altered, null returns null
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final char[] source) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(source.length).append(source);
try {
substitute(event, buf, 0, source.length);
} catch (Throwable t) {
return handleFailedReplacement(new String(source), t);
}
return buf.toString();
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source array as a template.
* The array is not altered by this method.
* <p>
* Only the specified portion of the array will be processed.
* The rest of the array is not processed, and is not returned.
* </p>
*
* @param source the character array to replace in, not altered, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final char[] source, final int offset, final int length) {
return replace(null, source, offset, length);
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source array as a template.
* The array is not altered by this method.
* <p>
* Only the specified portion of the array will be processed.
* The rest of the array is not processed, and is not returned.
* </p>
*
* @param event the current LogEvent, if one exists.
* @param source the character array to replace in, not altered, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final char[] source, final int offset, final int length) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(length).append(source, offset, length);
try {
substitute(event, buf, 0, length);
} catch (Throwable t) {
return handleFailedReplacement(new String(source, offset, length), t);
}
return buf.toString();
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source buffer as a template.
* The buffer is not altered by this method.
*
* @param source the buffer to use as a template, not changed, null returns null
* @return the result of the replace operation
*/
public String replace(final StringBuffer source) {
return replace(null, source);
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source buffer as a template.
* The buffer is not altered by this method.
*
* @param event the current LogEvent, if one exists.
* @param source the buffer to use as a template, not changed, null returns null
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final StringBuffer source) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(source.length()).append(source);
try {
substitute(event, buf, 0, buf.length());
} catch (Throwable t) {
return handleFailedReplacement(source.toString(), t);
}
return buf.toString();
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source buffer as a template.
* The buffer is not altered by this method.
* <p>
* Only the specified portion of the buffer will be processed.
* The rest of the buffer is not processed, and is not returned.
* </p>
*
* @param source the buffer to use as a template, not changed, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final StringBuffer source, final int offset, final int length) {
return replace(null, source, offset, length);
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source buffer as a template.
* The buffer is not altered by this method.
* <p>
* Only the specified portion of the buffer will be processed.
* The rest of the buffer is not processed, and is not returned.
* </p>
*
* @param event the current LogEvent, if one exists.
* @param source the buffer to use as a template, not changed, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final StringBuffer source, final int offset, final int length) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(length).append(source, offset, length);
try {
substitute(event, buf, 0, length);
} catch (Throwable t) {
return handleFailedReplacement(source.substring(offset, offset + length), t);
}
return buf.toString();
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source builder as a template.
* The builder is not altered by this method.
*
* @param source the builder to use as a template, not changed, null returns null
* @return the result of the replace operation
*/
public String replace(final StringBuilder source) {
return replace(null, source);
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source builder as a template.
* The builder is not altered by this method.
*
* @param event The LogEvent.
* @param source the builder to use as a template, not changed, null returns null.
* @return the result of the replace operation.
*/
public String replace(final LogEvent event, final StringBuilder source) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(source.length()).append(source);
try {
substitute(event, buf, 0, buf.length());
} catch (Throwable t) {
return handleFailedReplacement(source.toString(), t);
}
return buf.toString();
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source builder as a template.
* The builder is not altered by this method.
* <p>
* Only the specified portion of the builder will be processed.
* The rest of the builder is not processed, and is not returned.
* </p>
*
* @param source the builder to use as a template, not changed, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final StringBuilder source, final int offset, final int length) {
return replace(null, source, offset, length);
}
/**
* Replaces all the occurrences of variables with their matching values
* from the resolver using the given source builder as a template.
* The builder is not altered by this method.
* <p>
* Only the specified portion of the builder will be processed.
* The rest of the builder is not processed, and is not returned.
* </p>
*
* @param event the current LogEvent, if one exists.
* @param source the builder to use as a template, not changed, null returns null
* @param offset the start offset within the array, must be valid
* @param length the length within the array to be processed, must be valid
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final StringBuilder source, final int offset, final int length) {
if (source == null) {
return null;
}
final StringBuilder buf = new StringBuilder(length).append(source, offset, length);
try {
substitute(event, buf, 0, length);
} catch (Throwable t) {
return handleFailedReplacement(source.substring(offset, offset + length), t);
}
return buf.toString();
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables in the given source object with
* their matching values from the resolver. The input source object is
* converted to a string using <code>toString</code> and is not altered.
*
* @param source the source to replace in, null returns null
* @return the result of the replace operation
*/
public String replace(final Object source) {
return replace(null, source);
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables in the given source object with
* their matching values from the resolver. The input source object is
* converted to a string using <code>toString</code> and is not altered.
*
* @param event the current LogEvent, if one exists.
* @param source the source to replace in, null returns null
* @return the result of the replace operation
*/
public String replace(final LogEvent event, final Object source) {
if (source == null) {
return null;
}
final String stringValue = String.valueOf(source);
final StringBuilder buf = new StringBuilder(stringValue.length()).append(stringValue);
try {
substitute(event, buf, 0, buf.length());
} catch (Throwable t) {
return handleFailedReplacement(stringValue, t);
}
return buf.toString();
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables within the given source buffer
* with their matching values from the resolver.
* The buffer is updated with the result.
*
* @param source the buffer to replace in, updated, null returns false
* @return true if altered
*/
public boolean replaceIn(final StringBuffer source) {
if (source == null) {
return false;
}
return replaceIn(source, 0, source.length());
}
/**
* Replaces all the occurrences of variables within the given source buffer
* with their matching values from the resolver.
* The buffer is updated with the result.
* <p>
* Only the specified portion of the buffer will be processed.
* The rest of the buffer is not processed, but it is not deleted.
* </p>
*
* @param source the buffer to replace in, updated, null returns false
* @param offset the start offset within the array, must be valid
* @param length the length within the buffer to be processed, must be valid
* @return true if altered
*/
public boolean replaceIn(final StringBuffer source, final int offset, final int length) {
return replaceIn(null, source, offset, length);
}
/**
* Replaces all the occurrences of variables within the given source buffer
* with their matching values from the resolver.
* The buffer is updated with the result.
* <p>
* Only the specified portion of the buffer will be processed.
* The rest of the buffer is not processed, but it is not deleted.
* </p>
*
* @param event the current LogEvent, if one exists.
* @param source the buffer to replace in, updated, null returns false
* @param offset the start offset within the array, must be valid
* @param length the length within the buffer to be processed, must be valid
* @return true if altered
*/
public boolean replaceIn(final LogEvent event, final StringBuffer source, final int offset, final int length) {
if (source == null) {
return false;
}
final StringBuilder buf = new StringBuilder(length).append(source, offset, length);
try {
if (!substitute(event, buf, 0, length)) {
return false;
}
} catch (Throwable t) {
StatusLogger.getLogger().error("Replacement failed on {}", source, t);
return false;
}
source.replace(offset, offset + length, buf.toString());
return true;
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables within the given source
* builder with their matching values from the resolver.
*
* @param source the builder to replace in, updated, null returns false
* @return true if altered
*/
public boolean replaceIn(final StringBuilder source) {
return replaceIn(null, source);
}
// -----------------------------------------------------------------------
/**
* Replaces all the occurrences of variables within the given source
* builder with their matching values from the resolver.
*
* @param event the current LogEvent, if one exists.
* @param source the builder to replace in, updated, null returns false
* @return true if altered
*/
public boolean replaceIn(final LogEvent event, final StringBuilder source) {
if (source == null) {
return false;
}
return substitute(event, source, 0, source.length());
}
/**
* Replaces all the occurrences of variables within the given source
* builder with their matching values from the resolver.
* <p>
* Only the specified portion of the builder will be processed.
* The rest of the builder is not processed, but it is not deleted.
* </p>
*
* @param source the builder to replace in, null returns false
* @param offset the start offset within the array, must be valid
* @param length the length within the builder to be processed, must be valid
* @return true if altered
*/
public boolean replaceIn(final StringBuilder source, final int offset, final int length) {
return replaceIn(null, source, offset, length);
}
/**
* Replaces all the occurrences of variables within the given source
* builder with their matching values from the resolver.
* <p>
* Only the specified portion of the builder will be processed.
* The rest of the builder is not processed, but it is not deleted.
* </p>
*
* @param event the current LogEvent, if one is present.
* @param source the builder to replace in, null returns false
* @param offset the start offset within the array, must be valid
* @param length the length within the builder to be processed, must be valid
* @return true if altered
*/
public boolean replaceIn(final LogEvent event, final StringBuilder source, final int offset, final int length) {
if (source == null) {
return false;
}
return substitute(event, source, offset, length);
}
// -----------------------------------------------------------------------
/**
* Internal method that substitutes the variables.
* <p>
* Most users of this | StrSubstitutor |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/CompatibilityFixStrategyTest.java | {
"start": 12271,
"end": 12918
} | class ____ {
@Test
@DisplayName("should provide meaningful description")
void shouldProvideMeaningfulDescription() {
String description = strategy.getDescription();
assertNotNull(description, "Description should not be null");
assertFalse(description.trim().isEmpty(), "Description should not be empty");
assertTrue(
description.toLowerCase().contains("compatibility")
|| description.toLowerCase().contains("fix"),
"Description should mention compatibility or fix");
}
}
}
| StrategyDescriptionTests |
java | apache__flink | flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sinks/LegacyCsvDynamicTableSinkOptions.java | {
"start": 1431,
"end": 2449
} | class ____ {
public static final String IDENTIFIER = "legacy-csv";
public static final ConfigOption<String> PATH =
key("path").stringType().noDefaultValue().withDescription("The path of a file");
public static final ConfigOption<String> FIELD_DELIM =
key("field-delimiter")
.stringType()
.defaultValue(",")
.withDescription("The delimiter between each field in a single row");
public static final ConfigOption<Integer> NUM_FILES =
key("num-files")
.intType()
.defaultValue(-1)
.withDescription("The number of files to be created");
public static final ConfigOption<FileSystem.WriteMode> WRITE_MODE =
key("write-mode")
.enumType(FileSystem.WriteMode.class)
.noDefaultValue()
.withDescription("The write mode when writing to the file");
}
| LegacyCsvDynamicTableSinkOptions |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/util/JacksonFeatureSet.java | {
"start": 357,
"end": 3601
} | class ____<F extends JacksonFeature>
implements java.io.Serializable // since 2.16
{
private static final long serialVersionUID = 1L;
protected int _enabled;
/**
* Constructor for creating instance with specific bitmask, wherein
* {@code 1} bit means matching {@link JacksonFeature} is enabled and
* {@code 0} disabled.
*
* @param bitmask Bitmask for features that are enabled
*/
protected JacksonFeatureSet(int bitmask) {
_enabled = bitmask;
}
/**
* "Default" factory which will calculate settings based on default-enabled
* status of all features.
*
* @param <F> Self-reference type for convenience
*
* @param allFeatures Set of all features (enabled or disabled): usually from
* {@code Enum.values()}
*
* @return Feature set instance constructed
*/
public static <F extends JacksonFeature> JacksonFeatureSet<F> fromDefaults(F[] allFeatures) {
// first sanity check
if (allFeatures.length > 31) {
final String desc = allFeatures[0].getClass().getName();
throw new IllegalArgumentException(String.format(
"Cannot use type `%s` with JacksonFeatureSet: too many entries (%d > 31)",
desc, allFeatures.length));
}
int flags = 0;
for (F f : allFeatures) {
if (f.enabledByDefault()) {
flags |= f.getMask();
}
}
return new JacksonFeatureSet<>(flags);
}
public static <F extends JacksonFeature> JacksonFeatureSet<F> fromBitmask(int bitmask) {
return new JacksonFeatureSet<>(bitmask);
}
/**
* Mutant factory for getting a set in which specified feature is enabled:
* will either return this instance (if no change), or newly created set (if there
* is change).
*
* @param feature Feature to enable in set returned
*
* @return Newly created set of state of feature changed; {@code this} if not
*/
public JacksonFeatureSet<F> with(F feature) {
int newMask = _enabled | feature.getMask();
return (newMask == _enabled) ? this : new JacksonFeatureSet<>(newMask);
}
/**
* Mutant factory for getting a set in which specified feature is disabled:
* will either return this instance (if no change), or newly created set (if there
* is change).
*
* @param feature Feature to disable in set returned
*
* @return Newly created set of state of feature changed; {@code this} if not
*/
public JacksonFeatureSet<F> without(F feature) {
int newMask = _enabled & ~feature.getMask();
return (newMask == _enabled) ? this : new JacksonFeatureSet<>(newMask);
}
/**
* Main accessor for checking whether given feature is enabled in this feature set.
*
* @param feature Feature to check
*
* @return True if feature is enabled in this set; false otherwise
*/
public boolean isEnabled(F feature) {
return (feature.getMask() & _enabled) != 0;
}
/**
* Accessor for underlying bitmask
*
* @return Bitmask of enabled features
*/
public int asBitmask() {
return _enabled;
}
}
| JacksonFeatureSet |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobTaskEvent.java | {
"start": 975,
"end": 1371
} | class ____ extends JobEvent {
private TaskId taskID;
private TaskState taskState;
public JobTaskEvent(TaskId taskID, TaskState taskState) {
super(taskID.getJobId(), JobEventType.JOB_TASK_COMPLETED);
this.taskID = taskID;
this.taskState = taskState;
}
public TaskId getTaskID() {
return taskID;
}
public TaskState getState() {
return taskState;
}
}
| JobTaskEvent |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/ChangeContainerNameInDeploymentTriggerDecorator.java | {
"start": 344,
"end": 1963
} | class ____ extends NamedResourceDecorator<DeploymentConfigSpecFluent<?>> {
private final String containerName;
public ChangeContainerNameInDeploymentTriggerDecorator(String containerName) {
this.containerName = containerName;
}
@Override
public void andThenVisit(DeploymentConfigSpecFluent<?> deploymentConfigSpecFluent, ObjectMeta objectMeta) {
if (deploymentConfigSpecFluent.hasTriggers()) {
deploymentConfigSpecFluent
.editFirstTrigger()
.editImageChangeParams()
.withContainerNames(containerName)
.endImageChangeParams()
.endTrigger()
.buildTriggers();
}
}
@Override
public Class<? extends Decorator>[] after() {
return new Class[] { ApplyDeploymentTriggerDecorator.class, AddEnvVarDecorator.class, AddPortDecorator.class,
AddMountDecorator.class, AddPvcVolumeDecorator.class, AddAwsElasticBlockStoreVolumeDecorator.class,
AddAzureDiskVolumeDecorator.class, AddAwsElasticBlockStoreVolumeDecorator.class, ApplyImageDecorator.class,
ApplyImagePullPolicyDecorator.class, ApplyWorkingDirDecorator.class, ApplyCommandDecorator.class,
ApplyArgsDecorator.class, ApplyServiceAccountNamedDecorator.class, AddReadinessProbeDecorator.class,
AddLivenessProbeDecorator.class, ApplyApplicationContainerDecorator.class, AddSidecarDecorator.class,
AddInitContainerDecorator.class };
}
}
| ChangeContainerNameInDeploymentTriggerDecorator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/join/AttributeJoinWithSingleTableInheritanceTest.java | {
"start": 1745,
"end": 9309
} | class ____ {
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testLeftJoin(SessionFactoryScope scope) {
scope.inTransaction( s -> {
final ChildEntityA childEntityA = new SubChildEntityA1( 11 );
s.persist( childEntityA );
final ChildEntityB childEntityB = new ChildEntityB( 21 );
s.persist( childEntityB );
s.persist( new RootOne( 1, childEntityA ) );
s.persist( new RootOne( 2, null ) );
} );
scope.inTransaction( s -> {
// simulate association with ChildEntityB
s.createNativeMutationQuery( "update root_one set child_id = 21 where id = 2" ).executeUpdate();
} );
scope.inTransaction( s -> {
final List<Tuple> resultList = s.createQuery(
"select r, ce " +
"from RootOne r left join r.child ce " +
"order by r.id",
Tuple.class
).getResultList();
assertEquals( 2, resultList.size() );
assertResult( resultList.get( 0 ), 1, 11, 11, "child_a_1", SubChildEntityA1.class );
assertResult( resultList.get( 1 ), 2, 21, null, null, null );
} );
}
@Test
public void testLeftJoinExplicitTreat(SessionFactoryScope scope) {
scope.inTransaction( s -> {
final ChildEntityA childEntityA = new SubChildEntityA1( 11 );
s.persist( childEntityA );
final ChildEntityB childEntityB = new ChildEntityB( 21 );
s.persist( childEntityB );
s.persist( new RootOne( 1, childEntityA ) );
s.persist( new RootOne( 2, null ) );
} );
scope.inTransaction( s -> {
// simulate association with ChildEntityB
s.createNativeMutationQuery( "update root_one set child_id = 21 where id = 2" ).executeUpdate();
} );
scope.inTransaction( s -> {
final List<Tuple> resultList = s.createQuery(
"select r, ce " +
"from RootOne r left join treat(r.child as ChildEntityA) ce " +
"order by r.id",
Tuple.class
).getResultList();
assertEquals( 2, resultList.size() );
assertResult( resultList.get( 0 ), 1, 11, 11, "child_a_1", SubChildEntityA1.class );
assertResult( resultList.get( 1 ), 2, 21, null, null, null );
} );
}
@Test
@Jira("https://hibernate.atlassian.net/browse/HHH-19883")
public void testTreatedJoinWithCondition(SessionFactoryScope scope) {
scope.inTransaction( s -> {
final ChildEntityA childEntityA1 = new SubChildEntityA1( 11 );
childEntityA1.setName( "childA1" );
s.persist( childEntityA1 );
final ChildEntityA childEntityA2 = new SubChildEntityA2( 21 );
childEntityA2.setName( "childA2" );
s.persist( childEntityA2 );
s.persist( new RootOne( 1, childEntityA1 ) );
s.persist( new RootOne( 2, childEntityA2 ) );
} );
scope.inTransaction( s -> {
final Tuple tuple = s.createQuery(
"select r, ce " +
"from RootOne r join treat(r.child as ChildEntityA) ce on ce.name = 'childA1'",
Tuple.class
).getSingleResult();
assertResult( tuple, 1, 11, 11, "child_a_1", SubChildEntityA1.class );
} );
}
@Test
public void testRightJoin(SessionFactoryScope scope) {
scope.inTransaction( s -> {
final SubChildEntityA1 subChildEntityA1 = new SubChildEntityA1( 11 );
s.persist( subChildEntityA1 );
final SubChildEntityA2 subChildEntityA2 = new SubChildEntityA2( 12 );
s.persist( subChildEntityA2 );
s.persist( new ChildEntityB( 21 ) );
s.persist( new RootOne( 1, subChildEntityA1 ) );
s.persist( new RootOne( 2, subChildEntityA1 ) );
s.persist( new RootOne( 3, null ) );
} );
scope.inTransaction( s -> {
// simulate association with ChildEntityB
s.createNativeMutationQuery( "update root_one set child_id = 21 where id = 3" ).executeUpdate();
} );
scope.inTransaction( s -> {
final List<Tuple> resultList = s.createQuery(
"select r, ce " +
"from RootOne r right join r.child ce " +
"order by r.id nulls last, ce.id",
Tuple.class
).getResultList();
assertEquals( 3, resultList.size() );
assertResult( resultList.get( 0 ), 1, 11, 11, "child_a_1", SubChildEntityA1.class );
assertResult( resultList.get( 1 ), 2, 11, 11, "child_a_1", SubChildEntityA1.class );
assertResult( resultList.get( 2 ), null, null, 12, "child_a_2", SubChildEntityA2.class );
} );
}
@Test
public void testCrossJoin(SessionFactoryScope scope) {
scope.inTransaction( s -> {
final SubChildEntityA1 subChildEntityA1 = new SubChildEntityA1( 11 );
s.persist( subChildEntityA1 );
s.persist( new ChildEntityB( 21 ) );
s.persist( new RootOne( 1, subChildEntityA1 ) );
s.persist( new RootOne( 2, null ) );
} );
scope.inTransaction( s -> {
// simulate association with ChildEntityB
s.createNativeMutationQuery( "update root_one set child_id = 21 where id = 2" ).executeUpdate();
} );
scope.inTransaction( s -> {
final List<Tuple> resultList = s.createQuery(
"select r, ce " +
"from RootOne r cross join ChildEntityA ce " +
"order by r.id nulls last, ce.id",
Tuple.class
).getResultList();
assertEquals( 2, resultList.size() );
assertResult( resultList.get( 0 ), 1, 11, 11, "child_a_1", SubChildEntityA1.class );
assertResult( resultList.get( 1 ), 2, 21, 11, "child_a_1", SubChildEntityA1.class );
} );
}
@Test
@RequiresDialectFeature( feature = DialectFeatureChecks.SupportsFullJoin.class )
public void testFullJoin(SessionFactoryScope scope) {
scope.inTransaction( s -> {
final SubChildEntityA1 subChildEntityA1 = new SubChildEntityA1( 11 );
s.persist( subChildEntityA1 );
final SubChildEntityA2 subChildEntityA2 = new SubChildEntityA2( 12 );
s.persist( subChildEntityA2 );
s.persist( new ChildEntityB( 21 ) );
s.persist( new RootOne( 1, subChildEntityA1 ) );
s.persist( new RootOne( 2, subChildEntityA1 ) );
s.persist( new RootOne( 3, null ) );
s.persist( new RootOne( 4, null ) );
} );
scope.inTransaction( s -> {
// simulate association with ChildEntityB
s.createNativeMutationQuery( "update root_one set child_id = 21 where id = 3" ).executeUpdate();
} );
scope.inTransaction( s -> {
final List<Tuple> resultList = s.createQuery(
"select r, ce " +
"from RootOne r full join ChildEntityA ce on ce.id = r.childId " +
"order by r.id nulls last, ce.id",
Tuple.class
).getResultList();
assertEquals( 5, resultList.size() );
assertResult( resultList.get( 0 ), 1, 11, 11, "child_a_1", SubChildEntityA1.class );
assertResult( resultList.get( 1 ), 2, 11, 11, "child_a_1", SubChildEntityA1.class );
assertResult( resultList.get( 2 ), 3, 21, null, null, null );
assertResult( resultList.get( 3 ), 4, null, null, null, null );
assertResult( resultList.get( 4 ), null, null, 12, "child_a_2", SubChildEntityA2.class );
} );
}
private <T extends ChildEntityA> void assertResult(
Tuple result,
Integer rootId,
Integer rootChildId,
Integer childId,
String discValue,
Class<T> subClass) {
if ( rootId != null ) {
final RootOne root = result.get( 0, RootOne.class );
assertEquals( rootId, root.getId() );
assertEquals( rootChildId, root.getChildId() );
}
else {
assertNull( result.get( 0 ) );
}
if ( subClass != null ) {
assertInstanceOf( subClass, result.get( 1 ) );
final ChildEntityA sub1 = result.get( 1, subClass );
assertEquals( childId, sub1.getId() );
assertEquals( discValue, sub1.getDiscCol() );
}
else {
assertNull( result.get( 1 ) );
}
}
@Entity( name = "BaseClass" )
@Inheritance( strategy = InheritanceType.SINGLE_TABLE )
@DiscriminatorColumn( name = "disc_col" )
public static | AttributeJoinWithSingleTableInheritanceTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/component/cascading/toone/Address.java | {
"start": 215,
"end": 1051
} | class ____ {
private Long id;
private String street1;
private String street2;
private String city;
private String state;
private String zipCode;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getStreet1() {
return street1;
}
public void setStreet1(String street1) {
this.street1 = street1;
}
public String getStreet2() {
return street2;
}
public void setStreet2(String street2) {
this.street2 = street2;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getZipCode() {
return zipCode;
}
public void setZipCode(String zipCode) {
this.zipCode = zipCode;
}
}
| Address |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/ArcTestContainer.java | {
"start": 2862,
"end": 15565
} | class ____ {
private final List<Class<?>> resourceReferenceProviders;
private final List<Class<?>> beanClasses;
private final List<Class<?>> additionalClasses;
private final List<Class<? extends Annotation>> resourceAnnotations;
private final List<BeanRegistrar> beanRegistrars;
private final List<ObserverRegistrar> observerRegistrars;
private final List<ContextRegistrar> contextRegistrars;
private final List<QualifierRegistrar> qualifierRegistrars;
private final List<InterceptorBindingRegistrar> interceptorBindingRegistrars;
private final List<StereotypeRegistrar> stereotypeRegistrars;
private final List<AnnotationTransformation> annotationsTransformers;
private final List<InjectionPointsTransformer> injectionsPointsTransformers;
private final List<ObserverTransformer> observerTransformers;
private final List<BeanDeploymentValidator> beanDeploymentValidators;
private boolean shouldFail = false;
private boolean removeUnusedBeans = false;
private final List<Predicate<BeanInfo>> removalExclusions;
private AlternativePriorities alternativePriorities;
private final List<BuildCompatibleExtension> buildCompatibleExtensions;
private boolean strictCompatibility = false;
private boolean optimizeContexts = false;
private final List<Predicate<ClassInfo>> excludeTypes;
private boolean testMode = false;
public Builder() {
resourceReferenceProviders = new ArrayList<>();
beanClasses = new ArrayList<>();
additionalClasses = new ArrayList<>();
resourceAnnotations = new ArrayList<>();
beanRegistrars = new ArrayList<>();
observerRegistrars = new ArrayList<>();
contextRegistrars = new ArrayList<>();
qualifierRegistrars = new ArrayList<>();
interceptorBindingRegistrars = new ArrayList<>();
stereotypeRegistrars = new ArrayList<>();
annotationsTransformers = new ArrayList<>();
injectionsPointsTransformers = new ArrayList<>();
observerTransformers = new ArrayList<>();
beanDeploymentValidators = new ArrayList<>();
removalExclusions = new ArrayList<>();
buildCompatibleExtensions = new ArrayList<>();
excludeTypes = new ArrayList<>();
}
public Builder resourceReferenceProviders(Class<?>... resourceReferenceProviders) {
Collections.addAll(this.resourceReferenceProviders, resourceReferenceProviders);
return this;
}
public Builder beanClasses(Class<?>... beanClasses) {
Collections.addAll(this.beanClasses, beanClasses);
return this;
}
public Builder additionalClasses(Class<?>... additionalClasses) {
Collections.addAll(this.additionalClasses, additionalClasses);
return this;
}
@SafeVarargs
public final Builder resourceAnnotations(Class<? extends Annotation>... resourceAnnotations) {
Collections.addAll(this.resourceAnnotations, resourceAnnotations);
return this;
}
public Builder beanRegistrars(BeanRegistrar... registrars) {
Collections.addAll(this.beanRegistrars, registrars);
return this;
}
public Builder observerRegistrars(ObserverRegistrar... registrars) {
Collections.addAll(this.observerRegistrars, registrars);
return this;
}
public Builder contextRegistrars(ContextRegistrar... registrars) {
Collections.addAll(this.contextRegistrars, registrars);
return this;
}
/**
* @deprecated use {@link #annotationTransformations(AnnotationTransformation...)}
*/
@Deprecated(forRemoval = true)
public Builder annotationsTransformers(AnnotationsTransformer... transformers) {
Collections.addAll(this.annotationsTransformers, transformers);
return this;
}
public Builder annotationTransformations(AnnotationTransformation... transformations) {
Collections.addAll(this.annotationsTransformers, transformations);
return this;
}
public Builder injectionPointsTransformers(InjectionPointsTransformer... transformers) {
Collections.addAll(this.injectionsPointsTransformers, transformers);
return this;
}
public Builder observerTransformers(ObserverTransformer... transformers) {
Collections.addAll(this.observerTransformers, transformers);
return this;
}
public Builder qualifierRegistrars(QualifierRegistrar... registrars) {
Collections.addAll(this.qualifierRegistrars, registrars);
return this;
}
public Builder interceptorBindingRegistrars(InterceptorBindingRegistrar... registrars) {
Collections.addAll(this.interceptorBindingRegistrars, registrars);
return this;
}
public Builder stereotypeRegistrars(StereotypeRegistrar... registrars) {
Collections.addAll(this.stereotypeRegistrars, registrars);
return this;
}
public Builder beanDeploymentValidators(BeanDeploymentValidator... validators) {
Collections.addAll(this.beanDeploymentValidators, validators);
return this;
}
public Builder removeUnusedBeans(boolean value) {
this.removeUnusedBeans = value;
return this;
}
public Builder addRemovalExclusion(Predicate<BeanInfo> exclusion) {
this.removalExclusions.add(exclusion);
return this;
}
public Builder shouldFail() {
this.shouldFail = true;
return this;
}
public Builder alternativePriorities(AlternativePriorities priorities) {
this.alternativePriorities = priorities;
return this;
}
public final Builder buildCompatibleExtensions(BuildCompatibleExtension... extensions) {
Collections.addAll(this.buildCompatibleExtensions, extensions);
return this;
}
public Builder strictCompatibility(boolean strictCompatibility) {
this.strictCompatibility = strictCompatibility;
return this;
}
public Builder testMode(boolean testMode) {
this.testMode = testMode;
return this;
}
public Builder optimizeContexts(boolean value) {
this.optimizeContexts = value;
return this;
}
public Builder excludeType(Predicate<ClassInfo> predicate) {
this.excludeTypes.add(predicate);
return this;
}
public ArcTestContainer build() {
return new ArcTestContainer(this);
}
}
private final List<Class<?>> resourceReferenceProviders;
private final List<Class<?>> beanClasses;
private final List<Class<?>> additionalClasses;
private final List<Predicate<ClassInfo>> excludeTypes;
private final List<Class<? extends Annotation>> resourceAnnotations;
private final List<BeanRegistrar> beanRegistrars;
private final List<ObserverRegistrar> observerRegistrars;
private final List<ContextRegistrar> contextRegistrars;
private final List<QualifierRegistrar> qualifierRegistrars;
private final List<InterceptorBindingRegistrar> interceptorBindingRegistrars;
private final List<StereotypeRegistrar> stereotypeRegistrars;
private final List<AnnotationTransformation> annotationsTransformers;
private final List<InjectionPointsTransformer> injectionPointsTransformers;
private final List<ObserverTransformer> observerTransformers;
private final List<BeanDeploymentValidator> beanDeploymentValidators;
private final boolean shouldFail;
private final AtomicReference<Throwable> buildFailure;
private final boolean removeUnusedBeans;
private final List<Predicate<BeanInfo>> removalExclusions;
private final AlternativePriorities alternativePriorities;
private final List<BuildCompatibleExtension> buildCompatibleExtensions;
private final boolean strictCompatibility;
private final boolean optimizeContexts;
private final boolean testMode;
public ArcTestContainer(Class<?>... beanClasses) {
this.resourceReferenceProviders = Collections.emptyList();
this.beanClasses = Arrays.asList(beanClasses);
this.additionalClasses = Collections.emptyList();
this.resourceAnnotations = Collections.emptyList();
this.beanRegistrars = Collections.emptyList();
this.observerRegistrars = Collections.emptyList();
this.contextRegistrars = Collections.emptyList();
this.interceptorBindingRegistrars = Collections.emptyList();
this.stereotypeRegistrars = Collections.emptyList();
this.qualifierRegistrars = Collections.emptyList();
this.annotationsTransformers = Collections.emptyList();
this.injectionPointsTransformers = Collections.emptyList();
this.observerTransformers = Collections.emptyList();
this.beanDeploymentValidators = Collections.emptyList();
this.buildFailure = new AtomicReference<Throwable>(null);
this.shouldFail = false;
this.removeUnusedBeans = false;
this.removalExclusions = Collections.emptyList();
this.alternativePriorities = null;
this.buildCompatibleExtensions = Collections.emptyList();
this.strictCompatibility = false;
this.optimizeContexts = false;
this.excludeTypes = Collections.emptyList();
this.testMode = false;
}
public ArcTestContainer(Builder builder) {
this.resourceReferenceProviders = builder.resourceReferenceProviders;
this.beanClasses = builder.beanClasses;
this.additionalClasses = builder.additionalClasses;
this.resourceAnnotations = builder.resourceAnnotations;
this.beanRegistrars = builder.beanRegistrars;
this.observerRegistrars = builder.observerRegistrars;
this.contextRegistrars = builder.contextRegistrars;
this.qualifierRegistrars = builder.qualifierRegistrars;
this.interceptorBindingRegistrars = builder.interceptorBindingRegistrars;
this.stereotypeRegistrars = builder.stereotypeRegistrars;
this.annotationsTransformers = builder.annotationsTransformers;
this.injectionPointsTransformers = builder.injectionsPointsTransformers;
this.observerTransformers = builder.observerTransformers;
this.beanDeploymentValidators = builder.beanDeploymentValidators;
this.buildFailure = new AtomicReference<Throwable>(null);
this.shouldFail = builder.shouldFail;
this.removeUnusedBeans = builder.removeUnusedBeans;
this.removalExclusions = builder.removalExclusions;
this.alternativePriorities = builder.alternativePriorities;
this.buildCompatibleExtensions = builder.buildCompatibleExtensions;
this.strictCompatibility = builder.strictCompatibility;
this.optimizeContexts = builder.optimizeContexts;
this.excludeTypes = builder.excludeTypes;
this.testMode = builder.testMode;
}
// this is where we start Arc, we operate on a per-method basis
@Override
public void beforeEach(ExtensionContext extensionContext) throws Exception {
getRootExtensionStore(extensionContext).put(KEY_OLD_TCCL, init(extensionContext));
}
// this is where we shutdown Arc
@Override
public void afterEach(ExtensionContext extensionContext) throws Exception {
ClassLoader oldTccl = getRootExtensionStore(extensionContext).get(KEY_OLD_TCCL, ClassLoader.class);
Thread.currentThread().setContextClassLoader(oldTccl);
shutdown();
}
private static synchronized ExtensionContext.Store getRootExtensionStore(ExtensionContext context) {
if (EXTENSION_NAMESPACE == null) {
EXTENSION_NAMESPACE = ExtensionContext.Namespace.create(ArcTestContainer.class);
}
return context.getRoot().getStore(EXTENSION_NAMESPACE);
}
/**
* In case the test is expected to fail, this method will return a {@link Throwable} that caused it.
*/
public Throwable getFailure() {
return buildFailure.get();
}
private void shutdown() {
Arc.shutdown();
}
private ClassLoader init(ExtensionContext context) {
// retrieve test | Builder |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/transport/SocketAddressProvider.java | {
"start": 937,
"end": 1058
} | interface ____ {
SocketAddress getRemoteSocketAddress();
SocketAddress getLocalSocketAddress();
}
| SocketAddressProvider |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/writer/ExecutableMethodsDefinitionWriter.java | {
"start": 2457,
"end": 5543
} | class ____ implements ClassOutputWriter {
public static final String CLASS_SUFFIX = "$Exec";
public static final Method GET_EXECUTABLE_AT_INDEX_METHOD = ReflectionUtils.getRequiredInternalMethod(AbstractExecutableMethodsDefinition.class, "getExecutableMethodByIndex", int.class);
private static final Constructor<?> METHOD_REFERENCE_CONSTRUCTOR = ReflectionUtils.getRequiredInternalConstructor(
AbstractExecutableMethodsDefinition.MethodReference.class,
Class.class,
AnnotationMetadata.class,
String.class,
Argument.class,
Argument[].class,
boolean.class,
boolean.class);
private static final Constructor<?> SUPER_CONSTRUCTOR = ReflectionUtils.getRequiredInternalConstructor(
AbstractExecutableMethodsDefinition.class,
AbstractExecutableMethodsDefinition.MethodReference[].class);
private static final Method GET_METHOD = ReflectionUtils.getRequiredInternalMethod(AbstractExecutableMethodsDefinition.class, "getMethod", String.class, Class[].class);
private static final Method AT_INDEX_MATCHED_METHOD = ReflectionUtils.getRequiredInternalMethod(AbstractExecutableMethodsDefinition.class, "methodAtIndexMatches", int.class, String.class, Class[].class);
private static final String FIELD_INTERCEPTABLE = "$interceptable";
private static final int MIN_METHODS_TO_GENERATE_GET_METHOD = 5;
private final String className;
private final ClassTypeDef thisType;
private final String beanDefinitionReferenceClassName;
private final List<String> addedMethods = new ArrayList<>();
private final DispatchWriter methodDispatchWriter;
private final Set<String> methodNames = new HashSet<>();
private final AnnotationMetadata annotationMetadataWithDefaults;
private final EvaluatedExpressionProcessor evaluatedExpressionProcessor;
private final OriginatingElements originatingElements;
private final VisitorContext visitorContext;
private byte[] output;
public ExecutableMethodsDefinitionWriter(EvaluatedExpressionProcessor evaluatedExpressionProcessor,
AnnotationMetadata annotationMetadataWithDefaults,
String beanDefinitionClassName,
String beanDefinitionReferenceClassName,
OriginatingElements originatingElements, VisitorContext visitorContext) {
this.originatingElements = originatingElements;
this.annotationMetadataWithDefaults = annotationMetadataWithDefaults;
this.evaluatedExpressionProcessor = evaluatedExpressionProcessor;
this.className = beanDefinitionClassName + CLASS_SUFFIX;
this.visitorContext = visitorContext;
this.thisType = ClassTypeDef.of(className);
this.beanDefinitionReferenceClassName = beanDefinitionReferenceClassName;
this.methodDispatchWriter = new DispatchWriter(className);
}
/**
* @return The generated | ExecutableMethodsDefinitionWriter |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/request/completion/HuggingFaceUnifiedChatCompletionRequestEntity.java | {
"start": 968,
"end": 2075
} | class ____ implements ToXContentObject {
private final String modelId;
private final UnifiedChatCompletionRequestEntity unifiedRequestEntity;
/**
* Constructs a HuggingFaceUnifiedChatCompletionRequestEntity with the specified unified chat input and model ID.
*
* @param unifiedChatInput the unified chat input containing messages and parameters for the completion request
* @param modelId the Hugging Face chat completion model ID to be used for the request
*/
public HuggingFaceUnifiedChatCompletionRequestEntity(UnifiedChatInput unifiedChatInput, @Nullable String modelId) {
this.unifiedRequestEntity = new UnifiedChatCompletionRequestEntity(unifiedChatInput);
this.modelId = modelId;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
unifiedRequestEntity.toXContent(builder, UnifiedCompletionRequest.withMaxTokens(modelId, params));
builder.endObject();
return builder;
}
}
| HuggingFaceUnifiedChatCompletionRequestEntity |
java | apache__kafka | server/src/main/java/org/apache/kafka/server/ServerSocketFactory.java | {
"start": 1427,
"end": 2532
} | class ____ implements ServerSocketFactory {
@Override
public ServerSocketChannel openServerSocket(
String listenerName,
InetSocketAddress socketAddress,
int listenBacklogSize,
int recvBufferSize
) throws IOException {
ServerSocketChannel socketChannel = ServerSocketChannel.open();
try {
socketChannel.configureBlocking(false);
if (recvBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) {
socketChannel.socket().setReceiveBufferSize(recvBufferSize);
}
socketChannel.socket().bind(socketAddress, listenBacklogSize);
} catch (SocketException e) {
Utils.closeQuietly(socketChannel, "server socket");
throw new KafkaException(String.format("Socket server failed to bind to %s:%d: %s.",
socketAddress.getHostString(), socketAddress.getPort(), e.getMessage()), e);
}
return socketChannel;
}
}
}
| KafkaServerSocketFactory |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/analyzer/BeanCurrentlyInCreationFailureAnalyzerTests.java | {
"start": 9931,
"end": 10099
} | class ____ {
@Bean
BeanThree three(BeanOne one) {
return new BeanThree();
}
}
}
@Configuration(proxyBeanMethods = false)
static | BeanThreeConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/uniqueconstraint/UniqueConstraintNameTest.java | {
"start": 1377,
"end": 1749
} | class ____ {
@Id
@GeneratedValue
long id;
@OneToOne(fetch = FetchType.LAZY, optional = false)
@JoinColumn(name = "my_other_entity_id",
updatable = false,
foreignKey = @ForeignKey(name = "FK_moe"))
private MyOtherEntity myOtherEntity;
@Column(name = "some_long")
private long someLong;
}
@Entity
@Table(name = "my_other_entity")
static | MyEntity |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/AutoCloseTests.java | {
"start": 17289,
"end": 17985
} | class ____ {
@AutoClose
private static AutoCloseable staticClosable;
@AutoClose
private static final AutoCloseable nullStatic = null;
@AutoClose
private final AutoCloseable closable = new AutoCloseSpy("closable");
@AutoClose(" run ") // intentionally contains extra whitespace.
private final Runnable runnable = new AutoCloseSpy("runnable");
@AutoClose
private final AutoCloseable nullField = null;
@BeforeAll
static void setup() {
staticClosable = new AutoCloseSpy("staticClosable");
}
@Test
void test1() {
}
@Test
void test2() {
}
}
@TestInstance(PER_CLASS)
@SuppressWarnings("JUnitMalformedDeclaration")
static | InstancePerMethodTestCase |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/MemoryMappedFileAppenderTest.java | {
"start": 2022,
"end": 5536
} | class ____ {
@Test
@LoggerContextSource("MemoryMappedFileAppenderTest.xml")
void testMemMapBasics(final LoggerContext context) throws Exception {
final Logger log = context.getLogger(getClass());
final Path logFile = Paths.get("target", "MemoryMappedFileAppenderTest.log");
try {
log.warn("Test log1");
assertTrue(Files.exists(logFile));
assertEquals(MemoryMappedFileManager.DEFAULT_REGION_LENGTH, Files.size(logFile));
log.warn("Test log2");
assertEquals(MemoryMappedFileManager.DEFAULT_REGION_LENGTH, Files.size(logFile));
} finally {
context.stop();
}
final int LINESEP = System.lineSeparator().length();
assertEquals(18 + 2 * LINESEP, Files.size(logFile));
final List<String> lines = Files.readAllLines(logFile);
assertThat(lines, both(hasSize(2)).and(contains("Test log1", "Test log2")));
}
@Test
@LoggerContextSource("MemoryMappedFileAppenderRemapTest.xml")
void testMemMapExtendsIfNeeded(final LoggerContext context) throws Exception {
final Logger log = context.getLogger(getClass());
final Path logFile = Paths.get("target", "MemoryMappedFileAppenderRemapTest.log");
final char[] text = new char[256];
Arrays.fill(text, 'A');
final String str = new String(text);
try {
log.warn("Test log1");
assertTrue(Files.exists(logFile));
assertEquals(256, Files.size(logFile));
log.warn(str);
assertEquals(2 * 256, Files.size(logFile));
log.warn(str);
assertEquals(3 * 256, Files.size(logFile));
} finally {
context.stop();
}
assertEquals(521 + 3 * System.lineSeparator().length(), Files.size(logFile), "Expected file size to shrink");
final List<String> lines = Files.readAllLines(logFile);
assertThat(lines, both(hasSize(3)).and(contains("Test log1", str, str)));
}
@Test
@LoggerContextSource("MemoryMappedFileAppenderLocationTest.xml")
void testMemMapLocation(final LoggerContext context) throws Exception {
final Logger log = context.getLogger(getClass());
final Path logFile = Paths.get("target", "MemoryMappedFileAppenderLocationTest.log");
final int expectedFileLength = Integers.ceilingNextPowerOfTwo(32000);
assertEquals(32768, expectedFileLength);
try {
log.warn("Test log1");
assertTrue(Files.exists(logFile));
assertEquals(expectedFileLength, Files.size(logFile));
log.warn("Test log2");
assertEquals(expectedFileLength, Files.size(logFile));
} finally {
context.stop();
}
assertEquals(272 + 2 * System.lineSeparator().length(), Files.size(logFile), "Expected file size to shrink");
final List<String> lines = Files.readAllLines(logFile);
assertThat(
lines,
both(hasSize(2))
.and(
contains(
"org.apache.logging.log4j.core.appender.MemoryMappedFileAppenderTest.testMemMapLocation(MemoryMappedFileAppenderTest.java:105): Test log1",
"org.apache.logging.log4j.core.appender.MemoryMappedFileAppenderTest.testMemMapLocation(MemoryMappedFileAppenderTest.java:108): Test log2")));
}
}
| MemoryMappedFileAppenderTest |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/helpers/DateTimeDateFormat.java | {
"start": 1158,
"end": 2525
} | class ____ extends AbsoluteTimeDateFormat {
private static final long serialVersionUID = 5547637772208514971L;
String[] shortMonths;
public DateTimeDateFormat() {
super();
shortMonths = new DateFormatSymbols().getShortMonths();
}
public DateTimeDateFormat(final TimeZone timeZone) {
this();
setCalendar(Calendar.getInstance(timeZone));
}
/**
* Appends to <code>sbuf</code> the date in the format "dd MMM yyyy HH:mm:ss,SSS" for example, "06 Nov 1994
* 08:49:37,459".
*
* @param sbuf the string buffer to write to
*/
@Override
public StringBuffer format(final Date date, final StringBuffer sbuf, final FieldPosition fieldPosition) {
calendar.setTime(date);
final int day = calendar.get(Calendar.DAY_OF_MONTH);
if (day < 10) {
sbuf.append('0');
}
sbuf.append(day);
sbuf.append(' ');
sbuf.append(shortMonths[calendar.get(Calendar.MONTH)]);
sbuf.append(' ');
final int year = calendar.get(Calendar.YEAR);
sbuf.append(year);
sbuf.append(' ');
return super.format(date, sbuf, fieldPosition);
}
/**
* Always returns null.
*/
@Override
public Date parse(final String s, final ParsePosition pos) {
return null;
}
}
| DateTimeDateFormat |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/misuse/DetectingMisusedMatchersTest.java | {
"start": 934,
"end": 3146
} | class ____ {
final Object finalMethod(Object object) {
return null;
}
}
@Mock private WithFinal withFinal;
@After
public void resetState() {
super.resetState();
}
private void misplaced_any_argument_matcher() {
Object ignored = any();
}
private void misplaced_anyInt_argument_matcher() {
int ignored = anyInt();
}
private void misplaced_anyBoolean_argument_matcher() {
boolean ignored = anyBoolean();
}
@Test
public void should_fail_fast_when_argument_matchers_are_abused() {
misplaced_any_argument_matcher();
try {
mock(IMethods.class);
fail();
} catch (InvalidUseOfMatchersException e) {
assertThat(e).hasMessageContaining("Misplaced or misused argument matcher");
}
}
@Test
public void should_report_argument_locations_when_argument_matchers_misused() {
try {
Observer observer = mock(Observer.class);
misplaced_anyInt_argument_matcher();
misplaced_any_argument_matcher();
misplaced_anyBoolean_argument_matcher();
observer.update(null, null);
validateMockitoUsage();
fail();
} catch (InvalidUseOfMatchersException e) {
assertThat(e)
.hasMessageContaining(
"DetectingMisusedMatchersTest.misplaced_anyInt_argument_matcher")
.hasMessageContaining(
"DetectingMisusedMatchersTest.misplaced_any_argument_matcher")
.hasMessageContaining(
"DetectingMisusedMatchersTest.misplaced_anyBoolean_argument_matcher");
}
}
@SuppressWarnings({"MockitoUsage", "CheckReturnValue"})
@Test
public void shouldSayUnfinishedVerificationButNotInvalidUseOfMatchers() {
assumeTrue("Does not apply for inline mocks", withFinal.getClass() != WithFinal.class);
verify(withFinal).finalMethod(any());
try {
verify(withFinal);
fail();
} catch (UnfinishedVerificationException e) {
}
}
}
| WithFinal |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/QualifierOrScopeOnInjectMethodTest.java | {
"start": 2766,
"end": 2961
} | class ____ {
@Provides
@Named("bar")
int something() {
return 42;
}
}
""")
.doTest();
}
}
| Foo |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/errorhandler/ErrorHandlerRefProperties.java | {
"start": 996,
"end": 1291
} | interface ____ extends ErrorHandlerFactory {
String DEFAULT_ERROR_HANDLER_BUILDER = "CamelDefaultErrorHandlerBuilder";
String getRef();
void setRef(String ref);
boolean isSupportTransacted();
void setSupportTransacted(boolean supportTransacted);
}
| ErrorHandlerRefProperties |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/ClientJwtValidator.java | {
"start": 2807,
"end": 6126
} | class ____ implements JwtValidator {
private static final Logger log = LoggerFactory.getLogger(ClientJwtValidator.class);
public static final String EXPIRATION_CLAIM_NAME = "exp";
public static final String ISSUED_AT_CLAIM_NAME = "iat";
private String scopeClaimName;
private String subClaimName;
@Override
public void configure(Map<String, ?> configs, String saslMechanism, List<AppConfigurationEntry> jaasConfigEntries) {
ConfigurationUtils cu = new ConfigurationUtils(configs, saslMechanism);
this.scopeClaimName = ClaimValidationUtils.validateClaimNameOverride(
DEFAULT_SASL_OAUTHBEARER_SCOPE_CLAIM_NAME,
cu.get(SASL_OAUTHBEARER_SCOPE_CLAIM_NAME)
);
this.subClaimName = ClaimValidationUtils.validateClaimNameOverride(
DEFAULT_SASL_OAUTHBEARER_SUB_CLAIM_NAME,
cu.get(SASL_OAUTHBEARER_SUB_CLAIM_NAME)
);
}
/**
* Accepts an OAuth JWT access token in base-64 encoded format, validates, and returns an
* OAuthBearerToken.
*
* @param accessToken Non-<code>null</code> JWT access token
* @return {@link OAuthBearerToken}
* @throws JwtValidatorException Thrown on errors performing validation of given token
*/
@SuppressWarnings("unchecked")
public OAuthBearerToken validate(String accessToken) throws JwtValidatorException {
SerializedJwt serializedJwt = new SerializedJwt(accessToken);
Map<String, Object> payload;
try {
payload = OAuthBearerUnsecuredJws.toMap(serializedJwt.getPayload());
} catch (OAuthBearerIllegalTokenException e) {
throw new JwtValidatorException(String.format("Could not validate the access token: %s", e.getMessage()), e);
}
Object scopeRaw = getClaim(payload, scopeClaimName);
Collection<String> scopeRawCollection;
if (scopeRaw instanceof String)
scopeRawCollection = Collections.singletonList((String) scopeRaw);
else if (scopeRaw instanceof Collection)
scopeRawCollection = (Collection<String>) scopeRaw;
else
scopeRawCollection = Collections.emptySet();
Number expirationRaw = (Number) getClaim(payload, EXPIRATION_CLAIM_NAME);
String subRaw = (String) getClaim(payload, subClaimName);
Number issuedAtRaw = (Number) getClaim(payload, ISSUED_AT_CLAIM_NAME);
Set<String> scopes = ClaimValidationUtils.validateScopes(scopeClaimName, scopeRawCollection);
long expiration = ClaimValidationUtils.validateExpiration(EXPIRATION_CLAIM_NAME,
expirationRaw != null ? expirationRaw.longValue() * 1000L : null);
String subject = ClaimValidationUtils.validateSubject(subClaimName, subRaw);
Long issuedAt = ClaimValidationUtils.validateIssuedAt(ISSUED_AT_CLAIM_NAME,
issuedAtRaw != null ? issuedAtRaw.longValue() * 1000L : null);
return new BasicOAuthBearerToken(accessToken,
scopes,
expiration,
subject,
issuedAt);
}
private Object getClaim(Map<String, Object> payload, String claimName) {
Object value = payload.get(claimName);
log.debug("getClaim - {}: {}", claimName, value);
return value;
}
}
| ClientJwtValidator |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/cglib/core/KeyFactory.java | {
"start": 2450,
"end": 4818
} | class ____ {
private static final Signature GET_NAME =
TypeUtils.parseSignature("String getName()");
private static final Signature GET_CLASS =
TypeUtils.parseSignature("Class getClass()");
private static final Signature HASH_CODE =
TypeUtils.parseSignature("int hashCode()");
private static final Signature EQUALS =
TypeUtils.parseSignature("boolean equals(Object)");
private static final Signature TO_STRING =
TypeUtils.parseSignature("String toString()");
private static final Signature APPEND_STRING =
TypeUtils.parseSignature("StringBuffer append(String)");
private static final Type KEY_FACTORY =
TypeUtils.parseType("org.springframework.cglib.core.KeyFactory");
private static final Signature GET_SORT =
TypeUtils.parseSignature("int getSort()");
//generated numbers:
private static final int PRIMES[] = {
11, 73, 179, 331,
521, 787, 1213, 1823,
2609, 3691, 5189, 7247,
10037, 13931, 19289, 26627,
36683, 50441, 69403, 95401,
131129, 180179, 247501, 340057,
467063, 641371, 880603, 1209107,
1660097, 2279161, 3129011, 4295723,
5897291, 8095873, 11114263, 15257791,
20946017, 28754629, 39474179, 54189869,
74391461, 102123817, 140194277, 192456917,
264202273, 362693231, 497900099, 683510293,
938313161, 1288102441, 1768288259};
public static final Customizer CLASS_BY_NAME = (e, type) -> {
if (type.equals(Constants.TYPE_CLASS)) {
e.invoke_virtual(Constants.TYPE_CLASS, GET_NAME);
}
};
public static final FieldTypeCustomizer STORE_CLASS_AS_STRING = new FieldTypeCustomizer() {
@Override
public void customize(CodeEmitter e, int index, Type type) {
if (type.equals(Constants.TYPE_CLASS)) {
e.invoke_virtual(Constants.TYPE_CLASS, GET_NAME);
}
}
@Override
public Type getOutType(int index, Type type) {
if (type.equals(Constants.TYPE_CLASS)) {
return Constants.TYPE_STRING;
}
return type;
}
};
/**
* {@link Type#hashCode()} is very expensive as it traverses full descriptor to calculate hash code.
* This customizer uses {@link Type#getSort()} as a hash code.
*/
public static final HashCodeCustomizer HASH_ASM_TYPE = (e, type) -> {
if (Constants.TYPE_TYPE.equals(type)) {
e.invoke_virtual(type, GET_SORT);
return true;
}
return false;
};
/**
* @deprecated this customizer might result in unexpected | KeyFactory |
java | google__gson | gson/src/main/java/com/google/gson/FieldAttributes.java | {
"start": 2487,
"end": 2755
} | class ____ {
* private String bar;
* private List<String> red;
* }
* </pre>
*
* <p>This method would return {@code String.class} for the {@code bar} field and {@code
* List.class} for the {@code red} field.
*
* @return the specific | Foo |
java | apache__camel | components/camel-aws/camel-aws2-mq/src/generated/java/org/apache/camel/component/aws2/mq/MQ2ComponentConfigurer.java | {
"start": 734,
"end": 10692
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
private org.apache.camel.component.aws2.mq.MQ2Configuration getOrCreateConfiguration(MQ2Component target) {
if (target.getConfiguration() == null) {
target.setConfiguration(new org.apache.camel.component.aws2.mq.MQ2Configuration());
}
return target.getConfiguration();
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
MQ2Component target = (MQ2Component) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesskey":
case "accessKey": getOrCreateConfiguration(target).setAccessKey(property(camelContext, java.lang.String.class, value)); return true;
case "amazonmqclient":
case "amazonMqClient": getOrCreateConfiguration(target).setAmazonMqClient(property(camelContext, software.amazon.awssdk.services.mq.MqClient.class, value)); return true;
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.aws2.mq.MQ2Configuration.class, value)); return true;
case "healthcheckconsumerenabled":
case "healthCheckConsumerEnabled": target.setHealthCheckConsumerEnabled(property(camelContext, boolean.class, value)); return true;
case "healthcheckproducerenabled":
case "healthCheckProducerEnabled": target.setHealthCheckProducerEnabled(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "operation": getOrCreateConfiguration(target).setOperation(property(camelContext, org.apache.camel.component.aws2.mq.MQ2Operations.class, value)); return true;
case "overrideendpoint":
case "overrideEndpoint": getOrCreateConfiguration(target).setOverrideEndpoint(property(camelContext, boolean.class, value)); return true;
case "pojorequest":
case "pojoRequest": getOrCreateConfiguration(target).setPojoRequest(property(camelContext, boolean.class, value)); return true;
case "profilecredentialsname":
case "profileCredentialsName": getOrCreateConfiguration(target).setProfileCredentialsName(property(camelContext, java.lang.String.class, value)); return true;
case "proxyhost":
case "proxyHost": getOrCreateConfiguration(target).setProxyHost(property(camelContext, java.lang.String.class, value)); return true;
case "proxyport":
case "proxyPort": getOrCreateConfiguration(target).setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true;
case "proxyprotocol":
case "proxyProtocol": getOrCreateConfiguration(target).setProxyProtocol(property(camelContext, software.amazon.awssdk.core.Protocol.class, value)); return true;
case "region": getOrCreateConfiguration(target).setRegion(property(camelContext, java.lang.String.class, value)); return true;
case "secretkey":
case "secretKey": getOrCreateConfiguration(target).setSecretKey(property(camelContext, java.lang.String.class, value)); return true;
case "sessiontoken":
case "sessionToken": getOrCreateConfiguration(target).setSessionToken(property(camelContext, java.lang.String.class, value)); return true;
case "trustallcertificates":
case "trustAllCertificates": getOrCreateConfiguration(target).setTrustAllCertificates(property(camelContext, boolean.class, value)); return true;
case "uriendpointoverride":
case "uriEndpointOverride": getOrCreateConfiguration(target).setUriEndpointOverride(property(camelContext, java.lang.String.class, value)); return true;
case "usedefaultcredentialsprovider":
case "useDefaultCredentialsProvider": getOrCreateConfiguration(target).setUseDefaultCredentialsProvider(property(camelContext, boolean.class, value)); return true;
case "useprofilecredentialsprovider":
case "useProfileCredentialsProvider": getOrCreateConfiguration(target).setUseProfileCredentialsProvider(property(camelContext, boolean.class, value)); return true;
case "usesessioncredentials":
case "useSessionCredentials": getOrCreateConfiguration(target).setUseSessionCredentials(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"amazonMqClient"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesskey":
case "accessKey": return java.lang.String.class;
case "amazonmqclient":
case "amazonMqClient": return software.amazon.awssdk.services.mq.MqClient.class;
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "configuration": return org.apache.camel.component.aws2.mq.MQ2Configuration.class;
case "healthcheckconsumerenabled":
case "healthCheckConsumerEnabled": return boolean.class;
case "healthcheckproducerenabled":
case "healthCheckProducerEnabled": return boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "operation": return org.apache.camel.component.aws2.mq.MQ2Operations.class;
case "overrideendpoint":
case "overrideEndpoint": return boolean.class;
case "pojorequest":
case "pojoRequest": return boolean.class;
case "profilecredentialsname":
case "profileCredentialsName": return java.lang.String.class;
case "proxyhost":
case "proxyHost": return java.lang.String.class;
case "proxyport":
case "proxyPort": return java.lang.Integer.class;
case "proxyprotocol":
case "proxyProtocol": return software.amazon.awssdk.core.Protocol.class;
case "region": return java.lang.String.class;
case "secretkey":
case "secretKey": return java.lang.String.class;
case "sessiontoken":
case "sessionToken": return java.lang.String.class;
case "trustallcertificates":
case "trustAllCertificates": return boolean.class;
case "uriendpointoverride":
case "uriEndpointOverride": return java.lang.String.class;
case "usedefaultcredentialsprovider":
case "useDefaultCredentialsProvider": return boolean.class;
case "useprofilecredentialsprovider":
case "useProfileCredentialsProvider": return boolean.class;
case "usesessioncredentials":
case "useSessionCredentials": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
MQ2Component target = (MQ2Component) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesskey":
case "accessKey": return getOrCreateConfiguration(target).getAccessKey();
case "amazonmqclient":
case "amazonMqClient": return getOrCreateConfiguration(target).getAmazonMqClient();
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "configuration": return target.getConfiguration();
case "healthcheckconsumerenabled":
case "healthCheckConsumerEnabled": return target.isHealthCheckConsumerEnabled();
case "healthcheckproducerenabled":
case "healthCheckProducerEnabled": return target.isHealthCheckProducerEnabled();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "operation": return getOrCreateConfiguration(target).getOperation();
case "overrideendpoint":
case "overrideEndpoint": return getOrCreateConfiguration(target).isOverrideEndpoint();
case "pojorequest":
case "pojoRequest": return getOrCreateConfiguration(target).isPojoRequest();
case "profilecredentialsname":
case "profileCredentialsName": return getOrCreateConfiguration(target).getProfileCredentialsName();
case "proxyhost":
case "proxyHost": return getOrCreateConfiguration(target).getProxyHost();
case "proxyport":
case "proxyPort": return getOrCreateConfiguration(target).getProxyPort();
case "proxyprotocol":
case "proxyProtocol": return getOrCreateConfiguration(target).getProxyProtocol();
case "region": return getOrCreateConfiguration(target).getRegion();
case "secretkey":
case "secretKey": return getOrCreateConfiguration(target).getSecretKey();
case "sessiontoken":
case "sessionToken": return getOrCreateConfiguration(target).getSessionToken();
case "trustallcertificates":
case "trustAllCertificates": return getOrCreateConfiguration(target).isTrustAllCertificates();
case "uriendpointoverride":
case "uriEndpointOverride": return getOrCreateConfiguration(target).getUriEndpointOverride();
case "usedefaultcredentialsprovider":
case "useDefaultCredentialsProvider": return getOrCreateConfiguration(target).isUseDefaultCredentialsProvider();
case "useprofilecredentialsprovider":
case "useProfileCredentialsProvider": return getOrCreateConfiguration(target).isUseProfileCredentialsProvider();
case "usesessioncredentials":
case "useSessionCredentials": return getOrCreateConfiguration(target).isUseSessionCredentials();
default: return null;
}
}
}
| MQ2ComponentConfigurer |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/SimpleTryCatchFinallyTest.java | {
"start": 973,
"end": 1946
} | class ____ extends ContextTestSupport {
@Test
public void testSimpleTryThrowExceptionFinally() throws Exception {
getMockEndpoint("mock:try").expectedMessageCount(1);
getMockEndpoint("mock:catch").expectedMessageCount(1);
getMockEndpoint("mock:finally").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").doTry().to("mock:try").throwException(new IllegalArgumentException("Damn"))
.doCatch(IllegalArgumentException.class).to("mock:catch")
.doFinally().to("mock:finally").end().to("mock:result");
}
};
}
}
| SimpleTryCatchFinallyTest |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/io/service/SoftServiceLoader.java | {
"start": 4004,
"end": 11171
} | class ____
* @param <S> The service generic type
* @return A new service loader
*/
public static <S> SoftServiceLoader<S> load(Class<S> service,
ClassLoader loader,
Predicate<String> condition) {
return new SoftServiceLoader<>(service, loader, condition);
}
public SoftServiceLoader<S> disableFork() {
allowFork = false;
return this;
}
/**
* @return Return the first such instance
*/
public Optional<ServiceDefinition<S>> first() {
Iterator<ServiceDefinition<S>> i = iterator();
if (i.hasNext()) {
return Optional.of(i.next());
}
return Optional.empty();
}
/**
* Find the first service definition that is {@link ServiceDefinition#isPresent() present}, and
* then {@link ServiceDefinition#load() load} it.
*
* @return Return the first such instance, or {@link Optional#empty()} if there is no definition
* or none of the definitions are present on the classpath.
*/
public Optional<S> firstAvailable() {
for (ServiceDefinition<S> def : this) {
if (def.isPresent()) {
return Optional.of(def.load());
}
}
return Optional.empty();
}
/**
* @param alternative An alternative type to use if this type is not present
* @param classLoader The classloader
* @return Return the first such instance
*/
public Optional<ServiceDefinition<S>> firstOr(String alternative, ClassLoader classLoader) {
Iterator<ServiceDefinition<S>> i = iterator();
if (i.hasNext()) {
return Optional.of(i.next());
}
@SuppressWarnings("unchecked") Class<S> alternativeClass = (Class<S>) ClassUtils.forName(alternative, classLoader)
.orElse(null);
if (alternativeClass != null) {
return Optional.of(createService(alternative, alternativeClass));
}
return Optional.empty();
}
/**
* Collects all initialized instances.
*
* @param values The collection to be populated.
* @param predicate The predicated to filter the instances or null if not needed.
*/
@SuppressWarnings("unchecked")
public void collectAll(@NonNull Collection<S> values, @Nullable Predicate<S> predicate) {
String name = serviceType.getName();
SoftServiceLoader.StaticServiceLoader<?> serviceLoader = STATIC_SERVICES.get(name);
if (serviceLoader != null) {
collectStaticServices(values, predicate, (StaticServiceLoader<S>) serviceLoader);
} else {
collectDynamicServices(values, predicate, name);
}
}
private void collectDynamicServices(
Collection<S> values,
Predicate<S> predicate,
String name) {
ServiceCollector<S> collector = newCollector(name, condition, classLoader, className -> {
try {
@SuppressWarnings("unchecked") final Class<S> loadedClass =
(Class<S>) Class.forName(className, false, classLoader);
// MethodHandler should more performant than the basic reflection
S result = (S) LOOKUP.findConstructor(loadedClass, VOID_TYPE).invoke();
if (predicate != null && !predicate.test(result)) {
return null;
}
return result;
} catch (NoClassDefFoundError | ClassNotFoundException | NoSuchMethodException | IllegalAccessException e) {
// Ignore
} catch (Throwable e) {
throw new ServiceLoadingException(e);
}
return null;
});
collector.collect(values, allowFork);
}
private void collectStaticServices(Collection<S> values, Predicate<S> predicate, StaticServiceLoader<S> loader) {
values.addAll(loader.load(predicate));
}
/**
* Collects all initialized instances.
*
* @param values The collection to be populated.
*/
public void collectAll(@NonNull Collection<S> values) {
collectAll(values, null);
}
/**
* Collects all initialized instances.
*
* @return The instances of this service.
*/
public List<S> collectAll() {
return collectAll((Predicate<S>) null);
}
/**
* Collects all initialized instances.
*
* @param predicate The predicated to filter the instances or null if not needed.
* @return The instances of this service.
*/
public List<S> collectAll(Predicate<S> predicate) {
List<S> values = new ArrayList<>();
collectAll(values, predicate);
return values;
}
/**
* @return The iterator
*/
@Override
@NonNull
public Iterator<ServiceDefinition<S>> iterator() {
if (servicesForIterator == null) {
if (STATIC_SERVICES.containsKey(serviceType.getName())) {
@SuppressWarnings("unchecked")
StaticServiceLoader<S> staticServiceLoader = (StaticServiceLoader<S>) STATIC_SERVICES.get(serviceType.getName());
this.servicesForIterator = staticServiceLoader.findAll(s -> condition == null || condition.test(s.getClass().getName()))
.collect(Collectors.toList());
} else {
List<ServiceDefinition<S>> serviceDefinitions = new ArrayList<>();
newCollector(serviceType.getName(), condition, classLoader, name -> {
try {
@SuppressWarnings("unchecked")
final Class<S> loadedClass = (Class<S>) Class.forName(name, false, classLoader);
return createService(name, loadedClass);
} catch (NoClassDefFoundError | ClassNotFoundException e) {
return createService(name, null);
}
}).collect(serviceDefinitions, false);
this.servicesForIterator = serviceDefinitions;
}
}
return servicesForIterator.iterator();
}
/**
* @param name The name
* @param loadedClass The loaded class
* @return The service definition
*/
private ServiceDefinition<S> createService(String name, Class<S> loadedClass) {
return new DefaultServiceDefinition<>(name, loadedClass);
}
public static <S> ServiceCollector<S> newCollector(String serviceName,
Predicate<String> lineCondition,
ClassLoader classLoader,
Function<String, S> transformer) {
return new ServiceScanner<>(classLoader, serviceName, lineCondition, transformer).createCollector();
}
/**
* A {@link ServiceDefinition} implementation that uses a {@link MethodHandles.Lookup} object to find a public constructor.
*
* @param <S> The service type
*/
public static final | name |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java | {
"start": 8127,
"end": 8351
} | interface ____<T extends SpatialPoint, V> extends Supplier<V> {
T getInternalCentroid();
BoundingBox<T> getInternalBoundingBox();
T getInternalLabelPosition();
}
public static | GeometrySupplier |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/beanmanager/BeanManagerTest.java | {
"start": 13533,
"end": 13676
} | class ____ extends Fool {
}
@Target({ TYPE, METHOD })
@Retention(RUNTIME)
@Documented
@InterceptorBinding
public @ | LowFool |
java | quarkusio__quarkus | test-framework/junit5-internal/src/main/java/io/quarkus/test/QuarkusDevModeTest.java | {
"start": 3896,
"end": 20737
} | class ____
implements BeforeAllCallback, AfterAllCallback, BeforeEachCallback, AfterEachCallback, InvocationInterceptor {
private static final Logger rootLogger;
public static final OpenOption[] OPEN_OPTIONS = { StandardOpenOption.SYNC, StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE };
private Handler[] originalRootLoggerHandlers;
static {
System.setProperty("java.util.logging.manager", "org.jboss.logmanager.LogManager");
java.util.logging.Logger logger = LogManager.getLogManager().getLogger("");
if (!(logger instanceof org.jboss.logmanager.Logger)) {
throw new IllegalStateException(
"QuarkusDevModeTest must be used with the the JBoss LogManager. See https://quarkus.io/guides/logging#how-to-configure-logging-for-quarkustest for an example of how to configure it in Maven.");
}
rootLogger = (org.jboss.logmanager.Logger) logger;
}
private DevModeMain devModeMain;
private Path deploymentDir;
private Supplier<JavaArchive> archiveProducer;
private Supplier<JavaArchive> testArchiveProducer;
private List<String> codeGenSources = Collections.emptyList();
private InMemoryLogHandler inMemoryLogHandler = new InMemoryLogHandler((r) -> false);
private Path deploymentSourceParentPath;
private Path deploymentSourcePath;
private Path deploymentResourcePath;
private Path deploymentTestSourceParentPath;
private Path deploymentTestSourcePath;
private Path deploymentTestResourcePath;
private Path projectSourceRoot;
private Path testLocation;
private String[] commandLineArgs = new String[0];
private final Map<String, String> oldSystemProps = new HashMap<>();
private final Map<String, String> buildSystemProperties = new HashMap<>();
private boolean allowFailedStart = false;
private static final List<CompilationProvider> compilationProviders;
static {
List<CompilationProvider> providers = new ArrayList<>();
for (CompilationProvider provider : ServiceLoader.load(CompilationProvider.class)) {
providers.add(provider);
}
compilationProviders = Collections.unmodifiableList(providers);
}
public Supplier<JavaArchive> getArchiveProducer() {
return archiveProducer;
}
public QuarkusDevModeTest setArchiveProducer(Supplier<JavaArchive> archiveProducer) {
this.archiveProducer = archiveProducer;
return this;
}
/**
* Customize the application root.
*
* @param applicationRootConsumer
* @return self
*/
public QuarkusDevModeTest withApplicationRoot(Consumer<JavaArchive> applicationRootConsumer) {
Objects.requireNonNull(applicationRootConsumer);
return setArchiveProducer(() -> {
JavaArchive jar = ShrinkWrap.create(JavaArchive.class);
applicationRootConsumer.accept(jar);
return jar;
});
}
/**
* Use an empty application for the test
*
* @return self
*/
public QuarkusDevModeTest withEmptyApplication() {
return withApplicationRoot(new Consumer<JavaArchive>() {
@Override
public void accept(JavaArchive javaArchive) {
}
});
}
public QuarkusDevModeTest setTestArchiveProducer(Supplier<JavaArchive> testArchiveProducer) {
this.testArchiveProducer = testArchiveProducer;
return this;
}
/**
* Customize the application root.
*
* @param testArchiveConsumer
* @return self
*/
public QuarkusDevModeTest withTestArchive(Consumer<JavaArchive> testArchiveConsumer) {
Objects.requireNonNull(testArchiveConsumer);
return setTestArchiveProducer(() -> {
JavaArchive jar = ShrinkWrap.create(JavaArchive.class);
testArchiveConsumer.accept(jar);
return jar;
});
}
public QuarkusDevModeTest setCodeGenSources(String... codeGenSources) {
this.codeGenSources = Arrays.asList(codeGenSources);
return this;
}
@Deprecated(forRemoval = true)
public QuarkusDevModeTest setLogFileName(String logFileName) {
PropertyTestUtil.setLogFileProperty(logFileName);
return this;
}
public QuarkusDevModeTest setLogRecordPredicate(Predicate<LogRecord> predicate) {
this.inMemoryLogHandler = new InMemoryLogHandler(predicate);
return this;
}
public List<LogRecord> getLogRecords() {
return inMemoryLogHandler.records;
}
public void clearLogRecords() {
inMemoryLogHandler.clearRecords();
}
public QuarkusDevModeTest setBuildSystemProperty(String name, String value) {
buildSystemProperties.put(name, value);
return this;
}
@Override
public <T> T interceptTestClassConstructor(Invocation<T> invocation,
ReflectiveInvocationContext<Constructor<T>> invocationContext,
ExtensionContext extensionContext) throws Throwable {
T actualTestInstance = invocation.proceed();
TestHTTPResourceManager.inject(actualTestInstance);
return actualTestInstance;
}
@Override
public void beforeAll(ExtensionContext context) {
((TestConfigProviderResolver) ConfigProviderResolver.instance()).getConfig(LaunchMode.DEVELOPMENT);
TestConfigUtil.cleanUp();
GroovyClassValue.disable();
originalRootLoggerHandlers = rootLogger.getHandlers();
rootLogger.addHandler(inMemoryLogHandler);
}
@Override
public void beforeEach(ExtensionContext extensionContext) throws Exception {
if (archiveProducer == null) {
throw new RuntimeException("QuarkusDevModeTest does not have archive producer set");
}
ExclusivityChecker.checkTestType(extensionContext, QuarkusDevModeTest.class);
ExtensionContext.Store store = extensionContext.getRoot().getStore(ExtensionContext.Namespace.GLOBAL);
if (store.get(TestResourceManager.class.getName()) == null) {
TestResourceManager testResourceManager = new TestResourceManager(extensionContext.getRequiredTestClass());
testResourceManager.init(null);
Map<String, String> properties = testResourceManager.start();
TestResourceManager tm = testResourceManager;
store.put(TestResourceManager.class.getName(), testResourceManager);
store.put(TestResourceManager.CLOSEABLE_NAME, tm);
}
TestResourceManager tm = (TestResourceManager) store.get(TestResourceManager.class.getName());
//dev mode tests just use system properties
//we set them here and clear them in afterAll
//so they don't interfere with other tests
for (Map.Entry<String, String> i : tm.getConfigProperties().entrySet()) {
oldSystemProps.put(i.getKey(), System.getProperty(i.getKey()));
if (i.getValue() == null) {
System.clearProperty(i.getKey());
} else {
System.setProperty(i.getKey(), i.getValue());
}
}
Class<?> testClass = extensionContext.getRequiredTestClass();
try {
deploymentDir = Files.createTempDirectory("quarkus-dev-mode-test");
testLocation = PathTestHelper.getTestClassesLocation(testClass);
//TODO: this is a huge hack, at the moment this just guesses the source location
//this can be improved, but as this is for testing extensions it is probably fine for now
String sourcePath = System.getProperty("quarkus.test.source-path");
if (sourcePath == null) {
//TODO: massive hack, make sure this works in eclipse
projectSourceRoot = testLocation.getParent().getParent().resolve("src/test/java");
} else {
projectSourceRoot = Paths.get(sourcePath);
}
devModeMain = newDevModeMain(extensionContext, deploymentDir, projectSourceRoot);
devModeMain.start();
ApplicationStateNotification.waitForApplicationStart();
} catch (Exception e) {
if (allowFailedStart) {
e.printStackTrace();
} else {
throw (e instanceof RuntimeException ? (RuntimeException) e : new RuntimeException(e));
}
}
}
@Override
public void afterAll(ExtensionContext context) {
for (Map.Entry<String, String> e : oldSystemProps.entrySet()) {
if (e.getValue() == null) {
System.clearProperty(e.getKey());
} else {
System.setProperty(e.getKey(), e.getValue());
}
}
rootLogger.setHandlers(originalRootLoggerHandlers);
inMemoryLogHandler.clearRecords();
inMemoryLogHandler.setFilter(null);
ClearCache.clearCaches();
TestConfigUtil.cleanUp();
((TestConfigProviderResolver) ConfigProviderResolver.instance()).restoreConfig();
}
@Override
public void afterEach(ExtensionContext extensionContext) throws Exception {
try {
if (devModeMain != null) {
devModeMain.close();
devModeMain = null;
}
} finally {
if (deploymentDir != null) {
FileUtil.deleteDirectory(deploymentDir);
}
}
inMemoryLogHandler.clearRecords();
}
private DevModeMain newDevModeMain(ExtensionContext extensionContext, Path deploymentDir, Path testSourceDir) {
try {
deploymentSourcePath = deploymentDir.resolve("src/main/java");
deploymentSourceParentPath = deploymentDir.resolve("src/main");
deploymentResourcePath = deploymentDir.resolve("src/main/resources");
Path classes = deploymentDir.resolve("target/classes");
Path targetDir = deploymentDir.resolve("target");
Path cache = deploymentDir.resolve("target/dev-cache");
Files.createDirectories(deploymentSourcePath);
Files.createDirectories(deploymentResourcePath);
Files.createDirectories(classes);
Files.createDirectories(cache);
JavaArchive archive = archiveProducer.get();
exportAndGenerateSourceTree(archive, classes, testSourceDir, deploymentSourcePath, deploymentResourcePath);
// TODO: again a hack, assumes the sources dir is one dir above java sources path
final Path testSourcesParentDir = projectSourceRoot.getParent();
copyCodeGenSources(testSourcesParentDir, deploymentSourceParentPath, codeGenSources);
//debugging code
ExportUtil.exportToQuarkusDeploymentPath(archive);
DevModeContext context = new DevModeContext();
context.setBaseName(extensionContext.getDisplayName() + " (QuarkusDevModeTest)");
context.setArgs(commandLineArgs);
context.setTest(true);
context.setAbortOnFailedStart(!allowFailedStart);
context.getBuildSystemProperties().put("quarkus.banner.enabled", "false");
context.getBuildSystemProperties().put("quarkus.console.disable-input", "true"); //surefire communicates via stdin, we don't want the test to be reading input
context.getBuildSystemProperties().putAll(buildSystemProperties);
context.setCacheDir(cache.toFile());
final DevModeContext.ModuleInfo.Builder moduleBuilder = new DevModeContext.ModuleInfo.Builder()
.setProjectDirectory(deploymentDir.toAbsolutePath().toString())
.setSourcePaths(PathList.of(deploymentSourcePath.toAbsolutePath()))
.setClassesPath(classes.toAbsolutePath().toString())
.setResourcePaths(PathList.of(deploymentResourcePath.toAbsolutePath()))
.setResourcesOutputPath(classes.toAbsolutePath().toString())
.setSourceParents(PathList.of(deploymentSourceParentPath.toAbsolutePath()))
.setPreBuildOutputDir(targetDir.resolve("generated-sources").toAbsolutePath().toString())
.setTargetDir(targetDir.toAbsolutePath().toString());
final WorkspaceModule.Mutable testModuleBuilder = WorkspaceModule.builder()
.addArtifactSources(ArtifactSources.main(
SourceDir.of(deploymentSourcePath, classes),
SourceDir.of(deploymentResourcePath, classes)))
.setBuildDir(targetDir)
.setModuleDir(deploymentDir);
//now tests, if required
if (testArchiveProducer != null) {
deploymentTestSourcePath = deploymentDir.resolve("src/test/java");
deploymentTestSourceParentPath = deploymentDir.resolve("src/test");
deploymentTestResourcePath = deploymentDir.resolve("src/test/resources");
Path testClasses = deploymentDir.resolve("target/test-classes");
Files.createDirectories(deploymentTestSourcePath);
Files.createDirectories(deploymentTestResourcePath);
Files.createDirectories(testClasses);
testModuleBuilder.addArtifactSources(ArtifactSources.test(
SourceDir.of(deploymentTestSourcePath, testClasses),
SourceDir.of(deploymentTestResourcePath, testClasses)));
exportAndGenerateSourceTree(testArchiveProducer.get(), testClasses, testSourceDir, deploymentTestSourcePath,
deploymentTestResourcePath);
moduleBuilder
.setTestSourcePaths(PathList.of(deploymentTestSourcePath.toAbsolutePath()))
.setTestClassesPath(testClasses.toAbsolutePath().toString())
.setTestResourcePaths(PathList.of(deploymentTestResourcePath.toAbsolutePath()))
.setTestResourcesOutputPath(testClasses.toAbsolutePath().toString());
}
final ApplicationModel originalAppModel = resolveOriginalAppModel();
var testArtifact = originalAppModel.getAppArtifact();
context.setApplicationRoot(moduleBuilder.setArtifactKey(testArtifact.getKey()).build());
return new DevModeMain(context, new DevModeTestApplicationModel(
ResolvedDependencyBuilder.newInstance()
.setCoords(testArtifact)
.setResolvedPath(classes)
.setWorkspaceModule(testModuleBuilder.setModuleId(
WorkspaceModuleId.of(testArtifact.getGroupId(),
testArtifact.getArtifactId(), testArtifact.getVersion()))
.build())
.build(),
originalAppModel));
} catch (Exception e) {
throw new RuntimeException("Unable to create the archive", e);
}
}
/**
* Resolves an {@link ApplicationModel} for the current module running the test.
*
* @return application model for the module runnign the test
*/
private static ApplicationModel resolveOriginalAppModel() {
try {
return BootstrapAppModelFactory.newInstance()
.setTest(true)
.setDevMode(true)
.setProjectRoot(Path.of("").normalize().toAbsolutePath())
.resolveAppModel()
.getApplicationModel();
} catch (BootstrapException e) {
throw new RuntimeException("Failed to resolve the ApplicationModel", e);
}
}
/**
* Exports the archive to the classes directory and creates a source tree.
*
* @param archive application archive
* @param classes target classes directory
* @param testSourceDir test sources directory
* @param deploymentSourcePath test application sources directory
* @param deploymentResourcePath test application resources directory
* @throws IOException in case of an IO failure
*/
private void exportAndGenerateSourceTree(JavaArchive archive, Path classes, Path testSourceDir,
Path deploymentSourcePath, Path deploymentResourcePath) throws IOException {
//first we export the archive
//then we attempt to generate a source tree
archive.as(ExplodedExporter.class).exportExplodedInto(classes.toFile());
copyFromSource(testSourceDir, deploymentSourcePath, classes);
//now copy resources
//we assume everything that is not a . | QuarkusDevModeTest |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/util/SpringCompatUtils.java | {
"start": 1749,
"end": 6694
} | class ____ {
private static volatile Boolean factoryMethodMetadataEnabled = null;
private static final Log logger = LogFactory.getLog(SpringCompatUtils.class);
public static <T> T getPropertyValue(PropertyValues pvs, String propertyName) {
PropertyValue pv = pvs.getPropertyValue(propertyName);
Object val = pv != null ? pv.getValue() : null;
if (val instanceof TypedStringValue) {
TypedStringValue typedString = (TypedStringValue) val;
return (T) typedString.getValue();
}
return (T) val;
}
public static boolean isFactoryMethodMetadataEnabled() {
if (factoryMethodMetadataEnabled == null) {
try {
// check AnnotatedBeanDefinition.getFactoryMethodMetadata() since spring 4.1
AnnotatedBeanDefinition.class.getMethod("getFactoryMethodMetadata");
// check MethodMetadata.getReturnTypeName() since spring 4.2
MethodMetadata.class.getMethod("getReturnTypeName");
factoryMethodMetadataEnabled = true;
} catch (NoSuchMethodException e) {
factoryMethodMetadataEnabled = false;
}
}
return factoryMethodMetadataEnabled;
}
public static String getFactoryMethodReturnType(AnnotatedBeanDefinition annotatedBeanDefinition) {
try {
if (isFactoryMethodMetadataEnabled()) {
MethodMetadata factoryMethodMetadata = annotatedBeanDefinition.getFactoryMethodMetadata();
return factoryMethodMetadata != null ? factoryMethodMetadata.getReturnTypeName() : null;
} else {
Object source = annotatedBeanDefinition.getSource();
if (source instanceof StandardMethodMetadata) {
StandardMethodMetadata methodMetadata = (StandardMethodMetadata) source;
Method introspectedMethod = methodMetadata.getIntrospectedMethod();
if (introspectedMethod != null) {
return introspectedMethod.getReturnType().getName();
}
}
}
} catch (Throwable e) {
if (logger.isInfoEnabled()) {
logger.info("get return type of AnnotatedBeanDefinition failed", e);
}
}
return null;
}
public static MethodMetadata getFactoryMethodMetadata(AnnotatedBeanDefinition annotatedBeanDefinition) {
if (isFactoryMethodMetadataEnabled()) {
return annotatedBeanDefinition.getFactoryMethodMetadata();
} else {
Object source = annotatedBeanDefinition.getSource();
if (source instanceof StandardMethodMetadata) {
return (MethodMetadata) source;
}
return null;
}
}
/**
* Get the generic type of return type of the method.
*
* <pre>
* Source method:
* ReferenceBean<DemoService> demoService()
*
* Result: DemoService.class
* </pre>
*
* @param factoryMethodMetadata
* @return
*/
public static Class getGenericTypeOfReturnType(MethodMetadata factoryMethodMetadata) {
if (factoryMethodMetadata instanceof StandardMethodMetadata) {
Method introspectedMethod = ((StandardMethodMetadata) factoryMethodMetadata).getIntrospectedMethod();
Type returnType = introspectedMethod.getGenericReturnType();
if (returnType instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) returnType;
Type actualTypeArgument = parameterizedType.getActualTypeArguments()[0];
if (actualTypeArgument instanceof Class) {
return (Class) actualTypeArgument;
}
}
}
return null;
}
public static Class<?> referenceAnnotationBeanPostProcessor() {
try {
return Class.forName(
"org.apache.dubbo.config.spring6.beans.factory.annotation.ReferenceAnnotationWithAotBeanPostProcessor");
} catch (ClassNotFoundException e) {
return ReferenceAnnotationBeanPostProcessor.class;
}
}
public static Class<?> serviceAnnotationPostProcessor() {
try {
return Class.forName(
"org.apache.dubbo.config.spring6.beans.factory.annotation.ServiceAnnotationWithAotPostProcessor");
} catch (ClassNotFoundException e) {
return ServiceAnnotationPostProcessor.class;
}
}
public static Class<?> dubboInfraBeanRegisterPostProcessor() {
try {
return Class.forName("org.apache.dubbo.config.spring6.context.DubboInfraBeanRegisterPostProcessor");
} catch (ClassNotFoundException e) {
return DubboInfraBeanRegisterPostProcessor.class;
}
}
}
| SpringCompatUtils |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PatternMatchingInstanceofTest.java | {
"start": 8010,
"end": 8459
} | class ____ {
void test(Object x, Object y) {
if (x instanceof Test) {
Test test = (Test) y;
test(test, null);
}
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void generic() {
helper
.addInputLines(
"Test.java",
"""
import java.util.Map;
| Test |
java | quarkusio__quarkus | integration-tests/opentelemetry-mongodb-client-instrumentation/src/main/java/io/quarkus/it/opentelemetry/BookResource.java | {
"start": 555,
"end": 2361
} | class ____ {
@Inject
MongoClient client;
private MongoCollection<Book> getCollection() {
return client.getDatabase("books").getCollection("my-collection", Book.class);
}
@DELETE
public Response clearBooks() {
getCollection().deleteMany(new Document());
return Response.ok().build();
}
@GET
public List<Book> getBooks() {
FindIterable<Book> iterable = getCollection().find();
return getBooks(iterable);
}
private List<Book> getBooks(FindIterable<Book> iterable) {
List<Book> books = new ArrayList<>();
WriteConcern writeConcern = client.getDatabase("temp").getWriteConcern();
// force a test failure if we're not getting the correct, and correctly configured named mongodb client
if (Boolean.TRUE.equals(writeConcern.getJournal())) {
for (Book doc : iterable) {
books.add(doc);
}
}
return books;
}
@GET
@Path("/invalid")
public List<Book> error() {
BsonDocument query = new BsonDocument();
query.put("$invalidop", new BsonDouble(0d));
FindIterable<Book> iterable = getCollection().find(query);
return getBooks(iterable);
}
@POST
public Response addBook(Book book) {
getCollection().insertOne(book);
return Response.accepted().build();
}
@GET
@Path("/{author}")
public List<Book> getBooksByAuthor(@PathParam("author") String author) {
FindIterable<Book> iterable = getCollection().find(eq("author", author));
List<Book> books = new ArrayList<>();
for (Book doc : iterable) {
String title = doc.title();
books.add(new Book(author, title));
}
return books;
}
}
| BookResource |
java | google__dagger | dagger-android/main/java/dagger/android/HasAndroidInjector.java | {
"start": 716,
"end": 839
} | interface ____ {
/** Returns an {@link AndroidInjector}. */
AndroidInjector<Object> androidInjector();
}
| HasAndroidInjector |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java | {
"start": 4811,
"end": 9163
} | enum ____ {
/**
* $USER
* Final, non-modifiable.
*/
USER("USER"),
/**
* $LOGNAME
* Final, non-modifiable.
*/
LOGNAME("LOGNAME"),
/**
* $HOME
* Final, non-modifiable.
*/
HOME("HOME"),
/**
* $PWD
* Final, non-modifiable.
*/
PWD("PWD"),
/**
* $PATH
*/
PATH("PATH"),
/**
* $SHELL
*/
SHELL("SHELL"),
/**
* $JAVA_HOME
*/
JAVA_HOME("JAVA_HOME"),
/**
* $CLASSPATH
*/
CLASSPATH("CLASSPATH"),
/**
* $APP_CLASSPATH
*/
APP_CLASSPATH("APP_CLASSPATH"),
/**
* $LD_LIBRARY_PATH
*/
LD_LIBRARY_PATH("LD_LIBRARY_PATH"),
/**
* $HADOOP_CONF_DIR
* Final, non-modifiable.
*/
HADOOP_CONF_DIR("HADOOP_CONF_DIR"),
/**
* $HADOOP_COMMON_HOME
*/
HADOOP_COMMON_HOME("HADOOP_COMMON_HOME"),
/**
* $HADOOP_HDFS_HOME
*/
HADOOP_HDFS_HOME("HADOOP_HDFS_HOME"),
/**
* $MALLOC_ARENA_MAX
*/
MALLOC_ARENA_MAX("MALLOC_ARENA_MAX"),
/**
* $HADOOP_YARN_HOME
*/
HADOOP_YARN_HOME("HADOOP_YARN_HOME"),
/**
* $CLASSPATH_PREPEND_DISTCACHE
* Private, Windows specific
*/
@Private
CLASSPATH_PREPEND_DISTCACHE("CLASSPATH_PREPEND_DISTCACHE"),
/**
* $LOCALIZATION_COUNTERS
*
* Since NM does not RPC Container JVM's we pass Localization counter
* vector as an environment variable
*
*/
LOCALIZATION_COUNTERS("LOCALIZATION_COUNTERS"),
/**
* $CONTAINER_ID
* Final, exported by NodeManager and non-modifiable by users.
*/
CONTAINER_ID("CONTAINER_ID"),
/**
* $NM_HOST
* Final, exported by NodeManager and non-modifiable by users.
*/
NM_HOST("NM_HOST"),
/**
* $NM_HTTP_PORT
* Final, exported by NodeManager and non-modifiable by users.
*/
NM_HTTP_PORT("NM_HTTP_PORT"),
/**
* $NM_PORT
* Final, exported by NodeManager and non-modifiable by users.
*/
NM_PORT("NM_PORT"),
/**
* $LOCAL_DIRS
* Final, exported by NodeManager and non-modifiable by users.
*/
LOCAL_DIRS("LOCAL_DIRS"),
/**
* $LOCAL_USER_DIRS
* Final, exported by NodeManager and non-modifiable by users.
*/
LOCAL_USER_DIRS("LOCAL_USER_DIRS"),
/**
* $LOG_DIRS
* Final, exported by NodeManager and non-modifiable by users.
* Comma separate list of directories that the container should use for
* logging.
*/
LOG_DIRS("LOG_DIRS"),
/**
* $YARN_CONTAINER_RUNTIME_DOCKER_RUN_OVERRIDE_DISABLE
* Final, Docker run support ENTRY_POINT.
*/
YARN_CONTAINER_RUNTIME_DOCKER_RUN_OVERRIDE_DISABLE(
"YARN_CONTAINER_RUNTIME_DOCKER_RUN_OVERRIDE_DISABLE"),
/**
* $YARN_CONTAINER_RUNTIME_YARN_SYSFS_ENABLE
* Final, expose cluster information to container.
*/
YARN_CONTAINER_RUNTIME_YARN_SYSFS_ENABLE(
"YARN_CONTAINER_RUNTIME_YARN_SYSFS_ENABLE");
private final String variable;
Environment(String variable) {
this.variable = variable;
}
public String key() {
return variable;
}
public String toString() {
return variable;
}
/**
* Expand the environment variable based on client OS environment variable
* expansion syntax (e.g. $VAR for Linux and %VAR% for Windows).
* <p>
* Note: Use $$() method for cross-platform practice i.e. submit an
* application from a Windows client to a Linux/Unix server or vice versa.
* </p>
* @return expanded environment variable.
*/
public String $() {
if (Shell.WINDOWS) {
return "%" + variable + "%";
} else {
return "$" + variable;
}
}
/**
* Expand the environment variable in platform-agnostic syntax. The
* parameter expansion marker "{{VAR}}" will be replaced with real parameter
* expansion marker ('%' for Windows and '$' for Linux) by NodeManager on
* container launch. For example: {{VAR}} will be replaced as $VAR on Linux,
* and %VAR% on Windows.
* @return expanded environment variable.
*/
@Public
@Unstable
public String $$() {
return PARAMETER_EXPANSION_LEFT + variable + PARAMETER_EXPANSION_RIGHT;
}
}
}
| Environment |
java | quarkusio__quarkus | extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyAllocatorMetrics.java | {
"start": 523,
"end": 808
} | class ____ based on the MicroMeter NettyAllocatorMetrics class, but remove the "id" from the tags are it's
* computed from the `hashCode` which does not allow aggregation across processed.
* Instead, it gets a {@code name} label indicating an unique name for the allocator.
*/
public | is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/CollectionMapWithComponentValueTest.java | {
"start": 9026,
"end": 9205
} | class ____ {
@Id
@GeneratedValue
Long id;
@OneToMany
Set<TestEntity> entities;
}
@Entity(name = "TestEntity")
@Table(name = "TEST_ENTITY")
public static | BaseTestEntity |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/creation/instance/ConstructorInstantiatorTest.java | {
"start": 513,
"end": 582
} | class ____ extends ConstructorInstantiatorTest {}
static | ChildOfThis |
java | netty__netty | handler/src/main/java/io/netty/handler/timeout/IdleStateHandler.java | {
"start": 8904,
"end": 9134
} | class ____ milliseconds.
*
*/
public long getWriterIdleTimeInMillis() {
return TimeUnit.NANOSECONDS.toMillis(writerIdleTimeNanos);
}
/**
* Return the allIdleTime that was given when instance this | in |
java | google__guava | android/guava/src/com/google/common/collect/Tables.java | {
"start": 11647,
"end": 15402
} | class ____
* {@code factory.get()} does.
*
* <p>Call this method only when the simpler factory methods in classes like {@link
* HashBasedTable} and {@link TreeBasedTable} won't suffice.
*
* <p>The views returned by the {@code Table} methods {@link Table#column}, {@link
* Table#columnKeySet}, and {@link Table#columnMap} have iterators that don't support {@code
* remove()}. Otherwise, all optional operations are supported. Null row keys, columns keys, and
* values are not supported.
*
* <p>Lookups by row key are often faster than lookups by column key, because the data is stored
* in a {@code Map<R, Map<C, V>>}. A method call like {@code column(columnKey).get(rowKey)} still
* runs quickly, since the row key is provided. However, {@code column(columnKey).size()} takes
* longer, since an iteration across all row keys occurs.
*
* <p>Note that this implementation is not synchronized. If multiple threads access this table
* concurrently and one of the threads modifies the table, it must be synchronized externally.
*
* <p>The table is serializable if {@code backingMap}, {@code factory}, the maps generated by
* {@code factory}, and the table contents are all serializable.
*
* <p>Note: the table assumes complete ownership over of {@code backingMap} and the maps returned
* by {@code factory}. Those objects should not be manually updated and they should not use soft,
* weak, or phantom references.
*
* @param backingMap place to store the mapping from each row key to its corresponding column key
* / value map
* @param factory supplier of new, empty maps that will each hold all column key / value mappings
* for a given row key
* @throws IllegalArgumentException if {@code backingMap} is not empty
* @since 10.0
*/
public static <R, C, V> Table<R, C, V> newCustomTable(
Map<R, Map<C, V>> backingMap, Supplier<? extends Map<C, V>> factory) {
checkArgument(backingMap.isEmpty());
checkNotNull(factory);
// TODO(jlevy): Wrap factory to validate that the supplied maps are empty?
return new StandardTable<>(backingMap, factory);
}
/**
* Returns a view of a table where each value is transformed by a function. All other properties
* of the table, such as iteration order, are left intact.
*
* <p>Changes in the underlying table are reflected in this view. Conversely, this view supports
* removal operations, and these are reflected in the underlying table.
*
* <p>It's acceptable for the underlying table to contain null keys, and even null values provided
* that the function is capable of accepting null input. The transformed table might contain null
* values, if the function sometimes gives a null result.
*
* <p>The returned table is not thread-safe or serializable, even if the underlying table is.
*
* <p>The function is applied lazily, invoked when needed. This is necessary for the returned
* table to be a view, but it means that the function will be applied many times for bulk
* operations like {@link Table#containsValue} and {@code Table.toString()}. For this to perform
* well, {@code function} should be fast. To avoid lazy evaluation when the returned table doesn't
* need to be a view, copy the returned table into a new table of your choosing.
*
* @since 10.0
*/
public static <
R extends @Nullable Object,
C extends @Nullable Object,
V1 extends @Nullable Object,
V2 extends @Nullable Object>
Table<R, C, V2> transformValues(
Table<R, C, V1> fromTable, Function<? super V1, V2> function) {
return new TransformedTable<>(fromTable, function);
}
private static final | than |
java | junit-team__junit5 | platform-tooling-support-tests/src/test/java/platform/tooling/support/tests/GradleMissingEngineTests.java | {
"start": 904,
"end": 1706
} | class ____ {
@Test
void gradle_wrapper(@TempDir Path workspace, @FilePrefix("gradle") OutputFiles outputFiles) throws Exception {
var result = ProcessStarters.gradlew() //
.workingDir(copyToWorkspace(Projects.GRADLE_MISSING_ENGINE, workspace)) //
.addArguments("-Dmaven.repo=" + MavenRepo.dir()) //
.addArguments("build", "--no-daemon", "--stacktrace", "--no-build-cache", "--warning-mode=fail") //
.putEnvironment("JDK17", Helper.getJavaHome(17).orElseThrow(TestAbortedException::new).toString()) //
.redirectOutput(outputFiles).startAndWait();
assertEquals(1, result.exitCode());
assertThat(result.stdErr()) //
.contains("FAILURE: Build failed with an exception.") //
.contains("Cannot create Launcher without at least one TestEngine");
}
}
| GradleMissingEngineTests |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/MethodOrderer.java | {
"start": 6987,
"end": 8152
} | class ____ implements MethodOrderer {
public DisplayName() {
}
/**
* Sort the methods encapsulated in the supplied
* {@link MethodOrdererContext} alphanumerically based on their display
* names.
*/
@Override
public void orderMethods(MethodOrdererContext context) {
context.getMethodDescriptors().sort(comparator);
}
private static final Comparator<MethodDescriptor> comparator = Comparator.comparing(
MethodDescriptor::getDisplayName);
}
/**
* {@code MethodOrderer} that sorts methods based on the {@link Order @Order}
* annotation.
*
* <p>Any methods that are assigned the same order value will be sorted
* arbitrarily adjacent to each other.
*
* <p>Any methods not annotated with {@code @Order} will be assigned the
* {@linkplain Order#DEFAULT default order} value which will effectively cause them
* to appear at the end of the sorted list, unless certain methods are assigned
* an explicit order value greater than the default order value. Any methods
* assigned an explicit order value greater than the default order value will
* appear after non-annotated methods in the sorted list.
*/
| DisplayName |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/expression/SqmCollectionSize.java | {
"start": 749,
"end": 2681
} | class ____ extends AbstractSqmExpression<Integer> {
private final SqmPath<?> pluralPath;
public SqmCollectionSize(SqmPath<?> pluralPath, NodeBuilder nodeBuilder) {
this( pluralPath, nodeBuilder.getIntegerType(), nodeBuilder );
}
public SqmCollectionSize(SqmPath<?> pluralPath, SqmBindableType<Integer> sizeType, NodeBuilder nodeBuilder) {
super( sizeType, nodeBuilder );
this.pluralPath = pluralPath;
}
@Override
public SqmCollectionSize copy(SqmCopyContext context) {
final SqmCollectionSize existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
final SqmCollectionSize expression = context.registerCopy(
this,
new SqmCollectionSize(
pluralPath.copy( context ),
getNodeType(),
nodeBuilder()
)
);
copyTo( expression, context );
return expression;
}
public SqmPath<?> getPluralPath() {
return pluralPath;
}
@Override
public @NonNull SqmBindableType<Integer> getNodeType() {
return castNonNull( super.getNodeType() );
}
@Override
public <T> T accept(SemanticQueryWalker<T> walker) {
return walker.visitPluralAttributeSizeFunction( this );
}
@Override
public String asLoggableText() {
return "SIZE(" + pluralPath.asLoggableText() + ")";
}
@Override
public void appendHqlString(StringBuilder hql, SqmRenderContext context) {
hql.append( "size(" );
pluralPath.appendHqlString( hql, context );
hql.append( ')' );
}
@Override
public boolean equals(@Nullable Object object) {
return object instanceof SqmCollectionSize that
&& this.pluralPath.equals( that.pluralPath );
}
@Override
public int hashCode() {
return pluralPath.hashCode();
}
@Override
public boolean isCompatible(Object object) {
return object instanceof SqmCollectionSize that
&& this.pluralPath.isCompatible( that.pluralPath );
}
@Override
public int cacheHashCode() {
return pluralPath.cacheHashCode();
}
}
| SqmCollectionSize |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServiceProtocol.java | {
"start": 4404,
"end": 33744
} | interface ____ {
/**
* This method retrieves the cluster information, and it is reachable by using
* {@link RMWSConsts#INFO}.
*
* @return the cluster information
*/
ClusterInfo get();
/**
* This method retrieves the cluster information, and it is reachable by using
* {@link RMWSConsts#INFO}.
*
* @return the cluster information
*/
ClusterInfo getClusterInfo();
/**
* This method retrieves the cluster user information, and it is reachable by using
* {@link RMWSConsts#CLUSTER_USER_INFO}.
*
* @param hsr the servlet request
* @return the cluster user information
*/
ClusterUserInfo getClusterUserInfo(HttpServletRequest hsr);
/**
* This method retrieves the cluster metrics information, and it is reachable
* by using {@link RMWSConsts#METRICS}.
*
* @see ApplicationClientProtocol#getClusterMetrics
* @return the cluster metrics information
*/
ClusterMetricsInfo getClusterMetricsInfo();
/**
* This method retrieves the current scheduler status, and it is reachable by
* using {@link RMWSConsts#SCHEDULER}.
*
* @return the current scheduler status
*/
SchedulerTypeInfo getSchedulerInfo();
/**
* This method dumps the scheduler logs for the time got in input, and it is
* reachable by using {@link RMWSConsts#SCHEDULER_LOGS}.
*
* @param time the period of time. It is a FormParam.
* @param hsr the servlet request
* @return the result of the operation
* @throws IOException when it cannot create dump log file
*/
String dumpSchedulerLogs(String time, HttpServletRequest hsr)
throws IOException;
/**
* This method retrieves all the nodes information in the cluster, and it is
* reachable by using {@link RMWSConsts#NODES}.
*
* @see ApplicationClientProtocol#getClusterNodes
* @param states the states we want to filter. It is a QueryParam.
* @return all nodes in the cluster. If the states param is given, returns all
* nodes that are in the comma-separated list of states
*/
NodesInfo getNodes(String states);
/**
* This method retrieves a specific node information, and it is reachable by
* using {@link RMWSConsts#NODES_NODEID}.
*
* @param nodeId the node we want to retrieve the information. It is a
* PathParam.
* @return the information about the node in input
*/
NodeInfo getNode(String nodeId);
/**
* This method changes the resources of a specific node, and it is reachable
* by using {@link RMWSConsts#NODE_RESOURCE}.
*
* @param hsr The servlet request.
* @param nodeId The node we want to retrieve the information for.
* It is a PathParam.
* @param resourceOption The resource change.
* @throws AuthorizationException If the user is not authorized.
* @return the resources of a specific node.
*/
ResourceInfo updateNodeResource(HttpServletRequest hsr, String nodeId,
ResourceOptionInfo resourceOption) throws AuthorizationException;
/**
* This method retrieves all the app reports in the cluster, and it is
* reachable by using {@link RMWSConsts#APPS}.
*
* @see ApplicationClientProtocol#getApplications
* @param hsr the servlet request
* @param stateQuery right now the stateQuery is deprecated. It is a
* QueryParam.
* @param statesQuery filter the result by states. It is a QueryParam.
* @param finalStatusQuery filter the result by final states. It is a
* QueryParam.
* @param userQuery filter the result by user. It is a QueryParam.
* @param queueQuery filter the result by queue. It is a QueryParam.
* @param count set a limit of the result. It is a QueryParam.
* @param startedBegin filter the result by started begin time. It is a
* QueryParam.
* @param startedEnd filter the result by started end time. It is a
* QueryParam.
* @param finishBegin filter the result by finish begin time. It is a
* QueryParam.
* @param finishEnd filter the result by finish end time. It is a QueryParam.
* @param applicationTypes filter the result by types. It is a QueryParam.
* @param applicationTags filter the result by tags. It is a QueryParam.
* @param name filter the name of the application. It is a QueryParam.
* @param unselectedFields De-selected params to avoid from report. It is a
* QueryParam.
* @return all apps in the cluster
*/
@SuppressWarnings("checkstyle:parameternumber")
AppsInfo getApps(HttpServletRequest hsr, String stateQuery,
Set<String> statesQuery, String finalStatusQuery, String userQuery,
String queueQuery, String count, String startedBegin, String startedEnd,
String finishBegin, String finishEnd, Set<String> applicationTypes,
Set<String> applicationTags, String name, Set<String> unselectedFields);
/**
* This method retrieve all the activities in a specific node, and it is
* reachable by using {@link RMWSConsts#SCHEDULER_ACTIVITIES}.
*
* @param hsr the servlet request
* @param nodeId the node we want to retrieve the activities. It is a
* QueryParam.
* @param groupBy the groupBy type by which the activities should be
* aggregated. It is a QueryParam.
* @return all the activities in the specific node
*/
ActivitiesInfo getActivities(HttpServletRequest hsr, String nodeId,
String groupBy);
/**
* This method retrieve the last n activities inside scheduler, and it is
* reachable by using {@link RMWSConsts#SCHEDULER_BULK_ACTIVITIES}.
*
* @param hsr the servlet request
* @param groupBy the groupBy type by which the activities should be
* aggregated. It is a QueryParam.
* @param activitiesCount number of activities
* @return last n activities
* @throws InterruptedException if interrupted.
*/
BulkActivitiesInfo getBulkActivities(HttpServletRequest hsr,
String groupBy, int activitiesCount) throws InterruptedException;
/**
* This method retrieves all the activities for a specific app for a specific
* period of time, and it is reachable by using
* {@link RMWSConsts#SCHEDULER_APP_ACTIVITIES}.
*
* @param hsr the servlet request
* @param appId the applicationId we want to retrieve the activities. It is a
* QueryParam.
* @param time for how long we want to retrieve the activities. It is a
* QueryParam.
* @param requestPriorities the request priorities we want to retrieve the
* activities. It is a QueryParam.
* @param allocationRequestIds the allocation request ids we want to retrieve
* the activities. It is a QueryParam.
* @param groupBy the groupBy type by which the activities should be
* aggregated. It is a QueryParam.
* @param limit set a limit of the result. It is a QueryParam.
* @param actions the required actions of app activities. It is a QueryParam.
* @param summarize whether app activities in multiple scheduling processes
* need to be summarized. It is a QueryParam.
* @return all the activities about a specific app for a specific time
*/
AppActivitiesInfo getAppActivities(HttpServletRequest hsr, String appId,
String time, Set<String> requestPriorities,
Set<String> allocationRequestIds, String groupBy, String limit,
Set<String> actions, boolean summarize);
/**
* This method retrieves all the statistics for a specific app, and it is
* reachable by using {@link RMWSConsts#APP_STATISTICS}.
*
* @param hsr the servlet request
* @param stateQueries filter the result by states. It is a QueryParam.
* @param typeQueries filter the result by type names. It is a QueryParam.
* @return the application's statistics for specific states and types
*/
ApplicationStatisticsInfo getAppStatistics(HttpServletRequest hsr,
Set<String> stateQueries, Set<String> typeQueries);
/**
* This method retrieves the report for a specific app, and it is reachable by
* using {@link RMWSConsts#APPS_APPID}.
*
* @see ApplicationClientProtocol#getApplicationReport
* @param hsr the servlet request
* @param appId the Id of the application we want the report. It is a
* PathParam.
* @param unselectedFields De-selected param list to avoid from report. It is
* a QueryParam.
* @return the app report for a specific application
*/
AppInfo getApp(HttpServletRequest hsr, String appId,
Set<String> unselectedFields);
/**
* This method retrieves the state for a specific app, and it is reachable by
* using {@link RMWSConsts#APPS_APPID_STATE}.
*
* @param hsr the servlet request
* @param appId the Id of the application we want the state. It is a
* PathParam.
* @return the state for a specific application
* @throws AuthorizationException if the user is not authorized
*/
AppState getAppState(HttpServletRequest hsr, String appId)
throws AuthorizationException;
/**
* This method updates the state of the app in input, and it is reachable by
* using {@link RMWSConsts#APPS_APPID_STATE}.
*
* @param targetState the target state for the app. It is a content param.
* @param hsr the servlet request
* @param appId the Id of the application we want to update the state. It is a
* PathParam.
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method
* @throws YarnException if app does not exist
* @throws InterruptedException if interrupted
* @throws IOException if doAs action throws an IOException
*/
Response updateAppState(AppState targetState, HttpServletRequest hsr,
String appId) throws AuthorizationException, YarnException,
InterruptedException, IOException;
/**
* This method retrieves all the node labels with the respective nodes in the
* cluster, and it is reachable by using
* {@link RMWSConsts#GET_NODE_TO_LABELS}.
*
* @see ApplicationClientProtocol#getNodeToLabels
* @param hsr the servlet request
* @return all the nodes within a node label
* @throws IOException if an IOException happened
*/
NodeToLabelsInfo getNodeToLabels(HttpServletRequest hsr) throws IOException;
NodeLabelsInfo getRMNodeLabels(HttpServletRequest hsr) throws IOException;
/**
* This method retrieves all the node within multiple node labels in the
* cluster, and it is reachable by using {@link RMWSConsts#LABEL_MAPPINGS}.
*
* @see ApplicationClientProtocol#getLabelsToNodes
* @param labels filter the result by node labels. It is a QueryParam.
* @return all the nodes within multiple node labels
* @throws IOException if an IOException happened
*/
LabelsToNodesInfo getLabelsToNodes(Set<String> labels) throws IOException;
/**
* This method replaces all the node labels for specific nodes, and it is
* reachable by using {@link RMWSConsts#REPLACE_NODE_TO_LABELS}.
*
* @see ResourceManagerAdministrationProtocol#replaceLabelsOnNode
* @param newNodeToLabels the list of new labels. It is a content param.
* @param hsr the servlet request
* @return Response containing the status code
* @throws Exception if an exception happened
*/
Response replaceLabelsOnNodes(NodeToLabelsEntryList newNodeToLabels,
HttpServletRequest hsr) throws Exception;
/**
* This method replaces all the node labels for specific node, and it is
* reachable by using {@link RMWSConsts#NODES_NODEID_REPLACE_LABELS}.
*
* @see ResourceManagerAdministrationProtocol#replaceLabelsOnNode
* @param newNodeLabelsName the list of new labels. It is a QueryParam.
* @param hsr the servlet request
* @param nodeId the node we want to replace the node labels. It is a
* PathParam.
* @return Response containing the status code
* @throws Exception if an exception happened
*/
Response replaceLabelsOnNode(Set<String> newNodeLabelsName,
HttpServletRequest hsr, String nodeId) throws Exception;
/**
* This method retrieves all the node labels in the cluster, and it is
* reachable by using {@link RMWSConsts#GET_NODE_LABELS}.
*
* @see ApplicationClientProtocol#getClusterNodeLabels
* @param hsr the servlet request
* @return all the node labels in the cluster
* @throws IOException if an IOException happened
*/
NodeLabelsInfo getClusterNodeLabels(HttpServletRequest hsr)
throws IOException;
/**
* This method adds specific node labels for specific nodes, and it is
* reachable by using {@link RMWSConsts#ADD_NODE_LABELS}.
*
* @see ResourceManagerAdministrationProtocol#addToClusterNodeLabels
* @param newNodeLabels the node labels to add. It is a content param.
* @param hsr the servlet request
* @return Response containing the status code
* @throws Exception in case of bad request
*/
Response addToClusterNodeLabels(NodeLabelsInfo newNodeLabels,
HttpServletRequest hsr) throws Exception;
/**
* This method removes all the node labels for specific nodes, and it is
* reachable by using {@link RMWSConsts#REMOVE_NODE_LABELS}.
*
* @see ResourceManagerAdministrationProtocol#removeFromClusterNodeLabels
* @param oldNodeLabels the node labels to remove. It is a QueryParam.
* @param hsr the servlet request
* @return Response containing the status code
* @throws Exception in case of bad request
*/
Response removeFromClusterNodeLabels(Set<String> oldNodeLabels,
HttpServletRequest hsr) throws Exception;
/**
* This method retrieves all the node labels for specific node, and it is
* reachable by using {@link RMWSConsts#NODES_NODEID_GETLABELS}.
*
* @param hsr the servlet request
* @param nodeId the node we want to get all the node labels. It is a
* PathParam.
* @return all the labels for a specific node.
* @throws IOException if an IOException happened
*/
NodeLabelsInfo getLabelsOnNode(HttpServletRequest hsr, String nodeId)
throws IOException;
/**
* This method retrieves the priority for a specific app, and it is reachable
* by using {@link RMWSConsts#APPS_APPID_PRIORITY}.
*
* @param hsr the servlet request
* @param appId the app we want to get the priority. It is a PathParam.
* @return the priority for a specific application
* @throws AuthorizationException in case of the user is not authorized
*/
AppPriority getAppPriority(HttpServletRequest hsr, String appId)
throws AuthorizationException;
/**
* This method updates the priority for a specific application, and it is
* reachable by using {@link RMWSConsts#APPS_APPID_PRIORITY}.
*
* @param targetPriority the priority we want to set for the app. It is a
* content param.
* @param hsr the servlet request
* @param appId the application we want to update its priority. It is a
* PathParam.
* @return Response containing the status code
* @throws AuthorizationException if the user is not authenticated
* @throws YarnException if the target is null
* @throws IOException if the update fails.
* @throws InterruptedException if interrupted.
*/
Response updateApplicationPriority(AppPriority targetPriority,
HttpServletRequest hsr, String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException;
/**
* This method retrieves the queue for a specific app, and it is reachable by
* using {@link RMWSConsts#APPS_APPID_QUEUE}.
*
* @param hsr the servlet request
* @param appId the application we want to retrieve its queue. It is a
* PathParam.
* @return the Queue for a specific application.
* @throws AuthorizationException if the user is not authenticated
*/
AppQueue getAppQueue(HttpServletRequest hsr, String appId)
throws AuthorizationException;
/**
* This method updates the queue for a specific application, and it is
* reachable by using {@link RMWSConsts#APPS_APPID_QUEUE}.
*
* @param targetQueue the queue we want to set. It is a content param.
* @param hsr the servlet request
* @param appId the application we want to change its queue. It is a
* PathParam.
* @return Response containing the status code
* @throws AuthorizationException if the user is not authenticated
* @throws YarnException if the app is not found
* @throws IOException if the update fails.
* @throws InterruptedException if interrupted.
*/
Response updateAppQueue(AppQueue targetQueue, HttpServletRequest hsr,
String appId) throws AuthorizationException, YarnException,
InterruptedException, IOException;
/**
* Generates a new ApplicationId which is then sent to the client. This method
* is reachable by using {@link RMWSConsts#APPS_NEW_APPLICATION}.
*
* @see ApplicationClientProtocol#getNewApplication
*
* @param hsr the servlet request
* @return Response containing the app id and the maximum resource
* capabilities
* @throws AuthorizationException if the user is not authorized to invoke this
* method
* @throws IOException if the creation fails
* @throws InterruptedException if interrupted
*/
Response createNewApplication(HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException;
/**
* Function to submit an app to the RM. This method is reachable by using
* {@link RMWSConsts#APPS}.
*
* @see ApplicationClientProtocol#submitApplication
*
* @param newApp structure containing information to construct the
* ApplicationSubmissionContext. It is a content param.
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method
* @throws IOException if the submission failed
* @throws InterruptedException if interrupted
*/
Response submitApplication(ApplicationSubmissionContextInfo newApp,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException;
/**
* This method posts a delegation token from the client, and it is reachable
* by using {@link RMWSConsts#DELEGATION_TOKEN}.
*
* @see ApplicationBaseProtocol#getDelegationToken
* @param tokenData the token to delegate. It is a content param.
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if Kerberos auth failed
* @throws IOException if the delegation failed
* @throws InterruptedException if interrupted
* @throws Exception in case of bad request
*/
Response postDelegationToken(DelegationToken tokenData,
HttpServletRequest hsr) throws AuthorizationException, IOException,
InterruptedException, Exception;
/**
* This method updates the expiration for a delegation token from the client,
* and it is reachable by using
* {@link RMWSConsts#DELEGATION_TOKEN_EXPIRATION}.
*
* @see ApplicationBaseProtocol#renewDelegationToken
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if Kerberos auth failed
* @throws IOException if the delegation failed
* @throws Exception in case of bad request
*/
Response postDelegationTokenExpiration(HttpServletRequest hsr)
throws AuthorizationException, IOException, Exception;
/**
* This method cancel the delegation token from the client, and it is
* reachable by using {@link RMWSConsts#DELEGATION_TOKEN}.
*
* @see ApplicationBaseProtocol#cancelDelegationToken
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if Kerberos auth failed
* @throws IOException if the delegation failed
* @throws InterruptedException if interrupted
* @throws Exception in case of bad request
*/
Response cancelDelegationToken(HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException,
Exception;
/**
* Generates a new ReservationId which is then sent to the client. This method
* is reachable by using {@link RMWSConsts#RESERVATION_NEW}.
*
* @see ApplicationClientProtocol#getNewReservation
*
* @param hsr the servlet request
* @return Response containing the app id and the maximum resource
* capabilities
* @throws AuthorizationException if the user is not authorized to invoke this
* method.
* @throws IOException if creation failed
* @throws InterruptedException if interrupted
*/
Response createNewReservation(HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException;
/**
* Function to submit a Reservation to the RM.This method is reachable by
* using {@link RMWSConsts#RESERVATION_SUBMIT}.
*
* @see ApplicationClientProtocol#submitReservation
*
* @param resContext provides information to construct the
* ReservationSubmissionRequest. It is a content param.
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method
* @throws IOException if creation failed
* @throws InterruptedException if interrupted
*/
Response submitReservation(ReservationSubmissionRequestInfo resContext,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException;
/**
* Function to update a Reservation to the RM. This method is reachable by
* using {@link RMWSConsts#RESERVATION_UPDATE}.
*
* @see ApplicationClientProtocol#updateReservation
*
* @param resContext provides information to construct the
* ReservationUpdateRequest. It is a content param.
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method
* @throws IOException if the operation failed
* @throws InterruptedException if interrupted
*/
Response updateReservation(ReservationUpdateRequestInfo resContext,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException;
/**
* Function to delete a Reservation to the RM. This method is reachable by
* using {@link RMWSConsts#RESERVATION_DELETE}.
*
* @see ApplicationClientProtocol#deleteReservation
*
* @param resContext provides information to construct the
* ReservationDeleteRequest. It is a content param.
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException when the user group information cannot be
* retrieved.
* @throws IOException when a {@link ReservationDeleteRequest} cannot be
* created from the {@link ReservationDeleteRequestInfo}. This
* exception is also thrown on
* {@code ClientRMService.deleteReservation} invocation failure.
* @throws InterruptedException if doAs action throws an InterruptedException.
*/
Response deleteReservation(ReservationDeleteRequestInfo resContext,
HttpServletRequest hsr)
throws AuthorizationException, IOException, InterruptedException;
/**
* Function to retrieve a list of all the reservations. This method is
* reachable by using {@link RMWSConsts#RESERVATION_LIST}.
*
* @see ApplicationClientProtocol#listReservations
* @param queue filter the result by queue. It is a QueryParam.
* @param reservationId filter the result by reservationId. It is a
* QueryParam.
* @param startTime filter the result by start time. It is a QueryParam.
* @param endTime filter the result by end time. It is a QueryParam.
* @param includeResourceAllocations true if the resource allocation should be
* in the result, false otherwise. It is a QueryParam.
* @param hsr the servlet request
* @return Response containing the status code
* @throws Exception in case of bad request
*/
Response listReservation(String queue, String reservationId, long startTime,
long endTime, boolean includeResourceAllocations, HttpServletRequest hsr)
throws Exception;
/**
* This method retrieves the timeout information for a specific app with a
* specific type, and it is reachable by using
* {@link RMWSConsts#APPS_TIMEOUTS_TYPE}.
*
* @param hsr the servlet request
* @param appId the application we want to get the timeout. It is a PathParam.
* @param type the type of the timeouts. It is a PathParam.
* @return the timeout for a specific application with a specific type.
* @throws AuthorizationException if the user is not authorized
*/
AppTimeoutInfo getAppTimeout(HttpServletRequest hsr, String appId,
String type) throws AuthorizationException;
/**
* This method retrieves the timeout information for a specific app, and it is
* reachable by using {@link RMWSConsts#APPS_TIMEOUTS}.
*
* @param hsr the servlet request
* @param appId the application we want to get the timeouts. It is a
* PathParam.
* @return the timeouts for a specific application
* @throws AuthorizationException if the user is not authorized
*/
AppTimeoutsInfo getAppTimeouts(HttpServletRequest hsr, String appId)
throws AuthorizationException;
/**
* This method updates the timeout information for a specific app, and it is
* reachable by using {@link RMWSConsts#APPS_TIMEOUT}.
*
* @see ApplicationClientProtocol#updateApplicationTimeouts
* @param appTimeout the appTimeoutInfo. It is a content param.
* @param hsr the servlet request
* @param appId the application we want to update. It is a PathParam.
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method
* @throws YarnException in case of bad request
* @throws IOException if the operation failed
* @throws InterruptedException if interrupted
*/
Response updateApplicationTimeout(AppTimeoutInfo appTimeout,
HttpServletRequest hsr, String appId) throws AuthorizationException,
YarnException, InterruptedException, IOException;
/**
* This method retrieves all the attempts information for a specific app, and
* it is reachable by using {@link RMWSConsts#APPS_APPID_APPATTEMPTS}.
*
* @see ApplicationBaseProtocol#getApplicationAttempts
* @param hsr the servlet request
* @param appId the application we want to get the attempts. It is a
* PathParam.
* @return all the attempts info for a specific application
*/
AppAttemptsInfo getAppAttempts(HttpServletRequest hsr, String appId);
/**
* This method verifies if a user has access to a specified queue.
*
* @return Response containing the status code.
*
* @param queue queue
* @param username user
* @param queueAclType acl type of queue, it could be
* SUBMIT_APPLICATIONS/ADMINISTER_QUEUE
* @param hsr request
*
* @throws AuthorizationException if the user is not authorized to invoke this
* method.
*/
RMQueueAclInfo checkUserAccessToQueue(String queue, String username,
String queueAclType, HttpServletRequest hsr)
throws AuthorizationException;
/**
* This method sends a signal to container.
* @param containerId containerId
* @param command signal command, it could be OUTPUT_THREAD_DUMP/
* GRACEFUL_SHUTDOWN/FORCEFUL_SHUTDOWN
* @param req request
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method.
*/
Response signalToContainer(String containerId, String command,
HttpServletRequest req) throws AuthorizationException;
/**
* This method updates the Scheduler configuration, and it is reachable by
* using {@link RMWSConsts#SCHEDULER_CONF}.
*
* @param mutationInfo th information for making scheduler configuration
* changes (supports adding, removing, or updating a queue, as well
* as global scheduler conf changes)
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method
* @throws InterruptedException if interrupted
*/
Response updateSchedulerConfiguration(SchedConfUpdateInfo mutationInfo,
HttpServletRequest hsr) throws AuthorizationException, InterruptedException;
/**
* This method retrieves all the Scheduler configuration, and it is reachable
* by using {@link RMWSConsts#SCHEDULER_CONF}.
*
* @param hsr the servlet request
* @return Response containing the status code
* @throws AuthorizationException if the user is not authorized to invoke this
* method.
*/
Response getSchedulerConfiguration(HttpServletRequest hsr) throws AuthorizationException;
}
| RMWebServiceProtocol |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/privatemember/PrivateProducerMethodTest.java | {
"start": 832,
"end": 1206
} | class ____ {
private String name = null;
@PostConstruct
void init() {
name = "foo";
}
@Produces
private Head produce() {
return new Head() {
@Override
public String name() {
return name;
}
};
}
}
}
| HeadProducer |
java | quarkusio__quarkus | extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/TransactionRunnerImpl.java | {
"start": 109,
"end": 930
} | class ____ extends RunOptionsBase
implements TransactionRunnerOptions, TransactionRunner {
TransactionRunnerImpl(TransactionSemantics semantics) {
setSemantics(semantics);
}
@Override
public TransactionRunnerImpl timeout(int seconds) {
setTimeout(seconds);
return this;
}
@Override
public TransactionRunnerImpl exceptionHandler(Function<Throwable, TransactionExceptionResult> handler) {
setExceptionHandler(handler);
return this;
}
@Override
public void run(Runnable task) {
QuarkusTransactionImpl.call(this, () -> {
task.run();
return null;
});
}
@Override
public <T> T call(Callable<T> task) {
return QuarkusTransactionImpl.call(this, task);
}
}
| TransactionRunnerImpl |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/api/datastream/FinishedSourcesWatermarkITCase.java | {
"start": 5756,
"end": 6075
} | class ____ extends CoProcessFunction<String, String, String> {
@Override
public void processElement1(String val, Context context, Collector<String> collector) {}
@Override
public void processElement2(String val, Context context, Collector<String> collector) {}
}
}
| NoopCoProcessFunction |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/utils/AbstractIterator.java | {
"start": 1107,
"end": 2484
} | enum ____ {
READY, NOT_READY, DONE, FAILED
}
private State state = State.NOT_READY;
private T next;
@Override
public boolean hasNext() {
switch (state) {
case FAILED:
throw new IllegalStateException("Iterator is in failed state");
case DONE:
return false;
case READY:
return true;
default:
return maybeComputeNext();
}
}
@Override
public T next() {
if (!hasNext())
throw new NoSuchElementException();
state = State.NOT_READY;
if (next == null)
throw new IllegalStateException("Expected item but none found.");
return next;
}
@Override
public void remove() {
throw new UnsupportedOperationException("Removal not supported");
}
public T peek() {
if (!hasNext())
throw new NoSuchElementException();
return next;
}
protected T allDone() {
state = State.DONE;
return null;
}
protected abstract T makeNext();
private Boolean maybeComputeNext() {
state = State.FAILED;
next = makeNext();
if (state == State.DONE) {
return false;
} else {
state = State.READY;
return true;
}
}
}
| State |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 15957,
"end": 16144
} | class ____ internally by the framework.
* Same for unchecked type safety warnings.
*/
@SuppressWarnings({
"deprecation",
"rawtypes",
"unchecked"
})
public final | used |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/TargetXmlAnnotation.java | {
"start": 466,
"end": 1161
} | class ____ implements Target {
private String value;
public TargetXmlAnnotation(ModelsContext modelContext) {
}
public TargetXmlAnnotation(Target annotation, ModelsContext modelContext) {
throw new UnsupportedOperationException( "Should only ever be sourced from XML" );
}
public TargetXmlAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
throw new UnsupportedOperationException( "Should only ever be sourced from XML" );
}
@Override
public Class<? extends Annotation> annotationType() {
return Target.class;
}
@Override
public String value() {
return value;
}
public void value(String value) {
this.value = value;
}
}
| TargetXmlAnnotation |
java | quarkusio__quarkus | integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KnativeWithImagePullSecretTest.java | {
"start": 792,
"end": 3106
} | class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClasses(GreetingResource.class))
.setApplicationName("knative-with-image-pull-secret")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("knative-with-image-pull-secret.properties")
.setLogFileName("k8s.log")
.setForcedDependencies(List.of(Dependency.of("io.quarkus", "quarkus-kubernetes", Version.getVersion())));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
final Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("knative.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("knative.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("knative.yml"));
assertThat(kubernetesList).filteredOn(i -> "Service".equals(i.getKind())).singleElement().satisfies(i -> {
assertThat(i).isInstanceOfSatisfying(Service.class, s -> {
assertThat(s.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("knative-with-image-pull-secret");
});
assertThat(s.getSpec()).satisfies(serviceSpec -> {
assertThat(serviceSpec.getTemplate()).satisfies(t -> {
assertThat(t.getSpec()).satisfies(r -> {
assertThat(r.getServiceAccountName()).isEqualTo("knative-with-image-pull-secret");
});
});
});
});
});
assertThat(kubernetesList).filteredOn(i -> "ServiceAccount".equals(i.getKind())).singleElement().satisfies(i -> {
assertThat(i).isInstanceOfSatisfying(ServiceAccount.class, s -> {
assertTrue(s.getImagePullSecrets().stream().anyMatch(r -> r.getName().equals("my-secret")));
});
});
}
}
| KnativeWithImagePullSecretTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/jpa/event/spi/CallbackType.java | {
"start": 553,
"end": 1359
} | enum ____ {
/**
* @see PreUpdate
*/
PRE_UPDATE,
/**
* @see PostUpdate
*/
POST_UPDATE,
/**
* @see PrePersist
*/
PRE_PERSIST,
/**
* @see PostPersist
*/
POST_PERSIST,
/**
* @see PreRemove
*/
PRE_REMOVE,
/**
* @see PostRemove
*/
POST_REMOVE,
/**
* @see PostLoad
*/
POST_LOAD;
/**
* The JPA-defined callback annotation type corresponding
* to this lifecycle event type.
*/
public Class<? extends Annotation> getCallbackAnnotation() {
return switch ( this ) {
case PRE_PERSIST -> PrePersist.class;
case PRE_UPDATE -> PreUpdate.class;
case PRE_REMOVE -> PreRemove.class;
case POST_PERSIST -> PostPersist.class;
case POST_UPDATE -> PostUpdate.class;
case POST_REMOVE -> PostRemove.class;
case POST_LOAD -> PostLoad.class;
};
}
}
| CallbackType |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-http12/src/main/java/org/apache/dubbo/remoting/http12/message/DefaultHttpHeaders.java | {
"start": 1660,
"end": 2741
} | class ____ extends DefaultHeaders<CharSequence, CharSequence, HeadersMap> {
HeadersMap() {
this(16);
}
HeadersMap(Headers<?, ?, ?> headers) {
this(headers.size());
addImpl((Headers) headers);
}
HeadersMap(HttpHeaders headers) {
this(headers.size());
for (Entry<CharSequence, String> entry : headers) {
add(entry.getKey(), entry.getValue());
}
}
HeadersMap(int size) {
super(
AsciiString.CASE_INSENSITIVE_HASHER,
CharSequenceValueConverter.INSTANCE,
NameValidator.NOT_NULL,
size,
(ValueValidator) ValueValidator.NO_VALIDATION);
}
@Override
protected void validateName(NameValidator<CharSequence> validator, boolean forAdd, CharSequence name) {}
@Override
protected void validateValue(ValueValidator<CharSequence> validator, CharSequence name, CharSequence value) {}
}
}
| HeadersMap |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogErrorTests.java | {
"start": 797,
"end": 1407
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(LogTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new Log(source, args.get(0), args.size() == 1 ? null : args.get(1));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, i) -> "numeric"));
}
}
| LogErrorTests |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/codec/multipart/PartEventHttpMessageWriter.java | {
"start": 1623,
"end": 3918
} | class ____ extends MultipartWriterSupport implements HttpMessageWriter<PartEvent> {
public PartEventHttpMessageWriter() {
super(Collections.singletonList(MediaType.MULTIPART_FORM_DATA));
}
@Override
public boolean canWrite(ResolvableType elementType, @Nullable MediaType mediaType) {
if (PartEvent.class.isAssignableFrom(elementType.toClass())) {
if (mediaType == null) {
return true;
}
for (MediaType supportedMediaType : getWritableMediaTypes()) {
if (supportedMediaType.isCompatibleWith(mediaType)) {
return true;
}
}
}
return false;
}
@Override
public Mono<Void> write(Publisher<? extends PartEvent> partDataStream, ResolvableType elementType,
@Nullable MediaType mediaType, ReactiveHttpOutputMessage outputMessage,
Map<String, Object> hints) {
byte[] boundary = generateMultipartBoundary();
mediaType = getMultipartMediaType(mediaType, boundary);
outputMessage.getHeaders().setContentType(mediaType);
if (logger.isDebugEnabled()) {
logger.debug(Hints.getLogPrefix(hints) + "Encoding Publisher<PartEvent>");
}
Flux<DataBuffer> body = Flux.from(partDataStream)
.windowUntil(PartEvent::isLast)
.concatMap(partData -> partData.switchOnFirst((signal, flux) -> {
if (signal.hasValue()) {
PartEvent value = signal.get();
Assert.state(value != null, "Null value");
Flux<DataBuffer> dataBuffers = flux.map(PartEvent::content)
.filter(buffer -> buffer.readableByteCount() > 0);
return encodePartData(boundary, outputMessage.bufferFactory(), value.headers(), dataBuffers);
}
else {
return flux.cast(DataBuffer.class);
}
}))
.concatWith(generateLastLine(boundary, outputMessage.bufferFactory()))
.doOnDiscard(DataBuffer.class, DataBufferUtils::release);
if (logger.isDebugEnabled()) {
body = body.doOnNext(buffer -> Hints.touchDataBuffer(buffer, hints, logger));
}
return outputMessage.writeWith(body);
}
private Flux<DataBuffer> encodePartData(byte[] boundary, DataBufferFactory bufferFactory, HttpHeaders headers, Flux<DataBuffer> body) {
return Flux.concat(
generateBoundaryLine(boundary, bufferFactory),
generatePartHeaders(headers, bufferFactory),
body,
generateNewLine(bufferFactory));
}
}
| PartEventHttpMessageWriter |
java | google__guava | android/guava/src/com/google/common/util/concurrent/AbstractTransformFuture.java | {
"start": 9381,
"end": 10488
} | class ____<
I extends @Nullable Object, O extends @Nullable Object>
extends AbstractTransformFuture<
I, O, AsyncFunction<? super I, ? extends O>, ListenableFuture<? extends O>> {
AsyncTransformFuture(
ListenableFuture<? extends I> inputFuture, AsyncFunction<? super I, ? extends O> function) {
super(inputFuture, function);
}
@Override
ListenableFuture<? extends O> doTransform(
AsyncFunction<? super I, ? extends O> function, @ParametricNullness I input)
throws Exception {
ListenableFuture<? extends O> output = function.apply(input);
checkNotNull(
output,
"AsyncFunction.apply returned null instead of a Future. "
+ "Did you mean to return immediateFuture(null)? %s",
function);
return output;
}
@Override
void setResult(ListenableFuture<? extends O> result) {
setFuture(result);
}
}
/**
* An {@link AbstractTransformFuture} that delegates to a {@link Function} and {@link
* #set(Object)}.
*/
private static final | AsyncTransformFuture |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.