language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-aws/camel-aws2-textract/src/main/java/org/apache/camel/component/aws2/textract/client/Textract2ClientFactory.java | {
"start": 1390,
"end": 2324
} | class ____ {
private Textract2ClientFactory() {
}
/**
* Return the correct aws Textract client (based on remote vs local).
*
* @param configuration configuration
* @return TextractClient
*/
public static Textract2InternalClient getTextractClient(Textract2Configuration configuration) {
if (Boolean.TRUE.equals(configuration.isUseDefaultCredentialsProvider())) {
return new Textract2ClientIAMOptimized(configuration);
} else if (Boolean.TRUE.equals(configuration.isUseProfileCredentialsProvider())) {
return new Textract2ClientIAMProfileOptimized(configuration);
} else if (Boolean.TRUE.equals(configuration.isUseSessionCredentials())) {
return new Textract2ClientSessionTokenImpl(configuration);
} else {
return new Textract2ClientStandardImpl(configuration);
}
}
}
| Textract2ClientFactory |
java | spring-projects__spring-boot | build-plugin/spring-boot-gradle-plugin/src/main/java/org/springframework/boot/gradle/tasks/bundling/BootArchiveSupport.java | {
"start": 1694,
"end": 8027
} | class ____ {
private static final byte[] ZIP_FILE_HEADER = new byte[] { 'P', 'K', 3, 4 };
private static final String UNSPECIFIED_VERSION = "unspecified";
private static final Set<String> DEFAULT_LAUNCHER_CLASSES;
static {
Set<String> defaultLauncherClasses = new HashSet<>();
defaultLauncherClasses.add("org.springframework.boot.loader.launch.JarLauncher");
defaultLauncherClasses.add("org.springframework.boot.loader.launch.PropertiesLauncher");
defaultLauncherClasses.add("org.springframework.boot.loader.launch.WarLauncher");
DEFAULT_LAUNCHER_CLASSES = Collections.unmodifiableSet(defaultLauncherClasses);
}
private final PatternSet requiresUnpack = new PatternSet();
private final PatternSet exclusions = new PatternSet();
private final String loaderMainClass;
private final Spec<FileCopyDetails> librarySpec;
private final Function<FileCopyDetails, ZipCompression> compressionResolver;
BootArchiveSupport(String loaderMainClass, Spec<FileCopyDetails> librarySpec,
Function<FileCopyDetails, ZipCompression> compressionResolver) {
this.loaderMainClass = loaderMainClass;
this.librarySpec = librarySpec;
this.compressionResolver = compressionResolver;
this.requiresUnpack.include(Specs.satisfyNone());
}
void configureManifest(Manifest manifest, String mainClass, String classes, String lib,
@Nullable String classPathIndex, @Nullable String layersIndex, String jdkVersion,
String implementationTitle, @Nullable Object implementationVersion) {
Attributes attributes = manifest.getAttributes();
attributes.putIfAbsent("Main-Class", this.loaderMainClass);
attributes.putIfAbsent("Start-Class", mainClass);
attributes.computeIfAbsent("Spring-Boot-Version", (name) -> determineSpringBootVersion());
attributes.putIfAbsent("Spring-Boot-Classes", classes);
attributes.putIfAbsent("Spring-Boot-Lib", lib);
if (classPathIndex != null) {
attributes.putIfAbsent("Spring-Boot-Classpath-Index", classPathIndex);
}
if (layersIndex != null) {
attributes.putIfAbsent("Spring-Boot-Layers-Index", layersIndex);
}
attributes.putIfAbsent("Build-Jdk-Spec", jdkVersion);
attributes.putIfAbsent("Implementation-Title", implementationTitle);
if (implementationVersion != null) {
String versionString = implementationVersion.toString();
if (!UNSPECIFIED_VERSION.equals(versionString)) {
attributes.putIfAbsent("Implementation-Version", versionString);
}
}
}
private String determineSpringBootVersion() {
String version = getClass().getPackage().getImplementationVersion();
return (version != null) ? version : "unknown";
}
CopyAction createCopyAction(Jar jar, ResolvedDependencies resolvedDependencies) {
return createCopyAction(jar, resolvedDependencies, null, null);
}
CopyAction createCopyAction(Jar jar, ResolvedDependencies resolvedDependencies,
@Nullable LayerResolver layerResolver, @Nullable String jarmodeToolsLocation) {
File output = jar.getArchiveFile().get().getAsFile();
Manifest manifest = jar.getManifest();
boolean preserveFileTimestamps = jar.isPreserveFileTimestamps();
Integer dirPermissions = getUnixNumericDirPermissions(jar);
Integer filePermissions = getUnixNumericFilePermissions(jar);
boolean includeDefaultLoader = isUsingDefaultLoader(jar);
Spec<FileTreeElement> requiresUnpack = this.requiresUnpack.getAsSpec();
Spec<FileTreeElement> exclusions = this.exclusions.getAsExcludeSpec();
Spec<FileCopyDetails> librarySpec = this.librarySpec;
Function<FileCopyDetails, ZipCompression> compressionResolver = this.compressionResolver;
String encoding = jar.getMetadataCharset();
CopyAction action = new BootZipCopyAction(output, manifest, preserveFileTimestamps, dirPermissions,
filePermissions, includeDefaultLoader, jarmodeToolsLocation, requiresUnpack, exclusions, librarySpec,
compressionResolver, encoding, resolvedDependencies, layerResolver);
return action;
}
private @Nullable Integer getUnixNumericDirPermissions(CopySpec copySpec) {
return (GradleVersion.current().compareTo(GradleVersion.version("8.3")) >= 0)
? asUnixNumeric(copySpec.getDirPermissions()) : getDirMode(copySpec);
}
private @Nullable Integer getUnixNumericFilePermissions(CopySpec copySpec) {
return (GradleVersion.current().compareTo(GradleVersion.version("8.3")) >= 0)
? asUnixNumeric(copySpec.getFilePermissions()) : getFileMode(copySpec);
}
private @Nullable Integer asUnixNumeric(Property<ConfigurableFilePermissions> permissions) {
return permissions.isPresent() ? permissions.get().toUnixNumeric() : null;
}
private @Nullable Integer getDirMode(CopySpec copySpec) {
try {
return (Integer) copySpec.getClass().getMethod("getDirMode").invoke(copySpec);
}
catch (Exception ex) {
throw new RuntimeException("Failed to get dir mode from CopySpec", ex);
}
}
private @Nullable Integer getFileMode(CopySpec copySpec) {
try {
return (Integer) copySpec.getClass().getMethod("getFileMode").invoke(copySpec);
}
catch (Exception ex) {
throw new RuntimeException("Failed to get file mode from CopySpec", ex);
}
}
private boolean isUsingDefaultLoader(Jar jar) {
return DEFAULT_LAUNCHER_CLASSES.contains(jar.getManifest().getAttributes().get("Main-Class"));
}
void requiresUnpack(String... patterns) {
this.requiresUnpack.include(patterns);
}
void requiresUnpack(Spec<FileTreeElement> spec) {
this.requiresUnpack.include(spec);
}
void excludeNonZipLibraryFiles(FileCopyDetails details) {
if (this.librarySpec.isSatisfiedBy(details)) {
excludeNonZipFiles(details);
}
}
void excludeNonZipFiles(FileCopyDetails details) {
if (!isZip(details.getFile())) {
details.exclude();
}
}
private boolean isZip(File file) {
try {
try (FileInputStream fileInputStream = new FileInputStream(file)) {
return isZip(fileInputStream);
}
}
catch (IOException ex) {
return false;
}
}
private boolean isZip(InputStream inputStream) throws IOException {
for (byte headerByte : ZIP_FILE_HEADER) {
if (inputStream.read() != headerByte) {
return false;
}
}
return true;
}
void moveModuleInfoToRoot(CopySpec spec) {
spec.filesMatching("module-info.class", this::moveToRoot);
}
void moveToRoot(FileCopyDetails details) {
details.setRelativePath(details.getRelativeSourcePath());
}
}
| BootArchiveSupport |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/longs/Longs_assertIsNotCloseToPercentage_Test.java | {
"start": 1674,
"end": 4666
} | class ____ extends LongsBaseTest {
private static final Long ZERO = 0L;
private static final Long ONE = 1L;
private static final Long TEN = 10L;
private static final Long ONE_HUNDRED = 100L;
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> longs.assertIsNotCloseToPercentage(someInfo(), null, ONE,
withPercentage(ONE)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_expected_value_is_null() {
assertThatNullPointerException().isThrownBy(() -> longs.assertIsNotCloseToPercentage(someInfo(), ONE, null,
withPercentage(ONE)));
}
@Test
void should_fail_if_percentage_is_null() {
assertThatNullPointerException().isThrownBy(() -> longs.assertIsNotCloseToPercentage(someInfo(), ONE, ZERO, null));
}
@Test
void should_fail_if_percentage_is_negative() {
assertThatIllegalArgumentException().isThrownBy(() -> longs.assertIsNotCloseToPercentage(someInfo(), ONE, ZERO,
withPercentage(-1L)));
}
@ParameterizedTest
@CsvSource({
"1, 2, 1",
"1, 11, 90",
"-1, -2, 1",
"-1, -11, 90",
"0, -1, 99"
})
void should_pass_if_difference_is_greater_than_given_percentage(Long actual, Long other, Long percentage) {
longs.assertIsNotCloseToPercentage(someInfo(), actual, other, withPercentage(percentage));
}
@ParameterizedTest
@CsvSource({
"1, 1, 0",
"2, 1, 100",
"1, 2, 50",
"-1, -1, 0",
"-2, -1, 100",
"-1, -2, 50"
})
void should_fail_if_difference_is_equal_to_given_percentage(Long actual, Long other, Long percentage) {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> longs.assertIsNotCloseToPercentage(someInfo(), actual, other,
withPercentage(percentage)));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldNotBeEqualWithinPercentage(actual, other, withPercentage(percentage),
abs(actual - other)));
}
@Test
void should_fail_if_actual_is_too_close_to_expected_value() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> longs.assertIsNotCloseToPercentage(someInfo(), ONE, TEN, withPercentage(ONE_HUNDRED)));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldNotBeEqualWithinPercentage(ONE, TEN, withinPercentage(100),
TEN - ONE));
}
}
| Longs_assertIsNotCloseToPercentage_Test |
java | grpc__grpc-java | netty/src/test/java/io/grpc/netty/NettyHandlerTestBase.java | {
"start": 3285,
"end": 3364
} | class ____ Netty handler unit tests.
*/
@RunWith(JUnit4.class)
public abstract | for |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RemoveUnusedImportsTest.java | {
"start": 8525,
"end": 9019
} | class ____ {
/**
* @see Lib#f(Path[])
*/
void f() {}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void multipleTopLevelClasses() {
CompilationTestHelper.newInstance(RemoveUnusedImports.class, getClass())
.addSourceLines(
"MultipleTopLevelClasses.java",
"""
import java.util.List;
import java.util.Set;
public | Test |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/GetNamespaceInfoResponsePBImpl.java | {
"start": 1739,
"end": 3728
} | class ____
extends GetNamespaceInfoResponse implements PBRecord {
private FederationProtocolPBTranslator<GetNamespaceInfoResponseProto,
GetNamespaceInfoResponseProto.Builder,
GetNamespaceInfoResponseProtoOrBuilder> translator =
new FederationProtocolPBTranslator<GetNamespaceInfoResponseProto,
GetNamespaceInfoResponseProto.Builder,
GetNamespaceInfoResponseProtoOrBuilder>(
GetNamespaceInfoResponseProto.class);
public GetNamespaceInfoResponsePBImpl() {
}
@Override
public GetNamespaceInfoResponseProto getProto() {
return this.translator.build();
}
@Override
public void setProto(Message protocol) {
this.translator.setProto(protocol);
}
@Override
public void readInstance(String base64String) throws IOException {
this.translator.readInstance(base64String);
}
@Override
public Set<FederationNamespaceInfo> getNamespaceInfo() {
Set<FederationNamespaceInfo> ret = new HashSet<FederationNamespaceInfo>();
List<FederationNamespaceInfoProto> namespaceList =
this.translator.getProtoOrBuilder().getNamespaceInfosList();
for (FederationNamespaceInfoProto ns : namespaceList) {
FederationNamespaceInfo info = new FederationNamespaceInfo(
ns.getBlockPoolId(), ns.getClusterId(), ns.getNameserviceId());
ret.add(info);
}
return ret;
}
@Override
public void setNamespaceInfo(Set<FederationNamespaceInfo> namespaceInfo) {
int index = 0;
for (FederationNamespaceInfo item : namespaceInfo) {
FederationNamespaceInfoProto.Builder itemBuilder =
FederationNamespaceInfoProto.newBuilder();
itemBuilder.setClusterId(item.getClusterId());
itemBuilder.setBlockPoolId(item.getBlockPoolId());
itemBuilder.setNameserviceId(item.getNameserviceId());
this.translator.getBuilder().addNamespaceInfos(index,
itemBuilder.build());
index++;
}
}
} | GetNamespaceInfoResponsePBImpl |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng7255InferredGroupIdTest.java | {
"start": 931,
"end": 1447
} | class ____ extends AbstractMavenIntegrationTestCase {
private static final String PROJECT_PATH = "/mng-7255-inferred-groupid";
@Test
public void testInferredGroupId() throws IOException, VerificationException {
final File projectDir = extractResources(PROJECT_PATH);
final Verifier verifier = newVerifier(projectDir.getAbsolutePath());
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
}
}
| MavenITmng7255InferredGroupIdTest |
java | netty__netty | codec-stomp/src/main/java/io/netty/handler/codec/stomp/StompSubframeEncoder.java | {
"start": 2457,
"end": 10581
} | class ____ extends MessageToMessageEncoder<StompSubframe> {
private static final int ESCAPE_HEADER_KEY_CACHE_LIMIT = 32;
private static final float DEFAULT_LOAD_FACTOR = 0.75f;
private static final FastThreadLocal<LinkedHashMap<CharSequence, CharSequence>> ESCAPE_HEADER_KEY_CACHE =
new FastThreadLocal<LinkedHashMap<CharSequence, CharSequence>>() {
@Override
protected LinkedHashMap<CharSequence, CharSequence> initialValue() throws Exception {
LinkedHashMap<CharSequence, CharSequence> cache = new LinkedHashMap<CharSequence, CharSequence>(
ESCAPE_HEADER_KEY_CACHE_LIMIT, DEFAULT_LOAD_FACTOR, true) {
@Override
protected boolean removeEldestEntry(Entry eldest) {
return size() > ESCAPE_HEADER_KEY_CACHE_LIMIT;
}
};
cache.put(ACCEPT_VERSION, ACCEPT_VERSION);
cache.put(HOST, HOST);
cache.put(LOGIN, LOGIN);
cache.put(PASSCODE, PASSCODE);
cache.put(HEART_BEAT, HEART_BEAT);
cache.put(VERSION, VERSION);
cache.put(SESSION, SESSION);
cache.put(SERVER, SERVER);
cache.put(DESTINATION, DESTINATION);
cache.put(ID, ID);
cache.put(ACK, ACK);
cache.put(TRANSACTION, TRANSACTION);
cache.put(RECEIPT, RECEIPT);
cache.put(MESSAGE_ID, MESSAGE_ID);
cache.put(SUBSCRIPTION, SUBSCRIPTION);
cache.put(RECEIPT_ID, RECEIPT_ID);
cache.put(MESSAGE, MESSAGE);
cache.put(CONTENT_LENGTH, CONTENT_LENGTH);
cache.put(CONTENT_TYPE, CONTENT_TYPE);
return cache;
}
};
public StompSubframeEncoder() {
super(StompSubframe.class);
}
@Override
protected void encode(ChannelHandlerContext ctx, StompSubframe msg, List<Object> out) throws Exception {
if (msg instanceof StompFrame) {
StompFrame stompFrame = (StompFrame) msg;
ByteBuf buf = encodeFullFrame(stompFrame, ctx);
out.add(convertFullFrame(stompFrame, buf));
} else if (msg instanceof StompHeadersSubframe) {
StompHeadersSubframe stompHeadersSubframe = (StompHeadersSubframe) msg;
ByteBuf buf = ctx.alloc().buffer(headersSubFrameSize(stompHeadersSubframe));
encodeHeaders(stompHeadersSubframe, buf);
out.add(convertHeadersSubFrame(stompHeadersSubframe, buf));
} else if (msg instanceof StompContentSubframe) {
StompContentSubframe stompContentSubframe = (StompContentSubframe) msg;
ByteBuf buf = encodeContent(stompContentSubframe, ctx);
out.add(convertContentSubFrame(stompContentSubframe, buf));
}
}
/**
* An extension method to convert a STOMP encoded buffer to a different message type
* based on an original {@link StompFrame} full frame.
*
* <p>By default an encoded buffer is returned as is.
*/
protected Object convertFullFrame(StompFrame original, ByteBuf encoded) {
return encoded;
}
/**
* An extension method to convert a STOMP encoded buffer to a different message type
* based on an original {@link StompHeadersSubframe} headers sub frame.
*
* <p>By default an encoded buffer is returned as is.
*/
protected Object convertHeadersSubFrame(StompHeadersSubframe original, ByteBuf encoded) {
return encoded;
}
/**
* An extension method to convert a STOMP encoded buffer to a different message type
* based on an original {@link StompHeadersSubframe} content sub frame.
*
* <p>By default an encoded buffer is returned as is.
*/
protected Object convertContentSubFrame(StompContentSubframe original, ByteBuf encoded) {
return encoded;
}
/**
* Returns a heuristic size for headers (32 bytes per header line) + (2 bytes for colon and eol) + (additional
* command buffer).
*/
protected int headersSubFrameSize(StompHeadersSubframe headersSubframe) {
int estimatedSize = headersSubframe.headers().size() * 34 + 48;
if (estimatedSize < 128) {
return 128;
}
return Math.max(estimatedSize, 256);
}
private ByteBuf encodeFullFrame(StompFrame frame, ChannelHandlerContext ctx) {
int contentReadableBytes = frame.content().readableBytes();
ByteBuf buf = ctx.alloc().buffer(headersSubFrameSize(frame) + contentReadableBytes);
encodeHeaders(frame, buf);
if (contentReadableBytes > 0) {
buf.writeBytes(frame.content());
}
return buf.writeByte(NUL);
}
private static void encodeHeaders(StompHeadersSubframe frame, ByteBuf buf) {
StompCommand command = frame.command();
ByteBufUtil.writeUtf8(buf, command.toString());
buf.writeByte(StompConstants.LF);
boolean shouldEscape = shouldEscape(command);
LinkedHashMap<CharSequence, CharSequence> cache = ESCAPE_HEADER_KEY_CACHE.get();
for (Entry<CharSequence, CharSequence> entry : frame.headers()) {
CharSequence headerKey = entry.getKey();
if (shouldEscape) {
CharSequence cachedHeaderKey = cache.get(headerKey);
if (cachedHeaderKey == null) {
cachedHeaderKey = escape(headerKey);
cache.put(headerKey, cachedHeaderKey);
}
headerKey = cachedHeaderKey;
}
ByteBufUtil.writeUtf8(buf, headerKey);
buf.writeByte(StompConstants.COLON);
CharSequence headerValue = shouldEscape? escape(entry.getValue()) : entry.getValue();
ByteBufUtil.writeUtf8(buf, headerValue);
buf.writeByte(StompConstants.LF);
}
buf.writeByte(StompConstants.LF);
}
private static ByteBuf encodeContent(StompContentSubframe content, ChannelHandlerContext ctx) {
if (content instanceof LastStompContentSubframe) {
ByteBuf buf = ctx.alloc().buffer(content.content().readableBytes() + 1);
buf.writeBytes(content.content());
buf.writeByte(StompConstants.NUL);
return buf;
}
return content.content().retain();
}
private static boolean shouldEscape(StompCommand command) {
return command != StompCommand.CONNECT && command != StompCommand.CONNECTED;
}
private static CharSequence escape(CharSequence input) {
AppendableCharSequence builder = null;
for (int i = 0; i < input.length(); i++) {
char chr = input.charAt(i);
if (chr == '\\') {
builder = escapeBuilder(builder, input, i);
builder.append("\\\\");
} else if (chr == ':') {
builder = escapeBuilder(builder, input, i);
builder.append("\\c");
} else if (chr == '\n') {
builder = escapeBuilder(builder, input, i);
builder.append("\\n");
} else if (chr == '\r') {
builder = escapeBuilder(builder, input, i);
builder.append("\\r");
} else if (builder != null) {
builder.append(chr);
}
}
return builder != null? builder : input;
}
private static AppendableCharSequence escapeBuilder(AppendableCharSequence builder, CharSequence input,
int offset) {
if (builder != null) {
return builder;
}
// Add extra overhead to the input char sequence to avoid resizing during escaping.
return new AppendableCharSequence(input.length() + 8).append(input, 0, offset);
}
}
| StompSubframeEncoder |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java | {
"start": 1151,
"end": 1718
} | class ____ implements RawErasureCoderFactory {
public static final String CODER_NAME = "rs_java";
@Override
public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
return new RSRawEncoder(coderOptions);
}
@Override
public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
return new RSRawDecoder(coderOptions);
}
@Override
public String getCoderName() {
return CODER_NAME;
}
@Override
public String getCodecName() {
return ErasureCodeConstants.RS_CODEC_NAME;
}
}
| RSRawErasureCoderFactory |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 39028,
"end": 39475
} | class ____ {
public void doTest() {
Client client = new Client();
client.after(/* isAdmin= */ false);
}
}
""")
.doTest();
}
@Test
public void trailingSemicolon() {
refactoringTestHelper
.addInputLines(
"Client.java",
"""
import com.google.errorprone.annotations.InlineMe;
public final | Caller |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/test/TestWebAppTests.java | {
"start": 3173,
"end": 3634
} | class ____ {
}
@Test
void testRequestScope() {
Injector injector = WebAppTests.createMockInjector(this);
assertSame(injector.getInstance(ScopeTest.class),
injector.getInstance(ScopeTest.class));
}
private void logInstances(HttpServletRequest req, HttpServletResponse res,
PrintWriter out) {
LOG.info("request: {}", req);
LOG.info("response: {}", res);
LOG.info("writer: {}", out);
}
}
| ScopeTest |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/JSONPatch.java | {
"start": 2817,
"end": 2893
} | enum ____ {
add, remove, replace, move, copy, test
}
}
| OperationType |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/groups/Tuple_Test.java | {
"start": 3052,
"end": 3340
} | class ____ {
private byte[] pk;
private String name;
public SinteticClass(byte[] pk, String name) {
this.pk = pk;
this.name = name;
}
public byte[] getPk() {
return pk;
}
public String getName() {
return name;
}
}
}
| SinteticClass |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/BufferedIOStatisticsInputStream.java | {
"start": 1479,
"end": 2721
} | class ____
extends BufferedInputStream
implements IOStatisticsSource, StreamCapabilities {
/**
* Buffer an input stream with the default buffer size of 8k.
* @param in input stream
*/
public BufferedIOStatisticsInputStream(final InputStream in) {
super(in);
}
/**
* Buffer an input stream with the chosen buffer size.
* @param in input stream
* @param size buffer size
*/
public BufferedIOStatisticsInputStream(final InputStream in, final int size) {
super(in, size);
}
/**
* Return any IOStatistics offered by the inner stream.
* @return inner IOStatistics or null
*/
@Override
public IOStatistics getIOStatistics() {
return retrieveIOStatistics(in);
}
/**
* If the inner stream supports {@link StreamCapabilities},
* forward the probe to it.
* Otherwise: return false.
*
* @param capability string to query the stream support for.
* @return true if a capability is known to be supported.
*/
@Override
public boolean hasCapability(final String capability) {
if (in instanceof StreamCapabilities) {
return ((StreamCapabilities) in).hasCapability(capability);
} else {
return false;
}
}
}
| BufferedIOStatisticsInputStream |
java | quarkusio__quarkus | extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/RequestMetricInfo.java | {
"start": 211,
"end": 3542
} | class ____ {
static final Logger log = Logger.getLogger(RequestMetricInfo.class);
public static final String ROOT = "/";
public static final String HTTP_REQUEST_PATH = "HTTP_REQUEST_PATH";
/** Store the sample used to measure the request */
protected Timer.Sample sample;
public RequestMetricInfo setSample(Timer.Sample sample) {
this.sample = sample;
return this;
}
public Timer.Sample getSample() {
return sample;
}
/**
* Normalize and filter request path against match patterns
*
* @param uri Uri for request
* @param ignorePatterns
* @param matchPatterns
* @return final uri for tag, or null to skip measurement
*/
protected String getNormalizedUriPath(Map<Pattern, String> matchPatterns, List<Pattern> ignorePatterns, String uri) {
// Normalize path
String path = normalizePath(uri);
if (path.length() > 1) {
String origPath = path;
// Look for configured matches, then inferred templates
path = applyMatchPatterns(origPath, matchPatterns);
if (path.equals(origPath)) {
path = normalizePath(applyTemplateMatching(origPath));
}
}
return filterIgnored(path, ignorePatterns);
}
/** Subclasses should override with appropriate mechanisms for finding templated urls */
protected String applyTemplateMatching(String path) {
return path;
}
static String applyMatchPatterns(String path, Map<Pattern, String> matchPatterns) {
if (!matchPatterns.isEmpty()) {
for (Map.Entry<Pattern, String> mp : matchPatterns.entrySet()) {
if (mp.getKey().matcher(path).matches()) {
log.debugf("Path %s matched pattern %s, using %s", path, mp.getKey(), mp.getValue());
return mp.getValue();
}
}
}
return path;
}
/** Return path or null if it should be ignored */
protected static String filterIgnored(String path, List<Pattern> ignorePatterns) {
if (!ignorePatterns.isEmpty()) {
for (Pattern p : ignorePatterns) {
if (p.matcher(path).matches()) {
log.debugf("Path %s ignored; matches pattern %s", path, p.pattern());
return null;
}
}
}
return path;
}
protected static String normalizePath(String uri) {
if (uri == null || uri.isEmpty() || ROOT.equals(uri)) {
return ROOT;
}
String workingPath = new String(uri);
// Remove all leading slashes
// detect
int start = 0;
while (start < workingPath.length() && workingPath.charAt(start) == '/') {
start++;
}
// Add missing / and remove multiple leading
if (start != 1) {
workingPath = "/" + workingPath.substring(start);
}
// Collapse multiple trailing slashes
int end = workingPath.length();
while (end > 1 && workingPath.charAt(end - 1) == '/') {
end--;
}
if (end != workingPath.length()) {
workingPath = workingPath.substring(0, end);
}
return workingPath;
}
}
| RequestMetricInfo |
java | spring-projects__spring-boot | module/spring-boot-jersey/src/test/java/org/springframework/boot/jersey/autoconfigure/JerseyAutoConfigurationServletContainerTests.java | {
"start": 3038,
"end": 3314
} | class ____ extends ResourceConfig {
@Value("${message:World}")
private String msg;
Application() {
register(Application.class);
}
@GET
public String message() {
return "Hello " + this.msg;
}
}
@Configuration(proxyBeanMethods = false)
static | Application |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/util/QueueDrainHelperTest.java | {
"start": 1357,
"end": 23287
} | class ____ extends RxJavaTest {
@Test
public void isCancelled() {
assertTrue(QueueDrainHelper.isCancelled(new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
throw new IOException();
}
}));
}
@Test
public void requestMaxInt() {
QueueDrainHelper.request(new Subscription() {
@Override
public void request(long n) {
assertEquals(Integer.MAX_VALUE, n);
}
@Override
public void cancel() {
}
}, Integer.MAX_VALUE);
}
@Test
public void requestMinInt() {
QueueDrainHelper.request(new Subscription() {
@Override
public void request(long n) {
assertEquals(Long.MAX_VALUE, n);
}
@Override
public void cancel() {
}
}, Integer.MIN_VALUE);
}
@Test
public void requestAlmostMaxInt() {
QueueDrainHelper.request(new Subscription() {
@Override
public void request(long n) {
assertEquals(Integer.MAX_VALUE - 1, n);
}
@Override
public void cancel() {
}
}, Integer.MAX_VALUE - 1);
}
@Test
public void postCompleteEmpty() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ArrayDeque<Integer> queue = new ArrayDeque<>();
AtomicLong state = new AtomicLong();
BooleanSupplier isCancelled = new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return false;
}
};
ts.onSubscribe(new BooleanSubscription());
QueueDrainHelper.postComplete(ts, queue, state, isCancelled);
ts.assertResult();
}
@Test
public void postCompleteWithRequest() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ArrayDeque<Integer> queue = new ArrayDeque<>();
AtomicLong state = new AtomicLong();
BooleanSupplier isCancelled = new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return false;
}
};
ts.onSubscribe(new BooleanSubscription());
queue.offer(1);
state.getAndIncrement();
QueueDrainHelper.postComplete(ts, queue, state, isCancelled);
ts.assertResult(1);
}
@Test
public void completeRequestRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final TestSubscriber<Integer> ts = new TestSubscriber<>();
final ArrayDeque<Integer> queue = new ArrayDeque<>();
final AtomicLong state = new AtomicLong();
final BooleanSupplier isCancelled = new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return false;
}
};
ts.onSubscribe(new BooleanSubscription());
queue.offer(1);
Runnable r1 = new Runnable() {
@Override
public void run() {
QueueDrainHelper.postCompleteRequest(1, ts, queue, state, isCancelled);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
QueueDrainHelper.postComplete(ts, queue, state, isCancelled);
}
};
TestHelper.race(r1, r2);
ts.assertResult(1);
}
}
@Test
public void postCompleteCancelled() {
final TestSubscriber<Integer> ts = new TestSubscriber<>();
ArrayDeque<Integer> queue = new ArrayDeque<>();
AtomicLong state = new AtomicLong();
BooleanSupplier isCancelled = new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return ts.isCancelled();
}
};
ts.onSubscribe(new BooleanSubscription());
queue.offer(1);
state.getAndIncrement();
ts.cancel();
QueueDrainHelper.postComplete(ts, queue, state, isCancelled);
ts.assertEmpty();
}
@Test
public void postCompleteCancelledAfterOne() {
final TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
super.onNext(t);
cancel();
}
};
ArrayDeque<Integer> queue = new ArrayDeque<>();
AtomicLong state = new AtomicLong();
BooleanSupplier isCancelled = new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return ts.isCancelled();
}
};
ts.onSubscribe(new BooleanSubscription());
queue.offer(1);
state.getAndIncrement();
QueueDrainHelper.postComplete(ts, queue, state, isCancelled);
ts.assertValue(1).assertNoErrors().assertNotComplete();
}
@Test
public void drainMaxLoopMissingBackpressure() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
QueueDrain<Integer, Integer> qd = new QueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public long requested() {
return 0;
}
@Override
public long produced(long n) {
return 0;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public boolean accept(Subscriber<? super Integer> a, Integer v) {
return false;
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
q.offer(1);
QueueDrainHelper.drainMaxLoop(q, ts, false, null, qd);
ts.assertFailure(MissingBackpressureException.class);
}
@Test
public void drainMaxLoopMissingBackpressureWithResource() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
QueueDrain<Integer, Integer> qd = new QueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public long requested() {
return 0;
}
@Override
public long produced(long n) {
return 0;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public boolean accept(Subscriber<? super Integer> a, Integer v) {
return false;
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
q.offer(1);
Disposable d = Disposable.empty();
QueueDrainHelper.drainMaxLoop(q, ts, false, d, qd);
ts.assertFailure(MissingBackpressureException.class);
assertTrue(d.isDisposed());
}
@Test
public void drainMaxLoopDontAccept() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
QueueDrain<Integer, Integer> qd = new QueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public long requested() {
return 1;
}
@Override
public long produced(long n) {
return 0;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public boolean accept(Subscriber<? super Integer> a, Integer v) {
return false;
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
q.offer(1);
QueueDrainHelper.drainMaxLoop(q, ts, false, null, qd);
ts.assertEmpty();
}
@Test
public void checkTerminatedDelayErrorEmpty() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
QueueDrain<Integer, Integer> qd = new QueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public long requested() {
return 0;
}
@Override
public long produced(long n) {
return 0;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public boolean accept(Subscriber<? super Integer> a, Integer v) {
return false;
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, true, ts, true, q, qd);
ts.assertResult();
}
@Test
public void checkTerminatedDelayErrorNonEmpty() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
QueueDrain<Integer, Integer> qd = new QueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public long requested() {
return 0;
}
@Override
public long produced(long n) {
return 0;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public boolean accept(Subscriber<? super Integer> a, Integer v) {
return false;
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, false, ts, true, q, qd);
ts.assertEmpty();
}
@Test
public void checkTerminatedDelayErrorEmptyError() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
QueueDrain<Integer, Integer> qd = new QueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return new TestException();
}
@Override
public boolean enter() {
return true;
}
@Override
public long requested() {
return 0;
}
@Override
public long produced(long n) {
return 0;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public boolean accept(Subscriber<? super Integer> a, Integer v) {
return false;
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, true, ts, true, q, qd);
ts.assertFailure(TestException.class);
}
@Test
public void checkTerminatedNonDelayErrorError() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
ts.onSubscribe(new BooleanSubscription());
QueueDrain<Integer, Integer> qd = new QueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return new TestException();
}
@Override
public boolean enter() {
return true;
}
@Override
public long requested() {
return 0;
}
@Override
public long produced(long n) {
return 0;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public boolean accept(Subscriber<? super Integer> a, Integer v) {
return false;
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, false, ts, false, q, qd);
ts.assertFailure(TestException.class);
}
@Test
public void observerCheckTerminatedDelayErrorEmpty() {
TestObserver<Integer> to = new TestObserver<>();
to.onSubscribe(Disposable.empty());
ObservableQueueDrain<Integer, Integer> qd = new ObservableQueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public void accept(Observer<? super Integer> a, Integer v) {
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, true, to, true, q, null, qd);
to.assertResult();
}
@Test
public void observerCheckTerminatedDelayErrorEmptyResource() {
TestObserver<Integer> to = new TestObserver<>();
to.onSubscribe(Disposable.empty());
ObservableQueueDrain<Integer, Integer> qd = new ObservableQueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public void accept(Observer<? super Integer> a, Integer v) {
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
Disposable d = Disposable.empty();
QueueDrainHelper.checkTerminated(true, true, to, true, q, d, qd);
to.assertResult();
assertTrue(d.isDisposed());
}
@Test
public void observerCheckTerminatedDelayErrorNonEmpty() {
TestObserver<Integer> to = new TestObserver<>();
to.onSubscribe(Disposable.empty());
ObservableQueueDrain<Integer, Integer> qd = new ObservableQueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return null;
}
@Override
public boolean enter() {
return true;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public void accept(Observer<? super Integer> a, Integer v) {
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, false, to, true, q, null, qd);
to.assertEmpty();
}
@Test
public void observerCheckTerminatedDelayErrorEmptyError() {
TestObserver<Integer> to = new TestObserver<>();
to.onSubscribe(Disposable.empty());
ObservableQueueDrain<Integer, Integer> qd = new ObservableQueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return new TestException();
}
@Override
public boolean enter() {
return true;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public void accept(Observer<? super Integer> a, Integer v) {
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, true, to, true, q, null, qd);
to.assertFailure(TestException.class);
}
@Test
public void observerCheckTerminatedNonDelayErrorError() {
TestObserver<Integer> to = new TestObserver<>();
to.onSubscribe(Disposable.empty());
ObservableQueueDrain<Integer, Integer> qd = new ObservableQueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return new TestException();
}
@Override
public boolean enter() {
return true;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public void accept(Observer<? super Integer> a, Integer v) {
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
QueueDrainHelper.checkTerminated(true, false, to, false, q, null, qd);
to.assertFailure(TestException.class);
}
@Test
public void observerCheckTerminatedNonDelayErrorErrorResource() {
TestObserver<Integer> to = new TestObserver<>();
to.onSubscribe(Disposable.empty());
ObservableQueueDrain<Integer, Integer> qd = new ObservableQueueDrain<Integer, Integer>() {
@Override
public boolean cancelled() {
return false;
}
@Override
public boolean done() {
return false;
}
@Override
public Throwable error() {
return new TestException();
}
@Override
public boolean enter() {
return true;
}
@Override
public int leave(int m) {
return 0;
}
@Override
public void accept(Observer<? super Integer> a, Integer v) {
}
};
SpscArrayQueue<Integer> q = new SpscArrayQueue<>(32);
Disposable d = Disposable.empty();
QueueDrainHelper.checkTerminated(true, false, to, false, q, d, qd);
to.assertFailure(TestException.class);
assertTrue(d.isDisposed());
}
@Test
public void postCompleteAlreadyComplete() {
TestSubscriber<Integer> ts = new TestSubscriber<>();
Queue<Integer> q = new ArrayDeque<>();
q.offer(1);
AtomicLong state = new AtomicLong(QueueDrainHelper.COMPLETED_MASK);
QueueDrainHelper.postComplete(ts, q, state, new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return false;
}
});
}
}
| QueueDrainHelperTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/CloseEvents.java | {
"start": 627,
"end": 710
} | interface ____ {
void resourceClosed(Object resource);
}
}
| CloseListener |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/query/RescoreKnnVectorQueryIT.java | {
"start": 2571,
"end": 3045
} | class ____ extends ESIntegTestCase {
public static final String INDEX_NAME = "test";
public static final String VECTOR_FIELD = "vector";
public static final String VECTOR_SCORE_SCRIPT = "vector_scoring";
public static final String QUERY_VECTOR_PARAM = "query_vector";
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(CustomScriptPlugin.class);
}
public static | RescoreKnnVectorQueryIT |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/loader/ast/internal/SingleIdEntityLoaderProvidedQueryImpl.java | {
"start": 784,
"end": 2275
} | class ____<T> implements SingleIdEntityLoader<T> {
private final EntityMappingType entityDescriptor;
private final NamedQueryMemento<T> namedQueryMemento;
public SingleIdEntityLoaderProvidedQueryImpl(
EntityMappingType entityDescriptor,
NamedQueryMemento<T> namedQueryMemento) {
this.entityDescriptor = entityDescriptor;
this.namedQueryMemento = namedQueryMemento;
}
@Override
public EntityMappingType getLoadable() {
return entityDescriptor;
}
@Override @SuppressWarnings("unchecked")
public T load(Object pkValue, LockOptions lockOptions, Boolean readOnly, SharedSessionContractImplementor session) {
final var mappedJavaType = (JavaType<T>) entityDescriptor.getMappedJavaType();
final var query = namedQueryMemento.toQuery( session, mappedJavaType.getJavaTypeClass() );
query.setParameter( (Parameter<Object>) query.getParameters().iterator().next(), pkValue );
query.setQueryFlushMode( QueryFlushMode.NO_FLUSH );
return query.uniqueResult();
}
@Override
public T load(
Object pkValue,
Object entityInstance,
LockOptions lockOptions,
Boolean readOnly,
SharedSessionContractImplementor session) {
if ( entityInstance != null ) {
throw new UnsupportedOperationException("null entity instance");
}
return load( pkValue, lockOptions, readOnly, session );
}
@Override
public Object[] loadDatabaseSnapshot(Object id, SharedSessionContractImplementor session) {
return EMPTY_OBJECT_ARRAY;
}
}
| SingleIdEntityLoaderProvidedQueryImpl |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/HttpDisableStreamCacheTest.java | {
"start": 1457,
"end": 3401
} | class ____ extends BaseHttpTest {
private HttpServer localServer;
@Override
public void setupResources() throws Exception {
localServer = ServerBootstrap.bootstrap()
.setCanonicalHostName("localhost").setHttpProcessor(getBasicHttpProcessor())
.setConnectionReuseStrategy(getConnectionReuseStrategy()).setResponseFactory(getHttpResponseFactory())
.setSslContext(getSSLContext())
.register("/test/", new BasicValidationHandler(GET.name(), null, null, getExpectedContent())).create();
localServer.start();
}
@Override
public void cleanupResources() throws Exception {
if (localServer != null) {
localServer.stop();
}
}
@Test
public void httpDisableStreamCache() {
Object out = template.requestBody("direct:start", (String) null);
assertEquals("camel rocks!", context.getTypeConverter().convertTo(String.class, out));
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start").streamCache("false")
.to("http://localhost:" + localServer.getLocalPort() + "/test/?disableStreamCache=true")
.process(e -> {
InputStream is = (InputStream) e.getMessage().getBody();
assertNotNull(is);
// we can only read the raw http stream once
ByteArrayOutputStream bos = new ByteArrayOutputStream();
IOHelper.copy(is, bos);
e.setVariable("newBody", bos.toString());
})
.setBody().variable("newBody");
}
};
}
}
| HttpDisableStreamCacheTest |
java | spring-projects__spring-boot | module/spring-boot-webclient/src/main/java/org/springframework/boot/webclient/observation/ObservationWebClientCustomizer.java | {
"start": 1123,
"end": 2060
} | class ____ implements WebClientCustomizer {
private final ObservationRegistry observationRegistry;
private final ClientRequestObservationConvention observationConvention;
/**
* Create a new {@code ObservationWebClientCustomizer} that will configure the
* {@code Observation} setup on the client.
* @param observationRegistry the registry to publish observations to
* @param observationConvention the convention to use to populate observations
*/
public ObservationWebClientCustomizer(ObservationRegistry observationRegistry,
ClientRequestObservationConvention observationConvention) {
this.observationRegistry = observationRegistry;
this.observationConvention = observationConvention;
}
@Override
public void customize(WebClient.Builder webClientBuilder) {
webClientBuilder.observationRegistry(this.observationRegistry)
.observationConvention(this.observationConvention);
}
}
| ObservationWebClientCustomizer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificanceLookup.java | {
"start": 2366,
"end": 9382
} | interface ____ extends Releasable {
long freq(long term) throws IOException;
}
private final AggregationContext context;
private final MappedFieldType fieldType;
private final DocValueFormat format;
private final Query backgroundFilter;
private final int supersetNumDocs;
private TermsEnum termsEnum;
SignificanceLookup(
AggregationContext context,
SamplingContext samplingContext,
MappedFieldType fieldType,
DocValueFormat format,
QueryBuilder backgroundFilter
) throws IOException {
this.context = context;
this.fieldType = fieldType;
this.format = format;
// If there is no provided background filter, but we are within a sampling context, our background docs need to take the sampling
// context into account.
// If there is a filter, that filter needs to take the sampling into account (if we are within a sampling context)
Query backgroundQuery = backgroundFilter == null
? samplingContext.buildSamplingQueryIfNecessary(context).orElse(null)
: samplingContext.buildQueryWithSampler(backgroundFilter, context);
// Refilter to account for alias filters, if there are any.
if (backgroundQuery == null) {
Query matchAllDocsQuery = new MatchAllDocsQuery();
Query contextFiltered = context.filterQuery(matchAllDocsQuery);
if (contextFiltered != matchAllDocsQuery) {
this.backgroundFilter = contextFiltered;
} else {
this.backgroundFilter = null;
}
} else {
this.backgroundFilter = context.filterQuery(backgroundQuery);
}
/*
* We need to use a superset size that includes deleted docs or we
* could end up blowing up with bad statistics that cause us to blow
* up later on.
*/
IndexSearcher searcher = context.searcher();
supersetNumDocs = this.backgroundFilter == null ? searcher.getIndexReader().maxDoc() : searcher.count(this.backgroundFilter);
}
/**
* Get the number of docs in the superset.
*/
long supersetSize() {
return supersetNumDocs;
}
/**
* Get the background frequency of a {@link BytesRef} term.
*/
BackgroundFrequencyForBytes bytesLookup(BigArrays bigArrays, CardinalityUpperBound cardinality) {
if (cardinality == CardinalityUpperBound.ONE) {
return new BackgroundFrequencyForBytes() {
@Override
public long freq(BytesRef term) throws IOException {
return getBackgroundFrequency(term);
}
@Override
public void close() {}
};
}
final BytesRefHash termToPosition = new BytesRefHash(1, bigArrays);
boolean success = false;
try {
BackgroundFrequencyForBytes b = new BackgroundFrequencyForBytes() {
private LongArray positionToFreq = bigArrays.newLongArray(1, false);
@Override
public long freq(BytesRef term) throws IOException {
long position = termToPosition.add(term);
if (position < 0) {
return positionToFreq.get(-1 - position);
}
long freq = getBackgroundFrequency(term);
positionToFreq = bigArrays.grow(positionToFreq, position + 1);
positionToFreq.set(position, freq);
return freq;
}
@Override
public void close() {
Releasables.close(termToPosition, positionToFreq);
}
};
success = true;
return b;
} finally {
if (success == false) {
termToPosition.close();
}
}
}
/**
* Get the background frequency of a {@link BytesRef} term.
*/
private long getBackgroundFrequency(BytesRef term) throws IOException {
return getBackgroundFrequency(context.buildQuery(makeBackgroundFrequencyQuery(format.format(term).toString())));
}
/**
* Get the background frequency of a {@code long} term.
*/
BackgroundFrequencyForLong longLookup(BigArrays bigArrays, CardinalityUpperBound cardinality) {
if (cardinality == CardinalityUpperBound.ONE) {
return new BackgroundFrequencyForLong() {
@Override
public long freq(long term) throws IOException {
return getBackgroundFrequency(term);
}
@Override
public void close() {}
};
}
final LongHash termToPosition = new LongHash(1, bigArrays);
boolean success = false;
try {
BackgroundFrequencyForLong b = new BackgroundFrequencyForLong() {
private LongArray positionToFreq = bigArrays.newLongArray(1, false);
@Override
public long freq(long term) throws IOException {
long position = termToPosition.add(term);
if (position < 0) {
return positionToFreq.get(-1 - position);
}
long freq = getBackgroundFrequency(term);
positionToFreq = bigArrays.grow(positionToFreq, position + 1);
positionToFreq.set(position, freq);
return freq;
}
@Override
public void close() {
Releasables.close(termToPosition, positionToFreq);
}
};
success = true;
return b;
} finally {
if (success == false) {
termToPosition.close();
}
}
}
/**
* Get the background frequency of a {@code long} term.
*/
private long getBackgroundFrequency(long term) throws IOException {
return getBackgroundFrequency(context.buildQuery(makeBackgroundFrequencyQuery(format.format(term).toString())));
}
private QueryBuilder makeBackgroundFrequencyQuery(String value) {
QueryBuilder queryBuilder = new TermQueryBuilder(fieldType.name(), value);
var nestedParentField = context.nestedLookup().getNestedParent(fieldType.name());
if (nestedParentField != null) {
queryBuilder = new NestedQueryBuilder(nestedParentField, queryBuilder, ScoreMode.Avg);
}
return queryBuilder;
}
private long getBackgroundFrequency(Query query) throws IOException {
// Note that `getTermsEnum` takes into account the backgroundFilter, with already has the sampling query applied
if (query instanceof TermQuery) {
// for types that use the inverted index, we prefer using a terms
// | BackgroundFrequencyForLong |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/id/generators/UnnamedGeneratorTests.java | {
"start": 4670,
"end": 4895
} | class ____ {
@Id
@GeneratedValue
@SequenceGenerator(sequenceName = "my_seq")
private Integer id;
private String name;
}
@Entity(name="Entity4")
@SequenceGenerator(sequenceName = "another_seq")
public static | Entity3 |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestRollingFileSystemSinkWithLocal.java | {
"start": 1138,
"end": 1199
} | class ____ the context of the local file
* system.
*/
public | in |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/filter/ExpressionMessageFilter.java | {
"start": 1461,
"end": 6220
} | class ____ implements MessageFilter {
protected static final Logger log = LoggerFactory.getLogger(LoggerName.FILTER_LOGGER_NAME);
protected final SubscriptionData subscriptionData;
protected final ConsumerFilterData consumerFilterData;
protected final ConsumerFilterManager consumerFilterManager;
protected final boolean bloomDataValid;
public ExpressionMessageFilter(SubscriptionData subscriptionData, ConsumerFilterData consumerFilterData,
ConsumerFilterManager consumerFilterManager) {
this.subscriptionData = subscriptionData;
this.consumerFilterData = consumerFilterData;
this.consumerFilterManager = consumerFilterManager;
if (consumerFilterData == null) {
bloomDataValid = false;
return;
}
BloomFilter bloomFilter = this.consumerFilterManager.getBloomFilter();
if (bloomFilter != null && bloomFilter.isValid(consumerFilterData.getBloomFilterData())) {
bloomDataValid = true;
} else {
bloomDataValid = false;
}
}
@Override
public boolean isMatchedByConsumeQueue(Long tagsCode, ConsumeQueueExt.CqExtUnit cqExtUnit) {
if (null == subscriptionData) {
return true;
}
if (subscriptionData.isClassFilterMode()) {
return true;
}
// by tags code.
if (ExpressionType.isTagType(subscriptionData.getExpressionType())) {
if (tagsCode == null) {
return true;
}
if (subscriptionData.getSubString().equals(SubscriptionData.SUB_ALL)) {
return true;
}
return subscriptionData.getCodeSet().contains(tagsCode.intValue());
} else {
// no expression or no bloom
if (consumerFilterData == null || consumerFilterData.getExpression() == null
|| consumerFilterData.getCompiledExpression() == null || consumerFilterData.getBloomFilterData() == null) {
return true;
}
// message is before consumer
if (cqExtUnit == null || !consumerFilterData.isMsgInLive(cqExtUnit.getMsgStoreTime())) {
log.debug("Pull matched because not in live: {}, {}", consumerFilterData, cqExtUnit);
return true;
}
byte[] filterBitMap = cqExtUnit.getFilterBitMap();
BloomFilter bloomFilter = this.consumerFilterManager.getBloomFilter();
if (filterBitMap == null || !this.bloomDataValid
|| filterBitMap.length * Byte.SIZE != consumerFilterData.getBloomFilterData().getBitNum()) {
return true;
}
BitsArray bitsArray = null;
try {
bitsArray = BitsArray.create(filterBitMap);
boolean ret = bloomFilter.isHit(consumerFilterData.getBloomFilterData(), bitsArray);
log.debug("Pull {} by bit map:{}, {}, {}", ret, consumerFilterData, bitsArray, cqExtUnit);
return ret;
} catch (Throwable e) {
log.error("bloom filter error, sub=" + subscriptionData
+ ", filter=" + consumerFilterData + ", bitMap=" + bitsArray, e);
}
}
return true;
}
@Override
public boolean isMatchedByCommitLog(ByteBuffer msgBuffer, Map<String, String> properties) {
if (subscriptionData == null) {
return true;
}
if (subscriptionData.isClassFilterMode()) {
return true;
}
if (ExpressionType.isTagType(subscriptionData.getExpressionType())) {
return true;
}
ConsumerFilterData realFilterData = this.consumerFilterData;
Map<String, String> tempProperties = properties;
// no expression
if (realFilterData == null || realFilterData.getExpression() == null
|| realFilterData.getCompiledExpression() == null) {
return true;
}
if (tempProperties == null && msgBuffer != null) {
tempProperties = MessageDecoder.decodeProperties(msgBuffer);
}
Object ret = null;
try {
MessageEvaluationContext context = new MessageEvaluationContext(tempProperties);
ret = realFilterData.getCompiledExpression().evaluate(context);
} catch (Throwable e) {
log.error("Message Filter error, " + realFilterData + ", " + tempProperties, e);
}
log.debug("Pull eval result: {}, {}, {}", ret, realFilterData, tempProperties);
if (ret == null || !(ret instanceof Boolean)) {
return false;
}
return (Boolean) ret;
}
}
| ExpressionMessageFilter |
java | quarkusio__quarkus | extensions/security/deployment/src/test/java/io/quarkus/security/test/cdi/events/AsyncCDISecurityEventTest.java | {
"start": 1302,
"end": 4182
} | class ____ {
@Inject
@Named(BeanWithSecurityAnnotations.NAME)
BeanWithSecurityAnnotations beanWithSecurityAnnotations;
@Inject
SecurityEventObserver observer;
@Inject
AsyncSecurityEventObserver asyncObserver;
@Inject
AsyncAuthZSuccessEventObserver asyncAuthZSuccessEventObserver;
@Inject
AsyncAuthZFailureEventObserver asyncAuthZFailureObserver;
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(BeanWithNoSecurityAnnotations.class, BeanWithSecurityAnnotations.class,
SecurityTestUtils.class, IdentityMock.class, SecurityEventObserver.class,
AsyncSecurityEventObserver.class, AsyncAuthZFailureEventObserver.class,
AsyncAuthZSuccessEventObserver.class));
@BeforeEach
public void beforeEach() {
observer.getObserverEvents().clear();
asyncObserver.getObserverEvents().clear();
asyncAuthZFailureObserver.getObserverEvents().clear();
asyncAuthZSuccessEventObserver.getObserverEvents().clear();
}
@Test
public void testAuthSuccessAsyncObserverNotified() {
assertSuccess(beanWithSecurityAnnotations::restricted, "accessibleForAdminOnly", ADMIN);
assertEquals(1, observer.getObserverEvents().size());
Awaitility.await().atMost(Duration.ofSeconds(2))
.untilAsserted(() -> assertEquals(1, asyncObserver.getObserverEvents().size()));
Awaitility.await().atMost(Duration.ofSeconds(2))
.untilAsserted(() -> assertEquals(1, asyncAuthZSuccessEventObserver.getObserverEvents().size()));
assertEquals(0, asyncAuthZFailureObserver.getObserverEvents().size());
}
@Test
public void testAuthFailureAsyncObserverNotified() {
assertFailureFor(beanWithSecurityAnnotations::restricted, UnauthorizedException.class, ANONYMOUS);
assertEquals(1, observer.getObserverEvents().size());
Awaitility.await().atMost(Duration.ofSeconds(2))
.untilAsserted(() -> assertEquals(1, asyncObserver.getObserverEvents().size()));
Awaitility.await().atMost(Duration.ofSeconds(2))
.untilAsserted(() -> assertEquals(1, asyncAuthZFailureObserver.getObserverEvents().size()));
AuthorizationFailureEvent event = asyncAuthZFailureObserver.getObserverEvents().get(0);
assertTrue(event.getAuthorizationFailure() instanceof UnauthorizedException);
assertNotNull(event.getSecurityIdentity());
assertTrue(event.getSecurityIdentity().isAnonymous());
assertEquals(RolesAllowedCheck.class.getName(), event.getAuthorizationContext());
assertEquals(0, asyncAuthZSuccessEventObserver.getObserverEvents().size());
}
}
| AsyncCDISecurityEventTest |
java | mockito__mockito | mockito-core/src/testFixtures/java/org/mockito/internal/configuration/ConfigurationAccess.java | {
"start": 228,
"end": 391
} | class ____ {
public static MockitoConfiguration getConfig() {
return (MockitoConfiguration) new GlobalConfiguration().getIt();
}
}
| ConfigurationAccess |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/EmbeddableSubqueryWithSameAliasOfRootQueryTest.java | {
"start": 3218,
"end": 3552
} | class ____{
@ManyToOne
private AnotherEntity otherEntity;
public MyEntityEmbeddable() {
}
public MyEntityEmbeddable(AnotherEntity otherEntity) {
this.otherEntity = otherEntity;
}
public AnotherEntity getOtherEntity() {
return otherEntity;
}
}
@Entity(name = "AnotherEntity")
public static | MyEntityEmbeddable |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/TestRouterHDFSContractAppend.java | {
"start": 995,
"end": 1429
} | class ____ extends AbstractContractAppendTest {
@BeforeAll
public static void createCluster() throws IOException {
RouterHDFSContract.createCluster();
}
@AfterAll
public static void teardownCluster() throws IOException {
RouterHDFSContract.destroyCluster();
}
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new RouterHDFSContract(conf);
}
}
| TestRouterHDFSContractAppend |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java | {
"start": 122940,
"end": 146149
} | class ____ extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan plan) {
// The mapping between explicit conversion functions and the corresponding attributes in the UnionAll output,
// if the conversion functions in the main query are pushed down into the UnionAll branches, a new ReferenceAttribute
// is created for the corresponding output of UnionAll, the value is the new ReferenceAttribute
Map<AbstractConvertFunction, Attribute> convertFunctionsToAttributes = new HashMap<>();
// The list of attributes in the UnionAll output that have been updated.
// The parent plans that reference these attributes need to be updated accordingly.
List<Attribute> updatedUnionAllOutput = new ArrayList<>();
// First push down the conversion functions into the UnionAll branches
LogicalPlan planWithConvertFunctionsPushedDown = plan.transformUp(
UnionAll.class,
unionAll -> unionAll.childrenResolved()
? maybePushDownConvertFunctions(unionAll, plan, convertFunctionsToAttributes)
: unionAll
);
// Then replace the conversion functions with the corresponding attributes in the UnionAll output
LogicalPlan planWithConvertFunctionsReplaced = replaceConvertFunctions(
planWithConvertFunctionsPushedDown,
convertFunctionsToAttributes
);
// Next implicitly cast the outputs of the UnionAll branches to the common type, this applies to date and date_nanos types only
LogicalPlan planWithImplicitCasting = planWithConvertFunctionsReplaced.transformUp(
UnionAll.class,
unionAll -> unionAll.resolved()
? implicitCastingUnionAllOutput(unionAll, planWithConvertFunctionsReplaced, updatedUnionAllOutput)
: unionAll
);
// Finally update the attributes referencing the updated UnionAll output
return updatedUnionAllOutput.isEmpty()
? planWithImplicitCasting
: updateAttributesReferencingUpdatedUnionAllOutput(planWithImplicitCasting, updatedUnionAllOutput);
}
/**
 * Pushes explicit conversion functions (TO_LONG, TO_STRING, ...) that the surrounding plan
 * applies to UnionAll outputs down into each UnionAll branch, so every branch computes the
 * converted value itself. Each branch gains one extra aliased column per pushed-down
 * conversion (named {@code attr$$converted_to$$type}) and the UnionAll is rebuilt with the
 * widened output.
 *
 * @param unionAll the UnionAll whose outputs may be converted by the surrounding plan
 * @param plan the enclosing plan, scanned for conversion functions referencing unionAll's output
 * @param convertFunctionsToAttributes out-parameter: populated (by rebuildUnionAll) with the
 *        new attribute that should replace each pushed-down conversion function upstream
 * @return the rebuilt UnionAll with converted columns appended, or the original UnionAll
 *         when there is nothing to push down
 */
private static LogicalPlan maybePushDownConvertFunctions(
    UnionAll unionAll,
    LogicalPlan plan,
    Map<AbstractConvertFunction, Attribute> convertFunctionsToAttributes
) {
    // Collect all conversion functions that convert the UnionAll outputs to a different type,
    // keyed by the name of the output attribute being converted
    Map<String, Set<AbstractConvertFunction>> oldOutputToConvertFunctions = collectConvertFunctions(unionAll, plan);
    if (oldOutputToConvertFunctions.isEmpty()) { // nothing to push down
        return unionAll;
    }
    // Push the conversion functions down into the unionAll branches. Every branch must grow
    // the same extra columns, in the same order, so the branches stay union-compatible.
    List<LogicalPlan> newChildren = new ArrayList<>(unionAll.children().size());
    Map<String, AbstractConvertFunction> newOutputToConvertFunctions = new HashMap<>();
    boolean outputChanged = false;
    for (LogicalPlan child : unionAll.children()) {
        List<Attribute> childOutput = child.output();
        List<Alias> newAliases = new ArrayList<>();
        List<Attribute> newChildOutput = new ArrayList<>(childOutput.size());
        for (Attribute oldAttr : childOutput) {
            // the original column is always kept; converted copies are appended after it
            newChildOutput.add(oldAttr);
            if (oldOutputToConvertFunctions.containsKey(oldAttr.name())) {
                Set<AbstractConvertFunction> converts = oldOutputToConvertFunctions.get(oldAttr.name());
                // create a new alias for each conversion function and add it to the new aliases list
                for (AbstractConvertFunction convert : converts) {
                    // the synthetic name encodes source column and target type, so the same
                    // conversion produces the same column name in every branch
                    String newAliasName = Attribute.rawTemporaryName(oldAttr.name(), "converted_to", convert.dataType().typeName());
                    Alias newAlias = new Alias(
                        oldAttr.source(),
                        newAliasName, // oldAttrName$$converted_to$$targetType
                        convert.replaceChildren(Collections.singletonList(oldAttr))
                    );
                    newAliases.add(newAlias);
                    newChildOutput.add(newAlias.toAttribute());
                    outputChanged = true;
                    newOutputToConvertFunctions.putIfAbsent(newAliasName, convert);
                }
            }
        }
        newChildren.add(maybePushDownConvertFunctionsToChild(child, newAliases, newChildOutput));
    }
    // Populate convertFunctionsToAttributes. The values of convertFunctionsToAttributes are the new ReferenceAttributes
    // in the new UnionAll outputs created for the updated unionAll output after pushing down the conversion functions.
    return outputChanged
        ? rebuildUnionAll(unionAll, newChildren, newOutputToConvertFunctions, convertFunctionsToAttributes)
        : unionAll;
}
/**
 * Scans {@code plan} for conversion functions whose input is one of the UnionAll's output
 * attributes (matched by both name and id). The result is keyed by the name of the converted
 * output attribute; the value holds every conversion function found for that attribute.
 */
private static Map<String, Set<AbstractConvertFunction>> collectConvertFunctions(UnionAll unionAll, LogicalPlan plan) {
    Map<String, Set<AbstractConvertFunction>> found = new HashMap<>();
    plan.forEachExpressionDown(AbstractConvertFunction.class, convert -> {
        if (convert.field() instanceof Attribute target) {
            // only record conversions that actually reference a UnionAll output (name and id must both match)
            boolean referencesUnionOutput = unionAll.output()
                .stream()
                .anyMatch(out -> out.name().equals(target.name()) && out.id() == target.id());
            if (referencesUnionOutput) {
                found.computeIfAbsent(target.name(), unused -> new HashSet<>()).add(convert);
            }
        }
    });
    return found;
}
/**
 * Pushes the given conversion aliases into {@code child} by inserting an Eval that computes
 * them between the child's EsqlProject and the project's input, then re-projecting with the
 * widened {@code output}. Fork/UnionAll puts an EsqlProject on top of each branch during
 * resolveFork; if that expected shape is not found, the child is returned untouched.
 */
private static LogicalPlan maybePushDownConvertFunctionsToChild(LogicalPlan child, List<Alias> aliases, List<Attribute> output) {
    if (aliases.isEmpty()) {
        return child; // nothing to push down
    }
    if (child instanceof EsqlProject project) {
        LogicalPlan projectInput = project.child();
        Eval withConversions = new Eval(projectInput.source(), projectInput, aliases);
        return new EsqlProject(project.source(), withConversions, output);
    }
    // unexpected plan shape: something went wrong upstream, leave the child as-is
    return child;
}
/**
 * Rebuilds the UnionAll with {@code newChildren} after conversion functions were pushed down,
 * deriving the new output schema from the first child's output. Also populates
 * {@code convertFunctionsToAttributes} with the new ReferenceAttribute that should replace
 * each pushed-down conversion function in the parent plan.
 *
 * Falls back to returning the original {@code unionAll} unchanged when the new children
 * disagree on output names, or when a new output can be matched neither to an old attribute
 * nor to a pushed-down conversion — both are "should not happen" safety valves.
 */
private static LogicalPlan rebuildUnionAll(
    UnionAll unionAll,
    List<LogicalPlan> newChildren,
    Map<String, AbstractConvertFunction> newOutputToConvertFunctions,
    Map<AbstractConvertFunction, Attribute> convertFunctionsToAttributes
) {
    // check if the new children has the same number of outputs, it could be different from the original unionAll output
    // if there are multiple explicit conversion functions on the same unionAll output attribute
    List<String> newChildrenOutputNames = newChildren.getFirst().output().stream().map(Attribute::name).toList();
    Holder<Boolean> childrenMatch = new Holder<>(true);
    newChildren.stream().skip(1).forEach(childPlan -> {
        List<String> names = childPlan.output().stream().map(Attribute::name).toList();
        if (names.equals(newChildrenOutputNames) == false) {
            childrenMatch.set(false);
        }
    });
    if (childrenMatch.get() == false) {
        // new UnionAll children outputs do not match after pushing down convert functions,
        // cannot move on, return the original UnionAll
        return unionAll;
    }
    // rebuild the unionAll output according to its new children's output, and populate convertFunctionsToAttributes
    List<Attribute> newOutput = new ArrayList<>(newChildrenOutputNames.size());
    List<Attribute> oldOutput = unionAll.output();
    for (String attrName : newChildrenOutputNames) {
        // find the old attribute by name (linear scan; output lists are small)
        Attribute oldAttr = null;
        for (Attribute attr : oldOutput) {
            if (attr.name().equals(attrName)) {
                oldAttr = attr;
                break;
            }
        }
        if (oldAttr != null) { // keep the old UnionAll output unchanged
            newOutput.add(oldAttr);
        } else { // this is a new attribute created by pushing down convert functions find the corresponding convert function
            AbstractConvertFunction convert = newOutputToConvertFunctions.get(attrName);
            if (convert != null) {
                // new column takes the conversion's target type; null id argument — presumably
                // ReferenceAttribute generates a fresh NameId here, TODO confirm
                ReferenceAttribute newAttr = new ReferenceAttribute(
                    convert.source(),
                    null,
                    attrName,
                    convert.dataType(),
                    convert.nullable(),
                    null,
                    true
                );
                newOutput.add(newAttr);
                convertFunctionsToAttributes.putIfAbsent(convert, newAttr);
            } else {
                // something unexpected happened, the attribute is neither the old attribute nor created by a convert function,
                // return the original UnionAll
                return unionAll;
            }
        }
    }
    return new UnionAll(unionAll.source(), newChildren, newOutput);
}
/**
 * Replaces every pushed-down conversion function in {@code plan} with the UnionAll output
 * attribute that now carries the converted value.
 */
private static LogicalPlan replaceConvertFunctions(
    LogicalPlan plan,
    Map<AbstractConvertFunction, Attribute> convertFunctionsToAttributes
) {
    if (convertFunctionsToAttributes.isEmpty()) {
        return plan;
    }
    return plan.transformExpressionsUp(AbstractConvertFunction.class, convert -> {
        if (convert.field() instanceof Attribute inputAttr) {
            for (Map.Entry<AbstractConvertFunction, Attribute> entry : convertFunctionsToAttributes.entrySet()) {
                AbstractConvertFunction pushedDown = entry.getKey();
                // Identity comparison on purpose: only the exact function instance that was
                // pushed down gets replaced. The id check guards against ReferenceAttributes
                // that share a name but have different ids and might compare equal.
                if (pushedDown == convert
                    && pushedDown.field() instanceof Attribute pushedDownAttr
                    && pushedDownAttr.id() == inputAttr.id()) {
                    return entry.getValue();
                }
            }
        }
        return convert;
    });
}
/**
 * Implicitly casts the outputs of the UnionAll branches to a per-column common type. Only
 * date/date_nanos widening is performed for now (to date_nanos — see {@link #commonType});
 * columns with no common type become unsupported attributes or typed nulls
 * (see {@link #createUnsupportedOrNull}).
 *
 * @param unionAll the UnionAll whose branch outputs are being aligned
 * @param plan the enclosing plan, used to determine which outputs are referenced downstream
 * @param updatedUnionAllOutput out-parameter: collects the UnionAll output attributes whose
 *        data type changed, so that referencing expressions can be updated afterwards
 * @return a rebuilt UnionAll with per-branch casts pushed down, or the original when unchanged
 */
private static LogicalPlan implicitCastingUnionAllOutput(
    UnionAll unionAll,
    LogicalPlan plan,
    List<Attribute> updatedUnionAllOutput
) {
    // build a map of UnionAll output to a list of LogicalPlan that reference this output
    Map<Attribute, List<LogicalPlan>> outputToPlans = outputToPlans(unionAll, plan);
    List<List<Attribute>> outputs = unionAll.children().stream().map(LogicalPlan::output).toList();
    // only do implicit casting for date and date_nanos types for now, to be consistent with queries without subqueries
    List<DataType> commonTypes = commonTypes(outputs);
    // per-column overrides (UNSUPPORTED / KEYWORD) recorded by createUnsupportedOrNull
    Map<Integer, DataType> indexToCommonType = new HashMap<>();
    // Cast each branch's output to the common type
    List<LogicalPlan> newChildren = new ArrayList<>(unionAll.children().size());
    boolean outputChanged = false;
    for (LogicalPlan child : unionAll.children()) {
        List<Alias> newAliases = new ArrayList<>();
        List<Attribute> oldChildOutput = child.output();
        List<Attribute> newChildOutput = new ArrayList<>(oldChildOutput.size());
        for (int i = 0; i < oldChildOutput.size(); i++) {
            Attribute oldOutput = oldChildOutput.get(i);
            DataType targetType = commonTypes.get(i);
            // resolveAttribute may append cast aliases to newAliases and record type
            // overrides in indexToCommonType as side effects
            Attribute resolved = resolveAttribute(
                oldOutput,
                targetType,
                i,
                outputs,
                unionAll,
                outputToPlans,
                newAliases,
                indexToCommonType
            );
            newChildOutput.add(resolved);
            if (resolved != oldOutput) {
                outputChanged = true;
            }
        }
        // create a new eval for the casting expressions, and push it down under the EsqlProject
        newChildren.add(maybePushDownConvertFunctionsToChild(child, newAliases, newChildOutput));
    }
    // Update common types with the overrides collected while resolving attributes
    indexToCommonType.forEach(commonTypes::set);
    return outputChanged ? rebuildUnionAllOutput(unionAll, newChildren, commonTypes, updatedUnionAllOutput) : unionAll;
}
/**
 * Builds a map from each UnionAll output attribute to the list of LogicalPlan nodes that
 * reference it. UnionAll and Project nodes are skipped because they merely pass attributes
 * through rather than consuming them.
 */
private static Map<Attribute, List<LogicalPlan>> outputToPlans(UnionAll unionAll, LogicalPlan plan) {
    Map<Attribute, List<LogicalPlan>> referencingPlans = new HashMap<>();
    plan.forEachDown(node -> node.forEachExpression(Attribute.class, ref -> {
        if (node instanceof UnionAll || node instanceof Project) {
            return; // pass-through nodes are not real consumers
        }
        // the reference counts only if it resolves to a UnionAll output (name and id must match)
        boolean isUnionOutput = unionAll.output()
            .stream()
            .anyMatch(out -> out.name().equals(ref.name()) && out.id() == ref.id());
        if (isUnionOutput) {
            referencingPlans.computeIfAbsent(ref, unused -> new ArrayList<>()).add(node);
        }
    }));
    return referencingPlans;
}
/**
 * Computes, column by column, the common data type across all branch outputs by folding
 * {@link #commonType} over the column's types. A {@code null} entry means the column has no
 * common type. All branch outputs are assumed to have the same column count.
 */
private static List<DataType> commonTypes(List<List<Attribute>> outputs) {
    int columnCount = outputs.get(0).size();
    List<DataType> merged = new ArrayList<>(columnCount);
    for (int col = 0; col < columnCount; col++) {
        final int c = col;
        // seed with the first branch's type; null (incompatible) propagates through the fold
        DataType common = outputs.stream()
            .map(branch -> branch.get(c).dataType())
            .reduce(outputs.get(0).get(c).dataType(), (a, b) -> commonType(a, b));
        merged.add(common);
    }
    return merged;
}
/**
 * Returns the common type of two column types, or {@code null} when they are incompatible.
 * Counter types are compared by their underlying numeric type; mixing date and date_nanos
 * widens to date_nanos.
 */
private static DataType commonType(DataType left, DataType right) {
    if (left == null || right == null) {
        return null; // an already-incompatible column stays incompatible
    }
    DataType a = left.isCounter() ? left.noCounter() : left;
    DataType b = right.isCounter() ? right.noCounter() : right;
    if (a == b) {
        return a;
    }
    return (a.isDate() && b.isDate()) ? DATE_NANOS : null;
}
/**
 * Resolves a branch output attribute against the column's target common type.
 * When the target type is {@code null} (no common type), delegates to
 * {@link #createUnsupportedOrNull}. When a cast is needed and a converter exists, a cast
 * alias is appended to {@code newAliases} and its attribute returned. Otherwise the original
 * attribute is returned unchanged.
 */
private static Attribute resolveAttribute(
    Attribute oldAttr,
    DataType targetType,
    int columnIndex,
    List<List<Attribute>> outputs,
    UnionAll unionAll,
    Map<Attribute, List<LogicalPlan>> outputToPlans,
    List<Alias> newAliases,
    Map<Integer, DataType> indexToCommonType
) {
    // no common type at all: column becomes unsupported or a typed null
    if (targetType == null) {
        return createUnsupportedOrNull(oldAttr, columnIndex, outputs, unionAll, outputToPlans, newAliases, indexToCommonType);
    }
    // NULL target or type already matches: nothing to cast
    if (targetType == NULL || oldAttr.dataType() == targetType) {
        return oldAttr;
    }
    var factory = EsqlDataTypeConverter.converterFunctionFactory(targetType);
    if (factory == null) {
        return oldAttr; // no converter known for the target type
    }
    var cast = factory.apply(oldAttr.source(), oldAttr);
    if (cast == null) {
        return oldAttr;
    }
    Alias castAlias = new Alias(oldAttr.source(), oldAttr.name(), cast);
    newAliases.add(castAlias);
    return castAlias.toAttribute();
}
/**
 * Handles a column with no common type. If the column is referenced downstream, the conflict
 * is surfaced as an UnsupportedAttribute (so the user sees the error); if nobody reads it,
 * the column is silently replaced with a keyword-typed null literal so the branches stay
 * aligned. Records the column's effective type in {@code indexToCommonType} either way.
 */
private static Attribute createUnsupportedOrNull(
    Attribute oldAttr,
    int columnIndex,
    List<List<Attribute>> outputs,
    UnionAll unionAll,
    Map<Attribute, List<LogicalPlan>> outputToPlans,
    List<Alias> newAliases,
    Map<Integer, DataType> indexToCommonType
) {
    Attribute unionOutput = unionAll.output().get(columnIndex);
    if (outputToPlans.containsKey(unionOutput) == false) {
        // column is never consumed: a typed null keeps the schema aligned without erroring
        Alias nullAlias = new Alias(oldAttr.source(), oldAttr.name(), new Literal(oldAttr.source(), null, KEYWORD));
        newAliases.add(nullAlias);
        indexToCommonType.putIfAbsent(columnIndex, KEYWORD);
        return nullAlias.toAttribute();
    }
    // column is referenced downstream: surface the type conflict to the user
    List<String> conflictingTypes = collectIncompatibleTypes(columnIndex, outputs);
    UnsupportedAttribute unsupported = new UnsupportedAttribute(
        oldAttr.source(),
        oldAttr.name(),
        new UnsupportedEsField(oldAttr.name(), conflictingTypes),
        "Column [" + oldAttr.name() + "] has conflicting data types in subqueries: " + conflictingTypes,
        oldAttr.id()
    );
    newAliases.add(new Alias(oldAttr.source(), oldAttr.name(), unsupported));
    indexToCommonType.putIfAbsent(columnIndex, UNSUPPORTED);
    return unsupported;
}
/**
 * Collects the type names found in column {@code columnIndex} across every branch output,
 * for use in the conflicting-types error message. An InvalidMappedField already carries its
 * full set of conflicting types, so those are expanded rather than reported as one type.
 */
private static List<String> collectIncompatibleTypes(int columnIndex, List<List<Attribute>> outputs) {
    List<String> typeNames = new ArrayList<>();
    for (List<Attribute> branchOutput : outputs) {
        Attribute column = branchOutput.get(columnIndex);
        if (column instanceof FieldAttribute field && field.field() instanceof InvalidMappedField invalid) {
            invalid.types().stream().map(DataType::typeName).forEach(typeNames::add);
        } else {
            typeNames.add(column.dataType().typeName());
        }
    }
    return typeNames;
}
/**
 * Rebuilds the UnionAll with {@code newChildren} after implicit casting, widening each output
 * attribute whose data type changed to the column's common type. Every widened attribute is
 * also appended to {@code updatedUnionAllOutput} so referencing expressions can be fixed up.
 */
private static UnionAll rebuildUnionAllOutput(
    UnionAll unionAll,
    List<LogicalPlan> newChildren,
    List<DataType> commonTypes,
    List<Attribute> updatedUnionAllOutput
) {
    List<Attribute> previousOutput = unionAll.output();
    List<Attribute> rebuiltOutput = new ArrayList<>(previousOutput.size());
    for (int col = 0; col < previousOutput.size(); col++) {
        Attribute previous = previousOutput.get(col);
        DataType widened = commonTypes.get(col);
        if (previous.dataType() == widened) {
            rebuiltOutput.add(previous); // type unchanged: keep the attribute as-is
            continue;
        }
        // same NameId on purpose: downstream operators resolve this attribute by id
        ReferenceAttribute replacement = new ReferenceAttribute(
            previous.source(),
            null,
            previous.name(),
            widened,
            previous.nullable(),
            previous.id(),
            previous.synthetic()
        );
        rebuiltOutput.add(replacement);
        updatedUnionAllOutput.add(replacement);
    }
    return new UnionAll(unionAll.source(), newChildren, rebuiltOutput);
}
/**
 * Rewrites every attribute reference in {@code plan} whose NameId matches one of the retyped
 * UnionAll outputs, so downstream expressions see the widened data type.
 */
private static LogicalPlan updateAttributesReferencingUpdatedUnionAllOutput(
    LogicalPlan plan,
    List<Attribute> updatedUnionAllOutput
) {
    // index retyped attributes by NameId for O(1) lookup during the transform
    Map<NameId, Attribute> retypedById = updatedUnionAllOutput.stream().collect(Collectors.toMap(Attribute::id, a -> a));
    return plan.transformExpressionsUp(Attribute.class, ref -> {
        Attribute retyped = retypedById.get(ref.id());
        if (retyped == null || ref.dataType() == retyped.dataType()) {
            return ref; // not a retyped output, or already carries the new type
        }
        return retyped;
    });
}
}
}
| ResolveUnionTypesInUnionAll |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/spi/CompositeUserTypeRegistration.java | {
"start": 446,
"end": 900
} | class ____ {
private final ClassDetails embeddableClass;
private final ClassDetails userTypeClass;
public CompositeUserTypeRegistration(ClassDetails embeddableClass, ClassDetails userTypeClass) {
this.embeddableClass = embeddableClass;
this.userTypeClass = userTypeClass;
}
public ClassDetails getEmbeddableClass() {
return embeddableClass;
}
public ClassDetails getUserTypeClass() {
return userTypeClass;
}
}
| CompositeUserTypeRegistration |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/Olingo4ComponentBuilderFactory.java | {
"start": 13327,
"end": 16582
} | class ____
extends AbstractComponentBuilder<Olingo4Component>
implements Olingo4ComponentBuilder {
@Override
protected Olingo4Component buildConcreteComponent() {
return new Olingo4Component();
}
private org.apache.camel.component.olingo4.Olingo4Configuration getOrCreateConfiguration(Olingo4Component component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.olingo4.Olingo4Configuration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "configuration": ((Olingo4Component) component).setConfiguration((org.apache.camel.component.olingo4.Olingo4Configuration) value); return true;
case "connectTimeout": getOrCreateConfiguration((Olingo4Component) component).setConnectTimeout((int) value); return true;
case "contentType": getOrCreateConfiguration((Olingo4Component) component).setContentType((java.lang.String) value); return true;
case "filterAlreadySeen": getOrCreateConfiguration((Olingo4Component) component).setFilterAlreadySeen((boolean) value); return true;
case "httpHeaders": getOrCreateConfiguration((Olingo4Component) component).setHttpHeaders((java.util.Map) value); return true;
case "proxy": getOrCreateConfiguration((Olingo4Component) component).setProxy((org.apache.http.HttpHost) value); return true;
case "serviceUri": getOrCreateConfiguration((Olingo4Component) component).setServiceUri((java.lang.String) value); return true;
case "socketTimeout": getOrCreateConfiguration((Olingo4Component) component).setSocketTimeout((int) value); return true;
case "bridgeErrorHandler": ((Olingo4Component) component).setBridgeErrorHandler((boolean) value); return true;
case "splitResult": getOrCreateConfiguration((Olingo4Component) component).setSplitResult((boolean) value); return true;
case "lazyStartProducer": ((Olingo4Component) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((Olingo4Component) component).setAutowiredEnabled((boolean) value); return true;
case "httpAsyncClientBuilder": getOrCreateConfiguration((Olingo4Component) component).setHttpAsyncClientBuilder((org.apache.http.impl.nio.client.HttpAsyncClientBuilder) value); return true;
case "httpClientBuilder": getOrCreateConfiguration((Olingo4Component) component).setHttpClientBuilder((org.apache.http.impl.client.HttpClientBuilder) value); return true;
case "sslContextParameters": getOrCreateConfiguration((Olingo4Component) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
case "useGlobalSslContextParameters": ((Olingo4Component) component).setUseGlobalSslContextParameters((boolean) value); return true;
default: return false;
}
}
}
} | Olingo4ComponentBuilderImpl |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/intTest/java/org/springframework/boot/devtools/tests/LocalApplicationLauncher.java | {
"start": 1024,
"end": 2674
} | class ____ extends AbstractApplicationLauncher {
LocalApplicationLauncher(Directories directories) {
super(directories);
}
@Override
public LaunchedApplication launchApplication(JvmLauncher jvmLauncher, File serverPortFile) throws Exception {
LaunchedJvm jvm = jvmLauncher.launch("local", createApplicationClassPath(),
"com.example.DevToolsTestApplication", serverPortFile.getAbsolutePath(), "--server.port=0");
return new LaunchedApplication(getDirectories().getAppDirectory(), jvm.getStandardOut(), jvm.getStandardError(),
jvm.getProcess(), null, null);
}
@Override
public LaunchedApplication launchApplication(JvmLauncher jvmLauncher, File serverPortFile, String... additionalArgs)
throws Exception {
List<String> args = new ArrayList<>(Arrays.asList("com.example.DevToolsTestApplication",
serverPortFile.getAbsolutePath(), "--server.port=0"));
args.addAll(Arrays.asList(additionalArgs));
LaunchedJvm jvm = jvmLauncher.launch("local", createApplicationClassPath(), args.toArray(new String[] {}));
return new LaunchedApplication(getDirectories().getAppDirectory(), jvm.getStandardOut(), jvm.getStandardError(),
jvm.getProcess(), null, null);
}
protected String createApplicationClassPath() throws Exception {
File appDirectory = getDirectories().getAppDirectory();
copyApplicationTo(appDirectory);
List<String> entries = new ArrayList<>();
entries.add(appDirectory.getAbsolutePath());
entries.addAll(getDependencyJarPaths());
return StringUtils.collectionToDelimitedString(entries, File.pathSeparator);
}
@Override
public String toString() {
return "local";
}
}
| LocalApplicationLauncher |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGeneratorMR.java | {
"start": 2568,
"end": 9231
} | class ____ extends LoadGenerator {
public static final Logger LOG = LoggerFactory.getLogger(LoadGenerator.class);
private static int numMapTasks = 1;
private String mrOutDir;
final private static String USAGE_CMD = "java LoadGeneratorMR\n";
final private static String USAGE = USAGE_CMD
+ "-mr <numMapJobs> <outputDir> [MUST be first 3 args] \n" + USAGE_ARGS ;
// Constant "keys" used to communicate between map and reduce
final private static Text OPEN_EXECTIME = new Text("OpenExecutionTime");
final private static Text NUMOPS_OPEN = new Text("NumOpsOpen");
final private static Text LIST_EXECTIME = new Text("ListExecutionTime");
final private static Text NUMOPS_LIST = new Text("NumOpsList");
final private static Text DELETE_EXECTIME = new Text("DeletionExecutionTime");
final private static Text NUMOPS_DELETE = new Text("NumOpsDelete");
final private static Text CREATE_EXECTIME = new Text("CreateExecutionTime");
final private static Text NUMOPS_CREATE = new Text("NumOpsCreate");
final private static Text WRITE_CLOSE_EXECTIME = new Text("WriteCloseExecutionTime");
final private static Text NUMOPS_WRITE_CLOSE = new Text("NumOpsWriteClose");
final private static Text ELAPSED_TIME = new Text("ElapsedTime");
final private static Text TOTALOPS = new Text("TotalOps");
// Config keys to pass args from Main to the Job
final private static String LG_ROOT = "LG.root";
final private static String LG_SCRIPTFILE = "LG.scriptFile";
final private static String LG_MAXDELAYBETWEENOPS = "LG.maxDelayBetweenOps";
final private static String LG_NUMOFTHREADS = "LG.numOfThreads";
final private static String LG_READPR = "LG.readPr";
final private static String LG_WRITEPR = "LG.writePr";
final private static String LG_SEED = "LG.r";
final private static String LG_NUMMAPTASKS = "LG.numMapTasks";
final private static String LG_ELAPSEDTIME = "LG.elapsedTime";
final private static String LG_STARTTIME = "LG.startTime";
final private static String LG_FLAGFILE = "LG.flagFile";
/** Constructor */
public LoadGeneratorMR() throws IOException, UnknownHostException {
super();
}
public LoadGeneratorMR(Configuration conf) throws IOException, UnknownHostException {
this();
setConf(conf);
}
/** Main function called by tool runner.
* It first initializes data by parsing the command line arguments.
* It then calls the loadGenerator
*/
@Override
public int run(String[] args) throws Exception {
int exitCode = parseArgsMR(args);
if (exitCode != 0) {
return exitCode;
}
System.out.println("Running LoadGeneratorMR against fileSystem: " +
FileContext.getFileContext().getDefaultFileSystem().getUri());
return submitAsMapReduce(); // reducer will print the results
}
/**
* Parse the command line arguments and initialize the data.
* Only parse the first arg: -mr <numMapTasks> <mrOutDir> (MUST be first three Args)
* The rest are parsed by the Parent LoadGenerator
**/
private int parseArgsMR(String[] args) throws IOException {
try {
if (args.length >= 3 && args[0].equals("-mr")) {
numMapTasks = Integer.parseInt(args[1]);
mrOutDir = args[2];
if (mrOutDir.startsWith("-")) {
System.err.println("Missing output file parameter, instead got: "
+ mrOutDir);
System.err.println(USAGE);
return -1;
}
} else {
System.err.println(USAGE);
ToolRunner.printGenericCommandUsage(System.err);
return -1;
}
String[] strippedArgs = new String[args.length - 3];
for (int i = 0; i < strippedArgs.length; i++) {
strippedArgs[i] = args[i + 3];
}
super.parseArgs(true, strippedArgs); // Parse normal LoadGenerator args
} catch (NumberFormatException e) {
System.err.println("Illegal parameter: " + e.getLocalizedMessage());
System.err.println(USAGE);
return -1;
}
return 0;
}
/** Main program
*
* @param args command line arguments
* @throws Exception
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new LoadGeneratorMR(), args);
System.exit(res);
}
// The following methods are only used when LoadGenerator is run a MR job
/**
* Based on args we submit the LoadGenerator as MR job.
* Number of MapTasks is numMapTasks
* @return exitCode for job submission
*/
private int submitAsMapReduce() {
System.out.println("Running as a MapReduce job with " +
numMapTasks + " mapTasks; Output to file " + mrOutDir);
Configuration conf = new Configuration(getConf());
// First set all the args of LoadGenerator as Conf vars to pass to MR tasks
conf.set(LG_ROOT , root.toString());
conf.setInt(LG_MAXDELAYBETWEENOPS, maxDelayBetweenOps);
conf.setInt(LG_NUMOFTHREADS, numOfThreads);
conf.set(LG_READPR, readProbs[0]+""); //Pass Double as string
conf.set(LG_WRITEPR, writeProbs[0]+""); //Pass Double as string
conf.setLong(LG_SEED, seed); //No idea what this is
conf.setInt(LG_NUMMAPTASKS, numMapTasks);
if (scriptFile == null && durations[0] <=0) {
System.err.println("When run as a MapReduce job, elapsed Time or ScriptFile must be specified");
System.exit(-1);
}
conf.setLong(LG_ELAPSEDTIME, durations[0]);
conf.setLong(LG_STARTTIME, startTime);
if (scriptFile != null) {
conf.set(LG_SCRIPTFILE , scriptFile);
}
conf.set(LG_FLAGFILE, flagFile.toString());
// Now set the necessary conf variables that apply to run MR itself.
JobConf jobConf = new JobConf(conf, LoadGenerator.class);
jobConf.setJobName("NNLoadGeneratorViaMR");
jobConf.setNumMapTasks(numMapTasks);
jobConf.setNumReduceTasks(1); // 1 reducer to collect the results
jobConf.setOutputKeyClass(Text.class);
jobConf.setOutputValueClass(IntWritable.class);
jobConf.setMapperClass(MapperThatRunsNNLoadGenerator.class);
jobConf.setReducerClass(ReducerThatCollectsLGdata.class);
jobConf.setInputFormat(DummyInputFormat.class);
jobConf.setOutputFormat(TextOutputFormat.class);
// Explicitly set number of max map attempts to 1.
jobConf.setMaxMapAttempts(1);
// Explicitly turn off speculative execution
jobConf.setSpeculativeExecution(false);
// This mapReduce job has no input but has output
FileOutputFormat.setOutputPath(jobConf, new Path(mrOutDir));
try {
JobClient.runJob(jobConf);
} catch (IOException e) {
System.err.println("Failed to run job: " + e.getMessage());
return -1;
}
return 0;
}
// Each split is empty
public static | LoadGeneratorMR |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/GeneralScriptException.java | {
"start": 704,
"end": 1110
} | class ____ a script.
* <p>
* Use of this exception should generally be avoided, it doesn't provide
* much context or structure to users trying to debug scripting when
* things go wrong.
* @deprecated Use ScriptException for exceptions from the scripting engine,
* otherwise use a more appropriate exception (e.g. if thrown
* from various abstractions)
*/
@Deprecated
public | from |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleChannelHandler.java | {
"start": 18450,
"end": 25232
} | class ____ {
final Path mapOutputFileName;
final IndexRecord indexRecord;
MapOutputInfo(Path mapOutputFileName, IndexRecord indexRecord) {
this.mapOutputFileName = mapOutputFileName;
this.indexRecord = indexRecord;
}
}
protected void verifyRequest(String appid, ChannelHandlerContext ctx,
HttpRequest request, HttpResponse response, URL requestUri)
throws IOException {
SecretKey tokenSecret = handlerCtx.secretManager.retrieveTokenSecret(appid);
if (null == tokenSecret) {
LOG.info("Request for unknown token {}, channel id: {}", appid, ctx.channel().id());
throw new IOException("Could not find jobid");
}
// encrypting URL
String encryptedURL = SecureShuffleUtils.buildMsgFrom(requestUri);
// hash from the fetcher
String urlHashStr =
request.headers().get(SecureShuffleUtils.HTTP_HEADER_URL_HASH);
if (urlHashStr == null) {
LOG.info("Missing header hash for {}, channel id: {}", appid, ctx.channel().id());
throw new IOException("fetcher cannot be authenticated");
}
if (LOG.isDebugEnabled()) {
int len = urlHashStr.length();
LOG.debug("Verifying request. encryptedURL:{}, hash:{}, channel id: " +
"{}", encryptedURL,
urlHashStr.substring(len - len / 2, len - 1), ctx.channel().id());
}
// verify - throws exception
SecureShuffleUtils.verifyReply(urlHashStr, encryptedURL, tokenSecret);
// verification passed - encode the reply
String reply = SecureShuffleUtils.generateHash(urlHashStr.getBytes(StandardCharsets.UTF_8),
tokenSecret);
response.headers().set(
SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
// Put shuffle version into http header
response.headers().set(ShuffleHeader.HTTP_HEADER_NAME,
ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
response.headers().set(ShuffleHeader.HTTP_HEADER_VERSION,
ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
if (LOG.isDebugEnabled()) {
int len = reply.length();
LOG.debug("Fetcher request verified. " +
"encryptedURL: {}, reply: {}, channel id: {}",
encryptedURL, reply.substring(len - len / 2, len - 1),
ctx.channel().id());
}
}
public static ByteBuf shuffleHeaderToBytes(ShuffleHeader header) throws IOException {
final DataOutputBuffer dob = new DataOutputBuffer();
header.write(dob);
return wrappedBuffer(dob.getData(), 0, dob.getLength());
}
protected ChannelFuture sendMapOutput(Channel ch, String user, String mapId, int reduce,
MapOutputInfo mapOutputInfo)
throws IOException {
final IndexRecord info = mapOutputInfo.indexRecord;
ch.write(shuffleHeaderToBytes(
new ShuffleHeader(mapId, info.partLength, info.rawLength, reduce)));
final File spillFile =
new File(mapOutputInfo.mapOutputFileName.toString());
RandomAccessFile spill = SecureIOUtils.openForRandomRead(spillFile, "r", user, null);
ChannelFuture writeFuture;
if (ch.pipeline().get(SslHandler.class) == null) {
final FadvisedFileRegion partition = new FadvisedFileRegion(spill,
info.startOffset, info.partLength, handlerCtx.manageOsCache, handlerCtx.readaheadLength,
handlerCtx.readaheadPool, spillFile.getAbsolutePath(),
handlerCtx.shuffleBufferSize, handlerCtx.shuffleTransferToAllowed);
writeFuture = ch.writeAndFlush(partition);
// TODO error handling; distinguish IO/connection failures,
// attribute to appropriate spill output
writeFuture.addListener((ChannelFutureListener) future -> {
if (future.isSuccess()) {
partition.transferSuccessful();
}
partition.deallocate();
});
} else {
// HTTPS cannot be done with zero copy.
final FadvisedChunkedFile chunk = new FadvisedChunkedFile(spill,
info.startOffset, info.partLength, handlerCtx.sslFileBufferSize,
handlerCtx.manageOsCache, handlerCtx.readaheadLength, handlerCtx.readaheadPool,
spillFile.getAbsolutePath());
writeFuture = ch.writeAndFlush(chunk);
}
handlerCtx.metrics.shuffleConnections.incr();
handlerCtx.metrics.shuffleOutputBytes.incr(info.partLength); // optimistic
return writeFuture;
}
protected void sendError(ChannelHandlerContext ctx,
HttpResponseStatus status) {
sendError(ctx, "", status);
}
protected void sendError(ChannelHandlerContext ctx, String message,
HttpResponseStatus status) {
sendError(ctx, message, status, Collections.emptyMap());
}
protected void sendError(ChannelHandlerContext ctx, String msg,
HttpResponseStatus status, Map<String, String> headers) {
FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, status,
Unpooled.copiedBuffer(msg, CharsetUtil.UTF_8));
response.headers().set(CONTENT_TYPE, "text/plain; charset=UTF-8");
// Put shuffle version into http header
response.headers().set(ShuffleHeader.HTTP_HEADER_NAME,
ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
response.headers().set(ShuffleHeader.HTTP_HEADER_VERSION,
ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
for (Map.Entry<String, String> header : headers.entrySet()) {
response.headers().set(header.getKey(), header.getValue());
}
HttpUtil.setContentLength(response, response.content().readableBytes());
// Close the connection as soon as the error message is sent.
ctx.channel().writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
// TODO: missing keep-alive handling
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
throws Exception {
Channel ch = ctx.channel();
if (cause instanceof TooLongFrameException) {
LOG.trace("TooLongFrameException, channel id: {}", ch.id());
sendError(ctx, BAD_REQUEST);
return;
} else if (cause instanceof IOException) {
if (cause instanceof ClosedChannelException) {
LOG.debug("Ignoring closed channel error, channel id: " + ch.id(), cause);
return;
}
String message = String.valueOf(cause.getMessage());
if (IGNORABLE_ERROR_MESSAGE.matcher(message).matches()) {
LOG.debug("Ignoring client socket close, channel id: " + ch.id(), cause);
return;
}
}
LOG.error("Shuffle error. Channel id: " + ch.id(), cause);
if (ch.isActive()) {
sendError(ctx, INTERNAL_SERVER_ERROR);
}
}
/**
* Maintain parameters per messageReceived() Netty context.
* Allows sendMapOutput calls from operationComplete()
*/
public static | MapOutputInfo |
java | apache__spark | examples/src/main/java/org/apache/spark/examples/ml/JavaModelSelectionViaTrainValidationSplitExample.java | {
"start": 1503,
"end": 3469
} | class ____ {
public static void main(String[] args) {
SparkSession spark = SparkSession
.builder()
.appName("JavaModelSelectionViaTrainValidationSplitExample")
.getOrCreate();
// $example on$
Dataset<Row> data = spark.read().format("libsvm")
.load("data/mllib/sample_linear_regression_data.txt");
// Prepare training and test data.
Dataset<Row>[] splits = data.randomSplit(new double[] {0.9, 0.1}, 12345);
Dataset<Row> training = splits[0];
Dataset<Row> test = splits[1];
LinearRegression lr = new LinearRegression();
// We use a ParamGridBuilder to construct a grid of parameters to search over.
// TrainValidationSplit will try all combinations of values and determine best model using
// the evaluator.
ParamMap[] paramGrid = new ParamGridBuilder()
.addGrid(lr.regParam(), new double[] {0.1, 0.01})
.addGrid(lr.fitIntercept())
.addGrid(lr.elasticNetParam(), new double[] {0.0, 0.5, 1.0})
.build();
// In this case the estimator is simply the linear regression.
// A TrainValidationSplit requires an Estimator, a set of Estimator ParamMaps, and an Evaluator.
TrainValidationSplit trainValidationSplit = new TrainValidationSplit()
.setEstimator(lr)
.setEvaluator(new RegressionEvaluator())
.setEstimatorParamMaps(paramGrid)
.setTrainRatio(0.8) // 80% for training and the remaining 20% for validation
.setParallelism(2); // Evaluate up to 2 parameter settings in parallel
// Run train validation split, and choose the best set of parameters.
TrainValidationSplitModel model = trainValidationSplit.fit(training);
// Make predictions on test data. model is the model with combination of parameters
// that performed best.
model.transform(test)
.select("features", "label", "prediction")
.show();
// $example off$
spark.stop();
}
}
| JavaModelSelectionViaTrainValidationSplitExample |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/annotation/JsonDeserialize.java | {
"start": 1013,
"end": 1584
} | class ____ NOT on target POJO class: for example @JsonIgnoreProperties should be on
* Builder to prevent "unknown property" errors.
* </li>
* <li>Similarly configuration overrides (see {@link tools.jackson.databind.cfg.MapperBuilder#withConfigOverride})
* should be targeted at Builder class, not target POJO class.
* </li>
* </ul>
*
*/
@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD, ElementType.FIELD, ElementType.TYPE, ElementType.PARAMETER})
@Retention(RetentionPolicy.RUNTIME)
@com.fasterxml.jackson.annotation.JacksonAnnotation
public @ | and |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/PreferInstanceofOverGetKindTest.java | {
"start": 1563,
"end": 1929
} | class ____ {
boolean isMemberSelect(Tree tree) {
return tree instanceof MemberSelectTree;
}
}
""")
.doTest();
}
@Test
public void instanceEquals() {
helper
.addInputLines(
"Test.java",
"""
import com.sun.source.tree.Tree;
| Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java | {
"start": 9355,
"end": 33074
} | enum ____ {
FIXED("fixed"),
SCALING("scaling");
private final String type;
public String getType() {
return type;
}
ThreadPoolType(String type) {
this.type = type;
}
private static final Map<String, ThreadPoolType> TYPE_MAP = Arrays.stream(ThreadPoolType.values())
.collect(Collectors.toUnmodifiableMap(ThreadPoolType::getType, Function.identity()));
public static ThreadPoolType fromType(String type) {
ThreadPoolType threadPoolType = TYPE_MAP.get(type);
if (threadPoolType == null) {
throw new IllegalArgumentException("no ThreadPoolType for " + type);
}
return threadPoolType;
}
}
public static final Map<String, ThreadPoolType> THREAD_POOL_TYPES = Map.ofEntries(
entry(Names.GENERIC, ThreadPoolType.SCALING),
entry(Names.CLUSTER_COORDINATION, ThreadPoolType.FIXED),
entry(Names.GET, ThreadPoolType.FIXED),
entry(Names.ANALYZE, ThreadPoolType.FIXED),
entry(Names.WRITE, ThreadPoolType.FIXED),
entry(Names.WRITE_COORDINATION, ThreadPoolType.FIXED),
entry(Names.SEARCH, ThreadPoolType.FIXED),
entry(Names.SEARCH_COORDINATION, ThreadPoolType.FIXED),
entry(Names.AUTO_COMPLETE, ThreadPoolType.FIXED),
entry(Names.MANAGEMENT, ThreadPoolType.SCALING),
entry(Names.FLUSH, ThreadPoolType.SCALING),
entry(Names.REFRESH, ThreadPoolType.SCALING),
entry(Names.WARMER, ThreadPoolType.SCALING),
entry(Names.SNAPSHOT, ThreadPoolType.SCALING),
entry(Names.SNAPSHOT_META, ThreadPoolType.SCALING),
entry(Names.MERGE, ThreadPoolType.SCALING),
entry(Names.FORCE_MERGE, ThreadPoolType.FIXED),
entry(Names.FETCH_SHARD_STARTED, ThreadPoolType.SCALING),
entry(Names.FETCH_SHARD_STORE, ThreadPoolType.SCALING),
entry(Names.SYSTEM_READ, ThreadPoolType.FIXED),
entry(Names.SYSTEM_WRITE, ThreadPoolType.FIXED),
entry(Names.SYSTEM_WRITE_COORDINATION, ThreadPoolType.FIXED),
entry(Names.SYSTEM_CRITICAL_READ, ThreadPoolType.FIXED),
entry(Names.SYSTEM_CRITICAL_WRITE, ThreadPoolType.FIXED)
);
public static final double searchAutoscalingEWMA = 0.1;
// This value is chosen such that a sudden increase in the task durations would need to persist roughly for 120 samples
// for the EWMA value to be mostly representative of the increased task durations. Mostly representative means that the
// EWMA value is at least within 90% of the new increased task duration. This value also determines the impact of a single
// long-running task on the moving average and limits it roughly to 2% of the (long) task duration, e.g. if the current
// moving average is 100ms, and we get one task which takes 20s the new EWMA will be ~500ms.
public static final double DEFAULT_INDEX_AUTOSCALING_EWMA_ALPHA = 0.02;
private final Map<String, ExecutorHolder> executors;
private final ThreadPoolInfo threadPoolInfo;
private final CachedTimeThread cachedTimeThread;
private final LongSupplier relativeTimeInMillisSupplier;
private final ThreadContext threadContext;
@SuppressWarnings("rawtypes")
private final Map<String, ExecutorBuilder> builders;
private final ScheduledThreadPoolExecutor scheduler;
private final long slowSchedulerWarnThresholdNanos;
private Map<String, ArrayList<Instrument>> instruments;
@SuppressWarnings("rawtypes")
public Collection<ExecutorBuilder> builders() {
return Collections.unmodifiableCollection(builders.values());
}
public static final Setting<TimeValue> ESTIMATED_TIME_INTERVAL_SETTING = Setting.timeSetting(
"thread_pool.estimated_time_interval",
TimeValue.timeValueMillis(200),
TimeValue.ZERO,
Setting.Property.NodeScope
);
public static final Setting<TimeValue> LATE_TIME_INTERVAL_WARN_THRESHOLD_SETTING = Setting.timeSetting(
"thread_pool.estimated_time_interval.warn_threshold",
TimeValue.timeValueSeconds(5),
TimeValue.ZERO,
Setting.Property.NodeScope
);
public static final Setting<TimeValue> SLOW_SCHEDULER_TASK_WARN_THRESHOLD_SETTING = Setting.timeSetting(
"thread_pool.scheduler.warn_threshold",
TimeValue.timeValueSeconds(5),
TimeValue.ZERO,
Setting.Property.NodeScope
);
// A setting to change the alpha parameter of the EWMA used in WRITE, SYSTEM_WRITE and SYSTEM_CRITICAL_WRITE thread pools
public static final Setting<Double> WRITE_THREAD_POOLS_EWMA_ALPHA_SETTING = Setting.doubleSetting(
"thread_pool.write.ewma_alpha",
DEFAULT_INDEX_AUTOSCALING_EWMA_ALPHA,
0.0,
1.0,
Setting.Property.NodeScope
);
/**
* Defines and builds the many thread pools delineated in {@link Names}.
*
* @param settings
* @param meterRegistry
* @param builtInExecutorBuilders used to construct builders for the built-in thread pools
* @param customBuilders a list of additional thread pool builders that were defined elsewhere (like a Plugin).
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
public ThreadPool(
final Settings settings,
MeterRegistry meterRegistry,
BuiltInExecutorBuilders builtInExecutorBuilders,
final ExecutorBuilder<?>... customBuilders
) {
assert Node.NODE_NAME_SETTING.exists(settings);
final Map<String, ArrayList<Instrument>> instruments = new HashMap<>();
final int allocatedProcessors = EsExecutors.allocatedProcessors(settings);
final Map<String, ExecutorBuilder> builders = new HashMap<>(builtInExecutorBuilders.getBuilders(settings, allocatedProcessors));
for (final ExecutorBuilder<?> builder : customBuilders) {
if (builders.containsKey(builder.name())) {
throw new IllegalArgumentException("builder with name [" + builder.name() + "] already exists");
}
builders.put(builder.name(), builder);
}
this.builders = Collections.unmodifiableMap(builders);
threadContext = new ThreadContext(settings);
// Now that all the thread pools have been defined, actually build them.
final Map<String, ExecutorHolder> executors = new HashMap<>();
for (final Map.Entry<String, ExecutorBuilder> entry : builders.entrySet()) {
final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings);
final ExecutorHolder executorHolder = entry.getValue().build(executorSettings, threadContext);
if (executors.containsKey(executorHolder.info.getName())) {
throw new IllegalStateException("duplicate executors with name [" + executorHolder.info.getName() + "] registered");
}
logger.debug("created thread pool: {}", entry.getValue().formatInfo(executorHolder.info));
executors.put(entry.getKey(), executorHolder);
}
this.executors = Map.copyOf(executors);
this.executors.forEach((k, v) -> instruments.put(k, setupMetrics(meterRegistry, k, v)));
this.instruments = instruments;
this.threadPoolInfo = new ThreadPoolInfo(executors.values().stream().map(holder -> holder.info).toList());
this.scheduler = Scheduler.initScheduler(settings, "scheduler");
this.slowSchedulerWarnThresholdNanos = SLOW_SCHEDULER_TASK_WARN_THRESHOLD_SETTING.get(settings).nanos();
this.cachedTimeThread = new CachedTimeThread(
EsExecutors.threadName(settings, "[timer]"),
ESTIMATED_TIME_INTERVAL_SETTING.get(settings).millis(),
LATE_TIME_INTERVAL_WARN_THRESHOLD_SETTING.get(settings).millis()
);
this.cachedTimeThread.start();
this.relativeTimeInMillisSupplier = new RelativeTimeInMillisSupplier(cachedTimeThread);
}
private static ArrayList<Instrument> setupMetrics(MeterRegistry meterRegistry, String name, ExecutorHolder holder) {
Map<String, Object> at = Map.of();
ArrayList<Instrument> instruments = new ArrayList<>();
if (holder.executor() instanceof ThreadPoolExecutor threadPoolExecutor) {
String prefix = THREAD_POOL_METRIC_PREFIX + name;
instruments.add(
meterRegistry.registerLongGauge(
prefix + THREAD_POOL_METRIC_NAME_CURRENT,
"number of threads for " + name,
"count",
() -> new LongWithAttributes(threadPoolExecutor.getPoolSize(), at)
)
);
instruments.add(
meterRegistry.registerLongGauge(
prefix + THREAD_POOL_METRIC_NAME_QUEUE,
"number queue size for " + name,
"count",
() -> new LongWithAttributes(threadPoolExecutor.getQueue().size(), at)
)
);
instruments.add(
meterRegistry.registerLongGauge(
prefix + THREAD_POOL_METRIC_NAME_ACTIVE,
"number of active threads for " + name,
"count",
() -> new LongWithAttributes(threadPoolExecutor.getActiveCount(), at)
)
);
instruments.add(
meterRegistry.registerLongGauge(
prefix + THREAD_POOL_METRIC_NAME_LARGEST,
"largest pool size for " + name,
"count",
() -> new LongWithAttributes(threadPoolExecutor.getLargestPoolSize(), at)
)
);
instruments.add(
meterRegistry.registerLongAsyncCounter(
prefix + THREAD_POOL_METRIC_NAME_COMPLETED,
"number of completed threads for " + name,
"count",
() -> new LongWithAttributes(threadPoolExecutor.getCompletedTaskCount(), at)
)
);
RejectedExecutionHandler rejectedExecutionHandler = threadPoolExecutor.getRejectedExecutionHandler();
if (rejectedExecutionHandler instanceof EsRejectedExecutionHandler handler) {
handler.registerCounter(meterRegistry, prefix + THREAD_POOL_METRIC_NAME_REJECTED, name);
}
if (threadPoolExecutor instanceof TaskExecutionTimeTrackingEsThreadPoolExecutor timeTrackingExecutor) {
instruments.addAll(timeTrackingExecutor.setupMetrics(meterRegistry, name));
}
}
return instruments;
}
// for subclassing by tests that don't actually use any of the machinery that the regular constructor sets up
protected ThreadPool() {
this.builders = Map.of();
this.executors = Map.of();
this.cachedTimeThread = null;
this.relativeTimeInMillisSupplier = this::relativeTimeInMillis;
this.threadPoolInfo = new ThreadPoolInfo(List.of());
this.slowSchedulerWarnThresholdNanos = 0L;
this.threadContext = new ThreadContext(Settings.EMPTY);
this.scheduler = null;
}
@Override
public long relativeTimeInMillis() {
return cachedTimeThread.relativeTimeInMillis();
}
/**
* Effectively the same as {@code this::relativeTimeInMillis}, except that it returns a constant to save on allocation.
*/
public LongSupplier relativeTimeInMillisSupplier() {
return relativeTimeInMillisSupplier;
}
@Override
public long relativeTimeInNanos() {
return cachedTimeThread.relativeTimeInNanos();
}
@Override
public long rawRelativeTimeInMillis() {
return TimeValue.nsecToMSec(System.nanoTime());
}
@Override
public long absoluteTimeInMillis() {
return cachedTimeThread.absoluteTimeInMillis();
}
@Override
public ThreadPoolInfo info() {
return threadPoolInfo;
}
public Info info(String name) {
ExecutorHolder holder = executors.get(name);
if (holder == null) {
return null;
}
return holder.info;
}
public ThreadPoolStats stats() {
List<ThreadPoolStats.Stats> stats = new ArrayList<>();
for (ExecutorHolder holder : executors.values()) {
final String name = holder.info.getName();
int threads = -1;
int queue = -1;
int active = -1;
long rejected = -1;
int largest = -1;
long completed = -1;
if (holder.executor() instanceof ThreadPoolExecutor threadPoolExecutor) {
threads = threadPoolExecutor.getPoolSize();
queue = threadPoolExecutor.getQueue().size();
active = threadPoolExecutor.getActiveCount();
largest = threadPoolExecutor.getLargestPoolSize();
completed = threadPoolExecutor.getCompletedTaskCount();
RejectedExecutionHandler rejectedExecutionHandler = threadPoolExecutor.getRejectedExecutionHandler();
if (rejectedExecutionHandler instanceof EsRejectedExecutionHandler handler) {
rejected = handler.rejected();
}
}
stats.add(new ThreadPoolStats.Stats(name, threads, queue, active, rejected, largest, completed));
}
return new ThreadPoolStats(stats);
}
/**
* Get the generic {@link ExecutorService}. This executor service
* {@link Executor#execute(Runnable)} method will run the {@link Runnable} it is given in the
* {@link ThreadContext} of the thread that queues it.
* <p>
* Warning: this {@linkplain ExecutorService} will not throw {@link RejectedExecutionException}
* if you submit a task while it shutdown. It will instead silently queue it and not run it.
*/
public ExecutorService generic() {
return executor(Names.GENERIC);
}
/**
* Get the {@link ExecutorService} with the given name. This executor service's
* {@link Executor#execute(Runnable)} method will run the {@link Runnable} it is given in the
* {@link ThreadContext} of the thread that queues it.
* <p>
* Warning: this {@linkplain ExecutorService} might not throw {@link RejectedExecutionException}
* if you submit a task while it shutdown. It will instead silently queue it and not run it.
*
* @param name the name of the executor service to obtain
* @throws IllegalArgumentException if no executor service with the specified name exists
*/
public ExecutorService executor(String name) {
final ExecutorHolder holder = executors.get(name);
if (holder == null) {
final var message = "no executor service found for [" + name + "]";
assert false : message;
throw new IllegalArgumentException(message);
}
return holder.executor();
}
/**
* Schedules a one-shot command to run after a given delay. The command is run in the context of the calling thread.
*
* @param command the command to run
* @param delay delay before the task executes
* @param executor the name of the thread pool on which to execute this task. SAME means "execute on the scheduler thread" which changes
* the meaning of the ScheduledFuture returned by this method. In that case the ScheduledFuture will complete only when the
* command completes.
* @return a ScheduledFuture who's get will return when the task is has been added to its target thread pool and throw an exception if
* the task is canceled before it was added to its target thread pool. Once the task has been added to its target thread pool
* the ScheduledFuture will cannot interact with it.
* @throws org.elasticsearch.common.util.concurrent.EsRejectedExecutionException if the task cannot be scheduled for execution
*/
@Override
public ScheduledCancellable schedule(Runnable command, TimeValue delay, Executor executor) {
final Runnable contextPreservingRunnable = threadContext.preserveContext(command);
final Runnable toSchedule;
if (executor != EsExecutors.DIRECT_EXECUTOR_SERVICE) {
toSchedule = new ThreadedRunnable(contextPreservingRunnable, executor);
} else if (slowSchedulerWarnThresholdNanos > 0) {
toSchedule = new Runnable() {
@Override
public void run() {
final long startTime = ThreadPool.this.relativeTimeInNanos();
try {
contextPreservingRunnable.run();
} finally {
final long took = ThreadPool.this.relativeTimeInNanos() - startTime;
if (took > slowSchedulerWarnThresholdNanos) {
logger.warn(
"execution of [{}] took [{}ms] which is above the warn threshold of [{}ms]",
contextPreservingRunnable,
TimeUnit.NANOSECONDS.toMillis(took),
TimeUnit.NANOSECONDS.toMillis(slowSchedulerWarnThresholdNanos)
);
}
}
}
@Override
public String toString() {
return contextPreservingRunnable.toString();
}
};
} else {
toSchedule = contextPreservingRunnable;
}
return new ScheduledCancellableAdapter(scheduler.schedule(toSchedule, delay.millis(), TimeUnit.MILLISECONDS));
}
public void scheduleUnlessShuttingDown(TimeValue delay, Executor executor, Runnable command) {
try {
schedule(command, delay, executor);
} catch (EsRejectedExecutionException e) {
if (e.isExecutorShutdown()) {
logger.debug(
() -> format(
"could not schedule execution of [%s] after [%s] on [%s] as executor is shut down",
command,
delay,
executor
),
e
);
} else {
throw e;
}
}
}
public Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, Executor executor) {
var runnable = new ReschedulingRunnable(
command,
interval,
executor,
this,
e -> logger.debug(() -> format("scheduled task [%s] was rejected on thread pool [%s]", command, executor), e),
e -> logger.warn(() -> format("failed to run scheduled task [%s] on thread pool [%s]", command, executor), e)
);
runnable.start();
return runnable;
}
protected final void stopCachedTimeThread() {
cachedTimeThread.running = false;
cachedTimeThread.interrupt();
}
private void closeMetrics(ExecutorHolder executor) {
if (this.instruments.containsKey(executor.info.getName())) {
this.instruments.get(executor.info.getName()).forEach((instrument) -> {
if (instrument instanceof LongAsyncCounter longasynccounter) {
try {
longasynccounter.close();
} catch (Exception e) {
logger.warn(format("Failed to close LongAsyncCounter for %s. %s", executor.info.getName(), e.getMessage()), e);
}
} else if (instrument instanceof LongGauge longgauge) {
try {
longgauge.close();
} catch (Exception e) {
logger.warn(format("Failed to close LongGauge for %s. %s", executor.info.getName(), e.getMessage()), e);
}
}
});
}
this.instruments.remove(executor.info.getName());
}
public void shutdown() {
stopCachedTimeThread();
scheduler.shutdown();
for (ExecutorHolder executor : executors.values()) {
if (executor.executor() instanceof ThreadPoolExecutor) {
closeMetrics(executor);
executor.executor().shutdown();
}
}
}
public void shutdownNow() {
stopCachedTimeThread();
scheduler.shutdownNow();
for (ExecutorHolder executor : executors.values()) {
if (executor.executor() instanceof ThreadPoolExecutor) {
closeMetrics(executor);
executor.executor().shutdownNow();
}
}
}
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
boolean result = scheduler.awaitTermination(timeout, unit);
for (ExecutorHolder executor : executors.values()) {
if (executor.executor() instanceof ThreadPoolExecutor) {
closeMetrics(executor);
result &= executor.executor().awaitTermination(timeout, unit);
}
}
cachedTimeThread.join(unit.toMillis(timeout));
return result;
}
public ScheduledExecutorService scheduler() {
return this.scheduler;
}
/**
* Constrains a value between minimum and maximum values
* (inclusive).
*
* @param value the value to constrain
* @param min the minimum acceptable value
* @param max the maximum acceptable value
* @return min if value is less than min, max if value is greater
* than value, otherwise value
*/
static int boundedBy(int value, int min, int max) {
assert min < max : min + " vs " + max;
return Math.min(max, Math.max(min, value));
}
static int halfAllocatedProcessors(final int allocatedProcessors) {
return (allocatedProcessors + 1) / 2;
}
static int halfAllocatedProcessorsMaxFive(final int allocatedProcessors) {
return boundedBy(halfAllocatedProcessors(allocatedProcessors), 1, 5);
}
static int halfAllocatedProcessorsMaxTen(final int allocatedProcessors) {
return boundedBy(halfAllocatedProcessors(allocatedProcessors), 1, 10);
}
static int twiceAllocatedProcessors(final int allocatedProcessors) {
return boundedBy(2 * allocatedProcessors, 2, Integer.MAX_VALUE);
}
public static int oneEighthAllocatedProcessors(final int allocatedProcessors) {
return boundedBy(allocatedProcessors / 8, 1, Integer.MAX_VALUE);
}
public static int searchOrGetThreadPoolSize(final int allocatedProcessors) {
return ((allocatedProcessors * 3) / 2) + 1;
}
public static int getMaxSnapshotThreadPoolSize(int allocatedProcessors) {
final ByteSizeValue maxHeapSize = ByteSizeValue.ofBytes(Runtime.getRuntime().maxMemory());
return getMaxSnapshotThreadPoolSize(allocatedProcessors, maxHeapSize);
}
static int getMaxSnapshotThreadPoolSize(int allocatedProcessors, final ByteSizeValue maxHeapSize) {
// While on larger data nodes, larger snapshot threadpool size improves snapshotting on high latency blob stores,
// smaller instances can run into OOM issues and need a smaller snapshot threadpool size.
if (maxHeapSize.compareTo(ByteSizeValue.of(750, ByteSizeUnit.MB)) < 0) {
return halfAllocatedProcessorsMaxFive(allocatedProcessors);
}
return 10;
}
static | ThreadPoolType |
java | resilience4j__resilience4j | resilience4j-rxjava2/src/test/java/io/github/resilience4j/micrometer/transformer/CompletableTimerTest.java | {
"start": 1161,
"end": 2311
} | class ____ {
@Test
public void shouldTimeSuccessfulCompletable() {
MeterRegistry registry = new SimpleMeterRegistry();
Timer timer = Timer.of("timer 1", registry);
Throwable result = Completable.complete()
.compose(TimerTransformer.of(timer))
.blockingGet();
then(result).isNull();
thenSuccessTimed(registry, timer);
}
@Test
public void shouldTimeFailedCompletable() {
IllegalStateException exception = new IllegalStateException();
MeterRegistry registry = new SimpleMeterRegistry();
TimerConfig config = TimerConfig.custom()
.onFailureTagResolver(ex -> {
then(ex).isEqualTo(exception);
return ex.toString();
})
.build();
Timer timer = Timer.of("timer 1", registry, config);
Throwable result = Completable.error(exception)
.compose(TimerTransformer.of(timer))
.blockingGet();
then(result).isEqualTo(exception);
thenFailureTimed(registry, timer, result);
}
}
| CompletableTimerTest |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/factories/BigIntegerFormatFactory.java | {
"start": 1423,
"end": 1770
} | class ____ extends AbstractNumberFormat<BigInteger> {
@Override
public String format(BigInteger object) throws Exception {
return object.toString();
}
@Override
public BigInteger parse(String string) throws Exception {
return new BigInteger(string);
}
}
}
| BigIntegerFormat |
java | spring-projects__spring-framework | spring-oxm/src/test/java/org/springframework/oxm/xstream/Flights.java | {
"start": 727,
"end": 1121
} | class ____ {
private List<Flight> flights = new ArrayList<>();
private List<String> strings = new ArrayList<>();
public List<Flight> getFlights() {
return flights;
}
public void setFlights(List<Flight> flights) {
this.flights = flights;
}
public List<String> getStrings() {
return strings;
}
public void setStrings(List<String> strings) {
this.strings = strings;
}
}
| Flights |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/converter/RsaKeyConverters.java | {
"start": 5712,
"end": 6549
} | class ____ implements Converter<List<String>, RSAPublicKey> {
private final KeyFactory keyFactory;
X509PemDecoder(KeyFactory keyFactory) {
this.keyFactory = keyFactory;
}
@Override
public @NonNull RSAPublicKey convert(List<String> lines) {
StringBuilder base64Encoded = new StringBuilder();
for (String line : lines) {
if (isNotX509PemWrapper(line)) {
base64Encoded.append(line);
}
}
byte[] x509 = Base64.getDecoder().decode(base64Encoded.toString());
try {
return (RSAPublicKey) this.keyFactory.generatePublic(new X509EncodedKeySpec(x509));
}
catch (Exception ex) {
throw new IllegalArgumentException(ex);
}
}
private boolean isNotX509PemWrapper(String line) {
return !X509_PEM_HEADER.equals(line) && !X509_PEM_FOOTER.equals(line);
}
}
private static | X509PemDecoder |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-rsocket/src/main/java/smoketest/rsocket/SampleRSocketApplication.java | {
"start": 805,
"end": 950
} | class ____ {
public static void main(String[] args) {
SpringApplication.run(SampleRSocketApplication.class, args);
}
}
| SampleRSocketApplication |
java | quarkusio__quarkus | extensions/amazon-lambda/deployment/src/test/java/io/quarkus/amazon/lambda/deployment/RequestHandlerJandexUtilTest.java | {
"start": 21263,
"end": 21494
} | interface ____ extends RequestHandler<Double, Long> {
@Override
default Long handleRequest(Double input, Context context) {
return input.longValue();
}
}
public static | DefaultMethodInterface |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/translators/PartitionTransformationTranslator.java | {
"start": 1744,
"end": 3836
} | class ____<OUT>
extends SimpleTransformationTranslator<OUT, PartitionTransformation<OUT>> {
@Override
protected Collection<Integer> translateForBatchInternal(
final PartitionTransformation<OUT> transformation, final Context context) {
return translateInternal(transformation, context, true);
}
@Override
protected Collection<Integer> translateForStreamingInternal(
final PartitionTransformation<OUT> transformation, final Context context) {
return translateInternal(transformation, context, false);
}
private Collection<Integer> translateInternal(
final PartitionTransformation<OUT> transformation,
final Context context,
boolean supportsBatchExchange) {
checkNotNull(transformation);
checkNotNull(context);
final StreamGraph streamGraph = context.getStreamGraph();
final List<Transformation<?>> parentTransformations = transformation.getInputs();
checkState(
parentTransformations.size() == 1,
"Expected exactly one input transformation but found "
+ parentTransformations.size());
final Transformation<?> input = parentTransformations.get(0);
List<Integer> resultIds = new ArrayList<>();
StreamExchangeMode exchangeMode = transformation.getExchangeMode();
// StreamExchangeMode#BATCH has no effect in streaming mode so we can safely reset it to
// UNDEFINED and let Flink decide on the best exchange mode.
if (!supportsBatchExchange && exchangeMode == StreamExchangeMode.BATCH) {
exchangeMode = StreamExchangeMode.UNDEFINED;
}
for (Integer inputId : context.getStreamNodeIds(input)) {
final int virtualId = Transformation.getNewNodeId();
streamGraph.addVirtualPartitionNode(
inputId, virtualId, transformation.getPartitioner(), exchangeMode);
resultIds.add(virtualId);
}
return resultIds;
}
}
| PartitionTransformationTranslator |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/domain/DomainTableRW.java | {
"start": 1547,
"end": 3565
} | class ____ extends BaseTableRW<DomainTable> {
/** domain prefix. */
private static final String PREFIX =
YarnConfiguration.TIMELINE_SERVICE_PREFIX + "domain";
/** config param name that specifies the domain table name. */
public static final String TABLE_NAME_CONF_NAME = PREFIX + ".table.name";
/** default value for domain table name. */
public static final String DEFAULT_TABLE_NAME = "timelineservice.domain";
private static final Logger LOG =
LoggerFactory.getLogger(DomainTableRW.class);
public DomainTableRW() {
super(TABLE_NAME_CONF_NAME, DEFAULT_TABLE_NAME);
}
/*
* (non-Javadoc)
*
* @see
* org.apache.hadoop.yarn.server.timelineservice.storage.BaseTableRW#
* createTable(org.apache.hadoop.hbase.client.Admin,
* org.apache.hadoop.conf.Configuration)
*/
public void createTable(Admin admin, Configuration hbaseConf)
throws IOException {
TableName table = getTableName(hbaseConf);
if (admin.tableExists(table)) {
// do not disable / delete existing table
// similar to the approach taken by map-reduce jobs when
// output directory exists
throw new IOException("Table " + table.getNameAsString()
+ " already exists.");
}
HTableDescriptor domainTableDescp = new HTableDescriptor(table);
HColumnDescriptor mappCF =
new HColumnDescriptor(DomainColumnFamily.INFO.getBytes());
mappCF.setBloomFilterType(BloomType.ROWCOL);
domainTableDescp.addFamily(mappCF);
domainTableDescp
.setRegionSplitPolicyClassName(
"org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy");
domainTableDescp.setValue("KeyPrefixRegionSplitPolicy.prefix_length",
TimelineHBaseSchemaConstants.USERNAME_SPLIT_KEY_PREFIX_LENGTH);
admin.createTable(domainTableDescp,
TimelineHBaseSchemaConstants.getUsernameSplits());
LOG.info("Status of table creation for " + table.getNameAsString() + "="
+ admin.tableExists(table));
}
}
| DomainTableRW |
java | dropwizard__dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/errors/EofExceptionWriterInterceptorJerseyTest.java | {
"start": 913,
"end": 1693
} | class ____ extends AbstractJerseyTest {
@Override
protected TestContainerFactory getTestContainerFactory() throws TestContainerException {
return new JettyTestContainerFactory();
}
@Override
protected Application configure() {
return DropwizardResourceConfig.forTesting()
.register(EofExceptionWriterInterceptor.class)
.register(EofExceptionCountingInterceptor.class)
.register(TestResource.class);
}
@Test
void shouldCountZeroEofExceptions() throws IOException {
target("/").request().get(InputStream.class).close();
assertThat(EofExceptionCountingInterceptor.exceptionCount).isEqualByComparingTo(0L);
}
@Path("/")
public static | EofExceptionWriterInterceptorJerseyTest |
java | spring-projects__spring-framework | spring-oxm/src/main/java/org/springframework/oxm/support/AbstractMarshaller.java | {
"start": 2596,
"end": 24631
} | class ____ implements Marshaller, Unmarshaller {
private static final EntityResolver NO_OP_ENTITY_RESOLVER =
(publicId, systemId) -> new InputSource(new StringReader(""));
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
private boolean supportDtd = false;
private boolean processExternalEntities = false;
private volatile @Nullable DocumentBuilderFactory documentBuilderFactory;
private volatile @Nullable SAXParserFactory saxParserFactory;
/**
* Indicate whether DTD parsing should be supported.
* <p>Default is {@code false} meaning that DTD is disabled.
*/
public void setSupportDtd(boolean supportDtd) {
this.supportDtd = supportDtd;
this.documentBuilderFactory = null;
this.saxParserFactory = null;
}
/**
* Return whether DTD parsing is supported.
*/
public boolean isSupportDtd() {
return this.supportDtd;
}
/**
* Indicate whether external XML entities are processed when unmarshalling.
* <p>Default is {@code false}, meaning that external entities are not resolved.
* Note that processing of external entities will only be enabled/disabled when the
* {@code Source} passed to {@link #unmarshal(Source)} is a {@link SAXSource} or
* {@link StreamSource}. It has no effect for {@link DOMSource} or {@link StAXSource}
* instances.
* <p><strong>Note:</strong> setting this option to {@code true} also
* automatically sets {@link #setSupportDtd} to {@code true}.
*/
public void setProcessExternalEntities(boolean processExternalEntities) {
this.processExternalEntities = processExternalEntities;
if (processExternalEntities) {
this.supportDtd = true;
}
this.documentBuilderFactory = null;
this.saxParserFactory = null;
}
/**
* Return whether XML external entities are allowed.
* @see #createXmlReader()
*/
public boolean isProcessExternalEntities() {
return this.processExternalEntities;
}
/**
* Build a new {@link Document} from this marshaller's {@link DocumentBuilderFactory},
* as a placeholder for a DOM node.
* @see #createDocumentBuilderFactory()
* @see #createDocumentBuilder(DocumentBuilderFactory)
*/
protected Document buildDocument() {
try {
DocumentBuilderFactory builderFactory = this.documentBuilderFactory;
if (builderFactory == null) {
builderFactory = createDocumentBuilderFactory();
this.documentBuilderFactory = builderFactory;
}
DocumentBuilder builder = createDocumentBuilder(builderFactory);
return builder.newDocument();
}
catch (ParserConfigurationException ex) {
throw new UnmarshallingFailureException("Could not create document placeholder: " + ex.getMessage(), ex);
}
}
/**
* Create a {@code DocumentBuilder} that this marshaller will use for creating
* DOM documents when passed an empty {@code DOMSource}.
* <p>The resulting {@code DocumentBuilderFactory} is cached, so this method
* will only be called once.
* @return the DocumentBuilderFactory
* @throws ParserConfigurationException if thrown by JAXP methods
*/
protected DocumentBuilderFactory createDocumentBuilderFactory() throws ParserConfigurationException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setValidating(false);
factory.setNamespaceAware(true);
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", !isSupportDtd());
factory.setFeature("http://xml.org/sax/features/external-general-entities", isProcessExternalEntities());
return factory;
}
/**
* Create a {@code DocumentBuilder} that this marshaller will use for creating
* DOM documents when passed an empty {@code DOMSource}.
* <p>Can be overridden in subclasses, adding further initialization of the builder.
* @param factory the {@code DocumentBuilderFactory} that the DocumentBuilder should be created with
* @return the {@code DocumentBuilder}
* @throws ParserConfigurationException if thrown by JAXP methods
*/
protected DocumentBuilder createDocumentBuilder(DocumentBuilderFactory factory)
throws ParserConfigurationException {
DocumentBuilder builder = factory.newDocumentBuilder();
if (!isProcessExternalEntities()) {
builder.setEntityResolver(NO_OP_ENTITY_RESOLVER);
}
return builder;
}
/**
* Create an {@code XMLReader} that this marshaller will when passed an empty {@code SAXSource}.
* @return the XMLReader
* @throws SAXException if thrown by JAXP methods
* @throws ParserConfigurationException if thrown by JAXP methods
*/
protected XMLReader createXmlReader() throws SAXException, ParserConfigurationException {
SAXParserFactory parserFactory = this.saxParserFactory;
if (parserFactory == null) {
parserFactory = SAXParserFactory.newInstance();
parserFactory.setNamespaceAware(true);
parserFactory.setFeature(
"http://apache.org/xml/features/disallow-doctype-decl", !isSupportDtd());
parserFactory.setFeature(
"http://xml.org/sax/features/external-general-entities", isProcessExternalEntities());
this.saxParserFactory = parserFactory;
}
SAXParser saxParser = parserFactory.newSAXParser();
XMLReader xmlReader = saxParser.getXMLReader();
if (!isProcessExternalEntities()) {
xmlReader.setEntityResolver(NO_OP_ENTITY_RESOLVER);
}
return xmlReader;
}
/**
* Determine the default encoding to use for marshalling or unmarshalling from
* a byte stream, or {@code null} if none.
* <p>The default implementation returns {@code null}.
*/
protected @Nullable String getDefaultEncoding() {
return null;
}
// Marshalling
/**
* Marshals the object graph with the given root into the provided {@code javax.xml.transform.Result}.
* <p>This implementation inspects the given result, and calls {@code marshalDomResult},
* {@code marshalSaxResult}, or {@code marshalStreamResult}.
* @param graph the root of the object graph to marshal
* @param result the result to marshal to
* @throws IOException if an I/O exception occurs
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @throws IllegalArgumentException if {@code result} if neither a {@code DOMResult},
* a {@code SAXResult}, nor a {@code StreamResult}
* @see #marshalDomResult(Object, javax.xml.transform.dom.DOMResult)
* @see #marshalSaxResult(Object, javax.xml.transform.sax.SAXResult)
* @see #marshalStreamResult(Object, javax.xml.transform.stream.StreamResult)
*/
@Override
public final void marshal(Object graph, Result result) throws IOException, XmlMappingException {
if (result instanceof DOMResult domResult) {
marshalDomResult(graph, domResult);
}
else if (StaxUtils.isStaxResult(result)) {
marshalStaxResult(graph, result);
}
else if (result instanceof SAXResult saxResult) {
marshalSaxResult(graph, saxResult);
}
else if (result instanceof StreamResult streamResult) {
marshalStreamResult(graph, streamResult);
}
else {
throw new IllegalArgumentException("Unknown Result type: " + result.getClass());
}
}
/**
* Template method for handling {@code DOMResult}s.
* <p>This implementation delegates to {@code marshalDomNode}.
* @param graph the root of the object graph to marshal
* @param domResult the {@code DOMResult}
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @throws IllegalArgumentException if the {@code domResult} is empty
* @see #marshalDomNode(Object, org.w3c.dom.Node)
*/
protected void marshalDomResult(Object graph, DOMResult domResult) throws XmlMappingException {
if (domResult.getNode() == null) {
domResult.setNode(buildDocument());
}
marshalDomNode(graph, domResult.getNode());
}
/**
* Template method for handling {@code StaxResult}s.
* <p>This implementation delegates to {@code marshalXMLSteamWriter} or
* {@code marshalXMLEventConsumer}, depending on what is contained in the
* {@code StaxResult}.
* @param graph the root of the object graph to marshal
* @param staxResult a JAXP 1.4 {@link StAXSource}
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @throws IllegalArgumentException if the {@code domResult} is empty
* @see #marshalDomNode(Object, org.w3c.dom.Node)
*/
protected void marshalStaxResult(Object graph, Result staxResult) throws XmlMappingException {
XMLStreamWriter streamWriter = StaxUtils.getXMLStreamWriter(staxResult);
if (streamWriter != null) {
marshalXmlStreamWriter(graph, streamWriter);
}
else {
XMLEventWriter eventWriter = StaxUtils.getXMLEventWriter(staxResult);
if (eventWriter != null) {
marshalXmlEventWriter(graph, eventWriter);
}
else {
throw new IllegalArgumentException("StaxResult contains neither XMLStreamWriter nor XMLEventConsumer");
}
}
}
/**
* Template method for handling {@code SAXResult}s.
* <p>This implementation delegates to {@code marshalSaxHandlers}.
* @param graph the root of the object graph to marshal
* @param saxResult the {@code SAXResult}
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @see #marshalSaxHandlers(Object, org.xml.sax.ContentHandler, org.xml.sax.ext.LexicalHandler)
*/
protected void marshalSaxResult(Object graph, SAXResult saxResult) throws XmlMappingException {
ContentHandler contentHandler = saxResult.getHandler();
Assert.notNull(contentHandler, "ContentHandler not set on SAXResult");
LexicalHandler lexicalHandler = saxResult.getLexicalHandler();
marshalSaxHandlers(graph, contentHandler, lexicalHandler);
}
/**
* Template method for handling {@code StreamResult}s.
* <p>This implementation delegates to {@code marshalOutputStream} or {@code marshalWriter},
* depending on what is contained in the {@code StreamResult}
* @param graph the root of the object graph to marshal
* @param streamResult the {@code StreamResult}
* @throws IOException if an I/O Exception occurs
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @throws IllegalArgumentException if {@code streamResult} does neither
* contain an {@code OutputStream} nor a {@code Writer}
*/
protected void marshalStreamResult(Object graph, StreamResult streamResult)
throws XmlMappingException, IOException {
if (streamResult.getOutputStream() != null) {
marshalOutputStream(graph, streamResult.getOutputStream());
}
else if (streamResult.getWriter() != null) {
marshalWriter(graph, streamResult.getWriter());
}
else {
throw new IllegalArgumentException("StreamResult contains neither OutputStream nor Writer");
}
}
// Unmarshalling
/**
* Unmarshals the given provided {@code javax.xml.transform.Source} into an object graph.
* <p>This implementation inspects the given result, and calls {@code unmarshalDomSource},
* {@code unmarshalSaxSource}, or {@code unmarshalStreamSource}.
* @param source the source to marshal from
* @return the object graph
* @throws IOException if an I/O Exception occurs
* @throws XmlMappingException if the given source cannot be mapped to an object
* @throws IllegalArgumentException if {@code source} is neither a {@code DOMSource},
* a {@code SAXSource}, nor a {@code StreamSource}
* @see #unmarshalDomSource(javax.xml.transform.dom.DOMSource)
* @see #unmarshalSaxSource(javax.xml.transform.sax.SAXSource)
* @see #unmarshalStreamSource(javax.xml.transform.stream.StreamSource)
*/
@Override
public final Object unmarshal(Source source) throws IOException, XmlMappingException {
if (source instanceof DOMSource domSource) {
return unmarshalDomSource(domSource);
}
else if (StaxUtils.isStaxSource(source)) {
return unmarshalStaxSource(source);
}
else if (source instanceof SAXSource saxSource) {
return unmarshalSaxSource(saxSource);
}
else if (source instanceof StreamSource streamSource) {
return unmarshalStreamSource(streamSource);
}
else {
throw new IllegalArgumentException("Unknown Source type: " + source.getClass());
}
}
/**
* Template method for handling {@code DOMSource}s.
* <p>This implementation delegates to {@code unmarshalDomNode}.
* If the given source is empty, an empty source {@code Document}
* will be created as a placeholder.
* @param domSource the {@code DOMSource}
* @return the object graph
* @throws XmlMappingException if the given source cannot be mapped to an object
* @throws IllegalArgumentException if the {@code domSource} is empty
* @see #unmarshalDomNode(org.w3c.dom.Node)
*/
protected Object unmarshalDomSource(DOMSource domSource) throws XmlMappingException {
if (domSource.getNode() == null) {
domSource.setNode(buildDocument());
}
try {
return unmarshalDomNode(domSource.getNode());
}
catch (NullPointerException ex) {
if (!isSupportDtd()) {
throw new UnmarshallingFailureException("NPE while unmarshalling. " +
"This can happen on JDK 1.6 due to the presence of DTD " +
"declarations, which are disabled.", ex);
}
throw ex;
}
}
/**
* Template method for handling {@code StaxSource}s.
* <p>This implementation delegates to {@code unmarshalXmlStreamReader} or
* {@code unmarshalXmlEventReader}.
* @param staxSource the {@code StaxSource}
* @return the object graph
* @throws XmlMappingException if the given source cannot be mapped to an object
*/
protected Object unmarshalStaxSource(Source staxSource) throws XmlMappingException {
XMLStreamReader streamReader = StaxUtils.getXMLStreamReader(staxSource);
if (streamReader != null) {
return unmarshalXmlStreamReader(streamReader);
}
else {
XMLEventReader eventReader = StaxUtils.getXMLEventReader(staxSource);
if (eventReader != null) {
return unmarshalXmlEventReader(eventReader);
}
else {
throw new IllegalArgumentException("StaxSource contains neither XMLStreamReader nor XMLEventReader");
}
}
}
/**
* Template method for handling {@code SAXSource}s.
* <p>This implementation delegates to {@code unmarshalSaxReader}.
* @param saxSource the {@code SAXSource}
* @return the object graph
* @throws XmlMappingException if the given source cannot be mapped to an object
* @throws IOException if an I/O Exception occurs
* @see #unmarshalSaxReader(org.xml.sax.XMLReader, org.xml.sax.InputSource)
*/
protected Object unmarshalSaxSource(SAXSource saxSource) throws XmlMappingException, IOException {
if (saxSource.getXMLReader() == null) {
try {
saxSource.setXMLReader(createXmlReader());
}
catch (SAXException | ParserConfigurationException ex) {
throw new UnmarshallingFailureException("Could not create XMLReader for SAXSource", ex);
}
}
if (saxSource.getInputSource() == null) {
saxSource.setInputSource(new InputSource());
}
try {
return unmarshalSaxReader(saxSource.getXMLReader(), saxSource.getInputSource());
}
catch (NullPointerException ex) {
if (!isSupportDtd()) {
throw new UnmarshallingFailureException("NPE while unmarshalling. " +
"This can happen on JDK 1.6 due to the presence of DTD " +
"declarations, which are disabled.");
}
throw ex;
}
}
/**
* Template method for handling {@code StreamSource}s.
* <p>This implementation delegates to {@code unmarshalInputStream} or {@code unmarshalReader}.
* @param streamSource the {@code StreamSource}
* @return the object graph
* @throws IOException if an I/O exception occurs
* @throws XmlMappingException if the given source cannot be mapped to an object
*/
protected Object unmarshalStreamSource(StreamSource streamSource) throws XmlMappingException, IOException {
if (streamSource.getInputStream() != null) {
if (isProcessExternalEntities() && isSupportDtd()) {
return unmarshalInputStream(streamSource.getInputStream());
}
else {
InputSource inputSource = new InputSource(streamSource.getInputStream());
inputSource.setEncoding(getDefaultEncoding());
return unmarshalSaxSource(new SAXSource(inputSource));
}
}
else if (streamSource.getReader() != null) {
if (isProcessExternalEntities() && isSupportDtd()) {
return unmarshalReader(streamSource.getReader());
}
else {
return unmarshalSaxSource(new SAXSource(new InputSource(streamSource.getReader())));
}
}
else {
return unmarshalSaxSource(new SAXSource(new InputSource(streamSource.getSystemId())));
}
}
// Abstract template methods
/**
* Abstract template method for marshalling the given object graph to a DOM {@code Node}.
* <p>In practice, {@code node} is a {@code Document} node, a {@code DocumentFragment} node,
* or a {@code Element} node. In other words, a node that accepts children.
* @param graph the root of the object graph to marshal
* @param node the DOM node that will contain the result tree
* @throws XmlMappingException if the given object cannot be marshalled to the DOM node
* @see org.w3c.dom.Document
* @see org.w3c.dom.DocumentFragment
* @see org.w3c.dom.Element
*/
protected abstract void marshalDomNode(Object graph, Node node)
throws XmlMappingException;
/**
* Abstract template method for marshalling the given object to a StAX {@code XMLEventWriter}.
* @param graph the root of the object graph to marshal
* @param eventWriter the {@code XMLEventWriter} to write to
* @throws XmlMappingException if the given object cannot be marshalled to the DOM node
*/
protected abstract void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter)
throws XmlMappingException;
/**
* Abstract template method for marshalling the given object to a StAX {@code XMLStreamWriter}.
* @param graph the root of the object graph to marshal
* @param streamWriter the {@code XMLStreamWriter} to write to
* @throws XmlMappingException if the given object cannot be marshalled to the DOM node
*/
protected abstract void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter)
throws XmlMappingException;
/**
* Abstract template method for marshalling the given object graph to a SAX {@code ContentHandler}.
* @param graph the root of the object graph to marshal
* @param contentHandler the SAX {@code ContentHandler}
* @param lexicalHandler the SAX2 {@code LexicalHandler}. Can be {@code null}.
* @throws XmlMappingException if the given object cannot be marshalled to the handlers
*/
protected abstract void marshalSaxHandlers(
Object graph, ContentHandler contentHandler, @Nullable LexicalHandler lexicalHandler)
throws XmlMappingException;
/**
* Abstract template method for marshalling the given object graph to a {@code OutputStream}.
* @param graph the root of the object graph to marshal
* @param outputStream the {@code OutputStream} to write to
* @throws XmlMappingException if the given object cannot be marshalled to the writer
* @throws IOException if an I/O exception occurs
*/
protected abstract void marshalOutputStream(Object graph, OutputStream outputStream)
throws XmlMappingException, IOException;
/**
* Abstract template method for marshalling the given object graph to a {@code Writer}.
* @param graph the root of the object graph to marshal
* @param writer the {@code Writer} to write to
* @throws XmlMappingException if the given object cannot be marshalled to the writer
* @throws IOException if an I/O exception occurs
*/
protected abstract void marshalWriter(Object graph, Writer writer)
throws XmlMappingException, IOException;
/**
* Abstract template method for unmarshalling from a given DOM {@code Node}.
* @param node the DOM node that contains the objects to be unmarshalled
* @return the object graph
* @throws XmlMappingException if the given DOM node cannot be mapped to an object
*/
protected abstract Object unmarshalDomNode(Node node) throws XmlMappingException;
/**
* Abstract template method for unmarshalling from a given Stax {@code XMLEventReader}.
* @param eventReader the {@code XMLEventReader} to read from
* @return the object graph
* @throws XmlMappingException if the given event reader cannot be converted to an object
*/
protected abstract Object unmarshalXmlEventReader(XMLEventReader eventReader)
throws XmlMappingException;
/**
* Abstract template method for unmarshalling from a given Stax {@code XMLStreamReader}.
* @param streamReader the {@code XMLStreamReader} to read from
* @return the object graph
* @throws XmlMappingException if the given stream reader cannot be converted to an object
*/
protected abstract Object unmarshalXmlStreamReader(XMLStreamReader streamReader)
throws XmlMappingException;
/**
* Abstract template method for unmarshalling using a given SAX {@code XMLReader}
* and {@code InputSource}.
* @param xmlReader the SAX {@code XMLReader} to parse with
* @param inputSource the input source to parse from
* @return the object graph
* @throws XmlMappingException if the given reader and input source cannot be converted to an object
* @throws IOException if an I/O exception occurs
*/
protected abstract Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
throws XmlMappingException, IOException;
/**
* Abstract template method for unmarshalling from a given {@code InputStream}.
* @param inputStream the {@code InputStreamStream} to read from
* @return the object graph
* @throws XmlMappingException if the given stream cannot be converted to an object
* @throws IOException if an I/O exception occurs
*/
protected abstract Object unmarshalInputStream(InputStream inputStream)
throws XmlMappingException, IOException;
/**
* Abstract template method for unmarshalling from a given {@code Reader}.
* @param reader the {@code Reader} to read from
* @return the object graph
* @throws XmlMappingException if the given reader cannot be converted to an object
* @throws IOException if an I/O exception occurs
*/
protected abstract Object unmarshalReader(Reader reader)
throws XmlMappingException, IOException;
}
| AbstractMarshaller |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/provisioning/UserDetailsManagerResourceFactoryBeanPropertiesResourceITests.java | {
"start": 1292,
"end": 1563
} | class ____ {
@Autowired
UserDetailsManager users;
@Test
public void loadUserByUsernameWhenUserFoundThenNotNull() {
assertThat(this.users.loadUserByUsername("user")).isNotNull();
}
@Configuration
static | UserDetailsManagerResourceFactoryBeanPropertiesResourceITests |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableProcessorSupplier.java | {
"start": 925,
"end": 1918
} | interface ____<KIn, VIn, KOut, VOut> extends ProcessorSupplier<KIn, Change<VIn>, KOut, Change<VOut>> {
KTableValueGetterSupplier<KOut, VOut> view();
/**
* Potentially enables sending old values.
* <p>
* If {@code forceMaterialization} is {@code true}, the method will force the materialization of upstream nodes to
* enable sending old values.
* <p>
* If {@code forceMaterialization} is {@code false}, the method will only enable the sending of old values <i>if</i>
* an upstream node is already materialized.
*
* @param forceMaterialization indicates if an upstream node should be forced to materialize to enable sending old
* values.
* @return {@code true} if sending old values is enabled, i.e. either because {@code forceMaterialization} was
* {@code true} or some upstream node is materialized.
*/
boolean enableSendingOldValues(boolean forceMaterialization);
}
| KTableProcessorSupplier |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeCheckerTest.java | {
"start": 31806,
"end": 32254
} | class ____ {
final Class clazz = Test.class;
}
""")
.doTest();
}
@Ignore("b/26797524 - add tests for generic arguments")
@Test
public void threadSafeTypeParam() {
compilationHelper
.addSourceLines(
"X.java",
"""
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.ThreadSafe;
public | Test |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/IAM2EndpointBuilderFactory.java | {
"start": 1572,
"end": 17127
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedIAM2EndpointBuilder advanced() {
return (AdvancedIAM2EndpointBuilder) this;
}
/**
* To use an existing configured AWS IAM client.
*
* The option is a:
* <code>software.amazon.awssdk.services.iam.IamClient</code> type.
*
* Group: producer
*
* @param iamClient the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder iamClient(software.amazon.awssdk.services.iam.IamClient iamClient) {
doSetProperty("iamClient", iamClient);
return this;
}
/**
* To use an existing configured AWS IAM client.
*
* The option will be converted to a
* <code>software.amazon.awssdk.services.iam.IamClient</code> type.
*
* Group: producer
*
* @param iamClient the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder iamClient(String iamClient) {
doSetProperty("iamClient", iamClient);
return this;
}
/**
* The operation to perform. You can configure a default operation on
* the component level, or the operation as part of the endpoint, or via
* a message header with the key CamelAwsIAMOperation.
*
* The option is a:
* <code>org.apache.camel.component.aws2.iam.IAM2Operations</code> type.
*
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder operation(org.apache.camel.component.aws2.iam.IAM2Operations operation) {
doSetProperty("operation", operation);
return this;
}
/**
* The operation to perform. You can configure a default operation on
* the component level, or the operation as part of the endpoint, or via
* a message header with the key CamelAwsIAMOperation.
*
* The option will be converted to a
* <code>org.apache.camel.component.aws2.iam.IAM2Operations</code> type.
*
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder operation(String operation) {
doSetProperty("operation", operation);
return this;
}
/**
* Set the need for overriding the endpoint. This option needs to be
* used in combination with the uriEndpointOverride option.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param overrideEndpoint the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder overrideEndpoint(boolean overrideEndpoint) {
doSetProperty("overrideEndpoint", overrideEndpoint);
return this;
}
/**
* Set the need for overriding the endpoint. This option needs to be
* used in combination with the uriEndpointOverride option.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param overrideEndpoint the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder overrideEndpoint(String overrideEndpoint) {
doSetProperty("overrideEndpoint", overrideEndpoint);
return this;
}
/**
* If we want to use a POJO request as body or not.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param pojoRequest the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder pojoRequest(boolean pojoRequest) {
doSetProperty("pojoRequest", pojoRequest);
return this;
}
/**
* If we want to use a POJO request as body or not.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param pojoRequest the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder pojoRequest(String pojoRequest) {
doSetProperty("pojoRequest", pojoRequest);
return this;
}
/**
* The region in which IAM client needs to work. When using this
* parameter, the configuration will expect the lowercase name of the
* region (for example, ap-east-1) You'll need to use the name
* Region.EU_WEST_1.id().
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: aws-global
* Group: producer
*
* @param region the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder region(String region) {
doSetProperty("region", region);
return this;
}
/**
* Set the overriding uri endpoint. This option needs to be used in
* combination with overrideEndpoint option.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param uriEndpointOverride the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder uriEndpointOverride(String uriEndpointOverride) {
doSetProperty("uriEndpointOverride", uriEndpointOverride);
return this;
}
/**
* To define a proxy host when instantiating the IAM client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyHost the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder proxyHost(String proxyHost) {
doSetProperty("proxyHost", proxyHost);
return this;
}
/**
* To define a proxy port when instantiating the IAM client.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: proxy
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder proxyPort(Integer proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* To define a proxy port when instantiating the IAM client.
*
* The option will be converted to a <code>java.lang.Integer</code>
* type.
*
* Group: proxy
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder proxyPort(String proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* To define a proxy protocol when instantiating the IAM client.
*
* The option is a: <code>software.amazon.awssdk.core.Protocol</code>
* type.
*
* Default: HTTPS
* Group: proxy
*
* @param proxyProtocol the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder proxyProtocol(software.amazon.awssdk.core.Protocol proxyProtocol) {
doSetProperty("proxyProtocol", proxyProtocol);
return this;
}
/**
* To define a proxy protocol when instantiating the IAM client.
*
* The option will be converted to a
* <code>software.amazon.awssdk.core.Protocol</code> type.
*
* Default: HTTPS
* Group: proxy
*
* @param proxyProtocol the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder proxyProtocol(String proxyProtocol) {
doSetProperty("proxyProtocol", proxyProtocol);
return this;
}
/**
* Amazon AWS Access Key.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param accessKey the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder accessKey(String accessKey) {
doSetProperty("accessKey", accessKey);
return this;
}
/**
* If using a profile credentials provider, this parameter will set the
* profile name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param profileCredentialsName the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder profileCredentialsName(String profileCredentialsName) {
doSetProperty("profileCredentialsName", profileCredentialsName);
return this;
}
/**
* Amazon AWS Secret Key.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param secretKey the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder secretKey(String secretKey) {
doSetProperty("secretKey", secretKey);
return this;
}
/**
* Amazon AWS Session Token used when the user needs to assume an IAM
* role.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sessionToken the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder sessionToken(String sessionToken) {
doSetProperty("sessionToken", sessionToken);
return this;
}
/**
* If we want to trust all certificates in case of overriding the
* endpoint.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param trustAllCertificates the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder trustAllCertificates(boolean trustAllCertificates) {
doSetProperty("trustAllCertificates", trustAllCertificates);
return this;
}
/**
* If we want to trust all certificates in case of overriding the
* endpoint.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param trustAllCertificates the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder trustAllCertificates(String trustAllCertificates) {
doSetProperty("trustAllCertificates", trustAllCertificates);
return this;
}
/**
* Set whether the IAM client should expect to load credentials through
* a default credentials provider or to expect static credentials to be
* passed in.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useDefaultCredentialsProvider the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder useDefaultCredentialsProvider(boolean useDefaultCredentialsProvider) {
doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider);
return this;
}
/**
* Set whether the IAM client should expect to load credentials through
* a default credentials provider or to expect static credentials to be
* passed in.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useDefaultCredentialsProvider the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder useDefaultCredentialsProvider(String useDefaultCredentialsProvider) {
doSetProperty("useDefaultCredentialsProvider", useDefaultCredentialsProvider);
return this;
}
/**
* Set whether the IAM client should expect to load credentials through
* a profile credentials provider.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useProfileCredentialsProvider the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder useProfileCredentialsProvider(boolean useProfileCredentialsProvider) {
doSetProperty("useProfileCredentialsProvider", useProfileCredentialsProvider);
return this;
}
/**
* Set whether the IAM client should expect to load credentials through
* a profile credentials provider.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useProfileCredentialsProvider the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder useProfileCredentialsProvider(String useProfileCredentialsProvider) {
doSetProperty("useProfileCredentialsProvider", useProfileCredentialsProvider);
return this;
}
/**
* Set whether the IAM client should expect to use Session Credentials.
* This is useful in a situation in which the user needs to assume a IAM
* role for doing operations in IAM.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useSessionCredentials the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder useSessionCredentials(boolean useSessionCredentials) {
doSetProperty("useSessionCredentials", useSessionCredentials);
return this;
}
/**
* Set whether the IAM client should expect to use Session Credentials.
* This is useful in a situation in which the user needs to assume a IAM
* role for doing operations in IAM.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useSessionCredentials the value to set
* @return the dsl builder
*/
default IAM2EndpointBuilder useSessionCredentials(String useSessionCredentials) {
doSetProperty("useSessionCredentials", useSessionCredentials);
return this;
}
}
/**
* Advanced builder for endpoint for the AWS Identity and Access Management (IAM) component.
*/
public | IAM2EndpointBuilder |
java | apache__spark | sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaSimpleReaderFactory.java | {
"start": 1154,
"end": 1776
} | class ____ implements PartitionReaderFactory {
@Override
public PartitionReader<InternalRow> createReader(InputPartition partition) {
JavaRangeInputPartition p = (JavaRangeInputPartition) partition;
return new PartitionReader<InternalRow>() {
private int current = p.start - 1;
@Override
public boolean next() {
current += 1;
return current < p.end;
}
@Override
public InternalRow get() {
return new GenericInternalRow(new Object[] {current, -current});
}
@Override
public void close() {
}
};
}
}
| JavaSimpleReaderFactory |
java | grpc__grpc-java | util/src/main/java/io/grpc/util/OutlierDetectionLoadBalancer.java | {
"start": 28191,
"end": 31688
} | class ____ implements OutlierEjectionAlgorithm {
private final OutlierDetectionLoadBalancerConfig config;
private final ChannelLogger logger;
SuccessRateOutlierEjectionAlgorithm(OutlierDetectionLoadBalancerConfig config,
ChannelLogger logger) {
checkArgument(config.successRateEjection != null, "success rate ejection config is null");
this.config = config;
this.logger = logger;
}
@Override
public void ejectOutliers(EndpointTrackerMap trackerMap, long ejectionTimeNanos) {
// Only consider addresses that have the minimum request volume specified in the config.
List<EndpointTracker> trackersWithVolume = trackersWithVolume(trackerMap,
config.successRateEjection.requestVolume);
// If we don't have enough endpoints with significant volume then there's nothing to do.
if (trackersWithVolume.size() < config.successRateEjection.minimumHosts
|| trackersWithVolume.size() == 0) {
return;
}
// Calculate mean and standard deviation of the fractions of successful calls.
List<Double> successRates = new ArrayList<>();
for (EndpointTracker tracker : trackersWithVolume) {
successRates.add(tracker.successRate());
}
double mean = mean(successRates);
double stdev = standardDeviation(successRates, mean);
double requiredSuccessRate =
mean - stdev * (config.successRateEjection.stdevFactor / 1000f);
for (EndpointTracker tracker : trackersWithVolume) {
// If we are above or equal to the max ejection percentage, don't eject any more. This will
// allow the total ejections to go one above the max, but at the same time it assures at
// least one ejection, which the spec calls for. This behavior matches what Envoy proxy
// does.
if (trackerMap.ejectionPercentage() >= config.maxEjectionPercent) {
return;
}
// If success rate is below the threshold, eject the address.
if (tracker.successRate() < requiredSuccessRate) {
logger.log(ChannelLogLevel.DEBUG,
"SuccessRate algorithm detected outlier: {0}. "
+ "Parameters: successRate={1}, mean={2}, stdev={3}, "
+ "requiredSuccessRate={4}",
tracker, tracker.successRate(), mean, stdev, requiredSuccessRate);
// Only eject some endpoints based on the enforcement percentage.
if (new Random().nextInt(100) < config.successRateEjection.enforcementPercentage) {
tracker.ejectSubchannels(ejectionTimeNanos);
}
}
}
}
/** Calculates the mean of the given values. */
@VisibleForTesting
static double mean(Collection<Double> values) {
double totalValue = 0;
for (double value : values) {
totalValue += value;
}
return totalValue / values.size();
}
/** Calculates the standard deviation for the given values and their mean. */
@VisibleForTesting
static double standardDeviation(Collection<Double> values, double mean) {
double squaredDifferenceSum = 0;
for (double value : values) {
double difference = value - mean;
squaredDifferenceSum += difference * difference;
}
double variance = squaredDifferenceSum / values.size();
return Math.sqrt(variance);
}
}
static final | SuccessRateOutlierEjectionAlgorithm |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/task/engine/NacosDelayTaskExecuteEngine.java | {
"start": 1295,
"end": 4735
} | class ____ extends AbstractNacosTaskExecuteEngine<AbstractDelayTask> {
private final ScheduledExecutorService processingExecutor;
protected final ConcurrentHashMap<Object, AbstractDelayTask> tasks;
protected final ReentrantLock lock = new ReentrantLock();
public NacosDelayTaskExecuteEngine(String name) {
this(name, null);
}
public NacosDelayTaskExecuteEngine(String name, Logger logger) {
this(name, 32, logger, 100L);
}
public NacosDelayTaskExecuteEngine(String name, int initCapacity, Logger logger, long processInterval) {
super(logger);
tasks = new ConcurrentHashMap<>(initCapacity);
processingExecutor = ExecutorFactory.newSingleScheduledExecutorService(new NameThreadFactory(name));
processingExecutor
.scheduleWithFixedDelay(new ProcessRunnable(), processInterval, processInterval, TimeUnit.MILLISECONDS);
}
@Override
public int size() {
lock.lock();
try {
return tasks.size();
} finally {
lock.unlock();
}
}
@Override
public boolean isEmpty() {
lock.lock();
try {
return tasks.isEmpty();
} finally {
lock.unlock();
}
}
@Override
public AbstractDelayTask removeTask(Object key) {
lock.lock();
try {
AbstractDelayTask task = tasks.get(key);
if (null != task && task.shouldProcess()) {
return tasks.remove(key);
} else {
return null;
}
} finally {
lock.unlock();
}
}
@Override
public Collection<Object> getAllTaskKeys() {
Collection<Object> keys = new HashSet<>();
lock.lock();
try {
keys.addAll(tasks.keySet());
} finally {
lock.unlock();
}
return keys;
}
@Override
public void shutdown() throws NacosException {
tasks.clear();
processingExecutor.shutdown();
}
@Override
public void addTask(Object key, AbstractDelayTask newTask) {
lock.lock();
try {
AbstractDelayTask existTask = tasks.get(key);
if (null != existTask) {
newTask.merge(existTask);
}
tasks.put(key, newTask);
} finally {
lock.unlock();
}
}
/**
* process tasks in execute engine.
*/
protected void processTasks() {
Collection<Object> keys = getAllTaskKeys();
for (Object taskKey : keys) {
AbstractDelayTask task = removeTask(taskKey);
if (null == task) {
continue;
}
NacosTaskProcessor processor = getProcessor(taskKey);
try {
// ReAdd task if process failed
if (!processor.process(task)) {
retryFailedTask(taskKey, task);
}
} catch (Throwable e) {
getEngineLog().error("Nacos task execute error ", e);
retryFailedTask(taskKey, task);
}
}
}
private void retryFailedTask(Object key, AbstractDelayTask task) {
task.setLastProcessTime(System.currentTimeMillis());
addTask(key, task);
}
private | NacosDelayTaskExecuteEngine |
java | apache__camel | components/camel-jq/src/test/java/org/apache/camel/language/jq/JqSimpleTransformTest.java | {
"start": 984,
"end": 2086
} | class ____ extends JqTestSupport {
private static String EXPECTED = """
{
"roll": 123,
"country": "sweden",
"fullname": "scott"
}""";
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.transform().simple("""
{
"roll": ${jq(.id)},
"country": "${jq(.country // constant(sweden))}",
"fullname": "${jq(.name)}"
}""")
.to("mock:result");
}
};
}
@Test
public void testTransform() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived(EXPECTED);
template.sendBody("direct:start", "{\"id\": 123, \"age\": 42, \"name\": \"scott\"}");
MockEndpoint.assertIsSatisfied(context);
}
}
| JqSimpleTransformTest |
java | alibaba__nacos | common/src/test/java/com/alibaba/nacos/common/labels/impl/DefaultLabelsCollectorManagerTest.java | {
"start": 930,
"end": 1961
} | class ____ {
@Test
void tagV2LabelsCollectorTest() {
Properties properties = new Properties();
properties.put(Constants.APP_CONN_LABELS_KEY, "k1=v1,gray=properties_pre");
properties.put(Constants.CONFIG_GRAY_LABEL, "properties_after");
DefaultLabelsCollectorManager defaultLabelsCollectorManager = new DefaultLabelsCollectorManager();
Map<String, String> labels = defaultLabelsCollectorManager.getLabels(properties);
assertEquals("properties_after", labels.get(Constants.CONFIG_GRAY_LABEL));
assertEquals("v1", labels.get("k1"));
}
@Test
void tagV2LabelsCollectorOrderTest() {
Properties properties = new Properties();
DefaultLabelsCollectorManager defaultLabelsCollectorManager = new DefaultLabelsCollectorManager();
Map<String, String> labels = defaultLabelsCollectorManager.getLabels(properties);
String test = labels.get("test");
assertEquals("test2", test);
}
}
| DefaultLabelsCollectorManagerTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/select/OracleSelectTest77.java | {
"start": 976,
"end": 5738
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"SELECT\n" +
" a.contract_header_id,\n" +
" a.contract_number,\n" +
" a.contract_name,\n" +
" b.field_value contract_amount,\n" +
" c.payment_rate,\n" +
" d.id customer_id,\n" +
" d.name customer_name,\n" +
" a.salesrep_id\n" +
"FROM (\n" +
" tcc_cpr.tcc_cpr_contract_headers a\n" +
" LEFT JOIN tcc_cpr.virtual_value2004 b ON\n" +
" a.contract_header_id = b.contract_header_id\n" +
" AND\n" +
" b.template_id =\n" +
" CASE\n" +
" WHEN a.contract_category = 'SALES' THEN 1\n" +
" WHEN a.contract_category = 'INTERNATIONAL' THEN 49\n" +
" END\n" +
" AND\n" +
" b.enabled_flag = 'Y'\n" +
" LEFT JOIN tcc_cpr.tcc_cpr_payment c ON\n" +
" a.contract_header_id = c.contract_header_id\n" +
" AND\n" +
" c.payment_condition_code = 'ZTE_PAYMENT_YUFU'\n" +
" AND\n" +
" c.enabled_flag = 'Y'\n" +
")\n" +
" LEFT JOIN tcc_cust.tcc_cust_customer d ON (\n" +
" a.customer_id = d.id\n" +
" AND (\n" +
" d.enable_flag = 'Y'\n" +
" OR\n" +
" d.enable_flag = 'T'\n" +
" )\n" +
" )\n" +
"WHERE\n" +
" a.enabled_flag = 'Y'";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(4, visitor.getTables().size());
assertEquals(18, visitor.getColumns().size());
assertEquals(12, visitor.getConditions().size());
assertEquals(3, visitor.getRelationships().size());
assertEquals(0, visitor.getOrderByColumns().size());
{
String text = SQLUtils.toOracleString(stmt);
assertEquals("SELECT a.contract_header_id, a.contract_number, a.contract_name, b.field_value AS contract_amount, c.payment_rate\n" +
"\t, d.id AS customer_id, d.name AS customer_name, a.salesrep_id\n" +
"FROM tcc_cpr.tcc_cpr_contract_headers a\n" +
"LEFT JOIN tcc_cpr.virtual_value2004 b ON a.contract_header_id = b.contract_header_id\n" +
"\tAND b.template_id = CASE\n" +
"\t\tWHEN a.contract_category = 'SALES' THEN 1\n" +
"\t\tWHEN a.contract_category = 'INTERNATIONAL' THEN 49\n" +
"\tEND\n" +
"\tAND b.enabled_flag = 'Y' \n" +
"LEFT JOIN tcc_cpr.tcc_cpr_payment c ON a.contract_header_id = c.contract_header_id\n" +
"\tAND c.payment_condition_code = 'ZTE_PAYMENT_YUFU'\n" +
"\tAND c.enabled_flag = 'Y' \n" +
"\tLEFT JOIN tcc_cust.tcc_cust_customer d ON (a.customer_id = d.id\n" +
"\tAND (d.enable_flag = 'Y'\n" +
"\t\tOR d.enable_flag = 'T')) \n" +
"WHERE a.enabled_flag = 'Y'", text);
}
// assertTrue(visitor.getColumns().contains(new TableStat.Column("acduser.vw_acd_info", "xzqh")));
// assertTrue(visitor.getOrderByColumns().contains(new TableStat.Column("employees", "last_name")));
}
}
| OracleSelectTest77 |
java | apache__rocketmq | tieredstore/src/test/java/org/apache/rocketmq/tieredstore/file/FlatFileFactoryTest.java | {
"start": 1163,
"end": 2166
} | class ____ {
@Test
public void factoryTest() {
MessageStoreConfig storeConfig = new MessageStoreConfig();
storeConfig.setTieredStoreFilePath(MessageStoreUtilTest.getRandomStorePath());
MetadataStore metadataStore = new DefaultMetadataStore(storeConfig);
FlatFileFactory factory = new FlatFileFactory(metadataStore, storeConfig);
Assert.assertEquals(storeConfig, factory.getStoreConfig());
Assert.assertEquals(metadataStore, factory.getMetadataStore());
FlatAppendFile flatFile1 = factory.createFlatFileForCommitLog("CommitLog");
FlatAppendFile flatFile2 = factory.createFlatFileForConsumeQueue("ConsumeQueue");
FlatAppendFile flatFile3 = factory.createFlatFileForIndexFile("IndexFile");
Assert.assertNotNull(flatFile1);
Assert.assertNotNull(flatFile2);
Assert.assertNotNull(flatFile3);
flatFile1.destroy();
flatFile2.destroy();
flatFile3.destroy();
}
} | FlatFileFactoryTest |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/monitor/ThreadTaskQueueMonitorTask.java | {
"start": 947,
"end": 1670
} | class ____ implements Runnable {
private final AsyncNotifyService notifySingleService;
ThreadTaskQueueMonitorTask(AsyncNotifyService notifySingleService) {
this.notifySingleService = notifySingleService;
}
@Override
public void run() {
int size = ConfigExecutor.asyncNotifyQueueSize();
int notifierClientSize = ConfigExecutor.asyncConfigChangeClientNotifyQueueSize();
MEMORY_LOG.info("toNotifyTaskSize = {}", size);
MEMORY_LOG.info("toClientNotifyTaskSize = {}", notifierClientSize);
MetricsMonitor.getNotifyTaskMonitor().set(size);
MetricsMonitor.getNotifyClientTaskMonitor().set(notifierClientSize);
}
}
| ThreadTaskQueueMonitorTask |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/SizeAttributeReferenceTest.java | {
"start": 1721,
"end": 1987
} | class ____ {
@Id
public Integer id;
@ManyToOne
public EntityWithAttributeNamedSize parent;
@OneToMany(mappedBy = "parent")
public Set<EntityWithAttributeNamedSize> children;
@Column(name = "`size`")
private String size;
}
}
| EntityWithAttributeNamedSize |
java | apache__avro | lang/java/protobuf/src/test/java/org/apache/avro/protobuf/noopt/TestProto3.java | {
"start": 229,
"end": 901
} | class ____ {
private TestProto3() {
}
static {
com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion(
com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, /* major= */ 4, /* minor= */ 29, /* patch= */ 3,
/* suffix= */ "", TestProto3.class.getName());
}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
/**
* Protobuf enum {@code org.apache.avro.protobuf.noopt.Status}
*/
public | TestProto3 |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/core/v2/client/factory/ClientFactoryHolder.java | {
"start": 1009,
"end": 2324
} | class ____ {
private static final ClientFactoryHolder INSTANCE = new ClientFactoryHolder();
private final HashMap<String, ClientFactory> clientFactories;
private ClientFactoryHolder() {
clientFactories = new HashMap<>(4);
Collection<ClientFactory> clientFactories = NacosServiceLoader.load(ClientFactory.class);
for (ClientFactory each : clientFactories) {
if (this.clientFactories.containsKey(each.getType())) {
Loggers.SRV_LOG.warn("Client type {} found multiple factory, use {} default", each.getType(),
each.getClass().getCanonicalName());
}
this.clientFactories.put(each.getType(), each);
}
}
public static ClientFactoryHolder getInstance() {
return INSTANCE;
}
/**
* Find target type {@link ClientFactory}.
*
* @param type target type
* @return target type {@link ClientFactory}, if not fount, return 'default' client factory.
*/
public ClientFactory findClientFactory(String type) {
if (StringUtils.isEmpty(type) || !clientFactories.containsKey(type)) {
return clientFactories.get(ClientConstants.DEFAULT_FACTORY);
}
return clientFactories.get(type);
}
}
| ClientFactoryHolder |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java | {
"start": 43996,
"end": 47209
} | class ____ extends ReplicationAction<ResyncReplicationRequest, ResyncReplicationRequest, ResyncReplicationResponse> {
ResyncAction(ResyncReplicationRequest request, ActionListener<ResyncReplicationResponse> listener, ReplicationGroup group) {
super(request, listener, group, "resync");
}
@Override
protected void performOnPrimary(IndexShard primary, ResyncReplicationRequest request, ActionListener<PrimaryResult> listener) {
ActionListener.completeWith(listener, () -> {
final TransportWriteAction.WritePrimaryResult<ResyncReplicationRequest, ResyncReplicationResponse> result =
executeResyncOnPrimary(primary, request);
return new PrimaryResult(result.replicaRequest(), result.replicationResponse);
});
}
@Override
protected void performOnReplica(ResyncReplicationRequest request, IndexShard replica) throws Exception {
executeResyncOnReplica(
replica,
request,
getPrimaryShard().getPendingPrimaryTerm(),
getPrimaryShard().getLastKnownGlobalCheckpoint(),
getPrimaryShard().getMaxSeqNoOfUpdatesOrDeletes()
);
}
}
private TransportWriteAction.WritePrimaryResult<ResyncReplicationRequest, ResyncReplicationResponse> executeResyncOnPrimary(
IndexShard primary,
ResyncReplicationRequest request
) {
final var threadpool = mock(ThreadPool.class);
final var transportService = mock(TransportService.class);
when(transportService.getThreadPool()).thenReturn(threadpool);
final TransportWriteAction.WritePrimaryResult<ResyncReplicationRequest, ResyncReplicationResponse> result =
new TransportWriteAction.WritePrimaryResult<>(
TransportResyncReplicationAction.performOnPrimary(request),
new ResyncReplicationResponse(),
null,
primary,
logger,
// TODO: Fix
new PostWriteRefresh(transportService)
);
TransportWriteActionTestHelper.performPostWriteActions(primary, request, result.location, logger);
return result;
}
private void executeResyncOnReplica(
IndexShard replica,
ResyncReplicationRequest request,
long operationPrimaryTerm,
long globalCheckpointOnPrimary,
long maxSeqNoOfUpdatesOrDeletes
) throws Exception {
final Translog.Location location;
final PlainActionFuture<Releasable> acquirePermitFuture = new PlainActionFuture<>();
replica.acquireReplicaOperationPermit(
operationPrimaryTerm,
globalCheckpointOnPrimary,
maxSeqNoOfUpdatesOrDeletes,
acquirePermitFuture,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
try (Releasable ignored = acquirePermitFuture.actionGet()) {
location = TransportResyncReplicationAction.performOnReplica(request, replica);
}
TransportWriteActionTestHelper.performPostWriteActions(replica, request, location, logger);
}
| ResyncAction |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/builder/ToStringBuilder.java | {
"start": 1540,
"end": 3086
} | class ____ {
* String name;
* int age;
* boolean smoker;
*
* ...
*
* public String toString() {
* return new ToStringBuilder(this).
* append("name", name).
* append("age", age).
* append("smoker", smoker).
* toString();
* }
* }
* </pre>
*
* <p>This will produce a toString of the format:
* {@code Person@7f54[name=Stephen,age=29,smoker=false]}</p>
*
* <p>To add the superclass {@code toString}, use {@link #appendSuper}.
* To append the {@code toString} from an object that is delegated
* to (or any other object), use {@link #appendToString}.</p>
*
* <p>Alternatively, there is a method that uses reflection to determine
* the fields to test. Because these fields are usually private, the method,
* {@code reflectionToString}, uses {@code AccessibleObject.setAccessible} to
* change the visibility of the fields. This will fail under a security manager,
* unless the appropriate permissions are set up correctly. It is also
* slower than testing explicitly.</p>
*
* <p>A typical invocation for this method would look like:</p>
*
* <pre>
* public String toString() {
* return ToStringBuilder.reflectionToString(this);
* }
* </pre>
*
* <p>You can also use the builder to debug 3rd party objects:</p>
*
* <pre>
* System.out.println("An object: " + ToStringBuilder.reflectionToString(anObject));
* </pre>
*
* <p>The exact format of the {@code toString} is determined by
* the {@link ToStringStyle} passed into the constructor.</p>
*
* @since 1.0
*/
public | Person |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/transaction/TransactionFactory.java | {
"start": 924,
"end": 1805
} | interface ____ {
/**
* Sets transaction factory custom properties.
*
* @param props
* the new properties
*/
default void setProperties(Properties props) {
// NOP
}
/**
* Creates a {@link Transaction} out of an existing connection.
*
* @param conn
* Existing database connection
*
* @return Transaction
*
* @since 3.1.0
*/
Transaction newTransaction(Connection conn);
/**
* Creates a {@link Transaction} out of a datasource.
*
* @param dataSource
* DataSource to take the connection from
* @param level
* Desired isolation level
* @param autoCommit
* Desired autocommit
*
* @return Transaction
*
* @since 3.1.0
*/
Transaction newTransaction(DataSource dataSource, TransactionIsolationLevel level, boolean autoCommit);
}
| TransactionFactory |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/BDDSoftAssertionsProvider.java | {
"start": 2944,
"end": 53689
} | interface ____ extends SoftAssertionsProvider {
/**
* Creates a new instance of <code>{@link BigDecimalAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default BigDecimalAssert then(BigDecimal actual) {
return proxy(BigDecimalAssert.class, BigDecimal.class, actual);
}
/**
* Creates a new instance of <code>{@link BigIntegerAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
default BigIntegerAssert then(BigInteger actual) {
return proxy(BigIntegerAssert.class, BigInteger.class, actual);
}
/**
* Creates a new instance of <code>{@link BooleanAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default BooleanAssert then(boolean actual) {
return proxy(BooleanAssert.class, Boolean.class, actual);
}
/**
* Creates a new instance of <code>{@link BooleanAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default BooleanAssert then(Boolean actual) {
return proxy(BooleanAssert.class, Boolean.class, actual);
}
/**
* Creates a new instance of <code>{@link BooleanArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default BooleanArrayAssert then(boolean[] actual) {
return proxy(BooleanArrayAssert.class, boolean[].class, actual);
}
/**
* Creates a new instance of <code>{@link Boolean2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Boolean2DArrayAssert then(boolean[][] actual) {
return proxy(Boolean2DArrayAssert.class, boolean[][].class, actual);
}
/**
* Creates a new instance of <code>{@link ByteAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ByteAssert then(byte actual) {
return proxy(ByteAssert.class, Byte.class, actual);
}
/**
* Creates a new instance of <code>{@link ByteAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ByteAssert then(Byte actual) {
return proxy(ByteAssert.class, Byte.class, actual);
}
/**
* Creates a new instance of <code>{@link ByteArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ByteArrayAssert then(byte[] actual) {
return proxy(ByteArrayAssert.class, byte[].class, actual);
}
/**
* Creates a new instance of <code>{@link Byte2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Byte2DArrayAssert then(byte[][] actual) {
return proxy(Byte2DArrayAssert.class, byte[][].class, actual);
}
/**
* Creates a new instance of <code>{@link CharacterAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default CharacterAssert then(char actual) {
return proxy(CharacterAssert.class, Character.class, actual);
}
/**
* Creates a new instance of <code>{@link CharArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default CharArrayAssert then(char[] actual) {
return proxy(CharArrayAssert.class, char[].class, actual);
}
/**
* Creates a new instance of <code>{@link Char2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Char2DArrayAssert then(char[][] actual) {
return proxy(Char2DArrayAssert.class, char[][].class, actual);
}
/**
* Creates a new instance of <code>{@link CharacterAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default CharacterAssert then(Character actual) {
return proxy(CharacterAssert.class, Character.class, actual);
}
/**
* Creates a new instance of <code>{@link ClassAssert}</code>
* <p>
* We don't return {@link ClassAssert} as it has overridden methods to annotated with {@link SafeVarargs}.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ClassAssert then(Class<?> actual) {
return proxy(ClassAssert.class, Class.class, actual);
}
/**
* Creates a new instance of <code>{@link CollectionAssert}</code>.
*
* @param <T> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.21.0
*/
@SuppressWarnings("unchecked")
default <T> CollectionAssert<T> then(Collection<? extends T> actual) {
return proxy(CollectionAssert.class, Collection.class, actual);
}
/**
* Creates a new instance of <code>{@link CollectionAssert}</code>.
* <p>
* Use this over {@link #then(Collection)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param <E> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.23.0
*/
default <E> AbstractCollectionAssert<?, Collection<? extends E>, E, ObjectAssert<E>> thenCollection(Collection<? extends E> actual) {
return then(actual);
}
/**
* Creates a new instance of <code>{@link GenericComparableAssert}</code> with standard comparison semantics.
*
* @param <T> the type of actual.
* @param actual the actual value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <T extends Comparable<? super T>> AbstractComparableAssert<?, T> then(T actual) {
return proxy(GenericComparableAssert.class, Comparable.class, actual);
}
/**
* Creates a new instance of <code>{@link UniversalComparableAssert}</code> with standard comparison semantics.
* <p>
* Use this over {@link #then(Comparable)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param <T> the type of actual.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.23.0
*/
@SuppressWarnings("unchecked")
default <T> AbstractUniversalComparableAssert<?, T> thenComparable(Comparable<T> actual) {
return proxy(UniversalComparableAssert.class, Comparable.class, actual);
}
/**
* Creates a new instance of <code>{@link IterableAssert}</code>.
*
* @param <T> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <T> IterableAssert<T> then(Iterable<? extends T> actual) {
return proxy(IterableAssert.class, Iterable.class, actual);
}
/**
* Creates a new instance of <code>{@link IterableAssert}</code>.
* <p>
* Use this over {@link #then(Iterable)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param <ELEMENT> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.23.0
*/
default <ELEMENT> IterableAssert<ELEMENT> thenIterable(Iterable<? extends ELEMENT> actual) {
return then(actual);
}
/**
* Creates a new instance of <code>{@link IteratorAssert}</code>.
* <p>
* <b>This is a breaking change in version 3.12.0:</b> this method used to return an {@link IterableAssert}.
*
* @param <T> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <T> IteratorAssert<T> then(Iterator<? extends T> actual) {
return proxy(IteratorAssert.class, Iterator.class, actual);
}
/**
* Creates a new instance of <code>{@link IteratorAssert}</code>.
* <p>
* Use this over {@link #then(Iterator)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param <ELEMENT> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.23.0
*/
default <ELEMENT> IteratorAssert<ELEMENT> thenIterator(Iterator<? extends ELEMENT> actual) {
return then(actual);
}
/**
* Creates a new instance of <code>{@link DoubleAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default DoubleAssert then(double actual) {
return proxy(DoubleAssert.class, Double.class, actual);
}
/**
* Creates a new instance of <code>{@link DoubleAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default DoubleAssert then(Double actual) {
return proxy(DoubleAssert.class, Double.class, actual);
}
/**
* Creates a new instance of <code>{@link DoubleArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default DoubleArrayAssert then(double[] actual) {
return proxy(DoubleArrayAssert.class, double[].class, actual);
}
/**
* Creates a new instance of <code>{@link Double2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Double2DArrayAssert then(double[][] actual) {
return proxy(Double2DArrayAssert.class, double[][].class, actual);
}
/**
* Creates a new instance of <code>{@link FileAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default FileAssert then(File actual) {
return proxy(FileAssert.class, File.class, actual);
}
/**
* Creates a new instance of <code>{@link FutureAssert}</code>.
*
* @param <RESULT> the type of the value contained in the {@link java.util.concurrent.Future}.
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <RESULT> FutureAssert<RESULT> then(Future<RESULT> actual) {
return proxy(FutureAssert.class, Future.class, actual);
}
/**
* Creates a new instance of <code>{@link InputStreamAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default InputStreamAssert then(InputStream actual) {
return proxy(InputStreamAssert.class, InputStream.class, actual);
}
/**
* Creates a new instance of <code>{@link FloatAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default FloatAssert then(float actual) {
return proxy(FloatAssert.class, Float.class, actual);
}
/**
* Creates a new instance of <code>{@link FloatAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default FloatAssert then(Float actual) {
return proxy(FloatAssert.class, Float.class, actual);
}
/**
* Creates a new instance of <code>{@link FloatArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default FloatArrayAssert then(float[] actual) {
return proxy(FloatArrayAssert.class, float[].class, actual);
}
/**
* Creates a new instance of <code>{@link Float2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Float2DArrayAssert then(float[][] actual) {
return proxy(Float2DArrayAssert.class, float[][].class, actual);
}
/**
* Creates a new instance of <code>{@link IntegerAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default IntegerAssert then(int actual) {
return proxy(IntegerAssert.class, Integer.class, actual);
}
/**
* Creates a new instance of <code>{@link IntArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default IntArrayAssert then(int[] actual) {
return proxy(IntArrayAssert.class, int[].class, actual);
}
/**
* Creates a new instance of <code>{@link Int2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Int2DArrayAssert then(int[][] actual) {
return proxy(Int2DArrayAssert.class, int[][].class, actual);
}
/**
* Creates a new instance of <code>{@link IntegerAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default IntegerAssert then(Integer actual) {
return proxy(IntegerAssert.class, Integer.class, actual);
}
/**
* Creates a new instance of <code>{@link ListAssert}</code>.
*
* @param <T> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <T> ListAssert<T> then(List<? extends T> actual) {
return proxy(ListAssert.class, List.class, actual);
}
/**
* Creates a new instance of <code>{@link ListAssert}</code>.
* <p>
* Use this over {@link #then(List)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param <ELEMENT> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.23.0
*/
default <ELEMENT> ListAssert<ELEMENT> thenList(List<? extends ELEMENT> actual) {
return then(actual);
}
/**
* Creates a new instance of <code>{@link LongAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default LongAssert then(long actual) {
return proxy(LongAssert.class, Long.class, actual);
}
/**
* Creates a new instance of <code>{@link LongAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default LongAssert then(Long actual) {
return proxy(LongAssert.class, Long.class, actual);
}
/**
* Creates a new instance of <code>{@link LongArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default LongArrayAssert then(long[] actual) {
return proxy(LongArrayAssert.class, long[].class, actual);
}
/**
* Creates a new instance of <code>{@link Long2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Long2DArrayAssert then(long[][] actual) {
return proxy(Long2DArrayAssert.class, long[][].class, actual);
}
/**
* Creates a new instance of <code>{@link ObjectAssert}</code>.
*
* @param actual the actual value.
* @param <T> the type of the actual value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <T> ObjectAssert<T> then(T actual) {
return proxy(ObjectAssert.class, Object.class, actual);
}
/**
* Creates a new instance of <code>{@link ObjectArrayAssert}</code>.
*
* @param <T> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <T> ObjectArrayAssert<T> then(T[] actual) {
return proxy(ObjectArrayAssert.class, Object[].class, actual);
}
/**
* Creates a new instance of <code>{@link Object2DArrayAssert}</code>.
*
* @param <T> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
@SuppressWarnings("unchecked")
default <T> Object2DArrayAssert<T> then(T[][] actual) {
return proxy(Object2DArrayAssert.class, Object[][].class, actual);
}
  /**
   * Creates a new instance of <code>{@link MapAssert}</code>.
   * <p>
   * NOTE(review): an earlier version of this comment stated that {@link MapAssert} is NOT
   * returned (because it has overridden methods annotated with {@link SafeVarargs}), yet the
   * declared return type here is {@code MapAssert} — confirm which contract is intended.
   *
   * @param <K> the type of keys in the map.
   * @param <V> the type of values in the map.
   * @param actual the actual map under test.
   * @return the created assertion object.
   */
  @SuppressWarnings("unchecked")
  default <K, V> MapAssert<K, V> then(Map<K, V> actual) {
    return proxy(MapAssert.class, Map.class, actual);
  }
/**
* Creates a new instance of <code>{@link ShortAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ShortAssert then(short actual) {
return proxy(ShortAssert.class, Short.class, actual);
}
/**
* Creates a new instance of <code>{@link ShortAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ShortAssert then(Short actual) {
return proxy(ShortAssert.class, Short.class, actual);
}
/**
* Creates a new instance of <code>{@link ShortArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ShortArrayAssert then(short[] actual) {
return proxy(ShortArrayAssert.class, short[].class, actual);
}
/**
* Creates a new instance of <code>{@link Short2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default Short2DArrayAssert then(short[][] actual) {
return proxy(Short2DArrayAssert.class, short[][].class, actual);
}
/**
* Creates a new instance of <code>{@link CharSequenceAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default CharSequenceAssert then(CharSequence actual) {
return proxy(CharSequenceAssert.class, CharSequence.class, actual);
}
/**
* Creates a new instance of <code>{@link CharSequenceAssert}</code>.
* <p>
* Use this over {@link #then(CharSequence)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.25.0
*/
default CharSequenceAssert thenCharSequence(CharSequence actual) {
return then(actual);
}
/**
* Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.11.0
*/
default CharSequenceAssert then(StringBuilder actual) {
return proxy(CharSequenceAssert.class, CharSequence.class, actual);
}
/**
* Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.11.0
*/
default CharSequenceAssert then(StringBuffer actual) {
return proxy(CharSequenceAssert.class, CharSequence.class, actual);
}
/**
* Creates a new instance of <code>{@link StringAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default StringAssert then(String actual) {
return proxy(StringAssert.class, String.class, actual);
}
/**
* Creates a new instance of <code>{@link DateAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default DateAssert then(Date actual) {
return proxy(DateAssert.class, Date.class, actual);
}
/**
* Create assertion for {@link AtomicBoolean}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
default AtomicBooleanAssert then(AtomicBoolean actual) {
return proxy(AtomicBooleanAssert.class, AtomicBoolean.class, actual);
}
/**
* Create assertion for {@link AtomicInteger}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
default AtomicIntegerAssert then(AtomicInteger actual) {
return proxy(AtomicIntegerAssert.class, AtomicInteger.class, actual);
}
/**
* Create assertion for {@link AtomicIntegerArray}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
default AtomicIntegerArrayAssert then(AtomicIntegerArray actual) {
return proxy(AtomicIntegerArrayAssert.class, AtomicIntegerArray.class, actual);
}
/**
* Create assertion for {@link AtomicIntegerFieldUpdater}.
*
* @param <OBJECT> The type of the object holding the updatable field
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <OBJECT> AtomicIntegerFieldUpdaterAssert<OBJECT> then(AtomicIntegerFieldUpdater<OBJECT> actual) {
return proxy(AtomicIntegerFieldUpdaterAssert.class, AtomicIntegerFieldUpdater.class, actual);
}
/**
* Create assertion for {@link AtomicLong}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
default AtomicLongAssert then(AtomicLong actual) {
return proxy(AtomicLongAssert.class, AtomicLong.class, actual);
}
/**
* Create assertion for {@link AtomicLongArray}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
default AtomicLongArrayAssert then(AtomicLongArray actual) {
return proxy(AtomicLongArrayAssert.class, AtomicLongArray.class, actual);
}
/**
* Create assertion for {@link AtomicLongFieldUpdater}.
*
* @param actual the actual value.
* @param <OBJECT> the type of the object holding the updatable field
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <OBJECT> AtomicLongFieldUpdaterAssert<OBJECT> then(AtomicLongFieldUpdater<OBJECT> actual) {
return proxy(AtomicLongFieldUpdaterAssert.class, AtomicLongFieldUpdater.class, actual);
}
/**
* Create assertion for {@link AtomicReference}.
*
* @param actual the actual value.
* @param <VALUE> the type of object referred to by this reference
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <VALUE> AtomicReferenceAssert<VALUE> then(AtomicReference<VALUE> actual) {
return proxy(AtomicReferenceAssert.class, AtomicReference.class, actual);
}
/**
* Create assertion for {@link AtomicReferenceArray}.
*
* @param actual the actual value.
* @param <ELEMENT> the type of object referred to by the {@link AtomicReferenceArray}.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <ELEMENT> AtomicReferenceArrayAssert<ELEMENT> then(AtomicReferenceArray<ELEMENT> actual) {
return proxy(AtomicReferenceArrayAssert.class, AtomicReferenceArray.class, actual);
}
/**
* Create assertion for {@link AtomicReferenceFieldUpdater}.
*
* @param <FIELD> The type of the field
* @param <OBJECT> the type of the object holding the updatable field
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <FIELD, OBJECT> AtomicReferenceFieldUpdaterAssert<FIELD, OBJECT> then(AtomicReferenceFieldUpdater<OBJECT, FIELD> actual) {
return proxy(AtomicReferenceFieldUpdaterAssert.class, AtomicReferenceFieldUpdater.class, actual);
}
/**
* Create assertion for {@link AtomicMarkableReference}.
*
* @param <VALUE> the type of object referred to by this reference
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <VALUE> AtomicMarkableReferenceAssert<VALUE> then(AtomicMarkableReference<VALUE> actual) {
return proxy(AtomicMarkableReferenceAssert.class, AtomicMarkableReference.class, actual);
}
/**
* Create assertion for {@link AtomicStampedReference}.
*
* @param <VALUE> the type of value referred to by this reference
* @param actual the actual value.
* @return the created assertion object.
* @since 2.7.0 / 3.7.0
*/
@SuppressWarnings("unchecked")
default <VALUE> AtomicStampedReferenceAssert<VALUE> then(AtomicStampedReference<VALUE> actual) {
return proxy(AtomicStampedReferenceAssert.class, AtomicStampedReference.class, actual);
}
/**
* Creates a new instance of <code>{@link ThrowableAssert}</code>.
*
* @param <T> the type of the actual throwable.
* @param actual the actual value.
* @return the created assertion Throwable.
*/
@SuppressWarnings("unchecked")
default <T extends Throwable> ThrowableAssert<T> then(T actual) {
return proxy(ThrowableAssert.class, Throwable.class, actual);
}
/**
* Creates a new instance of <code>{@link ThrowableAssert}</code> with a {@link SQLException}.
*
* @param <T> the type of the actual SQLException.
* @param actual the actual value.
* @return the created assertion for SQLException.
*/
@SuppressWarnings("unchecked")
default <T extends SQLException> ThrowableAssert<T> then(T actual) {
return proxy(ThrowableAssert.class, SQLException.class, actual);
}
/**
* Allows to capture and then assert on a {@link Throwable} more easily when used with Java 8 lambdas.
*
* <p>
* Java 8 example :
* <pre><code class='java'> {@literal @}Test
* default void testException() {
* BDDSoftAssertions softly = new BDDSoftAssertions();
* softly.thenThrownBy(() -> { throw new Exception("boom!"); }).isInstanceOf(Exception.class)
* .hasMessageContaining("boom");
* }</code></pre>
* <p>
* Java 7 example :
* <pre><code class='java'> BDDSoftAssertions softly = new BDDSoftAssertions();
* softly.thenThrownBy(new ThrowingCallable() {
*
* {@literal @}Override
* default Void call() throws Exception {
* throw new Exception("boom!");
* }
*
* }).isInstanceOf(Exception.class)
* .hasMessageContaining("boom");</code></pre>
* <p>
* If the provided {@link ThrowingCallable} does not raise an exception, an error is immediately thrown,
* in that case the test description provided with {@link AbstractAssert#as(String, Object...) as(String, Object...)} is not honored.<br>
* To use a test description, use {@link Assertions#catchThrowable(ThrowingCallable)} as shown below:
* <pre><code class='java'> // assertion will fail but "display me" won't appear in the error
* softly.thenThrownBy(() -> {}).as("display me")
* .isInstanceOf(Exception.class);
*
* // assertion will fail AND "display me" will appear in the error
* Throwable thrown = catchThrowable(() -> {});
* softly.then(thrown).as("display me")
* .isInstanceOf(Exception.class); </code></pre>
* <p>
* Alternatively you can also use {@code thenCode(ThrowingCallable)} for the test description provided
* with {@link AbstractAssert#as(String, Object...) as(String, Object...)} to always be honored.
*
* @param shouldRaiseThrowable The {@link ThrowingCallable} or lambda with the code that should raise the throwable.
* @return The captured exception or <code>null</code> if none was raised by the callable.
*/
@CanIgnoreReturnValue
default AbstractThrowableAssert<?, ? extends Throwable> thenThrownBy(ThrowingCallable shouldRaiseThrowable) {
return then(catchThrowable(shouldRaiseThrowable)).hasBeenThrown();
}
/**
* Allows to capture and then assert on a {@link Throwable} like {@code thenThrownBy(ThrowingCallable)} but this method
* let you set the assertion description the same way you do with {@link AbstractAssert#as(String, Object...) as(String, Object...)}.
* <p>
* Example:
* <pre><code class='java'> {@literal @}Test
* default void testException() {
* BDDSoftAssertions softly = new BDDSoftAssertions();
* // if this assertion failed (but it doesn't), the error message would start with [Test explosive code]
* softly.thenThrownBy(() -> { throw new IOException("boom!") }, "Test explosive code")
* .isInstanceOf(IOException.class)
* .hasMessageContaining("boom");
* }</code></pre>
* <p>
* If the provided {@link ThrowingCallable ThrowingCallable} does not raise an exception, an error is immediately thrown.
* <p>
* The test description provided is honored but not the one with {@link AbstractAssert#as(String, Object...) as(String, Object...)}, example:
* <pre><code class='java'> // assertion will fail but "display me" won't appear in the error message
* softly.thenThrownBy(() -> {}).as("display me")
* .isInstanceOf(Exception.class);
*
* // assertion will fail AND "display me" will appear in the error message
* softly.thenThrownBy(() -> {}, "display me")
* .isInstanceOf(Exception.class);</code></pre>
*
* @param shouldRaiseThrowable The {@link ThrowingCallable} or lambda with the code that should raise the throwable.
* @param description the new description to set.
* @param args optional parameter if description is a format String.
* @return the created {@link ThrowableAssert}.
* @since 3.9.0
*/
@CanIgnoreReturnValue
default AbstractThrowableAssert<?, ? extends Throwable> thenThrownBy(ThrowingCallable shouldRaiseThrowable,
String description, Object... args) {
return then(catchThrowable(shouldRaiseThrowable)).as(description, args).hasBeenThrown();
}
/**
* Allows to capture and then assert on a {@link Throwable} more easily when used with Java 8 lambdas.
*
* <p>
* Example :
* </p>
*
* <pre><code class='java'> ThrowingCallable callable = () -> {
* throw new Exception("boom!");
* };
*
* // assertion succeeds
* thenCode(callable).isInstanceOf(Exception.class)
* .hasMessageContaining("boom");
*
* // assertion fails
* thenCode(callable).doesNotThrowAnyException();</code></pre>
* <p>
* Contrary to {@code thenThrownBy(ThrowingCallable)} the test description provided with
* {@link AbstractAssert#as(String, Object...) as(String, Object...)} is always honored as shown below.
*
* <pre><code class='java'> ThrowingCallable doNothing = () -> {
* // do nothing
* };
*
* // assertion fails and "display me" appears in the assertion error
* thenCode(doNothing).as("display me")
* .isInstanceOf(Exception.class);</code></pre>
* <p>
* This method was not named {@code then} because the java compiler reported it ambiguous when used directly with a lambda :(
*
* @param shouldRaiseOrNotThrowable The {@link ThrowingCallable} or lambda with the code that should raise the throwable.
* @return The captured exception or <code>null</code> if none was raised by the callable.
* @since 3.7.0
*/
default AbstractThrowableAssert<?, ? extends Throwable> thenCode(ThrowingCallable shouldRaiseOrNotThrowable) {
return then(catchThrowable(shouldRaiseOrNotThrowable));
}
/**
* Creates a new instance of <code>{@link org.assertj.core.api.ObjectAssert}</code> for any object.
*
* <p>
* This overload is useful, when an overloaded method of then(...) takes precedence over the generic {@link #then(Object)}.
* </p>
*
* <p>
* Example:
* </p>
* <p>
* Cast necessary because {@link #then(List)} "forgets" actual type:
* <pre>{@code then(new LinkedList<>(asList("abc"))).matches(list -> ((Deque<String>) list).getFirst().equals("abc")); }</pre>
* No cast needed, but also no additional list assertions:
* <pre>{@code thenObject(new LinkedList<>(asList("abc"))).matches(list -> list.getFirst().equals("abc")); }</pre>
*
* @param <T> the type of the actual value.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.12.0
*/
default <T> ObjectAssert<T> thenObject(T actual) {
return then(actual);
}
/**
* Creates a new instance of <code>{@link UriAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default UriAssert then(URI actual) {
return proxy(UriAssert.class, URI.class, actual);
}
/**
* Creates a new instance of <code>{@link UrlAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default AbstractUrlAssert<?> then(URL actual) {
return proxy(UrlAssert.class, URL.class, actual);
}
  /**
   * Entry point to check that an exception of type T is thrown by a given {@code throwingCallable},
   * which allows chaining assertions on the thrown exception.
   * <p>
   * Example:
   * <pre><code class='java'> softly.thenExceptionOfType(IOException.class)
   *                          .isThrownBy(() -> { throw new IOException("boom!"); })
   *                          .withMessage("boom!"); </code></pre>
   * <p>
   * This method is similar to {@link #thenThrownBy(ThrowingCallable)} but often reads more naturally.
   *
   * @param <T> the Throwable type.
   * @param throwableType the Throwable type class.
   * @return the created {@link ThrowableTypeAssert}.
   * @since 3.23.0
   */
  default <T extends Throwable> ThrowableTypeAssert<T> thenExceptionOfType(final Class<T> throwableType) {
    return new SoftThrowableTypeAssert<>(throwableType, this);
  }
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link RuntimeException}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<RuntimeException> thenRuntimeException() {
return thenExceptionOfType(RuntimeException.class);
}
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link NullPointerException}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<NullPointerException> thenNullPointerException() {
return thenExceptionOfType(NullPointerException.class);
}
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link IllegalArgumentException}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<IllegalArgumentException> thenIllegalArgumentException() {
return thenExceptionOfType(IllegalArgumentException.class);
}
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link IOException}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<IOException> thenIOException() {
return thenExceptionOfType(IOException.class);
}
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link IllegalStateException}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<IllegalStateException> thenIllegalStateException() {
return thenExceptionOfType(IllegalStateException.class);
}
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link Exception}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<Exception> thenException() {
return thenExceptionOfType(Exception.class);
}
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link ReflectiveOperationException}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<ReflectiveOperationException> thenReflectiveOperationException() {
return thenExceptionOfType(ReflectiveOperationException.class);
}
/**
* Alias for {@link #thenExceptionOfType(Class)} for {@link IndexOutOfBoundsException}.
*
* @return the created {@link ThrowableTypeAssert}.
* @since 3.23.0
*/
default ThrowableTypeAssert<IndexOutOfBoundsException> thenIndexOutOfBoundsException() {
return thenExceptionOfType(IndexOutOfBoundsException.class);
}
/**
* Creates a new, proxied instance of a {@link PathAssert}
*
* @param actual the path
* @return the created assertion object
*/
default PathAssert then(Path actual) {
return proxy(PathAssert.class, Path.class, actual);
}
/**
* Creates a new instance of {@link PathAssert}
* <p>
* Use this over {@link #then(Path)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param actual the path to test
* @return the created assertion object
* @since 3.23.0
*/
default AbstractPathAssert<?> thenPath(Path actual) {
return then(actual);
}
/**
* Create assertion for {@link Optional}.
*
* @param actual the actual value.
* @param <VALUE> the type of the value contained in the {@link Optional}.
*
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <VALUE> OptionalAssert<VALUE> then(Optional<VALUE> actual) {
return proxy(OptionalAssert.class, Optional.class, actual);
}
/**
* Create assertion for {@link java.util.OptionalDouble}.
*
* @param actual the actual value.
*
* @return the created assertion object.
*/
default OptionalDoubleAssert then(OptionalDouble actual) {
return proxy(OptionalDoubleAssert.class, OptionalDouble.class, actual);
}
/**
* Create assertion for {@link java.util.OptionalInt}.
*
* @param actual the actual value.
*
* @return the created assertion object.
*/
default OptionalIntAssert then(OptionalInt actual) {
return proxy(OptionalIntAssert.class, OptionalInt.class, actual);
}
/**
* Create assertion for {@link java.util.OptionalLong}.
*
* @param actual the actual value.
*
* @return the created assertion object.
*/
default OptionalLongAssert then(OptionalLong actual) {
return proxy(OptionalLongAssert.class, OptionalLong.class, actual);
}
/**
* Create assertion for {@link java.util.regex.Matcher}.
*
* @param actual the actual matcher
*
* @return the created assertion object.
*/
@CheckReturnValue
default MatcherAssert then(Matcher actual) {
return proxy(MatcherAssert.class, Matcher.class, actual);
}
/**
* Creates a new instance of <code>{@link TemporalAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.26.1
*/
default TemporalAssert thenTemporal(Temporal actual) {
return proxy(TemporalAssert.class, Temporal.class, actual);
}
/**
* Creates a new instance of <code>{@link LocalDateAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default LocalDateAssert then(LocalDate actual) {
return proxy(LocalDateAssert.class, LocalDate.class, actual);
}
/**
* Creates a new instance of <code>{@link YearMonthAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.26.0
*/
default YearMonthAssert then(YearMonth actual) {
return proxy(YearMonthAssert.class, YearMonth.class, actual);
}
/**
* Creates a new instance of <code>{@link LocalDateTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default LocalDateTimeAssert then(LocalDateTime actual) {
return proxy(LocalDateTimeAssert.class, LocalDateTime.class, actual);
}
/**
* Creates a new instance of <code>{@link ZonedDateTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default ZonedDateTimeAssert then(ZonedDateTime actual) {
return proxy(ZonedDateTimeAssert.class, ZonedDateTime.class, actual);
}
/**
* Creates a new instance of <code>{@link LocalTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default LocalTimeAssert then(LocalTime actual) {
return proxy(LocalTimeAssert.class, LocalTime.class, actual);
}
/**
* Creates a new instance of <code>{@link OffsetTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default OffsetTimeAssert then(OffsetTime actual) {
return proxy(OffsetTimeAssert.class, OffsetTime.class, actual);
}
/**
* Creates a new instance of <code>{@link OffsetDateTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
default OffsetDateTimeAssert then(OffsetDateTime actual) {
return proxy(OffsetDateTimeAssert.class, OffsetDateTime.class, actual);
}
/**
* Creates a new instance of <code>{@link InstantAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.7.0
*/
default InstantAssert then(Instant actual) {
return proxy(InstantAssert.class, Instant.class, actual);
}
/**
* Creates a new instance of <code>{@link DurationAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.15.0
*/
default DurationAssert then(Duration actual) {
return proxy(DurationAssert.class, Duration.class, actual);
}
/**
* Creates a new instance of <code>{@link PeriodAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
default PeriodAssert then(Period actual) {
return proxy(PeriodAssert.class, Period.class, actual);
}
/**
* Create assertion for {@link java.util.concurrent.CompletableFuture}.
*
* @param actual the actual value.
* @param <RESULT> the type of the value contained in the {@link java.util.concurrent.CompletableFuture}.
*
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <RESULT> CompletableFutureAssert<RESULT> then(CompletableFuture<RESULT> actual) {
return proxy(CompletableFutureAssert.class, CompletableFuture.class, actual);
}
/**
* Create assertion for {@link java.util.concurrent.CompletionStage} by converting it to a {@link CompletableFuture} and returning a {@link CompletableFutureAssert}.
* <p>
* If the given {@link java.util.concurrent.CompletionStage} is null, the {@link CompletableFuture} in the returned {@link CompletableFutureAssert} will also be null.
*
* @param actual the actual value.
* @param <RESULT> the type of the value contained in the {@link java.util.concurrent.CompletionStage}.
*
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <RESULT> CompletableFutureAssert<RESULT> then(CompletionStage<RESULT> actual) {
return proxy(CompletableFutureAssert.class, CompletionStage.class, actual);
}
/**
* Create assertion for {@link Predicate}.
*
* @param actual the actual value.
* @param <T> the type of the value contained in the {@link Predicate}.
*
* @return the created assertion object.
*
* @since 3.5.0
*/
@SuppressWarnings("unchecked")
default <T> PredicateAssert<T> then(Predicate<T> actual) {
return proxy(PredicateAssert.class, Predicate.class, actual);
}
/**
* Create assertion for {@link Predicate}.
* <p>
* Use this over {@link #then(Predicate)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
*
* @param actual the actual value.
* @param <T> the type of the value contained in the {@link Predicate}.
* @return the created assertion object.
* @since 3.23.0
*/
default <T> PredicateAssert<T> thenPredicate(Predicate<T> actual) {
return then(actual);
}
  /**
   * Create assertion for {@link IntPredicate}.
   *
   * @param actual the actual value.
   * @return the created assertion object.
   * @since 3.5.0
   */
  default IntPredicateAssert then(IntPredicate actual) {
    return proxy(IntPredicateAssert.class, IntPredicate.class, actual);
  }
  /**
   * Create assertion for {@link DoublePredicate}.
   *
   * @param actual the actual value.
   * @return the created assertion object.
   * @since 3.5.0
   */
  default DoublePredicateAssert then(DoublePredicate actual) {
    return proxy(DoublePredicateAssert.class, DoublePredicate.class, actual);
  }
  /**
   * Create assertion for {@link LongPredicate}.
   *
   * @param actual the actual value.
   * @return the created assertion object.
   * @since 3.5.0
   */
  default LongPredicateAssert then(LongPredicate actual) {
    return proxy(LongPredicateAssert.class, LongPredicate.class, actual);
  }
/**
* Creates a new instance of <code>{@link ListAssert}</code> from the given {@link Stream}.
* <p>
* <b>Be aware that to create the returned {@link ListAssert} the given the {@link Stream} is consumed so it won't be
* possible to use it again.</b> Calling multiple methods on the returned {@link ListAssert} is safe as it only
* interacts with the {@link List} built from the {@link Stream}. The stream is closed after the list is built.
*
* @param <ELEMENT> the type of elements.
* @param actual the actual {@link Stream} value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default <ELEMENT> AbstractListAssert<?, List<? extends ELEMENT>, ELEMENT, ObjectAssert<ELEMENT>> then(Stream<? extends ELEMENT> actual) {
return proxy(ListAssert.class, Stream.class, actual);
}
/**
* Creates a new instance of <code>{@link ListAssert}</code> from the given {@link Stream}.
* <p>
* Use this over {@link #then(Stream)} in case of ambiguous method resolution when the object under test
* implements several interfaces Assertj provides <code>then</code> for.
* <p>
* <b>Be aware that to create the returned {@link ListAssert} the given the {@link Stream} is consumed so it won't be
* possible to use it again.</b> Calling multiple methods on the returned {@link ListAssert} is safe as it only
* interacts with the {@link List} built from the {@link Stream}. The stream is closed after the list is built.
*
* @param <ELEMENT> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.23.0
*/
@SuppressWarnings("unchecked")
default <ELEMENT> ListAssert<ELEMENT> thenStream(Stream<? extends ELEMENT> actual) {
return proxy(ListAssert.class, Stream.class, actual);
}
/**
* Creates a new instance of <code>{@link ListAssert}</code> from the given {@link DoubleStream}.
* <p>
* <b>Be aware that to create the returned {@link ListAssert} the given the {@link DoubleStream} is consumed so it won't be
* possible to use it again.</b> Calling multiple methods on the returned {@link ListAssert} is safe as it only
* interacts with the {@link List} built from the {@link DoubleStream}. The stream is closed after the list is built.
*
* @param actual the actual {@link DoubleStream} value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default AbstractListAssert<?, List<? extends Double>, Double, ObjectAssert<Double>> then(DoubleStream actual) {
return proxy(ListAssert.class, DoubleStream.class, actual);
}
/**
* Creates a new instance of <code>{@link ListAssert}</code> from the given {@link LongStream}.
* <p>
* <b>Be aware that to create the returned {@link ListAssert} the given the {@link LongStream} is consumed so it won't be
* possible to use it again.</b> Calling multiple methods on the returned {@link ListAssert} is safe as it only
* interacts with the {@link List} built from the {@link LongStream}. The stream is closed after the list is built.
*
* @param actual the actual {@link LongStream} value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default AbstractListAssert<?, List<? extends Long>, Long, ObjectAssert<Long>> then(LongStream actual) {
return proxy(ListAssert.class, LongStream.class, actual);
}
/**
* Creates a new instance of <code>{@link ListAssert}</code> from the given {@link IntStream}.
* <p>
* <b>Be aware that to create the returned {@link ListAssert} the given the {@link IntStream} is consumed so it won't be
* possible to use it again.</b> Calling multiple methods on the returned {@link ListAssert} is safe as it only
* interacts with the {@link List} built from the {@link IntStream}. The stream is closed after the list is built.
*
* @param actual the actual {@link IntStream} value.
* @return the created assertion object.
*/
@SuppressWarnings("unchecked")
default AbstractListAssert<?, List<? extends Integer>, Integer, ObjectAssert<Integer>> then(IntStream actual) {
return proxy(ListAssert.class, IntStream.class, actual);
}
/**
* Creates a new instance of <code>{@link SpliteratorAssert}</code> from the given {@link Spliterator}.
*
* @param <ELEMENT> the type of elements.
* @param actual the actual {@link Spliterator} value.
* @return the created assertion object.
* @since 3.14.0
*/
@SuppressWarnings("unchecked")
default <ELEMENT> SpliteratorAssert<ELEMENT> then(Spliterator<ELEMENT> actual) {
return proxy(SpliteratorAssert.class, Spliterator.class, actual);
}
/**
* Creates a new instance of <code>{@link HashSetAssert}</code>.
*
* @param <ELEMENT> the type of elements.
* @param actual the actual value.
* @return the created assertion object.
* @since 4.0.0
*/
default <ELEMENT> HashSetAssert<ELEMENT> then(HashSet<? extends ELEMENT> actual) {
return proxy(HashSetAssert.class, HashSet.class, actual);
}
/**
* Create assertion for {@link LongAdder}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.16.0
*/
default LongAdderAssert then(LongAdder actual) {
return proxy(LongAdderAssert.class, LongAdder.class, actual);
}
}
| BDDSoftAssertionsProvider |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/web/servlet/support/ErrorPageFilterTests.java | {
"start": 20582,
"end": 20764
} | interface ____ {
void handle(HttpServletRequest request, HttpServletResponse response, Chain chain)
throws IOException, ServletException;
}
@FunctionalInterface
| FilterHandler |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/ondemandload/LazyLoadingTest.java | {
"start": 1229,
"end": 5374
} | class ____ {
@BeforeEach
public void setUpData(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Store store = new Store( 1 )
.setName( "Acme Super Outlet" );
session.persist( store );
Product product = new Product( "007" )
.setName( "widget" )
.setDescription( "FooBar" );
session.persist( product );
store.addInventoryProduct( product )
.setQuantity( 10L )
.setStorePrice( new BigDecimal( 500 ) );
}
);
}
@AfterEach
public void cleanUpData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testLazyCollectionLoadingWithClearedSession(SessionFactoryScope scope) {
final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
scope.inTransaction(
session -> {
// first load the store, making sure collection is not initialized
Store store = (Store) session.get( Store.class, 1 );
assertNotNull( store );
assertFalse( Hibernate.isInitialized( store.getInventories() ) );
assertThat( statistics.getSessionOpenCount(), is( 1l ) );
assertThat( statistics.getSessionCloseCount(), is( 0l ) );
// then clear session and try to initialize collection
session.clear();
store.getInventories().size();
assertTrue( Hibernate.isInitialized( store.getInventories() ) );
assertThat( statistics.getSessionOpenCount(), is( 2l ) );
assertThat( statistics.getSessionCloseCount(), is( 1l ) );
session.clear();
store = (Store) session.get( Store.class, 1 );
assertNotNull( store );
assertFalse( Hibernate.isInitialized( store.getInventories() ) );
assertThat( statistics.getSessionOpenCount(), is( 2l ) );
assertThat( statistics.getSessionCloseCount(), is( 1l ) );
session.clear();
store.getInventories().iterator();
assertTrue( Hibernate.isInitialized( store.getInventories() ) );
assertThat( statistics.getSessionOpenCount(), is( 3l ) );
assertThat( statistics.getSessionCloseCount(), is( 2l ) );
}
);
}
@Test
public void testLazyCollectionLoadingWithClosedSession(SessionFactoryScope scope) {
final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
Store s = scope.fromTransaction(
session -> {
// first load the store, making sure collection is not initialized
Store store = session.get( Store.class, 1 );
assertNotNull( store );
assertFalse( Hibernate.isInitialized( store.getInventories() ) );
assertThat( statistics.getSessionOpenCount(), is( 1l ) );
assertThat( statistics.getSessionCloseCount(), is( 0l ) );
return store;
}
);
// close the session and try to initialize collection
assertThat( statistics.getSessionOpenCount(), is( 1l ) );
assertThat( statistics.getSessionCloseCount(), is( 1l ) );
s.getInventories().size();
assertTrue( Hibernate.isInitialized( s.getInventories() ) );
assertThat( statistics.getSessionOpenCount(), is( 2l ) );
assertThat( statistics.getSessionCloseCount(), is( 2l ) );
}
@Test
public void testLazyEntityLoadingWithClosedSession(SessionFactoryScope scope) {
final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.clear();
Store s = scope.fromTransaction(
session -> {
// first load the store, making sure it is not initialized
Store store = session.getReference( Store.class, 1 );
assertNotNull( store );
assertFalse( Hibernate.isInitialized( store ) );
assertThat( statistics.getSessionOpenCount(), is( 1l ) );
assertThat( statistics.getSessionCloseCount(), is( 0l ) );
return store;
}
);
// close the session and try to initialize store
assertThat( statistics.getSessionOpenCount(), is( 1l ) );
assertThat( statistics.getSessionCloseCount(), is( 1l ) );
s.getName();
assertTrue( Hibernate.isInitialized( s ) );
assertThat( statistics.getSessionOpenCount(), is( 2l ) );
assertThat( statistics.getSessionCloseCount(), is( 2l ) );
}
}
| LazyLoadingTest |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-class-loader/maven-it-plugin-class-loader/src/main/java/org/apache/maven/plugin/coreit/InstanceofMojo.java | {
"start": 4190,
"end": 4834
} | class ____ "
+ object.getClass().getClassLoader());
}
instanceofProperties.setProperty(
expression.replace('/', '.'), Boolean.toString(type.isInstance(object)));
}
}
if (components != null && !components.isEmpty()) {
for (Object object : components) {
getLog().info("[MAVEN-CORE-IT-LOG] Checking component " + object);
getLog().info("[MAVEN-CORE-IT-LOG] Loaded class "
+ object.getClass().getName());
getLog().info("[MAVEN-CORE-IT-LOG] Loaded | from |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/JavaType.java | {
"start": 509,
"end": 894
} | class ____
extends ResolvedType
implements java.io.Serializable,
java.lang.reflect.Type
{
private static final long serialVersionUID = 1;
/**
* This is the nominal type-erased Class that would be close to the
* type represented (but not exactly type, due to type erasure: type
* instance may have more information on this).
* May be an | JavaType |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/EntityTypeChangeAuditTest.java | {
"start": 1218,
"end": 3739
} | class ____ extends EntityManagerFactoryBasedFunctionalTest {
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {
Customer.class,
CustomTrackingRevisionEntity.class
};
}
@Test
public void test() {
try (final EntityManagerFactory testEmf = produceEntityManagerFactory()) {
testEmf.runInTransaction( entityManager -> {
Customer customer = new Customer();
customer.setId( 1L );
customer.setFirstName( "John" );
customer.setLastName( "Doe" );
entityManager.persist( customer );
} );
testEmf.runInTransaction( entityManager -> {
//tag::envers-tracking-modified-entities-queries-example1[]
assertThat(
AuditReaderFactory
.get( entityManager )
.getCrossTypeRevisionChangesReader()
.findEntityTypes( 1 )
.iterator().next()
.getFirst()
).isEqualTo( "org.hibernate.orm.test.envers.EntityTypeChangeAuditTest$Customer" );
//end::envers-tracking-modified-entities-queries-example1[]
} );
try (EntityManagerFactory entityManagerFactory = buildEntityManagerFactory()) {
final EntityManagerFactory emf = entityManagerFactory;
entityManagerFactory.runInTransaction( entityManager -> {
ApplicationCustomer customer = entityManager.find( ApplicationCustomer.class, 1L );
customer.setLastName( "Doe Jr." );
} );
entityManagerFactory.runInTransaction( entityManager -> {
//tag::envers-tracking-modified-entities-queries-example2[]
assertThat(
AuditReaderFactory
.get( entityManager )
.getCrossTypeRevisionChangesReader()
.findEntityTypes( 2 )
.iterator().next()
.getFirst()
).isEqualTo( "org.hibernate.orm.test.envers.EntityTypeChangeAuditTest$ApplicationCustomer" );
//end::envers-tracking-modified-entities-queries-example2[]
} );
}
}
}
private EntityManagerFactory buildEntityManagerFactory() {
Map<Object, Object> settings = buildSettings();
settings.put(
AvailableSettings.LOADED_CLASSES,
Arrays.asList(
ApplicationCustomer.class,
CustomTrackingRevisionEntity.class
)
);
settings.put( AvailableSettings.HBM2DDL_AUTO, "update" );
return Bootstrap.getEntityManagerFactoryBuilder(
new TestingPersistenceUnitDescriptorImpl( getClass().getSimpleName() ),
settings )
.build();
}
//tag::envers-tracking-modified-entities-revchanges-before-rename-example[]
@Audited
@Entity(name = "Customer")
public static | EntityTypeChangeAuditTest |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/model/source/Method.java | {
"start": 1308,
"end": 4570
} | interface ____ processed
* but by another mapper imported via {@code Mapper#users()}.
*
* @return The declaring mapper type
*/
Type getDeclaringMapper();
/**
* Returns then name of the method.
*
* @return method name
*/
String getName();
/**
* In contrast to {@link #getSourceParameters()} this method returns all parameters
*
* @return all parameters
*/
List<Parameter> getParameters();
/**
* returns the list of 'true' source parameters excluding the parameter(s) that are designated as target, target
* type or context parameter.
*
* @return list of 'true' source parameters
*/
List<Parameter> getSourceParameters();
/**
* returns the list of mapping context parameters, i.e. those parameters that are annotated with
* {@link org.mapstruct.Context}.
*
* @return list of context parameters
*/
List<Parameter> getContextParameters();
/**
* @return a mapping between {@link #getContextParameters()} to factory and lifecycle methods provided by them.
*/
ParameterProvidedMethods getContextProvidedMethods();
/**
* Returns the parameter designated as mapping target (if present) {@link org.mapstruct.MappingTarget}
*
* @return mapping target parameter (when present) null otherwise.
*/
Parameter getMappingTargetParameter();
/**
* Returns whether the method is designated as bean factory for
* mapping target {@link org.mapstruct.ObjectFactory }
*
* @return true if it is a target bean factory.
*/
boolean isObjectFactory();
/**
* Returns the parameter designated as target type (if present) {@link org.mapstruct.TargetType }
*
* @return target type parameter (when present) null otherwise.
*/
Parameter getTargetTypeParameter();
/**
* Returns the {@link Accessibility} of this method.
*
* @return the {@link Accessibility} of this method
*/
Accessibility getAccessibility();
/**
* Returns the return type of the method
*
* @return return type
*/
Type getReturnType();
/**
* Returns all exceptions thrown by this method
*
* @return exceptions thrown
*/
List<Type> getThrownTypes();
/**
* Returns the type of the result. The result is defined as the type of the parameter designated with
* {@link org.mapstruct.MappingTarget}, or in absence the return type.
*
* @return result type
*/
Type getResultType();
/**
*
* @return the names of the parameters of this mapping method
*/
List<String> getParameterNames();
/**
* Whether this method overrides an abstract method.
*
* @return true when an abstract method is overridden.
*/
boolean overridesMethod();
ExecutableElement getExecutable();
/**
* Whether this method is static or an instance method
*
* @return true when static.
*/
boolean isStatic();
/**
* Whether this method is Java 8 default method
*
* @return true when Java 8 default method
*/
boolean isDefault();
/**
*
* @return the Type ( | currently |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/RouteConfigurationsDefinition.java | {
"start": 1512,
"end": 4393
} | class ____ extends OptionalIdentifiedDefinition<RouteConfigurationsDefinition>
implements RouteConfigurationContainer, ResourceAware {
@XmlTransient
private CamelContext camelContext;
@XmlTransient
private Resource resource;
@XmlElementRef
private List<RouteConfigurationDefinition> routeConfigurations = new ArrayList<>();
public RouteConfigurationsDefinition() {
}
@Override
public String getShortName() {
return "routeConfigurations";
}
@Override
public String getLabel() {
return "RouteConfigurations " + getId();
}
@Override
public String toString() {
return "RouteConfigurations";
}
public List<RouteConfigurationDefinition> getRouteConfigurations() {
return routeConfigurations;
}
public void setRouteConfigurations(List<RouteConfigurationDefinition> routeConfigurations) {
this.routeConfigurations = routeConfigurations;
}
public CamelContext getCamelContext() {
return camelContext;
}
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
}
@Override
public Resource getResource() {
return resource;
}
@Override
public void setResource(Resource resource) {
this.resource = resource;
}
// Fluent API
// -------------------------------------------------------------------------
/**
* Creates a new route configuration
*
* @return the builder
*/
public RouteConfigurationDefinition routeConfiguration() {
RouteConfigurationDefinition config = createRouteConfiguration(null);
getRouteConfigurations().add(config);
return config;
}
/**
* Creates a new route configuration
*
* @return the builder
*/
public RouteConfigurationDefinition routeConfiguration(String id) {
RouteConfigurationDefinition config = createRouteConfiguration(id);
getRouteConfigurations().add(config);
return config;
}
/**
* Adds the route configuration
*
* @return the builder
*/
public RouteConfigurationDefinition routeConfiguration(RouteConfigurationDefinition config) {
getRouteConfigurations().add(config);
return config;
}
// Implementation methods
// -------------------------------------------------------------------------
protected RouteConfigurationDefinition createRouteConfiguration(String id) {
RouteConfigurationDefinition config = new RouteConfigurationDefinition();
if (id != null) {
config.setId(id);
}
if (resource != null) {
config.setResource(resource);
}
CamelContextAware.trySetCamelContext(config, camelContext);
return config;
}
}
| RouteConfigurationsDefinition |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/MapKeyDeserialization3143Test.java | {
"start": 1688,
"end": 3276
} | class ____ {
protected String value;
public static Key3143Ctor valueOf(String id) {
return new Key3143Ctor(id.toUpperCase());
}
@JsonCreator
private Key3143Ctor(String v) {
value = v;
}
}
private final ObjectMapper MAPPER = newJsonMapper();
// [databind#3143]
@Test
public void testKeyWithCtorAndCreator3143() throws Exception
{
// Use Constructor if annotated:
Map<Key3143Ctor,Integer> map = MAPPER.readValue("{\"bar\":3}",
new TypeReference<Map<Key3143Ctor,Integer>>() {} );
assertEquals(1, map.size());
assertEquals("bar", map.keySet().iterator().next().value);
}
// [databind#3143]
@Test
public void testKeyWith2Creators3143() throws Exception
{
// Select explicitly annotated factory method
Map<Key3143Factories,Integer> map = MAPPER.readValue("{\"Foo\":3}",
new TypeReference<Map<Key3143Factories,Integer>>() {} );
assertEquals(1, map.size());
assertEquals("foo", map.keySet().iterator().next().value);
}
// [databind#3143]
@Test
public void testKeyWithCreatorConflicts3143() throws Exception
{
try {
MAPPER.readValue("{\"Foo\":3}",
new TypeReference<Map<Key3143FactoriesFail,Integer>>() {} );
fail("Should not pass");
} catch (InvalidDefinitionException e) {
verifyException(e, "Multiple");
verifyException(e, "Creator factory methods");
}
}
}
| Key3143Ctor |
java | netty__netty | codec-xml/src/main/java/io/netty/handler/codec/xml/XmlDecoder.java | {
"start": 1312,
"end": 5623
} | class ____ extends ByteToMessageDecoder {
private static final AsyncXMLInputFactory XML_INPUT_FACTORY = new InputFactoryImpl();
private static final XmlDocumentEnd XML_DOCUMENT_END = XmlDocumentEnd.INSTANCE;
private final AsyncXMLStreamReader<AsyncByteArrayFeeder> streamReader = XML_INPUT_FACTORY.createAsyncForByteArray();
private final AsyncByteArrayFeeder streamFeeder = streamReader.getInputFeeder();
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
byte[] buffer = new byte[in.readableBytes()];
in.readBytes(buffer);
try {
streamFeeder.feedInput(buffer, 0, buffer.length);
} catch (XMLStreamException exception) {
in.skipBytes(in.readableBytes());
throw exception;
}
while (!streamFeeder.needMoreInput()) {
int type = streamReader.next();
switch (type) {
case XMLStreamConstants.START_DOCUMENT:
out.add(new XmlDocumentStart(streamReader.getEncoding(), streamReader.getVersion(),
streamReader.isStandalone(), streamReader.getCharacterEncodingScheme()));
break;
case XMLStreamConstants.END_DOCUMENT:
out.add(XML_DOCUMENT_END);
break;
case XMLStreamConstants.START_ELEMENT:
XmlElementStart elementStart = new XmlElementStart(streamReader.getLocalName(),
streamReader.getName().getNamespaceURI(), streamReader.getPrefix());
for (int x = 0; x < streamReader.getAttributeCount(); x++) {
XmlAttribute attribute = new XmlAttribute(streamReader.getAttributeType(x),
streamReader.getAttributeLocalName(x), streamReader.getAttributePrefix(x),
streamReader.getAttributeNamespace(x), streamReader.getAttributeValue(x));
elementStart.attributes().add(attribute);
}
for (int x = 0; x < streamReader.getNamespaceCount(); x++) {
XmlNamespace namespace = new XmlNamespace(streamReader.getNamespacePrefix(x),
streamReader.getNamespaceURI(x));
elementStart.namespaces().add(namespace);
}
out.add(elementStart);
break;
case XMLStreamConstants.END_ELEMENT:
XmlElementEnd elementEnd = new XmlElementEnd(streamReader.getLocalName(),
streamReader.getName().getNamespaceURI(), streamReader.getPrefix());
for (int x = 0; x < streamReader.getNamespaceCount(); x++) {
XmlNamespace namespace = new XmlNamespace(streamReader.getNamespacePrefix(x),
streamReader.getNamespaceURI(x));
elementEnd.namespaces().add(namespace);
}
out.add(elementEnd);
break;
case XMLStreamConstants.PROCESSING_INSTRUCTION:
out.add(new XmlProcessingInstruction(streamReader.getPIData(), streamReader.getPITarget()));
break;
case XMLStreamConstants.CHARACTERS:
out.add(new XmlCharacters(streamReader.getText()));
break;
case XMLStreamConstants.COMMENT:
out.add(new XmlComment(streamReader.getText()));
break;
case XMLStreamConstants.SPACE:
out.add(new XmlSpace(streamReader.getText()));
break;
case XMLStreamConstants.ENTITY_REFERENCE:
out.add(new XmlEntityReference(streamReader.getLocalName(), streamReader.getText()));
break;
case XMLStreamConstants.DTD:
out.add(new XmlDTD(streamReader.getText()));
break;
case XMLStreamConstants.CDATA:
out.add(new XmlCdata(streamReader.getText()));
break;
}
}
}
}
| XmlDecoder |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/interceptor/TransactionAspectSupport.java | {
"start": 33745,
"end": 40904
} | class ____ {
private final ReactiveAdapter adapter;
public ReactiveTransactionSupport(ReactiveAdapter adapter) {
this.adapter = adapter;
}
public Object invokeWithinTransaction(Method method, @Nullable Class<?> targetClass,
InvocationCallback invocation, @Nullable TransactionAttribute txAttr, ReactiveTransactionManager rtm) {
String joinpointIdentification = methodIdentification(method, targetClass, txAttr);
// For Mono and suspending functions not returning kotlinx.coroutines.flow.Flow
if (Mono.class.isAssignableFrom(method.getReturnType()) || (KotlinDetector.isSuspendingFunction(method) &&
!COROUTINES_FLOW_CLASS_NAME.equals(new MethodParameter(method, -1).getParameterType().getName()))) {
return TransactionContextManager.currentContext().flatMap(context ->
Mono.<Object, ReactiveTransactionInfo>usingWhen(
createTransactionIfNecessary(rtm, txAttr, joinpointIdentification),
tx -> {
try {
return (Mono<?>) Objects.requireNonNull(invocation.proceedWithInvocation());
}
catch (Throwable ex) {
return Mono.error(ex);
}
},
this::commitTransactionAfterReturning,
this::completeTransactionAfterThrowing,
this::rollbackTransactionOnCancel)
.onErrorMap(this::unwrapIfResourceCleanupFailure))
.contextWrite(TransactionContextManager.getOrCreateContext())
.contextWrite(TransactionContextManager.getOrCreateContextHolder());
}
// Any other reactive type, typically a Flux
return this.adapter.fromPublisher(TransactionContextManager.currentContext().flatMapMany(context ->
Flux.usingWhen(
createTransactionIfNecessary(rtm, txAttr, joinpointIdentification),
tx -> {
try {
return this.adapter.toPublisher(invocation.proceedWithInvocation());
}
catch (Throwable ex) {
return Mono.error(ex);
}
},
this::commitTransactionAfterReturning,
this::completeTransactionAfterThrowing,
this::rollbackTransactionOnCancel)
.onErrorMap(this::unwrapIfResourceCleanupFailure))
.contextWrite(TransactionContextManager.getOrCreateContext())
.contextWrite(TransactionContextManager.getOrCreateContextHolder()));
}
@SuppressWarnings("serial")
private Mono<ReactiveTransactionInfo> createTransactionIfNecessary(ReactiveTransactionManager tm,
@Nullable TransactionAttribute txAttr, final String joinpointIdentification) {
// If no name specified, apply method identification as transaction name.
if (txAttr != null && txAttr.getName() == null) {
txAttr = new DelegatingTransactionAttribute(txAttr) {
@Override
public String getName() {
return joinpointIdentification;
}
};
}
final TransactionAttribute attrToUse = txAttr;
Mono<ReactiveTransaction> tx = (attrToUse != null ? tm.getReactiveTransaction(attrToUse) : Mono.empty());
return tx.map(it -> prepareTransactionInfo(tm, attrToUse, joinpointIdentification, it)).switchIfEmpty(
Mono.defer(() -> Mono.just(prepareTransactionInfo(tm, attrToUse, joinpointIdentification, null))));
}
private ReactiveTransactionInfo prepareTransactionInfo(@Nullable ReactiveTransactionManager tm,
@Nullable TransactionAttribute txAttr, String joinpointIdentification,
@Nullable ReactiveTransaction transaction) {
ReactiveTransactionInfo txInfo = new ReactiveTransactionInfo(tm, txAttr, joinpointIdentification);
if (txAttr != null) {
// We need a transaction for this method...
if (logger.isTraceEnabled()) {
logger.trace("Getting transaction for [" + txInfo.getJoinpointIdentification() + "]");
}
// The transaction manager will flag an error if an incompatible tx already exists.
txInfo.newReactiveTransaction(transaction);
}
else {
// The TransactionInfo.hasTransaction() method will return false. We created it only
// to preserve the integrity of the ThreadLocal stack maintained in this class.
if (logger.isTraceEnabled()) {
logger.trace("Don't need to create transaction for [" + joinpointIdentification +
"]: This method isn't transactional.");
}
}
return txInfo;
}
private Mono<Void> commitTransactionAfterReturning(@Nullable ReactiveTransactionInfo txInfo) {
if (txInfo != null && txInfo.getReactiveTransaction() != null) {
if (logger.isTraceEnabled()) {
logger.trace("Completing transaction for [" + txInfo.getJoinpointIdentification() + "]");
}
return txInfo.getTransactionManager().commit(txInfo.getReactiveTransaction());
}
return Mono.empty();
}
private Mono<Void> rollbackTransactionOnCancel(@Nullable ReactiveTransactionInfo txInfo) {
if (txInfo != null && txInfo.getReactiveTransaction() != null) {
if (logger.isTraceEnabled()) {
logger.trace("Rolling back transaction for [" + txInfo.getJoinpointIdentification() + "] after cancellation");
}
return txInfo.getTransactionManager().rollback(txInfo.getReactiveTransaction());
}
return Mono.empty();
}
private Mono<Void> completeTransactionAfterThrowing(@Nullable ReactiveTransactionInfo txInfo, Throwable ex) {
if (txInfo != null && txInfo.getReactiveTransaction() != null) {
if (logger.isTraceEnabled()) {
logger.trace("Completing transaction for [" + txInfo.getJoinpointIdentification() +
"] after exception: " + ex);
}
if (txInfo.transactionAttribute != null && txInfo.transactionAttribute.rollbackOn(ex)) {
return txInfo.getTransactionManager().rollback(txInfo.getReactiveTransaction()).onErrorMap(ex2 -> {
logger.error("Application exception overridden by rollback exception", ex);
if (ex2 instanceof TransactionSystemException systemException) {
systemException.initApplicationException(ex);
}
else {
ex2.addSuppressed(ex);
}
return ex2;
}
);
}
else {
// We don't roll back on this exception.
// Will still roll back if TransactionStatus.isRollbackOnly() is true.
return txInfo.getTransactionManager().commit(txInfo.getReactiveTransaction()).onErrorMap(ex2 -> {
logger.error("Application exception overridden by commit exception", ex);
if (ex2 instanceof TransactionSystemException systemException) {
systemException.initApplicationException(ex);
}
else {
ex2.addSuppressed(ex);
}
return ex2;
}
);
}
}
return Mono.empty();
}
/**
* Unwrap the cause of a throwable, if produced by a failure
* during the async resource cleanup in {@link Flux#usingWhen}.
* @param ex the throwable to try to unwrap
*/
private Throwable unwrapIfResourceCleanupFailure(Throwable ex) {
if (ex instanceof RuntimeException && ex.getCause() != null) {
String msg = ex.getMessage();
if (msg != null && msg.startsWith("Async resource cleanup failed")) {
return ex.getCause();
}
}
return ex;
}
}
/**
* Opaque object used to hold transaction information for reactive methods.
*/
private static final | ReactiveTransactionSupport |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumDoubleEvaluator.java | {
"start": 3550,
"end": 4419
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory block;
private final Function<DriverContext, CompensatedSum> sum;
private final double p;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory block,
Function<DriverContext, CompensatedSum> sum, double p) {
this.source = source;
this.block = block;
this.sum = sum;
this.p = p;
}
@Override
public MvPSeriesWeightedSumDoubleEvaluator get(DriverContext context) {
return new MvPSeriesWeightedSumDoubleEvaluator(source, block.get(context), sum.apply(context), p, context);
}
@Override
public String toString() {
return "MvPSeriesWeightedSumDoubleEvaluator[" + "block=" + block + ", p=" + p + "]";
}
}
}
| Factory |
java | spring-projects__spring-boot | module/spring-boot-actuator-autoconfigure/src/test/java/org/springframework/boot/actuate/autoconfigure/env/EnvironmentEndpointAutoConfigurationTests.java | {
"start": 1971,
"end": 6888
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(EnvironmentEndpointAutoConfiguration.class));
@Test
void runShouldHaveEndpointBean() {
this.contextRunner.withPropertyValues("management.endpoints.web.exposure.include=env")
.withSystemProperties("dbPassword=123456", "apiKey=123456")
.run(validateSystemProperties("******", "******"));
}
@Test
void runWhenEnabledPropertyIsFalseShouldNotHaveEndpointBean() {
this.contextRunner.withPropertyValues("management.endpoint.env.enabled:false")
.run((context) -> assertThat(context).doesNotHaveBean(EnvironmentEndpoint.class));
}
@Test
void runWhenNotExposedShouldNotHaveEndpointBean() {
this.contextRunner.run((context) -> assertThat(context).doesNotHaveBean(EnvironmentEndpoint.class));
}
@Test
void customSanitizingFunctionsAreAppliedInOrder() {
this.contextRunner.withUserConfiguration(SanitizingFunctionConfiguration.class)
.withPropertyValues("management.endpoint.env.show-values: WHEN_AUTHORIZED")
.withPropertyValues("management.endpoints.web.exposure.include=env")
.withSystemProperties("custom=123456", "password=123456")
.run((context) -> {
assertThat(context).hasSingleBean(EnvironmentEndpoint.class);
EnvironmentEndpoint endpoint = context.getBean(EnvironmentEndpoint.class);
EnvironmentDescriptor env = endpoint.environment(null);
Map<String, PropertyValueDescriptor> systemProperties = getSource("systemProperties", env)
.getProperties();
PropertyValueDescriptor custom = systemProperties.get("custom");
assertThat(custom).isNotNull();
assertThat(custom.getValue()).isEqualTo("$$$111$$$");
PropertyValueDescriptor password = systemProperties.get("password");
assertThat(password).isNotNull();
assertThat(password.getValue()).isEqualTo("$$$222$$$");
});
}
@Test
@SuppressWarnings("unchecked")
void rolesCanBeConfiguredViaTheEnvironment() {
this.contextRunner.withPropertyValues("management.endpoint.env.roles: test")
.withPropertyValues("management.endpoints.web.exposure.include=env")
.withSystemProperties("dbPassword=123456", "apiKey=123456")
.run((context) -> {
assertThat(context).hasSingleBean(EnvironmentEndpointWebExtension.class);
EnvironmentEndpointWebExtension endpoint = context.getBean(EnvironmentEndpointWebExtension.class);
Set<String> roles = (Set<String>) ReflectionTestUtils.getField(endpoint, "roles");
assertThat(roles).contains("test");
});
}
@Test
void showValuesCanBeConfiguredViaTheEnvironment() {
this.contextRunner.withPropertyValues("management.endpoint.env.show-values: WHEN_AUTHORIZED")
.withPropertyValues("management.endpoints.web.exposure.include=env")
.withSystemProperties("dbPassword=123456", "apiKey=123456")
.run((context) -> {
assertThat(context).hasSingleBean(EnvironmentEndpoint.class);
assertThat(context).hasSingleBean(EnvironmentEndpointWebExtension.class);
EnvironmentEndpointWebExtension webExtension = context.getBean(EnvironmentEndpointWebExtension.class);
EnvironmentEndpoint endpoint = context.getBean(EnvironmentEndpoint.class);
assertThat(webExtension).extracting("showValues").isEqualTo(Show.WHEN_AUTHORIZED);
assertThat(endpoint).extracting("showValues").isEqualTo(Show.WHEN_AUTHORIZED);
});
}
@Test
void runWhenOnlyExposedOverJmxShouldHaveEndpointBeanWithoutWebExtension() {
this.contextRunner
.withPropertyValues("management.endpoints.web.exposure.include=info", "spring.jmx.enabled=true",
"management.endpoints.jmx.exposure.include=env")
.run((context) -> assertThat(context).hasSingleBean(EnvironmentEndpoint.class)
.doesNotHaveBean(EnvironmentEndpointWebExtension.class));
}
private ContextConsumer<AssertableApplicationContext> validateSystemProperties(String expectedPassword,
String expectedApiKey) {
return (context) -> {
assertThat(context).hasSingleBean(EnvironmentEndpoint.class);
EnvironmentEndpoint endpoint = context.getBean(EnvironmentEndpoint.class);
EnvironmentDescriptor env = endpoint.environment(null);
Map<String, PropertyValueDescriptor> systemProperties = getSource("systemProperties", env).getProperties();
PropertyValueDescriptor dbPassword = systemProperties.get("dbPassword");
assertThat(dbPassword).isNotNull();
assertThat(dbPassword.getValue()).isEqualTo(expectedPassword);
PropertyValueDescriptor apiKey = systemProperties.get("apiKey");
assertThat(apiKey).isNotNull();
assertThat(apiKey.getValue()).isEqualTo(expectedApiKey);
};
}
private PropertySourceDescriptor getSource(String name, EnvironmentDescriptor descriptor) {
return descriptor.getPropertySources()
.stream()
.filter((source) -> name.equals(source.getName()))
.findFirst()
.get();
}
@Configuration(proxyBeanMethods = false)
static | EnvironmentEndpointAutoConfigurationTests |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/topic/TopicRouteInfoManager.java | {
"start": 2191,
"end": 11115
} | class ____ {
private static final long GET_TOPIC_ROUTE_TIMEOUT = 3000L;
private static final long LOCK_TIMEOUT_MILLIS = 3000L;
private static final Logger log = LoggerFactory.getLogger(LoggerName.BROKER_LOGGER_NAME);
private final Lock lockNamesrv = new ReentrantLock();
private final ConcurrentMap<String/* Topic */, TopicRouteData> topicRouteTable = new ConcurrentHashMap<>();
private final ConcurrentMap<String/* Broker Name */, HashMap<Long/* brokerId */, String/* address */>> brokerAddrTable =
new ConcurrentHashMap<>();
private final ConcurrentMap<String/* topic */, TopicPublishInfo> topicPublishInfoTable = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, Set<MessageQueue>> topicSubscribeInfoTable = new ConcurrentHashMap<>();
private ScheduledExecutorService scheduledExecutorService;
private BrokerController brokerController;
public TopicRouteInfoManager(BrokerController brokerController) {
this.brokerController = brokerController;
}
public void start() {
this.scheduledExecutorService = ThreadUtils.newSingleThreadScheduledExecutor(new ThreadFactoryImpl("TopicRouteInfoManagerScheduledThread"));
this.scheduledExecutorService.scheduleAtFixedRate(() -> {
try {
updateTopicRouteInfoFromNameServer();
} catch (Exception e) {
log.error("ScheduledTask: failed to pull TopicRouteData from NameServer", e);
}
}, 1000, this.brokerController.getBrokerConfig().getLoadBalancePollNameServerInterval(), TimeUnit.MILLISECONDS);
}
private void updateTopicRouteInfoFromNameServer() {
final Set<String> topicSetForPopAssignment = this.topicSubscribeInfoTable.keySet();
final Set<String> topicSetForEscapeBridge = this.topicRouteTable.keySet();
final Set<String> topicsAll = Sets.union(topicSetForPopAssignment, topicSetForEscapeBridge);
for (String topic : topicsAll) {
boolean isNeedUpdatePublishInfo = topicSetForEscapeBridge.contains(topic);
boolean isNeedUpdateSubscribeInfo = topicSetForPopAssignment.contains(topic);
updateTopicRouteInfoFromNameServer(topic, isNeedUpdatePublishInfo, isNeedUpdateSubscribeInfo);
}
}
public void updateTopicRouteInfoFromNameServer(String topic, boolean isNeedUpdatePublishInfo,
boolean isNeedUpdateSubscribeInfo) {
try {
if (this.lockNamesrv.tryLock(LOCK_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS)) {
try {
final TopicRouteData topicRouteData = this.brokerController.getBrokerOuterAPI()
.getTopicRouteInfoFromNameServer(topic, GET_TOPIC_ROUTE_TIMEOUT);
if (null == topicRouteData) {
log.warn("TopicRouteInfoManager: updateTopicRouteInfoFromNameServer, getTopicRouteInfoFromNameServer return null, Topic: {}.", topic);
return;
}
if (isNeedUpdateSubscribeInfo) {
this.updateSubscribeInfoTable(topicRouteData, topic);
}
if (isNeedUpdatePublishInfo) {
this.updateTopicRouteTable(topic, topicRouteData);
}
} catch (RemotingException e) {
log.error("updateTopicRouteInfoFromNameServer Exception", e);
} catch (MQBrokerException e) {
log.error("updateTopicRouteInfoFromNameServer Exception", e);
if (!NamespaceUtil.isRetryTopic(topic)
&& ResponseCode.TOPIC_NOT_EXIST == e.getResponseCode()) {
// clean no used topic
cleanNoneRouteTopic(topic);
}
} finally {
this.lockNamesrv.unlock();
}
}
} catch (InterruptedException e) {
log.warn("updateTopicRouteInfoFromNameServer Exception", e);
}
}
private boolean updateTopicRouteTable(String topic, TopicRouteData topicRouteData) {
TopicRouteData old = this.topicRouteTable.get(topic);
boolean changed = topicRouteData.topicRouteDataChanged(old);
if (!changed) {
if (!this.isNeedUpdateTopicRouteInfo(topic)) {
return false;
}
} else {
log.info("the topic[{}] route info changed, old[{}] ,new[{}]", topic, old, topicRouteData);
}
for (BrokerData bd : topicRouteData.getBrokerDatas()) {
this.brokerAddrTable.put(bd.getBrokerName(), bd.getBrokerAddrs());
}
TopicPublishInfo publishInfo = MQClientInstance.topicRouteData2TopicPublishInfo(topic, topicRouteData);
publishInfo.setHaveTopicRouterInfo(true);
this.updateTopicPublishInfo(topic, publishInfo);
TopicRouteData cloneTopicRouteData = new TopicRouteData(topicRouteData);
log.info("topicRouteTable.put. Topic = {}, TopicRouteData[{}]", topic, cloneTopicRouteData);
this.topicRouteTable.put(topic, cloneTopicRouteData);
return true;
}
private boolean updateSubscribeInfoTable(TopicRouteData topicRouteData, String topic) {
final TopicRouteData tmp = new TopicRouteData(topicRouteData);
tmp.setTopicQueueMappingByBroker(null);
Set<MessageQueue> newSubscribeInfo = MQClientInstance.topicRouteData2TopicSubscribeInfo(topic, tmp);
Set<MessageQueue> oldSubscribeInfo = topicSubscribeInfoTable.get(topic);
if (Objects.equals(newSubscribeInfo, oldSubscribeInfo)) {
return false;
}
log.info("the topic[{}] subscribe message queue changed, old[{}] ,new[{}]", topic, oldSubscribeInfo, newSubscribeInfo);
topicSubscribeInfoTable.put(topic, newSubscribeInfo);
return true;
}
private boolean isNeedUpdateTopicRouteInfo(final String topic) {
final TopicPublishInfo prev = this.topicPublishInfoTable.get(topic);
return null == prev || !prev.ok();
}
private void cleanNoneRouteTopic(String topic) {
// clean no used topic
topicSubscribeInfoTable.remove(topic);
}
private void updateTopicPublishInfo(final String topic, final TopicPublishInfo info) {
if (info != null && topic != null) {
TopicPublishInfo prev = this.topicPublishInfoTable.put(topic, info);
if (prev != null) {
log.info("updateTopicPublishInfo prev is not null, " + prev);
}
}
}
public void shutdown() {
if (null != this.scheduledExecutorService) {
this.scheduledExecutorService.shutdown();
}
}
public TopicPublishInfo tryToFindTopicPublishInfo(final String topic) {
TopicPublishInfo topicPublishInfo = this.topicPublishInfoTable.get(topic);
if (null == topicPublishInfo || !topicPublishInfo.ok()) {
this.updateTopicRouteInfoFromNameServer(topic, true, false);
topicPublishInfo = this.topicPublishInfoTable.get(topic);
}
return topicPublishInfo;
}
public String findBrokerAddressInPublish(String brokerName) {
if (brokerName == null) {
return null;
}
Map<Long/* brokerId */, String/* address */> map = this.brokerAddrTable.get(brokerName);
if (map != null && !map.isEmpty()) {
return map.get(MixAll.MASTER_ID);
}
return null;
}
public String findBrokerAddressInSubscribe(
final String brokerName,
final long brokerId,
final boolean onlyThisBroker
) {
if (brokerName == null) {
return null;
}
String brokerAddr = null;
boolean found = false;
Map<Long/* brokerId */, String/* address */> map = this.brokerAddrTable.get(brokerName);
if (map != null && !map.isEmpty()) {
brokerAddr = map.get(brokerId);
boolean slave = brokerId != MixAll.MASTER_ID;
found = brokerAddr != null;
if (!found && slave) {
brokerAddr = map.get(brokerId + 1);
found = brokerAddr != null;
}
if (!found && !onlyThisBroker) {
Map.Entry<Long, String> entry = map.entrySet().iterator().next();
brokerAddr = entry.getValue();
found = true;
}
}
return brokerAddr;
}
public Set<MessageQueue> getTopicSubscribeInfo(String topic) {
Set<MessageQueue> queues = topicSubscribeInfoTable.get(topic);
if (null == queues || queues.isEmpty()) {
this.updateTopicRouteInfoFromNameServer(topic, false, true);
queues = this.topicSubscribeInfoTable.get(topic);
}
return queues;
}
}
| TopicRouteInfoManager |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/ldap/LdapServerBeanDefinitionParser.java | {
"start": 1737,
"end": 7921
} | class ____ implements BeanDefinitionParser {
private static final String CONTEXT_SOURCE_CLASS = "org.springframework.security.ldap.DefaultSpringSecurityContextSource";
/**
* Defines the Url of the ldap server to use. If not specified, an embedded UnboundID
* instance will be created
*/
private static final String ATT_URL = "url";
private static final String ATT_PRINCIPAL = "manager-dn";
private static final String ATT_PASSWORD = "manager-password";
// Properties which apply to embedded server only - when no Url is set
/** sets the configuration suffix (default is "dc=springframework,dc=org"). */
public static final String ATT_ROOT_SUFFIX = "root";
private static final String OPT_DEFAULT_ROOT_SUFFIX = "dc=springframework,dc=org";
/**
* Optionally defines an ldif resource to be loaded. Otherwise an attempt will be made
* to load all ldif files found on the classpath.
*/
public static final String ATT_LDIF_FILE = "ldif";
private static final String OPT_DEFAULT_LDIF_FILE = "classpath*:*.ldif";
/** Defines the port the LDAP_PROVIDER server should run on */
public static final String ATT_PORT = "port";
private static final String RANDOM_PORT = "0";
private static final int DEFAULT_PORT = 33389;
private static final String UNBOUNID_CLASSNAME = "com.unboundid.ldap.listener.InMemoryDirectoryServer";
private static final String UNBOUNDID_CONTAINER_CLASSNAME = "org.springframework.security.ldap.server.UnboundIdContainer";
private static final boolean unboundIdPresent;
static {
ClassLoader classLoader = LdapServerBeanDefinitionParser.class.getClassLoader();
unboundIdPresent = ClassUtils.isPresent(UNBOUNID_CLASSNAME, classLoader);
}
@Override
public BeanDefinition parse(Element elt, ParserContext parserContext) {
String url = elt.getAttribute(ATT_URL);
RootBeanDefinition contextSource;
if (!StringUtils.hasText(url)) {
contextSource = createEmbeddedServer(elt, parserContext);
}
else {
contextSource = new RootBeanDefinition();
contextSource.setBeanClassName(CONTEXT_SOURCE_CLASS);
contextSource.getConstructorArgumentValues().addIndexedArgumentValue(0, url);
}
contextSource.setSource(parserContext.extractSource(elt));
String managerDn = elt.getAttribute(ATT_PRINCIPAL);
String managerPassword = elt.getAttribute(ATT_PASSWORD);
if (StringUtils.hasText(managerDn)) {
if (!StringUtils.hasText(managerPassword)) {
parserContext.getReaderContext()
.error("You must specify the " + ATT_PASSWORD + " if you supply a " + managerDn, elt);
}
contextSource.getPropertyValues().addPropertyValue("userDn", managerDn);
contextSource.getPropertyValues().addPropertyValue("password", managerPassword);
}
String id = elt.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
String contextSourceId = StringUtils.hasText(id) ? id : BeanIds.CONTEXT_SOURCE;
parserContext.getRegistry().registerBeanDefinition(contextSourceId, contextSource);
return null;
}
/**
* Will be called if no url attribute is supplied.
*
* Registers beans to create an embedded UnboundID Server.
* @return the BeanDefinition for the ContextSource for the embedded server.
*
* @see UnboundIdContainer
*/
private RootBeanDefinition createEmbeddedServer(Element element, ParserContext parserContext) {
Object source = parserContext.extractSource(element);
String suffix = element.getAttribute(ATT_ROOT_SUFFIX);
if (!StringUtils.hasText(suffix)) {
suffix = OPT_DEFAULT_ROOT_SUFFIX;
}
BeanDefinitionBuilder contextSource = BeanDefinitionBuilder.rootBeanDefinition(CONTEXT_SOURCE_CLASS);
contextSource.addConstructorArgValue(suffix);
contextSource.addPropertyValue("userDn", "uid=admin,ou=system");
contextSource.addPropertyValue("password", "secret");
BeanDefinition embeddedLdapServerConfigBean = BeanDefinitionBuilder
.rootBeanDefinition(EmbeddedLdapServerConfigBean.class)
.getBeanDefinition();
String embeddedLdapServerConfigBeanName = parserContext.getReaderContext()
.generateBeanName(embeddedLdapServerConfigBean);
parserContext.registerBeanComponent(
new BeanComponentDefinition(embeddedLdapServerConfigBean, embeddedLdapServerConfigBeanName));
contextSource.setFactoryMethodOnBean("createEmbeddedContextSource", embeddedLdapServerConfigBeanName);
String mode = element.getAttribute("mode");
RootBeanDefinition ldapContainer = getRootBeanDefinition(mode);
ldapContainer.setSource(source);
ldapContainer.getConstructorArgumentValues().addGenericArgumentValue(suffix);
String ldifs = element.getAttribute(ATT_LDIF_FILE);
if (!StringUtils.hasText(ldifs)) {
ldifs = OPT_DEFAULT_LDIF_FILE;
}
ldapContainer.getConstructorArgumentValues().addGenericArgumentValue(ldifs);
ldapContainer.getPropertyValues().addPropertyValue("port", getPort(element));
if (parserContext.getRegistry().containsBeanDefinition(BeanIds.EMBEDDED_UNBOUNDID)) {
parserContext.getReaderContext()
.error("Only one embedded server bean is allowed per application context", element);
}
String beanId = resolveBeanId(mode);
if (beanId != null) {
parserContext.getRegistry().registerBeanDefinition(beanId, ldapContainer);
}
return (RootBeanDefinition) contextSource.getBeanDefinition();
}
private RootBeanDefinition getRootBeanDefinition(String mode) {
if (isUnboundidEnabled(mode)) {
return new RootBeanDefinition(UNBOUNDID_CONTAINER_CLASSNAME, null, null);
}
throw new IllegalStateException("Embedded LDAP server is not provided");
}
private String resolveBeanId(String mode) {
if (isUnboundidEnabled(mode)) {
return BeanIds.EMBEDDED_UNBOUNDID;
}
return null;
}
private boolean isUnboundidEnabled(String mode) {
return "unboundid".equals(mode) || unboundIdPresent;
}
private String getPort(Element element) {
String port = element.getAttribute(ATT_PORT);
return (StringUtils.hasText(port) ? port : getDefaultPort());
}
private String getDefaultPort() {
try (ServerSocket serverSocket = new ServerSocket(DEFAULT_PORT)) {
return String.valueOf(serverSocket.getLocalPort());
}
catch (IOException ex) {
return RANDOM_PORT;
}
}
private static | LdapServerBeanDefinitionParser |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/sink/compactor/RecordWiseFileCompactor.java | {
"start": 2510,
"end": 2828
} | interface ____<T> extends Serializable {
/**
* @return A reader that reads elements from the given file.
* @throws IOException Thrown if an I/O error occurs when opening the file.
*/
Reader<T> createFor(Path path) throws IOException;
}
}
}
| Factory |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/ResolvedCatalogModelJsonSerializer.java | {
"start": 1427,
"end": 3445
} | class ____ extends StdSerializer<ResolvedCatalogModel> {
private static final long serialVersionUID = 1L;
static final String SERIALIZE_OPTIONS = "serialize_options";
static final String INPUT_SCHEMA = "inputSchema";
static final String OUTPUT_SCHEMA = "outputSchema";
static final String OPTIONS = "options";
static final String COMMENT = "comment";
public ResolvedCatalogModelJsonSerializer() {
super(ResolvedCatalogModel.class);
}
@Override
public void serialize(
ResolvedCatalogModel model,
JsonGenerator jsonGenerator,
SerializerProvider serializerProvider)
throws IOException {
boolean serializeOptions =
serializerProvider.getAttribute(SERIALIZE_OPTIONS) == null
|| (boolean) serializerProvider.getAttribute(SERIALIZE_OPTIONS);
serialize(model, serializeOptions, jsonGenerator, serializerProvider);
}
static void serialize(
ResolvedCatalogModel resolvedCatalogModel,
boolean serializeOptions,
JsonGenerator jsonGenerator,
SerializerProvider serializerProvider)
throws IOException {
jsonGenerator.writeStartObject();
serializerProvider.defaultSerializeField(
INPUT_SCHEMA, resolvedCatalogModel.getResolvedInputSchema(), jsonGenerator);
serializerProvider.defaultSerializeField(
OUTPUT_SCHEMA, resolvedCatalogModel.getResolvedOutputSchema(), jsonGenerator);
if (serializeOptions) {
serializerProvider.defaultSerializeField(
OPTIONS, resolvedCatalogModel.getOptions(), jsonGenerator);
if (resolvedCatalogModel.getComment() != null) {
serializerProvider.defaultSerializeField(
COMMENT, resolvedCatalogModel.getComment(), jsonGenerator);
}
}
jsonGenerator.writeEndObject();
}
}
| ResolvedCatalogModelJsonSerializer |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java | {
"start": 732,
"end": 2612
} | class ____ extends AbstractXContentSerializingTestCase<GetRollupJobsAction.JobWrapper> {
@Override
protected GetRollupJobsAction.JobWrapper doParseInstance(XContentParser parser) throws IOException {
return GetRollupJobsAction.JobWrapper.PARSER.apply(parser, null);
}
@Override
protected Writeable.Reader<GetRollupJobsAction.JobWrapper> instanceReader() {
return GetRollupJobsAction.JobWrapper::new;
}
@Override
protected GetRollupJobsAction.JobWrapper createTestInstance() {
IndexerState state = null;
int num = randomIntBetween(0, 3);
if (num == 0) {
state = IndexerState.STOPPED;
} else if (num == 1) {
state = IndexerState.STARTED;
} else if (num == 2) {
state = IndexerState.STOPPING;
} else if (num == 3) {
state = IndexerState.ABORTING;
}
return new GetRollupJobsAction.JobWrapper(
ConfigTestHelpers.randomRollupJobConfig(random()),
new RollupIndexerJobStats(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
),
new RollupJobStatus(state, Collections.emptyMap())
);
}
@Override
protected GetRollupJobsAction.JobWrapper mutateInstance(GetRollupJobsAction.JobWrapper instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
| JobWrapperSerializingTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java | {
"start": 1399,
"end": 10228
} | class ____ extends ActionResponse implements ChunkedToXContentObject {
private Map<String, MappingMetadata> mappings = Map.of();
private Map<String, List<AliasMetadata>> aliases = Map.of();
private Map<String, Settings> settings = Map.of();
private Map<String, Settings> defaultSettings = Map.of();
private Map<String, String> dataStreams = Map.of();
private final String[] indices;
public GetIndexResponse(
String[] indices,
Map<String, MappingMetadata> mappings,
Map<String, List<AliasMetadata>> aliases,
Map<String, Settings> settings,
Map<String, Settings> defaultSettings,
Map<String, String> dataStreams
) {
this.indices = indices;
// to have deterministic order
Arrays.sort(indices);
if (mappings != null) {
this.mappings = mappings;
}
if (aliases != null) {
this.aliases = aliases;
}
if (settings != null) {
this.settings = settings;
}
if (defaultSettings != null) {
this.defaultSettings = defaultSettings;
}
if (dataStreams != null) {
this.dataStreams = dataStreams;
}
}
/**
* The only usage of this constructor is for BwC cross-cluster transforms for clusters before v8.2. The ML team is aware that we
* don't need to support that anymore now that we're on v9. Once they remove that BwC code, we can remove this constructor as well.
*/
@UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
GetIndexResponse(StreamInput in) throws IOException {
this.indices = in.readStringArray();
mappings = in.readImmutableOpenMap(StreamInput::readString, in.getTransportVersion().before(TransportVersions.V_8_0_0) ? i -> {
int numMappings = i.readVInt();
assert numMappings == 0 || numMappings == 1 : "Expected 0 or 1 mappings but got " + numMappings;
if (numMappings == 1) {
String type = i.readString();
assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected [_doc] but got [" + type + "]";
return new MappingMetadata(i);
} else {
return MappingMetadata.EMPTY_MAPPINGS;
}
} : i -> i.readBoolean() ? new MappingMetadata(i) : MappingMetadata.EMPTY_MAPPINGS);
aliases = in.readImmutableOpenMap(StreamInput::readString, i -> i.readCollectionAsList(AliasMetadata::new));
settings = in.readImmutableOpenMap(StreamInput::readString, Settings::readSettingsFromStream);
defaultSettings = in.readImmutableOpenMap(StreamInput::readString, Settings::readSettingsFromStream);
dataStreams = in.readImmutableOpenMap(StreamInput::readString, StreamInput::readOptionalString);
}
public String[] indices() {
return indices;
}
public String[] getIndices() {
return indices();
}
public Map<String, MappingMetadata> mappings() {
return mappings;
}
public Map<String, MappingMetadata> getMappings() {
return mappings();
}
public Map<String, List<AliasMetadata>> aliases() {
return aliases;
}
public Map<String, List<AliasMetadata>> getAliases() {
return aliases();
}
public Map<String, Settings> settings() {
return settings;
}
public Map<String, String> dataStreams() {
return dataStreams;
}
public Map<String, String> getDataStreams() {
return dataStreams();
}
/**
* If the originating {@link GetIndexRequest} object was configured to include
* defaults, this will contain a mapping of index name to {@link Settings} objects.
* The returned {@link Settings} objects will contain only those settings taking
* effect as defaults. Any settings explicitly set on the index will be available
* via {@link #settings()}.
* See also {@link GetIndexRequest#includeDefaults(boolean)}
*/
public Map<String, Settings> defaultSettings() {
return defaultSettings;
}
public Map<String, Settings> getSettings() {
return settings();
}
/**
* Returns the string value for the specified index and setting. If the includeDefaults flag was not set or set to
* false on the {@link GetIndexRequest}, this method will only return a value where the setting was explicitly set
* on the index. If the includeDefaults flag was set to true on the {@link GetIndexRequest}, this method will fall
* back to return the default value if the setting was not explicitly set.
*/
public String getSetting(String index, String setting) {
Settings indexSettings = settings.get(index);
if (setting != null) {
if (indexSettings != null && indexSettings.hasValue(setting)) {
return indexSettings.get(setting);
} else {
Settings defaultIndexSettings = defaultSettings.get(index);
if (defaultIndexSettings != null) {
return defaultIndexSettings.get(setting);
} else {
return null;
}
}
} else {
return null;
}
}
/**
* NB prior to 9.1 this was a TransportMasterNodeReadAction so for BwC we must remain able to write these responses until
* we no longer need to support calling this action remotely.
*/
@UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(indices);
MappingMetadata.writeMappingMetadata(out, mappings);
out.writeMap(aliases, StreamOutput::writeCollection);
out.writeMap(settings, StreamOutput::writeWriteable);
out.writeMap(defaultSettings, StreamOutput::writeWriteable);
out.writeMap(dataStreams, StreamOutput::writeOptionalString);
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
return Iterators.concat(
Iterators.single((builder, params) -> builder.startObject()),
Iterators.map(Iterators.forArray(indices), index -> (builder, params) -> {
builder.startObject(index);
builder.startObject("aliases");
List<AliasMetadata> indexAliases = aliases.get(index);
if (indexAliases != null) {
for (final AliasMetadata alias : indexAliases) {
AliasMetadata.Builder.toXContent(alias, builder, params);
}
}
builder.endObject();
MappingMetadata indexMappings = mappings.get(index);
if (indexMappings == null) {
builder.startObject("mappings").endObject();
} else {
builder.field("mappings", indexMappings.sourceAsMap());
}
builder.startObject("settings");
Settings indexSettings = settings.get(index);
if (indexSettings != null) {
indexSettings.toXContent(builder, params);
}
builder.endObject();
Settings defaultIndexSettings = defaultSettings.get(index);
if (defaultIndexSettings != null && defaultIndexSettings.isEmpty() == false) {
builder.startObject("defaults");
defaultIndexSettings.toXContent(builder, params);
builder.endObject();
}
String dataStream = dataStreams.get(index);
if (dataStream != null) {
builder.field("data_stream", dataStream);
}
return builder.endObject();
}),
Iterators.single((builder, params) -> builder.endObject())
);
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetIndexResponse that = (GetIndexResponse) o;
return Arrays.equals(indices, that.indices)
&& Objects.equals(aliases, that.aliases)
&& Objects.equals(mappings, that.mappings)
&& Objects.equals(settings, that.settings)
&& Objects.equals(defaultSettings, that.defaultSettings)
&& Objects.equals(dataStreams, that.dataStreams);
}
@Override
public int hashCode() {
return Objects.hash(Arrays.hashCode(indices), aliases, mappings, settings, defaultSettings, dataStreams);
}
}
| GetIndexResponse |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/h12/http1/DefaultHttp11ServerTransportListener.java | {
"start": 2300,
"end": 5521
} | class ____
extends AbstractServerTransportListener<RequestMetadata, HttpInputMessage>
implements Http1ServerTransportListener {
private final HttpChannel httpChannel;
private Http1ServerChannelObserver responseObserver;
public DefaultHttp11ServerTransportListener(HttpChannel httpChannel, URL url, FrameworkModel frameworkModel) {
super(frameworkModel, url, httpChannel);
this.httpChannel = httpChannel;
responseObserver = prepareResponseObserver(new Http1UnaryServerChannelObserver(httpChannel));
}
private Http1ServerChannelObserver prepareResponseObserver(Http1ServerChannelObserver responseObserver) {
responseObserver.setExceptionCustomizer(getExceptionCustomizer());
RpcInvocationBuildContext context = getContext();
responseObserver.setResponseEncoder(context == null ? JsonCodec.INSTANCE : context.getHttpMessageEncoder());
return responseObserver;
}
@Override
protected HttpMessageListener buildHttpMessageListener() {
RpcInvocationBuildContext context = getContext();
RpcInvocation rpcInvocation = buildRpcInvocation(context);
ServerCallListener serverCallListener =
startListener(rpcInvocation, context.getMethodDescriptor(), context.getInvoker());
DefaultListeningDecoder listeningDecoder = new DefaultListeningDecoder(
context.getHttpMessageDecoder(), context.getMethodMetadata().getActualRequestTypes());
listeningDecoder.setListener(serverCallListener::onMessage);
return new DefaultHttpMessageListener(listeningDecoder);
}
private ServerCallListener startListener(
RpcInvocation invocation, MethodDescriptor methodDescriptor, Invoker<?> invoker) {
switch (methodDescriptor.getRpcType()) {
case UNARY:
return new AutoCompleteUnaryServerCallListener(invocation, invoker, responseObserver);
case SERVER_STREAM:
responseObserver = prepareResponseObserver(new Http1SseServerChannelObserver(httpChannel));
responseObserver.addHeadersCustomizer((hs, t) ->
hs.set(HttpHeaderNames.CONTENT_TYPE.getKey(), MediaType.TEXT_EVENT_STREAM.getName()));
return new AutoCompleteServerStreamServerCallListener(invocation, invoker, responseObserver);
default:
throw new UnsupportedOperationException("HTTP1.x only support unary and server-stream");
}
}
@Override
protected void onMetadataCompletion(RequestMetadata metadata) {
responseObserver.setResponseEncoder(getContext().getHttpMessageEncoder());
}
@Override
protected void onError(Throwable throwable) {
responseObserver.onError(throwable);
}
@Override
protected void initializeAltSvc(URL url) {
String protocolId = Http3Exchanger.isEnabled(url) ? "h3" : "h2";
String value = protocolId + "=\":" + url.getParameter(Constants.BIND_PORT_KEY, url.getPort()) + '"';
responseObserver.addHeadersCustomizer((hs, t) -> hs.set(HttpHeaderNames.ALT_SVC.getKey(), value));
}
private static final | DefaultHttp11ServerTransportListener |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/error/BlockingExceptionMapperTest.java | {
"start": 6893,
"end": 7325
} | interface ____ {
@GET
@Path("/blocking")
InputStream blocking();
@Blocking
@ClientExceptionMapper
static RuntimeException map(Response response) {
EVENT_LOOP_THREAD_USED_BY_MAPPER.set(Context.isOnEventLoopThread());
return new RuntimeException(response.readEntity(String.class));
}
}
public static | ClientUsingBlockingExceptionMapperWithAnnotation |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit4/SpringJUnit4ClassRunnerTests.java | {
"start": 2882,
"end": 2978
} | interface ____ {
}
@Timed(millis = 1000)
@Retention(RetentionPolicy.RUNTIME)
private @ | MetaTimed |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/language/SpelLanguageTest.java | {
"start": 1046,
"end": 1243
} | class ____ extends AbstractTypedLanguageTest<SpELExpression.Builder, SpELExpression> {
SpelLanguageTest() {
super("#{message.body}", LanguageBuilderFactory::spel);
}
}
| SpelLanguageTest |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/invoker/mvnup/UpgradeContext.java | {
"start": 1392,
"end": 4857
} | class ____ extends LookupContext {
public UpgradeContext(InvokerRequest invokerRequest, UpgradeOptions upgradeOptions) {
super(invokerRequest, true, upgradeOptions);
}
public Map<String, Goal> goals;
public List<AttributedString> header;
public AttributedStyle style;
public LineReader reader;
// Indentation control for nested logging
private int indentLevel = 0;
private String indentString = Indentation.DEFAULT;
public void addInHeader(String text) {
addInHeader(AttributedStyle.DEFAULT, text);
}
public void addInHeader(AttributedStyle style, String text) {
AttributedStringBuilder asb = new AttributedStringBuilder();
asb.style(style).append(text);
header.add(asb.toAttributedString());
}
/**
* Increases the indentation level for nested logging.
*/
public void indent() {
indentLevel++;
}
/**
* Decreases the indentation level for nested logging.
*/
public void unindent() {
if (indentLevel > 0) {
indentLevel--;
}
}
/**
* Sets the indentation string to use (e.g., " ", " ", "\t").
*/
public void setIndentString(String indentString) {
this.indentString = indentString != null ? indentString : Indentation.DEFAULT;
}
/**
* Gets the current indentation prefix based on the current level.
*/
private String getCurrentIndent() {
if (indentLevel == 0) {
return "";
}
return indentString.repeat(indentLevel);
}
/**
* Logs an informational message with current indentation.
*/
public void info(String message) {
logger.info(getCurrentIndent() + message);
}
/**
* Logs a debug message with current indentation.
*/
public void debug(String message) {
logger.debug(getCurrentIndent() + message);
}
/**
* Prints a new line.
*/
public void println() {
logger.info("");
}
// Semantic logging methods with icons for upgrade operations
/**
* Logs a successful operation with a checkmark icon.
*/
public void success(String message) {
logger.info(getCurrentIndent() + ConsoleIcon.SUCCESS.getIcon(terminal) + " " + message);
}
/**
* Logs an error with an X icon.
*/
public void failure(String message) {
logger.error(getCurrentIndent() + ConsoleIcon.ERROR.getIcon(terminal) + " " + message);
}
/**
* Logs a warning with a warning icon.
*/
public void warning(String message) {
logger.warn(getCurrentIndent() + ConsoleIcon.WARNING.getIcon(terminal) + " " + message);
}
/**
* Logs detailed information with a bullet point.
*/
public void detail(String message) {
logger.info(getCurrentIndent() + ConsoleIcon.DETAIL.getIcon(terminal) + " " + message);
}
/**
* Logs a performed action with an arrow icon.
*/
public void action(String message) {
logger.info(getCurrentIndent() + ConsoleIcon.ACTION.getIcon(terminal) + " " + message);
}
/**
* Gets the UpgradeOptions from the invoker request.
* This provides convenient access to upgrade-specific options without casting.
*
* @return the UpgradeOptions
*/
@Nonnull
public UpgradeOptions options() {
return (UpgradeOptions) super.options();
}
}
| UpgradeContext |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/graph/WithMasterCheckpointHookConfigTest.java | {
"start": 7303,
"end": 7649
} | class ____<T> extends Identity<T>
implements WithMasterCheckpointHook<String> {
private final String id;
IdentityWithHook(String id) {
this.id = id;
}
@Override
public TestHook createMasterTriggerRestoreHook() {
return new TestHook(id);
}
}
}
| IdentityWithHook |
java | elastic__elasticsearch | x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SSLReloadDuringStartupIntegTests.java | {
"start": 3580,
"end": 10435
} | class ____ a cluster. For this test, restart a random node.
* While stopped, replace the Transport keystore with a bad one, so the node cannot rejoin the cluster.
* While restarting, replace the keystore with a good one, and verify if ES reloaded it by checking it if rejoined the cluster.
*
* @throws Exception Compare ES startup logs to diagnostic and timing logs for the test, to narrow down why ES startup failed.
*/
@TestLogging(value = "org.elasticsearch.xpack.ssl.SSLReloadDuringStartupIntegTests:TRACE", reason = "See Keystore update vs ES restart")
public void testReloadDuringStartup() throws Exception {
final String[] nodeNames = internalCluster().getNodeNames();
final String nodeName = randomFrom(nodeNames);
final Environment env = internalCluster().getInstance(Environment.class, nodeName);
final CountDownLatch beforeKeystoreFix = new CountDownLatch(2); // SYNC: Cert update & ES restart
final CountDownLatch afterKeystoreFix = new CountDownLatch(1); // SYNC: Verify cluster after cert update
final Path nodeKeystorePath = env.configDir().resolve("testnode.jks"); // all nodes have good keystore
final Path badKeystorePath = getDataPath(badKeyStoreFilePath); // stop a node, and apply this bad keystore
final Path goodKeystorePath = getDataPath(goodKeyStoreFilePath); // start the node, and apply this good keystore
assertTrue(Files.exists(nodeKeystorePath));
logger.trace("Stopping node [{}] in {}-node cluster {}...", nodeName, nodeNames.length, nodeNames);
final long stopNanos = System.nanoTime();
internalCluster().restartNode(nodeName, new RestartCallback() {
@Override
public Settings onNodeStopped(String nodeName) throws Exception {
logger.debug("Node [{}] stopped in {}ms.", nodeName, TimeValue.timeValueNanos(System.nanoTime() - stopNanos).millisFrac());
atomicCopyIfPossible(badKeystorePath, nodeKeystorePath);
final Thread fixKeystoreThread = new Thread(() -> {
waitUntilNodeStartupIsReadyToBegin(beforeKeystoreFix); // SYNC: Cert update & ES restart
try {
atomicCopyIfPossible(goodKeystorePath, nodeKeystorePath);
logger.trace("Waiting for ES restart...");
afterKeystoreFix.countDown(); // SYNC: Cert update & ES restart
} catch (IOException e) {
throw new UncheckedIOException(e);
}
});
fixKeystoreThread.setName("Fix Keystore");
fixKeystoreThread.start();
waitUntilFixKeystoreIsReadyToBegin(beforeKeystoreFix); // SYNC: Cert update & ES restart
return super.onNodeStopped(nodeName); // ASSUME: RestartCallback will do ES start next
}
});
logger.trace("Waiting for keystore fix...");
timed(Level.DEBUG, "Awaited {}ms. Verifying the cluster...", () -> {
try {
afterKeystoreFix.await(); // SYNC: Verify cluster after cert update
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
});
timed(Level.TRACE, "Ensure cluster size consistency took {}ms.", this::ensureClusterSizeConsistency);
timed(Level.TRACE, "Ensure fully connected cluster took {}ms.", this::ensureFullyConnectedCluster);
}
private void waitUntilNodeStartupIsReadyToBegin(final CountDownLatch beforeKeystoreFix) {
logger.trace("Waiting for ES start to begin...");
beforeKeystoreFix.countDown(); // SYNC: Cert update & ES restart
final long sleepMillis = randomLongBetween(1L, 2000L); // intended sleepMillis
timed(Level.DEBUG, "Awaited {}ms. Sleeping " + sleepMillis + "ms before fixing...", () -> {
try {
beforeKeystoreFix.await(); // SYNC: Cert update & ES restart
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
});
timed(Level.DEBUG, "Slept {}ms, intended " + sleepMillis + "ms. Fixing can start now...", () -> {
try {
Thread.sleep(sleepMillis); // Simulate cert update delay relative to ES start
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
});
}
private void waitUntilFixKeystoreIsReadyToBegin(final CountDownLatch beforeKeystoreFix) {
logger.trace("Waiting for keystore fix to begin...");
beforeKeystoreFix.countDown(); // SYNC: Cert update & ES restart
timed(Level.DEBUG, "Awaited {}ms. Node can start now...", () -> {
try {
beforeKeystoreFix.await(); // SYNC: Cert update & ES restart
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
});
}
private void timed(final Level level, final String message, final Runnable runnable) {
assert level != null;
assert Strings.isEmpty(message) == false;
assert message.contains("{}ms") : "Message must contain {}ms";
assert message.replace("{}", "").contains("{}") == false : "Message can only contain one binding.";
assert runnable != null;
final long startNanos = System.nanoTime();
try {
runnable.run();
} finally {
logger.log(level, message, TimeValue.timeValueNanos(System.nanoTime() - startNanos).millisFrac());
}
}
/**
* Copy a source file to a destination file, overwriting if necessary.
* Use an intermediate temporary file, to attempt an atomic move.
* If atomic move fails, fall back to non-atomic move.
* @param source File to be copied.
* @param target File to be created or overwritten.
* @throws IOException Cannot create temp file, or copy source file to temp file, or non-atomic move temp file to target file.
*/
private void atomicCopyIfPossible(Path source, Path target) throws IOException {
logger.trace("Copying [{}] to [{}]", source, target);
Path tmp = createTempFile();
logger.trace("Created temporary file [{}]", tmp);
Files.copy(source, tmp, StandardCopyOption.REPLACE_EXISTING);
try {
Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
logger.debug("Atomic move succeeded from [{}] to [{}]", tmp, target);
} catch (AtomicMoveNotSupportedException e) {
Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING);
logger.debug("Non-atomic move succeeded from [{}] to [{}]", tmp, target);
}
}
}
| start |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java | {
"start": 7256,
"end": 8062
} | class ____ implementing DBWritable, which is the
* Java object holding tuple fields.
* @param inputQuery the input query to select fields. Example :
* "SELECT f1, f2, f3 FROM Mytable ORDER BY f1"
* @param inputCountQuery the input query that returns the number of records in
* the table.
* Example : "SELECT COUNT(f1) FROM Mytable"
* @see #setInput(JobConf, Class, String, String, String, String...)
*/
public static void setInput(JobConf job, Class<? extends DBWritable> inputClass,
String inputQuery, String inputCountQuery) {
job.setInputFormat(DBInputFormat.class);
DBConfiguration dbConf = new DBConfiguration(job);
dbConf.setInputClass(inputClass);
dbConf.setInputQuery(inputQuery);
dbConf.setInputCountQuery(inputCountQuery);
}
}
| object |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/form/FormWithConverterTest.java | {
"start": 733,
"end": 1374
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(ResourceClient.class, Resource.class, Input.class, InputParamConverterProvider.class));
@TestHTTPResource
URI baseUri;
@Test
void test() {
ResourceClient client = RestClientBuilder.newBuilder().baseUri(baseUri).register(InputParamConverterProvider.class)
.build(ResourceClient.class);
String result = client.hello(new Input("hey"));
assertThat(result).isEqualTo("hey!");
}
public | FormWithConverterTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_2541/Issue2541Test.java | {
"start": 444,
"end": 693
} | class ____ {
@ProcessorTest
void shouldGenerateCorrectCode() {
Issue2541Mapper.Target target = Issue2541Mapper.INSTANCE.map( new Issue2541Mapper.Source( null ) );
assertThat( target.getValue() ).isEmpty();
}
}
| Issue2541Test |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/grpc/v2/GrpcMessagingApplication.java | {
"start": 3610,
"end": 21628
} | class ____ extends MessagingServiceGrpc.MessagingServiceImplBase implements StartAndShutdown {
private final static Logger log = LoggerFactory.getLogger(LoggerName.PROXY_LOGGER_NAME);
private final GrpcMessingActivity grpcMessingActivity;
protected final RequestPipeline requestPipeline;
protected ThreadPoolExecutor routeThreadPoolExecutor;
protected ThreadPoolExecutor producerThreadPoolExecutor;
protected ThreadPoolExecutor consumerThreadPoolExecutor;
protected ThreadPoolExecutor clientManagerThreadPoolExecutor;
protected ThreadPoolExecutor transactionThreadPoolExecutor;
protected GrpcMessagingApplication(GrpcMessingActivity grpcMessingActivity, RequestPipeline requestPipeline) {
this.grpcMessingActivity = grpcMessingActivity;
this.requestPipeline = requestPipeline;
ProxyConfig config = ConfigurationManager.getProxyConfig();
this.routeThreadPoolExecutor = ThreadPoolMonitor.createAndMonitor(
config.getGrpcRouteThreadPoolNums(),
config.getGrpcRouteThreadPoolNums(),
1,
TimeUnit.MINUTES,
"GrpcRouteThreadPool",
config.getGrpcRouteThreadQueueCapacity()
);
this.producerThreadPoolExecutor = ThreadPoolMonitor.createAndMonitor(
config.getGrpcProducerThreadPoolNums(),
config.getGrpcProducerThreadPoolNums(),
1,
TimeUnit.MINUTES,
"GrpcProducerThreadPool",
config.getGrpcProducerThreadQueueCapacity()
);
this.consumerThreadPoolExecutor = ThreadPoolMonitor.createAndMonitor(
config.getGrpcConsumerThreadPoolNums(),
config.getGrpcConsumerThreadPoolNums(),
1,
TimeUnit.MINUTES,
"GrpcConsumerThreadPool",
config.getGrpcConsumerThreadQueueCapacity()
);
this.clientManagerThreadPoolExecutor = ThreadPoolMonitor.createAndMonitor(
config.getGrpcClientManagerThreadPoolNums(),
config.getGrpcClientManagerThreadPoolNums(),
1,
TimeUnit.MINUTES,
"GrpcClientManagerThreadPool",
config.getGrpcClientManagerThreadQueueCapacity()
);
this.transactionThreadPoolExecutor = ThreadPoolMonitor.createAndMonitor(
config.getGrpcTransactionThreadPoolNums(),
config.getGrpcTransactionThreadPoolNums(),
1,
TimeUnit.MINUTES,
"GrpcTransactionThreadPool",
config.getGrpcTransactionThreadQueueCapacity()
);
this.init();
}
protected void init() {
GrpcTaskRejectedExecutionHandler rejectedExecutionHandler = new GrpcTaskRejectedExecutionHandler();
this.routeThreadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler);
this.routeThreadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler);
this.producerThreadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler);
this.consumerThreadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler);
this.clientManagerThreadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler);
this.transactionThreadPoolExecutor.setRejectedExecutionHandler(rejectedExecutionHandler);
}
public static GrpcMessagingApplication create(MessagingProcessor messagingProcessor) {
RequestPipeline pipeline = (context, headers, request) -> {
};
// add pipeline
// the last pipe add will execute at the first
AuthConfig authConfig = ConfigurationManager.getAuthConfig();
if (authConfig != null) {
pipeline = pipeline
.pipe(new AuthorizationPipeline(authConfig, messagingProcessor))
.pipe(new AuthenticationPipeline(authConfig, messagingProcessor));
}
pipeline = pipeline.pipe(new ContextInitPipeline());
return new GrpcMessagingApplication(new DefaultGrpcMessingActivity(messagingProcessor), pipeline);
}
protected Status flowLimitStatus() {
return ResponseBuilder.getInstance().buildStatus(Code.TOO_MANY_REQUESTS, "flow limit");
}
protected Status convertExceptionToStatus(Throwable t) {
return ResponseBuilder.getInstance().buildStatus(t);
}
protected <V, T> void addExecutor(ExecutorService executor, ProxyContext context, V request, Runnable runnable,
StreamObserver<T> responseObserver, Function<Status, T> statusResponseCreator) {
if (request instanceof GeneratedMessageV3) {
requestPipeline.execute(context, GrpcConstants.METADATA.get(Context.current()), (GeneratedMessageV3) request);
validateContext(context);
} else {
log.error("[BUG]grpc request pipe is not been executed");
}
executor.submit(new GrpcTask<>(runnable, context, request, responseObserver, statusResponseCreator.apply(flowLimitStatus())));
}
protected <V, T> void writeResponse(ProxyContext context, V request, T response, StreamObserver<T> responseObserver,
Throwable t, Function<Status, T> errorResponseCreator) {
if (t != null) {
ResponseWriter.getInstance().write(
responseObserver,
errorResponseCreator.apply(convertExceptionToStatus(t))
);
} else {
ResponseWriter.getInstance().write(responseObserver, response);
}
}
protected ProxyContext createContext() {
return ProxyContext.create();
}
protected void validateContext(ProxyContext context) {
if (StringUtils.isBlank(context.getClientID())) {
throw new GrpcProxyException(Code.CLIENT_ID_REQUIRED, "client id cannot be empty");
}
}
@Override
public void queryRoute(QueryRouteRequest request, StreamObserver<QueryRouteResponse> responseObserver) {
Function<Status, QueryRouteResponse> statusResponseCreator = status -> QueryRouteResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.routeThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.queryRoute(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void heartbeat(HeartbeatRequest request, StreamObserver<HeartbeatResponse> responseObserver) {
Function<Status, HeartbeatResponse> statusResponseCreator = status -> HeartbeatResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.clientManagerThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.heartbeat(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void sendMessage(SendMessageRequest request, StreamObserver<SendMessageResponse> responseObserver) {
Function<Status, SendMessageResponse> statusResponseCreator = status -> SendMessageResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.producerThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.sendMessage(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void queryAssignment(QueryAssignmentRequest request,
StreamObserver<QueryAssignmentResponse> responseObserver) {
Function<Status, QueryAssignmentResponse> statusResponseCreator = status -> QueryAssignmentResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.routeThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.queryAssignment(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void receiveMessage(ReceiveMessageRequest request, StreamObserver<ReceiveMessageResponse> responseObserver) {
Function<Status, ReceiveMessageResponse> statusResponseCreator = status -> ReceiveMessageResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.consumerThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.receiveMessage(context, request, responseObserver),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void ackMessage(AckMessageRequest request, StreamObserver<AckMessageResponse> responseObserver) {
Function<Status, AckMessageResponse> statusResponseCreator = status -> AckMessageResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.consumerThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.ackMessage(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void forwardMessageToDeadLetterQueue(ForwardMessageToDeadLetterQueueRequest request,
StreamObserver<ForwardMessageToDeadLetterQueueResponse> responseObserver) {
Function<Status, ForwardMessageToDeadLetterQueueResponse> statusResponseCreator = status -> ForwardMessageToDeadLetterQueueResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.producerThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.forwardMessageToDeadLetterQueue(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void endTransaction(EndTransactionRequest request, StreamObserver<EndTransactionResponse> responseObserver) {
Function<Status, EndTransactionResponse> statusResponseCreator = status -> EndTransactionResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.transactionThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.endTransaction(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void notifyClientTermination(NotifyClientTerminationRequest request,
StreamObserver<NotifyClientTerminationResponse> responseObserver) {
Function<Status, NotifyClientTerminationResponse> statusResponseCreator = status -> NotifyClientTerminationResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.clientManagerThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.notifyClientTermination(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void changeInvisibleDuration(ChangeInvisibleDurationRequest request,
StreamObserver<ChangeInvisibleDurationResponse> responseObserver) {
Function<Status, ChangeInvisibleDurationResponse> statusResponseCreator = status -> {
ChangeInvisibleDurationResponse.Builder builder =
ChangeInvisibleDurationResponse.newBuilder().setStatus(status);
if (Code.TOO_MANY_REQUESTS.equals(status.getCode())) {
builder.setReceiptHandle(request.getReceiptHandle());
}
return builder.build();
};
ProxyContext context = createContext();
try {
this.addExecutor(this.consumerThreadPoolExecutor,
context,
request,
() -> grpcMessingActivity.changeInvisibleDuration(context, request)
.whenComplete((response, throwable) -> writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void recallMessage(RecallMessageRequest request, StreamObserver<RecallMessageResponse> responseObserver) {
Function<Status, RecallMessageResponse> statusResponseCreator =
status -> RecallMessageResponse.newBuilder().setStatus(status).build();
ProxyContext context = createContext();
try {
this.addExecutor(this.producerThreadPoolExecutor, // reuse producer thread pool
context,
request,
() -> grpcMessingActivity.recallMessage(context, request)
.whenComplete((response, throwable) ->
writeResponse(context, request, response, responseObserver, throwable, statusResponseCreator)),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, request, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public StreamObserver<TelemetryCommand> telemetry(StreamObserver<TelemetryCommand> responseObserver) {
Function<Status, TelemetryCommand> statusResponseCreator = status -> TelemetryCommand.newBuilder().setStatus(status).build();
ContextStreamObserver<TelemetryCommand> responseTelemetryCommand = grpcMessingActivity.telemetry(responseObserver);
return new StreamObserver<TelemetryCommand>() {
@Override
public void onNext(TelemetryCommand value) {
ProxyContext context = createContext();
try {
addExecutor(clientManagerThreadPoolExecutor,
context,
value,
() -> responseTelemetryCommand.onNext(context, value),
responseObserver,
statusResponseCreator);
} catch (Throwable t) {
writeResponse(context, value, null, responseObserver, t, statusResponseCreator);
}
}
@Override
public void onError(Throwable t) {
responseTelemetryCommand.onError(t);
}
@Override
public void onCompleted() {
responseTelemetryCommand.onCompleted();
}
};
}
@Override
public void shutdown() throws Exception {
this.grpcMessingActivity.shutdown();
this.routeThreadPoolExecutor.shutdown();
this.routeThreadPoolExecutor.shutdown();
this.producerThreadPoolExecutor.shutdown();
this.consumerThreadPoolExecutor.shutdown();
this.clientManagerThreadPoolExecutor.shutdown();
this.transactionThreadPoolExecutor.shutdown();
}
@Override
public void start() throws Exception {
this.grpcMessingActivity.start();
}
protected static | GrpcMessagingApplication |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/spi/JdbcTypeRegistry.java | {
"start": 1408,
"end": 14436
} | class ____ implements JdbcTypeBaseline.BaselineTarget, Serializable {
// private static final Logger LOG = Logger.getLogger( JdbcTypeRegistry.class );
private final TypeConfiguration typeConfiguration;
private final ConcurrentHashMap<Integer, JdbcType> descriptorMap = new ConcurrentHashMap<>();
private final ConcurrentHashMap<Integer, JdbcTypeConstructor> descriptorConstructorMap = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, AggregateJdbcType> aggregateDescriptorMap = new ConcurrentHashMap<>();
/**
* A registry for storing the constructed {@link JdbcType} for both
* {@link JdbcTypeConstructor#resolveType(TypeConfiguration, Dialect, JdbcType, ColumnTypeInformation)} and
* {@link JdbcTypeConstructor#resolveType(TypeConfiguration, Dialect, BasicType, ColumnTypeInformation)} in a single
* map.
*/
private final ConcurrentHashMap<TypeConstructedJdbcTypeKey, JdbcType> typeConstructorDescriptorMap = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, SqlTypedJdbcType> sqlTypedDescriptorMap = new ConcurrentHashMap<>();
public JdbcTypeRegistry(TypeConfiguration typeConfiguration) {
this.typeConfiguration = typeConfiguration;
JdbcTypeBaseline.prime( this );
}
public TypeConfiguration getTypeConfiguration() {
return typeConfiguration;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// baseline descriptors
@Override
public void addDescriptor(JdbcType jdbcType) {
final var previous = descriptorMap.put( jdbcType.getDefaultSqlTypeCode(), jdbcType );
// if ( previous != null && previous != jdbcType ) {
// LOG.tracef( "addDescriptor(%s) replaced previous registration(%s)", jdbcType, previous );
// }
}
@Override
public void addDescriptor(int typeCode, JdbcType jdbcType) {
final var previous = descriptorMap.put( typeCode, jdbcType );
// if ( previous != null && previous != jdbcType ) {
// LOG.tracef( "addDescriptor(%d, %s) replaced previous registration(%s)", typeCode, jdbcType, previous );
// }
}
public void addDescriptorIfAbsent(JdbcType jdbcType) {
descriptorMap.putIfAbsent( jdbcType.getDefaultSqlTypeCode(), jdbcType );
}
public void addDescriptorIfAbsent(int typeCode, JdbcType jdbcType) {
descriptorMap.putIfAbsent( typeCode, jdbcType );
}
public JdbcType findDescriptor(int jdbcTypeCode) {
return descriptorMap.get( jdbcTypeCode );
}
public JdbcType getDescriptor(int jdbcTypeCode) {
final var descriptor = descriptorMap.get( jdbcTypeCode );
if ( descriptor != null ) {
return descriptor;
}
else {
// if ( isStandardTypeCode( jdbcTypeCode ) ) {
// LOG.debugf( "A standard JDBC type code [%s] was not defined in SqlTypeDescriptorRegistry",
// jdbcTypeCode );
// }
// see if the typecode is part of a known type family...
final var potentialAlternateDescriptor = getFamilyDescriptor( jdbcTypeCode );
if ( potentialAlternateDescriptor != null ) {
return potentialAlternateDescriptor;
}
else {
// finally, create a new descriptor mapping to getObject/setObject for this type code...
final ObjectJdbcType fallBackDescriptor = new ObjectJdbcType( jdbcTypeCode );
addDescriptor( fallBackDescriptor );
return fallBackDescriptor;
}
}
}
private JdbcType getFamilyDescriptor(int jdbcTypeCode) {
final JdbcTypeFamilyInformation.Family family =
JdbcTypeFamilyInformation.INSTANCE.locateJdbcTypeFamilyByTypeCode( jdbcTypeCode );
if ( family != null ) {
for ( int potentialAlternateTypeCode : family.getTypeCodes() ) {
if ( potentialAlternateTypeCode != jdbcTypeCode ) {
final var potentialAlternateDescriptor = descriptorMap.get( potentialAlternateTypeCode );
if ( potentialAlternateDescriptor != null ) {
// todo (6.0) : add a SqlTypeDescriptor#canBeAssignedFrom method ?
return potentialAlternateDescriptor;
}
// if ( isStandardTypeCode( potentialAlternateTypeCode ) ) {
// LOG.debugf( "A standard JDBC type code [%s] was not defined in SqlTypeDescriptorRegistry",
// potentialAlternateTypeCode );
// }
}
}
}
return null;
}
public AggregateJdbcType resolveAggregateDescriptor(
int jdbcTypeCode,
String typeName,
EmbeddableMappingType embeddableMappingType,
RuntimeModelCreationContext context) {
final String registrationKey;
if ( typeName != null ) {
registrationKey = typeName.toLowerCase( Locale.ROOT );
final AggregateJdbcType aggregateJdbcType = aggregateDescriptorMap.get( registrationKey );
if ( aggregateJdbcType != null ) {
if ( aggregateJdbcType.getEmbeddableMappingType() != embeddableMappingType ) {
// We only register a single aggregate descriptor for reading native query results,
// but we still return a special JdbcType per EmbeddableMappingType.
// We do this because EmbeddableMappingType#forEachSelectable uses the SelectableMappings,
// which are prefixed with the aggregateMapping.
// Since the columnExpression is used as key for mutation parameters, this is important.
// We could get rid of this if ColumnValueParameter drops the ColumnReference
return aggregateJdbcType.resolveAggregateJdbcType( embeddableMappingType, typeName, context );
}
else {
return aggregateJdbcType;
}
}
}
else {
registrationKey = null;
}
return resolveAggregateDescriptor( jdbcTypeCode, typeName, embeddableMappingType, context, registrationKey );
}
private AggregateJdbcType resolveAggregateDescriptor(
int jdbcTypeCode,
String typeName,
EmbeddableMappingType embeddableMappingType,
RuntimeModelCreationContext context,
String registrationKey) {
final var descriptor = getDescriptor( jdbcTypeCode );
if ( descriptor instanceof AggregateJdbcType aggregateJdbcType ) {
final AggregateJdbcType resolvedJdbcType =
aggregateJdbcType.resolveAggregateJdbcType( embeddableMappingType, typeName, context );
cacheAggregateJdbcType( registrationKey, resolvedJdbcType );
return resolvedJdbcType;
}
else {
throw new IllegalArgumentException(
String.format(
"Tried to resolve the JdbcType [%s] as AggregateJdbcType but it does not implement that interface!",
descriptor.getClass().getName()
)
);
}
}
private void cacheAggregateJdbcType(String registrationKey, AggregateJdbcType resolvedJdbcType) {
if ( registrationKey != null ) {
aggregateDescriptorMap.put( registrationKey, resolvedJdbcType );
if ( resolvedJdbcType instanceof SqlTypedJdbcType sqlTypedJdbcType ) {
sqlTypedDescriptorMap.put(
sqlTypedJdbcType.getSqlTypeName().toLowerCase( Locale.ROOT ),
sqlTypedJdbcType
);
}
}
}
public AggregateJdbcType findAggregateDescriptor(String typeName) {
return aggregateDescriptorMap.get( typeName.toLowerCase( Locale.ROOT ) );
}
public SqlTypedJdbcType findSqlTypedDescriptor(String sqlTypeName) {
return sqlTypedDescriptorMap.get( sqlTypeName.toLowerCase( Locale.ROOT ) );
}
/**
* Construct a {@link JdbcType} via {@link JdbcTypeConstructor#resolveType(TypeConfiguration, Dialect, BasicType, ColumnTypeInformation)}
* or return a compatible one from this registry.
*/
public JdbcType resolveTypeConstructorDescriptor(
int jdbcTypeConstructorCode,
BasicType<?> elementType,
@Nullable ColumnTypeInformation columnTypeInformation) {
return resolveTypeConstructorDescriptor( jdbcTypeConstructorCode, (Object) elementType, columnTypeInformation );
}
/**
* Construct a {@link JdbcType} via {@link JdbcTypeConstructor#resolveType(TypeConfiguration, Dialect, JdbcType, ColumnTypeInformation)}
* or return a compatible one from this registry.
*/
public JdbcType resolveTypeConstructorDescriptor(
int jdbcTypeConstructorCode,
JdbcType elementType,
@Nullable ColumnTypeInformation columnTypeInformation) {
return resolveTypeConstructorDescriptor( jdbcTypeConstructorCode, (Object) elementType, columnTypeInformation );
}
private JdbcType resolveTypeConstructorDescriptor(
int jdbcTypeConstructorCode,
Object elementType,
@Nullable ColumnTypeInformation columnTypeInformation) {
final TypeConstructedJdbcTypeKey key =
columnTypeInformation == null
? new TypeConstructedJdbcTypeKey( jdbcTypeConstructorCode, elementType )
: new TypeConstructedJdbcTypeKey( jdbcTypeConstructorCode, elementType, columnTypeInformation );
final var descriptor = typeConstructorDescriptorMap.get( key );
if ( descriptor != null ) {
return descriptor;
}
else {
final JdbcTypeConstructor jdbcTypeConstructor = getConstructor( jdbcTypeConstructorCode );
if ( jdbcTypeConstructor != null ) {
final var jdbcType = jdbcElementType( elementType, columnTypeInformation, jdbcTypeConstructor );
final var existingType = typeConstructorDescriptorMap.putIfAbsent( key, jdbcType );
if ( existingType != null ) {
return existingType;
}
else {
if ( jdbcType instanceof SqlTypedJdbcType sqlTypedJdbcType ) {
sqlTypedDescriptorMap.put(
sqlTypedJdbcType.getSqlTypeName().toLowerCase( Locale.ROOT ),
sqlTypedJdbcType
);
}
return jdbcType;
}
}
else {
return getDescriptor( jdbcTypeConstructorCode );
}
}
}
private JdbcType jdbcElementType(
Object elementType,
ColumnTypeInformation columnTypeInformation,
JdbcTypeConstructor jdbcTypeConstructor) {
final Dialect dialect = typeConfiguration.getCurrentBaseSqlTypeIndicators().getDialect();
if ( elementType instanceof BasicType<?> basicType ) {
return jdbcTypeConstructor.resolveType(
typeConfiguration,
dialect,
basicType,
columnTypeInformation
);
}
else {
return jdbcTypeConstructor.resolveType(
typeConfiguration,
dialect,
(JdbcType) elementType,
columnTypeInformation
);
}
}
/**
 * Whether a descriptor is available for the given type code — either registered
 * directly, a standard JDBC code, covered by a known type family, or present as
 * a constructed type in the cache.
 */
public boolean hasRegisteredDescriptor(int jdbcTypeCode) {
	// Check the cheap sources first, preserving the original short-circuit order
	if ( descriptorMap.containsKey( jdbcTypeCode ) || isStandardTypeCode( jdbcTypeCode ) ) {
		return true;
	}
	if ( JdbcTypeFamilyInformation.INSTANCE.locateJdbcTypeFamilyByTypeCode( jdbcTypeCode ) != null ) {
		return true;
	}
	return locateConstructedJdbcType( jdbcTypeCode );
}
/**
 * Whether any cached constructed type was built over the given underlying type code.
 * Linear scan over the cache keys; the key records the underlying {@code typeCode}.
 */
private boolean locateConstructedJdbcType(int jdbcTypeCode) {
	return typeConstructorDescriptorMap.keySet().stream()
			.anyMatch( key -> key.typeCode() == jdbcTypeCode );
}
/**
 * Get the {@link JdbcTypeConstructor} registered under the given type code,
 * or {@code null} if none has been registered (callers null-check the result).
 */
public JdbcTypeConstructor getConstructor(int jdbcTypeCode) {
	return descriptorConstructorMap.get( jdbcTypeCode );
}
/**
 * Register a {@link JdbcTypeConstructor} under an explicit type code,
 * replacing any previous registration for that code.
 */
public void addTypeConstructor(int jdbcTypeCode, JdbcTypeConstructor jdbcTypeConstructor) {
	descriptorConstructorMap.put( jdbcTypeCode, jdbcTypeConstructor );
}

/**
 * Register a {@link JdbcTypeConstructor} under its default SQL type code,
 * replacing any previous registration for that code.
 */
public void addTypeConstructor(JdbcTypeConstructor jdbcTypeConstructor) {
	descriptorConstructorMap.put( jdbcTypeConstructor.getDefaultSqlTypeCode(), jdbcTypeConstructor );
}
/**
 * Register a {@link JdbcTypeConstructor} under an explicit type code
 * only if no constructor is already registered for that code.
 */
public void addTypeConstructorIfAbsent(int jdbcTypeCode, JdbcTypeConstructor jdbcTypeConstructor) {
	descriptorConstructorMap.putIfAbsent( jdbcTypeCode, jdbcTypeConstructor );
}

/**
 * Register a {@link JdbcTypeConstructor} under its default SQL type code
 * only if no constructor is already registered for that code.
 */
public void addTypeConstructorIfAbsent(JdbcTypeConstructor jdbcTypeConstructor) {
	descriptorConstructorMap.putIfAbsent( jdbcTypeConstructor.getDefaultSqlTypeCode(), jdbcTypeConstructor );
}
/**
 * Cache key identifying a constructed {@link JdbcType}: the type-constructor
 * code plus the element type (a {@code JdbcType} or {@code BasicType}) and,
 * when available, the column details the construction depended on.
 */
private record TypeConstructedJdbcTypeKey(
		int typeConstructorTypeCode,
		int typeCode,
		@Nullable Boolean nullable,
		@Nullable String typeName,
		int columnSize,
		int decimalDigits,
		Object jdbcTypeOrBasicType) {
	// Key for a construction performed without column metadata: the column
	// fields take neutral placeholder values (Types.OTHER / null / 0).
	private TypeConstructedJdbcTypeKey(
			int typeConstructorTypeCode,
			Object jdbcTypeOrBasicType) {
		this( typeConstructorTypeCode,
				Types.OTHER,
				null,
				null,
				0,
				0,
				jdbcTypeOrBasicType );
	}
	// Key for a construction that consulted column metadata: every column
	// attribute participates in identity so distinct columns get distinct entries.
	private TypeConstructedJdbcTypeKey(
			int typeConstructorTypeCode,
			Object jdbcTypeOrBasicType,
			ColumnTypeInformation columnTypeInformation) {
		this( typeConstructorTypeCode,
				columnTypeInformation.getTypeCode(),
				columnTypeInformation.getNullable(),
				columnTypeInformation.getTypeName(),
				columnTypeInformation.getColumnSize(),
				columnTypeInformation.getDecimalDigits(),
				jdbcTypeOrBasicType
		);
	}
	// NOTE(review): equals/hashCode are handwritten rather than record-generated —
	// semantically equivalent to the generated forms; presumably kept explicit to
	// control field comparison/hashing order. Confirm before simplifying.
	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( !(o instanceof TypeConstructedJdbcTypeKey that) ) {
			return false;
		}
		// Compare the cheap primitive components first
		return typeConstructorTypeCode == that.typeConstructorTypeCode
			&& typeCode == that.typeCode
			&& columnSize == that.columnSize
			&& decimalDigits == that.decimalDigits
			&& Objects.equals( nullable, that.nullable )
			&& Objects.equals( typeName, that.typeName )
			// assumes jdbcTypeOrBasicType is never null (both constructors
			// receive a resolved element type) — TODO confirm
			&& jdbcTypeOrBasicType.equals( that.jdbcTypeOrBasicType );
	}
	@Override
	public int hashCode() {
		int result = typeConstructorTypeCode;
		result = 31 * result + jdbcTypeOrBasicType.hashCode();
		result = 31 * result + (nullable == null ? 0 : nullable.hashCode());
		result = 31 * result + typeCode;
		result = 31 * result + (typeName == null ? 0 : typeName.hashCode());
		result = 31 * result + columnSize;
		result = 31 * result + decimalDigits;
		return result;
	}
}
}
| JdbcTypeRegistry |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.