language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | independent-projects/arc/processor/src/test/java/io/quarkus/arc/processor/types/Bottom.java | {
"start": 104,
"end": 851
} | class ____ extends Middle {
@Override
public String publicMethod(String param) {
return null;
}
@Override
protected String protectedMethod(String param) {
return null;
}
@Override
String packagePrivateMethod(String param) {
return null;
}
// no override
private String privateMethod(String param) {
return null;
}
// ---
@Override
public String protectedMethodToBecomePublic(String param) {
return null;
}
@Override
protected String packagePrivateMethodToBecomeProtected(String param) {
return null;
}
@Override
public String packagePrivateMethodToBecomePublic(String param) {
return null;
}
}
| Bottom |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/async/AsyncLoggerConfigDisruptor.java | {
"start": 3231,
"end": 4175
} | class ____ {
public Log4jEventWrapper() {}
public Log4jEventWrapper(final MutableLogEvent mutableLogEvent) {
event = mutableLogEvent;
}
private AsyncLoggerConfig loggerConfig;
private LogEvent event;
/**
* Release references held by ring buffer to allow objects to be garbage-collected.
*/
public void clear() {
loggerConfig = null;
if (event instanceof MutableLogEvent) {
((MutableLogEvent) event).clear();
} else {
event = null;
}
}
@Override
public String toString() {
return String.valueOf(event);
}
}
/**
* EventHandler performs the work in a separate thread.
* <p>
* <strong>Warning:</strong> this implementation only works with Disruptor 4.x.
* </p>
*/
private static | Log4jEventWrapper |
java | apache__dubbo | dubbo-maven-plugin/src/main/java/org/apache/dubbo/maven/plugin/aot/AbstractAotMojo.java | {
"start": 1737,
"end": 7455
} | class ____ extends AbstractDependencyFilterMojo {
/**
* The current Maven session. This is used for toolchain manager API calls.
*/
@Parameter(defaultValue = "${session}", readonly = true)
private MavenSession session;
/**
* The toolchain manager to use to locate a custom JDK.
*/
@Component
private ToolchainManager toolchainManager;
/**
* Skip the execution.
*/
@Parameter(property = "dubbo.aot.skip", defaultValue = "false")
private boolean skip;
/**
* List of JVM system properties to pass to the AOT process.
*/
@Parameter
private Map<String, String> systemPropertyVariables;
/**
* JVM arguments that should be associated with the AOT process. On command line, make
* sure to wrap multiple values between quotes.
*/
@Parameter(property = "dubbo.aot.jvmArguments")
private String jvmArguments;
/**
* Arguments that should be provided to the AOT compile process. On command line, make
* sure to wrap multiple values between quotes.
*/
@Parameter(property = "dubbo.aot.compilerArguments")
private String compilerArguments;
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
if (this.skip) {
getLog().debug("Skipping AOT execution as per configuration");
return;
}
try {
executeAot();
} catch (Exception ex) {
throw new MojoExecutionException(ex.getMessage(), ex);
}
}
protected abstract void executeAot() throws Exception;
protected void generateAotAssets(URL[] classPath, String processorClassName, String... arguments) throws Exception {
List<String> command = CommandLineBuilder.forMainClass(processorClassName)
.withSystemProperties(this.systemPropertyVariables)
.withJvmArguments(new RunArguments(this.jvmArguments).asArray()).withClasspath(classPath)
.withArguments(arguments).build();
if (getLog().isDebugEnabled()) {
getLog().debug("Generating AOT assets using command: " + command);
}
JavaProcessExecutor processExecutor = new JavaProcessExecutor(this.session, this.toolchainManager);
getLog().info("dir: " + this.project.getBasedir());
processExecutor.run(this.project.getBasedir(), command, Collections.emptyMap());
}
protected final void compileSourceFiles(URL[] classPath, File sourcesDirectory, File outputDirectory)
throws Exception {
List<File> sourceFiles;
try (Stream<Path> pathStream = Files.walk(sourcesDirectory.toPath())) {
sourceFiles = pathStream.filter(Files::isRegularFile).map(Path::toFile).collect(Collectors.toList());
}
if (sourceFiles.isEmpty()) {
return;
}
JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
try (StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null)) {
JavaCompilerPluginConfiguration compilerConfiguration = new JavaCompilerPluginConfiguration(this.project);
List<String> options = new ArrayList<>();
options.add("-cp");
options.add(CommandLineBuilder.ClasspathBuilder.build(Arrays.asList(classPath)));
options.add("-d");
options.add(outputDirectory.toPath().toAbsolutePath().toString());
String releaseVersion = compilerConfiguration.getReleaseVersion();
if (releaseVersion != null) {
options.add("--release");
options.add(releaseVersion);
}
else {
options.add("--source");
options.add(compilerConfiguration.getSourceMajorVersion());
options.add("--target");
options.add(compilerConfiguration.getTargetMajorVersion());
}
options.addAll(new RunArguments(this.compilerArguments).getArgs());
Iterable<? extends JavaFileObject> compilationUnits = fileManager.getJavaFileObjectsFromFiles(sourceFiles);
Errors errors = new Errors();
JavaCompiler.CompilationTask task = compiler.getTask(null, fileManager, errors, options, null, compilationUnits);
boolean result = task.call();
if (!result || errors.hasReportedErrors()) {
throw new IllegalStateException("Unable to compile generated source" + errors);
}
}
}
protected final List<URL> getClassPath(File[] directories, ArtifactsFilter... artifactFilters)
throws MojoExecutionException {
List<URL> urls = new ArrayList<>();
Arrays.stream(directories).map(this::toURL).forEach(urls::add);
urls.addAll(getDependencyURLs(artifactFilters));
return urls;
}
protected final void copyAll(Path from, Path to) throws IOException {
if (!Files.exists(from)) {
return;
}
List<Path> files;
try (Stream<Path> pathStream = Files.walk(from)) {
files = pathStream.filter(Files::isRegularFile).collect(Collectors.toList());
}
for (Path file : files) {
String relativeFileName = file.subpath(from.getNameCount(), file.getNameCount()).toString();
getLog().debug("Copying '" + relativeFileName + "' to " + to);
Path target = to.resolve(relativeFileName);
Files.createDirectories(target.getParent());
Files.copy(file, target, StandardCopyOption.REPLACE_EXISTING);
}
}
/**
* {@link DiagnosticListener} used to collect errors.
*/
protected static | AbstractAotMojo |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/internal/security/ReferenceCountingMapTest.java | {
"start": 1333,
"end": 5183
} | class ____ {
@Rule public final MockitoRule mockitoRule = MockitoJUnit.rule();
@Mock
ValueFactory<Integer, SslContextProvider> mockFactory;
ReferenceCountingMap<Integer, SslContextProvider> map;
@Before
public void setUp() {
map = new ReferenceCountingMap<>(mockFactory);
}
@Test
public void referenceCountingMap_getAndRelease_closeCalled() throws InterruptedException {
SslContextProvider valueFor3 = getTypedMock();
when(mockFactory.create(3)).thenReturn(valueFor3);
SslContextProvider val = map.get(3);
assertThat(val).isSameInstanceAs(valueFor3);
verify(valueFor3, never()).close();
val = map.get(3);
assertThat(val).isSameInstanceAs(valueFor3);
// at this point ref-count is 2
assertThat(map.release(3, val)).isNull();
verify(valueFor3, never()).close();
assertThat(map.release(3, val)).isNull(); // after this ref-count is 0
verify(valueFor3, times(1)).close();
}
private static SslContextProvider getTypedMock() {
return mock(SslContextProvider.class);
}
@Test
public void referenceCountingMap_distinctElements() throws InterruptedException {
SslContextProvider valueFor3 = getTypedMock();
SslContextProvider valueFor4 = getTypedMock();
when(mockFactory.create(3)).thenReturn(valueFor3);
when(mockFactory.create(4)).thenReturn(valueFor4);
SslContextProvider val3 = map.get(3);
assertThat(val3).isSameInstanceAs(valueFor3);
SslContextProvider val4 = map.get(4);
assertThat(val4).isSameInstanceAs(valueFor4);
assertThat(map.release(3, val3)).isNull();
verify(valueFor3, times(1)).close();
verify(valueFor4, never()).close();
assertThat(map.release(4, val4)).isNull();
verify(valueFor4, times(1)).close();
}
@Test
public void referenceCountingMap_releaseWrongElement_expectException()
throws InterruptedException {
SslContextProvider valueFor3 = getTypedMock();
SslContextProvider valueFor4 = getTypedMock();
when(mockFactory.create(3)).thenReturn(valueFor3);
when(mockFactory.create(4)).thenReturn(valueFor4);
SslContextProvider unused = map.get(3);
SslContextProvider val4 = map.get(4);
// now provide wrong key (3) and value (val4) combination
try {
map.release(3, val4);
fail("exception expected");
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().contains("Releasing the wrong instance");
}
}
@Test
public void referenceCountingMap_excessRelease_expectException() throws InterruptedException {
SslContextProvider valueFor4 = getTypedMock();
when(mockFactory.create(4)).thenReturn(valueFor4);
SslContextProvider val = map.get(4);
assertThat(val).isSameInstanceAs(valueFor4);
// at this point ref-count is 1
map.release(4, val);
// at this point ref-count is 0
try {
map.release(4, val);
fail("exception expected");
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().contains("No cached instance found for 4");
}
}
@Test
public void referenceCountingMap_releaseAndGet_differentInstance() throws InterruptedException {
SslContextProvider valueFor4 = getTypedMock();
when(mockFactory.create(4)).thenReturn(valueFor4);
SslContextProvider val = map.get(4);
assertThat(val).isSameInstanceAs(valueFor4);
// at this point ref-count is 1
map.release(4, val);
// at this point ref-count is 0 and val is removed
// should get another instance for 4
SslContextProvider valueFor4a = getTypedMock();
when(mockFactory.create(4)).thenReturn(valueFor4a);
val = map.get(4);
assertThat(val).isSameInstanceAs(valueFor4a);
// verify it is a different instance from before
assertThat(valueFor4).isNotSameInstanceAs(valueFor4a);
}
}
| ReferenceCountingMapTest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/client/AmazonBedrockExecuteOnlyRequestSender.java | {
"start": 1526,
"end": 5034
} | class ____ implements RequestSender {
protected final AmazonBedrockClientCache clientCache;
private final ThrottlerManager throttleManager;
public AmazonBedrockExecuteOnlyRequestSender(AmazonBedrockClientCache clientCache, ThrottlerManager throttlerManager) {
this.clientCache = Objects.requireNonNull(clientCache);
this.throttleManager = Objects.requireNonNull(throttlerManager);
}
@Override
public void send(
Logger logger,
Request request,
Supplier<Boolean> hasRequestTimedOutFunction,
ResponseHandler responseHandler,
ActionListener<InferenceServiceResults> listener
) {
if (request instanceof AmazonBedrockRequest awsRequest && responseHandler instanceof AmazonBedrockResponseHandler awsResponse) {
try {
var executor = createExecutor(awsRequest, awsResponse, logger, hasRequestTimedOutFunction, listener);
// the run method will call the listener to return the proper value
executor.run();
return;
} catch (Exception e) {
logException(logger, request, e);
listener.onFailure(wrapWithElasticsearchException(e, request.getInferenceEntityId()));
}
}
listener.onFailure(new ElasticsearchException("Amazon Bedrock request was not the correct type"));
}
// allow this to be overridden for testing
protected AmazonBedrockExecutor createExecutor(
AmazonBedrockRequest awsRequest,
AmazonBedrockResponseHandler awsResponse,
Logger logger,
Supplier<Boolean> hasRequestTimedOutFunction,
ActionListener<InferenceServiceResults> listener
) {
switch (awsRequest.taskType()) {
case COMPLETION -> {
return new AmazonBedrockChatCompletionExecutor(
(AmazonBedrockChatCompletionRequest) awsRequest,
awsResponse,
logger,
hasRequestTimedOutFunction,
listener,
clientCache
);
}
case TEXT_EMBEDDING -> {
return new AmazonBedrockEmbeddingsExecutor(
(AmazonBedrockEmbeddingsRequest) awsRequest,
awsResponse,
logger,
hasRequestTimedOutFunction,
listener,
clientCache
);
}
default -> {
throw new UnsupportedOperationException("Unsupported task type [" + awsRequest.taskType() + "] for Amazon Bedrock request");
}
}
}
private void logException(Logger logger, Request request, Exception exception) {
var causeException = ExceptionsHelper.unwrapCause(exception);
throttleManager.warn(
logger,
format("Failed while sending request from inference entity id [%s] of type [amazonbedrock]", request.getInferenceEntityId()),
causeException
);
}
private Exception wrapWithElasticsearchException(Exception e, String inferenceEntityId) {
return new ElasticsearchException(
format("Amazon Bedrock client failed to send request from inference entity id [%s]", inferenceEntityId),
e
);
}
public void shutdown() throws IOException {
this.clientCache.close();
}
}
| AmazonBedrockExecuteOnlyRequestSender |
java | quarkusio__quarkus | extensions/hibernate-search-standalone-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/standalone/elasticsearch/test/configuration/IndexedEntityInNamedBackend.java | {
"start": 496,
"end": 712
} | class ____ {
@DocumentId
public Long id;
@FullTextField
public String name;
public IndexedEntityInNamedBackend(long id, String name) {
this.name = name;
}
}
| IndexedEntityInNamedBackend |
java | quarkusio__quarkus | integration-tests/jpa-postgresql/src/test/java/io/quarkus/it/jpa/postgresql/JPAReflectionInGraalITCase.java | {
"start": 337,
"end": 554
} | class ____ {
@Test
public void testFieldAndGetterReflectionOnEntityFromServlet() throws Exception {
RestAssured.when().get("/jpa/testreflection").then().body(is("OK"));
}
}
| JPAReflectionInGraalITCase |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/UnresolvedId.java | {
"start": 272,
"end": 1005
} | class ____ {
private final Object _id;
private final TokenStreamLocation _location;
private final Class<?> _type;
public UnresolvedId(Object id, Class<?> type, TokenStreamLocation where) {
_id = id;
_type = type;
_location = where;
}
/**
* The id which is unresolved.
*/
public Object getId() { return _id; }
/**
* The type of object which was expected.
*/
public Class<?> getType() { return _type; }
public TokenStreamLocation getLocation() { return _location; }
@Override
public String toString() {
return String.format("Object id [%s] (for %s) at %s", _id,
ClassUtil.nameOf(_type), _location);
}
}
| UnresolvedId |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/views/ViewsWithCreatorTest.java | {
"start": 410,
"end": 717
} | class ____ extends View { }
@JsonAutoDetect(
fieldVisibility = JsonAutoDetect.Visibility.PUBLIC_ONLY,
isGetterVisibility = JsonAutoDetect.Visibility.NONE,
getterVisibility = JsonAutoDetect.Visibility.NONE)
@JsonIgnoreProperties(ignoreUnknown = true)
static | View2 |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/support/PathMatchingResourcePatternResolver.java | {
"start": 18124,
"end": 18333
} | class ____ resources with the given path via the configured
* {@link #getClassLoader() ClassLoader}.
* <p>Called by {@link #findAllClassPathResources(String)}.
* @param path the absolute path within the | path |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java | {
"start": 1778,
"end": 9598
} | enum ____ {
/**
* No headers/trailers/checksums.
*/
NO_HEADER (-15),
/**
* Default headers/trailers/checksums.
*/
DEFAULT_HEADER (15),
/**
* Simple gzip headers/trailers.
*/
GZIP_FORMAT (31),
/**
* Autodetect gzip/zlib headers/trailers.
*/
AUTODETECT_GZIP_ZLIB (47);
private final int windowBits;
CompressionHeader(int windowBits) {
this.windowBits = windowBits;
}
public int windowBits() {
return windowBits;
}
}
private static boolean nativeZlibLoaded = false;
static {
if (NativeCodeLoader.isNativeCodeLoaded()) {
try {
// Initialize the native library
initIDs();
nativeZlibLoaded = true;
} catch (Throwable t) {
// Ignore failure to load/initialize native-zlib
}
}
}
static boolean isNativeZlibLoaded() {
return nativeZlibLoaded;
}
/**
* Creates a new decompressor.
* @param header header.
* @param directBufferSize directBufferSize.
*/
public ZlibDecompressor(CompressionHeader header, int directBufferSize) {
this.header = header;
this.directBufferSize = directBufferSize;
compressedDirectBuf = ByteBuffer.allocateDirect(directBufferSize);
uncompressedDirectBuf = ByteBuffer.allocateDirect(directBufferSize);
uncompressedDirectBuf.position(directBufferSize);
stream = init(this.header.windowBits());
}
public ZlibDecompressor() {
this(CompressionHeader.DEFAULT_HEADER, DEFAULT_DIRECT_BUFFER_SIZE);
}
@Override
public void setInput(byte[] b, int off, int len) {
if (b == null) {
throw new NullPointerException();
}
if (off < 0 || len < 0 || off > b.length - len) {
throw new ArrayIndexOutOfBoundsException();
}
this.userBuf = b;
this.userBufOff = off;
this.userBufLen = len;
setInputFromSavedData();
// Reinitialize zlib's output direct buffer
uncompressedDirectBuf.limit(directBufferSize);
uncompressedDirectBuf.position(directBufferSize);
}
void setInputFromSavedData() {
compressedDirectBufOff = 0;
compressedDirectBufLen = userBufLen;
if (compressedDirectBufLen > directBufferSize) {
compressedDirectBufLen = directBufferSize;
}
// Reinitialize zlib's input direct buffer
compressedDirectBuf.rewind();
((ByteBuffer)compressedDirectBuf).put(userBuf, userBufOff,
compressedDirectBufLen);
// Note how much data is being fed to zlib
userBufOff += compressedDirectBufLen;
userBufLen -= compressedDirectBufLen;
}
@Override
public void setDictionary(byte[] b, int off, int len) {
if (stream == 0 || b == null) {
throw new NullPointerException();
}
if (off < 0 || len < 0 || off > b.length - len) {
throw new ArrayIndexOutOfBoundsException();
}
setDictionary(stream, b, off, len);
needDict = false;
}
@Override
public boolean needsInput() {
// Consume remaining compressed data?
if (uncompressedDirectBuf.remaining() > 0) {
return false;
}
// Check if zlib has consumed all input
if (compressedDirectBufLen <= 0) {
// Check if we have consumed all user-input
if (userBufLen <= 0) {
return true;
} else {
setInputFromSavedData();
}
}
return false;
}
@Override
public boolean needsDictionary() {
return needDict;
}
@Override
public boolean finished() {
// Check if 'zlib' says it's 'finished' and
// all compressed data has been consumed
return (finished && uncompressedDirectBuf.remaining() == 0);
}
@Override
public int decompress(byte[] b, int off, int len)
throws IOException {
if (b == null) {
throw new NullPointerException();
}
if (off < 0 || len < 0 || off > b.length - len) {
throw new ArrayIndexOutOfBoundsException();
}
int n = 0;
// Check if there is uncompressed data
n = uncompressedDirectBuf.remaining();
if (n > 0) {
n = Math.min(n, len);
((ByteBuffer)uncompressedDirectBuf).get(b, off, n);
return n;
}
// Re-initialize the zlib's output direct buffer
uncompressedDirectBuf.rewind();
uncompressedDirectBuf.limit(directBufferSize);
// Decompress data
n = inflateBytesDirect();
uncompressedDirectBuf.limit(n);
// Get at most 'len' bytes
n = Math.min(n, len);
((ByteBuffer)uncompressedDirectBuf).get(b, off, n);
return n;
}
/**
* Returns the total number of uncompressed bytes output so far.
*
* @return the total (non-negative) number of uncompressed bytes output so far
*/
public long getBytesWritten() {
checkStream();
return getBytesWritten(stream);
}
/**
* Returns the total number of compressed bytes input so far.
*
* @return the total (non-negative) number of compressed bytes input so far
*/
public long getBytesRead() {
checkStream();
return getBytesRead(stream);
}
/**
* Returns the number of bytes remaining in the input buffers; normally
* called when finished() is true to determine amount of post-gzip-stream
* data.
*
* @return the total (non-negative) number of unprocessed bytes in input
*/
@Override
public int getRemaining() {
checkStream();
return userBufLen + getRemaining(stream); // userBuf + compressedDirectBuf
}
/**
* Resets everything including the input buffers (user and direct).
*/
@Override
public void reset() {
checkStream();
reset(stream);
finished = false;
needDict = false;
compressedDirectBufOff = compressedDirectBufLen = 0;
uncompressedDirectBuf.limit(directBufferSize);
uncompressedDirectBuf.position(directBufferSize);
userBufOff = userBufLen = 0;
}
@Override
public void end() {
if (stream != 0) {
end(stream);
stream = 0;
}
}
@Override
protected void finalize() {
end();
}
private void checkStream() {
if (stream == 0)
throw new NullPointerException();
}
private native static void initIDs();
private native static long init(int windowBits);
private native static void setDictionary(long strm, byte[] b, int off,
int len);
private native int inflateBytesDirect();
private native static long getBytesRead(long strm);
private native static long getBytesWritten(long strm);
private native static int getRemaining(long strm);
private native static void reset(long strm);
private native static void end(long strm);
int inflateDirect(ByteBuffer src, ByteBuffer dst) throws IOException {
assert (this instanceof ZlibDirectDecompressor);
ByteBuffer presliced = dst;
if (dst.position() > 0) {
presliced = dst;
dst = dst.slice();
}
Buffer originalCompressed = compressedDirectBuf;
Buffer originalUncompressed = uncompressedDirectBuf;
int originalBufferSize = directBufferSize;
compressedDirectBuf = src;
compressedDirectBufOff = src.position();
compressedDirectBufLen = src.remaining();
uncompressedDirectBuf = dst;
directBufferSize = dst.remaining();
int n = 0;
try {
n = inflateBytesDirect();
presliced.position(presliced.position() + n);
if (compressedDirectBufLen > 0) {
src.position(compressedDirectBufOff);
} else {
src.position(src.limit());
}
} finally {
compressedDirectBuf = originalCompressed;
uncompressedDirectBuf = originalUncompressed;
compressedDirectBufOff = 0;
compressedDirectBufLen = 0;
directBufferSize = originalBufferSize;
}
return n;
}
public static | CompressionHeader |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/support/opds/udf/SqlFormat.java | {
"start": 336,
"end": 1182
} | class ____ extends UDF {
public String evaluate(String sql) {
return evaluate(sql, null, false);
}
public String evaluate(String sql, String dbTypeName) {
return evaluate(sql, dbTypeName, false);
}
public String evaluate(String sql, String dbTypeName, boolean throwError) {
DbType dbType = dbTypeName == null ? null : DbType.valueOf(dbTypeName);
try {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statementList = parser.parseStatementList();
return SQLUtils.toSQLString(statementList, dbType);
} catch (Exception ex) {
if (throwError) {
throw new IllegalArgumentException("error sql : \n" + sql, ex);
}
return sql;
}
}
}
| SqlFormat |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesProcessor.java | {
"start": 2563,
"end": 15159
} | class ____ {
private static final Logger log = Logger.getLogger(KubernetesProcessor.class);
private static final String COMMON = "common";
@BuildStep
FeatureBuildItem produceFeature() {
return new FeatureBuildItem(Feature.KUBERNETES);
}
@BuildStep
public EnabledKubernetesDeploymentTargetsBuildItem enabledKubernetesDeploymentTargets(
List<KubernetesDeploymentTargetBuildItem> allDeploymentTargets) {
List<KubernetesDeploymentTargetBuildItem> mergedDeploymentTargets = mergeList(allDeploymentTargets);
Collections.sort(mergedDeploymentTargets);
List<DeploymentTargetEntry> entries = new ArrayList<>(mergedDeploymentTargets.size());
for (KubernetesDeploymentTargetBuildItem deploymentTarget : mergedDeploymentTargets) {
if (deploymentTarget.isEnabled()) {
DeploymentResourceKind deploymentResourceKind = DeploymentResourceKind.find(deploymentTarget.getGroup(),
deploymentTarget.getVersion(), deploymentTarget.getKind());
entries.add(new DeploymentTargetEntry(deploymentTarget.getName(), deploymentResourceKind,
deploymentTarget.getPriority(), deploymentTarget.getDeployStrategy()));
}
}
return new EnabledKubernetesDeploymentTargetsBuildItem(entries);
}
@BuildStep
public void preventContainerPush(ContainerImageConfig containerImageConfig,
BuildProducer<PreventImplicitContainerImagePushBuildItem> producer) {
if (containerImageConfig.isPushExplicitlyDisabled()) {
producer.produce(new PreventImplicitContainerImagePushBuildItem());
}
}
@BuildStep(onlyIfNot = IsTest.class)
public void build(ApplicationInfoBuildItem applicationInfo,
OutputTargetBuildItem outputTarget,
PackageConfig packageConfig,
KubernetesConfig kubernetesConfig,
OpenShiftConfig openshiftConfig,
KnativeConfig knativeConfig,
Capabilities capabilities,
LaunchModeBuildItem launchMode,
List<KubernetesPortBuildItem> kubernetesPorts,
EnabledKubernetesDeploymentTargetsBuildItem kubernetesDeploymentTargets,
List<ConfiguratorBuildItem> configurators,
List<ConfigurationSupplierBuildItem> configurationSuppliers,
List<DecoratorBuildItem> decorators,
BuildProducer<DekorateOutputBuildItem> dekorateSessionProducer,
Optional<CustomProjectRootBuildItem> customProjectRoot,
Optional<CustomKubernetesOutputDirBuildItem> customOutputDir,
BuildProducer<GeneratedFileSystemResourceBuildItem> generatedResourceProducer,
BuildProducer<GeneratedKubernetesResourceBuildItem> generatedKubernetesResourceProducer,
BuildProducer<KubernetesOutputDirectoryBuildItem> outputDirectoryBuildItemBuildProducer) {
List<ConfiguratorBuildItem> allConfigurators = new ArrayList<>(configurators);
List<ConfigurationSupplierBuildItem> allConfigurationSuppliers = new ArrayList<>(configurationSuppliers);
List<DecoratorBuildItem> allDecorators = new ArrayList<>(decorators);
final Path root;
try {
root = Files.createTempDirectory("quarkus-kubernetes");
} catch (IOException e) {
throw new RuntimeException("Unable to setup environment for generating Kubernetes resources", e);
}
Map<String, Object> config = KubernetesConfigUtil.toMap(kubernetesConfig, openshiftConfig, knativeConfig);
Set<String> deploymentTargets = kubernetesDeploymentTargets.getEntriesSortedByPriority().stream()
.map(DeploymentTargetEntry::getName)
.collect(Collectors.toSet());
Path artifactPath = getRunner(outputTarget, packageConfig);
try {
// by passing false to SimpleFileWriter, we ensure that no files are actually written during this phase
Optional<Project> optionalProject = KubernetesCommonHelper.createProject(applicationInfo, customProjectRoot,
artifactPath);
optionalProject.ifPresent(project -> {
Set<String> targets = new HashSet<>();
targets.add(COMMON);
targets.addAll(deploymentTargets);
final Map<String, String> generatedResourcesMap;
final SessionWriter sessionWriter = new QuarkusFileWriter(project);
final SessionReader sessionReader = new SimpleFileReader(
project.getRoot().resolve("src").resolve("main").resolve("kubernetes"), targets);
sessionWriter.setProject(project);
if (!launchMode.getLaunchMode().isProduction()) {
// needed for a fresh run
Session.clearSession();
}
final Session session = Session.getSession(new NoopLogger());
session.setWriter(sessionWriter);
session.setReader(sessionReader);
session.addPropertyConfiguration(Maps.fromProperties(config));
//We need to verify to filter out anything that doesn't extend the Configurator class.
//The ConfiguratorBuildItem is a wrapper to Object.
for (ConfiguratorBuildItem configuratorBuildItem : allConfigurators) {
session.getConfigurationRegistry().add((Configurator) configuratorBuildItem.getConfigurator());
}
//We need to verify to filter out anything that doesn't extend the ConfigurationSupplier class.
//The ConfigurationSupplierBuildItem is a wrapper to Object.
for (ConfigurationSupplierBuildItem configurationSupplierBuildItem : allConfigurationSuppliers) {
session.getConfigurationRegistry()
.add((ConfigurationSupplier) configurationSupplierBuildItem.getConfigurationSupplier());
}
//We need to verify to filter out anything that doesn't extend the Decorator class.
//The DecoratorBuildItem is a wrapper to Object.
allDecorators.stream().filter(d -> d.matches(Decorator.class)).forEach(i -> {
String group = i.getGroup();
Decorator decorator = (Decorator) i.getDecorator();
if (Strings.isNullOrEmpty(group)) {
session.getResourceRegistry().decorate(decorator);
} else {
session.getResourceRegistry().decorate(group, decorator);
}
});
//The targetDirectory should be the custom if provided, oterwise the 'default' output directory.
//I this case 'default' means that one that we used until now (up until we introduced the ability to override).
Path targetDirectory = customOutputDir
.map(c -> c.getOutputDir())
.map(d -> d.isAbsolute() ? d : project.getRoot().resolve(d))
.orElseGet(() -> getEffectiveOutputDirectory(kubernetesConfig, project.getRoot(),
outputTarget.getOutputDirectory()));
outputDirectoryBuildItemBuildProducer.produce(new KubernetesOutputDirectoryBuildItem(targetDirectory));
// write the generated resources to the filesystem
generatedResourcesMap = session.close();
List<String> generatedFiles = new ArrayList<>(generatedResourcesMap.size());
List<String> generatedFileNames = new ArrayList<>(generatedResourcesMap.size());
for (Map.Entry<String, String> resourceEntry : generatedResourcesMap.entrySet()) {
Path path = Paths.get(resourceEntry.getKey());
//We need to ignore the config yml
if (!path.toFile().getParentFile().getName().equals("dekorate")) {
continue;
}
String fileName = path.toFile().getName();
Path targetPath = targetDirectory.resolve(fileName);
String relativePath = targetPath.toAbsolutePath().toString().replace(root.toAbsolutePath().toString(), "");
generatedKubernetesResourceProducer.produce(new GeneratedKubernetesResourceBuildItem(fileName,
resourceEntry.getValue().getBytes(StandardCharsets.UTF_8)));
if (fileName.endsWith(".yml") || fileName.endsWith(".json")) {
String target = fileName.substring(0, fileName.lastIndexOf("."));
if (!deploymentTargets.contains(target)) {
continue;
}
}
generatedFileNames.add(fileName);
generatedFiles.add(relativePath);
generatedResourceProducer.produce(
new GeneratedFileSystemResourceBuildItem(
// we need to make sure we are only passing the relative path to the build item
relativePath,
resourceEntry.getValue().getBytes(StandardCharsets.UTF_8)));
}
dekorateSessionProducer.produce(new DekorateOutputBuildItem(project, session, generatedFiles));
if (!generatedFileNames.isEmpty()) {
log.debugf("Generated the Kubernetes manifests: '%s' in '%s'", String.join(",", generatedFileNames),
outputTarget.getOutputDirectory() + File.separator + KUBERNETES);
}
try {
if (root != null && root.toFile().exists()) {
FileUtil.deleteDirectory(root);
}
} catch (IOException e) {
log.debug("Unable to delete temporary directory " + root, e);
}
});
if (!optionalProject.isPresent()) {
log.warn("No project was detected, skipping generation of kubernetes manifests!");
}
} catch (Exception e) {
if (launchMode.getLaunchMode().isProduction()) {
throw e;
}
log.warn("Failed to generate Kubernetes resources", e);
}
}
/**
* This method is based on the logic in {@link io.quarkus.deployment.pkg.steps.JarResultBuildStep#buildRunnerJar}.
* Note that we cannot consume the {@link io.quarkus.deployment.pkg.builditem.JarBuildItem} because it causes build cycle
* exceptions since we need to support adding generated resources into the JAR file (see
* https://github.com/quarkusio/quarkus/pull/20113).
*/
private Path getRunner(OutputTargetBuildItem outputTarget,
PackageConfig packageConfig) {
PackageConfig.JarConfig.JarType jarType = packageConfig.jar().type();
return switch (jarType) {
case LEGACY_JAR, UBER_JAR -> outputTarget.getOutputDirectory()
.resolve(outputTarget.getBaseName() + packageConfig.computedRunnerSuffix() + ".jar");
case FAST_JAR, MUTABLE_JAR -> {
//thin JAR
Path buildDir;
if (packageConfig.outputDirectory().isPresent()) {
buildDir = outputTarget.getOutputDirectory();
} else {
buildDir = outputTarget.getOutputDirectory().resolve(DEFAULT_FAST_JAR_DIRECTORY_NAME);
}
yield buildDir.resolve(QUARKUS_RUN_JAR);
}
};
}
/**
* Resolve the effective output directory where to generate the Kubernetes manifests.
* If the `quarkus.kubernetes.output-directory` property is not provided, then the default project output directory will be
* used.
*
* @param config The Kubernetes configuration.
* @param projectLocation The project location.
* @param projectOutputDirectory The project output target.
* @return the effective output directory.
*/
private Path getEffectiveOutputDirectory(KubernetesConfig config, Path projectLocation, Path projectOutputDirectory) {
return config.outputDirectory().map(d -> projectLocation.resolve(d))
.orElse(projectOutputDirectory.resolve(KUBERNETES));
}
}
| KubernetesProcessor |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/Finished.java | {
"start": 2365,
"end": 2729
} | interface ____ {
/**
* Callback which is called when the execution reaches the {@link Finished} state.
*
* @param archivedExecutionGraph archivedExecutionGraph represents the final state of the
* job execution
*/
void onFinished(ArchivedExecutionGraph archivedExecutionGraph);
}
static | Context |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/listener/AbstractMessageListenerContainer.java | {
"start": 39953,
"end": 40353
} | class ____ {
private static final JmsProcessObservationConvention DEFAULT_CONVENTION = new DefaultJmsProcessObservationConvention();
static Observation create(ObservationRegistry registry, Message message) {
return JmsObservationDocumentation.JMS_MESSAGE_PROCESS
.observation(null, DEFAULT_CONVENTION, () -> new JmsProcessObservationContext(message), registry);
}
}
}
| ObservationFactory |
java | apache__maven | impl/maven-cli/src/main/java/org/apache/maven/cling/logging/Slf4jConfigurationFactory.java | {
"start": 1459,
"end": 3128
} | class ____ {
public static final String RESOURCE = "META-INF/maven/slf4j-configuration.properties";
public static Slf4jConfiguration getConfiguration(ILoggerFactory loggerFactory) {
String slf4jBinding = loggerFactory.getClass().getCanonicalName();
try {
Enumeration<URL> resources =
Slf4jConfigurationFactory.class.getClassLoader().getResources(RESOURCE);
while (resources.hasMoreElements()) {
URL resource = resources.nextElement();
try {
InputStream is = resource.openStream();
final Properties properties = new Properties();
if (is != null) {
try (InputStream in = is) {
properties.load(in);
}
}
String impl = properties.getProperty(slf4jBinding);
if (impl != null) {
return (Slf4jConfiguration)
Class.forName(impl).getDeclaredConstructor().newInstance();
}
} catch (IOException
| ClassNotFoundException
| NoSuchMethodException
| InvocationTargetException
| IllegalAccessException
| InstantiationException ex) {
// ignore and move on to the next
}
}
} catch (IOException ex) {
// ignore
}
return new UnsupportedSlf4jBindingConfiguration();
}
}
| Slf4jConfigurationFactory |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/config/xml/XmlLoggerPropsTest.java | {
"start": 1515,
"end": 3438
} | class ____ {
@BeforeAll
static void setupClass() {
System.setProperty("test", "test");
}
@AfterAll
static void tearDownClass() {
System.clearProperty("test");
}
@Test
@LoggerContextSource("log4j-loggerprops.xml")
void testWithProps(final LoggerContext context, @Named("List") final ListAppender listAppender) {
assertThat(context.getConfiguration(), is(instanceOf(XmlConfiguration.class)));
context.getLogger(getClass()).debug("Test with props");
context.getLogger("tiny.bubbles").debug("Test on root");
final List<String> events = listAppender.getMessages();
listAppender.clear();
assertThat(events, hasSize(2));
assertThat(
events.get(0),
allOf(
containsString("user="),
containsString("phrasex=****"),
containsString("test=test"),
containsString("test2=test2default"),
containsString("test3=Unknown"),
containsString("test4=test"),
containsString("test5=test"),
containsString("attribKey=attribValue"),
containsString("duplicateKey=nodeValue")));
assertThat(
events.get(1),
allOf(
containsString("user="),
containsString("phrasex=****"),
containsString("test=test"),
containsString("test2=test2default"),
containsString("test3=Unknown"),
containsString("test4=test"),
containsString("test5=test"),
containsString("attribKey=attribValue"),
containsString("duplicateKey=nodeValue")));
}
}
| XmlLoggerPropsTest |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/ForStListState.java | {
"start": 2489,
"end": 12501
} | class ____<K, N, V> extends AbstractListState<K, N, V>
implements ListState<V>, ForStInnerTable<K, N, List<V>> {
/** The column family which this internal value state belongs to. */
private final ColumnFamilyHandle columnFamilyHandle;
/** The serialized key builder which should be thread-safe. */
private final ThreadLocal<SerializedCompositeKeyBuilder<K>> serializedKeyBuilder;
/** The default namespace if not set. * */
private final N defaultNamespace;
private final ThreadLocal<TypeSerializer<N>> namespaceSerializer;
/** The data outputStream used for value serializer, which should be thread-safe. */
private final ThreadLocal<DataOutputSerializer> valueSerializerView;
/** The data inputStream used for value deserializer, which should be thread-safe. */
private final ThreadLocal<DataInputDeserializer> valueDeserializerView;
/** Whether to enable the reuse of serialized key(and namespace). */
private final boolean enableKeyReuse;
public ForStListState(
StateRequestHandler stateRequestHandler,
ColumnFamilyHandle columnFamily,
TypeSerializer<V> valueSerializer,
Supplier<SerializedCompositeKeyBuilder<K>> serializedKeyBuilderInitializer,
N defaultNamespace,
Supplier<TypeSerializer<N>> namespaceSerializerInitializer,
Supplier<DataOutputSerializer> valueSerializerViewInitializer,
Supplier<DataInputDeserializer> valueDeserializerViewInitializer) {
super(stateRequestHandler, valueSerializer);
this.columnFamilyHandle = columnFamily;
this.serializedKeyBuilder = ThreadLocal.withInitial(serializedKeyBuilderInitializer);
this.defaultNamespace = defaultNamespace;
this.namespaceSerializer = ThreadLocal.withInitial(namespaceSerializerInitializer);
this.valueSerializerView = ThreadLocal.withInitial(valueSerializerViewInitializer);
this.valueDeserializerView = ThreadLocal.withInitial(valueDeserializerViewInitializer);
// We only enable key reuse for the most common namespace across all states.
this.enableKeyReuse =
(defaultNamespace instanceof VoidNamespace)
&& (namespaceSerializerInitializer.get()
instanceof VoidNamespaceSerializer);
}
@Override
public ColumnFamilyHandle getColumnFamilyHandle() {
return columnFamilyHandle;
}
@Override
public byte[] serializeKey(ContextKey<K, N> contextKey) throws IOException {
return ForStSerializerUtils.serializeKeyAndNamespace(
contextKey,
serializedKeyBuilder.get(),
defaultNamespace,
namespaceSerializer.get(),
enableKeyReuse);
}
@Override
public byte[] serializeValue(List<V> valueList) throws IOException {
DataOutputSerializer outputView = valueSerializerView.get();
outputView.clear();
return ListDelimitedSerializer.serializeList(valueList, getValueSerializer(), outputView);
}
@Override
public List<V> deserializeValue(byte[] valueBytes) throws IOException {
DataInputDeserializer inputView = valueDeserializerView.get();
inputView.setBuffer(valueBytes);
return ListDelimitedSerializer.deserializeList(valueBytes, getValueSerializer(), inputView);
}
@SuppressWarnings("unchecked")
@Override
public ForStDBGetRequest<K, N, List<V>, ?> buildDBGetRequest(
StateRequest<?, ?, ?, ?> stateRequest) {
ContextKey<K, N> contextKey =
new ContextKey<>(
(RecordContext<K>) stateRequest.getRecordContext(),
(N) stateRequest.getNamespace());
switch (stateRequest.getRequestType()) {
case LIST_GET:
return new ForStDBListGetRequest<>(
contextKey,
this,
(InternalAsyncFuture<StateIterator<V>>) stateRequest.getFuture());
case CUSTOMIZED:
// must be LIST_GET_RAW
return new ForStDBRawGetRequest<>(
contextKey, this, (InternalAsyncFuture<byte[]>) stateRequest.getFuture());
default:
throw new UnsupportedOperationException();
}
}
@SuppressWarnings("unchecked")
@Override
public ForStDBPutRequest<K, N, List<V>> buildDBPutRequest(
StateRequest<?, ?, ?, ?> stateRequest) {
ContextKey<K, N> contextKey =
new ContextKey<>(
(RecordContext<K>) stateRequest.getRecordContext(),
(N) stateRequest.getNamespace());
List<V> value;
boolean merge = false;
switch (stateRequest.getRequestType()) {
case CLEAR:
value = null;
// "Delete(key)" is equivalent to "Put(key, null)"
break;
case LIST_UPDATE:
value = (List<V>) stateRequest.getPayload();
break;
case LIST_ADD:
value = Collections.singletonList((V) stateRequest.getPayload());
merge = true;
break;
case LIST_ADD_ALL:
value = (List<V>) stateRequest.getPayload();
merge = true;
break;
case CUSTOMIZED:
// must be LIST_ADD_ALL_RAW
return new ForStDBMultiRawMergePutRequest<>(
contextKey,
((Tuple2<ForStStateRequestType, List<byte[]>>) stateRequest.getPayload())
.f1,
this,
(InternalAsyncFuture<Void>) stateRequest.getFuture());
default:
throw new IllegalArgumentException();
}
if (merge) {
return ForStDBPutRequest.ofMerge(
contextKey, value, this, (InternalAsyncFuture<Void>) stateRequest.getFuture());
} else {
return ForStDBPutRequest.of(
contextKey, value, this, (InternalAsyncFuture<Void>) stateRequest.getFuture());
}
}
@Override
public StateFuture<Void> asyncMergeNamespaces(N target, Collection<N> sources) {
if (sources == null || sources.isEmpty()) {
return StateFutureUtils.completedVoidFuture();
}
// phase 1: read from the sources and target
List<StateFuture<byte[]>> futures = new ArrayList<>(sources.size());
for (N source : sources) {
if (source != null) {
setCurrentNamespace(source);
futures.add(
handleRequest(
StateRequestType.CUSTOMIZED,
Tuple2.of(ForStStateRequestType.LIST_GET_RAW, null)));
}
}
// phase 2: merge the sources to the target
return StateFutureUtils.combineAll(futures)
.thenCompose(
values -> {
List<StateFuture<Void>> updateFutures =
new ArrayList<>(sources.size() + 1);
List<byte[]> validValues = new ArrayList<>(sources.size());
Iterator<byte[]> valueIterator = values.iterator();
for (N source : sources) {
byte[] value = valueIterator.next();
if (value != null) {
validValues.add(value);
setCurrentNamespace(source);
updateFutures.add(asyncClear());
}
}
if (!validValues.isEmpty()) {
setCurrentNamespace(target);
updateFutures.add(
handleRequest(
StateRequestType.CUSTOMIZED,
Tuple2.of(
ForStStateRequestType.MERGE_ALL_RAW,
validValues)));
}
return StateFutureUtils.combineAll(updateFutures);
})
.thenAccept(ignores -> {});
}
@Override
public void mergeNamespaces(N target, Collection<N> sources) {
if (sources == null || sources.isEmpty()) {
return;
}
try {
// merge the sources to the target
List<byte[]> validValues = new ArrayList<>(sources.size());
for (N source : sources) {
if (source != null) {
setCurrentNamespace(source);
byte[] oldValue =
handleRequestSync(
StateRequestType.CUSTOMIZED,
Tuple2.of(ForStStateRequestType.LIST_GET_RAW, null));
if (oldValue != null) {
setCurrentNamespace(source);
clear();
validValues.add(oldValue);
}
}
}
// if something came out of merging the sources, merge it or write it to the target
if (!validValues.isEmpty()) {
setCurrentNamespace(target);
handleRequestSync(
StateRequestType.CUSTOMIZED,
Tuple2.of(ForStStateRequestType.MERGE_ALL_RAW, validValues));
}
} catch (Exception e) {
throw new RuntimeException("merge namespace fail.", e);
}
}
}
| ForStListState |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ReflectionUtils.java | {
"start": 28557,
"end": 29259
} | class ____ which the method should be referenced; never {@code null}
* @param methodName the name of the method; never {@code null} or blank
* @param parameterTypes the parameter types of the method; may be {@code null} or empty
* @return fully qualified method name; never {@code null}
*/
public static String getFullyQualifiedMethodName(Class<?> clazz, String methodName,
Class<?> @Nullable... parameterTypes) {
Preconditions.notNull(clazz, "Class must not be null");
return getFullyQualifiedMethodName(clazz.getName(), methodName, ClassUtils.nullSafeToString(parameterTypes));
}
/**
* Build the <em>fully qualified method name</em> for the method described by the
* supplied | from |
java | spring-projects__spring-framework | framework-docs/src/main/java/org/springframework/docs/dataaccess/jdbc/jdbcjdbctemplateidioms/JdbcCorporateEventDao.java | {
"start": 807,
"end": 1122
} | class ____ implements CorporateEventDao {
private final JdbcTemplate jdbcTemplate;
public JdbcCorporateEventDao(DataSource dataSource) {
this.jdbcTemplate = new JdbcTemplate(dataSource);
}
// JDBC-backed implementations of the methods on the CorporateEventDao follow...
}
// end::snippet[] | JdbcCorporateEventDao |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/orca/OrcaServiceImplTest.java | {
"start": 2120,
"end": 14536
} | class ____ {
@Rule
public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule();
@Rule
public final MockitoRule mocks = MockitoJUnit.rule();
private ManagedChannel channel;
private Server oobServer;
private final FakeClock fakeClock = new FakeClock();
private MetricRecorder defaultTestService;
private BindableService orcaServiceImpl;
private final Random random = new Random();
@Mock
ClientCall.Listener<OrcaLoadReport> listener;
@Before
public void setup() throws Exception {
defaultTestService = MetricRecorder.newInstance();
orcaServiceImpl = OrcaServiceImpl.createService(fakeClock.getScheduledExecutorService(),
defaultTestService, 1, TimeUnit.SECONDS);
startServerAndGetChannel(orcaServiceImpl);
}
@After
public void teardown() throws Exception {
channel.shutdownNow();
}
private void startServerAndGetChannel(BindableService orcaService) throws Exception {
oobServer = grpcCleanup.register(
InProcessServerBuilder.forName("orca-service-test")
.addService(orcaService)
.directExecutor()
.build()
.start());
channel = grpcCleanup.register(
InProcessChannelBuilder.forName("orca-service-test")
.directExecutor().build());
}
@Test
public void testReportingLifeCycle() {
defaultTestService.setCpuUtilizationMetric(0.1);
Iterator<OrcaLoadReport> reports = OpenRcaServiceGrpc.newBlockingStub(channel)
.streamCoreMetrics(OrcaLoadReportRequest.newBuilder().build());
assertThat(reports.next()).isEqualTo(
OrcaLoadReport.newBuilder().setCpuUtilization(0.1).build());
assertThat(((OrcaServiceImpl)orcaServiceImpl).clientCount.get()).isEqualTo(1);
assertThat(fakeClock.getPendingTasks().size()).isEqualTo(1);
assertThat(fakeClock.forwardTime(1, TimeUnit.SECONDS)).isEqualTo(1);
assertThat(reports.next()).isEqualTo(
OrcaLoadReport.newBuilder().setCpuUtilization(0.1).build());
assertThat(fakeClock.getPendingTasks().size()).isEqualTo(1);
channel.shutdownNow();
assertThat(((OrcaServiceImpl)orcaServiceImpl).clientCount.get()).isEqualTo(0);
assertThat(fakeClock.getPendingTasks().size()).isEqualTo(0);
}
@Test
public void testReportingLifeCycle_serverShutdown() {
ClientCall<OrcaLoadReportRequest, OrcaLoadReport> call = channel.newCall(
OpenRcaServiceGrpc.getStreamCoreMetricsMethod(), CallOptions.DEFAULT);
defaultTestService.putUtilizationMetric("buffer", 0.2);
call.start(listener, new Metadata());
call.sendMessage(OrcaLoadReportRequest.newBuilder()
.setReportInterval(Duration.newBuilder().setSeconds(0).setNanos(500).build()).build());
call.halfClose();
call.request(1);
OrcaLoadReport expect = OrcaLoadReport.newBuilder().putUtilization("buffer", 0.2).build();
assertThat(((OrcaServiceImpl)orcaServiceImpl).clientCount.get()).isEqualTo(1);
verify(listener).onMessage(eq(expect));
verify(listener, never()).onClose(any(), any());
oobServer.shutdownNow();
assertThat(fakeClock.forwardTime(1, TimeUnit.SECONDS)).isEqualTo(0);
assertThat(((OrcaServiceImpl)orcaServiceImpl).clientCount.get()).isEqualTo(0);
ArgumentCaptor<Status> callCloseCaptor = ArgumentCaptor.forClass(Status.class);
verify(listener).onClose(callCloseCaptor.capture(), any());
assertThat(callCloseCaptor.getValue().getCode()).isEqualTo(Status.Code.UNAVAILABLE);
}
@Test
@SuppressWarnings("unchecked")
public void testRequestIntervalLess() {
ClientCall<OrcaLoadReportRequest, OrcaLoadReport> call = channel.newCall(
OpenRcaServiceGrpc.getStreamCoreMetricsMethod(), CallOptions.DEFAULT);
defaultTestService.putUtilizationMetric("buffer", 0.2);
defaultTestService.setApplicationUtilizationMetric(0.314159);
defaultTestService.setQpsMetric(1.9);
defaultTestService.setEpsMetric(0.2233);
call.start(listener, new Metadata());
call.sendMessage(OrcaLoadReportRequest.newBuilder()
.setReportInterval(Duration.newBuilder().setSeconds(0).setNanos(500).build()).build());
call.halfClose();
call.request(1);
OrcaLoadReport expect = OrcaLoadReport.newBuilder().putUtilization("buffer", 0.2)
.setApplicationUtilization(0.314159).setRpsFractional(1.9).setEps(0.2233).build();
verify(listener).onMessage(eq(expect));
reset(listener);
defaultTestService.removeUtilizationMetric("buffer0");
defaultTestService.clearApplicationUtilizationMetric();
defaultTestService.clearQpsMetric();
defaultTestService.clearEpsMetric();
assertThat(fakeClock.forwardTime(500, TimeUnit.NANOSECONDS)).isEqualTo(0);
verifyNoInteractions(listener);
assertThat(fakeClock.forwardTime(1, TimeUnit.SECONDS)).isEqualTo(1);
call.request(1);
expect = OrcaLoadReport.newBuilder().putUtilization("buffer", 0.2).build();
verify(listener).onMessage(eq(expect));
}
@Test
@SuppressWarnings("unchecked")
public void testRequestIntervalGreater() {
ClientCall<OrcaLoadReportRequest, OrcaLoadReport> call = channel.newCall(
OpenRcaServiceGrpc.getStreamCoreMetricsMethod(), CallOptions.DEFAULT);
defaultTestService.putUtilizationMetric("buffer", 0.2);
call.start(listener, new Metadata());
call.sendMessage(OrcaLoadReportRequest.newBuilder()
.setReportInterval(Duration.newBuilder().setSeconds(10).build()).build());
call.halfClose();
call.request(1);
OrcaLoadReport expect = OrcaLoadReport.newBuilder().putUtilization("buffer", 0.2).build();
verify(listener).onMessage(eq(expect));
reset(listener);
defaultTestService.removeUtilizationMetric("buffer0");
assertThat(fakeClock.forwardTime(1, TimeUnit.SECONDS)).isEqualTo(0);
verifyNoInteractions(listener);
assertThat(fakeClock.forwardTime(9, TimeUnit.SECONDS)).isEqualTo(1);
call.request(1);
verify(listener).onMessage(eq(expect));
}
@Test
@SuppressWarnings("unchecked")
public void testRequestIntervalDefault() throws Exception {
defaultTestService = MetricRecorder.newInstance();
oobServer.shutdownNow();
startServerAndGetChannel(OrcaServiceImpl.createService(
fakeClock.getScheduledExecutorService(), defaultTestService));
ClientCall<OrcaLoadReportRequest, OrcaLoadReport> call = channel.newCall(
OpenRcaServiceGrpc.getStreamCoreMetricsMethod(), CallOptions.DEFAULT);
defaultTestService.putUtilizationMetric("buffer", 0.2);
call.start(listener, new Metadata());
call.sendMessage(OrcaLoadReportRequest.newBuilder()
.setReportInterval(Duration.newBuilder().setSeconds(10).build()).build());
call.halfClose();
call.request(1);
OrcaLoadReport expect = OrcaLoadReport.newBuilder().putUtilization("buffer", 0.2).build();
verify(listener).onMessage(eq(expect));
reset(listener);
defaultTestService.removeUtilizationMetric("buffer0");
assertThat(fakeClock.forwardTime(10, TimeUnit.SECONDS)).isEqualTo(0);
verifyNoInteractions(listener);
assertThat(fakeClock.forwardTime(20, TimeUnit.SECONDS)).isEqualTo(1);
call.request(1);
verify(listener).onMessage(eq(expect));
}
@Test
public void testMultipleClients() {
ClientCall<OrcaLoadReportRequest, OrcaLoadReport> call = channel.newCall(
OpenRcaServiceGrpc.getStreamCoreMetricsMethod(), CallOptions.DEFAULT);
defaultTestService.putUtilizationMetric("omg", 1.00);
call.start(listener, new Metadata());
call.sendMessage(OrcaLoadReportRequest.newBuilder().build());
call.halfClose();
call.request(1);
OrcaLoadReport expect = OrcaLoadReport.newBuilder().putUtilization("omg", 1.00).build();
verify(listener).onMessage(eq(expect));
defaultTestService.setMemoryUtilizationMetric(0.5);
ClientCall<OrcaLoadReportRequest, OrcaLoadReport> call2 = channel.newCall(
OpenRcaServiceGrpc.getStreamCoreMetricsMethod(), CallOptions.DEFAULT);
call2.start(listener, new Metadata());
call2.sendMessage(OrcaLoadReportRequest.newBuilder().build());
call2.halfClose();
call2.request(1);
expect = OrcaLoadReport.newBuilder(expect).setMemUtilization(0.5).build();
verify(listener).onMessage(eq(expect));
assertThat(((OrcaServiceImpl)orcaServiceImpl).clientCount.get()).isEqualTo(2);
assertThat(fakeClock.getPendingTasks().size()).isEqualTo(2);
channel.shutdownNow();
assertThat(fakeClock.forwardTime(1, TimeUnit.SECONDS)).isEqualTo(0);
assertThat(((OrcaServiceImpl)orcaServiceImpl).clientCount.get()).isEqualTo(0);
ArgumentCaptor<Status> callCloseCaptor = ArgumentCaptor.forClass(Status.class);
verify(listener, times(2)).onClose(callCloseCaptor.capture(), any());
assertThat(callCloseCaptor.getValue().getCode()).isEqualTo(Status.Code.UNAVAILABLE);
}
@Test
public void testApis() throws Exception {
ImmutableMap<String, Double> firstUtilization = ImmutableMap.of("util", 0.1);
OrcaLoadReport goldenReport = OrcaLoadReport.newBuilder()
.setCpuUtilization(random.nextDouble() * 10)
.setApplicationUtilization(random.nextDouble() * 10)
.setMemUtilization(random.nextDouble())
.putAllUtilization(firstUtilization)
.putUtilization("queue", 1.0)
.setRpsFractional(1239.01)
.setEps(1.618)
.build();
defaultTestService.setCpuUtilizationMetric(goldenReport.getCpuUtilization());
defaultTestService.setApplicationUtilizationMetric(goldenReport.getApplicationUtilization());
defaultTestService.setMemoryUtilizationMetric(goldenReport.getMemUtilization());
defaultTestService.setAllUtilizationMetrics(firstUtilization);
defaultTestService.putUtilizationMetric("queue", 1.0);
defaultTestService.setQpsMetric(1239.01);
defaultTestService.setEpsMetric(1.618);
Iterator<OrcaLoadReport> reports = OpenRcaServiceGrpc.newBlockingStub(channel)
.streamCoreMetrics(OrcaLoadReportRequest.newBuilder().build());
assertThat(reports.next()).isEqualTo(goldenReport);
defaultTestService.clearCpuUtilizationMetric();
defaultTestService.clearApplicationUtilizationMetric();
defaultTestService.clearMemoryUtilizationMetric();
defaultTestService.clearQpsMetric();
defaultTestService.clearEpsMetric();
fakeClock.forwardTime(1, TimeUnit.SECONDS);
goldenReport = OrcaLoadReport.newBuilder()
.putAllUtilization(firstUtilization)
.putUtilization("queue", 1.0)
.putUtilization("util", 0.1)
.build();
assertThat(reports.next()).isEqualTo(goldenReport);
defaultTestService.removeUtilizationMetric("util-not-exist");
defaultTestService.removeUtilizationMetric("queue-not-exist");
fakeClock.forwardTime(1, TimeUnit.SECONDS);
assertThat(reports.next()).isEqualTo(goldenReport);
defaultTestService.setCpuUtilizationMetric(-0.001);
defaultTestService.setApplicationUtilizationMetric(-0.001);
defaultTestService.setMemoryUtilizationMetric(-0.001);
defaultTestService.setMemoryUtilizationMetric(1.001);
defaultTestService.setQpsMetric(-0.001);
defaultTestService.setEpsMetric(-0.001);
defaultTestService.putUtilizationMetric("util-out-of-range", -0.001);
defaultTestService.putUtilizationMetric("util-out-of-range", 1.001);
fakeClock.forwardTime(1, TimeUnit.SECONDS);
assertThat(reports.next()).isEqualTo(goldenReport);
CyclicBarrier barrier = new CyclicBarrier(2);
new Thread(new Runnable() {
@Override
public void run() {
try {
barrier.await();
} catch (Exception ex) {
throw new AssertionError(ex);
}
defaultTestService.removeUtilizationMetric("util");
defaultTestService.setMemoryUtilizationMetric(0.4);
defaultTestService.setAllUtilizationMetrics(firstUtilization);
try {
barrier.await();
} catch (Exception ex) {
throw new AssertionError(ex);
}
}
}).start();
barrier.await();
defaultTestService.setMemoryUtilizationMetric(0.4);
defaultTestService.removeUtilizationMetric("util");
defaultTestService.setAllUtilizationMetrics(firstUtilization);
barrier.await();
goldenReport = OrcaLoadReport.newBuilder()
.putAllUtilization(firstUtilization)
.setMemUtilization(0.4)
.build();
fakeClock.forwardTime(1, TimeUnit.SECONDS);
assertThat(reports.next()).isEqualTo(goldenReport);
}
}
| OrcaServiceImplTest |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java | {
"start": 8065,
"end": 8397
} | class ____<T, ACC> extends ImperativeAggregateFunction<T, ACC> {
/**
* Collects a record and forwards it. The collector can output retract messages with the retract
* method. Note: This collector can only be used in the {@code emitUpdateWithRetract()} method.
*/
@PublicEvolving
public | TableAggregateFunction |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/memory/OffHeapUnsafeMemorySegmentTest.java | {
"start": 1385,
"end": 3326
} | class ____ extends MemorySegmentTestBase {
OffHeapUnsafeMemorySegmentTest(int pageSize) {
super(pageSize);
}
@Override
MemorySegment createSegment(int size) {
return MemorySegmentFactory.allocateOffHeapUnsafeMemory(size);
}
@Override
MemorySegment createSegment(int size, Object owner) {
return MemorySegmentFactory.allocateOffHeapUnsafeMemory(size, owner, () -> {});
}
@TestTemplate
@Override
void testByteBufferWrapping(int pageSize) {
assertThatThrownBy(() -> createSegment(10).wrap(1, 2))
.isInstanceOf(UnsupportedOperationException.class);
}
@TestTemplate
void testCallCleanerOnFree() {
final CompletableFuture<Void> cleanerFuture = new CompletableFuture<>();
MemorySegmentFactory.allocateOffHeapUnsafeMemory(
10, null, () -> cleanerFuture.complete(null))
.free();
assertThat(cleanerFuture).isDone();
}
@TestTemplate
void testCallCleanerOnceOnConcurrentFree() throws InterruptedException {
final AtomicInteger counter = new AtomicInteger(0);
final Runnable cleaner =
() -> {
try {
counter.incrementAndGet();
// make the cleaner unlikely to finish before another invocation (if any)
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
};
final MemorySegment segment =
MemorySegmentFactory.allocateOffHeapUnsafeMemory(10, null, cleaner);
final Thread t1 = new Thread(segment::free);
final Thread t2 = new Thread(segment::free);
t1.start();
t2.start();
t1.join();
t2.join();
assertThat(counter).hasValue(1);
}
}
| OffHeapUnsafeMemorySegmentTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/sql/SQLTest.java | {
"start": 2072,
"end": 28860
} | class ____ {
@BeforeAll
public void init(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
Person person1 = new Person( "John Doe" );
person1.setNickName( "JD" );
person1.setAddress( "Earth" );
person1.setCreatedOn( LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ) );
person1.getAddresses().put( AddressType.HOME, "Home address" );
person1.getAddresses().put( AddressType.OFFICE, "Office address" );
entityManager.persist( person1 );
Person person2 = new Person( "Mrs. John Doe" );
person2.setAddress( "Earth" );
person2.setCreatedOn( LocalDateTime.of( 2000, 1, 2, 12, 0, 0 ) );
entityManager.persist( person2 );
Person person3 = new Person( "Dr_ John Doe" );
entityManager.persist( person3 );
Phone phone1 = new Phone( "123-456-7890" );
phone1.setId( 1L );
phone1.setType( PhoneType.MOBILE );
person1.addPhone( phone1 );
phone1.getRepairTimestamps().add( LocalDateTime.of( 2005, 1, 1, 12, 0, 0 ) );
phone1.getRepairTimestamps().add( LocalDateTime.of( 2006, 1, 1, 12, 0, 0 ) );
Call call11 = new Call();
call11.setDuration( 12 );
call11.setTimestamp( LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ) );
Call call12 = new Call();
call12.setDuration( 33 );
call12.setTimestamp( LocalDateTime.of( 2000, 1, 1, 1, 0, 0 ) );
phone1.addCall( call11 );
phone1.addCall( call12 );
Phone phone2 = new Phone( "098_765-4321" );
phone2.setId( 2L );
phone2.setType( PhoneType.LAND_LINE );
Phone phone3 = new Phone( "098-765-4320" );
phone3.setId( 3L );
phone3.setType( PhoneType.LAND_LINE );
person2.addPhone( phone2 );
person2.addPhone( phone3 );
CreditCardPayment creditCardPayment = new CreditCardPayment();
creditCardPayment.setCompleted( true );
creditCardPayment.setAmount( BigDecimal.ZERO );
creditCardPayment.setPerson( person1 );
WireTransferPayment wireTransferPayment = new WireTransferPayment();
wireTransferPayment.setCompleted( true );
wireTransferPayment.setAmount( BigDecimal.valueOf( 100 ) );
wireTransferPayment.setPerson( person2 );
entityManager.persist( creditCardPayment );
entityManager.persist( wireTransferPayment );
Partner partner = new Partner( "John Doe" );
entityManager.persist( partner );
Captain captain = new Captain();
captain.setId( new Identity() );
captain.getId().setFirstname( "Jean-Luc" );
captain.getId().setLastname( "Picard" );
entityManager.persist( captain );
SpaceShip spaceShip = new SpaceShip();
spaceShip.setName( "Enterprise" );
spaceShip.setDimensions( new Dimensions() );
spaceShip.getDimensions().setLength( 100 );
spaceShip.getDimensions().setWidth( 20 );
spaceShip.setModel( "E-1" );
spaceShip.setSpeed( 150 );
spaceShip.setCaptain( captain );
entityManager.persist( spaceShip );
} );
}
@Test
public void test_sql_jpa_all_columns_scalar_query_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::sql-jpa-all-columns-scalar-query-example[]
List<Object[]> persons = entityManager.createNativeQuery(
"SELECT * FROM Person" )
.getResultList();
//end::sql-jpa-all-columns-scalar-query-example[]
assertThat( persons ).hasSize( 3 );
} );
}
@Test
public void test_sql_jpa_custom_column_selection_scalar_query_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::sql-jpa-custom-column-selection-scalar-query-example[]
List<Object[]> persons = entityManager.createNativeQuery(
"SELECT id, name FROM Person" )
.getResultList();
for ( Object[] person : persons ) {
Number id = (Number) person[0];
String name = (String) person[1];
}
//end::sql-jpa-custom-column-selection-scalar-query-example[]
assertThat( persons ).hasSize( 3 );
} );
}
@Test
public void test_sql_hibernate_query_scalar_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
Session session = entityManager.unwrap( Session.class );
//tag::sql-hibernate-all-columns-scalar-query-example[]
List<Object[]> persons = session.createNativeQuery(
"SELECT * FROM Person", Object[].class )
.list();
//end::sql-hibernate-all-columns-scalar-query-example[]
assertThat( persons ).hasSize( 3 );
} );
}
@Test
@JiraKey(value = "HHH-15914")
public void test_sql_hibernate_custom_column_selection_scalar_query_example(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
Session session = entityManager.unwrap( Session.class );
//tag::sql-hibernate-custom-column-selection-scalar-query-example[]
List<Object[]> persons = session.createNativeQuery(
"SELECT id, name FROM Person", Object[].class )
.list();
for ( Object[] person : persons ) {
Number id = (Number) person[0];
String name = (String) person[1];
}
//end::sql-hibernate-custom-column-selection-scalar-query-example[]
assertThat( persons ).hasSize( 3 );
} );
}
/**
 * Verifies the {@code sql-hibernate-scalar-query-explicit-result-set-example} doc
 * snippet: {@code addScalar} with explicit {@link StandardBasicTypes} mappings for
 * every projected column. Code between the tag markers is extracted into the docs.
 */
@Test
public void test_sql_hibernate_query_scalar_explicit_result_set_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-scalar-query-explicit-result-set-example[]
		List<Object[]> persons = session.createNativeQuery(
			"SELECT * FROM Person", Object[].class )
			.addScalar( "id", StandardBasicTypes.LONG )
			.addScalar( "name", StandardBasicTypes.STRING )
			.list();
		for ( Object[] person : persons ) {
			Long id = (Long) person[0];
			String name = (String) person[1];
		}
		//end::sql-hibernate-scalar-query-explicit-result-set-example[]
		assertThat( persons ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-scalar-query-partial-explicit-result-set-example}
 * doc snippet: mixing a typed {@code addScalar} with an untyped one (type inferred).
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_query_scalar_partial_explicit_result_set_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-scalar-query-partial-explicit-result-set-example[]
		List<Object[]> persons = session.createNativeQuery(
			"SELECT * FROM Person", Object[].class )
			.addScalar( "id", StandardBasicTypes.LONG )
			.addScalar( "name" )
			.list();
		for ( Object[] person : persons ) {
			Long id = (Long) person[0];
			String name = (String) person[1];
		}
		//end::sql-hibernate-scalar-query-partial-explicit-result-set-example[]
		assertThat( persons ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-jpa-entity-query-example} doc snippet: a JPA native
 * query mapped directly to the {@code Person} entity.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_entity_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-entity-query-example[]
		List<Person> persons = entityManager.createNativeQuery(
			"SELECT * FROM Person", Person.class )
			.getResultList();
		//end::sql-jpa-entity-query-example[]
		assertThat( persons ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-entity-query-example} doc snippet: the native
 * Session API equivalent of mapping a native query to the {@code Person} entity.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_entity_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-query-example[]
		List<Person> persons = session.createNativeQuery(
			"SELECT * FROM Person", Person.class )
			.list();
		//end::sql-hibernate-entity-query-example[]
		assertThat( persons ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-jpa-entity-query-explicit-result-set-example} doc snippet:
 * an entity-mapped native query that spells out every column instead of {@code SELECT *}.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_entity_query_explicit_result_set_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-entity-query-explicit-result-set-example[]
		List<Person> persons = entityManager.createNativeQuery(
			"SELECT id, name, nick_name, address, created_on, version " +
			"FROM Person", Person.class )
			.getResultList();
		//end::sql-jpa-entity-query-explicit-result-set-example[]
		assertThat( persons ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-entity-query-explicit-result-set-example} doc
 * snippet: the Session-API variant of an entity query with an explicit column list.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_entity_query_explicit_result_set_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-query-explicit-result-set-example[]
		List<Person> persons = session.createNativeQuery(
			"SELECT id, name, nick_name, address, created_on, version " +
			"FROM Person", Person.class )
			.list();
		//end::sql-hibernate-entity-query-explicit-result-set-example[]
		assertThat( persons ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-jpa-entity-associations-query-many-to-one-example} doc
 * snippet: selecting {@code Phone} rows whose {@code person_id} FK backs the
 * many-to-one association. Code between the tag markers goes verbatim into the docs.
 */
@Test
public void test_sql_jpa_entity_associations_query_many_to_one_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-entity-associations-query-many-to-one-example[]
		List<Phone> phones = entityManager.createNativeQuery(
			"SELECT id, phone_number, phone_type, person_id " +
			"FROM Phone", Phone.class )
			.getResultList();
		//end::sql-jpa-entity-associations-query-many-to-one-example[]
		assertThat( phones ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-entity-associations-query-many-to-one-example}
 * doc snippet: the Session-API variant of loading {@code Phone} entities including
 * the FK column. Code between the tag markers goes verbatim into the docs.
 */
@Test
public void test_sql_hibernate_entity_associations_query_many_to_one_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-associations-query-many-to-one-example[]
		List<Phone> phones = session.createNativeQuery(
			"SELECT id, phone_number, phone_type, person_id " +
			"FROM Phone", Phone.class )
			.list();
		//end::sql-hibernate-entity-associations-query-many-to-one-example[]
		assertThat( phones ).hasSize( 3 );
	} );
}
/**
 * Joins {@code Phone} to its owning {@code Person} in plain SQL and checks that
 * the many-to-one association is navigable on every entity returned by the
 * JPA native query. (No doc-snippet tags here, unlike the sibling tests.)
 */
@Test
public void test_sql_jpa_entity_associations_query_many_to_one_join_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		final String sql =
			"SELECT ph.* " +
			"FROM Phone ph " +
			"JOIN Person pr ON ph.person_id = pr.id";
		List<Phone> fetchedPhones = entityManager
			.createNativeQuery( sql, Phone.class )
			.getResultList();
		// Navigating the association must yield an owner with a name
		fetchedPhones.forEach( fetchedPhone ->
			assertThat( fetchedPhone.getPerson().getName() ).isNotNull() );
		assertThat( fetchedPhones ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-entity-associations-query-many-to-one-join-example}
 * doc snippet: Hibernate's {@code {alias.*}} column placeholders plus {@code addJoin}
 * to fetch the {@code Phone.person} association in one query.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_entity_associations_query_many_to_one_join_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-associations-query-many-to-one-join-example[]
		List<Phone> tuples = session.createNativeQuery(
			"SELECT {ph.*}, {pr.*} " +
			"FROM Phone ph " +
			"JOIN Person pr ON ph.person_id = pr.id", Phone.class, "ph" )
			.addJoin( "pr", "ph.person" )
			.list();
		for ( Phone phone : tuples ) {
			assertThat( phone.getPerson().getName() ).isNotNull();
		}
		//end::sql-hibernate-entity-associations-query-many-to-one-join-example[]
		assertThat( tuples ).hasSize( 3 );
	} );
}
/**
 * Verifies the
 * {@code sql-hibernate-entity-associations-query-many-to-one-join-tuple-transformer-example}
 * doc snippet: a {@link TupleTransformer} collapsing each (Phone, Person) tuple to
 * just the {@code Phone}. Code between the tag markers is extracted verbatim into
 * the user guide — which is why the loop variable misnamed {@code person} (it is a
 * {@code Phone}) is left untouched here; renaming it would change the published snippet.
 */
@Test
public void test_sql_hibernate_entity_associations_query_many_to_one_join_result_transformer_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-associations-query-many-to-one-join-tuple-transformer-example[]
		List<Phone> phones = session.createNativeQuery(
			"SELECT {ph.*}, {pr.*} " +
			"FROM Phone ph " +
			"JOIN Person pr ON ph.person_id = pr.id", Phone.class, "ph" )
			.addJoin( "pr", "ph.person" )
			.setTupleTransformer( (TupleTransformer<Phone>) (tuple, aliases) -> (Phone) tuple[0] )
			.list();
		for ( Phone person : phones ) {
			person.getPerson();
		}
		//end::sql-hibernate-entity-associations-query-many-to-one-join-tuple-transformer-example[]
		assertThat( phones ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-jpa-entity-associations-query-one-to-many-join-example}
 * doc snippet: joining {@code Phone} against {@code phone_call} and then lazily
 * navigating the one-to-many {@code calls} collection. Restricted to dialects on
 * which the snippet's SQL is known to run. Code between the tag markers is
 * extracted verbatim into the user guide.
 */
@Test
@RequiresDialect(H2Dialect.class)
@RequiresDialect(OracleDialect.class)
@RequiresDialect(PostgreSQLDialect.class)
public void test_sql_jpa_entity_associations_query_one_to_many_join_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-entity-associations-query-one-to-many-join-example[]
		List<Phone> phones = entityManager.createNativeQuery(
			"SELECT ph.* " +
			"FROM Phone ph " +
			"JOIN phone_call c ON c.phone_id = ph.id", Phone.class )
			.getResultList();
		for ( Phone phone : phones ) {
			List<Call> calls = phone.getCalls();
		}
		//end::sql-jpa-entity-associations-query-one-to-many-join-example[]
		assertThat( phones ).hasSize( 2 );
	} );
}
/**
 * Regression test for HHH-10504: {@code addJoin} combined with Hibernate's
 * {@code {alias.*}} column placeholders must fetch the one-to-many
 * {@code Phone.calls} collection. (No doc-snippet tags in this variant.)
 */
@Test
@JiraKey(value = "HHH-10504")
public void test_sql_hibernate_entity_associations_query_one_to_many_join_example_1(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		final String sql =
			"SELECT {ph.*}, {c.*} " +
			"FROM Phone ph " +
			"JOIN phone_call c ON c.phone_id = ph.id";
		List<Phone> fetchedPhones = session
			.createNativeQuery( sql, Phone.class, "ph" )
			.addJoin( "c", "ph.calls" )
			.list();
		for ( Phone fetchedPhone : fetchedPhones ) {
			// Touch the collection to exercise the joined fetch
			List<Call> fetchedCalls = fetchedPhone.getCalls();
		}
		assertThat( fetchedPhones ).hasSize( 2 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-entity-associations-query-one-to-many-join-example}
 * doc snippet: the Session-API one-to-many join fetch via {@code addJoin}.
 * Restricted to dialects on which the snippet's SQL is known to run.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
@RequiresDialect(H2Dialect.class)
@RequiresDialect(OracleDialect.class)
@RequiresDialect(PostgreSQLDialect.class)
public void test_sql_hibernate_entity_associations_query_one_to_many_join_example_2(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-associations-query-one-to-many-join-example[]
		List<Phone> tuples = session.createNativeQuery(
			"SELECT {ph.*}, {c.*} " +
			"FROM Phone ph " +
			"JOIN phone_call c ON c.phone_id = ph.id", Phone.class, "ph" )
			.addJoin( "c", "ph.calls" )
			.list();
		for ( Phone phone : tuples ) {
			List<Call> calls = phone.getCalls();
		}
		//end::sql-hibernate-entity-associations-query-one-to-many-join-example[]
		assertThat( tuples ).hasSize( 2 );
	} );
}
/**
 * Verifies the {@code sql-jpa-multi-entity-query-example} doc snippet: selecting
 * from two tables with a single entity mapping is expected to fail with
 * {@link NonUniqueDiscoveredSqlAliasException} because the column aliases clash.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_multi_entity_query_example(EntityManagerFactoryScope scope) {
	assertThrows( NonUniqueDiscoveredSqlAliasException.class, () -> scope.inTransaction( entityManager -> {
		//tag::sql-jpa-multi-entity-query-example[]
		List<Person> entities = entityManager.createNativeQuery(
			"SELECT * " +
			"FROM Person pr, Partner pt " +
			"WHERE pr.name = pt.name", Person.class )
			.getResultList();
		//end::sql-jpa-multi-entity-query-example[]
		assertThat( entities ).hasSize( 2 );
	} ) );
}
/**
 * Verifies the {@code sql-hibernate-multi-entity-query-example} doc snippet: the
 * Session-API variant of the ambiguous-alias query. The failure may surface either
 * directly as {@link NonUniqueDiscoveredSqlAliasException} or wrapped in a
 * {@link PersistenceException}, so both paths are accepted deliberately.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_multi_entity_query_example(EntityManagerFactoryScope scope) {
	try {
		scope.inTransaction( entityManager -> {
			Session session = entityManager.unwrap( Session.class );
			//tag::sql-hibernate-multi-entity-query-example[]
			List<Person> entities = session.createNativeQuery(
				"SELECT * " +
				"FROM Person pr, Partner pt " +
				"WHERE pr.name = pt.name", Person.class )
				.list();
			//end::sql-hibernate-multi-entity-query-example[]
			assertThat( entities ).hasSize( 2 );
		} );
		fail( "Should throw NonUniqueDiscoveredSqlAliasException!" );
	}
	catch (NonUniqueDiscoveredSqlAliasException e) {
		// expected
	}
	catch (PersistenceException e) {
		assertThat( e.getCause() ).isInstanceOf( NonUniqueDiscoveredSqlAliasException.class );
	}
}
/**
 * Verifies the {@code sql-hibernate-multi-entity-query-alias-example} doc snippet
 * (regression coverage for HHH-15914): resolving the ambiguous-alias problem with
 * {@code {alias.*}} placeholders and one {@code addEntity} per alias.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
@JiraKey(value = "HHH-15914")
public void test_sql_hibernate_multi_entity_query_alias_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-multi-entity-query-alias-example[]
		List<Object> entities = session.createNativeQuery(
			"SELECT {pr.*}, {pt.*} " +
			"FROM Person pr, Partner pt " +
			"WHERE pr.name = pt.name", Object.class )
			.addEntity( "pr", Person.class )
			.addEntity( "pt", Partner.class )
			.list();
		//end::sql-hibernate-multi-entity-query-alias-example[]
		assertThat( entities ).hasSize( 1 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-dto-query-example} doc snippet: mapping a
 * native query's {@code Tuple} rows to {@code PersonSummaryDTO} instances via a
 * {@code setTupleTransformer} lambda.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_dto_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-dto-query-example[]
		List<PersonSummaryDTO> dtos = session.createNativeQuery(
			"SELECT p.id as \"id\", p.name as \"name\" " +
			"FROM Person p", Tuple.class )
			.setTupleTransformer(
				(tuple, aliases) -> {
					PersonSummaryDTO dto = new PersonSummaryDTO();
					dto.setId( (Long) tuple[0] );
					dto.setName( (String) tuple[1] );
					return dto;
				}
			)
			.list();
		//end::sql-hibernate-dto-query-example[]
		assertThat( dtos ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-inheritance-query-example} doc snippet: loading
 * a subclass ({@code CreditCardPayment}) of an inheritance hierarchy by joining the
 * base and subclass tables explicitly.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_inheritance_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-inheritance-query-example[]
		List<CreditCardPayment> payments = session.createNativeQuery(
			"SELECT * " +
			"FROM Payment p " +
			"JOIN CreditCardPayment cp on cp.id = p.id", CreditCardPayment.class )
			.list();
		//end::sql-hibernate-inheritance-query-example[]
		assertThat( payments ).hasSize( 1 );
	} );
}
/**
 * Verifies the {@code sql-jpa-query-parameters-example} doc snippet: binding a
 * named parameter ({@code :name}) in a JPA native query.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_query_parameters_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-query-parameters-example[]
		List<Person> persons = entityManager.createNativeQuery(
			"SELECT * " +
			"FROM Person " +
			"WHERE name like :name", Person.class )
			.setParameter( "name", "J%" )
			.getResultList();
		//end::sql-jpa-query-parameters-example[]
		assertThat( persons ).hasSize( 1 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-query-parameters-example} doc snippet: the
 * Session-API variant of binding a named parameter in a native query.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_query_parameters_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-query-parameters-example[]
		List<Person> persons = session.createNativeQuery(
			"SELECT * " +
			"FROM Person " +
			"WHERE name like :name", Person.class )
			.setParameter( "name", "J%" )
			.list();
		//end::sql-hibernate-query-parameters-example[]
		assertThat( persons ).hasSize( 1 );
	} );
}
/**
 * Verifies the {@code sql-jpa-scalar-named-query-example} doc snippet: executing
 * the predefined named native query {@code find_person_name} returning scalars.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_scalar_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-scalar-named-query-example[]
		List<String> names = entityManager.createNamedQuery(
			"find_person_name", String.class )
			.getResultList();
		//end::sql-jpa-scalar-named-query-example[]
		assertThat( names ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-scalar-named-query-example} doc snippet: the
 * Session-API variant of running the named query {@code find_person_name}.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_scalar_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-scalar-named-query-example[]
		List<String> names = session.createNamedQuery(
			"find_person_name", String.class )
			.list();
		//end::sql-hibernate-scalar-named-query-example[]
		assertThat( names ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-jpa-multiple-scalar-values-named-query-example} doc
 * snippet: a named query projecting several scalar columns as {@code Object[]} rows.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_multiple_scalar_values_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-multiple-scalar-values-named-query-example[]
		List<Object[]> tuples = entityManager.createNamedQuery(
			"find_person_name_and_nickName", Object[].class )
			.getResultList();
		for ( Object[] tuple : tuples ) {
			String name = (String) tuple[0];
			String nickName = (String) tuple[1];
		}
		//end::sql-jpa-multiple-scalar-values-named-query-example[]
		assertThat( tuples ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-hibernate-multiple-scalar-values-named-query-example}
 * doc snippet: the Session-API variant of the multi-scalar named query.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_multiple_scalar_values_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-multiple-scalar-values-named-query-example[]
		List<Object[]> tuples = session.createNamedQuery(
			"find_person_name_and_nickName", Object[].class )
			.list();
		for ( Object[] tuple : tuples ) {
			String name = (String) tuple[0];
			String nickName = (String) tuple[1];
		}
		//end::sql-hibernate-multiple-scalar-values-named-query-example[]
		assertThat( tuples ).hasSize( 3 );
	} );
}
/**
 * Verifies the {@code sql-jpa-multiple-scalar-values-dto-named-query-example} doc
 * snippet: a named query mapping its scalar columns straight to {@code PersonNames}
 * DTO instances. Code between the tag markers is extracted verbatim into the docs.
 */
@Test
public void test_sql_jpa_multiple_scalar_values_dto_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-multiple-scalar-values-dto-named-query-example[]
		List<PersonNames> personNames = entityManager.createNamedQuery(
			"find_person_name_and_nickName_dto", PersonNames.class )
			.getResultList();
		//end::sql-jpa-multiple-scalar-values-dto-named-query-example[]
		assertThat( personNames ).hasSize( 3 );
		assertThat( personNames.get( 0 ) ).isNotNull();
		assertThat( personNames.get( 0 ) ).isInstanceOf( PersonNames.class );
	} );
}
/**
 * Verifies the {@code sql-hibernate-multiple-scalar-values-dto-named-query-example}
 * doc snippet: the Session-API variant of the DTO-mapped named query.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_multiple_scalar_values_dto_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-multiple-scalar-values-dto-named-query-example[]
		List<PersonNames> personNames = session.createNamedQuery(
			"find_person_name_and_nickName_dto", PersonNames.class )
			.list();
		//end::sql-hibernate-multiple-scalar-values-dto-named-query-example[]
		assertThat( personNames ).hasSize( 3 );
	} );
}
/**
 * Verifies the
 * {@code sql-hibernate-multiple-scalar-values-dto-hibernate-named-query-example}
 * doc snippet: the {@code get_person_phone_count} named query mapped to
 * {@code PersonPhoneCount} DTOs, then cross-checked per person outside the snippet.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_multiple_scalar_values_dto_hibernate_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-multiple-scalar-values-dto-hibernate-named-query-example[]
		List<PersonPhoneCount> personNames = session.createNamedQuery(
			"get_person_phone_count", PersonPhoneCount.class )
			.getResultList();
		//end::sql-hibernate-multiple-scalar-values-dto-hibernate-named-query-example[]
		assertThat( personNames ).hasSize( 2 );
		assertThat( personNames.stream().filter( person -> person.getName().equals( "John Doe" ) )
			.map( PersonPhoneCount::getPhoneCount ).findAny().get().intValue() ).isEqualTo( 1 );
		assertThat( personNames.stream().filter( person -> person.getName().equals( "Mrs. John Doe" ) )
			.map( PersonPhoneCount::getPhoneCount ).findAny().get().intValue() ).isEqualTo( 2 );
	} );
}
/**
 * Verifies the {@code sql-jpa-entity-named-query-example} doc snippet: the
 * entity-mapped named query {@code find_person_by_name} with a bound parameter.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_entity_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-entity-named-query-example[]
		List<Person> persons = entityManager.createNamedQuery(
			"find_person_by_name", Person.class )
			.setParameter( "name", "J%" )
			.getResultList();
		//end::sql-jpa-entity-named-query-example[]
		// Exactly one Person matches "J%" in the fixture data
		assertThat( persons ).hasSize( 1 );
		assertThat( persons.get( 0 ) ).isInstanceOf( Person.class );
	} );
}
/**
 * Verifies the {@code sql-hibernate-entity-named-query-example} doc snippet: the
 * Session-API variant of the parameterized entity named query.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_hibernate_entity_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-named-query-example[]
		List<Person> persons = session.createNamedQuery(
			"find_person_by_name", Person.class )
			.setParameter( "name", "J%" )
			.list();
		//end::sql-hibernate-entity-named-query-example[]
		assertThat( persons ).hasSize( 1 );
	} );
}
/**
 * Verifies the {@code sql-jpa-entity-associations_named-query-example} doc snippet:
 * a named query returning (Person, Phone) pairs as {@code Object[]} tuples.
 * Restricted to dialects on which the named query's SQL is known to run.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
@RequiresDialect(H2Dialect.class)
@RequiresDialect(OracleDialect.class)
@RequiresDialect(PostgreSQLDialect.class)
public void test_sql_jpa_entity_associations_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-entity-associations_named-query-example[]
		List<Object[]> tuples = entityManager.createNamedQuery(
			"find_person_with_phones_by_name", Object[].class )
			.setParameter( "name", "J%" )
			.getResultList();
		for ( Object[] tuple : tuples ) {
			Person person = (Person) tuple[0];
			Phone phone = (Phone) tuple[1];
		}
		//end::sql-jpa-entity-associations_named-query-example[]
		assertThat( tuples ).hasSize( 1 );
		assertThat( tuples.get( 0 ).getClass().isArray() ).isTrue();
	} );
}
/**
 * Verifies the {@code sql-hibernate-entity-associations_named-query-example} doc
 * snippet: the Session-API variant of the (Person, Phone) tuple named query.
 * Restricted to dialects on which the named query's SQL is known to run.
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
@RequiresDialect(H2Dialect.class)
@RequiresDialect(OracleDialect.class)
@RequiresDialect(PostgreSQLDialect.class)
public void test_sql_hibernate_entity_associations_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-entity-associations_named-query-example[]
		List<Object[]> tuples = session.createNamedQuery(
			"find_person_with_phones_by_name", Object[].class )
			.setParameter( "name", "J%" )
			.list();
		for ( Object[] tuple : tuples ) {
			Person person = (Person) tuple[0];
			Phone phone = (Phone) tuple[1];
		}
		//end::sql-hibernate-entity-associations_named-query-example[]
		assertThat( tuples ).hasSize( 1 );
	} );
}
/**
 * Verifies the {@code sql-jpa-composite-key-entity-associations_named-query-example}
 * doc snippet: the {@code find_all_spaceships} named query returning a
 * composite-key entity alongside computed scalars. The assertions after the snippet
 * additionally check that the {@code captain} association comes back as an
 * uninitialized {@link HibernateProxy} (i.e. it stays lazy).
 * Code between the tag markers is extracted verbatim into the user guide.
 */
@Test
public void test_sql_jpa_composite_key_entity_associations_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		//tag::sql-jpa-composite-key-entity-associations_named-query-example[]
		List<Object[]> tuples = entityManager.createNamedQuery(
			"find_all_spaceships", Object[].class )
			.getResultList();
		for ( Object[] tuple : tuples ) {
			SpaceShip spaceShip = (SpaceShip) tuple[0];
			Number surface = (Number) tuple[1];
			Number volume = (Number) tuple[2];
		}
		//end::sql-jpa-composite-key-entity-associations_named-query-example[]
		assertThat( tuples ).hasSize( 1 );
		SpaceShip spaceShip = (SpaceShip) tuples.get( 0 )[0];
		assertThat( spaceShip.getCaptain() ).isNotNull();
		assertThat( spaceShip.getCaptain() ).isInstanceOf( HibernateProxy.class );
		assertThat( ((HibernateProxy) spaceShip.getCaptain()).getHibernateLazyInitializer()
			.isUninitialized() ).isTrue();
	} );
}
/**
 * Verifies the
 * {@code sql-hibernate-composite-key-entity-associations_named-query-example} doc
 * snippet: the Session-API variant of the composite-key {@code find_all_spaceships}
 * named query. Code between the tag markers is extracted verbatim into the docs.
 */
@Test
public void test_sql_hibernate_composite_key_entity_associations_named_query_example(EntityManagerFactoryScope scope) {
	scope.inTransaction( entityManager -> {
		Session session = entityManager.unwrap( Session.class );
		//tag::sql-hibernate-composite-key-entity-associations_named-query-example[]
		List<Object[]> tuples = session.createNamedQuery(
			"find_all_spaceships", Object[].class )
			.list();
		for ( Object[] tuple : tuples ) {
			SpaceShip spaceShip = (SpaceShip) tuple[0];
			Number surface = (Number) tuple[1];
			Number volume = (Number) tuple[2];
		}
		//end::sql-hibernate-composite-key-entity-associations_named-query-example[]
		assertThat( tuples ).hasSize( 1 );
	} );
}
}
| SQLTest |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/config/SimpleJmsListenerEndpoint.java | {
"start": 1053,
"end": 2037
} | class ____ extends AbstractJmsListenerEndpoint {
private @Nullable MessageListener messageListener;
/**
* Set the {@link MessageListener} to invoke when a message matching
* the endpoint is received.
*/
public void setMessageListener(@Nullable MessageListener messageListener) {
this.messageListener = messageListener;
}
/**
* Return the {@link MessageListener} to invoke when a message matching
* the endpoint is received.
*/
public @Nullable MessageListener getMessageListener() {
return this.messageListener;
}
@Override
protected MessageListener createMessageListener(MessageListenerContainer container) {
MessageListener listener = getMessageListener();
Assert.state(listener != null, "No MessageListener set");
return listener;
}
@Override
protected StringBuilder getEndpointDescription() {
return super.getEndpointDescription()
.append(" | messageListener='").append(this.messageListener).append('\'');
}
}
| SimpleJmsListenerEndpoint |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java | {
"start": 5944,
"end": 18326
} | class ____ found: " + cnfe);
}
}
/**
* Util method to build socket addr from either.
* {@literal <host>:<port>}
* {@literal <fs>://<host>:<port>/<path>}
*
* @param target target.
* @return socket addr.
*/
public static InetSocketAddress createSocketAddr(String target) {
return createSocketAddr(target, -1);
}
public static InetSocketAddress createSocketAddrUnresolved(String target) {
return createSocketAddr(target, -1, null, false, false);
}
/**
* Util method to build socket addr from either.
* {@literal <host>}
* {@literal <host>:<port>}
* {@literal <fs>://<host>:<port>/<path>}
*
* @param target target.
* @param defaultPort default port.
* @return socket addr.
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort) {
return createSocketAddr(target, defaultPort, null);
}
/**
* Create an InetSocketAddress from the given target string and
* default port. If the string cannot be parsed correctly, the
* <code>configName</code> parameter is used as part of the
* exception message, allowing the user to better diagnose
* the misconfiguration.
*
* @param target a string of either "host" or "host:port"
* @param defaultPort the default port if <code>target</code> does not
* include a port number
* @param configName the name of the configuration from which
* <code>target</code> was loaded. This is used in the
* exception message in the case that parsing fails.
* @return socket addr.
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort,
String configName) {
return createSocketAddr(target, defaultPort, configName, false);
}
/**
* Create an InetSocketAddress from the given target string and
* default port. If the string cannot be parsed correctly, the
* <code>configName</code> parameter is used as part of the
* exception message, allowing the user to better diagnose
* the misconfiguration.
*
* @param target a string of either "host" or "host:port"
* @param defaultPort the default port if <code>target</code> does not
* include a port number
* @param configName the name of the configuration from which
* <code>target</code> was loaded. This is used in the
* exception message in the case that parsing fails.
* @param useCacheIfPresent Whether use cache when create URI
* @return socket addr
*/
public static InetSocketAddress createSocketAddr(String target,
int defaultPort,
String configName,
boolean useCacheIfPresent) {
return createSocketAddr(target, defaultPort, configName, useCacheIfPresent, true);
}
public static InetSocketAddress createSocketAddr(
String target, int defaultPort, String configName,
boolean useCacheIfPresent, boolean isResolved) {
String helpText = "";
if (configName != null) {
helpText = " (configuration property '" + configName + "')";
}
if (target == null) {
throw new IllegalArgumentException("Target address cannot be null." +
helpText);
}
target = target.trim();
boolean hasScheme = target.contains("://");
URI uri = createURI(target, hasScheme, helpText, useCacheIfPresent);
String host = uri.getHost();
int port = uri.getPort();
if (port == -1) {
port = defaultPort;
}
String path = uri.getPath();
if ((host == null) || (port < 0) ||
(!hasScheme && path != null && !path.isEmpty())) {
throw new IllegalArgumentException(
"Does not contain a valid host:port authority: " + target + helpText
);
}
if (isResolved) {
return createSocketAddrForHost(host, port);
}
return InetSocketAddress.createUnresolved(host, port);
}
private static final long URI_CACHE_SIZE_DEFAULT = 1000;
private static final long URI_CACHE_EXPIRE_TIME_DEFAULT = 12;
private static final Cache<String, URI> URI_CACHE = CacheBuilder.newBuilder()
.maximumSize(URI_CACHE_SIZE_DEFAULT)
.expireAfterWrite(URI_CACHE_EXPIRE_TIME_DEFAULT, TimeUnit.HOURS)
.build();
private static URI createURI(String target,
boolean hasScheme,
String helpText,
boolean useCacheIfPresent) {
URI uri;
if (useCacheIfPresent) {
uri = URI_CACHE.getIfPresent(target);
if (uri != null) {
return uri;
}
}
try {
uri = hasScheme ? URI.create(target) :
URI.create("dummyscheme://" + target);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(
"Does not contain a valid host:port authority: " + target + helpText
);
}
if (useCacheIfPresent) {
URI_CACHE.put(target, uri);
}
return uri;
}
/**
* Create a socket address with the given host and port. The hostname
* might be replaced with another host that was set via
* {@link #addStaticResolution(String, String)}. The value of
* hadoop.security.token.service.use_ip will determine whether the
* standard java host resolver is used, or if the fully qualified resolver
* is used.
* @param host the hostname or IP use to instantiate the object
* @param port the port number
* @return InetSocketAddress
*/
public static InetSocketAddress createSocketAddrForHost(String host, int port) {
String staticHost = getStaticResolution(host);
String resolveHost = (staticHost != null) ? staticHost : host;
InetSocketAddress addr;
try {
InetAddress iaddr = SecurityUtil.getByName(resolveHost);
// if there is a static entry for the host, make the returned
// address look like the original given host
if (staticHost != null) {
iaddr = InetAddress.getByAddress(host, iaddr.getAddress());
}
addr = new InetSocketAddress(iaddr, port);
} catch (UnknownHostException e) {
addr = InetSocketAddress.createUnresolved(host, port);
}
return addr;
}
/**
* Resolve the uri's hostname and add the default port if not in the uri
* @param uri to resolve
* @param defaultPort if none is given
* @return URI
*/
public static URI getCanonicalUri(URI uri, int defaultPort) {
// skip if there is no authority, ie. "file" scheme or relative uri
String host = uri.getHost();
if (host == null) {
return uri;
}
String fqHost = canonicalizeHost(host);
int port = uri.getPort();
// short out if already canonical with a port
if (host.equals(fqHost) && port != -1) {
return uri;
}
// reconstruct the uri with the canonical host and port
try {
uri = new URI(uri.getScheme(), uri.getUserInfo(),
fqHost, (port == -1) ? defaultPort : port,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
return uri;
}
// cache the canonicalized hostnames; the cache currently isn't expired,
// but the canonicals will only change if the host's resolver configuration
// changes
private static final ConcurrentHashMap<String, String> canonicalizedHostCache =
new ConcurrentHashMap<String, String>();
private static String canonicalizeHost(String host) {
// check if the host has already been canonicalized
String fqHost = canonicalizedHostCache.get(host);
if (fqHost == null) {
try {
fqHost = SecurityUtil.getByName(host).getHostName();
canonicalizedHostCache.putIfAbsent(host, fqHost);
// ensures that we won't return a canonicalized stale (non-cached)
// host name for a given host
fqHost = canonicalizedHostCache.get(host);
} catch (UnknownHostException e) {
fqHost = host;
}
}
return fqHost;
}
/**
* Adds a static resolution for host. This can be used for setting up
* hostnames with names that are fake to point to a well known host. For e.g.
* in some testcases we require to have daemons with different hostnames
* running on the same machine. In order to create connections to these
* daemons, one can set up mappings from those hostnames to "localhost".
* {@link NetUtils#getStaticResolution(String)} can be used to query for
* the actual hostname.
* @param host the hostname or IP use to instantiate the object.
* @param resolvedName resolved name.
*/
public static void addStaticResolution(String host, String resolvedName) {
synchronized (hostToResolved) {
hostToResolved.put(host, resolvedName);
}
}
/**
* Retrieves the resolved name for the passed host. The resolved name must
* have been set earlier using
* {@link NetUtils#addStaticResolution(String, String)}
* @param host the hostname or IP use to instantiate the object.
* @return the resolution
*/
public static String getStaticResolution(String host) {
synchronized (hostToResolved) {
return hostToResolved.get(host);
}
}
/**
* This is used to get all the resolutions that were added using
* {@link NetUtils#addStaticResolution(String, String)}. The return
* value is a List each element of which contains an array of String
* of the form String[0]=hostname, String[1]=resolved-hostname
* @return the list of resolutions
*/
public static List <String[]> getAllStaticResolutions() {
synchronized (hostToResolved) {
Set <Entry <String, String>>entries = hostToResolved.entrySet();
if (entries.size() == 0) {
return null;
}
List <String[]> l = new ArrayList<String[]>(entries.size());
for (Entry<String, String> e : entries) {
l.add(new String[] {e.getKey(), e.getValue()});
}
return l;
}
}
/**
* Returns InetSocketAddress that a client can use to
* connect to the server. Server.getListenerAddress() is not correct when
* the server binds to "0.0.0.0". This returns "hostname:port" of the server,
* or "127.0.0.1:port" when the getListenerAddress() returns "0.0.0.0:port".
*
* @param server server.
* @return socket address that a client can use to connect to the server.
*/
public static InetSocketAddress getConnectAddress(Server server) {
return getConnectAddress(server.getListenerAddress());
}
/**
* Returns an InetSocketAddress that a client can use to connect to the
* given listening address.
*
* @param addr of a listener
* @return socket address that a client can use to connect to the server.
*/
public static InetSocketAddress getConnectAddress(InetSocketAddress addr) {
if (!addr.isUnresolved() && addr.getAddress().isAnyLocalAddress()) {
try {
addr = new InetSocketAddress(InetAddress.getLocalHost(), addr.getPort());
} catch (UnknownHostException uhe) {
// shouldn't get here unless the host doesn't have a loopback iface
addr = createSocketAddrForHost("127.0.0.1", addr.getPort());
}
}
return addr;
}
/**
* Same as <code>getInputStream(socket, socket.getSoTimeout()).</code>
*
* @param socket socket.
* @throws IOException raised on errors performing I/O.
* @return SocketInputWrapper for reading from the socket.
* @see #getInputStream(Socket, long)
*/
public static SocketInputWrapper getInputStream(Socket socket)
throws IOException {
return getInputStream(socket, socket.getSoTimeout());
}
/**
* Return a {@link SocketInputWrapper} for the socket and set the given
* timeout. If the socket does not have an associated channel, then its socket
* timeout will be set to the specified value. Otherwise, a
* {@link SocketInputStream} will be created which reads with the configured
* timeout.
*
* Any socket created using socket factories returned by {@link #NetUtils},
* must use this | not |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java | {
"start": 2651,
"end": 24287
} | class ____<T extends InternalNumericMetricsAggregation> extends ESIntegTestCase {
static final String SINGLE_VALUED_FIELD_NAME = "l_value";
static int numDocs;
static int interval;
static int minRandomValue;
static int maxRandomValue;
static int numValueBuckets;
static long[] valueCounts;
static String histoName;
static String termsName;
/** Creates the pipeline aggregation to test */
protected abstract BucketMetricsPipelineAggregationBuilder<?> BucketMetricsPipelineAgg(String name, String bucketsPath);
/** Checks that the provided bucket values and keys agree with the result of the pipeline aggregation */
protected abstract void assertResult(
IntToDoubleFunction bucketValues,
Function<Integer, String> bucketKeys,
int numValues,
T pipelineBucket
);
/** Nested metric from the pipeline aggregation to test. This metric is added to the end of the bucket path*/
protected abstract String nestedMetric();
/** Extract the value of the nested metric provided in {@link #nestedMetric()} */
protected abstract double getNestedMetric(T bucket);
@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(indicesAdmin().prepareCreate("idx").setMapping("tag", "type=keyword").get());
createIndex("idx_unmapped");
numDocs = randomIntBetween(6, 20);
interval = randomIntBetween(2, 5);
minRandomValue = 0;
maxRandomValue = 20;
numValueBuckets = ((maxRandomValue - minRandomValue) / interval) + 1;
valueCounts = new long[numValueBuckets];
List<IndexRequestBuilder> builders = new ArrayList<>();
for (int i = 0; i < numDocs; i++) {
int fieldValue = randomIntBetween(minRandomValue, maxRandomValue);
builders.add(
prepareIndex("idx").setSource(
jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)).endObject()
)
);
final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? -1 : 0) - (minRandomValue / interval - 1);
valueCounts[bucket]++;
}
assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer"));
for (int i = 0; i < 2; i++) {
builders.add(
prepareIndex("empty_bucket_idx").setId("" + i)
.setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())
);
}
indexRandom(true, builders);
ensureSearchable();
histoName = randomName();
termsName = randomName();
}
private String randomName() {
return randomBoolean()
? randomAlphaOfLengthBetween(3, 12)
: randomAlphaOfLengthBetween(3, 6) + "." + randomAlphaOfLengthBetween(3, 6);
}
public void testDocCountTopLevel() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
histogram(histoName).field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue)
).addAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">_count")),
response -> {
Histogram histo = response.getAggregations().get(histoName);
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo(histoName));
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
T pipelineBucket = response.getAggregations().get("pipeline_agg");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("pipeline_agg"));
assertResult((i) -> buckets.get(i).getDocCount(), (i) -> buckets.get(i).getKeyAsString(), numValueBuckets, pipelineBucket);
}
);
}
public void testDocCountAsSubAgg() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
terms(termsName).field("tag")
.order(BucketOrder.key(true))
.subAggregation(
histogram(histoName).field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
)
.subAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">_count"))
),
response -> {
Terms terms = response.getAggregations().get(termsName);
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo(termsName));
List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
assertThat(termsBuckets.size(), equalTo(interval));
for (int i = 0; i < interval; ++i) {
Terms.Bucket termsBucket = termsBuckets.get(i);
assertThat(termsBucket, notNullValue());
assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
Histogram histo = termsBucket.getAggregations().get(histoName);
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo(histoName));
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
for (int j = 0; j < numValueBuckets; ++j) {
Histogram.Bucket bucket = buckets.get(j);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
}
T pipelineBucket = termsBucket.getAggregations().get("pipeline_agg");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("pipeline_agg"));
assertResult(
(k) -> buckets.get(k).getDocCount(),
(k) -> buckets.get(k).getKeyAsString(),
numValueBuckets,
pipelineBucket
);
}
}
);
}
public void testMetricTopLevel() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(terms(termsName).field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.addAggregation(BucketMetricsPipelineAgg("pipeline_agg", termsName + ">sum")),
response -> {
Terms terms = response.getAggregations().get(termsName);
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo(termsName));
List<? extends Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(interval));
for (int i = 0; i < interval; ++i) {
Terms.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval)));
assertThat(bucket.getDocCount(), greaterThan(0L));
}
T pipelineBucket = response.getAggregations().get("pipeline_agg");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("pipeline_agg"));
IntToDoubleFunction function = (i) -> {
Sum sum = buckets.get(i).getAggregations().get("sum");
assertThat(sum, notNullValue());
return sum.value();
};
assertResult(function, (i) -> buckets.get(i).getKeyAsString(), interval, pipelineBucket);
}
);
}
public void testMetricAsSubAgg() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
terms(termsName).field("tag")
.order(BucketOrder.key(true))
.subAggregation(
histogram(histoName).field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
)
.subAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">sum"))
),
response -> {
Terms terms = response.getAggregations().get(termsName);
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo(termsName));
List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
assertThat(termsBuckets.size(), equalTo(interval));
for (int i = 0; i < interval; ++i) {
Terms.Bucket termsBucket = termsBuckets.get(i);
assertThat(termsBucket, notNullValue());
assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
Histogram histo = termsBucket.getAggregations().get(histoName);
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo(histoName));
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
List<Histogram.Bucket> notNullBuckets = new ArrayList<>();
for (int j = 0; j < numValueBuckets; ++j) {
Histogram.Bucket bucket = buckets.get(j);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
if (bucket.getDocCount() != 0) {
notNullBuckets.add(bucket);
}
}
T pipelineBucket = termsBucket.getAggregations().get("pipeline_agg");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("pipeline_agg"));
IntToDoubleFunction function = (k) -> {
Sum sum = notNullBuckets.get(k).getAggregations().get("sum");
assertThat(sum, notNullValue());
return sum.value();
};
assertResult(function, (k) -> notNullBuckets.get(k).getKeyAsString(), notNullBuckets.size(), pipelineBucket);
}
}
);
}
public void testMetricAsSubAggWithInsertZeros() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
terms(termsName).field("tag")
.order(BucketOrder.key(true))
.subAggregation(
histogram(histoName).field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
)
.subAggregation(
BucketMetricsPipelineAgg("pipeline_agg", histoName + ">sum").gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS)
)
),
response -> {
Terms terms = response.getAggregations().get(termsName);
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo(termsName));
List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
assertThat(termsBuckets.size(), equalTo(interval));
for (int i = 0; i < interval; ++i) {
Terms.Bucket termsBucket = termsBuckets.get(i);
assertThat(termsBucket, notNullValue());
assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
Histogram histo = termsBucket.getAggregations().get(histoName);
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo(histoName));
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
for (int j = 0; j < numValueBuckets; ++j) {
Histogram.Bucket bucket = buckets.get(j);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
}
T pipelineBucket = termsBucket.getAggregations().get("pipeline_agg");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("pipeline_agg"));
IntToDoubleFunction function = (k) -> {
Sum sum = buckets.get(k).getAggregations().get("sum");
assertThat(sum, notNullValue());
return sum.value();
};
assertResult(function, (k) -> buckets.get(k).getKeyAsString(), numValueBuckets, pipelineBucket);
}
}
);
}
public void testNoBuckets() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
terms(termsName).field("tag")
.includeExclude(new IncludeExclude(null, "tag.*", null, null))
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))
).addAggregation(BucketMetricsPipelineAgg("pipeline_agg", termsName + ">sum")),
response -> {
Terms terms = response.getAggregations().get(termsName);
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo(termsName));
List<? extends Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(0));
T pipelineBucket = response.getAggregations().get("pipeline_agg");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("pipeline_agg"));
assertResult((k) -> 0.0, (k) -> "", 0, pipelineBucket);
}
);
}
public void testNested() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
terms(termsName).field("tag")
.order(BucketOrder.key(true))
.subAggregation(
histogram(histoName).field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
)
.subAggregation(BucketMetricsPipelineAgg("nested_histo_bucket", histoName + ">_count"))
).addAggregation(BucketMetricsPipelineAgg("nested_terms_bucket", termsName + ">nested_histo_bucket." + nestedMetric())),
response -> {
Terms terms = response.getAggregations().get(termsName);
assertThat(terms, notNullValue());
assertThat(terms.getName(), equalTo(termsName));
List<? extends Terms.Bucket> termsBuckets = terms.getBuckets();
assertThat(termsBuckets.size(), equalTo(interval));
List<T> allBuckets = new ArrayList<>();
List<String> nestedTags = new ArrayList<>();
for (int i = 0; i < interval; ++i) {
Terms.Bucket termsBucket = termsBuckets.get(i);
assertThat(termsBucket, notNullValue());
assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval)));
Histogram histo = termsBucket.getAggregations().get(histoName);
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo(histoName));
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
for (int j = 0; j < numValueBuckets; ++j) {
Histogram.Bucket bucket = buckets.get(j);
assertThat(bucket, notNullValue());
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval));
}
T pipelineBucket = termsBucket.getAggregations().get("nested_histo_bucket");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("nested_histo_bucket"));
assertResult(
(k) -> buckets.get(k).getDocCount(),
(k) -> buckets.get(k).getKeyAsString(),
numValueBuckets,
pipelineBucket
);
allBuckets.add(pipelineBucket);
nestedTags.add(termsBucket.getKeyAsString());
}
T pipelineBucket = response.getAggregations().get("nested_terms_bucket");
assertThat(pipelineBucket, notNullValue());
assertThat(pipelineBucket.getName(), equalTo("nested_terms_bucket"));
assertResult((k) -> getNestedMetric(allBuckets.get(k)), (k) -> nestedTags.get(k), allBuckets.size(), pipelineBucket);
}
);
}
/**
* https://github.com/elastic/elasticsearch/issues/33514
*
* This bug manifests as the max_bucket agg ("peak") being added to the response twice, because
* the pipeline agg is run twice. This makes invalid JSON and breaks conversion to maps.
* The bug was caused by an UnmappedTerms being the chosen as the first reduction target. UnmappedTerms
* delegated reduction to the first non-unmapped agg, which would reduce and run pipeline aggs. But then
* execution returns to the UnmappedTerms and _it_ runs pipelines as well, doubling up on the values.
*/
public void testFieldIsntWrittenOutTwice() throws Exception {
// you need to add an additional index with no fields in order to trigger this (or potentially a shard)
// so that there is an UnmappedTerms in the list to reduce.
createIndex("foo_1");
// tag::noformat
XContentBuilder builder = jsonBuilder().startObject()
.startObject("properties")
.startObject("@timestamp")
.field("type", "date")
.endObject()
.startObject("license")
.startObject("properties")
.startObject("count")
.field("type", "long")
.endObject()
.startObject("partnumber")
.field("type", "text")
.startObject("fields")
.startObject("keyword")
.field("type", "keyword")
.field("ignore_above", 256)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
// end::noformat
assertAcked(indicesAdmin().prepareCreate("foo_2").setMapping(builder).get());
// tag::noformat
XContentBuilder docBuilder = jsonBuilder().startObject()
.startObject("license")
.field("partnumber", "foobar")
.field("count", 2)
.endObject()
.field("@timestamp", "2018-07-08T08:07:00.599Z")
.endObject();
// end::noformat
prepareIndex("foo_2").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
indicesAdmin().prepareRefresh();
TermsAggregationBuilder groupByLicenseAgg = terms("group_by_license_partnumber").field("license.partnumber.keyword");
SumAggregationBuilder sumAggBuilder = sum("total_licenses").field("license.count");
DateHistogramAggregationBuilder licensePerDayBuilder = dateHistogram("licenses_per_day").field("@timestamp")
.fixedInterval(DateHistogramInterval.DAY);
licensePerDayBuilder.subAggregation(sumAggBuilder);
groupByLicenseAgg.subAggregation(licensePerDayBuilder);
groupByLicenseAgg.subAggregation(BucketMetricsPipelineAgg("peak", "licenses_per_day>total_licenses"));
assertCheckedResponse(prepareSearch("foo_*").setSize(0).addAggregation(groupByLicenseAgg), response -> {
BytesReference bytes = XContentHelper.toXContent(response, XContentType.JSON, false);
XContentHelper.convertToMap(bytes, false, XContentType.JSON);
});
}
}
| BucketMetricsPipeLineAggregationTestCase |
java | google__dagger | javatests/dagger/hilt/processor/internal/aggregateddeps/AggregatedDepsProcessorErrorsTest.java | {
"start": 8421,
"end": 9312
} | class ____ {}",
" }",
"}");
compile(
source,
subject -> {
subject.compilationDidFail();
subject.hasErrorCount(1);
subject.hasErrorContaining(
"Nested @InstallIn modules must be static unless they are directly nested within"
+ " a test. Found: foo.bar.Outer.Nested.InnerModule");
});
}
@Test
public void testInnerModuleInTest_succeeds() {
Source source =
HiltCompilerTests.javaSource(
"foo.bar.Outer",
"package foo.bar;",
"",
"import dagger.Module;",
"import dagger.hilt.InstallIn;",
"import dagger.hilt.components.SingletonComponent;",
"import dagger.hilt.android.testing.HiltAndroidTest;",
"",
"@HiltAndroidTest",
"public final | InnerModule |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedVariableTest.java | {
"start": 18050,
"end": 18213
} | class ____ {
public static void main(String[] args) {
try (A a = new A()) {}
}
}
| UnusedTryResource |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_439.java | {
"start": 111,
"end": 256
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
JSON.parseObject("{/*aa*/}");
}
}
| Bug_for_issue_439 |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/benchmark/basic/DoubleBenchmark_obj.java | {
"start": 1656,
"end": 1844
} | class ____ {
public Model() {
}
public Double v1;
public Double v2;
public Double v3;
public Double v4;
public Double v5;
}
}
| Model |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJob.java | {
"start": 25854,
"end": 26227
} | class ____ extends RuntimeException {
final long nextDelayInMsSinceEpoch;
final boolean haveEverSeenData;
EmptyDataCountException(long nextDelayInMsSinceEpoch, boolean haveEverSeenData) {
this.nextDelayInMsSinceEpoch = nextDelayInMsSinceEpoch;
this.haveEverSeenData = haveEverSeenData;
}
}
}
| EmptyDataCountException |
java | jhy__jsoup | src/main/java/org/jsoup/select/Elements.java | {
"start": 4995,
"end": 5230
} | class ____ to add
@return this
*/
public Elements addClass(String className) {
for (Element element : this) {
element.addClass(className);
}
return this;
}
/**
Remove the | name |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/MySqlAnalyzeTest_1.java | {
"start": 963,
"end": 2292
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "ANALYZE TABLE t1,t2";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
// print(statementList);
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("ANALYZE TABLE t1, t2", //
output);
// System.out.println("Tables : " + visitor.getTables());
// System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(2, visitor.getTables().size());
assertEquals(0, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("City")));
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("t2")));
// assertTrue(visitor.getColumns().contains(new Column("t2", "id")));
}
}
| MySqlAnalyzeTest_1 |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedClassIntegrationTests.java | {
"start": 63797,
"end": 64261
} | class ____
extends AbstractValidLifecycleMethodInjectionTestCase {
private final AtomicInteger value;
ValidLifecycleMethodInjectionWithConstructorInjectionTestCase(
@ConvertWith(AtomicIntegerConverter.class) AtomicInteger value) {
this.value = value;
}
@Test
void test() {
assertEquals(5, this.value.getAndIncrement());
}
}
@ParameterizedClass
@ValueSource(ints = 1)
static | ValidLifecycleMethodInjectionWithConstructorInjectionTestCase |
java | google__guava | android/guava-tests/test/com/google/common/math/StatsAccumulatorTest.java | {
"start": 4153,
"end": 37782
} | class ____ extends TestCase {
private StatsAccumulator emptyAccumulator;
private StatsAccumulator emptyAccumulatorByAddAllEmptyIterable;
private StatsAccumulator emptyAccumulatorByAddAllEmptyStats;
private StatsAccumulator oneValueAccumulator;
private StatsAccumulator oneValueAccumulatorByAddAllEmptyStats;
private StatsAccumulator twoValuesAccumulator;
private StatsAccumulator twoValuesAccumulatorByAddAllStats;
private StatsAccumulator manyValuesAccumulatorByAddAllIterable;
private StatsAccumulator manyValuesAccumulatorByAddAllIterator;
private StatsAccumulator manyValuesAccumulatorByAddAllVarargs;
private StatsAccumulator manyValuesAccumulatorByRepeatedAdd;
private StatsAccumulator manyValuesAccumulatorByAddAndAddAll;
private StatsAccumulator manyValuesAccumulatorByAddAllStats;
private StatsAccumulator manyValuesAccumulatorByAddAllStatsAccumulator;
private StatsAccumulator integerManyValuesAccumulatorByAddAllIterable;
private StatsAccumulator longManyValuesAccumulatorByAddAllIterator;
private StatsAccumulator longManyValuesAccumulatorByAddAllVarargs;
@Override
protected void setUp() throws Exception {
super.setUp();
emptyAccumulator = new StatsAccumulator();
emptyAccumulatorByAddAllEmptyIterable = new StatsAccumulator();
emptyAccumulatorByAddAllEmptyIterable.addAll(ImmutableList.<Double>of());
emptyAccumulatorByAddAllEmptyStats = new StatsAccumulator();
emptyAccumulatorByAddAllEmptyStats.addAll(Stats.of());
oneValueAccumulator = new StatsAccumulator();
oneValueAccumulator.add(ONE_VALUE);
oneValueAccumulatorByAddAllEmptyStats = new StatsAccumulator();
oneValueAccumulatorByAddAllEmptyStats.add(ONE_VALUE);
oneValueAccumulatorByAddAllEmptyStats.addAll(Stats.of());
twoValuesAccumulator = new StatsAccumulator();
twoValuesAccumulator.addAll(TWO_VALUES);
twoValuesAccumulatorByAddAllStats = new StatsAccumulator();
twoValuesAccumulatorByAddAllStats.addAll(Stats.of(ONE_VALUE));
twoValuesAccumulatorByAddAllStats.addAll(Stats.of(OTHER_ONE_VALUE));
manyValuesAccumulatorByAddAllIterable = new StatsAccumulator();
manyValuesAccumulatorByAddAllIterable.addAll(MANY_VALUES);
manyValuesAccumulatorByAddAllIterator = new StatsAccumulator();
manyValuesAccumulatorByAddAllIterator.addAll(MANY_VALUES.iterator());
manyValuesAccumulatorByAddAllVarargs = new StatsAccumulator();
manyValuesAccumulatorByAddAllVarargs.addAll(Doubles.toArray(MANY_VALUES));
manyValuesAccumulatorByRepeatedAdd = new StatsAccumulator();
for (double value : MANY_VALUES) {
manyValuesAccumulatorByRepeatedAdd.add(value);
}
manyValuesAccumulatorByAddAndAddAll = new StatsAccumulator();
manyValuesAccumulatorByAddAndAddAll.add(MANY_VALUES.get(0));
manyValuesAccumulatorByAddAndAddAll.addAll(MANY_VALUES.subList(1, MANY_VALUES.size()));
manyValuesAccumulatorByAddAllStats = new StatsAccumulator();
manyValuesAccumulatorByAddAllStats.addAll(
Stats.of(MANY_VALUES.subList(0, MANY_VALUES.size() / 2)));
manyValuesAccumulatorByAddAllStats.addAll(
Stats.of(MANY_VALUES.subList(MANY_VALUES.size() / 2, MANY_VALUES.size())));
manyValuesAccumulatorByAddAllStatsAccumulator = new StatsAccumulator();
manyValuesAccumulatorByAddAllStatsAccumulator.addAll(
statsAccumulatorOf(MANY_VALUES.subList(0, MANY_VALUES.size() / 2)));
manyValuesAccumulatorByAddAllStatsAccumulator.addAll(
statsAccumulatorOf(MANY_VALUES.subList(MANY_VALUES.size() / 2, MANY_VALUES.size())));
integerManyValuesAccumulatorByAddAllIterable = new StatsAccumulator();
integerManyValuesAccumulatorByAddAllIterable.addAll(INTEGER_MANY_VALUES);
longManyValuesAccumulatorByAddAllIterator = new StatsAccumulator();
longManyValuesAccumulatorByAddAllIterator.addAll(LONG_MANY_VALUES.iterator());
longManyValuesAccumulatorByAddAllVarargs = new StatsAccumulator();
longManyValuesAccumulatorByAddAllVarargs.addAll(Longs.toArray(LONG_MANY_VALUES));
}
private static StatsAccumulator statsAccumulatorOf(Iterable<? extends Number> values) {
StatsAccumulator accumulator = new StatsAccumulator();
accumulator.addAll(values);
return accumulator;
}
public void testCount() {
assertThat(emptyAccumulator.count()).isEqualTo(0);
assertThat(emptyAccumulatorByAddAllEmptyIterable.count()).isEqualTo(0);
assertThat(emptyAccumulatorByAddAllEmptyStats.count()).isEqualTo(0);
assertThat(oneValueAccumulator.count()).isEqualTo(1);
assertThat(oneValueAccumulatorByAddAllEmptyStats.count()).isEqualTo(1);
assertThat(twoValuesAccumulator.count()).isEqualTo(2);
assertThat(twoValuesAccumulatorByAddAllStats.count()).isEqualTo(2);
assertThat(manyValuesAccumulatorByAddAllIterable.count()).isEqualTo(MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllIterator.count()).isEqualTo(MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllVarargs.count()).isEqualTo(MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByRepeatedAdd.count()).isEqualTo(MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAndAddAll.count()).isEqualTo(MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllStats.count()).isEqualTo(MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.count()).isEqualTo(MANY_VALUES_COUNT);
assertThat(integerManyValuesAccumulatorByAddAllIterable.count())
.isEqualTo(StatsTesting.INTEGER_MANY_VALUES_COUNT);
assertThat(longManyValuesAccumulatorByAddAllIterator.count())
.isEqualTo(StatsTesting.LONG_MANY_VALUES_COUNT);
assertThat(longManyValuesAccumulatorByAddAllVarargs.count())
.isEqualTo(StatsTesting.LONG_MANY_VALUES_COUNT);
}
public void testCountOverflow_doesNotThrow() {
StatsAccumulator accumulator = new StatsAccumulator();
accumulator.add(ONE_VALUE);
for (int power = 1; power < Long.SIZE - 1; power++) {
accumulator.addAll(accumulator.snapshot());
}
// Should overflow without throwing.
accumulator.addAll(accumulator.snapshot());
assertThat(accumulator.count()).isLessThan(0L);
}
public void testMean() {
assertThrows(IllegalStateException.class, () -> emptyAccumulator.mean());
assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.mean());
assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.mean());
assertThat(oneValueAccumulator.mean()).isWithin(ALLOWED_ERROR).of(ONE_VALUE);
assertThat(oneValueAccumulatorByAddAllEmptyStats.mean()).isWithin(ALLOWED_ERROR).of(ONE_VALUE);
assertThat(twoValuesAccumulator.mean()).isWithin(ALLOWED_ERROR).of(TWO_VALUES_MEAN);
assertThat(twoValuesAccumulatorByAddAllStats.mean())
.isWithin(ALLOWED_ERROR)
.of(TWO_VALUES_MEAN);
assertThat(manyValuesAccumulatorByAddAllIterable.mean())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
assertThat(manyValuesAccumulatorByAddAllIterator.mean())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
assertThat(manyValuesAccumulatorByAddAllVarargs.mean())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
assertThat(manyValuesAccumulatorByRepeatedAdd.mean())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
assertThat(manyValuesAccumulatorByAddAndAddAll.mean())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
assertThat(manyValuesAccumulatorByAddAllStats.mean())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.mean())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
// For datasets of many double values created from an iterable, we test many combinations of
// finite and non-finite values:
for (ManyValues values : ALL_MANY_VALUES) {
StatsAccumulator accumulator = new StatsAccumulator();
StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator();
accumulator.addAll(values.asIterable());
for (double value : values.asIterable()) {
accumulatorByAddAllStats.addAll(Stats.of(value));
}
double mean = accumulator.mean();
double meanByAddAllStats = accumulatorByAddAllStats.mean();
if (values.hasAnyNaN()) {
assertWithMessage("mean of " + values).that(mean).isNaN();
assertWithMessage("mean by addAll(Stats) of " + values).that(meanByAddAllStats).isNaN();
} else if (values.hasAnyPositiveInfinity() && values.hasAnyNegativeInfinity()) {
assertWithMessage("mean of " + values).that(mean).isNaN();
assertWithMessage("mean by addAll(Stats) of " + values).that(meanByAddAllStats).isNaN();
} else if (values.hasAnyPositiveInfinity()) {
assertWithMessage("mean of " + values).that(mean).isPositiveInfinity();
assertWithMessage("mean by addAll(Stats) of " + values)
.that(meanByAddAllStats)
.isPositiveInfinity();
} else if (values.hasAnyNegativeInfinity()) {
assertWithMessage("mean of " + values).that(mean).isNegativeInfinity();
assertWithMessage("mean by addAll(Stats) of " + values)
.that(meanByAddAllStats)
.isNegativeInfinity();
} else {
assertWithMessage("mean of " + values)
.that(mean)
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
assertWithMessage("mean by addAll(Stats) of " + values)
.that(meanByAddAllStats)
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN);
}
}
assertThat(integerManyValuesAccumulatorByAddAllIterable.mean())
.isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_MEAN)
.of(INTEGER_MANY_VALUES_MEAN);
assertThat(longManyValuesAccumulatorByAddAllIterator.mean())
.isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN)
.of(LONG_MANY_VALUES_MEAN);
assertThat(longManyValuesAccumulatorByAddAllVarargs.mean())
.isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN)
.of(LONG_MANY_VALUES_MEAN);
}
public void testSum() {
assertThat(emptyAccumulator.sum()).isEqualTo(0.0);
assertThat(emptyAccumulatorByAddAllEmptyIterable.sum()).isEqualTo(0.0);
assertThat(emptyAccumulatorByAddAllEmptyStats.sum()).isEqualTo(0.0);
assertThat(oneValueAccumulator.sum()).isWithin(ALLOWED_ERROR).of(ONE_VALUE);
assertThat(oneValueAccumulatorByAddAllEmptyStats.sum()).isWithin(ALLOWED_ERROR).of(ONE_VALUE);
assertThat(twoValuesAccumulator.sum()).isWithin(ALLOWED_ERROR).of(TWO_VALUES_MEAN * 2);
assertThat(twoValuesAccumulatorByAddAllStats.sum())
.isWithin(ALLOWED_ERROR)
.of(TWO_VALUES_MEAN * 2);
assertThat(manyValuesAccumulatorByAddAllIterable.sum())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN * MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllIterator.sum())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN * MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllVarargs.sum())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN * MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByRepeatedAdd.sum())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN * MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAndAddAll.sum())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN * MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllStats.sum())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN * MANY_VALUES_COUNT);
assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sum())
.isWithin(ALLOWED_ERROR)
.of(MANY_VALUES_MEAN * MANY_VALUES_COUNT);
assertThat(integerManyValuesAccumulatorByAddAllIterable.sum())
.isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_MEAN)
.of(INTEGER_MANY_VALUES_MEAN * INTEGER_MANY_VALUES_COUNT);
assertThat(longManyValuesAccumulatorByAddAllIterator.sum())
.isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN)
.of(LONG_MANY_VALUES_MEAN * LONG_MANY_VALUES_COUNT);
assertThat(longManyValuesAccumulatorByAddAllVarargs.sum())
.isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN)
.of(LONG_MANY_VALUES_MEAN * LONG_MANY_VALUES_COUNT);
}
/**
 * populationVariance() must: throw for empty accumulators, be exactly 0.0 for a single value,
 * and otherwise equal (sum of squares of deltas) / count within ALLOWED_ERROR — regardless of
 * how the accumulator was populated (iterable, iterator, varargs, repeated add, addAll(Stats),
 * addAll(StatsAccumulator), or a mix).
 */
public void testPopulationVariance() {
  // Undefined on empty datasets: every empty-accumulator variant must throw.
  assertThrows(IllegalStateException.class, () -> emptyAccumulator.populationVariance());
  assertThrows(
      IllegalStateException.class,
      () -> emptyAccumulatorByAddAllEmptyIterable.populationVariance());
  assertThrows(
      IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.populationVariance());
  // A single value has zero population variance by definition (exact, no tolerance).
  assertThat(oneValueAccumulator.populationVariance()).isEqualTo(0.0);
  assertThat(oneValueAccumulatorByAddAllEmptyStats.populationVariance()).isEqualTo(0.0);
  // Two values: expected variance is sum-of-squares-of-deltas / n with n == 2.
  assertThat(twoValuesAccumulator.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2);
  assertThat(twoValuesAccumulatorByAddAllStats.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2);
  // Many finite values: same expected value for every population strategy.
  assertThat(manyValuesAccumulatorByAddAllIterable.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
  assertThat(manyValuesAccumulatorByAddAllIterator.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
  assertThat(manyValuesAccumulatorByAddAllVarargs.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
  assertThat(manyValuesAccumulatorByRepeatedAdd.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
  assertThat(manyValuesAccumulatorByAddAndAddAll.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
  assertThat(manyValuesAccumulatorByAddAllStats.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
  assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.populationVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
  // For datasets of many double values created from an iterator, we test many combinations of
  // finite and non-finite values: any non-finite value must poison the result to NaN; otherwise
  // the finite expected value applies. Both add-via-iterator and addAll(Stats)-per-value paths
  // are checked against each other.
  for (ManyValues values : ALL_MANY_VALUES) {
    StatsAccumulator accumulator = new StatsAccumulator();
    StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator();
    accumulator.addAll(values.asIterable().iterator());
    for (double value : values.asIterable()) {
      accumulatorByAddAllStats.addAll(Stats.of(value));
    }
    double populationVariance = accumulator.populationVariance();
    double populationVarianceByAddAllStats = accumulatorByAddAllStats.populationVariance();
    if (values.hasAnyNonFinite()) {
      assertWithMessage("population variance of " + values).that(populationVariance).isNaN();
      assertWithMessage("population variance by addAll(Stats) of " + values)
          .that(populationVarianceByAddAllStats)
          .isNaN();
    } else {
      assertWithMessage("population variance of " + values)
          .that(populationVariance)
          .isWithin(ALLOWED_ERROR)
          .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
      assertWithMessage("population variance by addAll(Stats) of " + values)
          .that(populationVarianceByAddAllStats)
          .isWithin(ALLOWED_ERROR)
          .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT);
    }
  }
  // Integer/long datasets: tolerance is scaled by the magnitude of the sum of squares, since
  // these datasets can be large.
  assertThat(integerManyValuesAccumulatorByAddAllIterable.populationVariance())
      .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)
      .of(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / INTEGER_MANY_VALUES_COUNT);
  assertThat(longManyValuesAccumulatorByAddAllIterator.populationVariance())
      .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)
      .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT);
  assertThat(longManyValuesAccumulatorByAddAllVarargs.populationVariance())
      .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)
      .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT);
}
/**
 * populationStandardDeviation() must: throw for empty accumulators, be exactly 0.0 for a single
 * value, and otherwise equal sqrt(sum of squares of deltas / count) within ALLOWED_ERROR, for
 * every population strategy.
 */
public void testPopulationStandardDeviation() {
  // Undefined on empty datasets: every empty-accumulator variant must throw.
  assertThrows(IllegalStateException.class, () -> emptyAccumulator.populationStandardDeviation());
  assertThrows(
      IllegalStateException.class,
      () -> emptyAccumulatorByAddAllEmptyIterable.populationStandardDeviation());
  assertThrows(
      IllegalStateException.class,
      () -> emptyAccumulatorByAddAllEmptyStats.populationStandardDeviation());
  // A single value has zero standard deviation by definition (exact, no tolerance).
  assertThat(oneValueAccumulator.populationStandardDeviation()).isEqualTo(0.0);
  assertThat(oneValueAccumulatorByAddAllEmptyStats.populationStandardDeviation()).isEqualTo(0.0);
  // Two values: expected value is sqrt(sum-of-squares-of-deltas / 2).
  assertThat(twoValuesAccumulator.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2));
  assertThat(twoValuesAccumulatorByAddAllStats.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2));
  // Many finite values: same expected value for every population strategy.
  assertThat(manyValuesAccumulatorByAddAllIterable.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT));
  assertThat(manyValuesAccumulatorByAddAllIterator.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT));
  assertThat(manyValuesAccumulatorByAddAllVarargs.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT));
  assertThat(manyValuesAccumulatorByRepeatedAdd.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT));
  assertThat(manyValuesAccumulatorByAddAndAddAll.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT));
  assertThat(manyValuesAccumulatorByAddAllStats.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT));
  assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT));
  // Integer/long datasets: tolerance is scaled by sqrt of the sum of squares so it matches the
  // scale of the expected value.
  assertThat(integerManyValuesAccumulatorByAddAllIterable.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR * sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS))
      .of(sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / INTEGER_MANY_VALUES_COUNT));
  assertThat(longManyValuesAccumulatorByAddAllIterator.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS))
      .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT));
  assertThat(longManyValuesAccumulatorByAddAllVarargs.populationStandardDeviation())
      .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS))
      .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT));
}
/**
 * sampleVariance() must: throw for empty AND single-value accumulators (sample variance needs
 * n >= 2), and otherwise equal (sum of squares of deltas) / (count - 1) within ALLOWED_ERROR,
 * for every population strategy.
 */
public void testSampleVariance() {
  // Undefined on empty datasets.
  assertThrows(IllegalStateException.class, () -> emptyAccumulator.sampleVariance());
  assertThrows(
      IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.sampleVariance());
  assertThrows(
      IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.sampleVariance());
  // Also undefined for a single value (division by n - 1 == 0).
  assertThrows(IllegalStateException.class, () -> oneValueAccumulator.sampleVariance());
  assertThrows(
      IllegalStateException.class, () -> oneValueAccumulatorByAddAllEmptyStats.sampleVariance());
  // Two values: n - 1 == 1, so the expected value is the sum of squares of deltas itself.
  assertThat(twoValuesAccumulator.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS);
  assertThat(twoValuesAccumulatorByAddAllStats.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS);
  // Many finite values: same expected value for every population strategy.
  assertThat(manyValuesAccumulatorByAddAllIterable.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1));
  assertThat(manyValuesAccumulatorByAddAllIterator.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1));
  assertThat(manyValuesAccumulatorByAddAllVarargs.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1));
  assertThat(manyValuesAccumulatorByRepeatedAdd.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1));
  assertThat(manyValuesAccumulatorByAddAndAddAll.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1));
  assertThat(manyValuesAccumulatorByAddAllStats.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1));
  assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sampleVariance())
      .isWithin(ALLOWED_ERROR)
      .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1));
  // Integer/long datasets: tolerance is scaled by the magnitude of the sum of squares.
  assertThat(integerManyValuesAccumulatorByAddAllIterable.sampleVariance())
      .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)
      .of(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (INTEGER_MANY_VALUES_COUNT - 1));
  assertThat(longManyValuesAccumulatorByAddAllIterator.sampleVariance())
      .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)
      .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1));
  assertThat(longManyValuesAccumulatorByAddAllVarargs.sampleVariance())
      .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)
      .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1));
}
/**
 * sampleStandardDeviation() must: throw for empty AND single-value accumulators (sample
 * statistics need n >= 2), and otherwise equal sqrt(sum of squares of deltas / (count - 1))
 * within ALLOWED_ERROR, for every population strategy.
 */
public void testSampleStandardDeviation() {
  // Undefined on empty datasets.
  assertThrows(IllegalStateException.class, () -> emptyAccumulator.sampleStandardDeviation());
  assertThrows(
      IllegalStateException.class,
      () -> emptyAccumulatorByAddAllEmptyIterable.sampleStandardDeviation());
  assertThrows(
      IllegalStateException.class,
      () -> emptyAccumulatorByAddAllEmptyStats.sampleStandardDeviation());
  // Also undefined for a single value (division by n - 1 == 0).
  assertThrows(IllegalStateException.class, () -> oneValueAccumulator.sampleStandardDeviation());
  assertThrows(
      IllegalStateException.class,
      () -> oneValueAccumulatorByAddAllEmptyStats.sampleStandardDeviation());
  // Two values: n - 1 == 1, so the expected value is sqrt of the sum of squares of deltas.
  assertThat(twoValuesAccumulator.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS));
  assertThat(twoValuesAccumulatorByAddAllStats.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS));
  // Many finite values: same expected value for every population strategy.
  assertThat(manyValuesAccumulatorByAddAllIterable.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)));
  assertThat(manyValuesAccumulatorByAddAllIterator.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)));
  assertThat(manyValuesAccumulatorByAddAllVarargs.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)));
  assertThat(manyValuesAccumulatorByRepeatedAdd.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)));
  assertThat(manyValuesAccumulatorByAddAndAddAll.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)));
  assertThat(manyValuesAccumulatorByAddAllStats.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)));
  assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR)
      .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)));
  // Integer/long datasets: the tolerance must scale with sqrt of the sum of squares so that it
  // matches the scale of the expected value (which is itself a square root).
  assertThat(integerManyValuesAccumulatorByAddAllIterable.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR * sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS))
      .of(sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (INTEGER_MANY_VALUES_COUNT - 1)));
  // Fix: the two long-dataset tolerances below previously omitted sqrt(...) around
  // LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS, unlike the integer case above and unlike
  // testPopulationStandardDeviation. For a large sum of squares that made the tolerance far
  // too loose, effectively weakening these assertions.
  assertThat(longManyValuesAccumulatorByAddAllIterator.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS))
      .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1)));
  assertThat(longManyValuesAccumulatorByAddAllVarargs.sampleStandardDeviation())
      .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS))
      .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1)));
}
/**
 * max() must: throw for empty accumulators, track the exact maximum for known datasets, and for
 * datasets containing non-finite values follow IEEE semantics: any NaN poisons the result to
 * NaN; otherwise any +Infinity yields +Infinity; otherwise the finite maximum.
 */
public void testMax() {
  // Undefined on empty datasets.
  assertThrows(IllegalStateException.class, () -> emptyAccumulator.max());
  assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.max());
  assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.max());
  // Known datasets: max is exact (no tolerance).
  assertThat(oneValueAccumulator.max()).isEqualTo(ONE_VALUE);
  assertThat(oneValueAccumulatorByAddAllEmptyStats.max()).isEqualTo(ONE_VALUE);
  assertThat(twoValuesAccumulator.max()).isEqualTo(TWO_VALUES_MAX);
  assertThat(twoValuesAccumulatorByAddAllStats.max()).isEqualTo(TWO_VALUES_MAX);
  assertThat(manyValuesAccumulatorByAddAllIterable.max()).isEqualTo(MANY_VALUES_MAX);
  assertThat(manyValuesAccumulatorByAddAllIterator.max()).isEqualTo(MANY_VALUES_MAX);
  assertThat(manyValuesAccumulatorByAddAllVarargs.max()).isEqualTo(MANY_VALUES_MAX);
  assertThat(manyValuesAccumulatorByRepeatedAdd.max()).isEqualTo(MANY_VALUES_MAX);
  assertThat(manyValuesAccumulatorByAddAndAddAll.max()).isEqualTo(MANY_VALUES_MAX);
  assertThat(manyValuesAccumulatorByAddAllStats.max()).isEqualTo(MANY_VALUES_MAX);
  assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.max()).isEqualTo(MANY_VALUES_MAX);
  // For datasets of many double values created from an array, we test many combinations of
  // finite and non-finite values, comparing the addAll(double[]) path against the
  // addAll(Stats)-per-value path.
  for (ManyValues values : ALL_MANY_VALUES) {
    StatsAccumulator accumulator = new StatsAccumulator();
    StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator();
    accumulator.addAll(values.asArray());
    for (double value : values.asIterable()) {
      accumulatorByAddAllStats.addAll(Stats.of(value));
    }
    double max = accumulator.max();
    double maxByAddAllStats = accumulatorByAddAllStats.max();
    if (values.hasAnyNaN()) {
      // NaN takes precedence over infinities.
      assertWithMessage("max of " + values).that(max).isNaN();
      assertWithMessage("max by addAll(Stats) of " + values).that(maxByAddAllStats).isNaN();
    } else if (values.hasAnyPositiveInfinity()) {
      assertWithMessage("max of " + values).that(max).isPositiveInfinity();
      assertWithMessage("max by addAll(Stats) of " + values)
          .that(maxByAddAllStats)
          .isPositiveInfinity();
    } else {
      assertWithMessage("max of " + values).that(max).isEqualTo(MANY_VALUES_MAX);
      assertWithMessage("max by addAll(Stats) of " + values)
          .that(maxByAddAllStats)
          .isEqualTo(MANY_VALUES_MAX);
    }
  }
  // Integer/long datasets: exact maxima.
  assertThat(integerManyValuesAccumulatorByAddAllIterable.max())
      .isEqualTo(INTEGER_MANY_VALUES_MAX);
  assertThat(longManyValuesAccumulatorByAddAllIterator.max()).isEqualTo(LONG_MANY_VALUES_MAX);
  assertThat(longManyValuesAccumulatorByAddAllVarargs.max()).isEqualTo(LONG_MANY_VALUES_MAX);
}
/**
 * min() must: throw for empty accumulators, track the exact minimum for known datasets, and for
 * datasets containing non-finite values follow IEEE semantics: any NaN poisons the result to
 * NaN; otherwise any -Infinity yields -Infinity; otherwise the finite minimum.
 */
public void testMin() {
  // Undefined on empty datasets.
  assertThrows(IllegalStateException.class, () -> emptyAccumulator.min());
  assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.min());
  assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.min());
  // Known datasets: min is exact (no tolerance).
  assertThat(oneValueAccumulator.min()).isEqualTo(ONE_VALUE);
  assertThat(oneValueAccumulatorByAddAllEmptyStats.min()).isEqualTo(ONE_VALUE);
  assertThat(twoValuesAccumulator.min()).isEqualTo(TWO_VALUES_MIN);
  assertThat(twoValuesAccumulatorByAddAllStats.min()).isEqualTo(TWO_VALUES_MIN);
  assertThat(manyValuesAccumulatorByAddAllIterable.min()).isEqualTo(MANY_VALUES_MIN);
  assertThat(manyValuesAccumulatorByAddAllIterator.min()).isEqualTo(MANY_VALUES_MIN);
  assertThat(manyValuesAccumulatorByAddAllVarargs.min()).isEqualTo(MANY_VALUES_MIN);
  assertThat(manyValuesAccumulatorByRepeatedAdd.min()).isEqualTo(MANY_VALUES_MIN);
  assertThat(manyValuesAccumulatorByAddAndAddAll.min()).isEqualTo(MANY_VALUES_MIN);
  assertThat(manyValuesAccumulatorByAddAllStats.min()).isEqualTo(MANY_VALUES_MIN);
  assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.min()).isEqualTo(MANY_VALUES_MIN);
  // For datasets of many double values created by adding elements individually, we test many
  // combinations of finite and non-finite values, comparing the add(double) path against the
  // addAll(Stats)-per-value path.
  for (ManyValues values : ALL_MANY_VALUES) {
    StatsAccumulator accumulator = new StatsAccumulator();
    StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator();
    for (double value : values.asIterable()) {
      accumulator.add(value);
      accumulatorByAddAllStats.addAll(Stats.of(value));
    }
    double min = accumulator.min();
    double minByAddAllStats = accumulatorByAddAllStats.min();
    if (values.hasAnyNaN()) {
      // NaN takes precedence over infinities.
      assertWithMessage("min of " + values).that(min).isNaN();
      assertWithMessage("min by addAll(Stats) of " + values).that(minByAddAllStats).isNaN();
    } else if (values.hasAnyNegativeInfinity()) {
      assertWithMessage("min of " + values).that(min).isNegativeInfinity();
      assertWithMessage("min by addAll(Stats) of " + values)
          .that(minByAddAllStats)
          .isNegativeInfinity();
    } else {
      assertWithMessage("min of " + values).that(min).isEqualTo(MANY_VALUES_MIN);
      assertWithMessage("min by addAll(Stats) of " + values)
          .that(minByAddAllStats)
          .isEqualTo(MANY_VALUES_MIN);
    }
  }
  // Integer/long datasets: exact minima.
  assertThat(integerManyValuesAccumulatorByAddAllIterable.min())
      .isEqualTo(INTEGER_MANY_VALUES_MIN);
  assertThat(longManyValuesAccumulatorByAddAllIterator.min()).isEqualTo(LONG_MANY_VALUES_MIN);
  assertThat(longManyValuesAccumulatorByAddAllVarargs.min()).isEqualTo(LONG_MANY_VALUES_MIN);
}
/**
 * Sanity-checks the test fixtures themselves: the two "mega stream" halves, concatenated and
 * sorted, must contain exactly the same values as the full mega stream.
 */
public void testVerifyMegaStreamHalves() {
  double[] recombinedHalves =
      concat(megaPrimitiveDoubleStreamPart1(), megaPrimitiveDoubleStreamPart2())
          .sorted()
          .toArray();
  double[] fullStream = megaPrimitiveDoubleStream().toArray();
  assertThat(recombinedHalves).isEqualTo(fullStream);
}
/**
 * Feeding the two mega-stream halves via addAll(DoubleStream) must accumulate the same count,
 * mean, population variance, min and max as the full known dataset.
 */
public void testAddAllPrimitiveDoubleStream() {
  StatsAccumulator acc = new StatsAccumulator();
  acc.addAll(megaPrimitiveDoubleStreamPart1());
  acc.addAll(megaPrimitiveDoubleStreamPart2());
  // Count, min and max are exact; mean and variance get a tolerance scaled by the stream size.
  assertThat(acc.count()).isEqualTo(MEGA_STREAM_COUNT);
  assertThat(acc.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN);
  assertThat(acc.populationVariance())
      .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT)
      .of(MEGA_STREAM_POPULATION_VARIANCE);
  assertThat(acc.min()).isEqualTo(MEGA_STREAM_MIN);
  assertThat(acc.max()).isEqualTo(MEGA_STREAM_MAX);
}
/**
 * Feeding the two mega-stream halves, cast element-wise to int, via addAll(IntStream) must
 * accumulate the same count, mean, population variance, min and max as the full known dataset.
 */
public void testAddAllPrimitiveIntStream() {
  StatsAccumulator acc = new StatsAccumulator();
  acc.addAll(megaPrimitiveDoubleStreamPart1().mapToInt(x -> (int) x));
  acc.addAll(megaPrimitiveDoubleStreamPart2().mapToInt(x -> (int) x));
  // Count, min and max are exact; mean and variance get a tolerance scaled by the stream size.
  assertThat(acc.count()).isEqualTo(MEGA_STREAM_COUNT);
  assertThat(acc.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN);
  assertThat(acc.populationVariance())
      .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT)
      .of(MEGA_STREAM_POPULATION_VARIANCE);
  assertThat(acc.min()).isEqualTo(MEGA_STREAM_MIN);
  assertThat(acc.max()).isEqualTo(MEGA_STREAM_MAX);
}
/**
 * Feeding the two mega-stream halves, cast element-wise to long, via addAll(LongStream) must
 * accumulate the same count, mean, population variance, min and max as the full known dataset.
 */
public void testAddAllPrimitiveLongStream() {
  StatsAccumulator acc = new StatsAccumulator();
  acc.addAll(megaPrimitiveDoubleStreamPart1().mapToLong(x -> (long) x));
  acc.addAll(megaPrimitiveDoubleStreamPart2().mapToLong(x -> (long) x));
  // Count, min and max are exact; mean and variance get a tolerance scaled by the stream size.
  assertThat(acc.count()).isEqualTo(MEGA_STREAM_COUNT);
  assertThat(acc.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN);
  assertThat(acc.populationVariance())
      .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT)
      .of(MEGA_STREAM_POPULATION_VARIANCE);
  assertThat(acc.min()).isEqualTo(MEGA_STREAM_MIN);
  assertThat(acc.max()).isEqualTo(MEGA_STREAM_MAX);
}
}
| StatsAccumulatorTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/PendingBulkOperationCleanupActionTest.java | {
"start": 2464,
"end": 2631
} | class ____ {
private static final TestCachingRegionFactory CACHING_REGION_FACTORY = new TestCachingRegionFactory();
public static | PendingBulkOperationCleanupActionTest |
java | google__auto | factory/src/test/resources/bad/ProvidedOnMethodParameter.java | {
"start": 654,
"end": 735
} | class ____ {
void blah(Object a, @Provided Object b) {}
}
| ProvidedOnMethodParameter |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 512819,
"end": 514991
} | class ____. Multiple classes can be separated by comma.", displayName = "Module Class Names"),
@YamlProperty(name = "moduleRefs", type = "string", description = "To use custom Jackson modules referred from the Camel registry. Multiple modules can be separated by comma.", displayName = "Module Refs"),
@YamlProperty(name = "namingStrategy", type = "string", description = "If set then Jackson will use the the defined Property Naming Strategy.Possible values are: LOWER_CAMEL_CASE, LOWER_DOT_CASE, LOWER_CASE, KEBAB_CASE, SNAKE_CASE and UPPER_CAMEL_CASE", displayName = "Naming Strategy"),
@YamlProperty(name = "objectMapper", type = "string", description = "Lookup and use the existing ObjectMapper with the given id when using Jackson.", displayName = "Object Mapper"),
@YamlProperty(name = "prettyPrint", type = "boolean", defaultValue = "false", description = "To enable pretty printing output nicely formatted. Is by default false.", displayName = "Pretty Print"),
@YamlProperty(name = "schemaResolver", type = "string", description = "Optional schema resolver used to lookup schemas for the data in transit.", displayName = "Schema Resolver"),
@YamlProperty(name = "timezone", type = "string", description = "If set then Jackson will use the Timezone when marshalling/unmarshalling. This option will have no effect on the others Json DataFormat, like gson and fastjson.", displayName = "Timezone"),
@YamlProperty(name = "unmarshalType", type = "string", description = "Class name of the java type to use when unmarshalling", displayName = "Unmarshal Type"),
@YamlProperty(name = "useDefaultObjectMapper", type = "boolean", defaultValue = "true", description = "Whether to lookup and use default Jackson ObjectMapper from the registry.", displayName = "Use Default Object Mapper"),
@YamlProperty(name = "useList", type = "boolean", defaultValue = "false", description = "To unmarshal to a List of Map or a List of Pojo.", displayName = "Use List")
}
)
public static | names |
java | google__guice | core/test/com/googlecode/guice/BytecodeGenTest.java | {
"start": 13976,
"end": 15431
} | class ____ extends URLClassLoader {
MultipleVersionsOfGuiceClassLoader() {
this(MultipleVersionsOfGuiceClassLoader.class.getClassLoader());
}
MultipleVersionsOfGuiceClassLoader(ClassLoader classloader) {
super(getClassPathUrls(), classloader);
}
public Class<? extends LogCreator> loadLogCreatorType(Class<? extends LogCreator> cls)
throws ClassNotFoundException {
return loadClass(cls.getName()).asSubclass(LogCreator.class);
}
/**
* Classic parent-delegating classloaders are meant to override findClass. However,
* non-delegating classloaders (as used in OSGi) instead override loadClass to provide support
* for "class-space" separation.
*/
@Override
protected Class<?> loadClass(final String name, final boolean resolve)
throws ClassNotFoundException {
synchronized (this) {
// check our local cache to avoid duplicates
final Class<?> clazz = findLoadedClass(name);
if (clazz != null) {
return clazz;
}
}
if (name.startsWith("java.")
|| name.startsWith("jakarta.")
|| name.equals(LogCreator.class.getName())
|| (!name.startsWith("com.google.inject.") && !name.startsWith("com.googlecode.guice"))) {
// standard parent delegation
return super.loadClass(name, resolve);
} else {
// load a new copy of the | MultipleVersionsOfGuiceClassLoader |
java | quarkusio__quarkus | extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/OpenTelemetryMDCTest.java | {
"start": 6611,
"end": 7695
} | class ____ {
public final boolean isSampled;
public final String parentId;
public final String spanId;
public final String traceId;
public MdcEntry(boolean isSampled, String parentId, String spanId, String traceId) {
this.isSampled = isSampled;
this.parentId = parentId;
this.spanId = spanId;
this.traceId = traceId;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof MdcEntry)) {
return false;
}
MdcEntry mdcEntry = (MdcEntry) o;
return isSampled == mdcEntry.isSampled &&
Objects.equals(parentId, mdcEntry.parentId) &&
Objects.equals(spanId, mdcEntry.spanId) &&
Objects.equals(traceId, mdcEntry.traceId);
}
@Override
public int hashCode() {
return Objects.hash(isSampled, parentId, spanId, traceId);
}
}
}
| MdcEntry |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallCheckerTest.java | {
"start": 15054,
"end": 15702
} | class ____ {
public void badApis(Time time) {
// BUG: Diagnostic contains: DoNotCall
time.setYear(1);
// BUG: Diagnostic contains: DoNotCall
time.setMonth(1);
// BUG: Diagnostic contains: DoNotCall
time.setDate(1);
}
}
""")
.doTest();
}
@Test
public void javaSqlTime_staticallyTypedAsJavaUtilDate() {
testHelper
.addSourceLines(
"TestClass.java",
"""
import java.time.Instant;
import java.util.Date;
public | TestClass |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/support/StaticMethodMatcherPointcutAdvisor.java | {
"start": 928,
"end": 1124
} | class ____ Advisors that are also static pointcuts.
* Serializable if Advice and subclass are.
*
* @author Rod Johnson
* @author Juergen Hoeller
*/
@SuppressWarnings("serial")
public abstract | for |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/basicapi/ResetTest.java | {
"start": 1392,
"end": 4093
} | class ____.lang.String");
}
@Test
public void resettingNullIsSafe() {
assertThatThrownBy(
() -> {
reset(new Object[] {null});
})
.isInstanceOf(NotAMockException.class)
.hasMessage("Argument should be a mock, but is null!");
}
@Test
public void shouldRemoveAllStubbing() {
when(mock.objectReturningMethod(isA(Integer.class))).thenReturn(100);
when(mock.objectReturningMethod(200)).thenReturn(200);
reset(mock);
assertNull(mock.objectReturningMethod(200));
assertEquals(
"default behavior should return null", null, mock.objectReturningMethod("blah"));
}
@Test
public void shouldRemoveAllInteractions() {
mock.simpleMethod(1);
reset(mock);
verifyNoInteractions(mock);
}
@Test
public void shouldRemoveAllInteractionsVerifyNoInteractions() {
mock.simpleMethod(1);
reset(mock);
verifyNoInteractions(mock);
}
@Test
public void shouldRemoveStubbingToString() {
IMethods mockTwo = mock(IMethods.class);
when(mockTwo.toString()).thenReturn("test");
reset(mockTwo);
assertThat(mockTwo.toString()).contains("Mock for IMethods");
}
@Test
public void shouldStubbingNotBeTreatedAsInteractionVerifyNoInteractions() {
when(mock.simpleMethod("one")).thenThrow(new RuntimeException());
doThrow(new RuntimeException()).when(mock).simpleMethod("two");
reset(mock);
verifyNoInteractions(mock);
}
@Test
public void shouldNotAffectMockName() {
IMethods mock = mock(IMethods.class, "mockie");
IMethods mockTwo = mock(IMethods.class);
reset(mock);
assertThat(mockTwo.toString()).contains("Mock for IMethods");
assertEquals("mockie", "" + mock);
}
@Test
public void shouldResetMultipleMocks() {
mock.simpleMethod();
mockTwo.simpleMethod();
reset(mock, mockTwo);
verifyNoMoreInteractions(mock, mockTwo);
}
@SuppressWarnings({"MockitoUsage", "CheckReturnValue"})
@Test
public void shouldValidateStateWhenResetting() {
// invalid verify:
verify(mock);
try {
reset(mockTwo);
fail();
} catch (UnfinishedVerificationException e) {
}
}
@Test
public void shouldMaintainPreviousDefaultAnswer() {
// given
mock = mock(IMethods.class, RETURNS_MOCKS);
// when
reset(mock);
// then
assertNotNull(mock.iMethodsReturningMethod());
}
}
| java |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/IdGenerator.java | {
"start": 847,
"end": 973
} | interface ____ {
/**
* Generate a new identifier.
* @return the generated identifier
*/
UUID generateId();
}
| IdGenerator |
java | apache__flink | flink-streaming-java/src/main/java/org/apache/flink/streaming/api/windowing/triggers/ContinuousProcessingTimeTrigger.java | {
"start": 1576,
"end": 4907
} | class ____<W extends Window> extends Trigger<Object, W> {
private static final long serialVersionUID = 1L;
private final long interval;
/** When merging we take the lowest of all fire timestamps as the new fire timestamp. */
private final ReducingStateDescriptor<Long> stateDesc =
new ReducingStateDescriptor<>("fire-time", new Min(), LongSerializer.INSTANCE);
private ContinuousProcessingTimeTrigger(long interval) {
this.interval = interval;
}
@Override
public TriggerResult onElement(Object element, long timestamp, W window, TriggerContext ctx)
throws Exception {
ReducingState<Long> fireTimestampState = ctx.getPartitionedState(stateDesc);
timestamp = ctx.getCurrentProcessingTime();
if (fireTimestampState.get() == null) {
registerNextFireTimestamp(
timestamp - (timestamp % interval), window, ctx, fireTimestampState);
}
return TriggerResult.CONTINUE;
}
@Override
public TriggerResult onEventTime(long time, W window, TriggerContext ctx) throws Exception {
return TriggerResult.CONTINUE;
}
@Override
public TriggerResult onProcessingTime(long time, W window, TriggerContext ctx)
throws Exception {
if (time == window.maxTimestamp()) {
return TriggerResult.FIRE;
}
ReducingState<Long> fireTimestampState = ctx.getPartitionedState(stateDesc);
if (fireTimestampState.get().equals(time)) {
fireTimestampState.clear();
registerNextFireTimestamp(time, window, ctx, fireTimestampState);
return TriggerResult.FIRE;
}
return TriggerResult.CONTINUE;
}
@Override
public void clear(W window, TriggerContext ctx) throws Exception {
// State could be merged into new window.
ReducingState<Long> fireTimestamp = ctx.getPartitionedState(stateDesc);
Long timestamp = fireTimestamp.get();
if (timestamp != null) {
ctx.deleteProcessingTimeTimer(timestamp);
fireTimestamp.clear();
}
}
@Override
public boolean canMerge() {
return true;
}
@Override
public void onMerge(W window, OnMergeContext ctx) throws Exception {
// States for old windows will lose after the call.
ctx.mergePartitionedState(stateDesc);
// Register timer for this new window.
Long nextFireTimestamp = ctx.getPartitionedState(stateDesc).get();
if (nextFireTimestamp != null) {
ctx.registerProcessingTimeTimer(nextFireTimestamp);
}
}
@VisibleForTesting
public long getInterval() {
return interval;
}
@Override
public String toString() {
return "ContinuousProcessingTimeTrigger(" + interval + ")";
}
/**
* Creates a trigger that continuously fires based on the given interval.
*
* @param interval The time interval at which to fire.
* @param <W> The type of {@link Window Windows} on which this trigger can operate.
*/
public static <W extends Window> ContinuousProcessingTimeTrigger<W> of(Duration interval) {
return new ContinuousProcessingTimeTrigger<>(interval.toMillis());
}
private static | ContinuousProcessingTimeTrigger |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MQ2EndpointBuilderFactory.java | {
"start": 21175,
"end": 25718
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final MQ2HeaderNameBuilder INSTANCE = new MQ2HeaderNameBuilder();
/**
* The operation we want to perform.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQOperation}.
*/
public String awsMQOperation() {
return "CamelAwsMQOperation";
}
/**
* The number of results that must be retrieved from listBrokers
* operation.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQMaxResults}.
*/
public String awsMQMaxResults() {
return "CamelAwsMQMaxResults";
}
/**
* The broker name.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerName}.
*/
public String awsMQBrokerName() {
return "CamelAwsMQBrokerName";
}
/**
* The Broker Engine for MQ.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerEngine}.
*/
public String awsMQBrokerEngine() {
return "CamelAwsMQBrokerEngine";
}
/**
* The Broker Engine Version for MQ. Currently you can choose between
* 5.15.6 and 5.15.0 of ACTIVEMQ.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerEngineVersion}.
*/
public String awsMQBrokerEngineVersion() {
return "CamelAwsMQBrokerEngineVersion";
}
/**
* The broker id.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerID}.
*/
public String awsMQBrokerID() {
return "CamelAwsMQBrokerID";
}
/**
* A list of information about the configuration.
*
* The option is a: {@code
* software.amazon.awssdk.services.mq.model.ConfigurationId} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQConfigurationID}.
*/
public String awsMQConfigurationID() {
return "CamelAwsMQConfigurationID";
}
/**
* The deployment mode for the broker in the createBroker operation.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerDeploymentMode}.
*/
public String awsMQBrokerDeploymentMode() {
return "CamelAwsMQBrokerDeploymentMode";
}
/**
* The instance type for the MQ machine in the createBroker operation.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerInstanceType}.
*/
public String awsMQBrokerInstanceType() {
return "CamelAwsMQBrokerInstanceType";
}
/**
* The list of users for MQ.
*
* The option is a: {@code List<User>} type.
*
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerUsers}.
*/
public String awsMQBrokerUsers() {
return "CamelAwsMQBrokerUsers";
}
/**
* If the MQ instance must be publicly available or not.
*
* The option is a: {@code Boolean} type.
*
* Default: false
* Group: producer
*
* @return the name of the header {@code AwsMQBrokerPubliclyAccessible}.
*/
public String awsMQBrokerPubliclyAccessible() {
return "CamelAwsMQBrokerPubliclyAccessible";
}
}
static MQ2EndpointBuilder endpointBuilder(String componentName, String path) {
| MQ2HeaderNameBuilder |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/AbstractItemSetMapReducer.java | {
"start": 2922,
"end": 8768
} | interface ____ extends CheckedFunction<Object, Object, IOException> {}
private final String aggregationName;
private final String mapReducerName;
protected AbstractItemSetMapReducer(String aggregationName, String mapReducerName) {
this.aggregationName = aggregationName;
this.mapReducerName = mapReducerName;
}
/**
* Define the ordinal optimization to use per default, default NO_ORDINALS.
*
* @return ordinal optimization to be used
*/
protected OrdinalOptimization getDefaultOrdinalOptimization() {
return OrdinalOptimization.NO_ORDINALS;
}
/**
* Definition of code to execute before the mapper processes any input.
*
* This is mandatory to create a context object for storing data.
*/
protected abstract MapContext mapInit(BigArrays bigArrays);
/**
* Definition of the mapper that gets executed locally on every shard
*
* @param keyValues A stream of keys and values, while a value is a list of values.
* @param mapContext context object for mapping
*/
protected abstract MapContext map(Stream<Tuple<Field, List<Object>>> keyValues, MapContext mapContext);
/**
* Definition of the mapper for filtered values.
*
* @param mapContext context object for mapping
*/
protected MapContext mapFiltered(MapContext mapContext) {
return mapContext;
}
/**
* Definition of code to execute(optional) after the mapper processed all input.
*
* @param mapContext context object returned from map
* @param ordinalLookup an ordinal lookup function to remap ordinals to values
*/
protected abstract MapFinalContext mapFinalize(MapContext mapContext, List<OrdinalLookupFunction> ordinalLookup) throws IOException;
/**
* Definition of code to execute before the reducer processes any input.
*
* This is mandatory to create a result object that holds reduced data.
*/
protected abstract ReduceContext reduceInit(BigArrays bigArrays);
/**
* Definition of the reducer that gets results from every shard
*
* @param partitions individual map reduce context instances which hold the data
* @param reduceContext the result object create by doReduceInit
* @param isCanceledSupplier supplier to check whether the request has been canceled
*/
protected abstract ReduceContext reduce(
Stream<MapFinalContext> partitions,
ReduceContext reduceContext,
Supplier<Boolean> isCanceledSupplier
);
/**
* Definition of the combiner that works as a local reducer, reducing partial data.
*
* This can be optionally overwritten, otherwise it calls doReduce
*
* @param partitions individual map reduce context instances which hold the data
* @param reduceContext the result object created by doReduceInit
* @param isCanceledSupplier supplier to check whether the request has been canceled
*
*/
protected abstract MapFinalContext combine(
Stream<MapFinalContext> partitions,
ReduceContext reduceContext,
Supplier<Boolean> isCanceledSupplier
);
/**
* Definition of code to execute after the reducer processed all input.
*
* @param reduceContext the result object returned from doReduce
* @param fields list of fields from the input
* @param isCanceledSupplier supplier to check whether the request has been canceled
* @throws IOException
*/
protected abstract Result reduceFinalize(ReduceContext reduceContext, List<Field> fields, Supplier<Boolean> isCanceledSupplier)
throws IOException;
/**
* Definition of code to execute if sampling has been applied.
*
* You must overwrite this if the results of this map-reducer contains absolute doc counts in any way.
*
* @param samplingContext the sampling context
* @param result the mapReduceResult to be adjusted for sampling
*/
protected Result finalizeSampling(SamplingContext samplingContext, Result result) {
return result;
}
/**
* Definition of code to read map-reduce context from a map-reduce operation from a stream.
*
* This must be implemented, for writing the context must implement `Writeable`
*
* @param in the input stream
* @param bigArrays instance of BigArrays to use
* @return a MapReduceContext
* @throws IOException
*/
protected abstract MapFinalContext readMapReduceContext(StreamInput in, BigArrays bigArrays) throws IOException;
/**
* Definition of code to read results from a map operation from a stream.
*
* This must be implemented, for writing the context must implement `Writeable`
*
* @param in the input stream
* @param bigArrays instance of BigArrays to use
* @return a MapReduceContext
* @throws IOException
*/
protected abstract Result readResult(StreamInput in, BigArrays bigArrays) throws IOException;
/**
* Extension point to add further information for `profile:true`
*
* @param add callback to add a string/object pair as debug info
*/
protected void collectDebugInfo(BiConsumer<String, Object> add) {}
/**
* Forwarded from {@link InternalAggregation}:
*
* Signal the framework if the {@linkplain AggregatorReducer} phase needs to be called
* when there is only one {@linkplain InternalAggregation}.
*/
final boolean mustReduceOnSingleInternalAgg() {
return true;
}
@Override
public final String getWriteableName() {
return aggregationName;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(mapReducerName);
}
}
| OrdinalLookupFunction |
java | apache__dubbo | dubbo-metrics/dubbo-tracing/src/test/java/org/apache/dubbo/tracing/utils/PropagationTypeTest.java | {
"start": 1000,
"end": 1457
} | class ____ {
@Test
void forValue() {
PropagationType propagationType1 = PropagationType.forValue("W3C");
assertEquals(PropagationType.W3C, propagationType1);
PropagationType propagationType2 = PropagationType.forValue("B3");
assertEquals(PropagationType.B3, propagationType2);
PropagationType propagationType3 = PropagationType.forValue("B33");
assertNull(propagationType3);
}
}
| PropagationTypeTest |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/FunctionsTest.java | {
"start": 39499,
"end": 41171
} | interface ____ properly defined to throw any exception using String and IOExceptions as
* generic test types.
*/
@Test
void testThrows_FailableSupplier_String_IOException() {
new Functions.FailableSupplier<String, IOException>() {
@Override
public String get() throws IOException {
throw new IOException("test");
}
};
}
@Test
void testTryWithResources() {
final CloseableObject co = new CloseableObject();
final FailableConsumer<Throwable, ? extends Throwable> consumer = co::run;
final IllegalStateException ise = new IllegalStateException();
Throwable e = assertThrows(IllegalStateException.class,
() -> Functions.tryWithResources(() -> consumer.accept(ise), co::close));
assertSame(ise, e);
assertTrue(co.isClosed());
co.reset();
final Error error = new OutOfMemoryError();
e = assertThrows(OutOfMemoryError.class,
() -> Functions.tryWithResources(() -> consumer.accept(error), co::close));
assertSame(error, e);
assertTrue(co.isClosed());
co.reset();
final IOException ioe = new IOException("Unknown I/O error");
final UncheckedIOException uioe = assertThrows(UncheckedIOException.class,
() -> Functions.tryWithResources(() -> consumer.accept(ioe), co::close));
final IOException cause = uioe.getCause();
assertSame(ioe, cause);
assertTrue(co.isClosed());
co.reset();
Functions.tryWithResources(() -> consumer.accept(null), co::close);
assertTrue(co.isClosed());
}
}
| is |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/ConstructorBindingImpl.java | {
"start": 2826,
"end": 3566
} | class ____<T> implements InternalFactory<T> {
private ConstructorInjector<T> constructorInjector;
@Override
@SuppressWarnings("unchecked")
public T get(Errors errors, InternalContext context, Dependency<?> dependency) throws ErrorsException {
if (constructorInjector == null) {
throw new IllegalStateException("Constructor not ready");
}
// This may not actually be safe because it could return a super type of T (if that's all the
// client needs), but it should be OK in practice thanks to the wonders of erasure.
return (T) constructorInjector.construct(errors, context, dependency.getKey().getRawType());
}
}
}
| Factory |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/inference/CallBindingCallContext.java | {
"start": 8677,
"end": 12548
} | class ____ implements TableSemantics {
private final DataType dataType;
private final int[] partitionByColumns;
public static CallBindingTableSemantics create(
DataType tableDataType, StaticArgument staticArg, SqlNode sqlNode) {
checkNoOrderBy(sqlNode);
return new CallBindingTableSemantics(
createDataType(tableDataType, staticArg),
createPartitionByColumns(tableDataType, sqlNode));
}
private static void checkNoOrderBy(SqlNode sqlNode) {
final SqlNodeList orderByList = getSemanticsComponent(sqlNode, 2);
if (orderByList == null) {
return;
}
if (!orderByList.isEmpty()) {
throw new ValidationException("ORDER BY clause is currently not supported.");
}
}
private static @Nullable SqlNodeList getSemanticsComponent(SqlNode sqlNode, int pos) {
if (noSetSemantics(sqlNode)) {
return null;
}
// 0 => query, 1 => PARTITION BY, 2 => ORDER BY
final List<SqlNode> setSemantics = ((SqlCall) sqlNode).getOperandList();
return (SqlNodeList) setSemantics.get(pos);
}
private static DataType createDataType(DataType tableDataType, StaticArgument staticArg) {
final DataType dataType = staticArg.getDataType().orElse(null);
if (dataType != null) {
// Typed table argument
return dataType;
}
// Untyped table arguments
return tableDataType;
}
private static int[] createPartitionByColumns(DataType tableDataType, SqlNode sqlNode) {
final SqlNodeList partitionByList = getSemanticsComponent(sqlNode, 1);
if (partitionByList == null) {
return new int[0];
}
final List<String> tableColumns = DataType.getFieldNames(tableDataType);
return partitionByList.stream()
.map(n -> ((SqlIdentifier) n).getSimple())
.map(
c -> {
final int pos = tableColumns.indexOf(c);
if (pos < 0) {
throw new ValidationException(
String.format(
"Invalid column '%s' for PARTITION BY clause. "
+ "Available columns are: %s",
c, tableColumns));
}
return pos;
})
.mapToInt(Integer::intValue)
.toArray();
}
private CallBindingTableSemantics(DataType dataType, int[] partitionByColumns) {
this.dataType = dataType;
this.partitionByColumns = partitionByColumns;
}
@Override
public DataType dataType() {
return dataType;
}
@Override
public int[] partitionByColumns() {
return partitionByColumns;
}
@Override
public int[] orderByColumns() {
return new int[0];
}
@Override
public int timeColumn() {
return -1;
}
@Override
public Optional<ChangelogMode> changelogMode() {
return Optional.empty();
}
}
// --------------------------------------------------------------------------------------------
// ModelSemantics
// --------------------------------------------------------------------------------------------
private static | CallBindingTableSemantics |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/bind/BeanAsEndpointTest.java | {
"start": 1178,
"end": 1790
} | class ____ extends SpringTestSupport {
protected Object body = "James";
@Test
public void testSendingToBean() throws Exception {
MockEndpoint mock = resolveMandatoryEndpoint("mock:results", MockEndpoint.class);
mock.expectedBodiesReceived("Hello James!");
template.sendBody("direct:start", ExchangePattern.InOut, body);
mock.assertIsSatisfied();
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/spring/bind/beanAsEndpoint.xml");
}
}
| BeanAsEndpointTest |
java | apache__camel | components/camel-sjms/src/main/java/org/apache/camel/component/sjms/jms/JmsMessageType.java | {
"start": 921,
"end": 1238
} | enum ____ {
/**
* First the JMS Message types
*/
Bytes,
Map,
Object,
Stream,
Text,
/**
* BlobMessage which is not supported by all JMS implementations
*/
Blob,
/**
* The default type that can be used for empty messages.
*/
Message
}
| JmsMessageType |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/util/XmlExpectationsHelper.java | {
"start": 3188,
"end": 3255
} | class ____ prevent hard dependency on XML Unit.
*/
private static | to |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oceanbase/OceanbaseCreateTableTest_rangePartition2.java | {
"start": 967,
"end": 4723
} | class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "CREATE TABLE employees ( "
+ "id INT NOT NULL, "
+ "fname VARCHAR(30), "
+ "lname VARCHAR(30), "
+ "hired DATE NOT NULL DEFAULT '1970-01-01', "
+ "separated DATE NOT NULL DEFAULT '9999-12-31', "
+ "job_code INT NOT NULL,store_id INT NOT NULL "
+ ") PARTITION BY RANGE (store_id) "
+ "( PARTITION p0 VALUES LESS THAN (6), "
+ "PARTITION p1 VALUES LESS THAN (11), "
+ "PARTITION p2 VALUES LESS THAN (16), "
+ "PARTITION p3 VALUES LESS THAN MAXVALUE "
+ ")";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList();
SQLStatement stmt = stmtList.get(0);
{
String result = SQLUtils.toMySqlString(stmt);
assertEquals("CREATE TABLE employees ("
+ "\n\tid INT NOT NULL,"
+ "\n\tfname VARCHAR(30),"
+ "\n\tlname VARCHAR(30),"
+ "\n\thired DATE NOT NULL DEFAULT '1970-01-01',"
+ "\n\tseparated DATE NOT NULL DEFAULT '9999-12-31',"
+ "\n\tjob_code INT NOT NULL,"
+ "\n\tstore_id INT NOT NULL"
+ "\n)"
+ "\nPARTITION BY RANGE COLUMNS (store_id) ("
+ "\n\tPARTITION p0 VALUES LESS THAN (6),"
+ "\n\tPARTITION p1 VALUES LESS THAN (11),"
+ "\n\tPARTITION p2 VALUES LESS THAN (16),"
+ "\n\tPARTITION p3 VALUES LESS THAN MAXVALUE"
+ "\n)",
result);
}
{
String result = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("create table employees ("
+ "\n\tid INT not null,"
+ "\n\tfname VARCHAR(30),"
+ "\n\tlname VARCHAR(30),"
+ "\n\thired DATE not null default '1970-01-01',"
+ "\n\tseparated DATE not null default '9999-12-31',"
+ "\n\tjob_code INT not null,"
+ "\n\tstore_id INT not null"
+ "\n)"
+ "\npartition by range columns (store_id) ("
+ "\n\tpartition p0 values less than (6),"
+ "\n\tpartition p1 values less than (11),"
+ "\n\tpartition p2 values less than (16),"
+ "\n\tpartition p3 values less than maxvalue"
+ "\n)",
result);
}
assertEquals(1, stmtList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(7, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
// assertTrue(visitor.getTables().containsKey(new TableStat.Name("t_basic_store")));
}
}
| OceanbaseCreateTableTest_rangePartition2 |
java | grpc__grpc-java | util/src/main/java/io/grpc/util/RandomSubsettingLoadBalancerProvider.java | {
"start": 861,
"end": 2828
} | class ____ extends LoadBalancerProvider {
private static final String POLICY_NAME = "random_subsetting_experimental";
@Override
public LoadBalancer newLoadBalancer(LoadBalancer.Helper helper) {
return new RandomSubsettingLoadBalancer(helper);
}
@Override
public boolean isAvailable() {
return true;
}
@Override
public int getPriority() {
return 5;
}
@Override
public String getPolicyName() {
return POLICY_NAME;
}
@Override
public ConfigOrError parseLoadBalancingPolicyConfig(Map<String, ?> rawConfig) {
try {
return parseLoadBalancingPolicyConfigInternal(rawConfig);
} catch (RuntimeException e) {
return ConfigOrError.fromError(
Status.UNAVAILABLE
.withCause(e)
.withDescription("Failed parsing configuration for " + getPolicyName()));
}
}
private ConfigOrError parseLoadBalancingPolicyConfigInternal(Map<String, ?> rawConfig) {
Long subsetSize = JsonUtil.getNumberAsLong(rawConfig, "subsetSize");
if (subsetSize == null) {
return ConfigOrError.fromError(
Status.UNAVAILABLE.withDescription(
"Subset size missing in " + getPolicyName() + ", LB policy config=" + rawConfig));
}
ConfigOrError childConfig = GracefulSwitchLoadBalancer.parseLoadBalancingPolicyConfig(
JsonUtil.getListOfObjects(rawConfig, "childPolicy"));
if (childConfig.getError() != null) {
return ConfigOrError.fromError(Status.UNAVAILABLE
.withDescription(
"Failed to parse child in " + getPolicyName() + ", LB policy config=" + rawConfig)
.withCause(childConfig.getError().asRuntimeException()));
}
return ConfigOrError.fromConfig(
new RandomSubsettingLoadBalancer.RandomSubsettingLoadBalancerConfig.Builder()
.setSubsetSize(subsetSize)
.setChildConfig(childConfig.getConfig())
.build());
}
}
| RandomSubsettingLoadBalancerProvider |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeResourcePolicy.java | {
"start": 1051,
"end": 1200
} | class ____ capable of determining
* whether sufficient resources are available for the NN to continue operating.
*/
@InterfaceAudience.Private
final | is |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java | {
"start": 2592,
"end": 4305
} | class ____<K, V>
extends Reducer<Text, Text, Text, Text> {
private String fieldSeparator = "\t";
private String reduceOutputKeyValueSpec;
private List<Integer> reduceOutputKeyFieldList = new ArrayList<Integer>();
private List<Integer> reduceOutputValueFieldList = new ArrayList<Integer>();
private int allReduceValueFieldsFrom = -1;
public static final Logger LOG =
LoggerFactory.getLogger("FieldSelectionMapReduce");
public void setup(Context context)
throws IOException, InterruptedException {
Configuration conf = context.getConfiguration();
this.fieldSeparator =
conf.get(FieldSelectionHelper.DATA_FIELD_SEPARATOR, "\t");
this.reduceOutputKeyValueSpec =
conf.get(FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, "0-:");
allReduceValueFieldsFrom = FieldSelectionHelper.parseOutputKeyValueSpec(
reduceOutputKeyValueSpec, reduceOutputKeyFieldList,
reduceOutputValueFieldList);
LOG.info(FieldSelectionHelper.specToString(fieldSeparator,
reduceOutputKeyValueSpec, allReduceValueFieldsFrom,
reduceOutputKeyFieldList, reduceOutputValueFieldList));
}
public void reduce(Text key, Iterable<Text> values, Context context)
throws IOException, InterruptedException {
String keyStr = key.toString() + this.fieldSeparator;
for (Text val : values) {
FieldSelectionHelper helper = new FieldSelectionHelper();
helper.extractOutputKeyValue(keyStr, val.toString(),
fieldSeparator, reduceOutputKeyFieldList,
reduceOutputValueFieldList, allReduceValueFieldsFrom, false, false);
context.write(helper.getKey(), helper.getValue());
}
}
}
| FieldSelectionReducer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DifferentNameButSameTest.java | {
"start": 2516,
"end": 2810
} | interface ____ {
B test();
B test2();
}
""")
.doTest();
}
@Test
public void fullyQualifiedType_notMentioned() {
helper
.addInputLines(
"Test.java",
"""
package pkg;
| Test |
java | quarkusio__quarkus | integration-tests/hibernate-reactive-oracle/src/main/java/io/quarkus/it/hibernate/reactive/oracle/resources/HibernateReactiveTestEndpointJoinedSubclass.java | {
"start": 2201,
"end": 3184
} | class ____ {
@Id
private Integer id;
private String title;
public Book() {
}
public Book(Integer id, String title) {
this.id = id;
this.title = title;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Book book = (Book) o;
return Objects.equals(title, book.title);
}
@Override
public int hashCode() {
return Objects.hash(title);
}
}
}
| Book |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/processor/DefaultModelElementProcessorContext.java | {
"start": 1389,
"end": 4299
} | class ____ implements ProcessorContext {
private final ProcessingEnvironment processingEnvironment;
private final DelegatingMessager messager;
private final Options options;
private final TypeFactory typeFactory;
private final VersionInformation versionInformation;
private final TypeUtils delegatingTypes;
private final ElementUtils delegatingElements;
private final AccessorNamingUtils accessorNaming;
private final RoundContext roundContext;
public DefaultModelElementProcessorContext(ProcessingEnvironment processingEnvironment, Options options,
RoundContext roundContext, Map<String, String> notToBeImported, TypeElement mapperElement) {
this.processingEnvironment = processingEnvironment;
this.messager = new DelegatingMessager( processingEnvironment.getMessager(), options.isVerbose() );
this.accessorNaming = roundContext.getAnnotationProcessorContext().getAccessorNaming();
this.versionInformation = DefaultVersionInformation.fromProcessingEnvironment( processingEnvironment );
this.delegatingTypes = TypeUtils.create( processingEnvironment, versionInformation );
this.delegatingElements = ElementUtils.create( processingEnvironment, versionInformation, mapperElement );
this.roundContext = roundContext;
this.typeFactory = new TypeFactory(
delegatingElements,
delegatingTypes,
messager,
roundContext,
notToBeImported,
options.isVerbose(),
versionInformation
);
this.options = options;
}
@Override
public Filer getFiler() {
return processingEnvironment.getFiler();
}
@Override
public TypeUtils getTypeUtils() {
return delegatingTypes;
}
@Override
public ElementUtils getElementUtils() {
return delegatingElements;
}
@Override
public TypeFactory getTypeFactory() {
return typeFactory;
}
@Override
public FormattingMessager getMessager() {
return messager;
}
@Override
public AccessorNamingUtils getAccessorNaming() {
return accessorNaming;
}
@Override
public Map<String, EnumTransformationStrategy> getEnumTransformationStrategies() {
return roundContext.getAnnotationProcessorContext().getEnumTransformationStrategies();
}
@Override
public EnumMappingStrategy getEnumMappingStrategy() {
return roundContext.getAnnotationProcessorContext().getEnumMappingStrategy();
}
@Override
public Options getOptions() {
return options;
}
@Override
public VersionInformation getVersionInformation() {
return versionInformation;
}
@Override
public boolean isErroneous() {
return messager.isErroneous();
}
private static final | DefaultModelElementProcessorContext |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java | {
"start": 1364,
"end": 2668
} | class ____ {
private static Configuration defaultConf;
static {
defaultConf = new Configuration();
}
private RecordFactoryProvider() {
}
public static RecordFactory getRecordFactory(Configuration conf) {
if (conf == null) {
//Assuming the default configuration has the correct factories set.
//Users can specify a particular factory by providing a configuration.
conf = defaultConf;
}
String recordFactoryClassName = conf.get(
YarnConfiguration.IPC_RECORD_FACTORY_CLASS,
YarnConfiguration.DEFAULT_IPC_RECORD_FACTORY_CLASS);
return (RecordFactory) getFactoryClassInstance(recordFactoryClassName);
}
private static Object getFactoryClassInstance(String factoryClassName) {
try {
Class<?> clazz = Class.forName(factoryClassName);
Method method = clazz.getMethod("get", null);
method.setAccessible(true);
return method.invoke(null, null);
} catch (ClassNotFoundException e) {
throw new YarnRuntimeException(e);
} catch (NoSuchMethodException e) {
throw new YarnRuntimeException(e);
} catch (InvocationTargetException e) {
throw new YarnRuntimeException(e);
} catch (IllegalAccessException e) {
throw new YarnRuntimeException(e);
}
}
}
| RecordFactoryProvider |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/launcher/testservices/ExceptionInExecuteLaunchableService.java | {
"start": 2952,
"end": 3189
} | class ____ extends IOException implements
ExitCodeProvider {
public IOECodedException() {
super(EXIT_IN_IOE_TEXT);
}
@Override
public int getExitCode() {
return IOE_EXIT_CODE;
}
}
}
| IOECodedException |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/main/java/org/hibernate/processor/util/StringUtil.java | {
"start": 349,
"end": 3817
} | class ____ {
private static final String GET = "get";
private static final String IS = "is";
private static final String HAS = "has";
private StringUtil() {
}
public static String determineFullyQualifiedClassName(String defaultPackage, String name) {
return isFullyQualified( name ) ? name : defaultPackage + "." + name;
}
public static boolean isFullyQualified(String name) {
return name.contains(".");
}
public static String packageNameFromFullyQualifiedName(String fullyQualifiedName) {
return fullyQualifiedName.substring( 0, fullyQualifiedName.lastIndexOf(".") );
}
public static String classNameFromFullyQualifiedName(String fullyQualifiedName) {
return fullyQualifiedName.substring( fullyQualifiedName.lastIndexOf(".") + 1 );
}
public static boolean isProperty(String methodName, String returnType) {
if ( methodName == null ) {
return false;
}
else {
return !isVoid( returnType )
&& isValidPropertyName( methodName, GET )
|| isBoolean( returnType )
&& ( isValidPropertyName( methodName, IS )
|| isValidPropertyName( methodName, HAS ) );
}
}
private static boolean isVoid(String returnType) {
return "void".equals( returnType );
}
private static boolean isBoolean(String type) {
return "Boolean".equals( type ) || "java.lang.Boolean".equals( type ) || "boolean".equals( type );
}
private static boolean isValidPropertyName(String name, String prefix) {
// the name has to start with the prefix and have at least one more character
return name.startsWith( prefix ) && name.length() > prefix.length();
}
public static String getPropertyName(String name) {
return decapitalize( trimPropertyPrefix( name ) );
}
private static String trimPropertyPrefix(String name) {
if ( name.startsWith( GET ) ) {
return name.replaceFirst( GET, "" );
}
else if ( name.startsWith( IS ) ) {
return name.replaceFirst( IS, "" );
}
else if ( name.startsWith( HAS ) ) {
return name.replaceFirst( HAS, "" );
}
else {
return name;
}
}
public static String decapitalize(String string) {
return string == null || string.isEmpty() || startsWithSeveralUpperCaseLetters( string )
? string
: string.substring( 0, 1 ).toLowerCase(Locale.ROOT) + string.substring( 1 );
}
public static String nameToFieldName(String name){
return getUpperUnderscoreCaseFromLowerCamelCase( nameToMethodName( name ) );
}
public static String nameToMethodName(String name) {
return name.replaceAll("[\\s.\\-!@#%=+/*^&|(){}\\[\\],]", "_");
}
public static String getUpperUnderscoreCaseFromLowerCamelCase(String lowerCamelCaseString) {
final StringBuilder result = new StringBuilder();
int position = 0;
boolean wasLowerCase = false;
while ( position < lowerCamelCaseString.length() ) {
final int codePoint = lowerCamelCaseString.codePointAt( position );
final boolean isUpperCase = isUpperCase( codePoint );
if ( wasLowerCase && isUpperCase ) {
result.append('_');
}
result.appendCodePoint( toUpperCase( codePoint ) );
position += charCount( codePoint );
wasLowerCase = !isUpperCase;
}
if ( result.toString().equals( lowerCamelCaseString ) ) {
result.insert(0, '_');
}
return result.toString();
}
private static boolean startsWithSeveralUpperCaseLetters(String string) {
return string.length() > 1
&& isUpperCase( string.charAt( 0 ) )
&& isUpperCase( string.charAt( 1 ) );
}
/**
* If this is an "intermediate" | StringUtil |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/JsonMappingTests.java | {
"start": 12233,
"end": 12885
} | class ____ {
private String string;
public StringNode() {
}
public StringNode(String string) {
this.string = string;
}
public String getString() {
return string;
}
public void setString(String string) {
this.string = string;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
StringNode that = (StringNode) o;
return string != null ? string.equals( that.string ) : that.string == null;
}
@Override
public int hashCode() {
return string != null ? string.hashCode() : 0;
}
}
}
| StringNode |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/jmx/AbstractMBeanServerTests.java | {
"start": 2296,
"end": 3917
} | class ____ {
protected MBeanServer server;
@BeforeEach
public final void setUp() throws Exception {
this.server = MBeanServerFactory.createMBeanServer();
try {
onSetUp();
}
catch (Exception ex) {
releaseServer();
throw ex;
}
}
@AfterEach
protected void tearDown() throws Exception {
releaseServer();
onTearDown();
}
private void releaseServer() {
try {
MBeanServerFactory.releaseMBeanServer(getServer());
}
catch (IllegalArgumentException ex) {
if (!ex.getMessage().contains("not in list")) {
throw ex;
}
}
MBeanTestUtils.resetMBeanServers();
}
protected final ConfigurableApplicationContext loadContext(String configLocation) {
GenericApplicationContext ctx = new GenericApplicationContext();
new XmlBeanDefinitionReader(ctx).loadBeanDefinitions(configLocation);
ctx.getDefaultListableBeanFactory().registerSingleton("server", getServer());
ctx.refresh();
return ctx;
}
protected void onSetUp() throws Exception {
}
protected void onTearDown() {
}
protected final MBeanServer getServer() {
return this.server;
}
/**
* Start the specified {@link MBeanExporter}.
*/
protected void start(MBeanExporter exporter) {
exporter.afterPropertiesSet();
exporter.afterSingletonsInstantiated();
}
protected void assertIsRegistered(String message, ObjectName objectName) {
assertThat(getServer().isRegistered(objectName)).as(message).isTrue();
}
protected void assertIsNotRegistered(String message, ObjectName objectName) {
assertThat(getServer().isRegistered(objectName)).as(message).isFalse();
}
static | AbstractMBeanServerTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsResponsePBImpl.java | {
"start": 1143,
"end": 2290
} | class ____ extends
AddToClusterNodeLabelsResponse {
AddToClusterNodeLabelsResponseProto proto = AddToClusterNodeLabelsResponseProto
.getDefaultInstance();
AddToClusterNodeLabelsResponseProto.Builder builder = null;
boolean viaProto = false;
public AddToClusterNodeLabelsResponsePBImpl() {
builder = AddToClusterNodeLabelsResponseProto.newBuilder();
}
public AddToClusterNodeLabelsResponsePBImpl(
AddToClusterNodeLabelsResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public AddToClusterNodeLabelsResponseProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}
| AddToClusterNodeLabelsResponsePBImpl |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultArtifactFactory.java | {
"start": 1292,
"end": 3657
} | class ____ implements ArtifactFactory {
@Override
public Artifact create(@Nonnull ArtifactFactoryRequest request) {
requireNonNull(request, "request");
InternalSession session = InternalSession.from(request.getSession());
ArtifactType type = null;
if (request.getType() != null) {
type = session.getSession().getArtifactTypeRegistry().get(request.getType());
}
String str1 = request.getClassifier();
String classifier =
str1 != null && !str1.isEmpty() ? request.getClassifier() : type != null ? type.getClassifier() : null;
String str = request.getExtension();
String extension =
str != null && !str.isEmpty() ? request.getExtension() : type != null ? type.getExtension() : null;
return new DefaultArtifact(
session,
new org.eclipse.aether.artifact.DefaultArtifact(
request.getGroupId(),
request.getArtifactId(),
classifier,
extension,
request.getVersion(),
type));
}
@Override
public ProducedArtifact createProduced(@Nonnull ArtifactFactoryRequest request) {
requireNonNull(request, "request");
InternalSession session = InternalSession.from(request.getSession());
ArtifactType type = null;
if (request.getType() != null) {
type = session.getSession().getArtifactTypeRegistry().get(request.getType());
}
String str1 = request.getClassifier();
String classifier =
str1 != null && !str1.isEmpty() ? request.getClassifier() : type != null ? type.getClassifier() : null;
String str = request.getExtension();
String extension =
str != null && !str.isEmpty() ? request.getExtension() : type != null ? type.getExtension() : null;
return new DefaultProducedArtifact(
session,
new org.eclipse.aether.artifact.DefaultArtifact(
request.getGroupId(),
request.getArtifactId(),
classifier,
extension,
request.getVersion(),
type));
}
}
| DefaultArtifactFactory |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/PipeApplicationStub.java | {
"start": 1088,
"end": 2972
} | class ____ extends CommonStub {
public static void main(String[] args) {
PipeApplicationStub client = new PipeApplicationStub();
client.binaryProtocolStub();
}
public void binaryProtocolStub() {
try {
initSoket();
// output code
WritableUtils.writeVInt(dataOut, 50);
IntWritable wt = new IntWritable();
wt.set(123);
writeObject(wt, dataOut);
writeObject(new Text("value"), dataOut);
// PARTITIONED_OUTPUT
WritableUtils.writeVInt(dataOut, 51);
WritableUtils.writeVInt(dataOut, 0);
writeObject(wt, dataOut);
writeObject(new Text("value"), dataOut);
// STATUS
WritableUtils.writeVInt(dataOut, 52);
Text.writeString(dataOut, "PROGRESS");
dataOut.flush();
// progress
WritableUtils.writeVInt(dataOut, 53);
dataOut.writeFloat(0.55f);
// register counter
WritableUtils.writeVInt(dataOut, 55);
// id
WritableUtils.writeVInt(dataOut, 0);
Text.writeString(dataOut, "group");
Text.writeString(dataOut, "name");
// increment counter
WritableUtils.writeVInt(dataOut, 56);
WritableUtils.writeVInt(dataOut, 0);
WritableUtils.writeVLong(dataOut, 2);
// map item
int intValue = WritableUtils.readVInt(dataInput);
System.out.println("intValue:" + intValue);
IntWritable iw = new IntWritable();
readObject(iw, dataInput);
System.out.println("key:" + iw.get());
Text txt = new Text();
readObject(txt, dataInput);
System.out.println("value:" + txt.toString());
// done
// end of session
WritableUtils.writeVInt(dataOut, 54);
System.out.println("finish");
dataOut.flush();
dataOut.close();
} catch (Exception x) {
x.printStackTrace();
} finally {
closeSoket();
}
}
}
| PipeApplicationStub |
java | google__gson | gson/src/test/java/com/google/gson/JsonPrimitiveTest.java | {
"start": 1003,
"end": 12966
} | class ____ {
@SuppressWarnings("unused")
@Test
public void testNulls() {
assertThrows(NullPointerException.class, () -> new JsonPrimitive((Boolean) null));
assertThrows(NullPointerException.class, () -> new JsonPrimitive((Number) null));
assertThrows(NullPointerException.class, () -> new JsonPrimitive((String) null));
assertThrows(NullPointerException.class, () -> new JsonPrimitive((Character) null));
}
@Test
public void testBoolean() {
JsonPrimitive json = new JsonPrimitive(true);
assertThat(json.isBoolean()).isTrue();
assertThat(json.getAsBoolean()).isTrue();
// Extra support for booleans
json = new JsonPrimitive(1);
assertThat(json.getAsBoolean()).isFalse();
json = new JsonPrimitive("1");
assertThat(json.getAsBoolean()).isFalse();
json = new JsonPrimitive("true");
assertThat(json.getAsBoolean()).isTrue();
json = new JsonPrimitive("TrUe");
assertThat(json.getAsBoolean()).isTrue();
json = new JsonPrimitive("1.3");
assertThat(json.getAsBoolean()).isFalse();
}
@Test
public void testParsingStringAsBoolean() {
JsonPrimitive json = new JsonPrimitive("true");
assertThat(json.isBoolean()).isFalse();
assertThat(json.getAsBoolean()).isTrue();
}
@Test
public void testParsingStringAsNumber() {
JsonPrimitive json = new JsonPrimitive("1");
assertThat(json.isNumber()).isFalse();
assertThat(json.getAsDouble()).isEqualTo(1.0);
assertThat(json.getAsFloat()).isEqualTo(1F);
assertThat(json.getAsInt()).isEqualTo(1);
assertThat(json.getAsLong()).isEqualTo(1L);
assertThat(json.getAsShort()).isEqualTo((short) 1);
assertThat(json.getAsByte()).isEqualTo((byte) 1);
assertThat(json.getAsBigInteger()).isEqualTo(new BigInteger("1"));
assertThat(json.getAsBigDecimal()).isEqualTo(new BigDecimal("1"));
}
@Test
public void testAsNumber_Boolean() {
JsonPrimitive json = new JsonPrimitive(true);
var e = assertThrows(UnsupportedOperationException.class, () -> json.getAsNumber());
assertThat(e).hasMessageThat().isEqualTo("Primitive is neither a number nor a string");
}
@SuppressWarnings("deprecation")
@Test
public void testStringsAndChar() {
JsonPrimitive json = new JsonPrimitive("abc");
assertThat(json.isString()).isTrue();
assertThat(json.getAsCharacter()).isEqualTo('a');
assertThat(json.getAsString()).isEqualTo("abc");
json = new JsonPrimitive('z');
assertThat(json.isString()).isTrue();
assertThat(json.getAsCharacter()).isEqualTo('z');
assertThat(json.getAsString()).isEqualTo("z");
json = new JsonPrimitive(true);
assertThat(json.getAsString()).isEqualTo("true");
JsonPrimitive emptyString = new JsonPrimitive("");
assertThat(emptyString.getAsString()).isEqualTo("");
var e = assertThrows(UnsupportedOperationException.class, () -> emptyString.getAsCharacter());
assertThat(e).hasMessageThat().isEqualTo("String value is empty");
}
@Test
public void testExponential() {
JsonPrimitive json = new JsonPrimitive("1E+7");
assertThat(json.getAsBigDecimal()).isEqualTo(new BigDecimal("1E+7"));
assertThat(json.getAsDouble()).isEqualTo(1E+7);
// Integers can not handle exponents like this
assertThrows(NumberFormatException.class, () -> json.getAsInt());
}
@Test
public void testByteEqualsShort() {
JsonPrimitive p1 = new JsonPrimitive((byte) 10);
JsonPrimitive p2 = new JsonPrimitive((short) 10);
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testByteEqualsInteger() {
JsonPrimitive p1 = new JsonPrimitive((byte) 10);
JsonPrimitive p2 = new JsonPrimitive(10);
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testByteEqualsLong() {
JsonPrimitive p1 = new JsonPrimitive((byte) 10);
JsonPrimitive p2 = new JsonPrimitive(10L);
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testByteEqualsBigInteger() {
JsonPrimitive p1 = new JsonPrimitive((byte) 10);
JsonPrimitive p2 = new JsonPrimitive(new BigInteger("10"));
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testShortEqualsInteger() {
JsonPrimitive p1 = new JsonPrimitive((short) 10);
JsonPrimitive p2 = new JsonPrimitive(10);
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testShortEqualsLong() {
JsonPrimitive p1 = new JsonPrimitive((short) 10);
JsonPrimitive p2 = new JsonPrimitive(10L);
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testShortEqualsBigInteger() {
JsonPrimitive p1 = new JsonPrimitive((short) 10);
JsonPrimitive p2 = new JsonPrimitive(new BigInteger("10"));
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testIntegerEqualsLong() {
JsonPrimitive p1 = new JsonPrimitive(10);
JsonPrimitive p2 = new JsonPrimitive(10L);
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testIntegerEqualsBigInteger() {
JsonPrimitive p1 = new JsonPrimitive(10);
JsonPrimitive p2 = new JsonPrimitive(new BigInteger("10"));
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testLongEqualsBigInteger() {
JsonPrimitive p1 = new JsonPrimitive(10L);
JsonPrimitive p2 = new JsonPrimitive(new BigInteger("10"));
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testFloatEqualsDouble() {
JsonPrimitive p1 = new JsonPrimitive(10.25F);
JsonPrimitive p2 = new JsonPrimitive(10.25D);
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testFloatEqualsBigDecimal() {
JsonPrimitive p1 = new JsonPrimitive(10.25F);
JsonPrimitive p2 = new JsonPrimitive(new BigDecimal("10.25"));
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testDoubleEqualsBigDecimal() {
JsonPrimitive p1 = new JsonPrimitive(10.25D);
JsonPrimitive p2 = new JsonPrimitive(new BigDecimal("10.25"));
assertThat(p1).isEqualTo(p2);
assertThat(p1.hashCode()).isEqualTo(p2.hashCode());
}
@Test
public void testToString() {
JsonPrimitive json = new JsonPrimitive("Some\nEscaped\nValue");
assertThat(json.toString()).isEqualTo("\"Some\\nEscaped\\nValue\"");
json = new JsonPrimitive("");
assertThat(json.toString()).isEqualTo("\"\"");
json = new JsonPrimitive(new BigDecimal("1.333"));
assertThat(json.toString()).isEqualTo("1.333");
// Preserves trailing 0
json = new JsonPrimitive(new BigDecimal("1.0000"));
assertThat(json.toString()).isEqualTo("1.0000");
json = new JsonPrimitive(Float.NaN);
assertThat(json.toString()).isEqualTo("NaN");
json = new JsonPrimitive(Double.NEGATIVE_INFINITY);
assertThat(json.toString()).isEqualTo("-Infinity");
json = new JsonPrimitive('a');
assertThat(json.toString()).isEqualTo("\"a\"");
json = new JsonPrimitive('\0');
assertThat(json.toString()).isEqualTo("\"\\u0000\"");
json = new JsonPrimitive(true);
assertThat(json.toString()).isEqualTo("true");
}
@Test
public void testEquals() {
MoreAsserts.assertEqualsAndHashCode(new JsonPrimitive("A"), new JsonPrimitive("A"));
MoreAsserts.assertEqualsAndHashCode(new JsonPrimitive(true), new JsonPrimitive(true));
MoreAsserts.assertEqualsAndHashCode(new JsonPrimitive(5L), new JsonPrimitive(5L));
MoreAsserts.assertEqualsAndHashCode(new JsonPrimitive('a'), new JsonPrimitive('a'));
MoreAsserts.assertEqualsAndHashCode(new JsonPrimitive(Float.NaN), new JsonPrimitive(Float.NaN));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(Float.NEGATIVE_INFINITY), new JsonPrimitive(Float.NEGATIVE_INFINITY));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(Float.POSITIVE_INFINITY), new JsonPrimitive(Float.POSITIVE_INFINITY));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(Double.NaN), new JsonPrimitive(Double.NaN));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(Double.NEGATIVE_INFINITY), new JsonPrimitive(Double.NEGATIVE_INFINITY));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(Double.POSITIVE_INFINITY), new JsonPrimitive(Double.POSITIVE_INFINITY));
assertThat(new JsonPrimitive("a").equals(new JsonPrimitive("b"))).isFalse();
assertThat(new JsonPrimitive(true).equals(new JsonPrimitive(false))).isFalse();
assertThat(new JsonPrimitive(0).equals(new JsonPrimitive(1))).isFalse();
}
@Test
public void testEqualsAcrossTypes() {
MoreAsserts.assertEqualsAndHashCode(new JsonPrimitive("a"), new JsonPrimitive('a'));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(new BigInteger("0")), new JsonPrimitive(0));
MoreAsserts.assertEqualsAndHashCode(new JsonPrimitive(0), new JsonPrimitive(0L));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(new BigDecimal("0")), new JsonPrimitive(0));
MoreAsserts.assertEqualsAndHashCode(
new JsonPrimitive(Float.NaN), new JsonPrimitive(Double.NaN));
}
@Test
public void testEqualsIntegerAndBigInteger() {
JsonPrimitive a = new JsonPrimitive(5L);
JsonPrimitive b = new JsonPrimitive(new BigInteger("18446744073709551621"));
assertWithMessage("%s not equals %s", a, b).that(a.equals(b)).isFalse();
}
@Test
public void testEqualsDoesNotEquateStringAndNonStringTypes() {
assertThat(new JsonPrimitive("true").equals(new JsonPrimitive(true))).isFalse();
assertThat(new JsonPrimitive("0").equals(new JsonPrimitive(0))).isFalse();
assertThat(new JsonPrimitive("NaN").equals(new JsonPrimitive(Float.NaN))).isFalse();
}
@Test
public void testDeepCopy() {
JsonPrimitive a = new JsonPrimitive("a");
assertThat(a).isSameInstanceAs(a.deepCopy()); // Primitives are immutable!
}
@Test
public void testBigDecimalEquals() {
JsonPrimitive small = new JsonPrimitive(1.0);
JsonPrimitive large = new JsonPrimitive(2.0);
assertThat(small.equals(large)).isFalse();
BigDecimal doubleMax = BigDecimal.valueOf(Double.MAX_VALUE);
JsonPrimitive smallDecimal = new JsonPrimitive(doubleMax.add(new BigDecimal("100.0")));
JsonPrimitive largeDecimal = new JsonPrimitive(doubleMax.add(new BigDecimal("200.0")));
assertThat(smallDecimal.equals(largeDecimal)).isFalse();
}
@Test
public void testBigDecimalEqualsZero() {
assertThat(
new JsonPrimitive(new BigDecimal("0.0"))
.equals(new JsonPrimitive(new BigDecimal("0.00"))))
.isTrue();
assertThat(
new JsonPrimitive(new BigDecimal("0.00"))
.equals(new JsonPrimitive(Double.valueOf("0.00"))))
.isTrue();
}
/**
* Verifies that {@link JsonPrimitive#equals(Object)} is <i>transitive</i> for {@link BigDecimal},
* as required by the {@link Object#equals(Object)} documentation.
*/
@Test
public void testBigDecimalEqualsTransitive() {
JsonPrimitive x = new JsonPrimitive(new BigDecimal("0"));
JsonPrimitive y = new JsonPrimitive(0.0d);
JsonPrimitive z = new JsonPrimitive(new BigDecimal("0.00"));
assertThat(x.equals(y)).isTrue();
assertThat(y.equals(z)).isTrue();
// ... implies
assertThat(x.equals(z)).isTrue();
}
@Test
public void testEqualsDoubleNaNAndBigDecimal() {
assertThat(new JsonPrimitive(Double.NaN).equals(new JsonPrimitive(new BigDecimal("1.0"))))
.isFalse();
}
}
| JsonPrimitiveTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/spi/EffectiveEntityGraph.java | {
"start": 779,
"end": 4387
} | class ____ implements AppliedGraph, Serializable {
private static final Logger LOG = Logger.getLogger( EffectiveEntityGraph.class );
private final boolean allowOverwrite;
private @Nullable GraphSemantic semantic;
private @Nullable RootGraphImplementor<?> graph;
/**
* @implSpec I explicitly made this constructor package protected
* because we may need to pass in the SessionFactory or JpaCompliance
* etc to be able to know what to do in {@link #applyConfiguredGraph(Map)}
* when the incoming properties contain both a
* {@link GraphSemantic#FETCH} and a {@link GraphSemantic#LOAD}.
* In other words, we may need to add some constructor argument here so
* we want to control what can call it
*/
@Incubating
public EffectiveEntityGraph() {
this( false );
}
/**
* @implSpec See {@link #EffectiveEntityGraph}
*/
@Incubating
public EffectiveEntityGraph(boolean allowOverwrite) {
this.allowOverwrite = allowOverwrite;
}
@Override
public @Nullable GraphSemantic getSemantic() {
return semantic;
}
@Override
public @Nullable RootGraphImplementor<?> getGraph() {
return graph;
}
/**
* Apply the graph and semantic. The semantic is required. The graph
* may be null, but that should generally be considered mis-use.
*
* @throws IllegalArgumentException Thrown if the semantic is null
* @throws IllegalStateException If previous state is still available (hasn't been cleared).
*/
public void applyGraph(RootGraphImplementor<?> graph, GraphSemantic semantic) {
if ( semantic == null ) {
throw new IllegalArgumentException( "Graph semantic cannot be null" );
}
verifyWriteability();
LOG.tracef( "Setting effective graph state [%s] : %s", semantic.name(), graph );
this.semantic = semantic;
this.graph = graph;
}
private void verifyWriteability() {
if ( ! allowOverwrite ) {
if ( semantic != null ) {
throw new IllegalStateException( "Cannot overwrite existing state, should clear previous state first" );
}
}
}
/**
* Apply a graph and semantic based on configuration properties or hints
* based on {@link GraphSemantic#getJpaHintName()} for {@link GraphSemantic#LOAD} or
* {@link GraphSemantic#FETCH}.
* <p>
* The semantic is required. The graph
* may be null, but that should generally be considered mis-use.
*
* @throws IllegalArgumentException If both kinds of graphs were present in the properties/hints
* @throws IllegalStateException If previous state is still available (hasn't been cleared).
*/
public void applyConfiguredGraph(@Nullable Map<String,?> properties) {
if ( properties != null && !properties.isEmpty() ) {
var fetchHint = (RootGraphImplementor<?>) properties.get( GraphSemantic.FETCH.getJpaHintName() );
var loadHint = (RootGraphImplementor<?>) properties.get( GraphSemantic.LOAD.getJpaHintName() );
if ( fetchHint == null ) {
fetchHint = (RootGraphImplementor<?>) properties.get( GraphSemantic.FETCH.getJakartaHintName() );
}
if ( loadHint == null ) {
loadHint = (RootGraphImplementor<?>) properties.get( GraphSemantic.LOAD.getJakartaHintName() );
}
if ( fetchHint != null ) {
if ( loadHint != null ) {
// can't have both
throw new IllegalArgumentException(
"Passed properties contained both a LOAD and a FETCH graph which is illegal - " +
"only one should be passed"
);
}
applyGraph( fetchHint, GraphSemantic.FETCH );
}
else if ( loadHint != null ) {
applyGraph( loadHint, GraphSemantic.LOAD );
}
}
}
public void clear() {
semantic = null;
graph = null;
}
}
| EffectiveEntityGraph |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableCreate.java | {
"start": 6633,
"end": 9670
} | class ____<T>
extends AtomicLong
implements FlowableEmitter<T>, Subscription {
private static final long serialVersionUID = 7326289992464377023L;
final Subscriber<? super T> downstream;
final SequentialDisposable serial;
BaseEmitter(Subscriber<? super T> downstream) {
this.downstream = downstream;
this.serial = new SequentialDisposable();
}
@Override
public void onComplete() {
completeDownstream();
}
protected void completeDownstream() {
if (isCancelled()) {
return;
}
try {
downstream.onComplete();
} finally {
serial.dispose();
}
}
@Override
public final void onError(Throwable e) {
if (e == null) {
e = ExceptionHelper.createNullPointerException("onError called with a null Throwable.");
}
if (!signalError(e)) {
RxJavaPlugins.onError(e);
}
}
@Override
public final boolean tryOnError(Throwable e) {
if (e == null) {
e = ExceptionHelper.createNullPointerException("tryOnError called with a null Throwable.");
}
return signalError(e);
}
public boolean signalError(Throwable e) {
return errorDownstream(e);
}
protected boolean errorDownstream(Throwable e) {
if (isCancelled()) {
return false;
}
try {
downstream.onError(e);
} finally {
serial.dispose();
}
return true;
}
@Override
public final void cancel() {
serial.dispose();
onUnsubscribed();
}
void onUnsubscribed() {
// default is no-op
}
@Override
public final boolean isCancelled() {
return serial.isDisposed();
}
@Override
public final void request(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(this, n);
onRequested();
}
}
void onRequested() {
// default is no-op
}
@Override
public final void setDisposable(Disposable d) {
serial.update(d);
}
@Override
public final void setCancellable(Cancellable c) {
setDisposable(new CancellableDisposable(c));
}
@Override
public final long requested() {
return get();
}
@Override
public final FlowableEmitter<T> serialize() {
return new SerializedEmitter<>(this);
}
@Override
public String toString() {
return String.format("%s{%s}", getClass().getSimpleName(), super.toString());
}
}
static final | BaseEmitter |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxCreate.java | {
"start": 24653,
"end": 25112
} | class ____ implements Disposable {
@Nullable Disposable onCancel;
@Nullable Disposable disposable;
SinkDisposable(@Nullable Disposable disposable, @Nullable Disposable onCancel) {
this.disposable = disposable;
this.onCancel = onCancel;
}
@Override
public void dispose() {
if (disposable != null) {
disposable.dispose();
}
}
public void cancel() {
if (onCancel != null) {
onCancel.dispose();
}
}
}
}
| SinkDisposable |
java | junit-team__junit5 | junit-vintage-engine/src/main/java/org/junit/vintage/engine/descriptor/RunnerTestDescriptor.java | {
"start": 6659,
"end": 7195
} | class ____ extends Filter {
private final Description description;
private boolean successful;
ExcludeDescriptionFilter(Description description) {
this.description = description;
}
@Override
public boolean shouldRun(Description description) {
if (this.description.equals(description)) {
successful = true;
return false;
}
return true;
}
@Override
public String describe() {
return "exclude " + description;
}
boolean wasSuccessful() {
return successful;
}
}
}
| ExcludeDescriptionFilter |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/DialectChecks.java | {
"start": 4131,
"end": 4346
} | class ____ implements DialectCheck {
public boolean isMatch(Dialect dialect) {
return dialect.doesRepeatableReadCauseReadersToBlockWriters();
}
}
public static | DoesRepeatableReadCauseReadersToBlockWritersCheck |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/BooleanParam.java | {
"start": 889,
"end": 1304
} | class ____ extends Param<Boolean, BooleanParam.Domain> {
static final String TRUE = "true";
static final String FALSE = "false";
/** @return the parameter value as a string */
@Override
public String getValueString() {
return value.toString();
}
BooleanParam(final Domain domain, final Boolean value) {
super(domain, value);
}
/** The domain of the parameter. */
static final | BooleanParam |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/pool/ha/node/ZookeeperNodeListener.java | {
"start": 2055,
"end": 11710
} | class ____ extends NodeListener {
private static final Log LOG = LogFactory.getLog(ZookeeperNodeListener.class);
private final Lock lock = new ReentrantLock();
private String zkConnectString;
private String path = "/ha-druid-datasources";
private boolean privateZkClient; // Should I close the client?
private PathChildrenCache cache;
private CuratorFramework client;
/**
* URL Template, e.g.
* jdbc:mysql://${host}:${port}/${database}?useUnicode=true
* ${host}, ${port} and ${database} will be replaced by values in ZK
* ${host} can also be #{host} and #host#
*/
private String urlTemplate;
/**
* Init a PathChildrenCache to watch the given path.
*/
@Override
public void init() {
checkParameters();
super.init();
if (client == null) {
client = CuratorFrameworkFactory.builder()
.canBeReadOnly(true)
.connectionTimeoutMs(5000)
.connectString(zkConnectString)
.retryPolicy(new RetryForever(10000))
.sessionTimeoutMs(30000)
.build();
client.start();
privateZkClient = true;
}
cache = new PathChildrenCache(client, path, true);
cache.getListenable().addListener(new PathChildrenCacheListener() {
@Override
public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
try {
LOG.info("Receive an event: " + event.getType());
lock.lock();
PathChildrenCacheEvent.Type eventType = event.getType();
switch (eventType) {
case CHILD_REMOVED:
updateSingleNode(event, NodeEventTypeEnum.DELETE);
break;
case CHILD_ADDED:
updateSingleNode(event, NodeEventTypeEnum.ADD);
break;
case CONNECTION_RECONNECTED:
refreshAllNodes();
break;
default:
// CHILD_UPDATED
// INITIALIZED
// CONNECTION_LOST
// CONNECTION_SUSPENDED
LOG.info("Received a PathChildrenCacheEvent, IGNORE it: " + event);
}
} finally {
lock.unlock();
LOG.info("Finish the processing of event: " + event.getType());
}
}
});
try {
// Use BUILD_INITIAL_CACHE to force build cache in the current Thread.
// We don't use POST_INITIALIZED_EVENT, so there's no INITIALIZED event.
cache.start(PathChildrenCache.StartMode.BUILD_INITIAL_CACHE);
} catch (Exception e) {
LOG.error("Can't start PathChildrenCache", e);
}
}
/**
* Close PathChildrenCache and CuratorFramework.
*/
@Override
public void destroy() {
if (cache != null) {
try {
cache.close();
} catch (IOException e) {
LOG.error("IOException occurred while closing PathChildrenCache.", e);
}
}
if (client != null && privateZkClient) {
client.close();
}
}
/**
* Build Properties from PathChildrenCache.
* Should be called after init().
*
* @see #getPropertiesFromCache()
*/
@Override
public List<NodeEvent> refresh() {
lock.lock();
try {
Properties properties = getPropertiesFromCache();
List<NodeEvent> events = NodeEvent.getEventsByDiffProperties(getProperties(), properties);
if (events != null && !events.isEmpty()) {
setProperties(properties);
}
return events;
} finally {
lock.unlock();
}
}
private void checkParameters() {
if (client == null && StringUtils.isEmpty(zkConnectString)) {
throw new DruidRuntimeException("ZK Client is NULL, Please set the zkConnectString.");
}
if (StringUtils.isEmpty(path)) {
throw new DruidRuntimeException("Please set the ZooKeeper node path.");
}
if (StringUtils.isEmpty(urlTemplate)) {
throw new DruidRuntimeException("Please set the urlTemplate.");
}
}
private void updateSingleNode(PathChildrenCacheEvent event, NodeEventTypeEnum type) {
ChildData data = event.getData();
String nodeName = getNodeName(data);
List<String> names = new ArrayList<String>();
names.add(getPrefix() + "." + nodeName);
Properties properties = getPropertiesFromChildData(data);
List<NodeEvent> events = NodeEvent.generateEvents(properties, names, type);
if (events.isEmpty()) {
return;
}
if (type == NodeEventTypeEnum.ADD) {
getProperties().putAll(properties);
} else {
for (String n : properties.stringPropertyNames()) {
getProperties().remove(n);
}
}
super.update(events);
}
private void refreshAllNodes() {
try {
if (client.checkExists().forPath(path) == null) {
LOG.warn("PATH[" + path + "] is NOT existed, can NOT refresh nodes.");
return;
}
cache.rebuild();
Properties properties = getPropertiesFromCache();
List<NodeEvent> events = NodeEvent.getEventsByDiffProperties(getProperties(), properties);
if (events != null && !events.isEmpty()) {
setProperties(properties);
super.update(events);
}
} catch (Exception e) {
LOG.error("Can NOT refresh Cache Nodes.", e);
}
}
private Properties getPropertiesFromCache() {
List<ChildData> data = cache.getCurrentData();
Properties properties = new Properties();
for (ChildData c : data) {
properties.putAll(getPropertiesFromChildData(c));
}
return properties;
}
private Properties getPropertiesFromChildData(ChildData data) {
String dataPrefix = getPrefix();
Properties properties = new Properties();
if (data == null) {
return properties;
}
String nodeName = getNodeName(data);
String str = new String(data.getData());
Properties full = new Properties();
try {
full.load(new StringReader(str));
} catch (IOException e) {
LOG.error("Can't load Properties from String. " + str, e);
}
Properties filtered = PropertiesUtils.filterPrefix(full, dataPrefix);
for (String n : filtered.stringPropertyNames()) {
properties.setProperty(
n.replaceFirst(dataPrefix, dataPrefix + "\\." + nodeName),
filtered.getProperty(n));
}
if (!properties.containsKey(dataPrefix + "." + nodeName + ".url")) {
properties.setProperty(dataPrefix + "." + nodeName + ".url", formatUrl(filtered));
}
return properties;
}
private String formatUrl(Properties properties) {
String url = urlTemplate;
String dataPrefix = getPrefix();
if (properties.containsKey(dataPrefix + ".host")) {
url = url.replace("${host}", properties.getProperty(dataPrefix + ".host"));
url = url.replace("#{host}", properties.getProperty(dataPrefix + ".host"));
url = url.replace("#host#", properties.getProperty(dataPrefix + ".host"));
}
if (properties.containsKey(dataPrefix + ".port")) {
url = url.replace("${port}", properties.getProperty(dataPrefix + ".port"));
url = url.replace("#{port}", properties.getProperty(dataPrefix + ".port"));
url = url.replace("#port#", properties.getProperty(dataPrefix + ".port"));
}
if (properties.containsKey(dataPrefix + ".database")) {
url = url.replace("${database}", properties.getProperty(dataPrefix + ".database"));
url = url.replace("#{database}", properties.getProperty(dataPrefix + ".database"));
url = url.replace("#database#", properties.getProperty(dataPrefix + ".database"));
}
return url;
}
private String getNodeName(ChildData data) {
String eventZkPath = data.getPath();
if (eventZkPath.startsWith(path + "/")) {
return eventZkPath.substring(path.length() + 1);
} else {
return eventZkPath;
}
}
public void setClient(CuratorFramework client) {
if (client != null) {
this.client = client;
privateZkClient = false;
}
}
public CuratorFramework getClient() {
return client;
}
public String getZkConnectString() {
return zkConnectString;
}
public void setZkConnectString(String zkConnectString) {
this.zkConnectString = zkConnectString;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getUrlTemplate() {
return urlTemplate;
}
public void setUrlTemplate(String urlTemplate) {
this.urlTemplate = urlTemplate;
}
}
| ZookeeperNodeListener |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/exec/spi/PreAction.java | {
"start": 311,
"end": 1037
} | interface ____ extends SecondaryAction {
/**
* Perform the action.
* <p/>
* Generally the action should use the passed {@code jdbcStatementAccess} to interact with the
* database, although the {@code jdbcConnection} can be used to create specialized statements,
* access the {@linkplain java.sql.DatabaseMetaData database metadata}, etc.
*
* @param jdbcStatementAccess Access to a JDBC Statement object which may be used to perform the action.
* @param jdbcConnection The JDBC Connection.
* @param executionContext Access to contextual information useful while executing.
*/
void performPreAction(StatementAccess jdbcStatementAccess, Connection jdbcConnection, ExecutionContext executionContext);
}
| PreAction |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/bind/ServletRequestUtils.java | {
"start": 22901,
"end": 23574
} | class ____ extends ParameterParser<String> {
@Override
protected String getType() {
return "string";
}
@Override
protected String doParse(String parameter) throws NumberFormatException {
return parameter;
}
public String validateRequiredString(String name, String value) throws ServletRequestBindingException {
validateRequiredParameter(name, value);
return value;
}
public String[] validateRequiredStrings(String name, String[] values) throws ServletRequestBindingException {
validateRequiredParameter(name, values);
for (String value : values) {
validateRequiredParameter(name, value);
}
return values;
}
}
}
| StringParser |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/Spr10668Tests.java | {
"start": 1414,
"end": 1528
} | class ____ {
@Autowired(required = false)
MyComponent component;
}
@Configuration
public static | ParentConfig |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/primitive/PrimitiveWrapperProducerTest.java | {
"start": 589,
"end": 2892
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Producers.class, Injection.class, Observer.class,
ProducerDisposer.class);
@Test
public void testPrimitiveWrapperNullProducers() {
// field injection, constructor injetion, initializer method
Injection bean = Arc.container().instance(Injection.class).get();
assertEquals(false, bean.bool);
assertEquals((byte) 0, bean.b);
assertEquals((short) 0, bean.s);
assertEquals(0, bean.i);
assertEquals(0L, bean.l);
assertEquals(0.0F, bean.f);
assertEquals(0.0, bean.d);
assertEquals((char) 0, bean.c);
// observer method
Arc.container().beanManager().getEvent().fire("foo");
assertEquals(false, Observer.bool);
assertEquals((byte) 0, Observer.b);
assertEquals((short) 0, Observer.s);
assertEquals(0, Observer.i);
assertEquals(0L, Observer.l);
assertEquals(0.0F, Observer.f);
assertEquals(0.0, Observer.d);
assertEquals((char) 0, Observer.c);
// producer method
InstanceHandle<MyPojo> handle = Arc.container().instance(MyPojo.class);
assertNotNull(handle.get());
assertEquals(false, ProducerDisposer.producer_bool);
assertEquals((byte) 0, ProducerDisposer.producer_b);
assertEquals((short) 0, ProducerDisposer.producer_s);
assertEquals(0, ProducerDisposer.producer_i);
assertEquals(0L, ProducerDisposer.producer_l);
assertEquals(0.0F, ProducerDisposer.producer_f);
assertEquals(0.0, ProducerDisposer.producer_d);
assertEquals((char) 0, ProducerDisposer.producer_c);
// disposer method
handle.destroy();
assertEquals(false, ProducerDisposer.disposer_bool);
assertEquals((byte) 0, ProducerDisposer.disposer_b);
assertEquals((short) 0, ProducerDisposer.disposer_s);
assertEquals(0, ProducerDisposer.disposer_i);
assertEquals(0L, ProducerDisposer.disposer_l);
assertEquals(0.0F, ProducerDisposer.disposer_f);
assertEquals(0.0, ProducerDisposer.disposer_d);
assertEquals((char) 0, ProducerDisposer.disposer_c);
}
@Dependent
static | PrimitiveWrapperProducerTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/hint/RuntimeHintsTests.java | {
"start": 870,
"end": 2455
} | class ____ {
private final RuntimeHints hints = new RuntimeHints();
@Test
void reflectionHintWithClass() {
this.hints.reflection().registerType(String.class, MemberCategory.INVOKE_PUBLIC_CONSTRUCTORS);
assertThat(this.hints.reflection().typeHints()).singleElement().satisfies(typeHint -> {
assertThat(typeHint.getType().getCanonicalName()).isEqualTo(String.class.getCanonicalName());
assertThat(typeHint.fields()).isEmpty();
assertThat(typeHint.constructors()).isEmpty();
assertThat(typeHint.methods()).isEmpty();
assertThat(typeHint.getMemberCategories()).containsOnly(MemberCategory.INVOKE_PUBLIC_CONSTRUCTORS);
});
}
@Test
void resourceHintWithClass() {
this.hints.resources().registerType(String.class);
assertThat(this.hints.resources().resourcePatternHints()).singleElement().satisfies(resourceHint ->
assertThat(resourceHint.getIncludes()).map(ResourcePatternHint::getPattern)
.containsExactlyInAnyOrder("/", "java", "java/lang", "java/lang/String.class"));
}
@Test
void javaSerializationHintWithClass() {
this.hints.serialization().registerType(String.class);
assertThat(this.hints.serialization().javaSerializationHints().map(JavaSerializationHint::getType))
.containsExactly(TypeReference.of(String.class));
}
@Test
void jdkProxyWithClass() {
this.hints.proxies().registerJdkProxy(Function.class);
assertThat(this.hints.proxies().jdkProxyHints()).singleElement().satisfies(jdkProxyHint ->
assertThat(jdkProxyHint.getProxiedInterfaces()).containsExactly(TypeReference.of(Function.class)));
}
}
| RuntimeHintsTests |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/methodgenerics/multiple/SourceTypeHasMultipleTypeVarBothGenericMapper.java | {
"start": 1091,
"end": 1335
} | class ____ {
private final Map<String, Long> prop;
public Source(Map<String, Long> prop) {
this.prop = prop;
}
public Map<String, Long> getProp() {
return prop;
}
}
| Source |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/OpenApiResourceAuthenticatedInherited1.java | {
"start": 350,
"end": 448
} | class ____ extends OpenApiResourceAuthenticatedAtClassLevel {
}
| OpenApiResourceAuthenticatedInherited1 |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/feature/Features.java | {
"start": 1131,
"end": 2622
} | class ____ {
private Features() {}
/**
* Check if the personality is "maven3".
*/
public static boolean mavenMaven3Personality(@Nullable Map<String, ?> userProperties) {
return doGet(userProperties, Constants.MAVEN_MAVEN3_PERSONALITY, false);
}
/**
* Check if the consumer POM feature is active.
*/
public static boolean consumerPom(@Nullable Map<String, ?> userProperties) {
return doGet(userProperties, Constants.MAVEN_CONSUMER_POM, !mavenMaven3Personality(userProperties));
}
/**
* Check if consumer POM flattening is enabled.
*/
public static boolean consumerPomFlatten(@Nullable Map<String, ?> userProperties) {
return doGet(userProperties, Constants.MAVEN_CONSUMER_POM_FLATTEN, false);
}
/**
* Check if build POM deployment is enabled.
*/
public static boolean deployBuildPom(@Nullable Map<String, ?> userProperties) {
return doGet(userProperties, Constants.MAVEN_DEPLOY_BUILD_POM, true);
}
private static boolean doGet(Map<String, ?> userProperties, String key, boolean def) {
return doGet(userProperties != null ? userProperties.get(key) : null, def);
}
private static boolean doGet(Object val, boolean def) {
if (val instanceof Boolean bool) {
return bool;
} else if (val != null) {
return Boolean.parseBoolean(val.toString());
} else {
return def;
}
}
}
| Features |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/stages/CommitTaskStage.java | {
"start": 1617,
"end": 3471
} | class ____ extends
AbstractJobOrTaskStage<Void, CommitTaskStage.Result> {
private static final Logger LOG = LoggerFactory.getLogger(
CommitTaskStage.class);
public CommitTaskStage(final StageConfig stageConfig) {
super(true, stageConfig, OP_STAGE_TASK_COMMIT, false);
}
/**
* Scan the task attempt dir then save the manifest.
* A snapshot of the IOStats will be included in the manifest;
* this includes the scan time.
* @param arguments arguments to the function.
* @return the path the manifest was saved to, and the manifest.
* @throws IOException IO failure.
*/
@Override
protected CommitTaskStage.Result executeStage(final Void arguments)
throws IOException {
LOG.info("{}: Committing task \"{}\"", getName(), getTaskAttemptId());
// execute the scan
final TaskAttemptScanDirectoryStage scanStage =
new TaskAttemptScanDirectoryStage(getStageConfig());
TaskManifest manifest = scanStage.apply(arguments);
// add the scan as task commit. It's not quite, as it doesn't include
// the saving, but ...
scanStage.addExecutionDurationToStatistics(getIOStatistics(), OP_STAGE_TASK_COMMIT);
// Now save with retry, updating the statistics on every attempt.
Pair<Path, TaskManifest> p = new SaveTaskManifestStage(getStageConfig())
.apply(() -> {
/* save a snapshot of the IO Statistics */
final IOStatisticsSnapshot manifestStats = snapshotIOStatistics();
manifestStats.aggregate(getIOStatistics());
manifest.setIOStatistics(manifestStats);
return manifest;
});
return new CommitTaskStage.Result(p.getLeft(), p.getRight());
}
/**
* Result of the stage: the path the manifest was saved to
* and the manifest which was successfully saved.
*/
public static final | CommitTaskStage |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MoreThanOneQualifierTest.java | {
"start": 4883,
"end": 4999
} | class ____ {
private int n;
public TestClass1() {}
public void setN(int n) {}
}
/**
* A | TestClass1 |
java | grpc__grpc-java | s2a/src/main/java/io/grpc/s2a/internal/handshaker/S2ATrustManager.java | {
"start": 1651,
"end": 3564
} | class ____ implements X509TrustManager {
private final Optional<S2AIdentity> localIdentity;
private final S2AStub stub;
private final String hostname;
static S2ATrustManager createForClient(
S2AStub stub, String hostname, Optional<S2AIdentity> localIdentity) {
checkNotNull(stub);
checkNotNull(hostname);
return new S2ATrustManager(stub, hostname, localIdentity);
}
private S2ATrustManager(S2AStub stub, String hostname, Optional<S2AIdentity> localIdentity) {
this.stub = stub;
this.hostname = hostname;
this.localIdentity = localIdentity;
}
/**
* Validates the given certificate chain provided by the peer.
*
* @param chain the peer certificate chain
* @param authType the authentication type based on the client certificate
* @throws IllegalArgumentException if null or zero-length chain is passed in for the chain
* parameter.
* @throws CertificateException if the certificate chain is not trusted by this TrustManager.
*/
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
checkPeerTrusted(chain, /* isCheckingClientCertificateChain= */ true);
}
/**
* Validates the given certificate chain provided by the peer.
*
* @param chain the peer certificate chain
* @param authType the authentication type based on the client certificate
* @throws IllegalArgumentException if null or zero-length chain is passed in for the chain
* parameter.
* @throws CertificateException if the certificate chain is not trusted by this TrustManager.
*/
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType)
throws CertificateException {
checkPeerTrusted(chain, /* isCheckingClientCertificateChain= */ false);
}
/**
* Returns null because the accepted issuers are held in S2A and this | S2ATrustManager |
java | spring-projects__spring-boot | module/spring-boot-cache/src/test/java/org/springframework/boot/cache/autoconfigure/CacheAutoConfigurationTests.java | {
"start": 39925,
"end": 40347
} | class ____ {
@Bean
org.springframework.data.redis.cache.RedisCacheConfiguration customRedisCacheConfiguration() {
return org.springframework.data.redis.cache.RedisCacheConfiguration.defaultCacheConfig()
.entryTtl(java.time.Duration.ofSeconds(30))
.prefixCacheNameWith("bar");
}
}
@Configuration(proxyBeanMethods = false)
@Import(RedisConfiguration.class)
static | RedisWithCacheConfigurationConfiguration |
java | apache__dubbo | dubbo-compatible/src/test/java/org/apache/dubbo/metadata/annotation/processing/util/FieldUtilsTest.java | {
"start": 2912,
"end": 10239
} | class ____ extends AbstractAnnotationProcessingTest {
private TypeElement testType;
@Override
protected void addCompiledClasses(Set<Class<?>> classesToBeCompiled) {}
@Override
protected void beforeEach() {
testType = getType(TestServiceImpl.class);
}
@Test
void testGetDeclaredFields() {
TypeElement type = getType(Model.class);
List<VariableElement> fields = getDeclaredFields(type);
assertModelFields(fields);
fields = getDeclaredFields(type.asType());
assertModelFields(fields);
assertTrue(getDeclaredFields((Element) null).isEmpty());
assertTrue(getDeclaredFields((TypeMirror) null).isEmpty());
fields = getDeclaredFields(type, f -> "f".equals(f.getSimpleName().toString()));
assertEquals(1, fields.size());
assertEquals("f", fields.get(0).getSimpleName().toString());
}
@Test
void testGetAllDeclaredFields() {
TypeElement type = getType(Model.class);
List<VariableElement> fields = getAllDeclaredFields(type);
assertModelAllFields(fields);
assertTrue(getAllDeclaredFields((Element) null).isEmpty());
assertTrue(getAllDeclaredFields((TypeMirror) null).isEmpty());
fields = getAllDeclaredFields(type, f -> "f".equals(f.getSimpleName().toString()));
assertEquals(1, fields.size());
assertEquals("f", fields.get(0).getSimpleName().toString());
}
@Test
void testGetDeclaredField() {
TypeElement type = getType(Model.class);
testGetDeclaredField(type, "f", float.class);
testGetDeclaredField(type, "d", double.class);
testGetDeclaredField(type, "tu", TimeUnit.class);
testGetDeclaredField(type, "str", String.class);
testGetDeclaredField(type, "bi", BigInteger.class);
testGetDeclaredField(type, "bd", BigDecimal.class);
assertNull(getDeclaredField(type, "b"));
assertNull(getDeclaredField(type, "s"));
assertNull(getDeclaredField(type, "i"));
assertNull(getDeclaredField(type, "l"));
assertNull(getDeclaredField(type, "z"));
assertNull(getDeclaredField((Element) null, "z"));
assertNull(getDeclaredField((TypeMirror) null, "z"));
}
@Test
void testFindField() {
TypeElement type = getType(Model.class);
testFindField(type, "f", float.class);
testFindField(type, "d", double.class);
testFindField(type, "tu", TimeUnit.class);
testFindField(type, "str", String.class);
testFindField(type, "bi", BigInteger.class);
testFindField(type, "bd", BigDecimal.class);
testFindField(type, "b", byte.class);
testFindField(type, "s", short.class);
testFindField(type, "i", int.class);
testFindField(type, "l", long.class);
testFindField(type, "z", boolean.class);
assertNull(findField((Element) null, "f"));
assertNull(findField((Element) null, null));
assertNull(findField((TypeMirror) null, "f"));
assertNull(findField((TypeMirror) null, null));
assertNull(findField(type, null));
assertNull(findField(type.asType(), null));
}
@Test
void testIsEnumField() {
TypeElement type = getType(Color.class);
VariableElement field = findField(type, "RED");
assertTrue(isEnumMemberField(field));
field = findField(type, "YELLOW");
assertTrue(isEnumMemberField(field));
field = findField(type, "BLUE");
assertTrue(isEnumMemberField(field));
type = getType(Model.class);
field = findField(type, "f");
assertFalse(isEnumMemberField(field));
assertFalse(isEnumMemberField(null));
}
@Test
void testIsNonStaticField() {
TypeElement type = getType(Model.class);
assertTrue(isNonStaticField(findField(type, "f")));
type = getType(Color.class);
assertFalse(isNonStaticField(findField(type, "BLUE")));
}
@Test
void testIsField() {
TypeElement type = getType(Model.class);
assertTrue(isField(findField(type, "f")));
assertTrue(isField(findField(type, "f"), PRIVATE));
type = getType(Color.class);
assertTrue(isField(findField(type, "BLUE"), PUBLIC, STATIC, FINAL));
assertFalse(isField(null));
assertFalse(isField(null, PUBLIC, STATIC, FINAL));
}
@Test
void testGetNonStaticFields() {
TypeElement type = getType(Model.class);
List<VariableElement> fields = getNonStaticFields(type);
assertModelFields(fields);
fields = getNonStaticFields(type.asType());
assertModelFields(fields);
assertTrue(getAllNonStaticFields((Element) null).isEmpty());
assertTrue(getAllNonStaticFields((TypeMirror) null).isEmpty());
}
@Test
void testGetAllNonStaticFields() {
TypeElement type = getType(Model.class);
List<VariableElement> fields = getAllNonStaticFields(type);
assertModelAllFields(fields);
fields = getAllNonStaticFields(type.asType());
assertModelAllFields(fields);
assertTrue(getAllNonStaticFields((Element) null).isEmpty());
assertTrue(getAllNonStaticFields((TypeMirror) null).isEmpty());
}
private void assertModelFields(List<VariableElement> fields) {
assertEquals(6, fields.size());
assertEquals("d", fields.get(1).getSimpleName().toString());
assertEquals("tu", fields.get(2).getSimpleName().toString());
assertEquals("str", fields.get(3).getSimpleName().toString());
assertEquals("bi", fields.get(4).getSimpleName().toString());
assertEquals("bd", fields.get(5).getSimpleName().toString());
}
private void assertModelAllFields(List<VariableElement> fields) {
assertEquals(11, fields.size());
assertEquals("f", fields.get(0).getSimpleName().toString());
assertEquals("d", fields.get(1).getSimpleName().toString());
assertEquals("tu", fields.get(2).getSimpleName().toString());
assertEquals("str", fields.get(3).getSimpleName().toString());
assertEquals("bi", fields.get(4).getSimpleName().toString());
assertEquals("bd", fields.get(5).getSimpleName().toString());
assertEquals("b", fields.get(6).getSimpleName().toString());
assertEquals("s", fields.get(7).getSimpleName().toString());
assertEquals("i", fields.get(8).getSimpleName().toString());
assertEquals("l", fields.get(9).getSimpleName().toString());
assertEquals("z", fields.get(10).getSimpleName().toString());
}
private void testGetDeclaredField(TypeElement type, String fieldName, Type fieldType) {
VariableElement field = getDeclaredField(type, fieldName);
assertField(field, fieldName, fieldType);
}
private void testFindField(TypeElement type, String fieldName, Type fieldType) {
VariableElement field = findField(type, fieldName);
assertField(field, fieldName, fieldType);
}
private void assertField(VariableElement field, String fieldName, Type fieldType) {
assertEquals(fieldName, field.getSimpleName().toString());
assertEquals(fieldType.getTypeName(), field.asType().toString());
}
}
| FieldUtilsTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java | {
"start": 40058,
"end": 46045
} | class ____ extends Shell
implements CommandExecutor {
private String[] command;
private StringBuilder output;
public ShellCommandExecutor(String[] execString) {
this(execString, null);
}
public ShellCommandExecutor(String[] execString, File dir) {
this(execString, dir, null);
}
public ShellCommandExecutor(String[] execString, File dir,
Map<String, String> env) {
this(execString, dir, env , 0L);
}
public ShellCommandExecutor(String[] execString, File dir,
Map<String, String> env, long timeout) {
this(execString, dir, env , timeout, true);
}
/**
* Create a new instance of the ShellCommandExecutor to execute a command.
*
* @param execString The command to execute with arguments
* @param dir If not-null, specifies the directory which should be set
* as the current working directory for the command.
* If null, the current working directory is not modified.
* @param env If not-null, environment of the command will include the
* key-value pairs specified in the map. If null, the current
* environment is not modified.
* @param timeout Specifies the time in milliseconds, after which the
* command will be killed and the status marked as timed-out.
* If 0, the command will not be timed out.
* @param inheritParentEnv Indicates if the process should inherit the env
* vars from the parent process or not.
*/
public ShellCommandExecutor(String[] execString, File dir,
Map<String, String> env, long timeout, boolean inheritParentEnv) {
command = execString.clone();
if (dir != null) {
setWorkingDirectory(dir);
}
if (env != null) {
setEnvironment(env);
}
timeOutInterval = timeout;
this.inheritParentEnv = inheritParentEnv;
}
/**
* Returns the timeout value set for the executor's sub-commands.
* @return The timeout value in milliseconds
*/
@VisibleForTesting
public long getTimeoutInterval() {
return timeOutInterval;
}
/**
* Execute the shell command.
* @throws IOException if the command fails, or if the command is
* not well constructed.
*/
public void execute() throws IOException {
for (String s : command) {
if (s == null) {
throw new IOException("(null) entry in command string: "
+ StringUtils.join(" ", command));
}
}
this.run();
}
@Override
public String[] getExecString() {
return command;
}
@Override
protected void parseExecResult(BufferedReader lines) throws IOException {
output = new StringBuilder();
char[] buf = new char[512];
int nRead;
while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
output.append(buf, 0, nRead);
}
}
/** Get the output of the shell command. */
public String getOutput() {
return (output == null) ? "" : output.toString();
}
/**
* Returns the commands of this instance.
* Arguments with spaces in are presented with quotes round; other
* arguments are presented raw
*
* @return a string representation of the object.
*/
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
String[] args = getExecString();
for (String s : args) {
if (s.indexOf(' ') >= 0) {
builder.append('"').append(s).append('"');
} else {
builder.append(s);
}
builder.append(' ');
}
return builder.toString();
}
@Override
public void close() {
}
}
/**
* To check if the passed script to shell command executor timed out or
* not.
*
* @return if the script timed out.
*/
public boolean isTimedOut() {
return timedOut.get();
}
/**
* Declare that the command has timed out.
*
*/
private void setTimedOut() {
this.timedOut.set(true);
}
/**
* Static method to execute a shell command.
* Covers most of the simple cases without requiring the user to implement
* the <code>Shell</code> interface.
* @param cmd shell command to execute.
* @return the output of the executed command.
* @throws IOException raised on errors performing I/O.
*/
public static String execCommand(String ... cmd) throws IOException {
return execCommand(null, cmd, 0L);
}
/**
* Static method to execute a shell command.
* Covers most of the simple cases without requiring the user to implement
* the <code>Shell</code> interface.
* @param env the map of environment key=value
* @param cmd shell command to execute.
* @param timeout time in milliseconds after which script should be marked timeout
* @return the output of the executed command.
* @throws IOException on any problem.
*/
public static String execCommand(Map<String, String> env, String[] cmd,
long timeout) throws IOException {
ShellCommandExecutor exec = new ShellCommandExecutor(cmd, null, env,
timeout);
exec.execute();
return exec.getOutput();
}
/**
* Static method to execute a shell command.
* Covers most of the simple cases without requiring the user to implement
* the <code>Shell</code> interface.
* @param env the map of environment key=value
* @param cmd shell command to execute.
* @return the output of the executed command.
* @throws IOException on any problem.
*/
public static String execCommand(Map<String,String> env, String ... cmd)
throws IOException {
return execCommand(env, cmd, 0L);
}
/**
* Timer which is used to timeout scripts spawned off by shell.
*/
private static | ShellCommandExecutor |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/jackson2/UserMixin.java | {
"start": 1036,
"end": 2147
} | class ____ register a
* custom deserializer {@link UserDeserializer} to deserialize User object successfully.
* In order to use this mixin you need to register two more mixin classes in your
* ObjectMapper configuration.
* <ol>
* <li>{@link SimpleGrantedAuthorityMixin}</li>
* <li>{@link UnmodifiableSetMixin}</li>
* </ol>
* <pre>
* ObjectMapper mapper = new ObjectMapper();
* mapper.registerModule(new CoreJackson2Module());
* </pre>
*
* @author Jitendra Singh
* @since 4.2
* @see UserDeserializer
* @see CoreJackson2Module
* @see SecurityJackson2Modules
* @deprecated as of 7.0 in favor of
* {@code org.springframework.security.jackson.UserMixin} based on Jackson 3
*/
@SuppressWarnings("removal")
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY)
@JsonDeserialize(using = UserDeserializer.class)
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY, getterVisibility = JsonAutoDetect.Visibility.NONE,
isGetterVisibility = JsonAutoDetect.Visibility.NONE)
@JsonIgnoreProperties(ignoreUnknown = true)
@Deprecated(forRemoval = true)
abstract | also |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeReconfiguration.java | {
"start": 5738,
"end": 37439
} | class ____ {
private static final String DATA_DIR = MiniDFSCluster.getBaseDirectory()
+ "data";
private final static InetSocketAddress NN_ADDR = new InetSocketAddress(
"localhost", 5020);
private final int NUM_NAME_NODE = 1;
private final int NUM_DATA_NODE = 10;
private MiniDFSCluster cluster;
private static long counter = 0;
@BeforeEach
public void Setup() throws IOException {
startDFSCluster(NUM_NAME_NODE, NUM_DATA_NODE);
}
@AfterEach
public void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
File dir = new File(DATA_DIR);
if (dir.exists())
assertTrue(FileUtil.fullyDelete(dir),
"Cannot delete data-node dirs");
}
private void startDFSCluster(int numNameNodes, int numDataNodes)
throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(DFS_DATANODE_PEER_STATS_ENABLED_KEY, true);
MiniDFSNNTopology nnTopology = MiniDFSNNTopology
.simpleFederatedTopology(numNameNodes);
cluster = new MiniDFSCluster.Builder(conf).nnTopology(nnTopology)
.numDataNodes(numDataNodes).build();
cluster.waitActive();
}
/**
* Starts an instance of DataNode
*
* @throws IOException
*/
public DataNode[] createDNsForTest(int numDateNode) throws IOException {
Configuration conf = new HdfsConfiguration();
conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, DATA_DIR);
conf.set(DFSConfigKeys.DFS_DATANODE_ADDRESS_KEY, "0.0.0.0:0");
conf.set(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY, "0.0.0.0:0");
conf.set(DFSConfigKeys.DFS_DATANODE_IPC_ADDRESS_KEY, "0.0.0.0:0");
conf.setInt(CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0);
DataNode[] result = new DataNode[numDateNode];
for (int i = 0; i < numDateNode; i++) {
result[i] = InternalDataNodeTestUtils.startDNWithMockNN(conf, NN_ADDR, DATA_DIR);
}
return result;
}
@Test
public void testMaxConcurrentMoversReconfiguration()
throws ReconfigurationException, IOException {
int maxConcurrentMovers = 10;
for (int i = 0; i < NUM_DATA_NODE; i++) {
DataNode dn = cluster.getDataNodes().get(i);
// try invalid values
try {
dn.reconfigureProperty(
DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(
DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
String.valueOf(-1));
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof IllegalArgumentException,
"expecting IllegalArgumentException");
}
try {
dn.reconfigureProperty(
DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
String.valueOf(0));
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof IllegalArgumentException,
"expecting IllegalArgumentException");
}
// change properties
dn.reconfigureProperty(DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
String.valueOf(maxConcurrentMovers));
// verify change
assertEquals(maxConcurrentMovers, dn.xserver.balanceThrottler.getMaxConcurrentMovers(),
String.format("%s has wrong value", DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY));
assertEquals(maxConcurrentMovers,
Integer.parseInt(dn.getConf().get(DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY)),
String.format("%s has wrong value", DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY));
// revert to default
dn.reconfigureProperty(DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
null);
// verify default
assertEquals(DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_DEFAULT,
dn.xserver.balanceThrottler.getMaxConcurrentMovers(),
String.format("%s has wrong value", DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY));
assertEquals(null, dn.getConf().get(DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY),
String.format("expect %s is not configured",
DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY));
}
}
@Test
public void testAcquireWithMaxConcurrentMoversGreaterThanDefault()
throws IOException, ReconfigurationException {
final DataNode[] dns = createDNsForTest(1);
try {
testAcquireOnMaxConcurrentMoversReconfiguration(dns[0], 10);
} finally {
dns[0].shutdown();
}
}
@Test
public void testAcquireWithMaxConcurrentMoversLessThanDefault()
throws IOException, ReconfigurationException {
final DataNode[] dns = createDNsForTest(1);
try {
testAcquireOnMaxConcurrentMoversReconfiguration(dns[0], 3);
} finally {
dns[0].shutdown();
}
}
/**
* Simulates a scenario where the DataNode has been reconfigured with fewer
* mover threads, but all of the current treads are busy and therefore the
* DataNode is unable to honor this request within a reasonable amount of
* time. The DataNode eventually gives up and returns a flag indicating that
* the request was not honored.
*/
@Test
public void testFailedDecreaseConcurrentMovers()
throws IOException, ReconfigurationException {
final DataNode[] dns = createDNsForTest(1);
final DataNode dataNode = dns[0];
try {
// Set the current max to 2
dataNode.xserver.updateBalancerMaxConcurrentMovers(2);
// Simulate grabbing 2 threads
dataNode.xserver.balanceThrottler.acquire();
dataNode.xserver.balanceThrottler.acquire();
dataNode.xserver.setMaxReconfigureWaitTime(1);
// Attempt to set new maximum to 1
final boolean success =
dataNode.xserver.updateBalancerMaxConcurrentMovers(1);
assertFalse(success);
} finally {
dataNode.shutdown();
}
}
/**
* Test with invalid configuration.
*/
@Test
public void testFailedDecreaseConcurrentMoversReconfiguration()
throws IOException, ReconfigurationException {
assertThrows(ReconfigurationException.class, () -> {
final DataNode[] dns = createDNsForTest(1);
final DataNode dataNode = dns[0];
try {
// Set the current max to 2
dataNode.xserver.updateBalancerMaxConcurrentMovers(2);
// Simulate grabbing 2 threads
dataNode.xserver.balanceThrottler.acquire();
dataNode.xserver.balanceThrottler.acquire();
dataNode.xserver.setMaxReconfigureWaitTime(1);
// Now try reconfigure maximum downwards with threads released
dataNode.reconfigurePropertyImpl(
DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY, "1");
} catch (ReconfigurationException e) {
assertEquals(DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
e.getProperty());
assertEquals("1", e.getNewValue());
throw e;
} finally {
dataNode.shutdown();
}
});
}
private void testAcquireOnMaxConcurrentMoversReconfiguration(
DataNode dataNode, int maxConcurrentMovers) throws IOException,
ReconfigurationException {
final int defaultMaxThreads = dataNode.getConf().getInt(
DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
DFSConfigKeys.DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_DEFAULT);
/** Test that the default setup is working */
for (int i = 0; i < defaultMaxThreads; i++) {
assertEquals(true, dataNode.xserver.balanceThrottler.acquire(),
"should be able to get thread quota");
}
assertEquals(false, dataNode.xserver.balanceThrottler.acquire(),
"should not be able to get thread quota");
// Give back the threads
for (int i = 0; i < defaultMaxThreads; i++) {
dataNode.xserver.balanceThrottler.release();
}
/** Test that the change is applied correctly */
// change properties
dataNode.reconfigureProperty(
DFS_DATANODE_BALANCE_MAX_NUM_CONCURRENT_MOVES_KEY,
String.valueOf(maxConcurrentMovers));
assertEquals(maxConcurrentMovers, dataNode.xserver.balanceThrottler.getMaxConcurrentMovers(),
"thread quota is wrong");
for (int i = 0; i < maxConcurrentMovers; i++) {
assertEquals(true, dataNode.xserver.balanceThrottler.acquire(),
"should be able to get thread quota");
}
assertEquals(false, dataNode.xserver.balanceThrottler.acquire(),
"should not be able to get thread quota");
}
  /**
   * Verifies runtime reconfiguration of the block-report settings
   * (interval, split threshold and initial delay) on every DataNode:
   * invalid values are rejected with the expected cause, valid changes
   * propagate to each {@code BPServiceActor}'s scheduler, and reverting
   * a key to {@code null} restores the default and clears it from conf.
   *
   * @throws ReconfigurationException on an unexpected reconfiguration failure
   */
  @Test
  public void testBlockReportIntervalReconfiguration()
      throws ReconfigurationException {
    int blockReportInterval = 300 * 1000;
    String[] blockReportParameters = {
        DFS_BLOCKREPORT_INTERVAL_MSEC_KEY,
        DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY,
        DFS_BLOCKREPORT_INITIAL_DELAY_KEY};
    for (int i = 0; i < NUM_DATA_NODE; i++) {
      DataNode dn = cluster.getDataNodes().get(i);
      BlockPoolManager blockPoolManager = dn.getBlockPoolManager();
      // Try invalid values: non-numeric input must be refused for all three keys.
      for (String blockReportParameter : blockReportParameters) {
        try {
          dn.reconfigureProperty(blockReportParameter, "text");
          fail("ReconfigurationException expected");
        } catch (ReconfigurationException expected) {
          assertTrue(expected.getCause() instanceof NumberFormatException,
              "expecting NumberFormatException");
        }
      }
      // Negative values are refused for the interval and the split threshold...
      try {
        dn.reconfigureProperty(DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, String.valueOf(-1));
        fail("ReconfigurationException expected");
      } catch (ReconfigurationException expected) {
        assertTrue(expected.getCause() instanceof IllegalArgumentException,
            "expecting IllegalArgumentException");
      }
      try {
        dn.reconfigureProperty(DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY, String.valueOf(-1));
        fail("ReconfigurationException expected");
      } catch (ReconfigurationException expected) {
        assertTrue(expected.getCause() instanceof IllegalArgumentException,
            "expecting IllegalArgumentException");
      }
      // ...but a negative initial delay is accepted and results in a delay of 0.
      dn.reconfigureProperty(DFS_BLOCKREPORT_INITIAL_DELAY_KEY, String.valueOf(-1));
      assertEquals(0, dn.getDnConf().initialBlockReportDelayMs);
      // Change properties and verify the change.
      dn.reconfigureProperty(DFS_BLOCKREPORT_INTERVAL_MSEC_KEY,
          String.valueOf(blockReportInterval));
      // The new interval must be visible on every actor's scheduler.
      for (BPOfferService bpos : blockPoolManager.getAllNamenodeThreads()) {
        if (bpos != null) {
          for (BPServiceActor actor : bpos.getBPServiceActors()) {
            assertEquals(blockReportInterval, actor.getScheduler().getBlockReportIntervalMs(),
                String.format("%s has wrong value", DFS_BLOCKREPORT_INTERVAL_MSEC_KEY));
          }
        }
      }
      dn.reconfigureProperty(DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY, String.valueOf(123));
      assertEquals(123, dn.getDnConf().blockReportSplitThreshold);
      // The initial delay is configured in seconds but stored in milliseconds.
      dn.reconfigureProperty(DFS_BLOCKREPORT_INITIAL_DELAY_KEY, "123");
      assertEquals(123000, dn.getDnConf().initialBlockReportDelayMs);
      // Revert to default and verify default.
      dn.reconfigureProperty(DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, null);
      for (BPOfferService bpos : blockPoolManager.getAllNamenodeThreads()) {
        if (bpos != null) {
          for (BPServiceActor actor : bpos.getBPServiceActors()) {
            assertEquals(DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT,
                actor.getScheduler().getBlockReportIntervalMs(),
                String.format("%s has wrong value", DFS_BLOCKREPORT_INTERVAL_MSEC_KEY));
          }
        }
      }
      // Reverting must also remove the key from the live configuration.
      assertNull(dn.getConf().get(DFS_BLOCKREPORT_INTERVAL_MSEC_KEY),
          String.format("expect %s is not configured", DFS_BLOCKREPORT_INTERVAL_MSEC_KEY));
      dn.reconfigureProperty(DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY, null);
      assertNull(dn.getConf().get(DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY),
          String.format("expect %s is not configured", DFS_BLOCKREPORT_SPLIT_THRESHOLD_KEY));
      dn.reconfigureProperty(DFS_BLOCKREPORT_INITIAL_DELAY_KEY, null);
      assertNull(dn.getConf().get(DFS_BLOCKREPORT_INITIAL_DELAY_KEY),
          String.format("expect %s is not configured", DFS_BLOCKREPORT_INITIAL_DELAY_KEY));
    }
  }
@Test
public void testDataXceiverReconfiguration()
throws ReconfigurationException {
for (int i = 0; i < NUM_DATA_NODE; i++) {
DataNode dn = cluster.getDataNodes().get(i);
// Try invalid values.
try {
dn.reconfigureProperty(DFS_DATANODE_MAX_RECEIVER_THREADS_KEY, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(DFS_DATANODE_MAX_RECEIVER_THREADS_KEY, String.valueOf(-1));
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof IllegalArgumentException,
"expecting IllegalArgumentException");
}
try {
dn.reconfigureProperty(DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
// Change properties and verify change.
dn.reconfigureProperty(DFS_DATANODE_MAX_RECEIVER_THREADS_KEY, String.valueOf(123));
assertEquals(123, dn.getXferServer().getMaxXceiverCount(),
String.format("%s has wrong value", DFS_DATANODE_MAX_RECEIVER_THREADS_KEY));
dn.reconfigureProperty(DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY,
String.valueOf(1000));
assertEquals(1000, dn.getXferServer().getTransferThrottler().getBandwidth(),
String.format("%s has wrong value", DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY));
dn.reconfigureProperty(DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY,
String.valueOf(1000));
assertEquals(1000, dn.getXferServer().getWriteThrottler().getBandwidth(),
String.format("%s has wrong value", DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY));
dn.reconfigureProperty(DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY,
String.valueOf(1000));
assertEquals(1000, dn.getXferServer().getReadThrottler().getBandwidth(),
String.format("%s has wrong value", DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY));
// Revert to default.
dn.reconfigureProperty(DFS_DATANODE_MAX_RECEIVER_THREADS_KEY, null);
assertEquals(DFS_DATANODE_MAX_RECEIVER_THREADS_DEFAULT,
dn.getXferServer().getMaxXceiverCount(),
String.format("%s has wrong value", DFS_DATANODE_MAX_RECEIVER_THREADS_KEY));
assertNull(dn.getConf().get(DFS_DATANODE_MAX_RECEIVER_THREADS_KEY),
String.format("expect %s is not configured", DFS_DATANODE_MAX_RECEIVER_THREADS_KEY));
dn.reconfigureProperty(DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY, null);
assertEquals(null, dn.getXferServer().getTransferThrottler(),
String.format("%s has wrong value", DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY));
assertNull(dn.getConf().get(DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY), String
.format("expect %s is not configured", DFS_DATANODE_DATA_TRANSFER_BANDWIDTHPERSEC_KEY));
dn.reconfigureProperty(DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY, null);
assertEquals(null, dn.getXferServer().getWriteThrottler(),
String.format("%s has wrong value", DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY));
assertNull(dn.getConf().get(DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY), String
.format("expect %s is not configured", DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY));
dn.reconfigureProperty(DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY, null);
assertEquals(null, dn.getXferServer().getReadThrottler(),
String.format("%s has wrong value", DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY));
assertNull(dn.getConf().get(DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY),
String.format("expect %s is not configured", DFS_DATANODE_DATA_READ_BANDWIDTHPERSEC_KEY));
}
}
@Test
public void testCacheReportReconfiguration()
throws ReconfigurationException {
int cacheReportInterval = 300 * 1000;
for (int i = 0; i < NUM_DATA_NODE; i++) {
DataNode dn = cluster.getDataNodes().get(i);
// Try invalid values.
try {
dn.reconfigureProperty(DFS_CACHEREPORT_INTERVAL_MSEC_KEY, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(DFS_CACHEREPORT_INTERVAL_MSEC_KEY, String.valueOf(-1));
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof IllegalArgumentException,
"expecting IllegalArgumentException");
}
// Change properties.
dn.reconfigureProperty(DFS_CACHEREPORT_INTERVAL_MSEC_KEY,
String.valueOf(cacheReportInterval));
// Verify change.
assertEquals(cacheReportInterval, dn.getDnConf().getCacheReportInterval(),
String.format("%s has wrong value", DFS_CACHEREPORT_INTERVAL_MSEC_KEY));
// Revert to default.
dn.reconfigureProperty(DFS_CACHEREPORT_INTERVAL_MSEC_KEY, null);
assertEquals(DFS_CACHEREPORT_INTERVAL_MSEC_DEFAULT, dn.getDnConf().getCacheReportInterval(),
String.format("%s has wrong value", DFS_CACHEREPORT_INTERVAL_MSEC_KEY));
assertNull(dn.getConf().get(DFS_CACHEREPORT_INTERVAL_MSEC_KEY),
String.format("expect %s is not configured", DFS_CACHEREPORT_INTERVAL_MSEC_KEY));
}
}
@Test
public void testSlowPeerParameters() throws Exception {
String[] slowPeersParameters = {
DFS_DATANODE_MIN_OUTLIER_DETECTION_NODES_KEY,
DFS_DATANODE_SLOWPEER_LOW_THRESHOLD_MS_KEY,
DFS_DATANODE_PEER_METRICS_MIN_OUTLIER_DETECTION_SAMPLES_KEY};
for (int i = 0; i < NUM_DATA_NODE; i++) {
DataNode dn = cluster.getDataNodes().get(i);
// Try invalid values.
LambdaTestUtils.intercept(ReconfigurationException.class,
"Could not change property dfs.datanode.peer.stats.enabled from 'true' to 'text'",
() -> dn.reconfigureProperty(DFS_DATANODE_PEER_STATS_ENABLED_KEY, "text"));
for (String parameter : slowPeersParameters) {
try {
dn.reconfigureProperty(parameter, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(parameter, String.valueOf(-1));
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof IllegalArgumentException,
"expecting IllegalArgumentException");
}
}
// Change and verify properties.
dn.reconfigureProperty(DFS_DATANODE_PEER_STATS_ENABLED_KEY, "false");
assertFalse(dn.getDnConf().peerStatsEnabled);
// Reset DFS_DATANODE_PEER_STATS_ENABLED_KEY to true.
dn.reconfigureProperty(DFS_DATANODE_PEER_STATS_ENABLED_KEY, "true");
for (String parameter : slowPeersParameters) {
dn.reconfigureProperty(parameter, "123");
}
assertEquals(123, dn.getPeerMetrics().getMinOutlierDetectionNodes());
assertEquals(123, dn.getPeerMetrics().getLowThresholdMs());
assertEquals(123, dn.getPeerMetrics().getMinOutlierDetectionSamples());
assertEquals(123, dn.getPeerMetrics().getSlowNodeDetector().getMinOutlierDetectionNodes());
assertEquals(123, dn.getPeerMetrics().getSlowNodeDetector().getLowThresholdMs());
// Revert to default and verify.
dn.reconfigureProperty(DFS_DATANODE_PEER_STATS_ENABLED_KEY, null);
assertEquals(null, dn.getConf().get(DFS_DATANODE_PEER_STATS_ENABLED_KEY),
String.format("expect %s is not configured", DFS_DATANODE_PEER_STATS_ENABLED_KEY));
// Reset DFS_DATANODE_PEER_STATS_ENABLED_KEY to true.
dn.reconfigureProperty(DFS_DATANODE_PEER_STATS_ENABLED_KEY, "true");
for (String parameter : slowPeersParameters) {
dn.reconfigureProperty(parameter, null);
}
assertEquals(null, dn.getConf().get(DFS_DATANODE_MIN_OUTLIER_DETECTION_NODES_KEY), String
.format("expect %s is not configured", DFS_DATANODE_MIN_OUTLIER_DETECTION_NODES_KEY));
assertEquals(null, dn.getConf().get(DFS_DATANODE_SLOWPEER_LOW_THRESHOLD_MS_KEY),
String.format("expect %s is not configured", DFS_DATANODE_SLOWPEER_LOW_THRESHOLD_MS_KEY));
assertEquals(null,
dn.getConf().get(DFS_DATANODE_PEER_METRICS_MIN_OUTLIER_DETECTION_SAMPLES_KEY),
String.format("expect %s is not configured",
DFS_DATANODE_PEER_METRICS_MIN_OUTLIER_DETECTION_SAMPLES_KEY));
assertEquals(dn.getPeerMetrics().getSlowNodeDetector().getMinOutlierDetectionNodes(),
DFS_DATANODE_MIN_OUTLIER_DETECTION_NODES_DEFAULT);
assertEquals(dn.getPeerMetrics().getSlowNodeDetector().getLowThresholdMs(),
DFS_DATANODE_SLOWPEER_LOW_THRESHOLD_MS_DEFAULT);
}
}
@Test
public void testSlowDiskParameters() throws ReconfigurationException, IOException {
String[] slowDisksParameters1 = {
DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_KEY,
DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_KEY};
for (int i = 0; i < NUM_DATA_NODE; i++) {
DataNode dn = cluster.getDataNodes().get(i);
// Try invalid values.
try {
dn.reconfigureProperty(DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY, "text");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY, "text");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
// Enable disk stats, make DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY > 0.
dn.reconfigureProperty(DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY, "1");
for (String parameter : slowDisksParameters1) {
try {
dn.reconfigureProperty(parameter, "text");
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof NumberFormatException,
"expecting NumberFormatException");
}
try {
dn.reconfigureProperty(parameter, String.valueOf(-1));
fail("ReconfigurationException expected");
} catch (ReconfigurationException expected) {
assertTrue(expected.getCause() instanceof IllegalArgumentException,
"expecting IllegalArgumentException");
}
}
// Change and verify properties.
dn.reconfigureProperty(DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY, "1ms");
assertEquals(1, dn.getDnConf().outliersReportIntervalMs);
BlockPoolManager blockPoolManager = new BlockPoolManager(dn);
blockPoolManager.refreshNamenodes(dn.getConf());
for (BPOfferService bpos : blockPoolManager.getAllNamenodeThreads()) {
if (bpos != null) {
for (BPServiceActor actor : bpos.getBPServiceActors()) {
assertEquals(1, actor.getScheduler().getOutliersReportIntervalMs(),
String.format("%s has wrong value", DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY));
}
}
}
String[] slowDisksParameters2 = {
DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY,
DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_KEY,
DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_KEY,
DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_KEY};
for (String parameter : slowDisksParameters2) {
dn.reconfigureProperty(parameter, "99");
}
// Assert diskMetrics.
assertEquals(99, dn.getDiskMetrics().getMinOutlierDetectionDisks());
assertEquals(99, dn.getDiskMetrics().getLowThresholdMs());
assertEquals(99, dn.getDiskMetrics().getMaxSlowDisksToExclude());
// Assert dnConf.
assertTrue(dn.getDnConf().diskStatsEnabled);
// Assert profilingEventHook.
assertTrue(dn.getFileIoProvider().getProfilingEventHook().getDiskStatsEnabled());
assertEquals((int) ((double) 99 / 100 * Integer.MAX_VALUE),
dn.getFileIoProvider().getProfilingEventHook().getSampleRangeMax());
// Assert slowDiskDetector.
assertEquals(99, dn.getDiskMetrics().getSlowDiskDetector().getMinOutlierDetectionNodes());
assertEquals(99, dn.getDiskMetrics().getSlowDiskDetector().getLowThresholdMs());
// Revert to default and verify.
dn.reconfigureProperty(DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY, null);
assertEquals(null, dn.getConf().get(DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY),
String.format("expect %s is not configured", DFS_DATANODE_OUTLIERS_REPORT_INTERVAL_KEY));
dn.reconfigureProperty(DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY, null);
assertEquals(null, dn.getConf().get(DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY),
String.format("expect %s is not configured",
DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY));
assertFalse(dn.getFileIoProvider().getProfilingEventHook().getDiskStatsEnabled());
assertEquals(0, dn.getFileIoProvider().getProfilingEventHook().getSampleRangeMax());
// Enable disk stats, make DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY > 0.
dn.reconfigureProperty(DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY, "1");
dn.reconfigureProperty(DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_KEY, null);
dn.reconfigureProperty(DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_KEY, null);
dn.reconfigureProperty(DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_KEY, null);
assertEquals(null, dn.getConf().get(DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_KEY), String
.format("expect %s is not configured", DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_KEY));
assertEquals(null, dn.getConf().get(DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_KEY),
String.format("expect %s is not configured", DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_KEY));
assertEquals(null, dn.getConf().get(DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_KEY),
String.format("expect %s is not configured", DFS_DATANODE_MAX_SLOWDISKS_TO_EXCLUDE_KEY));
assertEquals(DFS_DATANODE_MIN_OUTLIER_DETECTION_DISKS_DEFAULT,
dn.getDiskMetrics().getSlowDiskDetector().getMinOutlierDetectionNodes());
assertEquals(DFS_DATANODE_SLOWDISK_LOW_THRESHOLD_MS_DEFAULT,
dn.getDiskMetrics().getSlowDiskDetector().getLowThresholdMs());
}
}
  /**
   * Verifies runtime reconfiguration of the DFS disk-usage refresh parameters
   * ({@code fs.du.interval} and the getSpaceUsed jitter) on every DataNode:
   * invalid values are rejected with the expected cause, valid values reach
   * each block pool slice's {@code CachingGetSpaceUsed}, and reverting to
   * {@code null} restores the defaults and clears the keys.
   *
   * @throws ReconfigurationException on an unexpected reconfiguration failure
   */
  @Test
  public void testDfsUsageParameters() throws ReconfigurationException {
    String[] dfsUsageParameters = {
        FS_DU_INTERVAL_KEY,
        FS_GETSPACEUSED_JITTER_KEY};
    for (int i = 0; i < NUM_DATA_NODE; i++) {
      DataNode dn = cluster.getDataNodes().get(i);
      // Try invalid values: each key refuses non-numeric and negative input.
      for (String parameter : dfsUsageParameters) {
        try {
          dn.reconfigureProperty(parameter, "text");
          fail("ReconfigurationException expected");
        } catch (ReconfigurationException expected) {
          assertTrue(expected.getCause() instanceof NumberFormatException,
              "expecting NumberFormatException");
        }
        try {
          dn.reconfigureProperty(parameter, String.valueOf(-1));
          fail("ReconfigurationException expected");
        } catch (ReconfigurationException expected) {
          assertTrue(expected.getCause() instanceof IllegalArgumentException,
              "expecting IllegalArgumentException");
        }
      }
      // Change and verify properties.
      for (String parameter : dfsUsageParameters) {
        dn.reconfigureProperty(parameter, "99");
      }
      // Only CachingGetSpaceUsed implementations expose a refresh interval
      // and jitter, so other GetSpaceUsed implementations are skipped.
      List<FsVolumeImpl> volumeList = dn.data.getVolumeList();
      for (FsVolumeImpl fsVolume : volumeList) {
        Map<String, BlockPoolSlice> blockPoolSlices = fsVolume.getBlockPoolSlices();
        for (Map.Entry<String, BlockPoolSlice> entry : blockPoolSlices.entrySet()) {
          GetSpaceUsed dfsUsage = entry.getValue().getDfsUsage();
          if (dfsUsage instanceof CachingGetSpaceUsed) {
            assertEquals(99,
                ((CachingGetSpaceUsed) entry.getValue().getDfsUsage()).getRefreshInterval());
            assertEquals(99, ((CachingGetSpaceUsed) entry.getValue().getDfsUsage()).getJitter());
          }
        }
      }
      // Revert to default and verify.
      for (String parameter : dfsUsageParameters) {
        dn.reconfigureProperty(parameter, null);
      }
      for (FsVolumeImpl fsVolume : volumeList) {
        Map<String, BlockPoolSlice> blockPoolSlices = fsVolume.getBlockPoolSlices();
        for (Map.Entry<String, BlockPoolSlice> entry : blockPoolSlices.entrySet()) {
          GetSpaceUsed dfsUsage = entry.getValue().getDfsUsage();
          if (dfsUsage instanceof CachingGetSpaceUsed) {
            assertEquals(FS_DU_INTERVAL_DEFAULT,
                ((CachingGetSpaceUsed) entry.getValue().getDfsUsage()).getRefreshInterval(),
                String.format("expect %s is not configured", FS_DU_INTERVAL_KEY))
}
public static | TestDataNodeReconfiguration |
java | google__guice | core/test/com/google/inject/InjectorTest.java | {
"start": 9888,
"end": 9994
} | class ____ annotates",
"while locating ",
Chicken.class.getName());
}
}
static | it |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IdentifierNameTest.java | {
"start": 2335,
"end": 2578
} | class ____ {
private static int Foo;
private static int FooBar;
private static int Bar_Foo;
}
""")
.addOutputLines(
"Test.java",
"""
| Test |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java | {
"start": 184919,
"end": 190349
} | class ____ implements ArgumentMatcher<IngestDocument> {
private final IngestDocument ingestDocument;
IngestDocumentMatcher(String index, String type, String id, long version, VersionType versionType, Map<String, Object> source) {
this.ingestDocument = new IngestDocument(index, id, version, null, versionType, source);
}
@Override
public boolean matches(IngestDocument other) {
// ingest metadata and IngestCtxMap will not be the same (timestamp differs every time)
return Objects.equals(ingestDocument.getSource(), other.getSource())
&& Objects.equals(ingestDocument.getMetadata().getMap(), other.getMetadata().getMap());
}
}
private void assertProcessorStats(
int processor,
IngestStats stats,
ProjectId projectId,
String pipelineId,
long count,
long failed,
long time
) {
assertStats(stats.processorStats().get(projectId).get(pipelineId).get(processor).stats(), count, failed, time);
}
private void assertPipelineStats(
List<IngestStats.PipelineStat> pipelineStats,
ProjectId projectId,
String pipelineId,
long count,
long failed,
long time,
long ingested,
long produced
) {
var pipeline = getPipeline(pipelineStats, projectId, pipelineId);
assertStats(pipeline.stats(), count, failed, time);
assertByteStats(pipeline.byteStats(), ingested, produced);
}
private void assertStats(IngestStats.Stats stats, long count, long failed, long time) {
assertThat(stats.ingestCount(), equalTo(count));
assertThat(stats.ingestCurrent(), equalTo(0L));
assertThat(stats.ingestFailedCount(), equalTo(failed));
assertThat(stats.ingestTimeInMillis(), greaterThanOrEqualTo(time));
}
private void assertByteStats(IngestStats.ByteStats byteStats, long ingested, long produced) {
assertThat(byteStats.bytesIngested(), equalTo(ingested));
assertThat(byteStats.bytesProduced(), equalTo(produced));
}
private IngestStats.PipelineStat getPipeline(List<IngestStats.PipelineStat> pipelineStats, ProjectId projectId, String id) {
return pipelineStats.stream().filter(p1 -> p1.projectId().equals(projectId) && p1.pipelineId().equals(id)).findFirst().orElse(null);
}
private static List<IngestService.PipelineClusterStateUpdateTask> oneTask(ProjectId projectId, DeletePipelineRequest request) {
return List.of(
new IngestService.DeletePipelineClusterStateUpdateTask(projectId, ActionTestUtils.assertNoFailureListener(t -> {}), request)
);
}
private static ClusterState executeDelete(ProjectId projectId, DeletePipelineRequest request, ClusterState clusterState) {
try {
return executeAndAssertSuccessful(clusterState, IngestService.PIPELINE_TASK_EXECUTOR, oneTask(projectId, request));
} catch (Exception e) {
throw new AssertionError(e);
}
}
private static void executeFailingDelete(ProjectId projectId, DeletePipelineRequest request, ClusterState clusterState)
throws Exception {
ClusterStateTaskExecutorUtils.executeAndThrowFirstFailure(
clusterState,
IngestService.PIPELINE_TASK_EXECUTOR,
oneTask(projectId, request)
);
}
private static List<IngestService.PipelineClusterStateUpdateTask> oneTask(ProjectId projectId, PutPipelineRequest request) {
return oneTaskWithInstantSource(projectId, request, Instant::now);
}
private static List<IngestService.PipelineClusterStateUpdateTask> oneTaskWithInstantSource(
final ProjectId projectId,
final PutPipelineRequest request,
final InstantSource instantSource
) {
return List.of(
new IngestService.PutPipelineClusterStateUpdateTask(
projectId,
ActionTestUtils.assertNoFailureListener(t -> {}),
request,
instantSource
)
);
}
private static ClusterState executePut(PutPipelineRequest request, ClusterState clusterState) {
return executePut(DEFAULT_PROJECT_ID, request, clusterState);
}
private static ClusterState executePut(ProjectId projectId, PutPipelineRequest request, ClusterState clusterState) {
return executePutWithInstantSource(projectId, request, clusterState, Instant::now);
}
private static ClusterState executePutWithInstantSource(
final ProjectId projectId,
final PutPipelineRequest request,
final ClusterState clusterState,
final InstantSource instantSource
) {
try {
return executeAndAssertSuccessful(
clusterState,
IngestService.PIPELINE_TASK_EXECUTOR,
oneTaskWithInstantSource(projectId, request, instantSource)
);
} catch (Exception e) {
throw new AssertionError(e);
}
}
private static void executeFailingPut(PutPipelineRequest request, ClusterState clusterState) throws Exception {
ClusterStateTaskExecutorUtils.executeAndThrowFirstFailure(
clusterState,
IngestService.PIPELINE_TASK_EXECUTOR,
oneTask(DEFAULT_PROJECT_ID, request)
);
}
}
| IngestDocumentMatcher |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestSelectUnsupported.java | {
"start": 1644,
"end": 3437
} | class ____ extends AbstractS3ATestBase {
/**
* S3 Select SQL statement.
*/
private static final String STATEMENT = "SELECT *" +
" FROM S3Object s" +
" WHERE s._1 = 'foo'";
/**
* A {@code .must(SELECT_SQL, _)} option MUST raise {@code UnsupportedOperationException}.
*/
@Test
public void testSelectOpenFileMustFailure() throws Throwable {
intercept(UnsupportedOperationException.class, SELECT_UNSUPPORTED, () ->
getFileSystem().openFile(methodPath())
.must(SELECT_SQL, STATEMENT)
.build()
.get());
}
/**
* A {@code .opt(SELECT_SQL, _)} option is ignored..
*/
@Test
public void testSelectOpenFileMayIsIgnored() throws Throwable {
final Path path = methodPath();
final S3AFileSystem fs = getFileSystem();
ContractTestUtils.touch(fs, path);
fs.openFile(path)
.opt(SELECT_SQL, STATEMENT)
.build()
.get()
.close();
}
@Test
public void testPathCapabilityNotAvailable() throws Throwable {
describe("verify that the FS lacks the path capability");
Assertions.assertThat(getFileSystem().hasPathCapability(methodPath(), SELECT_SQL))
.describedAs("S3 Select reported as present")
.isFalse();
}
@Test
public void testS3GuardToolFails() throws Throwable {
// ensure that the command doesn't actually exit the VM.
disableSystemExit();
final ExitUtil.ExitException ex =
intercept(ExitUtil.ExitException.class, SELECT_UNSUPPORTED,
() -> S3GuardTool.main(new String[]{
"select", "-sql", STATEMENT
}));
Assertions.assertThat(ex.getExitCode())
.describedAs("exit code of exception")
.isEqualTo(EXIT_UNSUPPORTED_VERSION);
}
}
| ITestSelectUnsupported |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/spies/SpyingOnInterfacesTest.java | {
"start": 843,
"end": 3636
} | class ____ extends TestBase {
@Test
public void shouldFailFastWhenCallingRealMethodOnInterface() throws Exception {
List<?> list = mock(List.class);
try {
// when
when(list.get(0)).thenCallRealMethod();
// then
fail();
} catch (MockitoException e) {
}
}
@Test
public void shouldFailInRuntimeWhenCallingRealMethodOnInterface() throws Exception {
// given
List<Object> list = mock(List.class);
when(list.get(0))
.thenAnswer(
new Answer<Object>() {
public Object answer(InvocationOnMock invocation) throws Throwable {
return invocation.callRealMethod();
}
});
try {
// when
list.get(0);
// then
fail();
} catch (MockitoException e) {
}
}
@Test
public void shouldAllowDelegatingToDefaultMethod() throws Exception {
Class<?> type =
new ByteBuddy()
.makeInterface()
.defineMethod("foo", String.class, Visibility.PUBLIC)
.intercept(FixedValue.value("bar"))
.make()
.load(getClass().getClassLoader(), ClassLoadingStrategy.Default.WRAPPER)
.getLoaded();
Object object = mock(type);
// when
when(type.getMethod("foo").invoke(object)).thenCallRealMethod();
// then
Assertions.assertThat(type.getMethod("foo").invoke(object)).isEqualTo((Object) "bar");
type.getMethod("foo").invoke(verify(object));
}
@Test
public void shouldAllowSpyingOnDefaultMethod() throws Exception {
Class<?> iFace =
new ByteBuddy()
.makeInterface()
.defineMethod("foo", String.class, Visibility.PUBLIC)
.intercept(FixedValue.value("bar"))
.make()
.load(getClass().getClassLoader(), ClassLoadingStrategy.Default.WRAPPER)
.getLoaded();
Class<?> impl =
new ByteBuddy()
.subclass(iFace)
.make()
.load(iFace.getClassLoader(), ClassLoadingStrategy.Default.WRAPPER)
.getLoaded();
Object object = spy(impl.getConstructor().newInstance());
// when
Assertions.assertThat(impl.getMethod("foo").invoke(object)).isEqualTo((Object) "bar");
// then
impl.getMethod("foo").invoke(verify(object));
}
}
| SpyingOnInterfacesTest |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestTracingProcessor.java | {
"start": 11514,
"end": 12192
} | class ____ extends TestSelectionCommand {
@Argument
private String pattern;
@Override
protected String configValue() {
return Objects.requireNonNullElse(pattern, "");
}
@Override
protected String configKey() {
return "quarkus.test.include-pattern";
}
@Override
protected void configure(TestSupport testSupport) {
testSupport.setPatterns(pattern, testSupport.exclude != null ? testSupport.exclude.pattern() : null);
}
}
@CommandDefinition(name = "exclude", description = "Sets the current exclude pattern")
public static | IncludePatternCommand |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.