language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/compileroption/ProcessingEnvironmentCompilerOptions.java | {
"start": 11454,
"end": 12059
} | interface ____ {
/** The key of the option (appears after "-A"). */
@Override
String toString();
/**
* Returns all aliases besides {@link #toString()}, such as old names for an option, in order of
* precedence.
*/
default ImmutableList<String> aliases() {
return ImmutableList.of();
}
/** All the command-line names for this option, in order of precedence. */
default Stream<String> allNames() {
return concat(Stream.of(toString()), aliases().stream());
}
}
/** An option that can be set on the command line. */
private | CommandLineOption |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/codegen/UnsafeRowWriter.java | {
"start": 1834,
"end": 6997
} | class ____ extends UnsafeWriter {
private final UnsafeRow row;
private final int nullBitsSize;
private final int fixedSize;
public UnsafeRowWriter(int numFields) {
this(new UnsafeRow(numFields));
}
public UnsafeRowWriter(int numFields, int initialBufferSize) {
this(new UnsafeRow(numFields), initialBufferSize);
}
public UnsafeRowWriter(UnsafeWriter writer, int numFields) {
this(null, writer.getBufferHolder(), numFields);
}
private UnsafeRowWriter(UnsafeRow row) {
this(row, new BufferHolder(row), row.numFields());
}
private UnsafeRowWriter(UnsafeRow row, int initialBufferSize) {
this(row, new BufferHolder(row, initialBufferSize), row.numFields());
}
private UnsafeRowWriter(UnsafeRow row, BufferHolder holder, int numFields) {
super(holder);
this.row = row;
this.nullBitsSize = UnsafeRow.calculateBitSetWidthInBytes(numFields);
this.fixedSize = nullBitsSize + 8 * numFields;
this.startingOffset = cursor();
}
/**
* Updates total size of the UnsafeRow using the size collected by BufferHolder, and returns
* the UnsafeRow created at a constructor
*/
public UnsafeRow getRow() {
row.setTotalSize(totalSize());
return row;
}
/**
* Resets the `startingOffset` according to the current cursor of row buffer, and clear out null
* bits. This should be called before we write a new nested struct to the row buffer.
*/
public void resetRowWriter() {
this.startingOffset = cursor();
// grow the global buffer to make sure it has enough space to write fixed-length data.
grow(fixedSize);
increaseCursor(fixedSize);
zeroOutNullBytes();
}
/**
* Clears out null bits. This should be called before we write a new row to row buffer.
*/
public void zeroOutNullBytes() {
for (int i = 0; i < nullBitsSize; i += 8) {
Platform.putLong(getBuffer(), startingOffset + i, 0L);
}
}
public boolean isNullAt(int ordinal) {
return BitSetMethods.isSet(getBuffer(), startingOffset, ordinal);
}
public void setNullAt(int ordinal) {
BitSetMethods.set(getBuffer(), startingOffset, ordinal);
write(ordinal, 0L);
}
@Override
public void setNull1Bytes(int ordinal) {
setNullAt(ordinal);
}
@Override
public void setNull2Bytes(int ordinal) {
setNullAt(ordinal);
}
@Override
public void setNull4Bytes(int ordinal) {
setNullAt(ordinal);
}
@Override
public void setNull8Bytes(int ordinal) {
setNullAt(ordinal);
}
public long getFieldOffset(int ordinal) {
return startingOffset + nullBitsSize + 8L * ordinal;
}
@Override
public void write(int ordinal, boolean value) {
final long offset = getFieldOffset(ordinal);
writeLong(offset, 0L);
writeBoolean(offset, value);
}
@Override
public void write(int ordinal, byte value) {
final long offset = getFieldOffset(ordinal);
writeLong(offset, 0L);
writeByte(offset, value);
}
@Override
public void write(int ordinal, short value) {
final long offset = getFieldOffset(ordinal);
writeLong(offset, 0L);
writeShort(offset, value);
}
@Override
public void write(int ordinal, int value) {
final long offset = getFieldOffset(ordinal);
writeLong(offset, 0L);
writeInt(offset, value);
}
@Override
public void write(int ordinal, long value) {
writeLong(getFieldOffset(ordinal), value);
}
@Override
public void write(int ordinal, float value) {
final long offset = getFieldOffset(ordinal);
writeLong(offset, 0);
writeFloat(offset, value);
}
@Override
public void write(int ordinal, double value) {
writeDouble(getFieldOffset(ordinal), value);
}
@Override
public void write(int ordinal, Decimal input, int precision, int scale) {
if (precision <= Decimal.MAX_LONG_DIGITS()) {
// make sure Decimal object has the same scale as DecimalType
if (input != null && input.changePrecision(precision, scale)) {
write(ordinal, input.toUnscaledLong());
} else {
setNullAt(ordinal);
}
} else {
// grow the global buffer before writing data.
holder.grow(16);
// always zero-out the 16-byte buffer
Platform.putLong(getBuffer(), cursor(), 0L);
Platform.putLong(getBuffer(), cursor() + 8, 0L);
// Make sure Decimal object has the same scale as DecimalType.
// Note that we may pass in null Decimal object to set null for it.
if (input == null || !input.changePrecision(precision, scale)) {
BitSetMethods.set(getBuffer(), startingOffset, ordinal);
// keep the offset for future update
setOffsetAndSize(ordinal, 0);
} else {
final byte[] bytes = input.toJavaBigDecimal().unscaledValue().toByteArray();
final int numBytes = bytes.length;
assert numBytes <= 16;
// Write the bytes to the variable length portion.
Platform.copyMemory(
bytes, Platform.BYTE_ARRAY_OFFSET, getBuffer(), cursor(), numBytes);
setOffsetAndSize(ordinal, bytes.length);
}
// move the cursor forward.
increaseCursor(16);
}
}
}
| UnsafeRowWriter |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/ai/remote/redo/McpServerEndpointRedoData.java | {
"start": 836,
"end": 1649
} | class ____ extends RedoData<McpServerEndpoint> {
private final String mcpName;
public McpServerEndpointRedoData(String mcpName) {
this.mcpName = mcpName;
}
public String getMcpName() {
return mcpName;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
McpServerEndpointRedoData that = (McpServerEndpointRedoData) o;
return Objects.equals(mcpName, that.mcpName) && super.equals(o);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), mcpName);
}
}
| McpServerEndpointRedoData |
java | apache__flink | flink-connectors/flink-connector-base/src/main/java/org/apache/flink/connector/base/sink/writer/strategy/ResultInfo.java | {
"start": 1029,
"end": 1109
} | interface ____ {
int getFailedMessages();
int getBatchSize();
}
| ResultInfo |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TimeoutExtensionTests.java | {
"start": 31205,
"end": 31822
} | class ____ {
@Test
@Timeout(value = 10, unit = MILLISECONDS, threadMode = SEPARATE_THREAD)
void testZero() throws InterruptedException {
Thread.sleep(1000);
}
@Test
@Timeout(value = 10, unit = MILLISECONDS, threadMode = SAME_THREAD)
void testOne() throws InterruptedException {
Thread.sleep(1000);
}
@Test
@Timeout(value = 10, unit = MILLISECONDS, threadMode = SEPARATE_THREAD)
void testTwo() throws InterruptedException {
Thread.sleep(1000);
}
}
@SuppressWarnings("JUnitMalformedDeclaration")
@TestMethodOrder(OrderAnnotation.class)
static | MixedSameThreadAndSeparateThreadTestCase |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java | {
"start": 20026,
"end": 20583
} | class ____<E extends Expression> {
private final Class<E> typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());
@SuppressWarnings("unchecked")
public QueryTranslation translate(Expression exp, boolean onAggs, TranslatorHandler handler) {
return (typeToken.isInstance(exp) ? asQuery((E) exp, onAggs, handler) : null);
}
protected abstract QueryTranslation asQuery(E e, boolean onAggs, TranslatorHandler handler);
}
//
// Agg translators
//
static | SqlExpressionTranslator |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/SyntheticBeanParamsTest.java | {
"start": 910,
"end": 985
} | enum ____ {
FOO,
BAR,
BAZ,
}
public | SimpleEnum |
java | quarkusio__quarkus | extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java | {
"start": 5012,
"end": 16523
} | class ____ {
private static final String TEST_TRANSACTION = "io.quarkus.test.TestTransaction";
@BuildStep
public NativeImageSystemPropertyBuildItem nativeImageSystemPropertyBuildItem() {
return new NativeImageSystemPropertyBuildItem("CoordinatorEnvironmentBean.transactionStatusManagerEnable", "false");
}
@BuildStep
@Record(RUNTIME_INIT)
@Produce(NarayanaInitBuildItem.class)
public void build(NarayanaJtaRecorder recorder,
CombinedIndexBuildItem indexBuildItem,
BuildProducer<AdditionalBeanBuildItem> additionalBeans,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass,
BuildProducer<RuntimeInitializedClassBuildItem> runtimeInit,
BuildProducer<FeatureBuildItem> feature,
BuildProducer<LogCleanupFilterBuildItem> logCleanupFilters,
BuildProducer<NativeImageFeatureBuildItem> nativeImageFeatures,
TransactionManagerBuildTimeConfig transactionManagerBuildTimeConfig,
ShutdownContextBuildItem shutdownContextBuildItem,
Capabilities capabilities) {
recorder.handleShutdown(shutdownContextBuildItem);
feature.produce(new FeatureBuildItem(Feature.NARAYANA_JTA));
additionalBeans.produce(new AdditionalBeanBuildItem(NarayanaJtaProducers.class));
additionalBeans.produce(AdditionalBeanBuildItem.unremovableOf("io.quarkus.narayana.jta.RequestScopedTransaction"));
runtimeInit.produce(new RuntimeInitializedClassBuildItem(
"com.arjuna.ats.internal.jta.resources.arjunacore.CommitMarkableResourceRecord"));
runtimeInit.produce(new RuntimeInitializedClassBuildItem(SocketProcessId.class.getName()));
runtimeInit.produce(new RuntimeInitializedClassBuildItem(CommitMarkableResourceRecordRecoveryModule.class.getName()));
runtimeInit.produce(new RuntimeInitializedClassBuildItem(RecoverConnectableAtomicAction.class.getName()));
runtimeInit.produce(new RuntimeInitializedClassBuildItem(TransactionStatusConnectionManager.class.getName()));
runtimeInit.produce(new RuntimeInitializedClassBuildItem(JTAActionStatusServiceXAResourceOrphanFilter.class.getName()));
runtimeInit.produce(new RuntimeInitializedClassBuildItem(AtomicActionExpiryScanner.class.getName()));
indexBuildItem.getIndex().getAllKnownSubclasses(JDBCImple_driver.class).stream()
.map(impl -> ReflectiveClassBuildItem.builder(impl.name().toString()).build())
.forEach(reflectiveClass::produce);
reflectiveClass.produce(ReflectiveClassBuildItem.builder(JTAEnvironmentBean.class,
UserTransactionImple.class,
CheckedActionFactoryImple.class,
TransactionManagerImple.class,
TransactionSynchronizationRegistryImple.class,
ObjectStoreEnvironmentBean.class,
ShadowNoFileLockStore.class,
JDBCStore.class,
SocketProcessId.class,
AtomicActionRecoveryModule.class,
XARecoveryModule.class,
XAResourceRecord.class,
JTATransactionLogXAResourceOrphanFilter.class,
JTANodeNameXAResourceOrphanFilter.class,
JTAActionStatusServiceXAResourceOrphanFilter.class,
ExpiredTransactionStatusManagerScanner.class)
.publicConstructors()
.reason(getClass().getName())
.build());
AdditionalBeanBuildItem.Builder builder = AdditionalBeanBuildItem.builder();
builder.addBeanClass(TransactionalInterceptorSupports.class);
builder.addBeanClass(TransactionalInterceptorNever.class);
builder.addBeanClass(TransactionalInterceptorRequired.class);
builder.addBeanClass(TransactionalInterceptorRequiresNew.class);
builder.addBeanClass(TransactionalInterceptorMandatory.class);
builder.addBeanClass(TransactionalInterceptorNotSupported.class);
additionalBeans.produce(builder.build());
transactionManagerBuildTimeConfig.unsafeMultipleLastResources().ifPresent(mode -> {
if (!mode.equals(UnsafeMultipleLastResourcesMode.FAIL)) {
recorder.logUnsafeMultipleLastResourcesOnStartup(mode);
}
});
//we want to force Arjuna to init at static init time
Properties defaultProperties = PropertiesFactory.getDefaultProperties();
//we don't want to store the system properties here
//we re-apply them at runtime
for (Object i : System.getProperties().keySet()) {
defaultProperties.remove(i);
}
recorder.setDefaultProperties(defaultProperties);
// This must be done before setNodeName as the code in setNodeName will create a TSM based on the value of this property
recorder.disableTransactionStatusManager();
allowUnsafeMultipleLastResources(recorder, transactionManagerBuildTimeConfig, capabilities, logCleanupFilters,
nativeImageFeatures);
recorder.setNodeName();
recorder.setDefaultTimeout();
recorder.setConfig();
}
@BuildStep(onlyIf = NativeOrNativeSourcesBuild.class)
public void nativeImageFeature(TransactionManagerBuildTimeConfig transactionManagerBuildTimeConfig,
BuildProducer<NativeImageFeatureBuildItem> nativeImageFeatures) {
switch (transactionManagerBuildTimeConfig.unsafeMultipleLastResources()
.orElse(UnsafeMultipleLastResourcesMode.DEFAULT)) {
case ALLOW, WARN_FIRST, WARN_EACH -> {
nativeImageFeatures.produce(new NativeImageFeatureBuildItem(DisableLoggingFeature.class));
}
}
}
@BuildStep
@Record(RUNTIME_INIT)
@Consume(NarayanaInitBuildItem.class)
@Consume(SyntheticBeansRuntimeInitBuildItem.class)
public void startRecoveryService(NarayanaJtaRecorder recorder, List<JdbcDataSourceBuildItem> jdbcDataSourceBuildItems) {
Map<String, String> configuredDataSourcesConfigKeys = jdbcDataSourceBuildItems.stream()
.map(j -> j.getName())
.collect(Collectors.toMap(Function.identity(),
n -> DataSourceUtil.dataSourcePropertyKey(n, "jdbc.transactions")));
Set<String> dataSourcesWithTransactionIntegration = jdbcDataSourceBuildItems.stream()
.filter(j -> j.isTransactionIntegrationEnabled())
.map(j -> j.getName())
.collect(Collectors.toSet());
recorder.startRecoveryService(configuredDataSourcesConfigKeys, dataSourcesWithTransactionIntegration);
}
@BuildStep(onlyIf = IsTest.class)
void testTx(BuildProducer<GeneratedBeanBuildItem> generatedBeanBuildItemBuildProducer,
BuildProducer<AdditionalBeanBuildItem> additionalBeans) {
if (!testTransactionOnClassPath()) {
return;
}
//generate the annotated interceptor with gizmo
//all the logic is in the parent, but we don't have access to the
//binding annotation here
try (ClassCreator c = ClassCreator.builder()
.classOutput(new GeneratedBeanGizmoAdaptor(generatedBeanBuildItemBuildProducer)).className(
TestTransactionInterceptor.class.getName() + "Generated")
.superClass(TestTransactionInterceptor.class).build()) {
c.addAnnotation(TEST_TRANSACTION);
c.addAnnotation(Interceptor.class.getName());
c.addAnnotation(Priority.class).addValue("value", Interceptor.Priority.PLATFORM_BEFORE + 200);
}
additionalBeans.produce(AdditionalBeanBuildItem.builder().addBeanClass(TestTransactionInterceptor.class)
.addBeanClass(TEST_TRANSACTION).build());
}
private static boolean testTransactionOnClassPath() {
try {
Class.forName(TEST_TRANSACTION, false, Thread.currentThread().getContextClassLoader());
return true;
} catch (ClassNotFoundException ignored) {
return false;
}
}
@BuildStep
public ContextConfiguratorBuildItem transactionContext(ContextRegistrationPhaseBuildItem contextRegistrationPhase) {
return new ContextConfiguratorBuildItem(contextRegistrationPhase.getContext()
.configure(TransactionScoped.class).normal().contextClass(TransactionContext.class));
}
@BuildStep
public CustomScopeBuildItem registerScope() {
return new CustomScopeBuildItem(TransactionScoped.class);
}
@BuildStep
void unremovableBean(BuildProducer<UnremovableBeanBuildItem> unremovableBeans) {
// LifecycleManager comes from smallrye-context-propagation-jta and is only used via programmatic lookup in JtaContextProvider
unremovableBeans.produce(UnremovableBeanBuildItem.beanClassNames(JtaContextProvider.LifecycleManager.class.getName()));
// The tx manager is obtained via CDI.current().select(TransactionManager.class) in the JtaContextProvider
unremovableBeans.produce(UnremovableBeanBuildItem.beanTypes(TransactionManager.class));
}
@BuildStep
void logCleanupFilters(BuildProducer<LogCleanupFilterBuildItem> logCleanupFilters) {
logCleanupFilters.produce(new LogCleanupFilterBuildItem("com.arjuna.ats.jbossatx", "ARJUNA032010:", "ARJUNA032013:"));
}
private void allowUnsafeMultipleLastResources(NarayanaJtaRecorder recorder,
TransactionManagerBuildTimeConfig transactionManagerBuildTimeConfig,
Capabilities capabilities, BuildProducer<LogCleanupFilterBuildItem> logCleanupFilters,
BuildProducer<NativeImageFeatureBuildItem> nativeImageFeatures) {
switch (transactionManagerBuildTimeConfig.unsafeMultipleLastResources()
.orElse(UnsafeMultipleLastResourcesMode.DEFAULT)) {
case ALLOW -> {
recorder.allowUnsafeMultipleLastResources(capabilities.isPresent(Capability.AGROAL), true);
// we will handle the warnings ourselves at runtime init when the option is set explicitly
logCleanupFilters.produce(
new LogCleanupFilterBuildItem("com.arjuna.ats.arjuna", "ARJUNA012139", "ARJUNA012141", "ARJUNA012142"));
}
case WARN_FIRST -> {
recorder.allowUnsafeMultipleLastResources(capabilities.isPresent(Capability.AGROAL), true);
// we will handle the warnings ourselves at runtime init when the option is set explicitly
// but we still want Narayana to produce a warning on the first offending transaction
logCleanupFilters.produce(
new LogCleanupFilterBuildItem("com.arjuna.ats.arjuna", "ARJUNA012139", "ARJUNA012142"));
}
case WARN_EACH -> {
recorder.allowUnsafeMultipleLastResources(capabilities.isPresent(Capability.AGROAL), false);
// we will handle the warnings ourselves at runtime init when the option is set explicitly
// but we still want Narayana to produce one warning per offending transaction
logCleanupFilters.produce(
new LogCleanupFilterBuildItem("com.arjuna.ats.arjuna", "ARJUNA012139", "ARJUNA012142"));
}
case FAIL -> { // No need to do anything, this is the default behavior of Narayana
}
}
}
}
| NarayanaJtaProcessor |
java | micronaut-projects__micronaut-core | inject-kotlin/src/test/groovy/io/micronaut/kotlin/processing/visitor/order/MyVisitor3.java | {
"start": 816,
"end": 1141
} | class ____ implements TypeElementVisitor<VisitMyAnnotation, Object> {
@Override
public void visitClass(ClassElement element, VisitorContext context) {
MyVisitor1.ORDER.add(getClass().getSimpleName() + " " + element.getName());
}
@Override
public int getOrder() {
return 3;
}
}
| MyVisitor3 |
java | google__auto | value/src/main/java/com/google/auto/value/extension/toprettystring/processor/ToPrettyStringValidator.java | {
"start": 2362,
"end": 5158
} | class ____ extends AbstractProcessor {
@Override
public boolean process(
Set<? extends TypeElement> annotations, RoundEnvironment roundEnvironment) {
Types types = processingEnv.getTypeUtils();
Elements elements = processingEnv.getElementUtils();
TypeElement toPrettyString = elements.getTypeElement(TO_PRETTY_STRING_NAME);
Set<ExecutableElement> annotatedMethods =
methodsIn(roundEnvironment.getElementsAnnotatedWith(toPrettyString));
for (ExecutableElement method : annotatedMethods) {
validateMethod(method, elements);
}
validateSingleToPrettyStringMethod(annotatedMethods, types, elements);
return false;
}
private void validateMethod(ExecutableElement method, Elements elements) {
ErrorReporter errorReporter = new ErrorReporter(method, processingEnv.getMessager());
if (method.getModifiers().contains(STATIC)) {
errorReporter.reportError("@ToPrettyString methods must be instance methods");
}
TypeMirror stringType = elements.getTypeElement("java.lang.String").asType();
if (!MoreTypes.equivalence().equivalent(method.getReturnType(), stringType)) {
errorReporter.reportError("@ToPrettyString methods must return String");
}
if (!method.getParameters().isEmpty()) {
errorReporter.reportError("@ToPrettyString methods cannot have parameters");
}
}
private void validateSingleToPrettyStringMethod(
Set<ExecutableElement> annotatedMethods, Types types, Elements elements) {
Set<TypeElement> enclosingTypes =
annotatedMethods.stream()
.map(Element::getEnclosingElement)
.map(MoreElements::asType)
.collect(toCollection(LinkedHashSet::new));
for (TypeElement enclosingType : enclosingTypes) {
ImmutableList<ExecutableElement> methods =
toPrettyStringMethods(enclosingType, types, elements);
if (methods.size() > 1) {
processingEnv
.getMessager()
.printMessage(
ERROR,
String.format(
"%s has multiple @ToPrettyString methods:%s",
enclosingType.getQualifiedName(), formatMethodList(methods)),
enclosingType);
}
}
}
private String formatMethodList(ImmutableList<ExecutableElement> methods) {
return methods.stream().map(this::formatMethodInList).collect(joining());
}
private String formatMethodInList(ExecutableElement method) {
return String.format(
"\n - %s.%s()",
MoreElements.asType(method.getEnclosingElement()).getQualifiedName(),
method.getSimpleName());
}
@Override
public SourceVersion getSupportedSourceVersion() {
return SourceVersion.latestSupported();
}
private static final | ToPrettyStringValidator |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/argumentselectiondefects/Heuristic.java | {
"start": 984,
"end": 1108
} | interface ____ {
boolean isAcceptableChange(Changes changes, Tree node, MethodSymbol symbol, VisitorState state);
}
| Heuristic |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/annotation/BasicAuthentication.java | {
"start": 548,
"end": 628
} | interface ____ {
String AUTH_MECHANISM_SCHEME = "basic";
}
| BasicAuthentication |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/jdbc/mutation/ParameterUsage.java | {
"start": 244,
"end": 426
} | enum ____ {
/**
* The parameter is used in the update set clause or insert values clause
*/
SET,
/**
* The parameter is used in the where clause
*/
RESTRICT
}
| ParameterUsage |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java | {
"start": 2212,
"end": 3280
} | class ____<K,V> extends Configured implements Tool {
private static final Logger LOG = LoggerFactory.getLogger(InputSampler.class);
static int printUsage() {
System.out.println("sampler -r <reduces>\n" +
" [-inFormat <input format class>]\n" +
" [-keyClass <map input & output key class>]\n" +
" [-splitRandom <double pcnt> <numSamples> <maxsplits> | " +
" // Sample from random splits at random (general)\n" +
" -splitSample <numSamples> <maxsplits> | " +
" // Sample from first records in splits (random data)\n"+
" -splitInterval <double pcnt> <maxsplits>]" +
" // Sample from splits at intervals (sorted data)");
System.out.println("Default sampler: -splitRandom 0.1 10000 10");
ToolRunner.printGenericCommandUsage(System.out);
return -1;
}
public InputSampler(Configuration conf) {
setConf(conf);
}
/**
* Interface to sample using an
* {@link org.apache.hadoop.mapreduce.InputFormat}.
*/
public | InputSampler |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SyncableDataOutputStream.java | {
"start": 1486,
"end": 3569
} | class ____ extends DataOutputStream
implements Syncable, StreamCapabilities {
private static final Logger LOG = LoggerFactory.getLogger(SyncableDataOutputStream.class);
public SyncableDataOutputStream(OutputStream out) {
super(out);
}
/**
* Get a reference to the wrapped output stream.
*
* @return the underlying output stream
*/
@InterfaceAudience.LimitedPrivate({"HDFS"})
public OutputStream getOutStream() {
return out;
}
@Override
public boolean hasCapability(String capability) {
return StoreImplementationUtils.hasCapability(out, capability);
}
@Override
public void hflush() throws IOException {
if (out instanceof Syncable) {
((Syncable) out).hflush();
}
}
@Override
public void hsync() throws IOException {
if (out instanceof Syncable) {
((Syncable) out).hsync();
}
}
@Override
public void close() throws IOException {
IOException ioeFromFlush = null;
try {
flush();
} catch (IOException e) {
ioeFromFlush = e;
throw e;
} finally {
try {
this.out.close();
} catch (IOException e) {
// If there was an Exception during flush(), the Azure SDK will throw back the
// same when we call close on the same stream. When try and finally both throw
// Exception, Java will use Throwable#addSuppressed for one of the Exception so
// that the caller will get one exception back. When within this, if both
// Exceptions are equal, it will throw back IllegalStateException. This makes us
// to throw back a non IOE. The below special handling is to avoid this.
if (ioeFromFlush == e) {
// Do nothing..
// The close() call gave back the same IOE which flush() gave. Just swallow it
LOG.debug("flush() and close() throwing back same Exception. Just swallowing the latter", e);
} else {
// Let Java handle 2 different Exceptions been thrown from try and finally.
throw e;
}
}
}
}
}
| SyncableDataOutputStream |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/htmlunit/webdriver/MockMvcHtmlUnitDriverBuilderTests.java | {
"start": 4104,
"end": 4279
} | class ____ {
@RequestMapping("/test")
String contextPath(HttpServletRequest request) {
return EXPECTED_BODY;
}
}
}
@RestController
static | ContextPathController |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4SetUpNotRunTest.java | {
"start": 6649,
"end": 6807
} | class ____ extends TestCase {
public void setUp() {}
}
/** setUp() method is private and wouldn't be run by JUnit3 */
@RunWith(JUnit4.class)
| J4SetUpJUnit3Class |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/QuarkusAppliedToMultipleModulesTest.java | {
"start": 152,
"end": 687
} | class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void testBasicMultiModuleBuild() throws Exception {
final File projectDir = getProjectDir("quarkus-plugin-in-multiple-modules");
final BuildResult build = runGradleWrapper(projectDir, "clean", "quarkusBuild");
assertThat(BuildResult.isSuccessful(build.getTasks().get(":modA:quarkusBuild"))).isTrue();
assertThat(BuildResult.isSuccessful(build.getTasks().get(":modB:quarkusBuild"))).isTrue();
}
}
| QuarkusAppliedToMultipleModulesTest |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/ConsoleAppenderDefaultSuppressedThrowable.java | {
"start": 1643,
"end": 2747
} | class ____ {
private static final Logger LOG = LogManager.getLogger(ConsoleAppenderDefaultSuppressedThrowable.class);
public static void main(final String[] args) {
final String config =
args.length == 0 ? "target/test-classes/log4j2-console-default-suppressed-throwable.xml" : args[0];
test(config);
}
static void test(final String config) {
try (final LoggerContext ignored =
Configurator.initialize(ConsoleAppenderDefaultSuppressedThrowable.class.getName(), config)) {
final IOException ioEx = new IOException("test suppressed");
ioEx.addSuppressed(new IOException("test suppressed 1", new IOException("test 1")));
final IOException ioEx2 = new IOException("test 2");
ioEx2.addSuppressed(new IOException("test 3"));
ioEx.addSuppressed(new IOException("test suppressed 2", ioEx2));
final IOException e = new IOException("test", ioEx);
LOG.error("Error message {}, suppressed?", "Hi", e);
}
}
}
| ConsoleAppenderDefaultSuppressedThrowable |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/constraint/ForeignKeyNoConstraintTest.java | {
"start": 1170,
"end": 2041
} | class ____ {
@Test
@JiraKey(value = "HHH-12975")
public void testPrimaryKeyJoinColumnForeignKeyNoConstraint(SessionFactoryScope scope) {
for ( Namespace namespace : scope.getMetadataImplementor().getDatabase().getNamespaces() ) {
for ( Table table : namespace.getTables() ) {
if ( "Car".equals( table.getName() ) ) {
assertThat( table.getForeignKeyCollection() ).hasSize( 0 );
}
}
}
}
@Test
@JiraKey(value = "HHH-12975")
public void testMapsIdJoinColumnForeignKeyNoConstraint(SessionFactoryScope scope) {
for ( Namespace namespace : scope.getMetadataImplementor().getDatabase().getNamespaces() ) {
for ( Table table : namespace.getTables() ) {
if ( "Post".equals( table.getName() ) ) {
assertThat( table.getForeignKeyCollection() ).hasSize( 0 );
}
}
}
}
@Entity(name = "Car")
public static | ForeignKeyNoConstraintTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/circular/CircularProducerNormalScopeSetterInjectionTest.java | {
"start": 1295,
"end": 2006
} | class ____ {
@Produces
@ApplicationScoped
MyValue producerMethod() {
return new MyValue("foobar");
}
@Produces
@ApplicationScoped
@MyQualifier
MyValue producerField = new MyValue("quux");
MyValue foobar;
MyValue quux;
@Inject
void set(MyValue foobar, @MyQualifier MyValue quux) {
this.foobar = foobar;
this.quux = quux;
}
String get() {
return foobar.get() + quux.get();
}
}
@Qualifier
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER })
@ | MyBean |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/domain/sample/ItemSite.java | {
"start": 1159,
"end": 1357
} | class ____ {
@Id @ManyToOne private Item item;
@Id @ManyToOne private Site site;
public ItemSite() {}
public ItemSite(Item item, Site site) {
this.item = item;
this.site = site;
}
}
| ItemSite |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/results/ImplicitSelectWithJoinTests.java | {
"start": 1097,
"end": 10279
} | class ____ {
private static final String HQL = "from Product p join p.vendor v where v.name like '%Steve%'";
private static final String HQL0 = "from Product this join this.vendor v where v.name like '%Steve%'";
private static final String HQL2 = "select p " + HQL;
private static final String HQL3 = "from Product q join q.vendor w, Product p join p.vendor v where v.name like '%Steve%' and w.name like '%Gavin%'";
@Test
public void testNoExpectedTypeWithThis(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final SelectionQuery<?> query = session.createSelectionQuery( HQL0 );
{
final List<?> results = query.list();
assertThat( results ).hasSize( 1 );
final Object result = results.get( 0 );
assertThat( result ).isInstanceOf( Product.class );
}
try (ScrollableResults<?> results = query.scroll()) {
assertThat( results.next() ).isTrue();
final Object result = results.get();
assertThat( result ).isInstanceOf( Product.class );
assertThat( results.next() ).isFalse();
}
} );
}
@Test @FailureExpected(reason = "this functionality was disabled, and an exception is now thrown")
public void testNoExpectedType(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final SelectionQuery<?> query = session.createSelectionQuery( HQL );
{
final List<?> results = query.list();
assertThat( results ).hasSize( 1 );
final Object result = results.get( 0 );
assertThat( result ).isInstanceOf( Product.class );
}
try (ScrollableResults<?> results = query.scroll()) {
assertThat( results.next() ).isTrue();
final Object result = results.get();
assertThat( result ).isInstanceOf( Product.class );
assertThat( results.next() ).isFalse();
}
} );
}
@Test
public void testProductResult(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final SelectionQuery<Product> query = session.createSelectionQuery( HQL, Product.class );
{
final List<Product> results = query.list();
assertThat( results ).hasSize( 1 );
final Product result = results.get( 0 );
assertThat( result ).isNotNull();
}
try (ScrollableResults<Product> results = query.scroll()) {
assertThat( results.next() ).isTrue();
final Product result = results.get();
assertThat( result ).isNotNull();
assertThat( results.next() ).isFalse();
}
} );
}
	@Test @FailureExpected(reason = "this functionality was disabled, and an exception is now thrown")
	public void testArrayResultNoResultType(SessionFactoryScope scope) {
		// Untyped query over HQL3 (two product/vendor pairs). The asserted legacy behavior —
		// each row coming back as an Object[4] — was disabled, hence @FailureExpected.
		scope.inTransaction( (session) -> {
			final SelectionQuery<?> query = session.createSelectionQuery( HQL3 );

			{
				// list() path: one row, shaped as [Product, Vendor, Product, Vendor]
				final List<?> results = query.list();
				assertThat( results ).hasSize( 1 );
				final Object result = results.get( 0 );
				assertThat( result ).isNotNull();
				assertInstanceOf( Object[].class, result );
				assertThat( (Object[]) result ).hasSize(4);
				assertThat( (Object[]) result ).hasExactlyElementsOfTypes(Product.class, Vendor.class, Product.class, Vendor.class);
			}

			// scroll() path: same single row via a cursor
			try (ScrollableResults<?> results = query.scroll()) {
				assertThat( results.next() ).isTrue();
				final Object result = results.get();
				assertThat( result ).isNotNull();
				assertInstanceOf( Object[].class, result );
				assertThat( (Object[]) result ).hasSize(4);
				assertThat( (Object[]) result ).hasExactlyElementsOfTypes(Product.class, Vendor.class, Product.class, Vendor.class);
				assertThat( results.next() ).isFalse();
			}
		} );

		// frankly, this would be more consistent and more backward-compatible
//		scope.inTransaction( (session) -> {
//			final SelectionQuery<?> query = session.createSelectionQuery( HQL );
//
//			{
//				final List<?> results = query.list();
//				assertThat( results ).hasSize( 1 );
//				final Object result = results.get( 0 );
//				assertThat( result ).isNotNull();
//				assertInstanceOf( Object[].class, result );
//				assertThat( (Object[]) result ).hasSize(2);
//				assertThat( (Object[]) result ).hasExactlyElementsOfTypes(Product.class, Vendor.class);
//			}
//
//			{
//				final ScrollableResults<?> results = query.scroll();
//				assertThat( results.next() ).isTrue();
//				final Object result = results.get();
//				assertThat( result ).isNotNull();
//				assertInstanceOf( Object[].class, result );
//				assertThat( (Object[]) result ).hasSize(2);
//				assertThat( (Object[]) result ).hasExactlyElementsOfTypes(Product.class, Vendor.class);
//				assertThat( results.next() ).isFalse();
//			}
//		} );
	}
@Test
public void testArrayResult(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final SelectionQuery<Object[]> query = session.createSelectionQuery( HQL3, Object[].class );
{
final List<Object[]> results = query.list();
assertThat( results ).hasSize( 1 );
final Object[] result = results.get( 0 );
assertThat( result ).isNotNull();
assertThat( result ).hasSize( 4 );
assertThat( result[ 0 ] ).isNotNull();
assertThat( result[ 1 ] ).isNotNull();
assertThat( result[ 2 ] ).isNotNull();
assertThat( result[ 3 ] ).isNotNull();
assertThat( result ).hasExactlyElementsOfTypes(Product.class, Vendor.class, Product.class, Vendor.class);
}
try (ScrollableResults<Object[]> results = query.scroll()) {
assertThat( results.next() ).isTrue();
final Object[] result = results.get();
assertThat( results.next() ).isFalse();
assertThat( result ).isNotNull();
assertThat( result ).hasSize( 4 );
assertThat( result[ 0 ] ).isNotNull();
assertThat( result[ 1 ] ).isNotNull();
assertThat( result[ 2 ] ).isNotNull();
assertThat( result[ 3 ] ).isNotNull();
assertThat( result ).hasExactlyElementsOfTypes(Product.class, Vendor.class, Product.class, Vendor.class);
}
} );
scope.inTransaction( (session) -> {
final SelectionQuery<Object[]> query = session.createSelectionQuery( HQL, Object[].class );
{
final List<Object[]> results = query.list();
assertThat( results ).hasSize( 1 );
final Object[] result = results.get( 0 );
assertThat( result ).isNotNull();
assertThat( result ).hasSize( 2 );
assertThat( result[ 0 ] ).isNotNull();
assertThat( result[ 1 ] ).isNotNull();
assertThat( result ).hasExactlyElementsOfTypes(Product.class, Vendor.class);
}
try (final ScrollableResults<Object[]> results = query.scroll()) {
assertThat( results.next() ).isTrue();
final Object[] result = results.get();
assertThat( results.next() ).isFalse();
assertThat( result ).isNotNull();
assertThat( result ).hasSize( 2 );
assertThat( result[ 0 ] ).isNotNull();
assertThat( result[ 1 ] ).isNotNull();
assertThat( result ).hasExactlyElementsOfTypes(Product.class, Vendor.class);
}
} );
}
@Test
public void testExplicitSingleSelectionArrayResult(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final SelectionQuery<Object[]> query = session.createSelectionQuery( HQL2, Object[].class );
{
final List<Object[]> results = query.list();
assertThat( results ).hasSize( 1 );
final Object[] result = results.get( 0 );
assertThat( result ).isNotNull();
assertThat( result ).hasSize( 1 );
assertThat( result[ 0 ] ).isNotNull();
assertThat( result[ 0 ] ).isInstanceOf( Product.class );
}
try (ScrollableResults<Object[]> results = query.scroll()) {
assertThat( results.next() ).isTrue();
final Object[] result = results.get();
assertThat( results.next() ).isFalse();
assertThat( result ).isNotNull();
assertThat( result ).hasSize( 1 );
assertThat( result[ 0 ] ).isNotNull();
assertThat( result[ 0 ] ).isInstanceOf( Product.class );
}
} );
}
@Test
public void testExplicitSingleSelectionProductResult(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final SelectionQuery<Product> query = session.createSelectionQuery( HQL2, Product.class );
{
final List<Product> results = query.list();
assertThat( results ).hasSize( 1 );
final Product result = results.get( 0 );
assertThat( result ).isNotNull();
}
try (ScrollableResults<Product> results = query.scroll()) {
assertThat( results.next() ).isTrue();
final Product result = results.get();
assertThat( result ).isNotNull();
assertThat( results.next() ).isFalse();
}
} );
}
@BeforeEach
public void prepareTestData(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
final Vendor steve = new Vendor( 1, "Steve's Curios", "Acme Corp." );
final Product product1 = new Product( 10, UUID.fromString( "53886a8a-7082-4879-b430-25cb94415be8" ), steve );
final Vendor gavin = new Vendor( 2, "Gavin & Associates", "Acme Corp." );
final Product product2 = new Product( 11, UUID.fromString( "53886a8b-3083-4879-b431-25cb95515be9" ), gavin );
session.persist( steve );
session.persist( product1 );
session.persist( gavin );
session.persist( product2 );
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
}
| ImplicitSelectWithJoinTests |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLAlterIndexStatement.java | {
"start": 2959,
"end": 4165
} | class ____ extends SQLObjectImpl {
private SQLObject option;
public SQLObject getOption() {
return option;
}
public void setOption(SQLObject option) {
this.option = option;
}
@Override
public void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, option);
}
visitor.endVisit(this);
}
}
public SQLExprTableSource getTable() {
return table;
}
public void setTable(SQLName x) {
setTable(new SQLExprTableSource(x));
}
public void setTable(SQLExprTableSource x) {
if (x != null) {
x.setParent(this);
}
this.table = x;
}
public List<SQLAssignItem> getPartitions() {
return partitions;
}
public SQLPartitionBy getDbPartitionBy() {
return dbPartitionBy;
}
public void setDbPartitionBy(SQLPartitionBy x) {
if (x != null) {
x.setParent(this);
}
this.dbPartitionBy = x;
}
@Override
public DDLObjectType getDDLObjectType() {
return DDLObjectType.INDEX;
}
}
| Rebuild |
java | spring-projects__spring-boot | module/spring-boot-data-neo4j-test/src/dockerTest/java/org/springframework/boot/data/neo4j/test/autoconfigure/DataNeo4jTestIntegrationTests.java | {
"start": 1921,
"end": 2958
} | class ____ {
@Container
@ServiceConnection
static final Neo4jContainer neo4j = TestImage.container(Neo4jContainer.class);
@Autowired
private Neo4jTemplate neo4jTemplate;
@Autowired
private ExampleRepository exampleRepository;
@Autowired
private ApplicationContext applicationContext;
@Test
void testRepository() {
ExampleGraph exampleGraph = new ExampleGraph("Look, new @DataNeo4jTest!");
assertThat(exampleGraph.getId()).isNull();
ExampleGraph savedGraph = this.exampleRepository.save(exampleGraph);
assertThat(savedGraph.getId()).isNotNull();
assertThat(this.neo4jTemplate.count(ExampleGraph.class)).isOne();
}
@Test
void didNotInjectExampleService() {
assertThatExceptionOfType(NoSuchBeanDefinitionException.class)
.isThrownBy(() -> this.applicationContext.getBean(ExampleService.class));
}
@Test
void serviceConnectionAutoConfigurationWasImported() {
assertThat(this.applicationContext).has(importedAutoConfiguration(ServiceConnectionAutoConfiguration.class));
}
}
| DataNeo4jTestIntegrationTests |
java | google__error-prone | core/src/main/java/com/google/errorprone/refaster/PlaceholderUnificationVisitor.java | {
"start": 14115,
"end": 26792
} | interface ____<T1, T2, T3, T4, R> {
R apply(T1 t1, T2 t2, T3 t3, T4 t4);
}
private static <T1, T2, T3, T4, R> Choice<State<R>> chooseSubtrees(
State<?> state,
Function<State<?>, Choice<? extends State<? extends T1>>> choice1,
Function<State<?>, Choice<? extends State<? extends T2>>> choice2,
Function<State<?>, Choice<? extends State<? extends T3>>> choice3,
Function<State<?>, Choice<? extends State<? extends T4>>> choice4,
QuadFunction<T1, T2, T3, T4, R> finalizer) {
return choice1
.apply(state)
.flatMap(
s1 ->
choice2
.apply(s1)
.flatMap(
s2 ->
choice3
.apply(s2)
.flatMap(
s3 ->
choice4
.apply(s3)
.map(
s4 ->
s4.withResult(
finalizer.apply(
s1.result(),
s2.result(),
s3.result(),
s4.result()))))));
}
@Override
public Choice<State<JCArrayAccess>> visitArrayAccess(ArrayAccessTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getExpression(), s),
s -> unifyExpression(node.getIndex(), s),
maker()::Indexed);
}
@Override
public Choice<State<JCBinary>> visitBinary(BinaryTree node, State<?> state) {
Tag tag = ((JCBinary) node).getTag();
return chooseSubtrees(
state,
s -> unifyExpression(node.getLeftOperand(), s),
s -> unifyExpression(node.getRightOperand(), s),
(l, r) -> maker().Binary(tag, l, r));
}
@Override
public Choice<State<JCMethodInvocation>> visitMethodInvocation(
MethodInvocationTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getMethodSelect(), s),
s -> unifyExpressions(node.getArguments(), s),
(select, args) -> maker().Apply(null, select, args));
}
@Override
public Choice<State<JCFieldAccess>> visitMemberSelect(MemberSelectTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getExpression(), s),
expr -> maker().Select(expr, (Name) node.getIdentifier()));
}
@Override
public Choice<State<JCParens>> visitParenthesized(ParenthesizedTree node, State<?> state) {
return chooseSubtrees(state, s -> unifyExpression(node.getExpression(), s), maker()::Parens);
}
private static final ImmutableSet<Tag> MUTATING_UNARY_TAGS =
ImmutableSet.copyOf(EnumSet.of(Tag.PREINC, Tag.PREDEC, Tag.POSTINC, Tag.POSTDEC));
@Override
public Choice<State<JCUnary>> visitUnary(UnaryTree node, State<?> state) {
Tag tag = ((JCUnary) node).getTag();
return chooseSubtrees(
state, s -> unifyExpression(node.getExpression(), s), expr -> maker().Unary(tag, expr))
.filter(
s ->
!MUTATING_UNARY_TAGS.contains(tag)
|| !(s.result().getExpression() instanceof PlaceholderParamIdent));
}
@Override
public Choice<State<JCTypeCast>> visitTypeCast(TypeCastTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getExpression(), s),
expr -> maker().TypeCast((JCTree) node.getType(), expr));
}
@Override
public Choice<State<JCInstanceOf>> visitInstanceOf(InstanceOfTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getExpression(), s),
expr -> maker().TypeTest(expr, (JCTree) node.getType()));
}
@Override
public Choice<State<JCNewClass>> visitNewClass(NewClassTree node, State<?> state) {
if (node.getEnclosingExpression() != null
|| (node.getTypeArguments() != null && !node.getTypeArguments().isEmpty())
|| node.getClassBody() != null) {
return Choice.none();
}
return chooseSubtrees(
state,
s -> unifyExpression(node.getIdentifier(), s),
s -> unifyExpressions(node.getArguments(), s),
(ident, args) -> maker().NewClass(null, null, ident, args, null));
}
@Override
public Choice<State<JCNewArray>> visitNewArray(NewArrayTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpressions(node.getDimensions(), s),
s -> unifyExpressions(node.getInitializers(), s),
(dims, inits) -> maker().NewArray((JCExpression) node.getType(), dims, inits));
}
@Override
public Choice<State<JCConditional>> visitConditionalExpression(
ConditionalExpressionTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getCondition(), s),
s -> unifyExpression(node.getTrueExpression(), s),
s -> unifyExpression(node.getFalseExpression(), s),
maker()::Conditional);
}
@Override
public Choice<State<JCAssign>> visitAssignment(AssignmentTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getVariable(), s),
s -> unifyExpression(node.getExpression(), s),
maker()::Assign)
.filter(s -> !(s.result().getVariable() instanceof PlaceholderParamIdent));
}
@Override
public Choice<State<JCAssignOp>> visitCompoundAssignment(
CompoundAssignmentTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getVariable(), s),
s -> unifyExpression(node.getExpression(), s),
(variable, expr) -> maker().Assignop(((JCAssignOp) node).getTag(), variable, expr))
.filter(assignOp -> !(assignOp.result().getVariable() instanceof PlaceholderParamIdent));
}
@Override
public Choice<State<JCExpressionStatement>> visitExpressionStatement(
ExpressionStatementTree node, State<?> state) {
return chooseSubtrees(state, s -> unifyExpression(node.getExpression(), s), maker()::Exec);
}
@Override
public Choice<State<JCBlock>> visitBlock(BlockTree node, State<?> state) {
return chooseSubtrees(
state, s -> unifyStatements(node.getStatements(), s), stmts -> maker().Block(0, stmts));
}
@Override
public Choice<State<JCThrow>> visitThrow(ThrowTree node, State<?> state) {
return chooseSubtrees(state, s -> unifyExpression(node.getExpression(), s), maker()::Throw);
}
@Override
public Choice<State<JCEnhancedForLoop>> visitEnhancedForLoop(
EnhancedForLoopTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getExpression(), s),
s -> unifyStatement(node.getStatement(), s),
(expr, stmt) -> maker().ForeachLoop((JCVariableDecl) node.getVariable(), expr, stmt));
}
@Override
public Choice<State<JCIf>> visitIf(IfTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getCondition(), s),
s -> unifyStatement(node.getThenStatement(), s),
s -> unifyStatement(node.getElseStatement(), s),
maker()::If);
}
@Override
public Choice<State<JCDoWhileLoop>> visitDoWhileLoop(DoWhileLoopTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyStatement(node.getStatement(), s),
s -> unifyExpression(node.getCondition(), s),
maker()::DoLoop);
}
@Override
public Choice<State<JCForLoop>> visitForLoop(ForLoopTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyStatements(node.getInitializer(), s),
s -> unifyExpression(node.getCondition(), s),
s -> unifyStatements(node.getUpdate(), s),
s -> unifyStatement(node.getStatement(), s),
(inits, cond, update, stmt) ->
maker().ForLoop(inits, cond, List.convert(JCExpressionStatement.class, update), stmt));
}
@Override
public Choice<State<JCLabeledStatement>> visitLabeledStatement(
LabeledStatementTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyStatement(node.getStatement(), s),
stmt -> maker().Labelled((Name) node.getLabel(), stmt));
}
@Override
public Choice<State<JCVariableDecl>> visitVariable(VariableTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getInitializer(), s),
init ->
maker()
.VarDef(
(JCModifiers) node.getModifiers(),
(Name) node.getName(),
(JCExpression) node.getType(),
init));
}
@Override
public Choice<State<JCWhileLoop>> visitWhileLoop(WhileLoopTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getCondition(), s),
s -> unifyStatement(node.getStatement(), s),
maker()::WhileLoop);
}
@Override
public Choice<State<JCSynchronized>> visitSynchronized(SynchronizedTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getExpression(), s),
s -> unifyStatement(node.getBlock(), s),
(expr, block) -> maker().Synchronized(expr, (JCBlock) block));
}
@Override
public Choice<State<JCReturn>> visitReturn(ReturnTree node, State<?> state) {
return chooseSubtrees(state, s -> unifyExpression(node.getExpression(), s), maker()::Return);
}
@Override
public Choice<State<JCTry>> visitTry(TryTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unify(node.getResources(), s),
s -> unifyStatement(node.getBlock(), s),
s -> unify(node.getCatches(), s),
s -> unifyStatement(node.getFinallyBlock(), s),
(resources, block, catches, finallyBlock) ->
maker()
.Try(
resources,
(JCBlock) block,
List.convert(JCCatch.class, catches),
(JCBlock) finallyBlock));
}
@Override
public Choice<State<JCCatch>> visitCatch(CatchTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyStatement(node.getBlock(), s),
block -> maker().Catch((JCVariableDecl) node.getParameter(), (JCBlock) block));
}
@Override
public Choice<State<JCSwitch>> visitSwitch(SwitchTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getExpression(), s),
s -> unify(node.getCases(), s),
(expr, cases) -> maker().Switch(expr, List.convert(JCCase.class, cases)));
}
@Override
public Choice<State<JCCase>> visitCase(CaseTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unify(node.getLabels(), s),
s -> unifyExpression(node.getGuard(), s),
s -> unifyStatements(node.getStatements(), s),
s -> unify(node.getBody(), s),
(labels, guard, stmts, body) ->
maker()
.Case(
node.getCaseKind(),
List.convert(JCCaseLabel.class, labels),
guard,
stmts,
body));
}
@Override
public Choice<State<JCLambda>> visitLambdaExpression(LambdaExpressionTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unify(node.getBody(), s),
body ->
maker()
.Lambda(
List.convert(
JCVariableDecl.class, (List<? extends VariableTree>) node.getParameters()),
body));
}
@Override
public Choice<State<JCMemberReference>> visitMemberReference(
MemberReferenceTree node, State<?> state) {
return chooseSubtrees(
state,
s -> unifyExpression(node.getQualifierExpression(), s),
expr ->
maker()
.Reference(
node.getMode(),
(Name) node.getName(),
expr,
List.convert(
JCExpression.class,
(List<? extends ExpressionTree>) node.getTypeArguments())));
}
}
| QuadFunction |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/OracleNumberLiteralTest.java | {
"start": 882,
"end": 1886
} | class ____ extends TestCase {
public void test_number_literal() throws Exception {
String sql = "SELECT 7, +255, 0.5, +6.34,25e-03, +6.34F, 0.5d, -1D FROM DUAL";
OracleStatementParser parser = new OracleStatementParser(sql);
SQLSelectStatement stmt = (SQLSelectStatement) parser.parseStatementList().get(0);
String text = TestUtils.outputOracle(stmt);
assertEquals("SELECT 7, 255, 0.5, 6.34, 25e-03\n\t, 6.34F, 0.5D, -1.0D\nFROM DUAL", text);
System.out.println(text);
}
public void test_number_literal_2() throws Exception {
String sql = "SELECT BINARY_FLOAT_INFINITY FROM DUAL";
OracleStatementParser parser = new OracleStatementParser(sql);
SQLSelectStatement stmt = (SQLSelectStatement) parser.parseStatementList().get(0);
String text = TestUtils.outputOracle(stmt);
assertEquals("SELECT BINARY_FLOAT_INFINITY\nFROM DUAL", text);
System.out.println(text);
}
}
| OracleNumberLiteralTest |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/aop/AopAutoConfigurationTests.java | {
"start": 4834,
"end": 5017
} | class ____ {
private boolean called;
boolean isCalled() {
return this.called;
}
@Before("execution(* foo(..))")
void before() {
this.called = true;
}
}
| TestAspect |
java | mockito__mockito | mockito-integration-tests/inline-mocks-tests/src/test/java/org/mockitoinline/SpyWithConstructorTest.java | {
"start": 295,
"end": 755
} | class ____ {
private SomethingAbstract somethingAbstract;
@Before
public void setUp() {
somethingAbstract =
mock(
SomethingAbstract.class,
withSettings().useConstructor("foo").defaultAnswer(CALLS_REAL_METHODS));
}
@Test
public void shouldUseConstructor() {
assertEquals("foo", somethingAbstract.getValue());
}
abstract static | SpyWithConstructorTest |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java | {
"start": 1055,
"end": 1426
} | class ____ extends ScriptTestCase {
public static int classMul(int i, int j) {
return i * j;
}
public static int compileTimeBlowUp(int i, int j) {
throw new RuntimeException("Boom");
}
public static List<Object> fancyConstant(String thing1, String thing2) {
return List.of(thing1, thing2);
}
public static | BindingsTests |
java | quarkusio__quarkus | extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/vertx/HttpInstrumenterVertxTracer.java | {
"start": 14480,
"end": 14945
} | class ____ implements TextMapGetter<HttpRequest> {
@Override
public Iterable<String> keys(final HttpRequest carrier) {
return carrier.headers().names();
}
@Override
public String get(final HttpRequest carrier, final String key) {
if (carrier == null) {
return null;
}
return carrier.headers().get(key);
}
}
private static | HttpRequestTextMapGetter |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/BeforeAndAfterTestExecutionCallbackTests.java | {
"start": 13943,
"end": 14322
} | class ____ implements BeforeTestExecutionCallback, AfterTestExecutionCallback {
@Override
public void beforeTestExecution(ExtensionContext context) {
callSequence.add("barBeforeTestExecutionCallback");
}
@Override
public void afterTestExecution(ExtensionContext context) {
callSequence.add("barAfterTestExecutionCallback");
}
}
static | BarTestExecutionCallbacks |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ComparableTypeTest.java | {
"start": 2574,
"end": 2660
} | class ____ extends A {}
// BUG: Diagnostic contains: [ComparableType]
public static | B |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/stream/LangCollectorsTest.java | {
"start": 1240,
"end": 11799
} | class ____ {
int value;
private Fixture(final int value) {
this.value = value;
}
@Override
public String toString() {
return Integer.toString(value);
}
}
private static final Long _1L = Long.valueOf(1);
private static final Long _2L = Long.valueOf(2);
private static final Long _3L = Long.valueOf(3);
private static final Function<Object, String> TO_STRING = Objects::toString;
private static final Collector<Object, ?, String> JOINING_0 = LangCollectors.joining();
private static final Collector<Object, ?, String> JOINING_1 = LangCollectors.joining("-");
private static final Collector<Object, ?, String> JOINING_3 = LangCollectors.joining("-", "<", ">");
private static final Collector<Object, ?, String> JOINING_4 = LangCollectors.joining("-", "<", ">", TO_STRING);
private static final Collector<Object, ?, String> JOINING_4_NUL = LangCollectors.joining("-", "<", ">", o -> Objects.toString(o, "NUL"));
private String join0(final Object... objects) {
return LangCollectors.collect(JOINING_0, objects);
}
private String join1(final Object... objects) {
return LangCollectors.collect(JOINING_1, objects);
}
private String join3(final Object... objects) {
return LangCollectors.collect(JOINING_3, objects);
}
private String join4(final Object... objects) {
return LangCollectors.collect(JOINING_4, objects);
}
private String join4NullToString(final Object... objects) {
return LangCollectors.collect(JOINING_4_NUL, objects);
}
@Test
void testCollectStrings1Arg() {
assertEquals("", join1());
assertEquals("1", join1("1"));
assertEquals("1-2", join1("1", "2"));
assertEquals("1-2-3", join1("1", "2", "3"));
assertEquals("1-null-3", join1("1", null, "3"));
}
@Test
void testJoinCollectNonStrings0Arg() {
assertEquals("", join0());
assertEquals("1", join0(_1L));
assertEquals("12", join0(_1L, _2L));
assertEquals("123", join0(_1L, _2L, _3L));
assertEquals("1null3", join0(_1L, null, _3L));
assertEquals("12", join0(new AtomicLong(1), new AtomicLong(2)));
assertEquals("12", join0(new Fixture(1), new Fixture(2)));
}
@Test
void testJoinCollectNonStrings1Arg() {
assertEquals("", join1());
assertEquals("1", join1(_1L));
assertEquals("1-2", join1(_1L, _2L));
assertEquals("1-2-3", join1(_1L, _2L, _3L));
assertEquals("1-null-3", join1(_1L, null, _3L));
assertEquals("1-2", join1(new AtomicLong(1), new AtomicLong(2)));
assertEquals("1-2", join1(new Fixture(1), new Fixture(2)));
}
@Test
void testJoinCollectNonStrings3Args() {
assertEquals("<>", join3());
assertEquals("<1>", join3(_1L));
assertEquals("<1-2>", join3(_1L, _2L));
assertEquals("<1-2-3>", join3(_1L, _2L, _3L));
assertEquals("<1-null-3>", join3(_1L, null, _3L));
assertEquals("<1-2>", join3(new AtomicLong(1), new AtomicLong(2)));
assertEquals("<1-2>", join3(new Fixture(1), new Fixture(2)));
}
@Test
void testJoinCollectNonStrings4Args() {
assertEquals("<>", join4());
assertEquals("<1>", join4(_1L));
assertEquals("<1-2>", join4(_1L, _2L));
assertEquals("<1-2-3>", join4(_1L, _2L, _3L));
assertEquals("<1-null-3>", join4(_1L, null, _3L));
assertEquals("<1-NUL-3>", join4NullToString(_1L, null, _3L));
assertEquals("<1-2>", join4(new AtomicLong(1), new AtomicLong(2)));
assertEquals("<1-2>", join4(new Fixture(1), new Fixture(2)));
}
@Test
void testJoinCollectNullArgs() {
assertEquals("", join0((Object[]) null));
assertEquals("", join1((Object[]) null));
assertEquals("<>", join3((Object[]) null));
assertEquals("<>", join4NullToString((Object[]) null));
}
@Test
void testJoinCollectStrings0Arg() {
assertEquals("", join0());
assertEquals("1", join0("1"));
assertEquals("12", join0("1", "2"));
assertEquals("123", join0("1", "2", "3"));
assertEquals("1null3", join0("1", null, "3"));
}
@Test
void testJoinCollectStrings3Args() {
assertEquals("<>", join3());
assertEquals("<1>", join3("1"));
assertEquals("<1-2>", join3("1", "2"));
assertEquals("<1-2-3>", join3("1", "2", "3"));
assertEquals("<1-null-3>", join3("1", null, "3"));
}
@Test
void testJoinCollectStrings4Args() {
assertEquals("<>", join4());
assertEquals("<1>", join4("1"));
assertEquals("<1-2>", join4("1", "2"));
assertEquals("<1-2-3>", join4("1", "2", "3"));
assertEquals("<1-null-3>", join4("1", null, "3"));
assertEquals("<1-NUL-3>", join4NullToString("1", null, "3"));
}
@Test
void testJoiningNonStrings0Arg() {
// Stream.of()
assertEquals("", Stream.of().collect(JOINING_0));
assertEquals("1", Stream.of(_1L).collect(JOINING_0));
assertEquals("12", Stream.of(_1L, _2L).collect(JOINING_0));
assertEquals("123", Stream.of(_1L, _2L, _3L).collect(JOINING_0));
assertEquals("1null3", Stream.of(_1L, null, _3L).collect(JOINING_0));
assertEquals("12", Stream.of(new AtomicLong(1), new AtomicLong(2)).collect(JOINING_0));
assertEquals("12", Stream.of(new Fixture(1), new Fixture(2)).collect(JOINING_0));
// Arrays.stream()
assertEquals("", Arrays.stream(new Object[] {}).collect(JOINING_0));
assertEquals("1", Arrays.stream(new Long[] { _1L }).collect(JOINING_0));
assertEquals("12", Arrays.stream(new Long[] { _1L, _2L }).collect(JOINING_0));
assertEquals("123", Arrays.stream(new Long[] { _1L, _2L, _3L }).collect(JOINING_0));
assertEquals("1null3", Arrays.stream(new Long[] { _1L, null, _3L }).collect(JOINING_0));
assertEquals("12", Arrays.stream(new AtomicLong[] { new AtomicLong(1), new AtomicLong(2) }).collect(JOINING_0));
assertEquals("12", Arrays.stream(new Fixture[] { new Fixture(1), new Fixture(2) }).collect(JOINING_0));
}
@Test
void testJoiningNonStrings1Arg() {
// Stream.of()
assertEquals("", Stream.of().collect(JOINING_1));
assertEquals("1", Stream.of(_1L).collect(JOINING_1));
assertEquals("1-2", Stream.of(_1L, _2L).collect(JOINING_1));
assertEquals("1-2-3", Stream.of(_1L, _2L, _3L).collect(JOINING_1));
assertEquals("1-null-3", Stream.of(_1L, null, _3L).collect(JOINING_1));
assertEquals("1-2", Stream.of(new AtomicLong(1), new AtomicLong(2)).collect(JOINING_1));
assertEquals("1-2", Stream.of(new Fixture(1), new Fixture(2)).collect(JOINING_1));
// Arrays.stream()
assertEquals("", Arrays.stream(new Object[] {}).collect(JOINING_1));
assertEquals("1", Arrays.stream(new Long[] { _1L }).collect(JOINING_1));
assertEquals("1-2", Arrays.stream(new Long[] { _1L, _2L }).collect(JOINING_1));
assertEquals("1-2-3", Arrays.stream(new Long[] { _1L, _2L, _3L }).collect(JOINING_1));
assertEquals("1-null-3", Arrays.stream(new Long[] { _1L, null, _3L }).collect(JOINING_1));
assertEquals("1-2", Arrays.stream(new AtomicLong[] { new AtomicLong(1), new AtomicLong(2) }).collect(JOINING_1));
assertEquals("1-2", Arrays.stream(new Fixture[] { new Fixture(1), new Fixture(2) }).collect(JOINING_1));
}
@Test
void testJoiningNonStrings3Args() {
assertEquals("<>", Stream.of().collect(JOINING_3));
assertEquals("<1>", Stream.of(_1L).collect(JOINING_3));
assertEquals("<1-2>", Stream.of(_1L, _2L).collect(JOINING_3));
assertEquals("<1-2-3>", Stream.of(_1L, _2L, _3L).collect(JOINING_3));
assertEquals("<1-null-3>", Stream.of(_1L, null, _3L).collect(JOINING_3));
assertEquals("<1-2>", Stream.of(new AtomicLong(1), new AtomicLong(2)).collect(JOINING_3));
assertEquals("<1-2>", Stream.of(new Fixture(1), new Fixture(2)).collect(JOINING_3));
}
@Test
void testJoiningNonStrings4Args() {
assertEquals("<>", Stream.of().collect(JOINING_4));
assertEquals("<1>", Stream.of(_1L).collect(JOINING_4));
assertEquals("<1-2>", Stream.of(_1L, _2L).collect(JOINING_4));
assertEquals("<1-2-3>", Stream.of(_1L, _2L, _3L).collect(JOINING_4));
assertEquals("<1-null-3>", Stream.of(_1L, null, _3L).collect(JOINING_4));
assertEquals("<1-NUL-3>", Stream.of(_1L, null, _3L).collect(JOINING_4_NUL));
assertEquals("<1-2>", Stream.of(new AtomicLong(1), new AtomicLong(2)).collect(JOINING_4));
assertEquals("<1-2>", Stream.of(new Fixture(1), new Fixture(2)).collect(JOINING_4));
}
@Test
void testJoiningStrings0Arg() {
assertEquals("", Stream.of().collect(JOINING_0));
assertEquals("1", Stream.of("1").collect(JOINING_0));
assertEquals("12", Stream.of("1", "2").collect(JOINING_0));
assertEquals("123", Stream.of("1", "2", "3").collect(JOINING_0));
assertEquals("1null3", Stream.of("1", null, "3").collect(JOINING_0));
}
@Test
void testJoiningStrings1Arg() {
assertEquals("", Stream.of().collect(JOINING_1));
assertEquals("1", Stream.of("1").collect(JOINING_1));
assertEquals("1-2", Stream.of("1", "2").collect(JOINING_1));
assertEquals("1-2-3", Stream.of("1", "2", "3").collect(JOINING_1));
assertEquals("1-null-3", Stream.of("1", null, "3").collect(JOINING_1));
}
@Test
void testJoiningStrings3Args() {
assertEquals("<>", Stream.of().collect(JOINING_3));
assertEquals("<1>", Stream.of("1").collect(JOINING_3));
assertEquals("<1-2>", Stream.of("1", "2").collect(JOINING_3));
assertEquals("<1-2-3>", Stream.of("1", "2", "3").collect(JOINING_3));
assertEquals("<1-null-3>", Stream.of("1", null, "3").collect(JOINING_3));
}
@Test
void testJoiningStrings4Args() {
assertEquals("<>", Stream.of().collect(JOINING_4));
assertEquals("<1>", Stream.of("1").collect(JOINING_4));
assertEquals("<1-2>", Stream.of("1", "2").collect(JOINING_4));
assertEquals("<1-2-3>", Stream.of("1", "2", "3").collect(JOINING_4));
assertEquals("<1-null-3>", Stream.of("1", null, "3").collect(JOINING_4));
assertEquals("<1-NUL-3>", Stream.of("1", null, "3").collect(JOINING_4_NUL));
}
}
| Fixture |
java | elastic__elasticsearch | x-pack/plugin/fleet/src/test/java/org/elasticsearch/xpack/fleet/action/GetSecretRequestTests.java | {
"start": 425,
"end": 962
} | class ____ extends AbstractWireSerializingTestCase<GetSecretRequest> {
@Override
protected Writeable.Reader<GetSecretRequest> instanceReader() {
return GetSecretRequest::new;
}
@Override
protected GetSecretRequest createTestInstance() {
return new GetSecretRequest(randomAlphaOfLengthBetween(2, 10));
}
@Override
protected GetSecretRequest mutateInstance(GetSecretRequest instance) {
return new GetSecretRequest(instance.id() + randomAlphaOfLength(1));
}
}
| GetSecretRequestTests |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/FileSystemStatisticNames.java | {
"start": 1128,
"end": 1479
} | class ____ {
private FileSystemStatisticNames() {
}
/**
* How long did filesystem initialization take?
*/
public static final String FILESYSTEM_INITIALIZATION = "filesystem_initialization";
/**
* How long did filesystem close take?
*/
public static final String FILESYSTEM_CLOSE = "filesystem_close";
}
| FileSystemStatisticNames |
java | google__error-prone | test_helpers/src/test/java/com/google/errorprone/CompilationTestHelperTest.java | {
"start": 17075,
"end": 17322
} | class ____ extends BugChecker
implements CompilationUnitTreeMatcher {
@Override
public Description matchCompilationUnit(CompilationUnitTree tree, VisitorState state) {
throw new AssertionError();
}
}
}
| AssertionFailingChecker |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/output/ReplayOutput.java | {
"start": 484,
"end": 1983
} | class ____<K, V> extends CommandOutput<K, V, List<ReplayOutput.Signal>> {
/**
* Initialize a new instance that encodes and decodes keys and values using the supplied codec.
*/
public ReplayOutput() {
super((RedisCodec) StringCodec.ASCII, new ArrayList<>());
}
@Override
public void set(ByteBuffer bytes) {
output.add(new BulkString(bytes));
}
@Override
public void set(long integer) {
output.add(new Integer(integer));
}
@Override
public void set(double number) {
output.add(new Double(number));
}
@Override
public void setError(ByteBuffer error) {
error.mark();
output.add(new ErrorBytes(error));
error.reset();
super.setError(error);
}
@Override
public void setError(String error) {
output.add(new ErrorString(error));
super.setError(error);
}
@Override
public void complete(int depth) {
output.add(new Complete(depth));
}
@Override
public void multi(int count) {
output.add(new Multi(count));
}
/**
* Replay all captured signals on a {@link CommandOutput}.
*
* @param target the target {@link CommandOutput}.
*/
public void replay(CommandOutput<?, ?, ?> target) {
for (Signal signal : output) {
signal.replay(target);
}
}
/**
* Encapsulates a replayable decoding signal.
*/
public static abstract | ReplayOutput |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/reflect/MethodUtilsTest.java | {
"start": 4488,
"end": 4666
} | class ____ extends AbstractGetMatchingMethod {
@Override
public void testMethod5(final Exception exception) {
}
}
public static | GetMatchingMethodImpl |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ExponentialHistogramBlock.java | {
"start": 4799,
"end": 4941
} | interface ____ {
double readDouble();
long readLong();
BytesRef readBytesRef(BytesRef scratch);
}
}
| SerializedInput |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/lib/ZKClient.java | {
"start": 1286,
"end": 3969
} | class ____ {
private ZooKeeper zkClient;
/**
* the zookeeper client library to
* talk to zookeeper
* @param string the host
* @throws IOException if there are I/O errors.
*/
public ZKClient(String string) throws IOException {
zkClient = new ZooKeeper(string, 30000, new ZKWatcher());
}
/**
* register the service to a specific path.
*
* @param path the path in zookeeper namespace to register to
* @param data the data that is part of this registration
* @throws IOException if there are I/O errors.
* @throws InterruptedException if any thread has interrupted.
*/
public void registerService(String path, String data) throws
IOException, InterruptedException {
try {
zkClient.create(path, data.getBytes(StandardCharsets.UTF_8),
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
} catch(KeeperException ke) {
throw new IOException(ke);
}
}
/**
* unregister the service.
*
* @param path the path at which the service was registered
* @throws IOException if there are I/O errors.
* @throws InterruptedException if any thread has interrupted.
*/
public void unregisterService(String path) throws IOException,
InterruptedException {
try {
zkClient.delete(path, -1);
} catch(KeeperException ke) {
throw new IOException(ke);
}
}
/**
* list the services registered under a path.
*
* @param path the path under which services are
* registered
* @return the list of names of services registered
* @throws IOException if there are I/O errors.
* @throws InterruptedException if any thread has interrupted.
*/
public List<String> listServices(String path) throws IOException,
InterruptedException {
List<String> children = null;
try {
children = zkClient.getChildren(path, false);
} catch(KeeperException ke) {
throw new IOException(ke);
}
return children;
}
/**
* get data published by the service at the registration address.
*
* @param path the path where the service is registered
* @return the data of the registered service
* @throws IOException if there are I/O errors.
* @throws InterruptedException if any thread has interrupted.
*/
public String getServiceData(String path) throws IOException,
InterruptedException {
String data;
try {
Stat stat = new Stat();
byte[] byteData = zkClient.getData(path, false, stat);
data = new String(byteData, StandardCharsets.UTF_8);
} catch(KeeperException ke) {
throw new IOException(ke);
}
return data;
}
/**
* a watcher | ZKClient |
java | quarkusio__quarkus | extensions/flyway/deployment/src/test/java/io/quarkus/flyway/test/FlywayExtensionBaselineOnMigrateTest.java | {
"start": 387,
"end": 1446
} | class ____ {
@Inject
Flyway flyway;
static final FlywayH2TestCustomizer customizer = FlywayH2TestCustomizer
.withDbName("quarkus-flyway-baseline-on-migrate")
.withPort(11301)
.withInitSqlFile("src/test/resources/h2-init-data.sql");
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.setBeforeAllCustomizer(customizer::startH2)
.setAfterAllCustomizer(customizer::stopH2)
.withApplicationRoot((jar) -> jar
.addClass(FlywayH2TestCustomizer.class)
.addAsResource("baseline-on-migrate.properties", "application.properties"));
@Test
@DisplayName("Create history table correctly")
public void testFlywayInitialBaselineInfo() {
MigrationInfo baselineInfo = flyway.info().applied()[0];
assertEquals("0.0.1", baselineInfo.getVersion().getVersion());
assertEquals("Initial description for test", baselineInfo.getDescription());
}
}
| FlywayExtensionBaselineOnMigrateTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetQueueInfoResponse.java | {
"start": 1693,
"end": 2248
} | class ____ {
@Private
@Unstable
public static GetQueueInfoResponse newInstance(QueueInfo queueInfo) {
GetQueueInfoResponse response = Records.newRecord(GetQueueInfoResponse.class);
response.setQueueInfo(queueInfo);
return response;
}
/**
* Get the <code>QueueInfo</code> for the specified queue.
* @return <code>QueueInfo</code> for the specified queue
*/
@Public
@Stable
public abstract QueueInfo getQueueInfo();
@Private
@Unstable
public abstract void setQueueInfo(QueueInfo queueInfo);
}
| GetQueueInfoResponse |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/DispatcherServletPathTests.java | {
"start": 856,
"end": 2617
} | class ____ {
@Test
void getRelativePathReturnsRelativePath() {
assertThat(((DispatcherServletPath) () -> "spring").getRelativePath("boot")).isEqualTo("spring/boot");
assertThat(((DispatcherServletPath) () -> "spring/").getRelativePath("boot")).isEqualTo("spring/boot");
assertThat(((DispatcherServletPath) () -> "spring").getRelativePath("/boot")).isEqualTo("spring/boot");
}
@Test
void getPrefixWhenHasSimplePathReturnPath() {
assertThat(((DispatcherServletPath) () -> "spring").getPrefix()).isEqualTo("spring");
}
@Test
void getPrefixWhenHasPatternRemovesPattern() {
assertThat(((DispatcherServletPath) () -> "spring/*.do").getPrefix()).isEqualTo("spring");
}
@Test
void getPathWhenPathEndsWithSlashRemovesSlash() {
assertThat(((DispatcherServletPath) () -> "spring/").getPrefix()).isEqualTo("spring");
}
@Test
void getServletUrlMappingWhenPathIsEmptyReturnsSlash() {
assertThat(((DispatcherServletPath) () -> "").getServletUrlMapping()).isEqualTo("/");
}
@Test
void getServletUrlMappingWhenPathIsSlashReturnsSlash() {
assertThat(((DispatcherServletPath) () -> "/").getServletUrlMapping()).isEqualTo("/");
}
@Test
void getServletUrlMappingWhenPathContainsStarReturnsPath() {
assertThat(((DispatcherServletPath) () -> "spring/*.do").getServletUrlMapping()).isEqualTo("spring/*.do");
}
@Test
void getServletUrlMappingWhenHasPathNotEndingSlashReturnsSlashStarPattern() {
assertThat(((DispatcherServletPath) () -> "spring/boot").getServletUrlMapping()).isEqualTo("spring/boot/*");
}
@Test
void getServletUrlMappingWhenHasPathEndingWithSlashReturnsSlashStarPattern() {
assertThat(((DispatcherServletPath) () -> "spring/boot/").getServletUrlMapping()).isEqualTo("spring/boot/*");
}
}
| DispatcherServletPathTests |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java | {
"start": 654,
"end": 2291
} | class ____ extends AStatement {
private final ANode initializerNode;
private final AExpression conditionNode;
private final AExpression afterthoughtNode;
private final SBlock blockNode;
public SFor(
int identifier,
Location location,
ANode initializerNode,
AExpression conditionNode,
AExpression afterthoughtNode,
SBlock blockNode
) {
super(identifier, location);
this.initializerNode = initializerNode;
this.conditionNode = conditionNode;
this.afterthoughtNode = afterthoughtNode;
this.blockNode = blockNode;
}
public ANode getInitializerNode() {
return initializerNode;
}
public AExpression getConditionNode() {
return conditionNode;
}
public AExpression getAfterthoughtNode() {
return afterthoughtNode;
}
public SBlock getBlockNode() {
return blockNode;
}
@Override
public <Scope> void visit(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
userTreeVisitor.visitFor(this, scope);
}
@Override
public <Scope> void visitChildren(UserTreeVisitor<Scope> userTreeVisitor, Scope scope) {
if (initializerNode != null) {
initializerNode.visit(userTreeVisitor, scope);
}
if (conditionNode != null) {
conditionNode.visit(userTreeVisitor, scope);
}
if (afterthoughtNode != null) {
afterthoughtNode.visit(userTreeVisitor, scope);
}
if (blockNode != null) {
blockNode.visit(userTreeVisitor, scope);
}
}
}
| SFor |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/similarity/NonNegativeScoresSimilarityTests.java | {
"start": 869,
"end": 1983
} | class ____ extends ESTestCase {
public void testBasics() {
Similarity negativeScoresSim = new Similarity() {
@Override
public long computeNorm(FieldInvertState state) {
return state.getLength();
}
@Override
public SimScorer scorer(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
return new SimScorer() {
@Override
public float score(float freq, long norm) {
return freq - 5;
}
};
}
};
Similarity assertingSimilarity = new NonNegativeScoresSimilarity(negativeScoresSim);
SimScorer scorer = assertingSimilarity.scorer(1f, null);
assertEquals(2f, scorer.score(7f, 1L), 0f);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> scorer.score(2f, 1L));
assertThat(e.getMessage(), Matchers.containsString("Similarities must not produce negative scores"));
}
}
| NonNegativeScoresSimilarityTests |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LocalSourceOperator.java | {
"start": 1278,
"end": 1343
} | interface ____ extends Supplier<Block[]> {}
public | BlockSupplier |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-dubbo/src/test/java/org/apache/dubbo/rpc/protocol/dubbo/support/ProtocolUtils.java | {
"start": 1204,
"end": 2923
} | class ____ {
public static <T> T refer(Class<T> type, String url) {
return refer(type, URL.valueOf(url));
}
public static <T> T refer(Class<T> type, URL url) {
FrameworkModel frameworkModel = url.getOrDefaultFrameworkModel();
ProxyFactory proxy =
frameworkModel.getExtensionLoader(ProxyFactory.class).getAdaptiveExtension();
Protocol protocol = frameworkModel.getExtensionLoader(Protocol.class).getAdaptiveExtension();
return proxy.getProxy(protocol.refer(type, url));
}
public static Invoker<?> referInvoker(Class<?> type, URL url) {
FrameworkModel frameworkModel = url.getOrDefaultFrameworkModel();
ProxyFactory proxy =
frameworkModel.getExtensionLoader(ProxyFactory.class).getAdaptiveExtension();
Protocol protocol = frameworkModel.getExtensionLoader(Protocol.class).getAdaptiveExtension();
return (Invoker<?>) protocol.refer(type, url);
}
public static <T> Exporter<T> export(T instance, Class<T> type, String url) {
return export(instance, type, URL.valueOf(url));
}
public static <T> Exporter<T> export(T instance, Class<T> type, URL url) {
FrameworkModel frameworkModel = url.getOrDefaultFrameworkModel();
ProxyFactory proxy =
frameworkModel.getExtensionLoader(ProxyFactory.class).getAdaptiveExtension();
Protocol protocol = frameworkModel.getExtensionLoader(Protocol.class).getAdaptiveExtension();
return protocol.export(proxy.getInvoker(instance, type, url));
}
public static void closeAll() {
DubboProtocol.getDubboProtocol().destroy();
FrameworkModel.destroyAll();
}
}
| ProtocolUtils |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/server/csrf/ServerCsrfTokenRequestResolver.java | {
"start": 807,
"end": 1091
} | interface ____ capable of resolving the token value of a
* {@link CsrfToken} from the provided {@code ServerWebExchange}. Used by the
* {@link CsrfWebFilter}.
*
* @author Steve Riesenberg
* @since 5.8
* @see ServerCsrfTokenRequestAttributeHandler
*/
@FunctionalInterface
public | are |
java | apache__camel | components/camel-cxf/camel-cxf-soap/src/test/java/org/apache/camel/component/cxf/jaxws/CxfProducerSoapFaultTest.java | {
"start": 1721,
"end": 4310
} | class ____ {
private static final String JAXWS_SERVER_ADDRESS
= "http://localhost:" + CXFTestSupport.getPort1() + "/CxfProducerSoapFaultTest/test";
private static final String JAXWS_ENDPOINT_URI
= "cxf://" + JAXWS_SERVER_ADDRESS + "?serviceClass=org.apache.hello_world_soap_http.Greeter";
protected CamelContext camelContext;
protected ProducerTemplate template;
@BeforeAll
public static void startService() throws Exception {
GreeterImpl greeterImpl = new GreeterImpl();
Endpoint.publish(JAXWS_SERVER_ADDRESS, greeterImpl);
}
@BeforeEach
public void setUp() throws Exception {
camelContext = new DefaultCamelContext();
camelContext.start();
template = camelContext.createProducerTemplate();
}
@AfterEach
public void tearDown() throws Exception {
template.stop();
camelContext.stop();
}
@Test
public void testAsyncSoapFault() throws Exception {
invokeSoapFault(false);
}
@Test
public void testSyncSoapFault() throws Exception {
invokeSoapFault(true);
}
private void invokeSoapFault(boolean sync) throws Exception {
String cxfEndpointURI = JAXWS_ENDPOINT_URI;
if (sync) {
cxfEndpointURI = cxfEndpointURI + "&synchronous=true";
}
Exchange exchange = sendJaxWsMessage(cxfEndpointURI, "BadRecordLitFault", "testDocLitFault");
Exception exception = exchange.getException();
// assert we got the exception first
assertNotNull(exception, "except to get the exception");
assertTrue(exception instanceof BadRecordLitFault, "Get a wrong soap fault");
// check out the message header which is copied from in message
String fileName = exchange.getMessage().getHeader(Exchange.FILE_NAME, String.class);
assertEquals("testFile", fileName, "Should get the file name from out message header");
}
private Exchange sendJaxWsMessage(final String uri, final String message, final String operation) {
Exchange exchange = template.request(uri, new Processor() {
public void process(final Exchange exchange) {
final List<String> params = new ArrayList<>();
params.add(message);
exchange.getIn().setBody(params);
exchange.getIn().setHeader(CxfConstants.OPERATION_NAME, operation);
exchange.getIn().setHeader(Exchange.FILE_NAME, "testFile");
}
});
return exchange;
}
}
| CxfProducerSoapFaultTest |
java | google__dagger | javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java | {
"start": 32166,
"end": 32553
} | interface ____ {",
" B b();",
"}");
Source bComponent =
CompilerTests.javaSource(
"test.B",
"package test;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"import dagger.Subcomponent;",
"",
"@Subcomponent(modules = B.BModule.class)",
" | A |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/FileSystemTableSink.java | {
"start": 5154,
"end": 18684
} | class ____ extends AbstractFileSystemTable
implements DynamicTableSink, SupportsPartitioning, SupportsOverwrite {
// For compaction reading
@Nullable private final DecodingFormat<BulkFormat<RowData, FileSourceSplit>> bulkReaderFormat;
@Nullable private final DecodingFormat<DeserializationSchema<RowData>> deserializationFormat;
// For Writing
@Nullable private final EncodingFormat<BulkWriter.Factory<RowData>> bulkWriterFormat;
@Nullable private final EncodingFormat<SerializationSchema<RowData>> serializationFormat;
private boolean overwrite = false;
private boolean dynamicGrouping = false;
private LinkedHashMap<String, String> staticPartitions = new LinkedHashMap<>();
@Nullable private Integer configuredParallelism;
FileSystemTableSink(
ObjectIdentifier tableIdentifier,
DataType physicalRowDataType,
List<String> partitionKeys,
ReadableConfig tableOptions,
@Nullable DecodingFormat<BulkFormat<RowData, FileSourceSplit>> bulkReaderFormat,
@Nullable DecodingFormat<DeserializationSchema<RowData>> deserializationFormat,
@Nullable EncodingFormat<BulkWriter.Factory<RowData>> bulkWriterFormat,
@Nullable EncodingFormat<SerializationSchema<RowData>> serializationFormat) {
super(tableIdentifier, physicalRowDataType, partitionKeys, tableOptions);
this.bulkReaderFormat = bulkReaderFormat;
this.deserializationFormat = deserializationFormat;
if (Stream.of(bulkWriterFormat, serializationFormat).allMatch(Objects::isNull)) {
String identifier = tableOptions.get(FactoryUtil.FORMAT);
throw new ValidationException(
String.format(
"Could not find any format factory for identifier '%s' in the classpath.",
identifier));
}
this.bulkWriterFormat = bulkWriterFormat;
this.serializationFormat = serializationFormat;
this.configuredParallelism =
this.tableOptions.get(FileSystemConnectorOptions.SINK_PARALLELISM);
}
@Override
public SinkRuntimeProvider getSinkRuntimeProvider(Context sinkContext) {
return new DataStreamSinkProvider() {
@Override
public DataStreamSink<?> consumeDataStream(
ProviderContext providerContext, DataStream<RowData> dataStream) {
return consume(providerContext, dataStream, sinkContext);
}
};
}
private DataStreamSink<?> consume(
ProviderContext providerContext, DataStream<RowData> dataStream, Context sinkContext) {
final int inputParallelism = dataStream.getParallelism();
final int parallelism = Optional.ofNullable(configuredParallelism).orElse(inputParallelism);
boolean parallelismConfigued = configuredParallelism != null;
if (sinkContext.isBounded()) {
return createBatchSink(dataStream, sinkContext, parallelism, parallelismConfigued);
} else {
if (overwrite) {
throw new IllegalStateException("Streaming mode not support overwrite.");
}
return createStreamingSink(
providerContext, dataStream, sinkContext, parallelism, parallelismConfigued);
}
}
private RowDataPartitionComputer partitionComputer() {
return new RowDataPartitionComputer(
defaultPartName,
DataType.getFieldNames(physicalRowDataType).toArray(new String[0]),
DataType.getFieldDataTypes(physicalRowDataType).toArray(new DataType[0]),
partitionKeys.toArray(new String[0]));
}
private DataStreamSink<RowData> createBatchSink(
DataStream<RowData> inputStream,
Context sinkContext,
final int parallelism,
boolean parallelismConfigured) {
FileSystemOutputFormat.Builder<RowData> builder = new FileSystemOutputFormat.Builder<>();
builder.setPartitionComputer(partitionComputer())
.setDynamicGrouped(dynamicGrouping)
.setPartitionColumns(partitionKeys.toArray(new String[0]))
.setFormatFactory(createOutputFormatFactory(sinkContext))
.setMetaStoreFactory(new EmptyMetaStoreFactory(path))
.setOverwrite(overwrite)
.setStaticPartitions(staticPartitions)
.setPath(path)
.setOutputFileConfig(
OutputFileConfig.builder()
.withPartPrefix("part-" + UUID.randomUUID())
.build())
.setPartitionCommitPolicyFactory(
new PartitionCommitPolicyFactory(
tableOptions.get(
FileSystemConnectorOptions
.SINK_PARTITION_COMMIT_POLICY_KIND),
tableOptions.get(
FileSystemConnectorOptions
.SINK_PARTITION_COMMIT_POLICY_CLASS),
tableOptions.get(
FileSystemConnectorOptions
.SINK_PARTITION_COMMIT_SUCCESS_FILE_NAME),
tableOptions.get(
FileSystemConnectorOptions
.SINK_PARTITION_COMMIT_POLICY_CLASS_PARAMETERS)));
DataStreamSink<RowData> sink = inputStream.writeUsingOutputFormat(builder.build());
sink.getTransformation().setParallelism(parallelism, parallelismConfigured);
return sink.name("Filesystem");
}
private DataStreamSink<?> createStreamingSink(
ProviderContext providerContext,
DataStream<RowData> dataStream,
Context sinkContext,
final int parallelism,
boolean parallelismConfigured) {
FileSystemFactory fsFactory = FileSystem::get;
RowDataPartitionComputer computer = partitionComputer();
boolean autoCompaction = tableOptions.get(AUTO_COMPACTION);
Object writer = createWriter(sinkContext);
boolean isEncoder = writer instanceof Encoder;
TableBucketAssigner assigner = new TableBucketAssigner(computer);
TableRollingPolicy rollingPolicy =
new TableRollingPolicy(
!isEncoder || autoCompaction,
tableOptions.get(SINK_ROLLING_POLICY_FILE_SIZE).getBytes(),
tableOptions.get(SINK_ROLLING_POLICY_ROLLOVER_INTERVAL).toMillis(),
tableOptions.get(SINK_ROLLING_POLICY_INACTIVITY_INTERVAL).toMillis());
String randomPrefix = "part-" + UUID.randomUUID().toString();
OutputFileConfig.OutputFileConfigBuilder fileNamingBuilder = OutputFileConfig.builder();
fileNamingBuilder =
autoCompaction
? fileNamingBuilder.withPartPrefix(convertToUncompacted(randomPrefix))
: fileNamingBuilder.withPartPrefix(randomPrefix);
OutputFileConfig fileNamingConfig = fileNamingBuilder.build();
BucketsBuilder<RowData, String, ? extends BucketsBuilder<RowData, ?, ?>> bucketsBuilder;
if (isEncoder) {
//noinspection unchecked
bucketsBuilder =
StreamingFileSink.forRowFormat(
path,
new ProjectionEncoder((Encoder<RowData>) writer, computer))
.withBucketAssigner(assigner)
.withOutputFileConfig(fileNamingConfig)
.withRollingPolicy(rollingPolicy);
} else {
//noinspection unchecked
bucketsBuilder =
StreamingFileSink.forBulkFormat(
path,
new ProjectionBulkFactory(
(BulkWriter.Factory<RowData>) writer, computer))
.withBucketAssigner(assigner)
.withOutputFileConfig(fileNamingConfig)
.withRollingPolicy(rollingPolicy);
}
long bucketCheckInterval = tableOptions.get(SINK_ROLLING_POLICY_CHECK_INTERVAL).toMillis();
DataStream<PartitionCommitInfo> writerStream;
if (autoCompaction) {
long compactionSize =
tableOptions
.getOptional(COMPACTION_FILE_SIZE)
.orElse(tableOptions.get(SINK_ROLLING_POLICY_FILE_SIZE))
.getBytes();
CompactReader.Factory<RowData> reader =
createCompactReaderFactory(sinkContext)
.orElseThrow(
() ->
new TableException(
"Please implement available reader for compaction:"
+ " BulkFormat, FileInputFormat."));
writerStream =
StreamingSink.compactionWriter(
providerContext,
dataStream,
bucketCheckInterval,
bucketsBuilder,
fsFactory,
path,
reader,
compactionSize,
parallelism,
parallelismConfigured);
} else {
writerStream =
StreamingSink.writer(
providerContext,
dataStream,
bucketCheckInterval,
bucketsBuilder,
parallelism,
partitionKeys,
tableOptions,
parallelismConfigured);
}
return StreamingSink.sink(
providerContext,
writerStream,
path,
tableIdentifier,
partitionKeys,
new EmptyMetaStoreFactory(path),
fsFactory,
tableOptions);
}
private Optional<CompactReader.Factory<RowData>> createCompactReaderFactory(Context context) {
// Compute producedDataType (including partition fields) and physicalDataType (excluding
// partition fields)
final DataType producedDataType = physicalRowDataType;
final DataType physicalDataType =
DataType.getFields(producedDataType).stream()
.filter(field -> !partitionKeys.contains(field.getName()))
.collect(Collectors.collectingAndThen(Collectors.toList(), DataTypes::ROW));
if (bulkReaderFormat != null) {
final BulkFormat<RowData, FileSourceSplit> format =
new FileInfoExtractorBulkFormat(
bulkReaderFormat.createRuntimeDecoder(
createSourceContext(context), physicalDataType),
producedDataType,
context.createTypeInformation(producedDataType),
Collections.emptyMap(),
partitionKeys,
defaultPartName);
return Optional.of(CompactBulkReader.factory(format));
} else if (deserializationFormat != null) {
final DeserializationSchema<RowData> decoder =
deserializationFormat.createRuntimeDecoder(
createSourceContext(context), physicalDataType);
final BulkFormat<RowData, FileSourceSplit> format =
new FileInfoExtractorBulkFormat(
new DeserializationSchemaAdapter(decoder),
producedDataType,
context.createTypeInformation(producedDataType),
Collections.emptyMap(),
partitionKeys,
defaultPartName);
return Optional.of(CompactBulkReader.factory(format));
}
return Optional.empty();
}
private DynamicTableSource.Context createSourceContext(Context context) {
return new DynamicTableSource.Context() {
@Override
public <T> TypeInformation<T> createTypeInformation(DataType producedDataType) {
return context.createTypeInformation(producedDataType);
}
@Override
public <T> TypeInformation<T> createTypeInformation(LogicalType producedLogicalType) {
return context.createTypeInformation(producedLogicalType);
}
@Override
public DynamicTableSource.DataStructureConverter createDataStructureConverter(
DataType producedDataType) {
// This method cannot be implemented without changing the
// DynamicTableSink.DataStructureConverter | FileSystemTableSink |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/JobMasterServiceProcess.java | {
"start": 1197,
"end": 2510
} | interface ____ extends AutoCloseableAsync {
static JobMasterServiceProcess waitingForLeadership() {
return WaitingForLeadership.INSTANCE;
}
/** The leader session id of this process. */
UUID getLeaderSessionId();
/** True iff the {@link JobMasterService} has been initialized and is running. */
boolean isInitializedAndRunning();
/**
* Future which is completed with the {@link JobMasterGateway} once the {@link JobMasterService}
* has been created. Upon closing of the process, this future is completed exceptionally if it
* is still uncompleted.
*/
CompletableFuture<JobMasterGateway> getJobMasterGatewayFuture();
/**
* Future which is completed with the result of job execution. The job's result can be the
* {@link JobManagerRunnerResult}, {@link JobNotFinishedException} if the job was not finished
* or an {@link Throwable} if an unexpected failure occurs. Upon closing of the process, this
* future is completed exceptionally with {@link JobNotFinishedException}.
*/
CompletableFuture<JobManagerRunnerResult> getResultFuture();
/** Future which is completed with the {@link JobMasterService} address once it is created. */
CompletableFuture<String> getLeaderAddressFuture();
| JobMasterServiceProcess |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/fail/Fail_failBecauseExceptionWasNotThrown_Test.java | {
"start": 904,
"end": 1555
} | class ____ {
@Test
void should_include_message_built_with_given_exception_name() {
// WHEN
var assertionError = expectAssertionError(() -> failBecauseExceptionWasNotThrown(NullPointerException.class));
// THEN
then(assertionError).hasMessage("NullPointerException should have been thrown");
}
@Test
void should_include_message_built_with_given_throwable_name() {
// WHEN
var assertionError = expectAssertionError(() -> failBecauseExceptionWasNotThrown(OutOfMemoryError.class));
// THEN
then(assertionError).hasMessage("OutOfMemoryError should have been thrown");
}
}
| Fail_failBecauseExceptionWasNotThrown_Test |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 683754,
"end": 686445
} | class ____ extends YamlDeserializerBase<OpenIdConnectDefinition> {
public OpenIdConnectDefinitionDeserializer() {
super(OpenIdConnectDefinition.class);
}
@Override
protected OpenIdConnectDefinition newInstance() {
return new OpenIdConnectDefinition();
}
@Override
protected boolean setProperty(OpenIdConnectDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "description": {
String val = asText(node);
target.setDescription(val);
break;
}
case "key": {
String val = asText(node);
target.setKey(val);
break;
}
case "url": {
String val = asText(node);
target.setUrl(val);
break;
}
default: {
return false;
}
}
return true;
}
}
@YamlType(
nodes = "optimisticLockRetryPolicy",
types = org.apache.camel.model.OptimisticLockRetryPolicyDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Optimistic Lock Retry Policy",
description = "To configure optimistic locking",
deprecated = false,
properties = {
@YamlProperty(name = "exponentialBackOff", type = "boolean", defaultValue = "true", description = "Enable exponential backoff", displayName = "Exponential Back Off"),
@YamlProperty(name = "maximumRetries", type = "number", description = "Sets the maximum number of retries", displayName = "Maximum Retries"),
@YamlProperty(name = "maximumRetryDelay", type = "string", defaultValue = "1000", description = "Sets the upper value of retry in millis between retries, when using exponential or random backoff", displayName = "Maximum Retry Delay"),
@YamlProperty(name = "randomBackOff", type = "boolean", defaultValue = "false", description = "Enables random backoff", displayName = "Random Back Off"),
@YamlProperty(name = "retryDelay", type = "string", defaultValue = "50", description = "Sets the delay in millis between retries", displayName = "Retry Delay")
}
)
public static | OpenIdConnectDefinitionDeserializer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/formatstring/LenientFormatStringValidationTest.java | {
"start": 3216,
"end": 3615
} | class ____ {
void test() {
Preconditions.checkState(false, "expected %s = %s", 1, 1);
}
}
""")
.doTest();
}
@Test
public void tooManyArguments_fixWithNonLiteral() {
refactoring
.addInputLines(
"Test.java",
"""
import com.google.common.base.Preconditions;
| Test |
java | junit-team__junit5 | junit-platform-suite-api/src/main/java/org/junit/platform/suite/api/ConfigurationParametersResource.java | {
"start": 1419,
"end": 1583
} | interface ____ {
/**
* The classpath location for the desired properties file; never {@code null} or blank.
*/
String value();
}
| ConfigurationParametersResource |
java | google__dagger | hilt-android-testing/main/java/dagger/hilt/android/internal/testing/TestComponentDataSupplier.java | {
"start": 2309,
"end": 2986
} | class ____ "
+ " annotated with @HiltAndroidTest and that the processor is running over your"
+ " test.",
testClass.getSimpleName(),
generatedClassName);
}
private static String getEnclosedClassName(Class<?> testClass) {
StringBuilder sb = new StringBuilder();
Class<?> currClass = testClass;
while (currClass != null) {
Class<?> enclosingClass = currClass.getEnclosingClass();
if (enclosingClass != null) {
sb.insert(0, "_" + currClass.getSimpleName());
} else {
sb.insert(0, currClass.getCanonicalName());
}
currClass = enclosingClass;
}
return sb.toString();
}
}
| is |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregateExpressionTest.java | {
"start": 1132,
"end": 2196
} | class ____ extends ContextTestSupport {
@Test
public void testAggregateExpressionSize() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedBodiesReceived("A+A", "B+B", "Z");
template.sendBody("direct:start", "A");
template.sendBody("direct:start", "B");
template.sendBody("direct:start", "A");
template.sendBody("direct:start", "B");
// send the last one with the batch size property
template.sendBodyAndProperty("direct:start", "Z", Exchange.BATCH_SIZE, 5);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// START SNIPPET: e1
from("direct:start").aggregate(body(), new BodyInAggregatingStrategy()).completionFromBatchConsumer()
.to("mock:result");
// END SNIPPET: e1
}
};
}
}
| AggregateExpressionTest |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/TypeEqualsChecker.java | {
"start": 2059,
"end": 2847
} | class ____ extends BugChecker implements MethodInvocationTreeMatcher {
private static final TypePredicate TYPE_MIRROR =
isDescendantOf("javax.lang.model.type.TypeMirror");
private static final Matcher<MethodInvocationTree> TYPE_EQUALS =
anyOf(
toType(MethodInvocationTree.class, instanceMethod().onClass(TYPE_MIRROR).named("equals")),
allOf(
staticEqualsInvocation(),
argument(0, typePredicateMatcher(TYPE_MIRROR)),
argument(1, typePredicateMatcher(TYPE_MIRROR))));
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (!TYPE_EQUALS.matches(tree, state)) {
return Description.NO_MATCH;
}
return describeMatch(tree);
}
}
| TypeEqualsChecker |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/http/client/bind/method/NameAuthorization.java | {
"start": 473,
"end": 645
} | interface ____ {
@AliasFor(member = "name")
String value() default "";
@AliasFor(member = "value")
String name() default "";
}
//end::clazz[]
| NameAuthorization |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/RestartStrategyDescriptionUtils.java | {
"start": 1134,
"end": 5490
} | class ____ {
/**
* Returns a descriptive string of the restart strategy configured in the given Configuration
* object.
*
* @param configuration the Configuration to extract the restart strategy from
* @return a description of the restart strategy
*/
public static String getRestartStrategyDescription(Configuration configuration) {
final Optional<String> restartStrategyNameOptional =
configuration.getOptional(RestartStrategyOptions.RESTART_STRATEGY);
return restartStrategyNameOptional
.map(
restartStrategyName -> {
switch (RestartStrategyOptions.RestartStrategyType.of(
restartStrategyName.toLowerCase())) {
case NO_RESTART_STRATEGY:
return "Restart deactivated.";
case FIXED_DELAY:
return getFixedDelayDescription(configuration);
case FAILURE_RATE:
return getFailureRateDescription(configuration);
case EXPONENTIAL_DELAY:
return getExponentialDelayDescription(configuration);
default:
throw new IllegalArgumentException(
"Unknown restart strategy "
+ restartStrategyName
+ ".");
}
})
.orElse("Cluster level default restart strategy");
}
private static String getExponentialDelayDescription(Configuration configuration) {
Duration initialBackoff =
configuration.get(
RestartStrategyOptions.RESTART_STRATEGY_EXPONENTIAL_DELAY_INITIAL_BACKOFF);
Duration maxBackoff =
configuration.get(
RestartStrategyOptions.RESTART_STRATEGY_EXPONENTIAL_DELAY_MAX_BACKOFF);
double backoffMultiplier =
configuration.get(
RestartStrategyOptions
.RESTART_STRATEGY_EXPONENTIAL_DELAY_BACKOFF_MULTIPLIER);
Duration resetBackoffThreshold =
configuration.get(
RestartStrategyOptions
.RESTART_STRATEGY_EXPONENTIAL_DELAY_RESET_BACKOFF_THRESHOLD);
double jitter =
configuration.get(
RestartStrategyOptions.RESTART_STRATEGY_EXPONENTIAL_DELAY_JITTER_FACTOR);
return String.format(
"Restart with exponential delay: starting at %s, increasing by %f, with maximum %s. "
+ "Delay resets after %s with jitter %f",
initialBackoff, backoffMultiplier, maxBackoff, resetBackoffThreshold, jitter);
}
private static String getFailureRateDescription(Configuration configuration) {
int maxFailures =
configuration.get(
RestartStrategyOptions
.RESTART_STRATEGY_FAILURE_RATE_MAX_FAILURES_PER_INTERVAL);
Duration failureRateInterval =
configuration.get(
RestartStrategyOptions.RESTART_STRATEGY_FAILURE_RATE_FAILURE_RATE_INTERVAL);
Duration failureRateDelay =
configuration.get(RestartStrategyOptions.RESTART_STRATEGY_FAILURE_RATE_DELAY);
return String.format(
"Failure rate restart with maximum of %d failures within interval %s and fixed delay %s.",
maxFailures, failureRateInterval, failureRateDelay);
}
private static String getFixedDelayDescription(Configuration configuration) {
int attempts =
configuration.get(RestartStrategyOptions.RESTART_STRATEGY_FIXED_DELAY_ATTEMPTS);
Duration delay =
configuration.get(RestartStrategyOptions.RESTART_STRATEGY_FIXED_DELAY_DELAY);
return String.format(
"Restart with fixed delay (%s). #%d restart attempts.", delay, attempts);
}
}
| RestartStrategyDescriptionUtils |
java | quarkusio__quarkus | integration-tests/hibernate-reactive-db2/src/main/java/io/quarkus/it/hibernate/reactive/db2/HibernateReactiveDB2TestEndpoint.java | {
"start": 390,
"end": 3707
} | class ____ {
@Inject
Mutiny.SessionFactory sessionFactory;
// Injecting a Vert.x Pool is not required, it us only used to
// independently validate the contents of the database for the test
@Inject
Pool db2Pool;
@GET
@Path("/reactiveFindMutiny")
public Uni<GuineaPig> reactiveFindMutiny() {
final GuineaPig expectedPig = new GuineaPig(5, "Aloi");
return populateDB()
.chain(() -> sessionFactory.withSession(s -> s.find(GuineaPig.class, expectedPig.getId())));
}
@GET
@Path("/reactivePersist")
public Uni<String> reactivePersist() {
return sessionFactory.withTransaction(s -> s.persist(new GuineaPig(10, "Tulip")))
.chain(() -> selectNameFromId(10));
}
@GET
@Path("/reactiveRemoveTransientEntity")
public Uni<String> reactiveRemoveTransientEntity() {
return populateDB()
.chain(() -> selectNameFromId(5))
.map(name -> {
if (name == null) {
throw new AssertionError("Database was not populated properly");
}
return name;
})
.chain(() -> sessionFactory
.withTransaction(s -> s.merge(new GuineaPig(5, "Aloi")).chain(s::remove)))
.chain(() -> selectNameFromId(5))
.onItem().ifNotNull().transform(result -> result)
.onItem().ifNull().continueWith("OK");
}
@GET
@Path("/reactiveRemoveManagedEntity")
public Uni<String> reactiveRemoveManagedEntity() {
return populateDB()
.chain(() -> sessionFactory
.withTransaction(s -> s.find(GuineaPig.class, 5).chain(s::remove)))
.chain(() -> selectNameFromId(5))
.onItem().ifNotNull().transform(result -> result)
.onItem().ifNull().continueWith("OK");
}
@GET
@Path("/reactiveUpdate")
public Uni<String> reactiveUpdate() {
final String NEW_NAME = "Tina";
return populateDB()
.chain(() -> sessionFactory.withTransaction(s -> s.find(GuineaPig.class, 5)
.invoke(pig -> {
if (NEW_NAME.equals(pig.getName())) {
throw new AssertionError("Pig already had name " + NEW_NAME);
}
pig.setName(NEW_NAME);
})))
.chain(() -> selectNameFromId(5));
}
private Uni<RowSet<Row>> populateDB() {
return db2Pool.query("DELETE FROM Pig").execute()
.chain(() -> db2Pool.preparedQuery("INSERT INTO Pig (id, name) VALUES (5, 'Aloi')").execute());
}
private Uni<String> selectNameFromId(Integer id) {
return db2Pool.preparedQuery("SELECT name FROM Pig WHERE id = ?").execute(Tuple.of(id)).map(rowSet -> {
if (rowSet.size() == 1) {
return rowSet.iterator().next().getString(0);
} else if (rowSet.size() > 1) {
throw new AssertionError("More than one result returned: " + rowSet.size());
} else {
return null; // Size 0
}
});
}
}
| HibernateReactiveDB2TestEndpoint |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/name/NamePrefixCollisionTest.java | {
"start": 1059,
"end": 1124
} | class ____ {
}
@Named("x.y")
@Dependent
static | Alpha |
java | quarkusio__quarkus | extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/NamedPemKeyStoreTest.java | {
"start": 845,
"end": 2215
} | class ____ {
private static final String configuration = """
quarkus.tls.http.key-store.pem.foo.cert=target/certs/test-formats.crt
quarkus.tls.http.key-store.pem.foo.key=target/certs/test-formats.key
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.add(new StringAsset(configuration), "application.properties"));
@Inject
TlsConfigurationRegistry certificates;
@Test
void test() throws KeyStoreException, CertificateParsingException {
TlsConfiguration def = certificates.getDefault().orElseThrow();
TlsConfiguration named = certificates.get("http").orElseThrow();
assertThat(def.getKeyStoreOptions()).isNull();
assertThat(def.getKeyStore()).isNull();
assertThat(named.getKeyStoreOptions()).isNotNull();
assertThat(named.getKeyStore()).isNotNull();
X509Certificate certificate = (X509Certificate) named.getKeyStore().getCertificate("dummy-entry-0");
assertThat(certificate).isNotNull();
assertThat(certificate.getSubjectAlternativeNames()).anySatisfy(l -> {
assertThat(l.get(0)).isEqualTo(2);
assertThat(l.get(1)).isEqualTo("localhost");
});
}
}
| NamedPemKeyStoreTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java | {
"start": 1223,
"end": 3389
} | class ____ {
private Configuration conf = new Configuration();
@Test
public void tetGetDefaultUri() {
conf.set(FS_DEFAULT_NAME_KEY, "hdfs://nn_host");
URI uri = FileSystem.getDefaultUri(conf);
assertThat(uri.getScheme()).isEqualTo("hdfs");
assertThat(uri.getAuthority()).isEqualTo("nn_host");
}
@Test
public void tetGetDefaultUriWithPort() {
conf.set(FS_DEFAULT_NAME_KEY, "hdfs://nn_host:5432");
URI uri = FileSystem.getDefaultUri(conf);
assertThat(uri.getScheme()).isEqualTo("hdfs");
assertThat(uri.getAuthority()).isEqualTo("nn_host:5432");
}
@Test
public void tetGetDefaultUriTrailingSlash() {
conf.set(FS_DEFAULT_NAME_KEY, "hdfs://nn_host/");
URI uri = FileSystem.getDefaultUri(conf);
assertThat(uri.getScheme()).isEqualTo("hdfs");
assertThat(uri.getAuthority()).isEqualTo("nn_host");
}
@Test
public void tetGetDefaultUriNoScheme() {
conf.set(FS_DEFAULT_NAME_KEY, "nn_host");
URI uri = FileSystem.getDefaultUri(conf);
assertThat(uri.getScheme()).isEqualTo("hdfs");
assertThat(uri.getAuthority()).isEqualTo("nn_host");
}
@Test
public void tetGetDefaultUriNoSchemeTrailingSlash() throws Exception {
conf.set(FS_DEFAULT_NAME_KEY, "nn_host/");
intercept(IllegalArgumentException.class,
"No scheme in default FS",
() -> FileSystem.getDefaultUri(conf));
}
@Test
public void tetFsGet() throws IOException {
conf.set(FS_DEFAULT_NAME_KEY, "file:///");
FileSystem fs = FileSystem.get(conf);
assertThat(fs).isInstanceOf(LocalFileSystem.class);
}
@Test
public void tetFsGetNoScheme() throws Exception {
// Bare host name or address indicates hdfs scheme
conf.set(FS_DEFAULT_NAME_KEY, "nn_host");
intercept(UnsupportedFileSystemException.class, "hdfs",
() -> FileSystem.get(conf));
}
@Test
public void tetFsGetNoSchemeTrailingSlash() throws Exception {
// Bare host name or address with trailing slash is invalid
conf.set(FS_DEFAULT_NAME_KEY, "nn_host/");
intercept(IllegalArgumentException.class,
"No scheme in default FS",
() -> FileSystem.get(conf));
}
}
| TestDefaultUri |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/service/registry/echo/EchoB.java | {
"start": 814,
"end": 904
} | interface ____ {
@GetExchange("/echoB")
String handle(@RequestParam String input);
}
| EchoB |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/runtime/ContainerExecutionException.java | {
"start": 1292,
"end": 2834
} | class ____ extends YarnException {
private static final long serialVersionUID = 1L;
private static final int EXIT_CODE_UNSET = -1;
private static final String OUTPUT_UNSET = "<unknown>";
private int exitCode;
private String output;
private String errorOutput;
public ContainerExecutionException(String message) {
super(message);
exitCode = EXIT_CODE_UNSET;
output = OUTPUT_UNSET;
errorOutput = OUTPUT_UNSET;
}
public ContainerExecutionException(Throwable throwable) {
super(throwable);
exitCode = EXIT_CODE_UNSET;
output = OUTPUT_UNSET;
errorOutput = OUTPUT_UNSET;
}
public ContainerExecutionException(String message, int exitCode) {
super(message);
this.exitCode = exitCode;
this.output = OUTPUT_UNSET;
this.errorOutput = OUTPUT_UNSET;
}
public ContainerExecutionException(String message, int exitCode, String
output, String errorOutput) {
super(message);
this.exitCode = exitCode;
this.output = output;
this.errorOutput = errorOutput;
}
public ContainerExecutionException(Throwable cause, int exitCode, String
output, String errorOutput) {
super(cause);
this.exitCode = exitCode;
this.output = output;
this.errorOutput = errorOutput;
}
public int getExitCode() {
return exitCode;
}
public String getOutput() {
return output;
}
public String getErrorOutput() {
return errorOutput;
}
public static int getDefaultExitCode() {
return EXIT_CODE_UNSET;
}
} | ContainerExecutionException |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/AnnotationTest2.java | {
"start": 685,
"end": 1379
} | class ____ {
@JSONField(name = "ID")
private int id;
private String name;
private String descrition;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@JSONField(name = "desc")
public String getDescrition() {
return descrition;
}
@JSONField(name = "desc")
public void setDescrition(String descrition) {
this.descrition = descrition;
}
}
}
| User |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/EmbeddableSource.java | {
"start": 876,
"end": 971
} | class ____ representing the
* composition.
*/
boolean isDynamic();
boolean isUnique();
}
| for |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/FlowableComponentBuilderFactory.java | {
"start": 1398,
"end": 1885
} | interface ____ {
/**
* Flowable (camel-flowable)
* Send and receive messages from the Flowable BPMN and CMMN engines.
*
* Category: workflow
* Since: 4.9
* Maven coordinates: org.apache.camel:camel-flowable
*
* @return the dsl builder
*/
static FlowableComponentBuilder flowable() {
return new FlowableComponentBuilderImpl();
}
/**
* Builder for the Flowable component.
*/
| FlowableComponentBuilderFactory |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/proxy/jdbc/TransactionInfo.java | {
"start": 693,
"end": 830
} | class ____ extends com.alibaba.druid.util.TransactionInfo {
public TransactionInfo(long id) {
super(id);
}
}
| TransactionInfo |
java | quarkusio__quarkus | integration-tests/oidc-code-flow/src/main/java/io/quarkus/it/keycloak/CustomTenantConfigResolver.java | {
"start": 529,
"end": 1851
} | class ____ implements TenantConfigResolver {
@Inject
@ConfigProperty(name = "quarkus.oidc.auth-server-url")
String authServerUrl;
OidcTenantConfig config = new OidcTenantConfig();
public CustomTenantConfigResolver() {
}
@PostConstruct
public void initConfig() {
config.setTenantId("tenant-before-wrong-redirect");
config.setAuthServerUrl(authServerUrl);
config.setClientId("quarkus-app");
config.getCredentials().setSecret("secret");
config.setApplicationType(ApplicationType.WEB_APP);
}
@Override
public Uni<OidcTenantConfig> resolve(RoutingContext context, OidcRequestContext<OidcTenantConfig> requestContext) {
if (context.request().path().contains("callback-before-wrong-redirect")) {
List<String> stateParam = context.queryParam("state");
if (stateParam.size() == 1 &&
context.getCookie("q_auth_tenant-before-wrong-redirect_" + stateParam.get(0)) != null) {
// trigger the code to access token exchange failure due to a redirect uri mismatch
config.authentication.setRedirectPath("wrong-path");
}
return Uni.createFrom().item(config);
}
return Uni.createFrom().nullItem();
}
}
| CustomTenantConfigResolver |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java | {
"start": 14842,
"end": 15680
} | class ____ extends TableFunction[String] {
* def eval(str: String): Unit = {
* str.split("#").foreach(collect)
* }
* }
*
* val split = new MySplitUDTF()
* table.joinLateral(split($"c") as "s", $"a" === $"s")
* .select($"a", $"b", $"c", $"s")
* }</pre>
*/
Table joinLateral(Expression tableFunctionCall, Expression joinPredicate);
/**
* Joins this {@link Table} with an user-defined {@link TableFunction}. This join is similar to
* a SQL left outer join with ON TRUE predicate but works with a table function. Each row of the
* table is joined with all rows produced by the table function. If the table function does not
* produce any row, the outer row is padded with nulls.
*
* <p>Java Example:
*
* <pre>{@code
* | MySplitUDTF |
java | spring-projects__spring-framework | buildSrc/src/main/java/org/springframework/build/optional/OptionalDependenciesPlugin.java | {
"start": 1249,
"end": 2133
} | class ____ implements Plugin<Project> {
/**
* Name of the {@code optional} configuration.
*/
public static final String OPTIONAL_CONFIGURATION_NAME = "optional";
@Override
public void apply(Project project) {
Configuration optional = project.getConfigurations().create(OPTIONAL_CONFIGURATION_NAME);
optional.setCanBeConsumed(false);
optional.setCanBeResolved(false);
project.getPlugins().withType(JavaBasePlugin.class, (javaBasePlugin) -> {
SourceSetContainer sourceSets = project.getExtensions().getByType(JavaPluginExtension.class)
.getSourceSets();
sourceSets.all((sourceSet) -> {
project.getConfigurations().getByName(sourceSet.getCompileClasspathConfigurationName()).extendsFrom(optional);
project.getConfigurations().getByName(sourceSet.getRuntimeClasspathConfigurationName()).extendsFrom(optional);
});
});
}
}
| OptionalDependenciesPlugin |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/LocalSourceExecSerializationTests.java | {
"start": 629,
"end": 1709
} | class ____ extends AbstractPhysicalPlanSerializationTests<LocalSourceExec> {
public static LocalSourceExec randomLocalSourceExec() {
Source source = randomSource();
List<Attribute> output = randomFieldAttributes(1, 9, false);
LocalSupplier supplier = LocalSupplierTests.randomLocalSupplier();
return new LocalSourceExec(source, output, supplier);
}
@Override
protected LocalSourceExec createTestInstance() {
return randomLocalSourceExec();
}
@Override
protected LocalSourceExec mutateInstance(LocalSourceExec instance) throws IOException {
List<Attribute> output = instance.output();
LocalSupplier supplier = instance.supplier();
if (randomBoolean()) {
output = randomValueOtherThan(output, () -> randomFieldAttributes(1, 9, false));
} else {
supplier = randomValueOtherThan(supplier, () -> LocalSupplierTests.randomLocalSupplier());
}
return new LocalSourceExec(instance.source(), output, supplier);
}
}
| LocalSourceExecSerializationTests |
java | apache__camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/NettyMultipleSimultaneousClientsTest.java | {
"start": 1470,
"end": 3703
} | class ____ extends BaseNettyTest {
private String uri = "netty:tcp://localhost:{{port}}?sync=true&reuseAddress=true&synchronous=false";
private int clientCount = 20;
private CountDownLatch startLatch = new CountDownLatch(1);
private CountDownLatch finishLatch = new CountDownLatch(clientCount);
@Test
public void testSimultaneousClients() throws Exception {
ExecutorService executorService = Executors.newFixedThreadPool(clientCount);
Future<?>[] replies = new Future[clientCount];
for (int i = 0; i < clientCount; i++) {
replies[i] = executorService.submit(new Callable<Object>() {
@Override
public Object call() throws Exception {
// wait until we're allowed to start
startLatch.await();
Object reply = template.requestBody(uri, "World");
// signal that we're done now
finishLatch.countDown();
return reply;
}
});
}
Object[] expectedReplies = new Object[clientCount];
Arrays.fill(expectedReplies, "Bye World");
getMockEndpoint("mock:result").expectedMessageCount(clientCount);
getMockEndpoint("mock:result").expectedBodiesReceived(expectedReplies);
// fire the simultaneous client calls
startLatch.countDown();
// and wait long enough until they're all done
assertTrue(finishLatch.await(5, TimeUnit.SECONDS), "Waiting on the latch ended up with a timeout!");
executorService.shutdown();
// assert on what we expect to receive
for (int i = 0; i < clientCount; i++) {
assertEquals("Bye World", replies[i].get());
}
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(uri)
.log("${body}")
.transform(body().prepend("Bye "))
.to("mock:result");
}
};
}
}
| NettyMultipleSimultaneousClientsTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/rx/SetRxIterator.java | {
"start": 1024,
"end": 3363
} | class ____<V> {
public Flowable<V> create() {
ReplayProcessor<V> p = ReplayProcessor.create();
return p.doOnRequest(new LongConsumer() {
private String nextIterPos = "0";
private RedisClient client;
private AtomicLong elementsRead = new AtomicLong();
private boolean finished;
private volatile boolean completed;
private AtomicLong readAmount = new AtomicLong();
@Override
public void accept(long value) {
readAmount.addAndGet(value);
if (completed || elementsRead.get() == 0) {
nextValues();
completed = false;
}
}
protected void nextValues() {
scanIterator(client, nextIterPos).whenComplete((res, e) -> {
if (e != null) {
p.onError(e);
return;
}
if (finished) {
client = null;
nextIterPos = "0";
return;
}
client = res.getRedisClient();
nextIterPos = res.getPos();
for (Object val : res.getValues()) {
p.onNext((V) val);
elementsRead.incrementAndGet();
}
if (elementsRead.get() >= readAmount.get()) {
p.onComplete();
elementsRead.set(0);
completed = true;
return;
}
if ("0".equals(res.getPos()) && !tryAgain()) {
finished = true;
p.onComplete();
}
if (finished || completed) {
return;
}
nextValues();
});
}
});
}
protected boolean tryAgain() {
return false;
}
protected abstract RFuture<ScanResult<Object>> scanIterator(RedisClient client, String nextIterPos);
}
| SetRxIterator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/JavaPeriodGetDaysTest.java | {
"start": 4104,
"end": 4515
} | class ____ {
public static void foo(Period period) {
long months = period.getMonths();
}
}
""")
.doTest();
}
@Test
public void getDaysOnly() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import java.time.Period;
public | TestCase |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/trace/hook/DefaultRecallMessageTraceHook.java | {
"start": 1585,
"end": 3854
} | class ____ implements RPCHook {
private static final String RECALL_TRACE_ENABLE_KEY = "com.rocketmq.recall.default.trace.enable";
private boolean enableDefaultTrace = Boolean.parseBoolean(System.getProperty(RECALL_TRACE_ENABLE_KEY, "false"));
private TraceDispatcher traceDispatcher;
public DefaultRecallMessageTraceHook(TraceDispatcher traceDispatcher) {
this.traceDispatcher = traceDispatcher;
}
@Override
public void doBeforeRequest(String remoteAddr, RemotingCommand request) {
}
@Override
public void doAfterResponse(String remoteAddr, RemotingCommand request, RemotingCommand response) {
if (request.getCode() != RequestCode.RECALL_MESSAGE
|| !enableDefaultTrace
|| null == response.getExtFields()
|| null == response.getExtFields().get(MessageConst.PROPERTY_MSG_REGION)
|| null == traceDispatcher) {
return;
}
try {
String regionId = response.getExtFields().get(MessageConst.PROPERTY_MSG_REGION);
RecallMessageRequestHeader requestHeader =
request.decodeCommandCustomHeader(RecallMessageRequestHeader.class);
String topic = NamespaceUtil.withoutNamespace(requestHeader.getTopic());
String group = NamespaceUtil.withoutNamespace(requestHeader.getProducerGroup());
String recallHandle = requestHeader.getRecallHandle();
RecallMessageHandle.HandleV1 handleV1 =
(RecallMessageHandle.HandleV1) RecallMessageHandle.decodeHandle(recallHandle);
TraceBean traceBean = new TraceBean();
traceBean.setTopic(topic);
traceBean.setMsgId(handleV1.getMessageId());
TraceContext traceContext = new TraceContext();
traceContext.setRegionId(regionId);
traceContext.setTraceBeans(new ArrayList<>(1));
traceContext.setTraceType(TraceType.Recall);
traceContext.setGroupName(group);
traceContext.getTraceBeans().add(traceBean);
traceContext.setSuccess(ResponseCode.SUCCESS == response.getCode());
traceDispatcher.append(traceContext);
} catch (Exception e) {
}
}
}
| DefaultRecallMessageTraceHook |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditIntegration.java | {
"start": 3816,
"end": 7014
} | class ____ {}", auditClassname);
final Constructor<? extends OperationAuditor> constructor
= auditClassname.getConstructor();
final OperationAuditor instance = constructor.newInstance();
instance.init(options);
return instance;
} catch (NoSuchMethodException | InstantiationException
| RuntimeException
| IllegalAccessException | InvocationTargetException e) {
throw new IOException("Failed to instantiate class "
+ auditClassname
+ " defined in " + key
+ ": " + e,
e);
}
}
/**
* Get the span from the execution attributes.
* @param executionAttributes the execution attributes
* @return the span callbacks or null
*/
public static AuditSpanS3A
retrieveAttachedSpan(final ExecutionAttributes executionAttributes) {
return executionAttributes.getAttribute(AUDIT_SPAN_EXECUTION_ATTRIBUTE);
}
/**
* Attach a span to the execution attributes.
* @param executionAttributes the execution attributes
* @param span span to attach
*/
public static void attachSpanToRequest(
final ExecutionAttributes executionAttributes,
final AuditSpanS3A span) {
executionAttributes.putAttribute(AUDIT_SPAN_EXECUTION_ATTRIBUTE, span);
}
/**
* Translate an audit exception.
* @param path path of operation.
* @param exception exception
* @return the IOE to raise.
*/
public static IOException translateAuditException(String path,
AuditFailureException exception) {
if (exception instanceof AuditOperationRejectedException) {
// special handling of this subclass
return new UnsupportedRequestException(path,
exception.getMessage(), exception);
}
return (AccessDeniedException)new AccessDeniedException(path, null,
exception.toString()).initCause(exception);
}
/**
* Translate an exception if it or its inner exception is an
* {@link AuditFailureException}.
* If this condition is not met, null is returned.
* @param path path of operation.
* @param exception exception
* @return a translated exception or null.
*/
public static IOException maybeTranslateAuditException(String path,
Exception exception) {
if (exception instanceof AuditFailureException) {
return translateAuditException(path, (AuditFailureException) exception);
} else if (exception.getCause() instanceof AuditFailureException) {
return translateAuditException(path,
(AuditFailureException) exception.getCause());
} else {
return null;
}
}
public static boolean containsAuditException(Exception exception) {
return exception instanceof AuditFailureException
|| exception.getCause() instanceof AuditFailureException;
}
/**
* Check if the configuration is set to reject operations that are
* performed outside of an audit span.
*
* @param conf the configuration to check
* @return true if operations outside of an audit span should be rejected, false otherwise
*/
public static boolean isRejectOutOfSpan(final Configuration conf) {
return conf.getBoolean(REJECT_OUT_OF_SPAN_OPERATIONS, false);
}
}
| is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e1/b/DependentId.java | {
"start": 285,
"end": 396
} | class ____ implements Serializable {
String name;
long empPK; // corresponds to PK type of Employee
}
| DependentId |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/EnhancedProxyCacheTest.java | {
"start": 1678,
"end": 5645
} | class ____ {
private static final AtomicLong countryId = new AtomicLong();
@Test
public void testPreferenceFor2LCOverUninitializedProxy(SessionFactoryScope scope) throws Exception {
final Statistics stats = scope.getSessionFactory().getStatistics();
storeTestData( scope );
clearAllCaches( scope );
stats.clear();
assertTrue( stats.isStatisticsEnabled() );
assertEquals( 0, stats.getEntityFetchCount() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
// First we load the Country once, then trigger initialization of the related Continent proxy.
// 2LC is empty, so stats should show that these objects are being loaded from the DB.
scope.inSession( s -> {
Country nl = s.get( Country.class, countryId.get() );
assertNotNull( nl );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 1, stats.getSecondLevelCacheMissCount() );
assertEquals( 1, stats.getEntityLoadCount() );
Continent continent = nl.getContinent();
//Check that this is indeed an enhanced proxy so to ensure we're testing in the right conditions.
//The following casts should not fail:
final PersistentAttributeInterceptable interceptable = (PersistentAttributeInterceptable) continent;
final EnhancementAsProxyLazinessInterceptor interceptor = (EnhancementAsProxyLazinessInterceptor) interceptable.$$_hibernate_getInterceptor();
assertFalse( interceptor.isInitialized() );
assertFalse( interceptor.isAttributeLoaded( "code" ) );
//Trigger initialization of the enhanced proxy:
assertEquals( "EU", continent.getCode() );
assertTrue( interceptor.isInitialized() );
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 2, stats.getEntityLoadCount() );
} );
stats.clear();
//Now load the same objects again; we expect to hit 2LC this time,
//and we should see no needs to hit the DB.
//Also, since all data is readily available we won't need to make
//all attributes lazy.
scope.inSession( s -> {
assertEquals( 0, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getEntityLoadCount() );
Country nl = s.get( Country.class, countryId.get() );
assertNotNull( nl );
assertEquals( 1, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getEntityLoadCount() );
Continent continent = nl.getContinent();
final PersistentAttributeInterceptable interceptable = (PersistentAttributeInterceptable) continent;
final EnhancementAsProxyLazinessInterceptor interceptor = (EnhancementAsProxyLazinessInterceptor) interceptable.$$_hibernate_getInterceptor();
assertFalse( interceptor.isInitialized() );
assertFalse( interceptor.isAttributeLoaded( "code" ) );
assertEquals( 1, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getEntityLoadCount() );
//Trigger initialization of the enhanced proxy:
assertEquals( "EU", continent.getCode() );
assertTrue( interceptor.isInitialized() );
assertEquals( 2, stats.getSecondLevelCacheHitCount() );
assertEquals( 0, stats.getSecondLevelCacheMissCount() );
assertEquals( 0, stats.getEntityLoadCount() );
} );
}
private void clearAllCaches(SessionFactoryScope scope) {
final CacheImplementor cache = scope.getSessionFactory().getCache();
for (String name : cache.getCacheRegionNames() ) {
cache.getRegion( name ).clear();
}
}
private void storeTestData(SessionFactoryScope scope) {
scope.inTransaction( s -> {
Continent continent = new Continent();
continent.setCode( "EU" );
continent.setName( "Europe" );
s.persist( continent );
Country c = new Country();
c.setCode( "NL" );
c.setName( "Nederland" );
c.setContinent( continent );
s.persist( c );
countryId.set( c.getId() );
} );
}
}
| EnhancedProxyCacheTest |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/AdapterMethodsTest.java | {
"start": 15384,
"end": 16576
} | class ____ {
@FromJson
int jsonToInt(JsonReader reader) throws IOException {
if (reader.peek() == JsonReader.Token.NULL) {
reader.nextNull();
return -1;
}
return reader.nextInt();
}
@ToJson
void intToJson(JsonWriter writer, int value) throws IOException {
if (value == -1) {
writer.nullValue();
} else {
writer.value(value);
}
}
}
@Test
public void adapterThrows() throws Exception {
Moshi moshi = new Moshi.Builder().add(new ExceptionThrowingPointJsonAdapter()).build();
JsonAdapter<Point[]> arrayOfPointAdapter = moshi.adapter(Point[].class).lenient();
try {
arrayOfPointAdapter.toJson(new Point[] {null, null, new Point(0, 0)});
fail();
} catch (JsonDataException expected) {
assertThat(expected.getMessage()).isEqualTo("java.lang.Exception: pointToJson fail! at $[2]");
}
try {
arrayOfPointAdapter.fromJson("[null,null,[0,0]]");
fail();
} catch (JsonDataException expected) {
assertThat(expected.getMessage())
.isEqualTo("java.lang.Exception: pointFromJson fail! at $[2]");
}
}
static | NullableIntToJsonAdapter |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/CorrelateSortToRankRule.java | {
"start": 3720,
"end": 9130
} | class ____
extends RelRule<CorrelateSortToRankRule.CorrelateSortToRankRuleConfig> {
public static final CorrelateSortToRankRule INSTANCE =
CorrelateSortToRankRule.CorrelateSortToRankRuleConfig.DEFAULT.toRule();
protected CorrelateSortToRankRule(CorrelateSortToRankRuleConfig config) {
super(config);
}
@Override
public boolean matches(RelOptRuleCall call) {
Correlate correlate = call.rel(0);
if (correlate.getJoinType() != JoinRelType.INNER) {
return false;
}
Aggregate agg = call.rel(1);
if (!agg.getAggCallList().isEmpty() || agg.getGroupSets().size() > 1) {
return false;
}
Project aggInput = call.rel(2);
if (!aggInput.isMapping()) {
return false;
}
Sort sort = call.rel(3);
if (sort.offset != null || sort.fetch == null) {
// 1. we can not describe the offset using rank
// 2. there is no need to transform to rank if no fetch limit
return false;
}
Project sortInput = call.rel(4);
if (!sortInput.isMapping()) {
return false;
}
Filter filter = call.rel(5);
List<RexNode> cnfCond = RelOptUtil.conjunctions(filter.getCondition());
if (cnfCond.stream().anyMatch(c -> !isValidCondition(c, correlate))) {
return false;
}
return aggInput.getInput().getDigest().equals(filter.getInput().getDigest());
}
private boolean isValidCondition(RexNode condition, Correlate correlate) {
// must be equiv condition
if (condition.getKind() != SqlKind.EQUALS) {
return false;
}
Tuple2<RexInputRef, RexFieldAccess> tuple = resolveFilterCondition(condition);
if (tuple.f0 == null) {
return false;
}
RexCorrelVariable variable = (RexCorrelVariable) tuple.f1.getReferenceExpr();
return variable.id.equals(correlate.getCorrelationId());
}
/**
* Resolves the filter condition with specific pattern: input ref and field access.
*
* @param condition The join condition
* @return tuple of operands (RexInputRef, RexFieldAccess), or null if the pattern does not
* match
*/
private Tuple2<RexInputRef, RexFieldAccess> resolveFilterCondition(RexNode condition) {
RexCall condCall = (RexCall) condition;
RexNode operand0 = condCall.getOperands().get(0);
RexNode operand1 = condCall.getOperands().get(1);
if (operand0.isA(SqlKind.INPUT_REF) && operand1.isA(SqlKind.FIELD_ACCESS)) {
return Tuple2.of((RexInputRef) operand0, (RexFieldAccess) operand1);
} else if (operand0.isA(SqlKind.FIELD_ACCESS) && operand1.isA(SqlKind.INPUT_REF)) {
return Tuple2.of((RexInputRef) operand1, (RexFieldAccess) operand0);
} else {
return Tuple2.of(null, null);
}
}
@Override
public void onMatch(RelOptRuleCall call) {
RelBuilder builder = call.builder();
Sort sort = call.rel(3);
Project sortInput = call.rel(4);
Filter filter = call.rel(5);
List<RexNode> cnfCond = RelOptUtil.conjunctions(filter.getCondition());
ImmutableBitSet partitionKey =
ImmutableBitSet.of(
cnfCond.stream()
.map(c -> resolveFilterCondition(c).f0.getIndex())
.collect(Collectors.toList()));
RelDataType baseType = sortInput.getInput().getRowType();
List<RexNode> projects = new ArrayList<>();
partitionKey.asList().forEach(k -> projects.add(RexInputRef.of(k, baseType)));
projects.addAll(sortInput.getProjects());
RelCollation oriCollation = sort.getCollation();
List<RelFieldCollation> newFieldCollations =
oriCollation.getFieldCollations().stream()
.map(
fc -> {
int newFieldIdx =
((RexInputRef)
sortInput
.getProjects()
.get(fc.getFieldIndex()))
.getIndex();
return fc.withFieldIndex(newFieldIdx);
})
.collect(Collectors.toList());
RelCollation newCollation = RelCollations.of(newFieldCollations);
RelNode newRel =
((FlinkRelBuilder) (builder.push(filter.getInput())))
.rank(
partitionKey,
newCollation,
RankType.ROW_NUMBER,
new ConstantRankRange(
1, ((RexLiteral) sort.fetch).getValueAs(Long.class)),
null,
false)
.project(projects)
.build();
call.transformTo(newRel);
}
/** Rule configuration. */
@Value.Immutable(singleton = false)
public | CorrelateSortToRankRule |
java | alibaba__nacos | config/src/test/java/com/alibaba/nacos/config/server/service/repository/embedded/EmbeddedConfigInfoGrayPersistServiceImplTest.java | {
"start": 2801,
"end": 20975
} | class ____ {
private EmbeddedConfigInfoGrayPersistServiceImpl embeddedConfigInfoGrayPersistService;
@Mock
private DataSourceService dataSourceService;
@Mock
private IdGeneratorManager idGeneratorManager;
@Mock
private HistoryConfigInfoPersistService historyConfigInfoPersistService;
MockedStatic<EnvUtil> envUtilMockedStatic;
MockedStatic<EmbeddedStorageContextHolder> embeddedStorageContextHolderMockedStatic;
MockedStatic<DynamicDataSource> dynamicDataSourceMockedStatic;
@Mock
DynamicDataSource dynamicDataSource;
@Mock
DatabaseOperate databaseOperate;
/**
* before test.
*/
@BeforeEach
public void before() {
embeddedStorageContextHolderMockedStatic = Mockito.mockStatic(EmbeddedStorageContextHolder.class);
dynamicDataSourceMockedStatic = Mockito.mockStatic(DynamicDataSource.class);
envUtilMockedStatic = Mockito.mockStatic(EnvUtil.class);
when(DynamicDataSource.getInstance()).thenReturn(dynamicDataSource);
when(dynamicDataSource.getDataSource()).thenReturn(dataSourceService);
when(dataSourceService.getDataSourceType()).thenReturn("derby");
envUtilMockedStatic.when(() -> EnvUtil.getProperty(anyString(), eq(Boolean.class), eq(false)))
.thenReturn(false);
embeddedConfigInfoGrayPersistService = new EmbeddedConfigInfoGrayPersistServiceImpl(databaseOperate,
idGeneratorManager, historyConfigInfoPersistService);
}
/**
* after each case.
*/
@AfterEach
public void after() {
dynamicDataSourceMockedStatic.close();
envUtilMockedStatic.close();
embeddedStorageContextHolderMockedStatic.close();
}
@Test
public void testInsertOrUpdateGrayOfAdd() {
String dataId = "dataId111222";
String group = "group";
String tenant = "tenant";
String appName = "appname1234";
String content = "c12345";
ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
configInfo.setEncryptedDataKey("key23456");
//mock query config state empty and return obj after insert
ConfigInfoStateWrapper configInfoStateWrapper = new ConfigInfoStateWrapper();
configInfoStateWrapper.setLastModified(System.currentTimeMillis());
configInfoStateWrapper.setId(234567890L);
String grayName = "tag123grayName";
String grayRule = "";
Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(null).thenReturn(configInfoStateWrapper);
String srcIp = "ip345678";
String srcUser = "user1234567";
ConfigOperateResult configOperateResult = embeddedConfigInfoGrayPersistService.insertOrUpdateGray(configInfo,
grayName, grayRule, srcIp, srcUser);
//mock insert invoked.
embeddedStorageContextHolderMockedStatic.verify(
() -> EmbeddedStorageContextHolder.addSqlContext(anyString(), any(), eq(dataId), eq(group), eq(tenant),
eq(grayName), eq(grayRule), eq(appName), eq(content),
eq(MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE)), eq(srcIp), eq(srcUser),
any(Timestamp.class), any(Timestamp.class)), times(1));
Mockito.verify(historyConfigInfoPersistService, times(1))
.insertConfigHistoryAtomic(eq(configInfo.getId()), eq(configInfo), eq(srcIp), eq(srcUser),
any(Timestamp.class), eq("I"), eq("gray"), eq(grayName), anyString());
assertEquals(configInfoStateWrapper.getId(), configOperateResult.getId());
assertEquals(configInfoStateWrapper.getLastModified(), configOperateResult.getLastModified());
}
@Test
public void testInsertOrUpdateGrayOfUpdate() {
String dataId = "dataId111222";
String group = "group";
String tenant = "tenant";
String appName = "appname1234";
String content = "c12345";
ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
configInfo.setEncryptedDataKey("key23456");
//mock query config state and return obj after update
ConfigInfoStateWrapper configInfoStateWrapper = new ConfigInfoStateWrapper();
configInfoStateWrapper.setLastModified(System.currentTimeMillis());
configInfoStateWrapper.setId(234567890L);
String grayName = "tag123grayName";
final String grayRule = "tag123grayrule";
Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(new ConfigInfoStateWrapper())
.thenReturn(configInfoStateWrapper);
//mock exist config info
ConfigInfoGrayWrapper configAllInfo4Gray = new ConfigInfoGrayWrapper();
configAllInfo4Gray.setDataId(dataId);
configAllInfo4Gray.setGroup(group);
configAllInfo4Gray.setTenant(tenant);
configAllInfo4Gray.setMd5("old_md5");
configAllInfo4Gray.setSrcUser("user");
when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_GRAY_WRAPPER_ROW_MAPPER))).thenReturn(configAllInfo4Gray);
String srcIp = "ip345678";
String srcUser = "user1234567";
ConfigOperateResult configOperateResult = embeddedConfigInfoGrayPersistService.insertOrUpdateGray(configInfo,
grayName, grayRule, srcIp, srcUser);
//verify update to be invoked
embeddedStorageContextHolderMockedStatic.verify(
() -> EmbeddedStorageContextHolder.addSqlContext(anyString(), eq(content),
eq(MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE)), eq(srcIp), eq(srcUser),
any(Timestamp.class), eq(appName), eq(grayRule), eq(dataId), eq(group), eq(tenant),
eq(grayName)), times(1));
Mockito.verify(historyConfigInfoPersistService, times(1))
.insertConfigHistoryAtomic(eq(configAllInfo4Gray.getId()), eq(configAllInfo4Gray), eq(srcIp),
eq(srcUser), any(Timestamp.class), eq("U"), eq("gray"), eq(grayName), anyString());
assertEquals(configInfoStateWrapper.getId(), configOperateResult.getId());
assertEquals(configInfoStateWrapper.getLastModified(), configOperateResult.getLastModified());
}
@Test
public void testInsertOrUpdateGrayCasOfAdd() {
String dataId = "dataId111222";
String group = "group";
String tenant = "tenant";
String appName = "appname1234";
String content = "c12345";
ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
configInfo.setEncryptedDataKey("key23456");
configInfo.setMd5("casMd5");
//mock query config state empty and return obj after insert
ConfigInfoStateWrapper configInfoStateWrapper = new ConfigInfoStateWrapper();
configInfoStateWrapper.setLastModified(System.currentTimeMillis());
configInfoStateWrapper.setId(234567890L);
String grayName = "tag123grayName";
String grayRule = "";
Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(null).thenReturn(configInfoStateWrapper);
String srcIp = "ip345678";
String srcUser = "user1234567";
ConfigOperateResult configOperateResult = embeddedConfigInfoGrayPersistService.insertOrUpdateGrayCas(configInfo,
grayName, grayRule, srcIp, srcUser);
//verify insert to be invoked
//mock insert invoked.
embeddedStorageContextHolderMockedStatic.verify(
() -> EmbeddedStorageContextHolder.addSqlContext(anyString(), any(), eq(dataId), eq(group), eq(tenant),
eq(grayName), eq(grayRule), eq(appName), eq(content),
eq(MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE)), eq(srcIp), eq(srcUser),
any(Timestamp.class), any(Timestamp.class)), times(1));
Mockito.verify(historyConfigInfoPersistService, times(1))
.insertConfigHistoryAtomic(eq(configInfo.getId()), eq(configInfo), eq(srcIp), eq(srcUser),
any(Timestamp.class), eq("I"), eq("gray"), eq(grayName), anyString());
assertEquals(configInfoStateWrapper.getId(), configOperateResult.getId());
assertEquals(configInfoStateWrapper.getLastModified(), configOperateResult.getLastModified());
}
@Test
public void testInsertOrUpdateGrayCasOfUpdate() {
String dataId = "dataId111222";
String group = "group";
String tenant = "tenant";
String appName = "appname1234";
String content = "c12345";
ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, appName, content);
configInfo.setEncryptedDataKey("key23456");
configInfo.setMd5("casMd5");
//mock query config state and return obj after update
ConfigInfoStateWrapper configInfoStateWrapper = new ConfigInfoStateWrapper();
configInfoStateWrapper.setLastModified(System.currentTimeMillis());
configInfoStateWrapper.setId(234567890L);
String grayName = "tag123grayName";
final String grayRule = "";
Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(new ConfigInfoStateWrapper())
.thenReturn(configInfoStateWrapper);
//mock exist config info
ConfigInfoGrayWrapper configAllInfo4Gray = new ConfigInfoGrayWrapper();
configAllInfo4Gray.setDataId(dataId);
configAllInfo4Gray.setGroup(group);
configAllInfo4Gray.setTenant(tenant);
configAllInfo4Gray.setMd5("old_md5");
configAllInfo4Gray.setSrcUser("user");
when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_GRAY_WRAPPER_ROW_MAPPER))).thenReturn(configAllInfo4Gray);
String srcIp = "ip345678";
String srcUser = "user1234567";
//mock cas update return 1
Mockito.when(databaseOperate.blockUpdate()).thenReturn(true);
ConfigOperateResult configOperateResult = embeddedConfigInfoGrayPersistService.insertOrUpdateGrayCas(configInfo,
grayName, grayRule, srcIp, srcUser);
//verify update to be invoked
embeddedStorageContextHolderMockedStatic.verify(
() -> EmbeddedStorageContextHolder.addSqlContext(anyString(), eq(content),
eq(MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE)), eq(srcIp), eq(srcUser), eq(appName),
eq(grayRule), eq(dataId), eq(group), eq(tenant), eq(grayName), eq(configInfo.getMd5())),
times(1));
Mockito.verify(historyConfigInfoPersistService, times(1))
.insertConfigHistoryAtomic(eq(configAllInfo4Gray.getId()), eq(configAllInfo4Gray), eq(srcIp),
eq(srcUser), any(Timestamp.class), eq("U"), eq("gray"), eq(grayName), anyString());
assertEquals(configInfoStateWrapper.getId(), configOperateResult.getId());
assertEquals(configInfoStateWrapper.getLastModified(), configOperateResult.getLastModified());
}
@Test
public void testRemoveConfigInfoGrayName() {
String dataId = "dataId1112222";
String group = "group22";
String tenant = "tenant2";
final String srcIp = "ip345678";
final String srcUser = "user1234567";
final String grayName = "grayName...";
//mock exist config info
ConfigInfoGrayWrapper configAllInfo4Gray = new ConfigInfoGrayWrapper();
configAllInfo4Gray.setDataId(dataId);
configAllInfo4Gray.setGroup(group);
configAllInfo4Gray.setTenant(tenant);
configAllInfo4Gray.setMd5("old_md5");
when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_GRAY_WRAPPER_ROW_MAPPER))).thenReturn(configAllInfo4Gray);
embeddedConfigInfoGrayPersistService.removeConfigInfoGray(dataId, group, tenant, grayName, srcIp, srcUser);
//verify delete sql invoked.
embeddedStorageContextHolderMockedStatic.verify(
() -> EmbeddedStorageContextHolder.addSqlContext(anyString(), eq(dataId), eq(group), eq(tenant),
eq(grayName)), times(1));
Mockito.verify(historyConfigInfoPersistService, times(1))
.insertConfigHistoryAtomic(eq(configAllInfo4Gray.getId()), eq(configAllInfo4Gray), eq(srcIp),
eq(srcUser), any(Timestamp.class), eq("D"), eq("gray"), eq(grayName), anyString());
}
@Test
public void testFindConfigInfo4Gray() {
String dataId = "dataId1112222";
String group = "group22";
String tenant = "tenant2";
String grayName = "tag123345";
//mock query tag return obj
ConfigInfoGrayWrapper configInfoGrayWrapperMocked = new ConfigInfoGrayWrapper();
configInfoGrayWrapperMocked.setLastModified(System.currentTimeMillis());
Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {dataId, group, tenant, grayName}),
eq(CONFIG_INFO_GRAY_WRAPPER_ROW_MAPPER))).thenReturn(configInfoGrayWrapperMocked);
ConfigInfoGrayWrapper configInfo4GrayReturn = embeddedConfigInfoGrayPersistService.findConfigInfo4Gray(dataId,
group, tenant, grayName);
assertEquals(configInfoGrayWrapperMocked, configInfo4GrayReturn);
}
@Test
public void testConfigInfoGrayCount() {
Timestamp timestamp = new Timestamp(System.currentTimeMillis());
//mock count
Mockito.when(databaseOperate.queryOne(anyString(), eq(Integer.class))).thenReturn(308);
//execute & verify
int count = embeddedConfigInfoGrayPersistService.configInfoGrayCount();
assertEquals(308, count);
}
@Test
public void testFindAllConfigInfoGrayForDumpAll() {
//mock count
Mockito.when(databaseOperate.queryOne(anyString(), eq(Integer.class))).thenReturn(308);
List<ConfigInfoGrayWrapper> mockGrayList = new ArrayList<>();
mockGrayList.add(new ConfigInfoGrayWrapper());
mockGrayList.add(new ConfigInfoGrayWrapper());
mockGrayList.add(new ConfigInfoGrayWrapper());
mockGrayList.get(0).setLastModified(System.currentTimeMillis());
mockGrayList.get(1).setLastModified(System.currentTimeMillis());
mockGrayList.get(2).setLastModified(System.currentTimeMillis());
//mock query list
Mockito.when(
databaseOperate.queryMany(anyString(), eq(new Object[] {}), eq(CONFIG_INFO_GRAY_WRAPPER_ROW_MAPPER)))
.thenReturn(mockGrayList);
int pageNo = 3;
int pageSize = 100;
//execute & verify
Page<ConfigInfoGrayWrapper> returnGrayPage = embeddedConfigInfoGrayPersistService.findAllConfigInfoGrayForDumpAll(
pageNo, pageSize);
assertEquals(308, returnGrayPage.getTotalCount());
assertEquals(mockGrayList, returnGrayPage.getPageItems());
}
@Test
public void testFindConfigInfoGrays() {
String dataId = "dataId1112222";
String group = "group22";
String tenant = "tenant2";
List<String> mockedGrays = Arrays.asList("tags1", "tags11", "tags111");
Mockito.when(databaseOperate.queryMany(anyString(), eq(new Object[] {dataId, group, tenant}), eq(String.class)))
.thenReturn(mockedGrays);
List<String> configInfoGrays = embeddedConfigInfoGrayPersistService.findConfigInfoGrays(dataId, group, tenant);
assertEquals(mockedGrays, configInfoGrays);
}
@Test
public void testFindChangeConfigInfo4Gray() {
List<ConfigInfoGrayWrapper> mockList = new ArrayList<>();
mockList.add(new ConfigInfoGrayWrapper());
mockList.add(new ConfigInfoGrayWrapper());
mockList.add(new ConfigInfoGrayWrapper());
mockList.get(0).setLastModified(System.currentTimeMillis());
mockList.get(1).setLastModified(System.currentTimeMillis());
mockList.get(2).setLastModified(System.currentTimeMillis());
long lastMaxId = 123;
Timestamp timestamp = new Timestamp(System.currentTimeMillis());
when(databaseOperate.queryMany(anyString(), eq(new Object[] {timestamp, lastMaxId, 100}),
eq(CONFIG_INFO_GRAY_WRAPPER_ROW_MAPPER))).thenReturn(mockList)
.thenThrow(new CannotGetJdbcConnectionException("mock exception22"));
List<ConfigInfoGrayWrapper> changeConfig = embeddedConfigInfoGrayPersistService.findChangeConfig(timestamp,
lastMaxId, 100);
assertTrue(changeConfig.get(0).getLastModified() == mockList.get(0).getLastModified());
assertTrue(changeConfig.get(1).getLastModified() == mockList.get(1).getLastModified());
assertTrue(changeConfig.get(2).getLastModified() == mockList.get(2).getLastModified());
try {
embeddedConfigInfoGrayPersistService.findChangeConfig(timestamp, lastMaxId, 100);
assertTrue(false);
} catch (CannotGetJdbcConnectionException exception) {
assertEquals("mock exception22", exception.getMessage());
}
}
}
| EmbeddedConfigInfoGrayPersistServiceImplTest |
java | apache__camel | components/camel-spring-parent/camel-spring-rabbitmq/src/test/java/org/apache/camel/component/springrabbit/integration/RabbitMQConsumerRoutingKeyIT.java | {
"start": 1303,
"end": 3269
} | class ____ extends RabbitMQITSupport {
@Test
public void testConsumer() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
MockEndpoint.assertIsSatisfied(context, 30, TimeUnit.SECONDS);
}
@Test
public void testConsumerWithHeader() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:result").expectedHeaderReceived("cheese", "gouda");
template.sendBodyAndHeader("direct:start", "Hello World", "cheese", "gouda");
MockEndpoint.assertIsSatisfied(context, 30, TimeUnit.SECONDS);
}
@Test
public void testConsumerWithMessage() throws Exception {
MessageProperties props = MessagePropertiesBuilder.newInstance()
.setContentType(MessageProperties.CONTENT_TYPE_TEXT_PLAIN)
.setMessageId("123")
.setHeader("bar", "baz")
.build();
Message body = MessageBuilder.withBody("foo".getBytes())
.andProperties(props)
.build();
getMockEndpoint("mock:result").expectedBodiesReceived("foo");
getMockEndpoint("mock:result").expectedHeaderReceived("bar", "baz");
template.sendBody("direct:start", body);
MockEndpoint.assertIsSatisfied(context, 30, TimeUnit.SECONDS);
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:start")
.to("spring-rabbitmq:foo?routingKey=foo.bar");
from("spring-rabbitmq:foo?queues=myqueue&routingKey=foo.bar")
.to("log:result")
.to("mock:result");
}
};
}
}
| RabbitMQConsumerRoutingKeyIT |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/codec/CancelWithoutDemandCodecTests.java | {
"start": 6339,
"end": 7779
} | class ____ implements ReactiveHttpOutputMessage {
private final DataBufferFactory bufferFactory;
public CancellingOutputMessage(DataBufferFactory bufferFactory) {
this.bufferFactory = bufferFactory;
}
@Override
public DataBufferFactory bufferFactory() {
return this.bufferFactory;
}
@Override
public void beforeCommit(Supplier<? extends Mono<Void>> action) {
}
@Override
public boolean isCommitted() {
return false;
}
@Override
public Mono<Void> writeWith(Publisher<? extends DataBuffer> body) {
Flux<? extends DataBuffer> flux = Flux.from(body);
BaseSubscriber<DataBuffer> subscriber = new ZeroDemandSubscriber();
flux.subscribe(subscriber); // Assume sync execution (for example, encoding with Flux.just)
subscriber.cancel();
return Mono.empty();
}
@Override
public Mono<Void> writeAndFlushWith(Publisher<? extends Publisher<? extends DataBuffer>> body) {
Flux<? extends DataBuffer> flux = Flux.from(body).concatMap(Flux::from);
BaseSubscriber<DataBuffer> subscriber = new ZeroDemandSubscriber();
flux.subscribe(subscriber); // Assume sync execution (for example, encoding with Flux.just)
subscriber.cancel();
return Mono.empty();
}
@Override
public Mono<Void> setComplete() {
throw new UnsupportedOperationException();
}
@Override
public HttpHeaders getHeaders() {
return new HttpHeaders();
}
}
private static | CancellingOutputMessage |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/reindex/RetryListener.java | {
"start": 919,
"end": 2637
} | class ____ extends DelegatingActionListener<ScrollableHitSource.Response, ScrollableHitSource.Response>
implements
RejectAwareActionListener<ScrollableHitSource.Response> {
private final Logger logger;
private final Iterator<TimeValue> retries;
private final ThreadPool threadPool;
private final Consumer<RejectAwareActionListener<ScrollableHitSource.Response>> retryScrollHandler;
private int retryCount = 0;
RetryListener(
Logger logger,
ThreadPool threadPool,
BackoffPolicy backoffPolicy,
Consumer<RejectAwareActionListener<ScrollableHitSource.Response>> retryScrollHandler,
ActionListener<ScrollableHitSource.Response> delegate
) {
super(delegate);
this.logger = logger;
this.threadPool = threadPool;
this.retries = backoffPolicy.iterator();
this.retryScrollHandler = retryScrollHandler;
}
@Override
public void onResponse(ScrollableHitSource.Response response) {
delegate.onResponse(response);
}
@Override
public void onRejection(Exception e) {
if (retries.hasNext()) {
retryCount += 1;
TimeValue delay = retries.next();
logger.trace(() -> "retrying rejected search after [" + delay + "]", e);
schedule(() -> retryScrollHandler.accept(this), delay);
} else {
logger.warn(() -> "giving up on search because we retried [" + retryCount + "] times without success", e);
delegate.onFailure(e);
}
}
private void schedule(Runnable runnable, TimeValue delay) {
threadPool.schedule(runnable, delay, EsExecutors.DIRECT_EXECUTOR_SERVICE);
}
}
| RetryListener |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/multipart/FormEncodedDataDefinition.java | {
"start": 3427,
"end": 11121
} | class ____ implements ServerHttpRequest.ReadCallback, FormDataParser {
private final ResteasyReactiveRequestContext exchange;
private final FormData data;
private final StringBuilder builder = new StringBuilder();
private final long maxAttributeSize;
private String name = null;
private String charset;
//0= parsing name
//1=parsing name, decode required
//2=parsing value
//3=parsing value, decode required
//4=finished
private int state = 0;
private FormEncodedDataParser(final String charset, final ResteasyReactiveRequestContext exchange, int maxParams,
long maxAttributeSize) {
this.exchange = exchange;
this.charset = charset;
this.data = new FormData(maxParams);
this.maxAttributeSize = maxAttributeSize;
}
private void doParse(final ByteBuffer buffer) throws IOException {
while (buffer.hasRemaining()) {
byte n = buffer.get();
switch (state) {
case 0: {
if (n == '=') {
if (builder.length() > maxAttributeSize) {
throw new WebApplicationException(Response.Status.REQUEST_ENTITY_TOO_LARGE);
}
name = builder.toString();
builder.setLength(0);
state = 2;
} else if (n == '&') {
if (builder.length() > maxAttributeSize) {
throw new WebApplicationException(Response.Status.REQUEST_ENTITY_TOO_LARGE);
}
addPair(builder.toString(), "");
builder.setLength(0);
state = 0;
} else if (n == '%' || n == '+' || n < 0) {
state = 1;
builder.append((char) (n & 0xFF));
} else {
builder.append((char) n);
}
break;
}
case 1: {
if (n == '=') {
if (builder.length() > maxAttributeSize) {
throw new WebApplicationException(Response.Status.REQUEST_ENTITY_TOO_LARGE);
}
name = decodeParameterName(builder.toString(), charset, true, new StringBuilder());
builder.setLength(0);
state = 2;
} else if (n == '&') {
addPair(decodeParameterName(builder.toString(), charset, true, new StringBuilder()), "");
builder.setLength(0);
state = 0;
} else {
builder.append((char) (n & 0xFF));
}
break;
}
case 2: {
if (n == '&') {
if (builder.length() > maxAttributeSize) {
throw new WebApplicationException(Response.Status.REQUEST_ENTITY_TOO_LARGE);
}
addPair(name, builder.toString());
builder.setLength(0);
state = 0;
} else if (n == '%' || n == '+' || n < 0) {
state = 3;
builder.append((char) (n & 0xFF));
} else {
builder.append((char) n);
}
break;
}
case 3: {
if (n == '&') {
if (builder.length() > maxAttributeSize) {
throw new WebApplicationException(Response.Status.REQUEST_ENTITY_TOO_LARGE);
}
addPair(name, decodeParameterValue(name, builder.toString(), charset, true, new StringBuilder()));
builder.setLength(0);
state = 0;
} else {
builder.append((char) (n & 0xFF));
}
break;
}
}
}
if (builder.length() > maxAttributeSize) {
throw new WebApplicationException(Response.Status.REQUEST_ENTITY_TOO_LARGE);
}
}
private void addPair(String name, String value) {
//if there was exception during decoding ignore the parameter [UNDERTOW-1554]
if (name != null && value != null) {
data.add(name, value);
}
}
private String decodeParameterValue(String name, String value, String charset, boolean decodeSlash,
StringBuilder stringBuilder) {
return URLUtils.decode(value, Charset.forName(charset), decodeSlash, stringBuilder);
}
private String decodeParameterName(String name, String charset, boolean decodeSlash, StringBuilder stringBuilder) {
return URLUtils.decode(name, Charset.forName(charset), decodeSlash, stringBuilder);
}
@Override
public void parse() throws Exception {
if (exchange.getFormData() != null) {
return;
}
exchange.suspend();
exchange.serverRequest().setReadListener(this);
exchange.serverRequest().resumeRequestInput();
}
@Override
public FormData parseBlocking() throws Exception {
final FormData existing = exchange.getFormData();
if (existing != null) {
return existing;
}
try (InputStream input = exchange.getInputStream()) {
int c;
byte[] data = new byte[1024];
while ((c = input.read(data)) > 0) {
ByteBuffer buf = ByteBuffer.wrap(data, 0, c);
doParse(buf);
}
inputDone();
return this.data;
}
}
@Override
public void close() throws IOException {
}
@Override
public void setCharacterEncoding(final String encoding) {
this.charset = encoding;
}
@Override
public void done() {
inputDone();
exchange.resume();
}
private void inputDone() {
if (state == 2) {
addPair(name, builder.toString());
} else if (state == 3) {
addPair(name, decodeParameterValue(name, builder.toString(), charset, true, new StringBuilder()));
} else if (builder.length() > 0) {
if (state == 1) {
addPair(decodeParameterName(builder.toString(), charset, true, new StringBuilder()), "");
} else {
addPair(builder.toString(), "");
}
}
state = 4;
exchange.setFormData(data);
}
@Override
public void data(ByteBuffer data) {
try {
doParse(data);
} catch (Exception e) {
exchange.resume(e);
}
}
}
}
| FormEncodedDataParser |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/planning/StageExecutionInterval.java | {
"start": 1284,
"end": 1413
} | class ____ to compute the time interval in which the stage can
* be allocated resources by {@link IterativePlanner}.
*/
public | used |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/ActiveResources.java | {
"start": 678,
"end": 4622
} | class ____ {
private final boolean isActiveResourceRetentionAllowed;
private final Executor monitorClearedResourcesExecutor;
@VisibleForTesting final Map<Key, ResourceWeakReference> activeEngineResources = new HashMap<>();
private final ReferenceQueue<EngineResource<?>> resourceReferenceQueue = new ReferenceQueue<>();
private ResourceListener listener;
private volatile boolean isShutdown;
@Nullable private volatile DequeuedResourceCallback cb;
ActiveResources(boolean isActiveResourceRetentionAllowed) {
this(
isActiveResourceRetentionAllowed,
java.util.concurrent.Executors.newSingleThreadExecutor(
new ThreadFactory() {
@Override
public Thread newThread(@NonNull final Runnable r) {
return new Thread(
new Runnable() {
@Override
public void run() {
Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
r.run();
}
},
"glide-active-resources");
}
}));
}
@VisibleForTesting
ActiveResources(
boolean isActiveResourceRetentionAllowed, Executor monitorClearedResourcesExecutor) {
this.isActiveResourceRetentionAllowed = isActiveResourceRetentionAllowed;
this.monitorClearedResourcesExecutor = monitorClearedResourcesExecutor;
monitorClearedResourcesExecutor.execute(
new Runnable() {
@Override
public void run() {
cleanReferenceQueue();
}
});
}
void setListener(ResourceListener listener) {
synchronized (listener) {
synchronized (this) {
this.listener = listener;
}
}
}
synchronized void activate(Key key, EngineResource<?> resource) {
ResourceWeakReference toPut =
new ResourceWeakReference(
key, resource, resourceReferenceQueue, isActiveResourceRetentionAllowed);
ResourceWeakReference removed = activeEngineResources.put(key, toPut);
if (removed != null) {
removed.reset();
}
}
synchronized void deactivate(Key key) {
ResourceWeakReference removed = activeEngineResources.remove(key);
if (removed != null) {
removed.reset();
}
}
@Nullable
synchronized EngineResource<?> get(Key key) {
ResourceWeakReference activeRef = activeEngineResources.get(key);
if (activeRef == null) {
return null;
}
EngineResource<?> active = activeRef.get();
if (active == null) {
cleanupActiveReference(activeRef);
}
return active;
}
@SuppressWarnings({"WeakerAccess", "SynchronizeOnNonFinalField"})
@Synthetic
void cleanupActiveReference(@NonNull ResourceWeakReference ref) {
synchronized (this) {
activeEngineResources.remove(ref.key);
if (!ref.isCacheable || ref.resource == null) {
return;
}
}
EngineResource<?> newResource =
new EngineResource<>(
ref.resource,
/* isMemoryCacheable= */ true,
/* isRecyclable= */ false,
ref.key,
listener);
listener.onResourceReleased(ref.key, newResource);
}
@SuppressWarnings("WeakerAccess")
@Synthetic
void cleanReferenceQueue() {
while (!isShutdown) {
try {
ResourceWeakReference ref = (ResourceWeakReference) resourceReferenceQueue.remove();
cleanupActiveReference(ref);
// This section for testing only.
DequeuedResourceCallback current = cb;
if (current != null) {
current.onResourceDequeued();
}
// End for testing only.
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
}
@VisibleForTesting
void setDequeuedResourceCallback(DequeuedResourceCallback cb) {
this.cb = cb;
}
@VisibleForTesting
| ActiveResources |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java | {
"start": 2136,
"end": 2550
} | class ____ extends ESTestCase {
private ThreadPool threadPool;
@Override
public void setUp() throws Exception {
super.setUp();
threadPool = new TestThreadPool(NetworkModuleTests.class.getName());
}
@Override
public void tearDown() throws Exception {
super.tearDown();
ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
static | NetworkModuleTests |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/fs/local/LocalRecoverableFsDataOutputStreamTest.java | {
"start": 3191,
"end": 6133
} | class ____ extends FileChannel {
final FileChannel delegate;
private final List<Event> events;
TestFileChannel(FileChannel delegate, List<Event> events) {
this.delegate = delegate;
this.events = events;
}
@Override
public int read(ByteBuffer dst) throws IOException {
return delegate.read(dst);
}
@Override
public long read(ByteBuffer[] dsts, int offset, int length) throws IOException {
return delegate.read(dsts, offset, length);
}
@Override
public int write(ByteBuffer src) throws IOException {
return delegate.write(src);
}
@Override
public long write(ByteBuffer[] srcs, int offset, int length) throws IOException {
return delegate.write(srcs, offset, length);
}
@Override
public long position() throws IOException {
return delegate.position();
}
@Override
public FileChannel position(long newPosition) throws IOException {
return delegate.position(newPosition);
}
@Override
public long size() throws IOException {
return delegate.size();
}
@Override
public FileChannel truncate(long size) throws IOException {
return delegate.truncate(size);
}
@Override
public void force(boolean metaData) throws IOException {
delegate.force(metaData);
events.add(Event.SYNC);
}
@Override
public long transferTo(long position, long count, WritableByteChannel target)
throws IOException {
return delegate.transferTo(position, count, target);
}
@Override
public long transferFrom(ReadableByteChannel src, long position, long count)
throws IOException {
return delegate.transferFrom(src, position, count);
}
@Override
public int read(ByteBuffer dst, long position) throws IOException {
return delegate.read(dst, position);
}
@Override
public int write(ByteBuffer src, long position) throws IOException {
return delegate.write(src, position);
}
@Override
public MappedByteBuffer map(MapMode mode, long position, long size) throws IOException {
return delegate.map(mode, position, size);
}
@Override
public FileLock lock(long position, long size, boolean shared) throws IOException {
return delegate.lock(position, size, shared);
}
@Override
public FileLock tryLock(long position, long size, boolean shared) throws IOException {
return delegate.tryLock(position, size, shared);
}
@Override
protected void implCloseChannel() {}
}
}
| TestFileChannel |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/profile/AbstractProfiler.java | {
"start": 543,
"end": 1390
} | class ____<PB extends AbstractProfileBreakdown<?>, E> {
protected final AbstractInternalProfileTree<PB, E> profileTree;
public AbstractProfiler(AbstractInternalProfileTree<PB, E> profileTree) {
this.profileTree = profileTree;
}
/**
* Get the {@link AbstractProfileBreakdown} for the given element in the
* tree, potentially creating it if it did not exist.
*/
public PB getQueryBreakdown(E query) {
return profileTree.getProfileBreakdown(query);
}
/**
* Removes the last (e.g. most recent) element on the stack.
*/
public void pollLastElement() {
profileTree.pollLast();
}
/**
* @return a hierarchical representation of the profiled tree
*/
public List<ProfileResult> getTree() {
return profileTree.getTree();
}
}
| AbstractProfiler |
java | apache__camel | components/camel-ftp/src/main/java/org/apache/camel/component/file/remote/SftpConfiguration.java | {
"start": 1112,
"end": 11307
} | class ____ extends RemoteFileConfiguration {
public static final int DEFAULT_SFTP_PORT = 22;
@UriParam(label = "security", secret = true)
private String knownHostsFile;
@UriParam(label = "security", defaultValue = "true")
private boolean useUserKnownHostsFile = true;
@UriParam(label = "security", defaultValue = "false")
private boolean autoCreateKnownHostsFile;
@UriParam(label = "security", secret = true)
@Metadata(supportFileReference = true)
private String knownHostsUri;
@UriParam(label = "security", secret = true)
private byte[] knownHosts;
@UriParam(label = "security", secret = true)
private String privateKeyFile;
@UriParam(label = "security", secret = true)
@Metadata(supportFileReference = true)
private String privateKeyUri;
@UriParam(label = "security", secret = true)
private byte[] privateKey;
@UriParam(label = "security", secret = true)
private String privateKeyPassphrase;
@UriParam(label = "security", secret = true)
private KeyPair keyPair;
@UriParam(defaultValue = "no", enums = "no,yes", label = "security")
private String strictHostKeyChecking = "no";
@UriParam(label = "advanced")
private int serverAliveInterval;
@UriParam(defaultValue = "1", label = "advanced")
private int serverAliveCountMax = 1;
@UriParam(label = "producer,advanced")
private String chmod;
// comma separated list of ciphers.
// null means default jsch list will be used
@UriParam(label = "security")
private String ciphers;
@UriParam(label = "advanced")
private int compression;
@UriParam(label = "security")
private String preferredAuthentications;
@UriParam(defaultValue = "WARN", enums = "DEBUG,INFO,WARN,ERROR")
private LoggingLevel jschLoggingLevel = LoggingLevel.WARN;
@UriParam(label = "advanced")
private Integer bulkRequests;
@UriParam(label = "advanced")
private String bindAddress;
@UriParam(label = "advanced", defaultValue = "true")
private boolean existDirCheckUsingLs = true;
@UriParam(label = "security")
private String keyExchangeProtocols;
@UriParam(label = "producer,advanced")
private String chmodDirectory;
@UriParam(label = "security")
private String serverHostKeys;
@UriParam(label = "security")
private String publicKeyAcceptedAlgorithms;
@UriParam(label = "advanced")
private String filenameEncoding;
@UriParam(label = "advanced", defaultValue = "DEBUG")
private LoggingLevel serverMessageLoggingLevel = LoggingLevel.DEBUG;
public SftpConfiguration() {
setProtocol("sftp");
}
public SftpConfiguration(URI uri) {
super(uri);
}
@Override
protected void setDefaultPort() {
setPort(DEFAULT_SFTP_PORT);
}
public String getKnownHostsFile() {
return knownHostsFile;
}
/**
* Sets the known_hosts file, so that the SFTP endpoint can do host key verification.
*/
public void setKnownHostsFile(String knownHostsFile) {
this.knownHostsFile = knownHostsFile;
}
public String getKnownHostsUri() {
return knownHostsUri;
}
public boolean isUseUserKnownHostsFile() {
return useUserKnownHostsFile;
}
/**
* If knownHostFile has not been explicit configured then use the host file from
* System.getProperty(user.home)/.ssh/known_hosts
*/
public void setUseUserKnownHostsFile(boolean useUserKnownHostsFile) {
this.useUserKnownHostsFile = useUserKnownHostsFile;
}
public boolean isAutoCreateKnownHostsFile() {
return autoCreateKnownHostsFile;
}
/**
* If knownHostFile does not exist, then attempt to auto-create the path and file (beware that the file will be
* created by the current user of the running Java process, which may not have file permission).
*/
public void setAutoCreateKnownHostsFile(boolean autoCreateKnownHostsFile) {
this.autoCreateKnownHostsFile = autoCreateKnownHostsFile;
}
/**
* Sets the known_hosts file (loaded from classpath by default), so that the SFTP endpoint can do host key
* verification.
*/
public void setKnownHostsUri(String knownHostsUri) {
this.knownHostsUri = knownHostsUri;
}
public byte[] getKnownHosts() {
return knownHosts;
}
/**
* Sets the known_hosts from the byte array, so that the SFTP endpoint can do host key verification.
*/
public void setKnownHosts(byte[] knownHosts) {
this.knownHosts = knownHosts;
}
public String getPrivateKeyFile() {
return privateKeyFile;
}
/**
* Set the private key file so that the SFTP endpoint can do private key verification.
*/
public void setPrivateKeyFile(String privateKeyFile) {
this.privateKeyFile = privateKeyFile;
}
public String getPrivateKeyUri() {
return privateKeyUri;
}
/**
* Set the private key file (loaded from classpath by default) so that the SFTP endpoint can do private key
* verification.
*/
public void setPrivateKeyUri(String privateKeyUri) {
this.privateKeyUri = privateKeyUri;
}
public byte[] getPrivateKey() {
return privateKey;
}
/**
* Set the private key as byte[] so that the SFTP endpoint can do private key verification.
*/
public void setPrivateKey(byte[] privateKey) {
this.privateKey = privateKey;
}
public String getPrivateKeyPassphrase() {
return privateKeyPassphrase;
}
/**
* Set the private key file passphrase so that the SFTP endpoint can do private key verification.
*/
public void setPrivateKeyPassphrase(String privateKeyFilePassphrase) {
this.privateKeyPassphrase = privateKeyFilePassphrase;
}
@Deprecated
public String getPrivateKeyFilePassphrase() {
return privateKeyPassphrase;
}
@Deprecated
public void setPrivateKeyFilePassphrase(String privateKeyFilePassphrase) {
this.privateKeyPassphrase = privateKeyFilePassphrase;
}
public KeyPair getKeyPair() {
return keyPair;
}
/**
* Sets a key pair of the public and private key so to that the SFTP endpoint can do public/private key
* verification.
*/
public void setKeyPair(KeyPair keyPair) {
this.keyPair = keyPair;
}
public String getStrictHostKeyChecking() {
return strictHostKeyChecking;
}
/**
* Sets whether to use strict host key checking.
*/
public void setStrictHostKeyChecking(String strictHostKeyChecking) {
this.strictHostKeyChecking = strictHostKeyChecking;
}
/**
* Sets the interval (millis) to send a keep-alive message. If zero is specified, any keep-alive message must not be
* sent. The default interval is zero.
*/
public void setServerAliveInterval(int serverAliveInterval) {
this.serverAliveInterval = serverAliveInterval;
}
public int getServerAliveInterval() {
return serverAliveInterval;
}
/**
* Sets the number of keep-alive messages which may be sent without receiving any messages back from the server. If
* this threshold is reached while keep-alive messages are being sent, the connection will be disconnected. The
* default value is one.
*/
public void setServerAliveCountMax(int serverAliveCountMax) {
this.serverAliveCountMax = serverAliveCountMax;
}
public int getServerAliveCountMax() {
return serverAliveCountMax;
}
/**
* Allows you to set chmod on the stored file. For example chmod=640.
*/
public void setChmod(String chmod) {
this.chmod = chmod;
}
public String getChmod() {
return chmod;
}
/**
* Allows you to set chmod during path creation. For example chmod=640.
*/
public void setChmodDirectory(String chmodDirectory) {
this.chmodDirectory = chmodDirectory;
}
public String getChmodDirectory() {
return chmodDirectory;
}
/**
* Set a comma separated list of ciphers that will be used in order of preference. Possible cipher names are defined
* by JCraft JSCH. Some examples include:
* aes128-ctr,aes128-cbc,3des-ctr,3des-cbc,blowfish-cbc,aes192-cbc,aes256-cbc. If not specified the default list
* from JSCH will be used.
*/
public void setCiphers(String ciphers) {
this.ciphers = ciphers;
}
public String getCiphers() {
return ciphers;
}
public int getCompression() {
return compression;
}
/**
* To use compression. Specify a level from 1 to 10. Important: You must manually add the needed JSCH zlib JAR to
* the classpath for compression support.
*/
public void setCompression(int compression) {
this.compression = compression;
}
/**
* Set the preferred authentications which SFTP endpoint will used. Some example include:password,publickey. If not
* specified the default list from JSCH will be used.
*/
public void setPreferredAuthentications(String pAuthentications) {
this.preferredAuthentications = pAuthentications;
}
public String getPreferredAuthentications() {
return preferredAuthentications;
}
public LoggingLevel getJschLoggingLevel() {
return jschLoggingLevel;
}
/**
* The logging level to use for JSCH activity logging. As JSCH is verbose at by default at INFO level the threshold
* is WARN by default.
*/
public void setJschLoggingLevel(LoggingLevel jschLoggingLevel) {
this.jschLoggingLevel = jschLoggingLevel;
}
/**
* Specifies how many requests may be outstanding at any one time. Increasing this value may slightly improve file
* transfer speed but will increase memory usage.
*/
public void setBulkRequests(Integer bulkRequests) {
this.bulkRequests = bulkRequests;
}
public Integer getBulkRequests() {
return bulkRequests;
}
/**
* Specifies the address of the local | SftpConfiguration |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/support/discovery/ClassContainerSelectorResolver.java | {
"start": 1281,
"end": 2732
} | class ____ implements SelectorResolver {
private final Predicate<Class<?>> classFilter;
private final Predicate<String> classNameFilter;
ClassContainerSelectorResolver(Predicate<Class<?>> classFilter, Predicate<String> classNameFilter) {
this.classFilter = Preconditions.notNull(classFilter, "classFilter must not be null");
this.classNameFilter = Preconditions.notNull(classNameFilter, "classNameFilter must not be null");
}
@Override
public Resolution resolve(ClasspathRootSelector selector, Context context) {
return classSelectors(findAllClassesInClasspathRoot(selector.getClasspathRoot(), classFilter, classNameFilter));
}
@Override
public Resolution resolve(ModuleSelector selector, Context context) {
if (selector.getModule().isPresent()) {
Module module = selector.getModule().get();
return classSelectors(findAllClassesInModule(module, classFilter, classNameFilter));
}
return classSelectors(findAllClassesInModule(selector.getModuleName(), classFilter, classNameFilter));
}
@Override
public Resolution resolve(PackageSelector selector, Context context) {
return classSelectors(findAllClassesInPackage(selector.getPackageName(), classFilter, classNameFilter));
}
private Resolution classSelectors(List<Class<?>> classes) {
if (classes.isEmpty()) {
return unresolved();
}
return selectors(classes.stream().map(DiscoverySelectors::selectClass).collect(toSet()));
}
}
| ClassContainerSelectorResolver |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/scheduler/SchedulerRouteTest.java | {
"start": 2268,
"end": 2489
} | class ____ {
public final AtomicInteger counter = new AtomicInteger();
public void someMethod() {
LOG.debug("Invoked someMethod()");
counter.incrementAndGet();
}
}
}
| MyBean |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/TestResolver.java | {
"start": 1285,
"end": 1731
} | class ____ {
/**
* Test promote action INT -> LONG, with logical type for LONG.
*/
@Test
void resolveTime() {
final Schema writeSchema = Schema.create(Schema.Type.INT);
final Schema readSchema = new TimeConversions.TimeMicrosConversion().getRecommendedSchema(); // LONG
Resolver.Action action = Resolver.resolve(writeSchema, readSchema);
Assertions.assertNotNull(action);
MatcherAssert.assertThat("Wrong | TestResolver |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/opensaml5Main/java/org/springframework/security/saml2/provider/service/metadata/OpenSaml5MetadataResolver.java | {
"start": 1180,
"end": 2877
} | class ____ implements Saml2MetadataResolver {
static {
OpenSamlInitializationService.initialize();
}
private final BaseOpenSamlMetadataResolver delegate;
public OpenSaml5MetadataResolver() {
this.delegate = new BaseOpenSamlMetadataResolver(new OpenSaml5Template());
}
@Override
public String resolve(RelyingPartyRegistration relyingPartyRegistration) {
return this.delegate.resolve(relyingPartyRegistration);
}
public String resolve(Iterable<RelyingPartyRegistration> relyingPartyRegistrations) {
return this.delegate.resolve(relyingPartyRegistrations);
}
/**
* Set a {@link Consumer} for modifying the OpenSAML {@link EntityDescriptor}
* @param entityDescriptorCustomizer a consumer that accepts an
* {@link EntityDescriptorParameters}
* @since 5.7
*/
public void setEntityDescriptorCustomizer(Consumer<EntityDescriptorParameters> entityDescriptorCustomizer) {
this.delegate.setEntityDescriptorCustomizer(
(parameters) -> entityDescriptorCustomizer.accept(new EntityDescriptorParameters(parameters)));
}
/**
* Configure whether to pretty-print the metadata XML. This can be helpful when
* signing the metadata payload.
*
* @since 6.2
**/
public void setUsePrettyPrint(boolean usePrettyPrint) {
this.delegate.setUsePrettyPrint(usePrettyPrint);
}
/**
* Configure whether to sign the metadata, defaults to {@code false}.
*
* @since 6.4
*/
public void setSignMetadata(boolean signMetadata) {
this.delegate.setSignMetadata(signMetadata);
}
/**
* A tuple containing an OpenSAML {@link EntityDescriptor} and its associated
* {@link RelyingPartyRegistration}
*
* @since 5.7
*/
public static final | OpenSaml5MetadataResolver |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.