language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanDuplicateTypeCreationIntegrationTests.java | {
"start": 1532,
"end": 1911
} | class ____ {
@MockitoBean
ExampleService mock1;
@MockitoBean
ExampleService mock2;
@Autowired
List<ExampleService> services;
@Test
void duplicateMocksShouldHaveBeenCreated() {
assertThat(services).containsExactly(mock1, mock2);
assertThat(mock1).isNotSameAs(mock2);
assertIsMock(mock1);
assertIsMock(mock2);
}
}
| MockitoBeanDuplicateTypeCreationIntegrationTests |
java | spring-projects__spring-boot | module/spring-boot-micrometer-observation/src/test/java/org/springframework/boot/micrometer/observation/autoconfigure/PropertiesObservationFilterPredicateTests.java | {
"start": 1023,
"end": 3828
} | class ____ {
@Test
void shouldDoNothingIfKeyValuesAreEmpty() {
PropertiesObservationFilterPredicate filter = createFilter();
Context mapped = mapContext(filter, "a", "alpha");
assertThat(mapped.getLowCardinalityKeyValues()).containsExactly(KeyValue.of("a", "alpha"));
}
@Test
void shouldAddKeyValues() {
PropertiesObservationFilterPredicate filter = createFilter("b", "beta");
Context mapped = mapContext(filter, "a", "alpha");
assertThat(mapped.getLowCardinalityKeyValues()).containsExactly(KeyValue.of("a", "alpha"),
KeyValue.of("b", "beta"));
}
@Test
void shouldFilter() {
PropertiesObservationFilterPredicate predicate = createPredicate("spring.security");
Context context = new Context();
assertThat(predicate.test("spring.security.filterchains", context)).isFalse();
assertThat(predicate.test("spring.security", context)).isFalse();
assertThat(predicate.test("spring.data", context)).isTrue();
assertThat(predicate.test("spring", context)).isTrue();
}
@Test
void filterShouldFallbackToAll() {
PropertiesObservationFilterPredicate predicate = createPredicate("all");
Context context = new Context();
assertThat(predicate.test("spring.security.filterchains", context)).isFalse();
assertThat(predicate.test("spring.security", context)).isFalse();
assertThat(predicate.test("spring.data", context)).isFalse();
assertThat(predicate.test("spring", context)).isFalse();
}
@Test
void shouldNotFilterIfDisabledNamesIsEmpty() {
PropertiesObservationFilterPredicate predicate = createPredicate();
Context context = new Context();
assertThat(predicate.test("spring.security.filterchains", context)).isTrue();
assertThat(predicate.test("spring.security", context)).isTrue();
assertThat(predicate.test("spring.data", context)).isTrue();
assertThat(predicate.test("spring", context)).isTrue();
}
private static Context mapContext(PropertiesObservationFilterPredicate filter, String... initialKeyValues) {
Context context = new Context();
context.addLowCardinalityKeyValues(KeyValues.of(initialKeyValues));
return filter.map(context);
}
private static PropertiesObservationFilterPredicate createFilter(String... keyValues) {
ObservationProperties properties = new ObservationProperties();
for (int i = 0; i < keyValues.length; i += 2) {
properties.getKeyValues().put(keyValues[i], keyValues[i + 1]);
}
return new PropertiesObservationFilterPredicate(properties);
}
private static PropertiesObservationFilterPredicate createPredicate(String... disabledNames) {
ObservationProperties properties = new ObservationProperties();
for (String name : disabledNames) {
properties.getEnable().put(name, false);
}
return new PropertiesObservationFilterPredicate(properties);
}
}
| PropertiesObservationFilterPredicateTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java | {
"start": 2759,
"end": 6186
} | class ____ implements ShardsAllocator {
private static final Logger logger = LogManager.getLogger(DesiredBalanceShardsAllocator.class);
private final ShardsAllocator delegateAllocator;
private final ThreadPool threadPool;
/**
* This is a callback to run {@link AllocationService#executeWithRoutingAllocation(ClusterState, String, RerouteStrategy)}, which
* produces a new ClusterState with the changes made by {@link DesiredBalanceReconciler#reconcile}. The {@link RerouteStrategy} provided
* to the callback calls into {@link #desiredBalanceReconciler} for the changes. The {@link #masterServiceTaskQueue} will publish the
* new cluster state after the cluster state is constructed by the {@link ReconcileDesiredBalanceExecutor}.
*/
private final DesiredBalanceReconcilerAction reconciler;
private final DesiredBalanceComputer desiredBalanceComputer;
/**
* Reconciliation ({@link DesiredBalanceReconciler#reconcile(DesiredBalance, RoutingAllocation)}) takes the {@link DesiredBalance}
* output of {@link DesiredBalanceComputer#compute} and identifies how shards need to be added, moved or removed to go from the current
* cluster shard allocation to the new desired allocation.
*/
private final DesiredBalanceReconciler desiredBalanceReconciler;
private final ContinuousComputation<DesiredBalanceInput> desiredBalanceComputation;
/**
* Saves and runs listeners after DesiredBalance computations complete.
*/
private final PendingListenersQueue pendingListenersQueue;
/**
* Each reroute request gets assigned a monotonically increasing sequence number. Many reroute requests may arrive before the balancer
* asynchronously runs a computation. The balancer will use the latest request and save this sequence number to track back to the
* request.
*/
private final AtomicLong indexGenerator = new AtomicLong(-1);
private final ConcurrentLinkedQueue<List<MoveAllocationCommand>> pendingDesiredBalanceMoves = new ConcurrentLinkedQueue<>();
private final MasterServiceTaskQueue<ReconcileDesiredBalanceTask> masterServiceTaskQueue;
private final AtomicReference<DesiredBalance> currentDesiredBalanceRef = new AtomicReference<>(DesiredBalance.NOT_MASTER);
private volatile boolean resetCurrentDesiredBalance = false;
private final Set<String> processedNodeShutdowns = new HashSet<>();
private final NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator;
private final DesiredBalanceMetrics desiredBalanceMetrics;
private final AllocationBalancingRoundMetrics balancingRoundMetrics;
/**
* Manages balancer round results in order to report on the balancer activity in a configurable manner.
*/
private final AllocationBalancingRoundSummaryService balancerRoundSummaryService;
// stats
protected final CounterMetric computationsSubmitted = new CounterMetric();
protected final CounterMetric computationsExecuted = new CounterMetric();
protected final CounterMetric computationsConverged = new CounterMetric();
protected final MeanMetric computedShardMovements = new MeanMetric();
protected final CounterMetric cumulativeComputationTime = new CounterMetric();
protected final CounterMetric cumulativeReconciliationTime = new CounterMetric();
@FunctionalInterface
public | DesiredBalanceShardsAllocator |
java | quarkusio__quarkus | devtools/cli-common/src/main/java/io/quarkus/cli/common/build/JBangRunner.java | {
"start": 787,
"end": 5864
} | class ____ implements BuildSystemRunner {
static final String[] windowsWrapper = { "jbang.cmd", "jbang.ps1" };
static final String otherWrapper = "jbang";
final OutputOptionMixin output;
final RegistryClientMixin registryClient;
final PropertiesOptions propertiesOptions;
final Path projectRoot;
String mainPath;
public JBangRunner(OutputOptionMixin output, PropertiesOptions propertiesOptions, RegistryClientMixin registryClient,
Path projectRoot) {
this.output = output;
this.registryClient = registryClient;
this.projectRoot = projectRoot;
this.propertiesOptions = propertiesOptions;
}
@Override
public OutputOptionMixin getOutput() {
return output;
}
@Override
public BuildTool getBuildTool() {
return BuildTool.JBANG;
}
@Override
public Integer listExtensionCategories(RunModeOption runMode, CategoryListFormatOptions format) throws Exception {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public Integer listExtensions(RunModeOption runMode, ListFormatOptions format, boolean installable, String searchPattern,
String category)
throws Exception {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public Integer addExtension(RunModeOption runMode, Set<String> extensions) {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public Integer removeExtension(RunModeOption runMode, Set<String> extensions) {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public Integer projectInfo(boolean perModule) {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public Integer updateProject(TargetQuarkusVersionGroup targetQuarkusVersion, RewriteGroup rewrite)
throws Exception {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public BuildCommandArgs prepareAction(String action, BuildOptions buildOptions, RunModeOption runMode,
List<String> params) {
ArrayDeque<String> args = new ArrayDeque<>();
if (buildOptions.offline) {
args.add("--offline");
}
if (output.isVerbose()) {
args.add("--verbose");
}
if (buildOptions.buildNative) {
args.add("--native");
}
if (buildOptions.clean) {
args.add("--fresh");
}
args.add(action);
args.addAll(flattenMappedProperties(propertiesOptions.properties));
args.add(registryClient.getRegistryClientProperty());
args.addAll(params);
args.add(getMainPath());
return prependExecutable(args);
}
@Override
public BuildCommandArgs prepareBuild(BuildOptions buildOptions, RunModeOption runMode, List<String> params) {
return prepareAction("build", buildOptions, runMode, params);
}
@Override
public BuildCommandArgs prepareTest(BuildOptions buildOptions, RunModeOption runMode, List<String> params, String filter) {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public List<Supplier<BuildCommandArgs>> prepareDevTestMode(boolean devMode, DevOptions commonOptions,
DebugOptions debugOptions,
List<String> params) {
throw new UnsupportedOperationException("Not there yet. ;)");
}
@Override
public Path getProjectRoot() {
return projectRoot;
}
@Override
public File getWrapper() {
return ExecuteUtil.findWrapper(projectRoot, windowsWrapper, otherWrapper);
}
@Override
public File getExecutable() {
return ExecuteUtil.findExecutable(otherWrapper,
"Unable to find the jbang executable, is it in your path?",
output);
}
String getMainPath() {
if (mainPath == null) {
File src = projectRoot.resolve("src").toFile();
if (src.exists() && src.isDirectory()) {
String[] names = src.list();
if (names != null && names.length > 0) {
String first = null;
for (String name : names) {
if (name.equalsIgnoreCase("main.java")) {
mainPath = fixPath(src.toPath().resolve(name));
return mainPath;
}
if (first == null && name.endsWith(".java")) {
first = name;
}
}
if (first != null) {
mainPath = fixPath(src.toPath().resolve(first));
return mainPath;
}
}
}
throw new IllegalStateException("Unable to find a source file for use with JBang");
}
return mainPath;
}
}
| JBangRunner |
java | google__dagger | javatests/dagger/internal/codegen/DelegateRequestRepresentationTest.java | {
"start": 11959,
"end": 12864
} | interface ____ {",
" Provider<CharSequence> charSequence();",
" CharSequence charSequenceInstance();",
"",
" @Named(\"named\") Provider<String> namedString();",
"}");
CompilerTests.daggerCompiler(module, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerTestComponent"));
});
}
@Test
public void doubleBinds() throws Exception {
Source module =
CompilerTests.javaSource(
"test.TestModule",
"package test;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Module",
" | TestComponent |
java | google__guice | extensions/throwingproviders/test/com/google/inject/throwingproviders/CheckedProviderTest.java | {
"start": 26024,
"end": 26070
} | interface ____ {
String s();
}
static | Foo |
java | elastic__elasticsearch | modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java | {
"start": 2402,
"end": 11599
} | class ____ extends ESIntegTestCase {
private static final Logger logger = LogManager.getLogger(KibanaThreadPoolIT.class);
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(IndexingPressure.MAX_PRIMARY_BYTES.getKey(), "1KB")
.put(IndexingPressure.MAX_COORDINATING_BYTES.getKey(), "1KB")
.put("thread_pool.search.size", 1)
.put("thread_pool.search.queue_size", 1)
.put("thread_pool.write.size", 1)
.put("thread_pool.write.queue_size", 1)
.put("thread_pool.get.size", 1)
.put("thread_pool.get.queue_size", 1)
// a rejected GET may retry on an INITIALIZING shard (the target of a relocation) and unexpectedly succeed, so block rebalancing
.put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)
.build();
}
private static final String USER_INDEX = "user_index";
// For system indices that use ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS, we'll want to
// block normal system index thread pools as well.
private static final Set<String> THREAD_POOLS_TO_BLOCK = Set.of(ThreadPool.Names.GET, ThreadPool.Names.WRITE, ThreadPool.Names.SEARCH);
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Set.of(KibanaPlugin.class);
}
public void testKibanaThreadPoolByPassesBlockedThreadPools() throws Exception {
List<String> kibanaSystemIndices = Stream.of(
KibanaPlugin.KIBANA_INDEX_DESCRIPTOR.getIndexPattern(),
KibanaPlugin.REPORTING_INDEX_DESCRIPTOR.getIndexPattern(),
KibanaPlugin.APM_AGENT_CONFIG_INDEX_DESCRIPTOR.getIndexPattern(),
KibanaPlugin.APM_CUSTOM_LINK_INDEX_DESCRIPTOR.getIndexPattern()
).map(s -> s.replace("*", randomAlphaOfLength(8).toLowerCase(Locale.ROOT))).toList();
runWithBlockedThreadPools(() -> {
for (String index : kibanaSystemIndices) {
// index documents
String idToDelete = client().prepareIndex(index).setSource(Map.of("foo", "delete me!")).get().getId();
String idToUpdate = client().prepareIndex(index).setSource(Map.of("foo", "update me!")).get().getId();
// bulk index, delete, and update
Client bulkClient = client();
BulkResponse response = bulkClient.prepareBulk(index)
.add(bulkClient.prepareIndex(index).setSource(Map.of("foo", "search me!")))
.add(bulkClient.prepareDelete(index, idToDelete))
.add(bulkClient.prepareUpdate().setId(idToUpdate).setDoc(Map.of("foo", "I'm updated!")))
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
assertNoFailures(response);
// match-all search
assertHitCount(client().prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), 2);
}
});
}
public void testBlockedThreadPoolsRejectUserRequests() throws Exception {
assertAcked(
client().admin()
.indices()
.prepareCreate(USER_INDEX)
.setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) // avoid retrying rejected actions
);
runWithBlockedThreadPools(this::assertThreadPoolsBlocked);
assertAcked(client().admin().indices().prepareDelete(USER_INDEX));
}
private void assertThreadPoolsBlocked() {
var e1 = expectThrows(
EsRejectedExecutionException.class,
() -> client().prepareIndex(USER_INDEX).setSource(Map.of("foo", "bar")).get()
);
assertThat(e1.getMessage(), startsWith("rejected execution of TimedRunnable"));
final var getFuture = client().prepareGet(USER_INDEX, "id").execute();
// response handling is force-executed on GET pool, so we must
// (a) wait for that task to be enqueued, expanding the queue beyond its configured limit, and
// (b) check for the exception in the background
try {
assertTrue(waitUntil(() -> {
if (getFuture.isDone()) {
return true;
}
for (ThreadPool threadPool : internalCluster().getInstances(ThreadPool.class)) {
for (ThreadPoolStats.Stats stats : threadPool.stats().stats()) {
if (stats.name().equals(ThreadPool.Names.GET) && stats.queue() > 1) {
return true;
}
}
}
return false;
}));
} catch (Exception e) {
fail(e);
}
new Thread(() -> expectThrows(EsRejectedExecutionException.class, () -> getFuture.actionGet(SAFE_AWAIT_TIMEOUT))).start();
// intentionally commented out this test until https://github.com/elastic/elasticsearch/issues/97916 is fixed
var e3 = expectThrows(
SearchPhaseExecutionException.class,
() -> client().prepareSearch(USER_INDEX)
.setQuery(QueryBuilders.matchAllQuery())
// Request times out if max concurrent shard requests is set to 1
.setMaxConcurrentShardRequests(usually() ? SearchRequest.DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS : randomIntBetween(2, 10))
.get()
);
assertThat(e3.getMessage(), containsString("all shards failed"));
}
protected void runWithBlockedThreadPools(Runnable runnable) throws Exception {
Phaser phaser = new Phaser();
// register this test's thread
phaser.register();
blockThreadPool(phaser);
phaser.arriveAndAwaitAdvance();// wait until all waitAction are executing
fillQueues();
logger.debug("number of nodes " + internalCluster().getNodeNames().length);
logger.debug("number of parties arrived " + phaser.getArrivedParties());
try {
runnable.run();
} finally {
phaser.arriveAndAwaitAdvance(); // release all waitAction
}
}
private void blockThreadPool(Phaser phaser) {
for (String nodeName : internalCluster().getNodeNames()) {
ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName);
for (String threadPoolName : THREAD_POOLS_TO_BLOCK) {
blockThreadPool(threadPoolName, threadPool, phaser);
}
}
}
private void fillQueues() {
for (String nodeName : internalCluster().getNodeNames()) {
ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName);
for (String threadPoolName : THREAD_POOLS_TO_BLOCK) {
fillThreadPoolQueues(threadPoolName, threadPool);
}
}
}
private static void blockThreadPool(String threadPoolName, ThreadPool threadPool, Phaser phaser) {
ThreadPool.Info info = threadPool.info(threadPoolName);
Runnable waitAction = () -> {
phaser.arriveAndAwaitAdvance();// block until all are executed on a threadpool
phaser.arriveAndAwaitAdvance();// block until main thread has not finished
};
phaser.bulkRegister(info.getMax());
for (int i = 0; i < info.getMax(); i++) {
// we need to make sure that there is a task blocking a thread pool
// otherwise a queue might end up having a spot
do {
try {
threadPool.executor(threadPoolName).execute(waitAction);
break;
} catch (EsRejectedExecutionException e) {
// if exception was thrown when submitting, retry.
}
} while (true);
}
}
private static void fillThreadPoolQueues(String threadPoolName, ThreadPool threadPool) {
ThreadPool.Info info = threadPool.info(threadPoolName);
for (int i = 0; i < info.getQueueSize(); i++) {
try {
threadPool.executor(threadPoolName).execute(() -> {});
} catch (EsRejectedExecutionException e) {
logger.debug("Exception when filling the queue " + threadPoolName, e);
logThreadPoolQueue(threadPoolName, threadPool);
// we can't be sure that some other task won't get queued in a test cluster
// but the threadpool's thread is already blocked
}
}
logThreadPoolQueue(threadPoolName, threadPool);
}
private static void logThreadPoolQueue(String threadPoolName, ThreadPool threadPool) {
if (threadPool.executor(threadPoolName) instanceof EsThreadPoolExecutor tpe) {
logger.debug("Thread pool details " + threadPoolName + " " + tpe);
logger.debug(Arrays.toString(tpe.getTasks().toArray()));
}
}
}
| KibanaThreadPoolIT |
java | google__dagger | javatests/dagger/functional/factory/FactoryImplicitModulesTest.java | {
"start": 3204,
"end": 3311
} | interface ____ {
double getDouble();
@Component.Factory
| ConcreteModuleThatCouldBeAbstractComponent |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/ComparableAndComparator.java | {
"start": 2156,
"end": 3336
} | interface ____ subtype of Comparable _and_ Comparator */
private static final Matcher<Tree> COMPARABLE_AND_COMPARATOR_MATCHER =
allOf(COMPARABLE_MATCHER, COMPARATOR_MATCHER);
private static boolean matchAnySuperType(ClassTree tree, VisitorState state) {
List<Tree> superTypes = Lists.<Tree>newArrayList(tree.getImplementsClause());
Tree superClass = tree.getExtendsClause();
if (superClass != null) {
superTypes.add(superClass);
}
return superTypes.stream()
.anyMatch(superType -> COMPARABLE_AND_COMPARATOR_MATCHER.matches(superType, state));
}
@Override
public Description matchClass(ClassTree tree, VisitorState state) {
if (COMPARABLE_AND_COMPARATOR_MATCHER.matches(tree, state)) {
// Filter out inherited case to not warn again
if (matchAnySuperType(tree, state)) {
return Description.NO_MATCH;
}
// enums already implement Comparable and are simultaneously allowed to implement Comparator
ClassSymbol symbol = getSymbol(tree);
if (symbol.isEnum()) {
return Description.NO_MATCH;
}
return describeMatch(tree);
}
return Description.NO_MATCH;
}
}
| is |
java | apache__spark | sql/core/src/main/java/org/apache/spark/sql/connector/write/V1Write.java | {
"start": 1271,
"end": 1352
} | interface ____ extends Write {
InsertableRelation toInsertableRelation();
}
| V1Write |
java | google__dagger | javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java | {
"start": 42282,
"end": 43798
} | class ____ {",
" @Inject void method(",
" @QualifierA",
" @QualifierB",
" String s) {}",
"}");
daggerCompiler(file, QUALIFIER_A, QUALIFIER_B)
.compile(
subject -> {
subject.hasErrorCount(2);
if (CompilerTests.backend(subject) == XProcessingEnv.Backend.KSP) {
// TODO(b/381557487): KSP2 reports the error on the parameter instead of the
// the annotation.
subject.hasErrorContaining(
"A single dependency request may not use more than one @Qualifier")
.onSource(file)
.onLine(9);
} else {
subject.hasErrorContaining(
"A single dependency request may not use more than one @Qualifier")
.onSource(file)
.onLine(7);
subject.hasErrorContaining(
"A single dependency request may not use more than one @Qualifier")
.onSource(file)
.onLine(8);
}
});
}
@Test public void injectConstructorDependsOnProduced() {
Source file =
CompilerTests.javaSource(
"test.A",
"package test;",
"",
"import dagger.producers.Produced;",
"import javax.inject.Inject;",
"",
"final | MultipleQualifierMethodParam |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java | {
"start": 23537,
"end": 24078
} | interface ____ {
String PREFIX = HdfsClientConfigKeys.PREFIX + "mmap.";
String ENABLED_KEY = PREFIX + "enabled";
boolean ENABLED_DEFAULT = true;
String CACHE_SIZE_KEY = PREFIX + "cache.size";
int CACHE_SIZE_DEFAULT = 256;
String CACHE_TIMEOUT_MS_KEY = PREFIX + "cache.timeout.ms";
long CACHE_TIMEOUT_MS_DEFAULT = 60*MINUTE;
String RETRY_TIMEOUT_MS_KEY = PREFIX + "retry.timeout.ms";
long RETRY_TIMEOUT_MS_DEFAULT = 5*MINUTE;
}
/** dfs.client.hedged.read configuration properties */
| Mmap |
java | netty__netty | codec-socks/src/main/java/io/netty/handler/codec/socksx/v5/Socks5PrivateAuthResponse.java | {
"start": 762,
"end": 938
} | interface ____ to the response for private authentication methods
* in the range 0x80-0xFE as defined in RFC 1928. For custom private authentication
* protocols, this | corresponds |
java | reactor__reactor-core | reactor-core/src/jcstress/java/reactor/core/publisher/SinkManyBestEffortStressTest.java | {
"start": 5549,
"end": 6067
} | class ____ {
final SinkManyBestEffort<Integer> sink = SinkManyBestEffort.createBestEffort();
final StressSubscriber<Integer> sub1 = new StressSubscriber<>(0);
final StressSubscriber<Integer> sub2 = new StressSubscriber<>(0);
{
sink.subscribe(sub1);
}
@Actor
public void remove() {
sub1.cancel();
}
@Actor
public void add() {
sink.subscribe(sub2);
}
@Arbiter
public void arbiter(I_Result r) {
r.r1 = sink.subscribers[0] == sub2.subscription ? 1 : 0;
}
}
} | AddVsRemoveStressTest |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/support/ReloadableResourceBundleMessageSource.java | {
"start": 4227,
"end": 17785
} | class ____ extends AbstractResourceBasedMessageSource
implements ResourceLoaderAware {
private static final String XML_EXTENSION = ".xml";
private List<String> fileExtensions = List.of(".properties", XML_EXTENSION);
private @Nullable Properties fileEncodings;
private boolean concurrentRefresh = true;
private PropertiesPersister propertiesPersister = DefaultPropertiesPersister.INSTANCE;
private ResourceLoader resourceLoader = new DefaultResourceLoader();
// Cache to hold filename lists per Locale
private final ConcurrentMap<String, Map<Locale, List<String>>> cachedFilenames = new ConcurrentHashMap<>();
// Cache to hold already loaded properties per filename
private final ConcurrentMap<String, PropertiesHolder> cachedProperties = new ConcurrentHashMap<>();
// Cache to hold already loaded properties per filename
private final ConcurrentMap<Locale, PropertiesHolder> cachedMergedProperties = new ConcurrentHashMap<>();
/**
* Set the list of supported file extensions.
* <p>The default is a list containing {@code .properties} and {@code .xml}.
* @param fileExtensions the file extensions (starts with a dot)
* @since 6.1
*/
public void setFileExtensions(List<String> fileExtensions) {
Assert.isTrue(!CollectionUtils.isEmpty(fileExtensions), "At least one file extension is required");
for (String extension : fileExtensions) {
if (!extension.startsWith(".")) {
throw new IllegalArgumentException("File extension '" + extension + "' should start with '.'");
}
}
this.fileExtensions = Collections.unmodifiableList(fileExtensions);
}
/**
* Set per-file charsets to use for parsing properties files.
* <p>Only applies to classic properties files, not to XML files.
* @param fileEncodings a Properties with filenames as keys and charset
* names as values. Filenames have to match the basename syntax,
* with optional locale-specific components: for example, "WEB-INF/messages"
* or "WEB-INF/messages_en".
* @see #setBasenames
* @see org.springframework.util.PropertiesPersister#load
*/
public void setFileEncodings(Properties fileEncodings) {
this.fileEncodings = fileEncodings;
}
/**
* Specify whether to allow for concurrent refresh behavior, i.e. one thread
* locked in a refresh attempt for a specific cached properties file whereas
* other threads keep returning the old properties for the time being, until
* the refresh attempt has completed.
* <p>Default is "true", minimizing contention between threads. If you prefer
* the old behavior, i.e. to fully block on refresh, switch this flag to "false".
* @since 4.1
* @see #setCacheSeconds
*/
public void setConcurrentRefresh(boolean concurrentRefresh) {
this.concurrentRefresh = concurrentRefresh;
}
/**
* Set the PropertiesPersister to use for parsing properties files.
* <p>The default is {@code DefaultPropertiesPersister}.
* @see DefaultPropertiesPersister#INSTANCE
*/
public void setPropertiesPersister(@Nullable PropertiesPersister propertiesPersister) {
this.propertiesPersister =
(propertiesPersister != null ? propertiesPersister : DefaultPropertiesPersister.INSTANCE);
}
/**
* Set the ResourceLoader to use for loading bundle properties files.
* <p>The default is a DefaultResourceLoader. Will get overridden by the
* ApplicationContext if running in a context, as it implements the
* ResourceLoaderAware interface. Can be manually overridden when
* running outside an ApplicationContext.
* @see org.springframework.core.io.DefaultResourceLoader
* @see org.springframework.context.ResourceLoaderAware
*/
@Override
public void setResourceLoader(@Nullable ResourceLoader resourceLoader) {
this.resourceLoader = (resourceLoader != null ? resourceLoader : new DefaultResourceLoader());
}
/**
* Resolves the given message code as key in the retrieved bundle files,
* returning the value found in the bundle as-is (without MessageFormat parsing).
*/
@Override
protected @Nullable String resolveCodeWithoutArguments(String code, Locale locale) {
if (getCacheMillis() < 0) {
PropertiesHolder propHolder = getMergedProperties(locale);
String result = propHolder.getProperty(code);
if (result != null) {
return result;
}
}
else {
for (String basename : getBasenameSet()) {
List<String> filenames = calculateAllFilenames(basename, locale);
for (String filename : filenames) {
PropertiesHolder propHolder = getProperties(filename);
String result = propHolder.getProperty(code);
if (result != null) {
return result;
}
}
}
}
return null;
}
/**
* Resolves the given message code as key in the retrieved bundle files,
* using a cached MessageFormat instance per message code.
*/
@Override
protected @Nullable MessageFormat resolveCode(String code, Locale locale) {
if (getCacheMillis() < 0) {
PropertiesHolder propHolder = getMergedProperties(locale);
MessageFormat result = propHolder.getMessageFormat(code, locale);
if (result != null) {
return result;
}
}
else {
for (String basename : getBasenameSet()) {
List<String> filenames = calculateAllFilenames(basename, locale);
for (String filename : filenames) {
PropertiesHolder propHolder = getProperties(filename);
MessageFormat result = propHolder.getMessageFormat(code, locale);
if (result != null) {
return result;
}
}
}
}
return null;
}
/**
* Get a PropertiesHolder that contains the actually visible properties
* for a Locale, after merging all specified resource bundles.
* Either fetches the holder from the cache or freshly loads it.
* <p>Only used when caching resource bundle contents forever, i.e.
* with cacheSeconds < 0. Therefore, merged properties are always
* cached forever.
* @see #collectPropertiesToMerge
* @see #mergeProperties
*/
protected PropertiesHolder getMergedProperties(Locale locale) {
PropertiesHolder mergedHolder = this.cachedMergedProperties.get(locale);
if (mergedHolder != null) {
return mergedHolder;
}
mergedHolder = mergeProperties(collectPropertiesToMerge(locale));
PropertiesHolder existing = this.cachedMergedProperties.putIfAbsent(locale, mergedHolder);
if (existing != null) {
mergedHolder = existing;
}
return mergedHolder;
}
/**
* Determine the properties to merge based on the specified basenames.
* @param locale the locale
* @return the list of properties holders
* @since 6.1.4
* @see #getBasenameSet()
* @see #calculateAllFilenames
* @see #mergeProperties
*/
protected List<PropertiesHolder> collectPropertiesToMerge(Locale locale) {
String[] basenames = StringUtils.toStringArray(getBasenameSet());
List<PropertiesHolder> holders = new ArrayList<>(basenames.length);
for (int i = basenames.length - 1; i >= 0; i--) {
List<String> filenames = calculateAllFilenames(basenames[i], locale);
for (int j = filenames.size() - 1; j >= 0; j--) {
String filename = filenames.get(j);
PropertiesHolder propHolder = getProperties(filename);
if (propHolder.getProperties() != null) {
holders.add(propHolder);
}
}
}
return holders;
}
/**
* Merge the given properties holders into a single holder.
* @param holders the list of properties holders
* @return a single merged properties holder
* @since 6.1.4
* @see #newProperties()
* @see #getMergedProperties
* @see #collectPropertiesToMerge
*/
protected PropertiesHolder mergeProperties(List<PropertiesHolder> holders) {
Properties mergedProps = newProperties();
long latestTimestamp = -1;
for (PropertiesHolder holder : holders) {
mergedProps.putAll(holder.getProperties());
if (holder.getFileTimestamp() > latestTimestamp) {
latestTimestamp = holder.getFileTimestamp();
}
}
return new PropertiesHolder(mergedProps, latestTimestamp);
}
/**
 * Calculate all filenames for the given bundle basename and Locale.
 * Will calculate filenames for the given Locale, the default Locale
 * (if configured and different), and the plain default file.
 * <p>Results are cached per basename and Locale in {@code cachedFilenames};
 * a cache hit returns the previously computed list immediately.
 * @param basename the basename of the bundle
 * @param locale the locale
 * @return the List of filenames to check, most specific first
 * @see #setFallbackToSystemLocale
 * @see #calculateFilenamesForLocale
 */
protected List<String> calculateAllFilenames(String basename, Locale locale) {
	// Fast path: reuse a previously computed filename list for this basename/locale.
	Map<Locale, List<String>> localeMap = this.cachedFilenames.get(basename);
	if (localeMap != null) {
		List<String> cached = localeMap.get(locale);
		if (cached != null) {
			return cached;
		}
	}
	List<String> filenames = new ArrayList<>(7);
	// Candidates for the requested Locale, most specific first.
	filenames.addAll(calculateFilenamesForLocale(basename, locale));
	// Append candidates for the default Locale, skipping duplicates.
	Locale defaultLocale = getDefaultLocale();
	if (defaultLocale != null && !defaultLocale.equals(locale)) {
		for (String fallbackFilename : calculateFilenamesForLocale(basename, defaultLocale)) {
			if (!filenames.contains(fallbackFilename)) {
				filenames.add(fallbackFilename);
			}
		}
	}
	// The plain basename (no locale suffix) is always the last resort.
	filenames.add(basename);
	// Publish into the cache, tolerating a concurrent first registration
	// of the per-basename map by another thread.
	if (localeMap == null) {
		localeMap = new ConcurrentHashMap<>();
		Map<Locale, List<String>> concurrentlyRegistered = this.cachedFilenames.putIfAbsent(basename, localeMap);
		if (concurrentlyRegistered != null) {
			localeMap = concurrentlyRegistered;
		}
	}
	localeMap.put(locale, filenames);
	return filenames;
}
/**
 * Calculate the filenames for the given bundle basename and Locale,
 * appending language code, country code, and variant code.
 * <p>For example, basename "messages", Locale "de_AT_oo" &rarr; "messages_de_AT_OO",
 * "messages_de_AT", "messages_de". Results are ordered most specific first.
 * <p>Follows the rules defined by {@link java.util.Locale#toString()}:
 * a country without a language yields "basename__COUNTRY", and a variant
 * is only appended when a language and/or country is present.
 * @param basename the basename of the bundle
 * @param locale the locale
 * @return the List of filenames to check
 */
protected List<String> calculateFilenamesForLocale(String basename, Locale locale) {
	List<String> result = new ArrayList<>(3);
	String language = locale.getLanguage();
	String country = locale.getCountry();
	String variant = locale.getVariant();
	StringBuilder filename = new StringBuilder(basename).append('_');
	if (!language.isEmpty()) {
		filename.append(language);
		result.add(0, filename.toString());
	}
	// Second separator is appended unconditionally so that a country-only
	// locale produces the "basename__COUNTRY" form.
	filename.append('_');
	if (!country.isEmpty()) {
		filename.append(country);
		result.add(0, filename.toString());
	}
	if (!variant.isEmpty() && (!language.isEmpty() || !country.isEmpty())) {
		filename.append('_').append(variant);
		result.add(0, filename.toString());
	}
	return result;
}
/**
 * Get a PropertiesHolder for the given filename, either from the
 * cache or freshly loaded.
 * <p>Coordinates concurrent callers on the holder's refresh lock:
 * a placeholder holder is installed first so that all threads loading
 * the same bundle contend on a single lock.
 * @param filename the bundle filename (basename + Locale)
 * @return the current PropertiesHolder for the bundle
 */
protected PropertiesHolder getProperties(String filename) {
PropertiesHolder propHolder = this.cachedProperties.get(filename);
// -2 marks "no cached holder seen"; distinct from the -1 sentinel used
// below for "cache forever" refresh timestamps.
long originalTimestamp = -2;
if (propHolder != null) {
originalTimestamp = propHolder.getRefreshTimestamp();
if (originalTimestamp == -1 || originalTimestamp > System.currentTimeMillis() - getCacheMillis()) {
// Up to date
return propHolder;
}
}
else {
// No holder cached yet: install an empty placeholder so that concurrent
// callers for the same file coordinate on the same refresh lock.
propHolder = new PropertiesHolder();
PropertiesHolder existingHolder = this.cachedProperties.putIfAbsent(filename, propHolder);
if (existingHolder != null) {
propHolder = existingHolder;
}
}
// At this point, we need to refresh...
if (this.concurrentRefresh && propHolder.getRefreshTimestamp() >= 0) {
// A populated but stale holder -> could keep using it.
if (!propHolder.refreshLock.tryLock()) {
// Getting refreshed by another thread already ->
// let's return the existing properties for the time being.
return propHolder;
}
}
else {
// First-time load (or concurrent refresh disabled): block on the lock so
// callers never observe an unpopulated holder.
propHolder.refreshLock.lock();
}
try {
// Re-check after acquiring the lock: another thread may have refreshed
// the holder while we were waiting.
PropertiesHolder existingHolder = this.cachedProperties.get(filename);
if (existingHolder != null && existingHolder.getRefreshTimestamp() > originalTimestamp) {
return existingHolder;
}
return refreshProperties(filename, propHolder);
}
finally {
propHolder.refreshLock.unlock();
}
}
/**
* Refresh the PropertiesHolder for the given bundle filename.
* <p>The holder can be {@code null} if not cached before, or a timed-out cache entry
* (potentially getting re-validated against the current last-modified timestamp).
* @param filename the bundle filename (basename + Locale)
* @param propHolder the current PropertiesHolder for the bundle
* @see #resolveResource(String)
*/
protected PropertiesHolder refreshProperties(String filename, @Nullable PropertiesHolder propHolder) {
long refreshTimestamp = (getCacheMillis() < 0 ? -1 : System.currentTimeMillis());
Resource resource = resolveResource(filename);
if (resource != null) {
long fileTimestamp = -1;
if (getCacheMillis() >= 0) {
// Last-modified timestamp of file will just be read if caching with timeout.
try {
fileTimestamp = resource.lastModified();
if (propHolder != null && propHolder.getFileTimestamp() == fileTimestamp) {
if (logger.isDebugEnabled()) {
logger.debug("Re-caching properties for filename [" + filename + "] - file hasn't been modified");
}
propHolder.setRefreshTimestamp(refreshTimestamp);
return propHolder;
}
}
catch (IOException ex) {
// Probably a | ReloadableResourceBundleMessageSource |
java | apache__flink | flink-end-to-end-tests/flink-end-to-end-tests-common/src/main/java/org/apache/flink/tests/util/CommandLineWrapper.java | {
"start": 1251,
"end": 2627
} | class ____ {
private final String url;
private Path targetDir;
private long timeoutSecs;
WGetBuilder(String url) {
this.url = url;
}
public WGetBuilder targetDir(Path dir) {
this.targetDir = dir;
return this;
}
public WGetBuilder timeoutSecs(Duration timeout) {
this.timeoutSecs = timeout.getSeconds();
return this;
}
public String[] build() {
final List<String> commandsList = new ArrayList<>(5);
commandsList.add("wget");
commandsList.add("-q"); // silent
// commandsList.add("--show-progress"); // enable progress bar
if (targetDir != null) {
commandsList.add("-P");
commandsList.add(targetDir.toAbsolutePath().toString());
}
if (timeoutSecs > 0) {
commandsList.add("--timeout");
commandsList.add(Long.toString(timeoutSecs));
}
commandsList.add(url);
return commandsList.toArray(new String[commandsList.size()]);
}
}
public static SedBuilder sed(final String command, final Path file) {
return new SedBuilder(command, file);
}
/** Wrapper around sed used for processing text. */
public static final | WGetBuilder |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/messaging/WebSocketAnnotationMethodMessageHandlerTests.java | {
"start": 3669,
"end": 4036
} | class ____ extends WebSocketAnnotationMethodMessageHandler {
public TestWebSocketAnnotationMethodMessageHandler(SimpMessageSendingOperations brokerTemplate,
SubscribableChannel clientInboundChannel, MessageChannel clientOutboundChannel) {
super(clientInboundChannel, clientOutboundChannel, brokerTemplate);
}
}
}
| TestWebSocketAnnotationMethodMessageHandler |
java | grpc__grpc-java | examples/src/test/java/io/grpc/examples/routeguide/RouteGuideClientTest.java | {
"start": 2207,
"end": 18864
} | class ____ {
/**
* This rule manages automatic graceful shutdown for the registered server at the end of test.
*/
@Rule
public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule();
private final MutableHandlerRegistry serviceRegistry = new MutableHandlerRegistry();
private final TestHelper testHelper = mock(TestHelper.class);
private final Random noRandomness =
new Random() {
int index;
boolean isForSleep;
/**
* Returns a number deterministically. If the random number is for sleep time, then return
* -500 so that {@code Thread.sleep(random.nextInt(1000) + 500)} sleeps 0 ms. Otherwise, it
* is for list index, then return incrementally (and cyclically).
*/
@Override
public int nextInt(int bound) {
int retVal = isForSleep ? -500 : (index++ % bound);
isForSleep = ! isForSleep;
return retVal;
}
};
private RouteGuideClient client;
@Before
public void setUp() throws Exception {
// Generate a unique in-process server name.
String serverName = InProcessServerBuilder.generateName();
// Use a mutable service registry for later registering the service impl for each test case.
grpcCleanup.register(InProcessServerBuilder.forName(serverName)
.fallbackHandlerRegistry(serviceRegistry).directExecutor().build().start());
client = new RouteGuideClient(grpcCleanup.register(
InProcessChannelBuilder.forName(serverName).directExecutor().build()));
client.setTestHelper(testHelper);
}
/**
* Example for testing blocking unary call.
*/
@Test
public void getFeature() {
Point requestPoint = Point.newBuilder().setLatitude(-1).setLongitude(-1).build();
Point responsePoint = Point.newBuilder().setLatitude(-123).setLongitude(-123).build();
final AtomicReference<Point> pointDelivered = new AtomicReference<Point>();
final Feature responseFeature =
Feature.newBuilder().setName("dummyFeature").setLocation(responsePoint).build();
// implement the fake service
RouteGuideImplBase getFeatureImpl =
new RouteGuideImplBase() {
@Override
public void getFeature(Point point, StreamObserver<Feature> responseObserver) {
pointDelivered.set(point);
responseObserver.onNext(responseFeature);
responseObserver.onCompleted();
}
};
serviceRegistry.addService(getFeatureImpl);
client.getFeature(-1, -1);
assertEquals(requestPoint, pointDelivered.get());
verify(testHelper).onMessage(responseFeature);
verify(testHelper, never()).onRpcError(any(Throwable.class));
}
/**
* Example for testing blocking unary call.
*/
@Test
public void getFeature_error() {
Point requestPoint = Point.newBuilder().setLatitude(-1).setLongitude(-1).build();
final AtomicReference<Point> pointDelivered = new AtomicReference<Point>();
final StatusRuntimeException fakeError = new StatusRuntimeException(Status.DATA_LOSS);
// implement the fake service
RouteGuideImplBase getFeatureImpl =
new RouteGuideImplBase() {
@Override
public void getFeature(Point point, StreamObserver<Feature> responseObserver) {
pointDelivered.set(point);
responseObserver.onError(fakeError);
}
};
serviceRegistry.addService(getFeatureImpl);
client.getFeature(-1, -1);
assertEquals(requestPoint, pointDelivered.get());
ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class);
verify(testHelper).onRpcError(errorCaptor.capture());
assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue()));
}
/**
* Example for testing blocking server-streaming.
*/
@Test
public void listFeatures() {
final Feature responseFeature1 = Feature.newBuilder().setName("feature 1").build();
final Feature responseFeature2 = Feature.newBuilder().setName("feature 2").build();
final AtomicReference<Rectangle> rectangleDelivered = new AtomicReference<Rectangle>();
// implement the fake service
RouteGuideImplBase listFeaturesImpl =
new RouteGuideImplBase() {
@Override
public void listFeatures(Rectangle rectangle, StreamObserver<Feature> responseObserver) {
rectangleDelivered.set(rectangle);
// send two response messages
responseObserver.onNext(responseFeature1);
responseObserver.onNext(responseFeature2);
// complete the response
responseObserver.onCompleted();
}
};
serviceRegistry.addService(listFeaturesImpl);
client.listFeatures(1, 2, 3, 4);
assertEquals(Rectangle.newBuilder()
.setLo(Point.newBuilder().setLatitude(1).setLongitude(2).build())
.setHi(Point.newBuilder().setLatitude(3).setLongitude(4).build())
.build(),
rectangleDelivered.get());
verify(testHelper).onMessage(responseFeature1);
verify(testHelper).onMessage(responseFeature2);
verify(testHelper, never()).onRpcError(any(Throwable.class));
}
/**
* Example for testing blocking server-streaming.
*/
@Test
public void listFeatures_error() {
final Feature responseFeature1 =
Feature.newBuilder().setName("feature 1").build();
final AtomicReference<Rectangle> rectangleDelivered = new AtomicReference<Rectangle>();
final StatusRuntimeException fakeError = new StatusRuntimeException(Status.INVALID_ARGUMENT);
// implement the fake service
RouteGuideImplBase listFeaturesImpl =
new RouteGuideImplBase() {
@Override
public void listFeatures(Rectangle rectangle, StreamObserver<Feature> responseObserver) {
rectangleDelivered.set(rectangle);
// send one response message
responseObserver.onNext(responseFeature1);
// let the rpc fail
responseObserver.onError(fakeError);
}
};
serviceRegistry.addService(listFeaturesImpl);
client.listFeatures(1, 2, 3, 4);
assertEquals(Rectangle.newBuilder()
.setLo(Point.newBuilder().setLatitude(1).setLongitude(2).build())
.setHi(Point.newBuilder().setLatitude(3).setLongitude(4).build())
.build(),
rectangleDelivered.get());
ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class);
verify(testHelper).onMessage(responseFeature1);
verify(testHelper).onRpcError(errorCaptor.capture());
assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue()));
}
/**
* Example for testing async client-streaming.
*/
@Test
public void recordRoute() throws Exception {
client.setRandom(noRandomness);
Point point1 = Point.newBuilder().setLatitude(1).setLongitude(1).build();
Point point2 = Point.newBuilder().setLatitude(2).setLongitude(2).build();
Point point3 = Point.newBuilder().setLatitude(3).setLongitude(3).build();
Feature requestFeature1 =
Feature.newBuilder().setLocation(point1).build();
Feature requestFeature2 =
Feature.newBuilder().setLocation(point2).build();
Feature requestFeature3 =
Feature.newBuilder().setLocation(point3).build();
final List<Feature> features = Arrays.asList(
requestFeature1, requestFeature2, requestFeature3);
final List<Point> pointsDelivered = new ArrayList<>();
final RouteSummary fakeResponse = RouteSummary
.newBuilder()
.setPointCount(7)
.setFeatureCount(8)
.setDistance(9)
.setElapsedTime(10)
.build();
// implement the fake service
RouteGuideImplBase recordRouteImpl =
new RouteGuideImplBase() {
@Override
public StreamObserver<Point> recordRoute(
final StreamObserver<RouteSummary> responseObserver) {
StreamObserver<Point> requestObserver = new StreamObserver<Point>() {
@Override
public void onNext(Point value) {
pointsDelivered.add(value);
}
@Override
public void onError(Throwable t) {
}
@Override
public void onCompleted() {
responseObserver.onNext(fakeResponse);
responseObserver.onCompleted();
}
};
return requestObserver;
}
};
serviceRegistry.addService(recordRouteImpl);
// send requestFeature1, requestFeature2, requestFeature3, and then requestFeature1 again
client.recordRoute(features, 4);
assertEquals(
Arrays.asList(
requestFeature1.getLocation(),
requestFeature2.getLocation(),
requestFeature3.getLocation(),
requestFeature1.getLocation()),
pointsDelivered);
verify(testHelper).onMessage(fakeResponse);
verify(testHelper, never()).onRpcError(any(Throwable.class));
}
/**
* Example for testing async client-streaming.
*/
@Test
public void recordRoute_serverError() throws Exception {
client.setRandom(noRandomness);
Point point1 = Point.newBuilder().setLatitude(1).setLongitude(1).build();
final Feature requestFeature1 =
Feature.newBuilder().setLocation(point1).build();
final List<Feature> features = Arrays.asList(requestFeature1);
final StatusRuntimeException fakeError = new StatusRuntimeException(Status.INVALID_ARGUMENT);
// implement the fake service
RouteGuideImplBase recordRouteImpl =
new RouteGuideImplBase() {
@Override
public StreamObserver<Point> recordRoute(StreamObserver<RouteSummary> responseObserver) {
// send an error immediately
responseObserver.onError(fakeError);
StreamObserver<Point> requestObserver = new StreamObserver<Point>() {
@Override
public void onNext(Point value) {
}
@Override
public void onError(Throwable t) {
}
@Override
public void onCompleted() {
}
};
return requestObserver;
}
};
serviceRegistry.addService(recordRouteImpl);
client.recordRoute(features, 4);
ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class);
verify(testHelper).onRpcError(errorCaptor.capture());
assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue()));
}
/**
* Example for testing bi-directional call.
*/
@Test
public void routeChat_simpleResponse() throws Exception {
RouteNote fakeResponse1 = RouteNote.newBuilder().setMessage("dummy msg1").build();
RouteNote fakeResponse2 = RouteNote.newBuilder().setMessage("dummy msg2").build();
final List<String> messagesDelivered = new ArrayList<>();
final List<Point> locationsDelivered = new ArrayList<>();
final AtomicReference<StreamObserver<RouteNote>> responseObserverRef =
new AtomicReference<StreamObserver<RouteNote>>();
final CountDownLatch allRequestsDelivered = new CountDownLatch(1);
// implement the fake service
RouteGuideImplBase routeChatImpl =
new RouteGuideImplBase() {
@Override
public StreamObserver<RouteNote> routeChat(StreamObserver<RouteNote> responseObserver) {
responseObserverRef.set(responseObserver);
StreamObserver<RouteNote> requestObserver = new StreamObserver<RouteNote>() {
@Override
public void onNext(RouteNote value) {
messagesDelivered.add(value.getMessage());
locationsDelivered.add(value.getLocation());
}
@Override
public void onError(Throwable t) {
}
@Override
public void onCompleted() {
allRequestsDelivered.countDown();
}
};
return requestObserver;
}
};
serviceRegistry.addService(routeChatImpl);
// start routeChat
CountDownLatch latch = client.routeChat();
// request message sent and delivered for four times
assertTrue(allRequestsDelivered.await(1, TimeUnit.SECONDS));
assertEquals(
Arrays.asList("First message", "Second message", "Third message", "Fourth message"),
messagesDelivered);
assertEquals(
Arrays.asList(
Point.newBuilder().setLatitude(0).setLongitude(0).build(),
Point.newBuilder().setLatitude(0).setLongitude(10_000_000).build(),
Point.newBuilder().setLatitude(10_000_000).setLongitude(0).build(),
Point.newBuilder().setLatitude(10_000_000).setLongitude(10_000_000).build()
),
locationsDelivered);
// Let the server send out two simple response messages
// and verify that the client receives them.
// Allow some timeout for verify() if not using directExecutor
responseObserverRef.get().onNext(fakeResponse1);
verify(testHelper).onMessage(fakeResponse1);
responseObserverRef.get().onNext(fakeResponse2);
verify(testHelper).onMessage(fakeResponse2);
// let server complete.
responseObserverRef.get().onCompleted();
assertTrue(latch.await(1, TimeUnit.SECONDS));
verify(testHelper, never()).onRpcError(any(Throwable.class));
}
/**
* Example for testing bi-directional call.
*/
@Test
public void routeChat_echoResponse() throws Exception {
final List<RouteNote> notesDelivered = new ArrayList<>();
// implement the fake service
RouteGuideImplBase routeChatImpl =
new RouteGuideImplBase() {
@Override
public StreamObserver<RouteNote> routeChat(
final StreamObserver<RouteNote> responseObserver) {
StreamObserver<RouteNote> requestObserver = new StreamObserver<RouteNote>() {
@Override
public void onNext(RouteNote value) {
notesDelivered.add(value);
responseObserver.onNext(value);
}
@Override
public void onError(Throwable t) {
responseObserver.onError(t);
}
@Override
public void onCompleted() {
responseObserver.onCompleted();
}
};
return requestObserver;
}
};
serviceRegistry.addService(routeChatImpl);
client.routeChat().await(1, TimeUnit.SECONDS);
String[] messages =
{"First message", "Second message", "Third message", "Fourth message"};
for (int i = 0; i < 4; i++) {
verify(testHelper).onMessage(notesDelivered.get(i));
assertEquals(messages[i], notesDelivered.get(i).getMessage());
}
verify(testHelper, never()).onRpcError(any(Throwable.class));
}
/**
* Example for testing bi-directional call.
*/
@Test
public void routeChat_errorResponse() throws Exception {
final List<RouteNote> notesDelivered = new ArrayList<>();
final StatusRuntimeException fakeError = new StatusRuntimeException(Status.PERMISSION_DENIED);
// implement the fake service
RouteGuideImplBase routeChatImpl =
new RouteGuideImplBase() {
@Override
public StreamObserver<RouteNote> routeChat(
final StreamObserver<RouteNote> responseObserver) {
StreamObserver<RouteNote> requestObserver = new StreamObserver<RouteNote>() {
@Override
public void onNext(RouteNote value) {
notesDelivered.add(value);
responseObserver.onError(fakeError);
}
@Override
public void onError(Throwable t) {
}
@Override
public void onCompleted() {
responseObserver.onCompleted();
}
};
return requestObserver;
}
};
serviceRegistry.addService(routeChatImpl);
client.routeChat().await(1, TimeUnit.SECONDS);
assertEquals("First message", notesDelivered.get(0).getMessage());
verify(testHelper, never()).onMessage(any(Message.class));
ArgumentCaptor<Throwable> errorCaptor = ArgumentCaptor.forClass(Throwable.class);
verify(testHelper).onRpcError(errorCaptor.capture());
assertEquals(fakeError.getStatus(), Status.fromThrowable(errorCaptor.getValue()));
}
}
| RouteGuideClientTest |
java | apache__camel | tooling/camel-tooling-util/src/test/java/org/apache/camel/tooling/util/ResourceUtils.java | {
"start": 878,
"end": 1139
} | class ____ {
private ResourceUtils() {
// noop
}
public static File getResourceAsFile(String pathToFile) throws Exception {
return new File(ResourceUtils.class.getClassLoader().getResource(pathToFile).getFile());
}
}
| ResourceUtils |
java | apache__logging-log4j2 | log4j-iostreams/src/test/java/org/apache/logging/log4j/io/LoggerBufferedReaderTest.java | {
"start": 972,
"end": 1652
} | class ____ extends LoggerReaderTest {
private BufferedReader bufferedReader;
@Override
protected Reader createReader() {
return this.bufferedReader = (BufferedReader) IoBuilder.forLogger(getExtendedLogger())
.filter(this.wrapped)
.setLevel(LEVEL)
.setBuffered(true)
.buildReader();
}
@Test
public void testReadLine() throws Exception {
assertEquals("first line", FIRST, this.bufferedReader.readLine());
assertMessages(FIRST);
assertEquals("second line", LAST, this.bufferedReader.readLine());
assertMessages(FIRST, LAST);
}
}
| LoggerBufferedReaderTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/client/handler/RedisChannelInitializer.java | {
"start": 1881,
"end": 9116
} | enum ____ {PUBSUB, PLAIN}
private final RedisClientConfig config;
private final RedisClient redisClient;
private final Type type;
private final ConnectionWatchdog connectionWatchdog;
private final PingConnectionHandler pingConnectionHandler;
public RedisChannelInitializer(Bootstrap bootstrap, RedisClientConfig config, RedisClient redisClient, ChannelGroup channels, Type type) {
super();
this.config = config;
this.redisClient = redisClient;
this.type = type;
if (config.getPingConnectionInterval() > 0) {
pingConnectionHandler = new PingConnectionHandler(config);
} else {
pingConnectionHandler = null;
}
connectionWatchdog = new ConnectionWatchdog(bootstrap, channels, config);
}
@Override
protected void initChannel(Channel ch) throws Exception {
initSsl(config, ch);
if (type == Type.PLAIN) {
ch.pipeline().addLast(new RedisConnectionHandler(redisClient));
} else {
ch.pipeline().addLast(new RedisPubSubConnectionHandler(redisClient));
}
ch.pipeline().addLast(
connectionWatchdog,
new CommandEncoder(config.getCommandMapper()),
CommandBatchEncoder.INSTANCE);
if (type == Type.PLAIN) {
ch.pipeline().addLast(new CommandsQueue());
} else {
ch.pipeline().addLast(new CommandsQueuePubSub());
}
if (pingConnectionHandler != null) {
ch.pipeline().addLast(pingConnectionHandler);
}
if (type == Type.PLAIN) {
ch.pipeline().addLast(new CommandDecoder(config.getAddress().getScheme()));
} else {
ch.pipeline().addLast(new CommandPubSubDecoder(config));
}
ch.pipeline().addLast(new ErrorsLoggingHandler());
config.getNettyHook().afterChannelInitialization(ch);
}
private void initSsl(RedisClientConfig config, Channel ch) throws GeneralSecurityException, IOException {
if (!config.getAddress().isSsl()) {
return;
}
io.netty.handler.ssl.SslProvider provided = io.netty.handler.ssl.SslProvider.JDK;
if (config.getSslProvider() == SslProvider.OPENSSL) {
provided = io.netty.handler.ssl.SslProvider.OPENSSL;
}
SslContextBuilder sslContextBuilder = SslContextBuilder.forClient()
.sslProvider(provided)
.keyStoreType(config.getSslKeystoreType());
sslContextBuilder.protocols(config.getSslProtocols());
if (config.getSslCiphers() != null) {
sslContextBuilder.ciphers(Arrays.asList(config.getSslCiphers()));
}
if (config.getSslTruststore() != null) {
KeyStore keyStore = getKeyStore(config);
InputStream stream = config.getSslTruststore().openStream();
try {
char[] password = null;
if (config.getSslTruststorePassword() != null) {
password = config.getSslTruststorePassword().toCharArray();
}
keyStore.load(stream, password);
} finally {
stream.close();
}
TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
trustManagerFactory.init(keyStore);
sslContextBuilder.trustManager(trustManagerFactory);
}
if (config.getSslTrustManagerFactory() != null) {
sslContextBuilder.trustManager(config.getSslTrustManagerFactory());
}
if (config.getSslKeystore() != null){
KeyStore keyStore = getKeyStore(config);
InputStream stream = config.getSslKeystore().openStream();
char[] password = null;
if (config.getSslKeystorePassword() != null) {
password = config.getSslKeystorePassword().toCharArray();
}
try {
keyStore.load(stream, password);
} finally {
stream.close();
}
KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
keyManagerFactory.init(keyStore, password);
sslContextBuilder.keyManager(keyManagerFactory);
}
if (config.getSslKeyManagerFactory() != null) {
sslContextBuilder.keyManager(config.getSslKeyManagerFactory());
}
SSLParameters sslParams = new SSLParameters();
if (config.getSslVerificationMode() == SslVerificationMode.STRICT) {
sslParams.setEndpointIdentificationAlgorithm("HTTPS");
} else if (config.getSslVerificationMode() == SslVerificationMode.CA_ONLY) {
sslParams.setEndpointIdentificationAlgorithm("");
} else {
if (config.getSslTruststore() == null) {
sslContextBuilder.trustManager(InsecureTrustManagerFactory.INSTANCE);
}
}
SslContext sslContext = sslContextBuilder.build();
String hostname = config.getSslHostname();
if (hostname == null || NetUtil.createByteArrayFromIpAddressString(hostname) != null) {
hostname = config.getAddress().getHost();
}
SSLEngine sslEngine = sslContext.newEngine(ch.alloc(), hostname, config.getAddress().getPort());
sslEngine.setSSLParameters(sslParams);
SslHandler sslHandler = new SslHandler(sslEngine);
ch.pipeline().addLast(sslHandler);
ch.pipeline().addLast(new ChannelInboundHandlerAdapter() {
volatile boolean sslInitDone;
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
if (sslInitDone) {
super.channelActive(ctx);
}
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (!sslInitDone && evt instanceof SslHandshakeCompletionEvent) {
SslHandshakeCompletionEvent e = (SslHandshakeCompletionEvent) evt;
if (e.isSuccess()) {
sslInitDone = true;
ctx.fireChannelActive();
} else {
RedisConnection connection = RedisConnection.getFrom(ctx.channel());
connection.closeAsync();
connection.getConnectionPromise().completeExceptionally(e.cause());
}
}
super.userEventTriggered(ctx, evt);
}
});
}
private KeyStore getKeyStore(RedisClientConfig config) throws KeyStoreException {
if (config.getSslKeystoreType() != null) {
return KeyStore.getInstance(config.getSslKeystoreType());
}
return KeyStore.getInstance(KeyStore.getDefaultType());
}
}
| Type |
java | google__guava | android/guava-tests/test/com/google/common/collect/ForwardingQueueTest.java | {
"start": 1496,
"end": 4450
} | class ____<T> extends ForwardingQueue<T> {
private final Queue<T> backingQueue;
StandardImplForwardingQueue(Queue<T> backingQueue) {
this.backingQueue = backingQueue;
}
@Override
protected Queue<T> delegate() {
return backingQueue;
}
@Override
public boolean addAll(Collection<? extends T> collection) {
return standardAddAll(collection);
}
@Override
public void clear() {
standardClear();
}
@Override
public boolean contains(Object object) {
return standardContains(object);
}
@Override
public boolean containsAll(Collection<?> collection) {
return standardContainsAll(collection);
}
@Override
public boolean remove(Object object) {
return standardRemove(object);
}
@Override
public boolean removeAll(Collection<?> collection) {
return standardRemoveAll(collection);
}
@Override
public boolean retainAll(Collection<?> collection) {
return standardRetainAll(collection);
}
@Override
public Object[] toArray() {
return standardToArray();
}
@Override
public <T> T[] toArray(T[] array) {
return standardToArray(array);
}
@Override
public String toString() {
return standardToString();
}
@Override
public boolean offer(T o) {
return standardOffer(o);
}
@Override
public @Nullable T peek() {
return standardPeek();
}
@Override
public @Nullable T poll() {
return standardPoll();
}
}
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTestSuite(ForwardingQueueTest.class);
suite.addTest(
QueueTestSuiteBuilder.using(
new TestStringQueueGenerator() {
@Override
protected Queue<String> create(String[] elements) {
return new StandardImplForwardingQueue<>(new LinkedList<>(asList(elements)));
}
})
.named("ForwardingQueue[LinkedList] with standard implementations")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.ALLOWS_NULL_VALUES,
CollectionFeature.GENERAL_PURPOSE)
.createTestSuite());
return suite;
}
@SuppressWarnings({"rawtypes", "unchecked"})
public void testForwarding() {
new ForwardingWrapperTester()
.testForwarding(
Queue.class,
new Function<Queue, Queue>() {
@Override
public Queue apply(Queue delegate) {
return wrap(delegate);
}
});
}
private static <T> Queue<T> wrap(Queue<T> delegate) {
return new ForwardingQueue<T>() {
@Override
protected Queue<T> delegate() {
return delegate;
}
};
}
}
| StandardImplForwardingQueue |
java | apache__camel | components/camel-micrometer/src/test/java/org/apache/camel/component/micrometer/eventnotifier/MicrometerExchangeEventNotifierNamingStrategyTest.java | {
"start": 1000,
"end": 2235
} | class ____ {
@Test
void testDefaultFormatName() {
MicrometerExchangeEventNotifierNamingStrategy strategy = MicrometerExchangeEventNotifierNamingStrategy.DEFAULT;
String result = strategy.formatName("some.metric.name");
assertEquals("some.metric.name", result);
}
@Test
void testLegacyFormatName() {
MicrometerExchangeEventNotifierNamingStrategy strategy = MicrometerExchangeEventNotifierNamingStrategy.LEGACY;
String result = strategy.formatName("some.metric.name");
assertEquals("SomeMetricName", result);
}
@Test
void getDefaultInflightExchangesName() {
MicrometerExchangeEventNotifierNamingStrategy strategy = MicrometerExchangeEventNotifierNamingStrategy.DEFAULT;
String result = strategy.getInflightExchangesName();
assertEquals("camel.exchanges.inflight", result);
}
@Test
void getLegacyInflightExchangesName() {
MicrometerExchangeEventNotifierNamingStrategy strategy = MicrometerExchangeEventNotifierNamingStrategy.LEGACY;
String result = strategy.getInflightExchangesName();
assertEquals("CamelExchangesInflight", result);
}
}
| MicrometerExchangeEventNotifierNamingStrategyTest |
java | apache__camel | components/camel-jetty/src/test/java/org/apache/camel/component/jetty/ExplicitJettyRouteTest.java | {
"start": 1222,
"end": 1939
} | class ____ extends BaseJettyTest {
@Test
public void testSendToJetty() {
Object response = template.requestBody("http://localhost:{{port}}/myapp/myservice", "bookid=123");
// convert the response to a String
String body = context.getTypeConverter().convertTo(String.class, response);
assertEquals("<html><body>Book 123 is Camel in Action</body></html>", body);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("jetty:http://localhost:{{port}}/myapp/myservice").process(new MyBookService());
}
};
}
public static | ExplicitJettyRouteTest |
java | spring-projects__spring-boot | module/spring-boot-neo4j/src/test/java/org/springframework/boot/neo4j/health/Neo4jReactiveHealthIndicatorTests.java | {
"start": 1891,
"end": 5280
} | class ____ {
@Test
void neo4jIsUp() {
ResultSummary resultSummary = ResultSummaryMock.createResultSummary("My Home", "test");
Driver driver = mockDriver(resultSummary, "4711", "ultimate collectors edition");
Neo4jReactiveHealthIndicator healthIndicator = new Neo4jReactiveHealthIndicator(driver);
healthIndicator.health().as(StepVerifier::create).consumeNextWith((health) -> {
assertThat(health.getStatus()).isEqualTo(Status.UP);
assertThat(health.getDetails()).containsEntry("server", "4711@My Home");
assertThat(health.getDetails()).containsEntry("edition", "ultimate collectors edition");
}).expectComplete().verify(Duration.ofSeconds(30));
}
@Test
void neo4jIsUpWithOneSessionExpiredException() {
ResultSummary resultSummary = ResultSummaryMock.createResultSummary("My Home", "");
ReactiveSession session = mock(ReactiveSession.class);
ReactiveResult statementResult = mockStatementResult(resultSummary, "4711", "some edition");
AtomicInteger count = new AtomicInteger();
given(session.run(anyString())).will((invocation) -> {
if (count.compareAndSet(0, 1)) {
return Flux.error(new SessionExpiredException("Session expired"));
}
return Flux.just(statementResult);
});
Driver driver = mock(Driver.class);
given(driver.session(eq(ReactiveSession.class), any(SessionConfig.class))).willReturn(session);
Neo4jReactiveHealthIndicator healthIndicator = new Neo4jReactiveHealthIndicator(driver);
healthIndicator.health().as(StepVerifier::create).consumeNextWith((health) -> {
assertThat(health.getStatus()).isEqualTo(Status.UP);
assertThat(health.getDetails()).containsEntry("server", "4711@My Home");
assertThat(health.getDetails()).containsEntry("edition", "some edition");
}).expectComplete().verify(Duration.ofSeconds(30));
then(session).should(times(2)).close();
}
@Test
void neo4jIsDown() {
Driver driver = mock(Driver.class);
given(driver.session(eq(ReactiveSession.class), any(SessionConfig.class)))
.willThrow(ServiceUnavailableException.class);
Neo4jReactiveHealthIndicator healthIndicator = new Neo4jReactiveHealthIndicator(driver);
healthIndicator.health().as(StepVerifier::create).consumeNextWith((health) -> {
assertThat(health.getStatus()).isEqualTo(Status.DOWN);
assertThat(health.getDetails()).containsKeys("error");
}).expectComplete().verify(Duration.ofSeconds(30));
}
private ReactiveResult mockStatementResult(ResultSummary resultSummary, String version, String edition) {
Record record = mock(Record.class);
given(record.get("edition")).willReturn(Values.value(edition));
given(record.get("version")).willReturn(Values.value(version));
ReactiveResult statementResult = mock(ReactiveResult.class);
given(statementResult.records()).willReturn(Mono.just(record));
given(statementResult.consume()).willReturn(Mono.just(resultSummary));
return statementResult;
}
private Driver mockDriver(ResultSummary resultSummary, String version, String edition) {
ReactiveResult statementResult = mockStatementResult(resultSummary, version, edition);
ReactiveSession session = mock(ReactiveSession.class);
given(session.run(anyString())).willReturn(Mono.just(statementResult));
Driver driver = mock(Driver.class);
given(driver.session(eq(ReactiveSession.class), any(SessionConfig.class))).willReturn(session);
return driver;
}
}
| Neo4jReactiveHealthIndicatorTests |
java | micronaut-projects__micronaut-core | http-tck/src/main/java/io/micronaut/http/tck/TestScenario.java | {
"start": 4492,
"end": 6761
} | class ____ {
private Map<String, Object> configuration;
private String specName;
private BiConsumer<ServerUnderTest, HttpRequest<?>> assertion;
private RequestSupplier request;
/**
*
* @param specName Value for {@literal spec.name} property. Used to avoid bean pollution.
* @return Test Scenario builder
*/
public Builder specName(String specName) {
this.specName = specName;
return this;
}
/**
*
* @param request HTTP Request to be sent in the test scenario
* @return The Test Scenario Builder
*/
public Builder request(HttpRequest<?> request) {
this.request = server -> request;
return this;
}
/**
*
* @param request HTTP Request supplier that given a server, provides the request to be sent in the test scenario
* @return The Test Scenario Builder
*/
public Builder request(RequestSupplier request) {
this.request = request;
return this;
}
/**
*
* @param configuration Test Scenario configuration
* @return Test scenario builder
*/
public Builder configuration(Map<String, Object> configuration) {
this.configuration = configuration;
return this;
}
/**
*
* @param assertion Assertion for a request and server.
* @return The Test Scenario Builder
*/
public Builder assertion(BiConsumer<ServerUnderTest, HttpRequest<?>> assertion) {
this.assertion = assertion;
return this;
}
/**
*
* @return Builds a Test scenario
*/
private TestScenario build() {
return new TestScenario(specName, configuration,
Objects.requireNonNull(request),
Objects.requireNonNull(assertion));
}
/**
* Runs the Test Scneario.
* @throws IOException Exception thrown while getting the server under test.
*/
public void run() throws IOException {
build().run();
}
}
}
| Builder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/model/ast/MutatingTableReference.java | {
"start": 564,
"end": 3188
} | class ____ implements TableReference {
private final TableMapping tableMapping;
public MutatingTableReference(TableMapping tableMapping) {
this.tableMapping = tableMapping;
}
public TableMapping getTableMapping() {
return tableMapping;
}
public String getTableName() {
return tableMapping.getTableName();
}
@Override
public String getIdentificationVariable() {
return null;
}
@Override
public String getTableId() {
return getTableName();
}
@Override
public boolean isOptional() {
return tableMapping.isOptional();
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
throw new UnsupportedOperationException( "Mutating table reference should be handled by the statement visitation" );
}
@Override
public Boolean visitAffectedTableNames(Function<String, Boolean> nameCollector) {
return nameCollector.apply( getTableName() );
}
@Override
public TableReference resolveTableReference(
NavigablePath navigablePath,
String tableExpression) {
if ( getTableName().equals( tableExpression ) ) {
return this;
}
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Table-expression (%s) did not match mutating table name - %s",
tableExpression,
getTableName()
)
);
}
@Override
public TableReference resolveTableReference(
NavigablePath navigablePath,
ValuedModelPart modelPart,
String tableExpression) {
if ( getTableName().equals( tableExpression ) ) {
return this;
}
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Table-expression (%s) did not match mutating table name - %s",
tableExpression,
getTableName()
)
);
}
@Override
public TableReference getTableReference(NavigablePath navigablePath, String tableExpression, boolean resolve) {
return getTableName().equals( tableExpression ) ? this : null;
}
@Override
public TableReference getTableReference(
NavigablePath navigablePath,
ValuedModelPart modelPart,
String tableExpression,
boolean resolve) {
return getTableName().equals( tableExpression ) ? this : null;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
final MutatingTableReference that = (MutatingTableReference) o;
return Objects.equals( getTableName(), that.getTableName() );
}
@Override
public int hashCode() {
return Objects.hash( getTableName() );
}
@Override
public String toString() {
return "MutatingTableReference(" + getTableName() + ")";
}
}
| MutatingTableReference |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java | {
"start": 1938,
"end": 3466
} | class ____<T> implements Stringifier<T> {
private static final String SEPARATOR = ",";
private Serializer<T> serializer;
private Deserializer<T> deserializer;
private DataInputBuffer inBuf;
private DataOutputBuffer outBuf;
public DefaultStringifier(Configuration conf, Class<T> c) {
SerializationFactory factory = new SerializationFactory(conf);
this.serializer = factory.getSerializer(c);
this.deserializer = factory.getDeserializer(c);
this.inBuf = new DataInputBuffer();
this.outBuf = new DataOutputBuffer();
try {
serializer.open(outBuf);
deserializer.open(inBuf);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
@Override
public T fromString(String str) throws IOException {
byte[] bytes = Base64.decodeBase64(str.getBytes(StandardCharsets.UTF_8));
inBuf.reset(bytes, bytes.length);
T restored = deserializer.deserialize(null);
return restored;
}
@Override
public String toString(T obj) throws IOException {
outBuf.reset();
serializer.serialize(obj);
byte[] buf = new byte[outBuf.getLength()];
System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
return new String(Base64.encodeBase64(buf), StandardCharsets.UTF_8);
}
@Override
public void close() throws IOException {
inBuf.close();
outBuf.close();
deserializer.close();
serializer.close();
}
/**
* Stores the item in the configuration with the given keyName.
*
* @param <K> the | DefaultStringifier |
java | quarkusio__quarkus | extensions/cache/deployment/src/test/java/io/quarkus/cache/test/runtime/ExplicitCompositeKeyCacheTest.java | {
"start": 725,
"end": 5197
} | class ____ {
private static final Locale KEY_1_ELEMENT_1 = Locale.US;
private static final BigDecimal KEY_1_ELEMENT_2 = new BigDecimal(123);
private static final Locale KEY_2_ELEMENT_1 = Locale.FRANCE;
private static final BigDecimal KEY_2_ELEMENT_2 = new BigDecimal(456);
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest().withApplicationRoot(jar -> jar.addClass(CachedService.class));
@Inject
CachedService cachedService;
@Test
public void testAllCacheAnnotations() {
// In most of the cached service methods calls below, a changing third argument will be passed to the methods.
// The fact that it changes each time should not have any effect on the cache because it is not part of the cache key.
// STEP 1
// Action: @CacheResult-annotated method call.
// Expected effect: method invoked and result cached.
// Verified by: STEP 2.
String value1 = cachedService.cachedMethod(KEY_1_ELEMENT_1, KEY_1_ELEMENT_2, new Object());
// STEP 2
// Action: same call as STEP 1.
// Expected effect: method not invoked and result coming from the cache.
// Verified by: same object reference between STEPS 1 and 2 results.
String value2 = cachedService.cachedMethod(KEY_1_ELEMENT_1, KEY_1_ELEMENT_2, new Object());
assertTrue(value1 == value2);
// STEP 3
// Action: same call as STEP 2 with a changing key element.
// Expected effect: method invoked and result cached.
// Verified by: different objects references between STEPS 2 and 3 results.
String value3 = cachedService.cachedMethod(KEY_1_ELEMENT_1, new BigDecimal(789), new Object());
assertTrue(value2 != value3);
// STEP 4
// Action: same principle as STEP 3, but this time we're changing the other key element.
// Expected effect: method invoked and result cached.
// Verified by: different objects references between STEPS 2 and 4 results.
String value4 = cachedService.cachedMethod(Locale.JAPAN, KEY_1_ELEMENT_2, new Object());
assertTrue(value2 != value4);
// STEP 5
// Action: same call as STEP 2 with an entirely new key.
// Expected effect: method invoked and result cached.
// Verified by: different objects references between STEPS 2 and 5 results.
String value5 = cachedService.cachedMethod(KEY_2_ELEMENT_1, KEY_2_ELEMENT_2, new Object());
assertTrue(value2 != value5);
// STEP 6
// Action: cache entry invalidation.
// Expected effect: STEP 2 cache entry removed.
// Verified by: STEP 7.
cachedService.invalidate(new Object(), KEY_1_ELEMENT_1, KEY_1_ELEMENT_2);
// STEP 7
// Action: same call as STEP 2.
// Expected effect: method invoked because of STEP 6 and result cached.
// Verified by: different objects references between STEPS 2 and 7 results.
String value7 = cachedService.cachedMethod(KEY_1_ELEMENT_1, KEY_1_ELEMENT_2, new Object());
assertTrue(value2 != value7);
// STEP 8
// Action: same call as STEP 5.
// Expected effect: method not invoked and result coming from the cache.
// Verified by: same object reference between STEPS 5 and 8 results.
String value8 = cachedService.cachedMethod(KEY_2_ELEMENT_1, KEY_2_ELEMENT_2, new Object());
assertTrue(value5 == value8);
// STEP 9
// Action: full cache invalidation.
// Expected effect: empty cache.
// Verified by: STEPS 10 and 11.
cachedService.invalidateAll();
// STEP 10
// Action: same call as STEP 7.
// Expected effect: method invoked because of STEP 9 and result cached.
// Verified by: different objects references between STEPS 7 and 10 results.
String value10 = cachedService.cachedMethod(KEY_1_ELEMENT_1, KEY_1_ELEMENT_2, new Object());
assertTrue(value7 != value10);
// STEP 11
// Action: same call as STEP 8.
// Expected effect: method invoked because of STEP 9 and result cached.
// Verified by: different objects references between STEPS 8 and 11 results.
String value11 = cachedService.cachedMethod(KEY_2_ELEMENT_1, KEY_2_ELEMENT_2, new Object());
assertTrue(value8 != value11);
}
@Dependent
static | ExplicitCompositeKeyCacheTest |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/GlobalVariableRepository.java | {
"start": 1019,
"end": 1164
} | class ____ extends AbstractVariableRepository {
@Override
public String getId() {
return "global";
}
}
| GlobalVariableRepository |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/EnumUtils.java | {
"start": 9268,
"end": 9366
} | enum ____.</p>
*
* @param <E> the type of the enumeration.
* @param enumClass the | name |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/TaskRetriever.java | {
"start": 1148,
"end": 4630
} | class ____ {
/**
* Returns a {@link TaskInfo} if one exists representing an in-progress trained model download.
*
* @param client a {@link Client} used to retrieve the task
* @param modelId the id of the model to check for an existing task
* @param waitForCompletion a boolean flag determine if the request should wait for an existing task to complete before returning (aka
* wait for the download to complete)
* @param timeout the timeout value in seconds that the request should fail if it does not complete
* @param errorMessageOnWaitTimeout Message to use if the request times out with {@code waitForCompletion == true}
* @param listener a listener, if a task is found it is returned via {@code ActionListener.onResponse(taskInfo)}.
* If a task is not found null is returned
*/
public static void getDownloadTaskInfo(
Client client,
String modelId,
boolean waitForCompletion,
TimeValue timeout,
Supplier<String> errorMessageOnWaitTimeout,
ActionListener<TaskInfo> listener
) {
client.admin()
.cluster()
.prepareListTasks()
.setActions(MlTasks.MODEL_IMPORT_TASK_ACTION)
.setDetailed(true)
.setWaitForCompletion(waitForCompletion)
.setDescriptions(downloadModelTaskDescription(modelId))
.setTimeout(timeout)
.execute(ActionListener.wrap((response) -> {
var tasks = response.getTasks();
if (tasks.size() > 0) {
// there really shouldn't be more than a single task but if there is we'll just use the first one
listener.onResponse(tasks.get(0));
} else if (waitForCompletion && didItTimeout(response)) {
listener.onFailure(taskDidNotCompleteException(errorMessageOnWaitTimeout.get()));
} else {
response.rethrowFailures("Checking model [" + modelId + "] download status");
listener.onResponse(null);
}
}, e -> {
listener.onFailure(
new ElasticsearchStatusException(
"Unable to retrieve task information for model id [{}]",
RestStatus.INTERNAL_SERVER_ERROR,
e,
modelId
)
);
}));
}
private static boolean didItTimeout(ListTasksResponse response) {
if (response.getNodeFailures().isEmpty() == false) {
// if one node timed out then the others will also have timed out
var firstNodeFailure = response.getNodeFailures().get(0);
if (firstNodeFailure.status() == RestStatus.REQUEST_TIMEOUT) {
return true;
}
var timeoutException = ExceptionsHelper.unwrap(
firstNodeFailure,
ElasticsearchTimeoutException.class,
ReceiveTimeoutTransportException.class
);
if (timeoutException != null) {
return true;
}
}
return false;
}
private static ElasticsearchException taskDidNotCompleteException(String message) {
return new ElasticsearchStatusException(message, RestStatus.REQUEST_TIMEOUT);
}
private TaskRetriever() {}
}
| TaskRetriever |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java | {
"start": 2404,
"end": 17993
} | class ____ extends AbstractAbfsScaleTest{
private final UserGroupInformation userGroupInfo;
private final String localUser;
private final String localGroup;
private static final String DAEMON = "daemon";
private static final String ASTERISK = "*";
private static final String SHORT_NAME = "abc";
private static final String DOMAIN = "domain.com";
private static final String FULLY_QUALIFIED_NAME = "abc@domain.com";
private static final String SERVICE_PRINCIPAL_ID = UUID.randomUUID().toString();
public ITestAbfsIdentityTransformer() throws Exception {
super();
userGroupInfo = UserGroupInformation.getCurrentUser();
localUser = userGroupInfo.getShortUserName();
localGroup = userGroupInfo.getPrimaryGroupName();
}
@Test
public void testDaemonServiceSettingIdentity() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
// Default config
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
assertEquals(DAEMON, identityTransformer.transformUserOrGroupForSetRequest(DAEMON),
"Identity should not change for default config");
// Add service principal id
config.set(FS_AZURE_OVERRIDE_OWNER_SP, SERVICE_PRINCIPAL_ID);
// case 1: substitution list doesn't contain daemon
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, "a,b,c,d");
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(DAEMON, identityTransformer.transformUserOrGroupForSetRequest(DAEMON),
"Identity should not change when substitution list doesn't contain daemon");
// case 2: substitution list contains daemon name
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, DAEMON + ",a,b,c,d");
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(SERVICE_PRINCIPAL_ID, identityTransformer.transformUserOrGroupForSetRequest(DAEMON),
"Identity should be replaced to servicePrincipalId");
// case 3: substitution list is *
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, ASTERISK);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(SERVICE_PRINCIPAL_ID, identityTransformer.transformUserOrGroupForSetRequest(DAEMON),
"Identity should be replaced to servicePrincipalId");
}
@Test
public void testFullyQualifiedNameSettingIdentity() throws IOException {
Configuration config = this.getRawConfiguration();
// Default config
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
assertEquals(SHORT_NAME, identityTransformer.transformUserOrGroupForSetRequest(SHORT_NAME),
"short name should not be converted to full name by default");
resetIdentityConfig(config);
// Add config to get fully qualified username
config.setBoolean(FS_AZURE_FILE_OWNER_ENABLE_SHORTNAME, true);
config.set(FS_AZURE_FILE_OWNER_DOMAINNAME, DOMAIN);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(FULLY_QUALIFIED_NAME,
identityTransformer.transformUserOrGroupForSetRequest(SHORT_NAME),
"short name should be converted to full name");
}
@Test
public void testNoOpForSettingOidAsIdentity() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
config.setBoolean(FS_AZURE_FILE_OWNER_ENABLE_SHORTNAME, true);
config.set(FS_AZURE_FILE_OWNER_DOMAINNAME, DOMAIN);
config.set(FS_AZURE_OVERRIDE_OWNER_SP, UUID.randomUUID().toString());
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, "a,b,c,d");
IdentityTransformer identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
final String principalId = UUID.randomUUID().toString();
assertEquals(principalId, identityTransformer.transformUserOrGroupForSetRequest(principalId),
"Identity should not be changed when owner is already a principal id ");
}
@Test
public void testNoOpWhenSettingSuperUserAsdentity() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
config.setBoolean(FS_AZURE_FILE_OWNER_ENABLE_SHORTNAME, true);
config.set(FS_AZURE_FILE_OWNER_DOMAINNAME, DOMAIN);
// Default config
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
assertEquals(SUPER_USER, identityTransformer.transformUserOrGroupForSetRequest(SUPER_USER),
"Identity should not be changed because it is not in substitution list");
}
@Test
public void testIdentityReplacementForSuperUserGetRequest() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
// with default config, identityTransformer should do $superUser replacement
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
assertEquals(localUser,
identityTransformer.transformIdentityForGetRequest(SUPER_USER, true, localUser),
"$superuser should be replaced with local user by default");
// Disable $supeuser replacement
config.setBoolean(FS_AZURE_SKIP_SUPER_USER_REPLACEMENT, true);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(SUPER_USER,
identityTransformer.transformIdentityForGetRequest(SUPER_USER, true, localUser),
"$superuser should not be replaced");
}
@Test
public void testIdentityReplacementForDaemonServiceGetRequest() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
// Default config
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
assertEquals(SERVICE_PRINCIPAL_ID,
identityTransformer.transformIdentityForGetRequest(SERVICE_PRINCIPAL_ID, true, localUser),
"By default servicePrincipalId should not be converted for GetFileStatus(), listFileStatus(), getAcl()");
resetIdentityConfig(config);
// 1. substitution list doesn't contain currentUser
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, "a,b,c,d");
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(SERVICE_PRINCIPAL_ID,
identityTransformer.transformIdentityForGetRequest(SERVICE_PRINCIPAL_ID, true, localUser),
"servicePrincipalId should not be replaced if local daemon user is not in substitution list");
resetIdentityConfig(config);
// 2. substitution list contains currentUser(daemon name) but the service principal id in config doesn't match
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, localUser + ",a,b,c,d");
config.set(FS_AZURE_OVERRIDE_OWNER_SP, UUID.randomUUID().toString());
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(SERVICE_PRINCIPAL_ID,
identityTransformer.transformIdentityForGetRequest(SERVICE_PRINCIPAL_ID, true, localUser),
"servicePrincipalId should not be replaced if it is not equal to the SPN set in config");
resetIdentityConfig(config);
// 3. substitution list contains currentUser(daemon name) and the service principal id in config matches
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, localUser + ",a,b,c,d");
config.set(FS_AZURE_OVERRIDE_OWNER_SP, SERVICE_PRINCIPAL_ID);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(localUser,
identityTransformer.transformIdentityForGetRequest(SERVICE_PRINCIPAL_ID, true, localUser),
"servicePrincipalId should be transformed to local use");
resetIdentityConfig(config);
// 4. substitution is "*" but the service principal id in config doesn't match the input
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, ASTERISK);
config.set(FS_AZURE_OVERRIDE_OWNER_SP, UUID.randomUUID().toString());
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(SERVICE_PRINCIPAL_ID,
identityTransformer.transformIdentityForGetRequest(SERVICE_PRINCIPAL_ID, true, localUser),
"servicePrincipalId should not be replaced if it is not equal to the SPN set in config");
resetIdentityConfig(config);
// 5. substitution is "*" and the service principal id in config match the input
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, ASTERISK);
config.set(FS_AZURE_OVERRIDE_OWNER_SP, SERVICE_PRINCIPAL_ID);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(localUser,
identityTransformer.transformIdentityForGetRequest(SERVICE_PRINCIPAL_ID, true, localUser),
"servicePrincipalId should be transformed to local user");
}
@Test
public void testIdentityReplacementForKinitUserGetRequest() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
// Default config
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
assertEquals(FULLY_QUALIFIED_NAME,
identityTransformer.transformIdentityForGetRequest(FULLY_QUALIFIED_NAME, true, localUser),
"full name should not be transformed if shortname is not enabled");
// add config to get short name
config.setBoolean(FS_AZURE_FILE_OWNER_ENABLE_SHORTNAME, true);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
assertEquals(SHORT_NAME,
identityTransformer.transformIdentityForGetRequest(FULLY_QUALIFIED_NAME, true, localUser),
"should convert the full owner name to shortname ");
assertEquals(FULLY_QUALIFIED_NAME,
identityTransformer.transformIdentityForGetRequest(FULLY_QUALIFIED_NAME, false, localGroup),
"group name should not be converted to shortname ");
}
@Test
public void transformAclEntriesForSetRequest() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
List<AclEntry> aclEntriesToBeTransformed = Lists.newArrayList(
aclEntry(ACCESS, USER, DAEMON, ALL),
aclEntry(ACCESS, USER, FULLY_QUALIFIED_NAME,ALL),
aclEntry(DEFAULT, USER, SUPER_USER, ALL),
aclEntry(DEFAULT, USER, SERVICE_PRINCIPAL_ID, ALL),
aclEntry(DEFAULT, USER, SHORT_NAME, ALL),
aclEntry(DEFAULT, GROUP, DAEMON, ALL),
aclEntry(DEFAULT, GROUP, SHORT_NAME, ALL), // Notice: for group type ACL entry, if name is shortName,
aclEntry(DEFAULT, OTHER, ALL), // It won't be converted to Full Name. This is
aclEntry(DEFAULT, MASK, ALL) // to make the behavior consistent with HDI.
);
// make a copy
List<AclEntry> aclEntries = Lists.newArrayList(aclEntriesToBeTransformed);
// Default config should not change the identities
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
identityTransformer.transformAclEntriesForSetRequest(aclEntries);
checkAclEntriesList(aclEntriesToBeTransformed, aclEntries);
resetIdentityConfig(config);
// With config
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, DAEMON + ",a,b,c,d");
config.setBoolean(FS_AZURE_FILE_OWNER_ENABLE_SHORTNAME, true);
config.set(FS_AZURE_FILE_OWNER_DOMAINNAME, DOMAIN);
config.set(FS_AZURE_OVERRIDE_OWNER_SP, SERVICE_PRINCIPAL_ID);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
identityTransformer.transformAclEntriesForSetRequest(aclEntries);
// expected acl entries
List<AclEntry> expectedAclEntries = Lists.newArrayList(
aclEntry(ACCESS, USER, SERVICE_PRINCIPAL_ID, ALL),
aclEntry(ACCESS, USER, FULLY_QUALIFIED_NAME, ALL),
aclEntry(DEFAULT, USER, SUPER_USER, ALL),
aclEntry(DEFAULT, USER, SERVICE_PRINCIPAL_ID, ALL),
aclEntry(DEFAULT, USER, FULLY_QUALIFIED_NAME, ALL),
aclEntry(DEFAULT, GROUP, SERVICE_PRINCIPAL_ID, ALL),
aclEntry(DEFAULT, GROUP, SHORT_NAME, ALL),
aclEntry(DEFAULT, OTHER, ALL),
aclEntry(DEFAULT, MASK, ALL)
);
checkAclEntriesList(aclEntries, expectedAclEntries);
}
@Test
public void transformAclEntriesForGetRequest() throws IOException {
Configuration config = this.getRawConfiguration();
resetIdentityConfig(config);
List<AclEntry> aclEntriesToBeTransformed = Lists.newArrayList(
aclEntry(ACCESS, USER, FULLY_QUALIFIED_NAME, ALL),
aclEntry(DEFAULT, USER, SUPER_USER, ALL),
aclEntry(DEFAULT, USER, SERVICE_PRINCIPAL_ID, ALL),
aclEntry(DEFAULT, USER, SHORT_NAME, ALL),
aclEntry(DEFAULT, GROUP, SHORT_NAME, ALL),
aclEntry(DEFAULT, OTHER, ALL),
aclEntry(DEFAULT, MASK, ALL)
);
// make a copy
List<AclEntry> aclEntries = Lists.newArrayList(aclEntriesToBeTransformed);
// Default config should not change the identities
IdentityTransformer identityTransformer = getTransformerWithDefaultIdentityConfig(config);
identityTransformer.transformAclEntriesForGetRequest(aclEntries, localUser, localGroup);
checkAclEntriesList(aclEntriesToBeTransformed, aclEntries);
resetIdentityConfig(config);
// With config
config.set(FS_AZURE_OVERRIDE_OWNER_SP_LIST, localUser + ",a,b,c,d");
config.setBoolean(FS_AZURE_FILE_OWNER_ENABLE_SHORTNAME, true);
config.set(FS_AZURE_FILE_OWNER_DOMAINNAME, DOMAIN);
config.set(FS_AZURE_OVERRIDE_OWNER_SP, SERVICE_PRINCIPAL_ID);
identityTransformer = getTransformerWithCustomizedIdentityConfig(config);
// make a copy
aclEntries = Lists.newArrayList(aclEntriesToBeTransformed);
identityTransformer.transformAclEntriesForGetRequest(aclEntries, localUser, localGroup);
// expected acl entries
List<AclEntry> expectedAclEntries = Lists.newArrayList(
aclEntry(ACCESS, USER, SHORT_NAME, ALL), // Full UPN should be transformed to shortName
aclEntry(DEFAULT, USER, localUser, ALL), // $SuperUser should be transformed to shortName
aclEntry(DEFAULT, USER, localUser, ALL), // principal Id should be transformed to local user name
aclEntry(DEFAULT, USER, SHORT_NAME, ALL),
aclEntry(DEFAULT, GROUP, SHORT_NAME, ALL),
aclEntry(DEFAULT, OTHER, ALL),
aclEntry(DEFAULT, MASK, ALL)
);
checkAclEntriesList(aclEntries, expectedAclEntries);
}
private void resetIdentityConfig(Configuration config) {
config.unset(FS_AZURE_FILE_OWNER_ENABLE_SHORTNAME);
config.unset(FS_AZURE_FILE_OWNER_DOMAINNAME);
config.unset(FS_AZURE_OVERRIDE_OWNER_SP);
config.unset(FS_AZURE_OVERRIDE_OWNER_SP_LIST);
config.unset(FS_AZURE_SKIP_SUPER_USER_REPLACEMENT);
}
private IdentityTransformer getTransformerWithDefaultIdentityConfig(Configuration config) throws IOException {
resetIdentityConfig(config);
return new IdentityTransformer(config);
}
private IdentityTransformer getTransformerWithCustomizedIdentityConfig(Configuration config) throws IOException {
return new IdentityTransformer(config);
}
private void checkAclEntriesList(List<AclEntry> aclEntries, List<AclEntry> expected) {
assertTrue(aclEntries.size() == expected.size(), "list size not equals");
for (int i = 0; i < aclEntries.size(); i++) {
assertEquals(expected.get(i).getName(), aclEntries.get(i).getName(), "Identity doesn't match");
}
}
}
| ITestAbfsIdentityTransformer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/map/SessionAttribute.java | {
"start": 197,
"end": 951
} | class ____ {
private Long id;
private String name;
private String stringData;
private Serializable objectData;
SessionAttribute() {}
public SessionAttribute(String name, Serializable obj) {
this.name = name;
this.objectData = obj;
}
public SessionAttribute(String name, String str) {
this.name = name;
this.stringData = str;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Serializable getObjectData() {
return objectData;
}
public void setObjectData(Serializable objectData) {
this.objectData = objectData;
}
public String getStringData() {
return stringData;
}
public void setStringData(String stringData) {
this.stringData = stringData;
}
}
| SessionAttribute |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/config/remote/request/ConfigFuzzyWatchRequestTest.java | {
"start": 983,
"end": 3568
} | class ____ extends BasedConfigRequestTest {
private static final String GROUP_KEY_PATTERN = "test.*";
private static final String WATCH_TYPE = "FUZZY";
@Override
@Test
public void testSerialize() throws JsonProcessingException {
ConfigFuzzyWatchRequest configFuzzyWatchRequest = new ConfigFuzzyWatchRequest();
configFuzzyWatchRequest.putAllHeader(HEADERS);
configFuzzyWatchRequest.setGroupKeyPattern(GROUP_KEY_PATTERN);
configFuzzyWatchRequest.setWatchType(WATCH_TYPE);
configFuzzyWatchRequest.setInitializing(true);
Set<String> receivedGroupKeys = new HashSet<>();
receivedGroupKeys.add("test-group-key-1");
receivedGroupKeys.add("test-group-key-2");
configFuzzyWatchRequest.setReceivedGroupKeys(receivedGroupKeys);
final String requestId = injectRequestUuId(configFuzzyWatchRequest);
String json = mapper.writeValueAsString(configFuzzyWatchRequest);
assertTrue(json.contains("\"module\":\"" + Constants.Config.CONFIG_MODULE));
assertTrue(json.contains("\"groupKeyPattern\":\"" + GROUP_KEY_PATTERN));
assertTrue(json.contains("\"watchType\":\"" + WATCH_TYPE));
assertTrue(json.contains("\"initializing\":" + true));
assertTrue(json.contains("\"receivedGroupKeys\":["));
assertTrue(json.contains("\"test-group-key-1\""));
assertTrue(json.contains("\"test-group-key-2\""));
assertTrue(json.contains("\"requestId\":\"" + requestId));
}
@Override
@Test
public void testDeserialize() throws JsonProcessingException {
String json = "{\"headers\":{\"header1\":\"test_header1\"},\"groupKeyPattern\":\"test.*\","
+ "\"watchType\":\"FUZZY\",\"initializing\":true,"
+ "\"receivedGroupKeys\":[\"test-group-key-1\",\"test-group-key-2\"],\"module\":\"config\"}";
ConfigFuzzyWatchRequest actual = mapper.readValue(json, ConfigFuzzyWatchRequest.class);
assertEquals(GROUP_KEY_PATTERN, actual.getGroupKeyPattern());
assertEquals(WATCH_TYPE, actual.getWatchType());
assertEquals(true, actual.isInitializing());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getModule());
assertEquals(HEADER_VALUE, actual.getHeader(HEADER_KEY));
assertEquals(2, actual.getReceivedGroupKeys().size());
assertTrue(actual.getReceivedGroupKeys().contains("test-group-key-1"));
assertTrue(actual.getReceivedGroupKeys().contains("test-group-key-2"));
}
} | ConfigFuzzyWatchRequestTest |
java | netty__netty | transport-native-kqueue/src/test/java/io/netty/channel/kqueue/KqueueWriteBeforeRegisteredTest.java | {
"start": 867,
"end": 1130
} | class ____ extends WriteBeforeRegisteredTest {
@Override
protected List<TestsuitePermutation.BootstrapFactory<Bootstrap>> newFactories() {
return KQueueSocketTestPermutation.INSTANCE.clientSocketWithFastOpen();
}
}
| KqueueWriteBeforeRegisteredTest |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java | {
"start": 1004,
"end": 6504
} | class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("value", ElementType.DOUBLE),
new IntermediateStateDesc("delta", ElementType.DOUBLE),
new IntermediateStateDesc("seen", ElementType.BOOLEAN) );
private final DriverContext driverContext;
private final SumDoubleAggregator.SumState state;
private final List<Integer> channels;
public SumDoubleAggregatorFunction(DriverContext driverContext, List<Integer> channels,
SumDoubleAggregator.SumState state) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
}
public static SumDoubleAggregatorFunction create(DriverContext driverContext,
List<Integer> channels) {
return new SumDoubleAggregatorFunction(driverContext, channels, SumDoubleAggregator.initSingle());
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
DoubleBlock vBlock = page.getBlock(channels.get(0));
DoubleVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock, mask);
return;
}
addRawVector(vVector, mask);
}
private void addRawInputNotMasked(Page page) {
DoubleBlock vBlock = page.getBlock(channels.get(0));
DoubleVector vVector = vBlock.asVector();
if (vVector == null) {
addRawBlock(vBlock);
return;
}
addRawVector(vVector);
}
private void addRawVector(DoubleVector vVector) {
state.seen(true);
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
double vValue = vVector.getDouble(valuesPosition);
SumDoubleAggregator.combine(state, vValue);
}
}
private void addRawVector(DoubleVector vVector, BooleanVector mask) {
state.seen(true);
for (int valuesPosition = 0; valuesPosition < vVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
double vValue = vVector.getDouble(valuesPosition);
SumDoubleAggregator.combine(state, vValue);
}
}
private void addRawBlock(DoubleBlock vBlock) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
state.seen(true);
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
double vValue = vBlock.getDouble(vOffset);
SumDoubleAggregator.combine(state, vValue);
}
}
}
private void addRawBlock(DoubleBlock vBlock, BooleanVector mask) {
for (int p = 0; p < vBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int vValueCount = vBlock.getValueCount(p);
if (vValueCount == 0) {
continue;
}
state.seen(true);
int vStart = vBlock.getFirstValueIndex(p);
int vEnd = vStart + vValueCount;
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
double vValue = vBlock.getDouble(vOffset);
SumDoubleAggregator.combine(state, vValue);
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block valueUncast = page.getBlock(channels.get(0));
if (valueUncast.areAllValuesNull()) {
return;
}
DoubleVector value = ((DoubleBlock) valueUncast).asVector();
assert value.getPositionCount() == 1;
Block deltaUncast = page.getBlock(channels.get(1));
if (deltaUncast.areAllValuesNull()) {
return;
}
DoubleVector delta = ((DoubleBlock) deltaUncast).asVector();
assert delta.getPositionCount() == 1;
Block seenUncast = page.getBlock(channels.get(2));
if (seenUncast.areAllValuesNull()) {
return;
}
BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
assert seen.getPositionCount() == 1;
SumDoubleAggregator.combineIntermediate(state, value.getDouble(0), delta.getDouble(0), seen.getBoolean(0));
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
if (state.seen() == false) {
blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1);
return;
}
blocks[offset] = SumDoubleAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
| SumDoubleAggregatorFunction |
java | quarkusio__quarkus | integration-tests/gradle/src/main/resources/test-fixtures-multi-module/application/src/main/java/org/example/Main.java | {
"start": 29,
"end": 212
} | class ____ {
// Here just because we want dependency
// to production code and test-fixtures of the same module
private MainLibrary2 mainLibrary2 = new MainLibrary2();
} | Main |
java | elastic__elasticsearch | x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsActionTests.java | {
"start": 1612,
"end": 14690
} | class ____ extends ESTestCase {
private static final TransformCheckpointingInfo CHECKPOINTING_INFO = new TransformCheckpointingInfo(
new TransformCheckpointStats(1, null, null, 1, 1),
new TransformCheckpointStats(2, null, null, 2, 5),
2,
Instant.now(),
Instant.now()
);
private final TransformTask task = mock(TransformTask.class);
public void testDeriveStatsStopped() {
String transformId = "transform-with-stats";
String reason = null;
TransformIndexerStats stats = TransformIndexerStatsTests.randomStats();
TransformState stoppedState = new TransformState(
TransformTaskState.STOPPED,
IndexerState.STOPPED,
null,
0,
reason,
null,
null,
true,
null
);
withIdStateAndStats(transformId, stoppedState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPED,
reason,
null,
stats,
TransformCheckpointingInfo.EMPTY,
TransformHealth.GREEN
)
)
);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, CHECKPOINTING_INFO),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPED,
reason,
null,
stats,
CHECKPOINTING_INFO,
TransformHealth.GREEN
)
)
);
reason = "foo";
stoppedState = new TransformState(TransformTaskState.STOPPED, IndexerState.STOPPED, null, 0, reason, null, null, true, null);
withIdStateAndStats(transformId, stoppedState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPED,
reason,
null,
stats,
TransformCheckpointingInfo.EMPTY,
TransformHealth.GREEN
)
)
);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, CHECKPOINTING_INFO),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPED,
reason,
null,
stats,
CHECKPOINTING_INFO,
TransformHealth.GREEN
)
)
);
}
public void testDeriveStatsStoppedWithAuthStateGreen() {
testDeriveStatsStoppedWithAuthState(null, AuthorizationState.green(), TransformCheckpointingInfo.EMPTY, TransformHealth.GREEN);
testDeriveStatsStoppedWithAuthState(CHECKPOINTING_INFO, AuthorizationState.green(), CHECKPOINTING_INFO, TransformHealth.GREEN);
}
public void testDeriveStatsStoppedWithAuthStateRed() {
AuthorizationState redAuthState = AuthorizationState.red(new ElasticsearchSecurityException("missing privileges"));
TransformHealth expectedHealth = new TransformHealth(
HealthStatus.RED,
List.of(TransformHealthChecker.IssueType.PRIVILEGES_CHECK_FAILED.newIssue("missing privileges", 1, null))
);
testDeriveStatsStoppedWithAuthState(null, redAuthState, TransformCheckpointingInfo.EMPTY, expectedHealth);
testDeriveStatsStoppedWithAuthState(CHECKPOINTING_INFO, redAuthState, CHECKPOINTING_INFO, expectedHealth);
}
private void testDeriveStatsStoppedWithAuthState(
TransformCheckpointingInfo info,
AuthorizationState authState,
TransformCheckpointingInfo expectedInfo,
TransformHealth expectedHealth
) {
String transformId = "transform-with-auth-state";
TransformIndexerStats stats = TransformIndexerStatsTests.randomStats();
TransformState stoppedState = new TransformState(
TransformTaskState.STOPPED,
IndexerState.STOPPED,
null,
0,
null,
null,
null,
true,
authState
);
withIdStateAndStats(transformId, stoppedState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, info),
equalTo(new TransformStats(transformId, TransformStats.State.STOPPED, null, null, stats, expectedInfo, expectedHealth))
);
}
public void testDeriveStatsFailed() {
String transformId = "transform-with-stats";
String reason = null;
TransformHealth expectedHealth = new TransformHealth(
HealthStatus.RED,
List.of(TransformHealthChecker.IssueType.TRANSFORM_TASK_FAILED.newIssue(null, 1, null))
);
TransformIndexerStats stats = TransformIndexerStatsTests.randomStats();
TransformState failedState = new TransformState(
TransformTaskState.FAILED,
IndexerState.STOPPED,
null,
0,
reason,
null,
null,
true,
null
);
withIdStateAndStats(transformId, failedState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.FAILED,
reason,
null,
stats,
TransformCheckpointingInfo.EMPTY,
expectedHealth
)
)
);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, CHECKPOINTING_INFO),
equalTo(new TransformStats(transformId, TransformStats.State.FAILED, reason, null, stats, CHECKPOINTING_INFO, expectedHealth))
);
reason = "the task is failed";
expectedHealth = new TransformHealth(
HealthStatus.RED,
List.of(TransformHealthChecker.IssueType.TRANSFORM_TASK_FAILED.newIssue(reason, 1, null))
);
failedState = new TransformState(TransformTaskState.FAILED, IndexerState.STOPPED, null, 0, reason, null, null, true, null);
withIdStateAndStats(transformId, failedState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.FAILED,
reason,
null,
stats,
TransformCheckpointingInfo.EMPTY,
expectedHealth
)
)
);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, CHECKPOINTING_INFO),
equalTo(new TransformStats(transformId, TransformStats.State.FAILED, reason, null, stats, CHECKPOINTING_INFO, expectedHealth))
);
}
public void testDeriveStats() {
String transformId = "transform-with-stats";
String reason = null;
TransformIndexerStats stats = TransformIndexerStatsTests.randomStats();
TransformState runningState = new TransformState(
TransformTaskState.STARTED,
IndexerState.INDEXING,
null,
0,
reason,
null,
null,
true,
null
);
withIdStateAndStats(transformId, runningState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPING,
"transform is set to stop at the next checkpoint",
null,
stats,
TransformCheckpointingInfo.EMPTY,
TransformHealth.GREEN
)
)
);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, CHECKPOINTING_INFO),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPING,
"transform is set to stop at the next checkpoint",
null,
stats,
CHECKPOINTING_INFO,
TransformHealth.GREEN
)
)
);
reason = "foo";
runningState = new TransformState(TransformTaskState.STARTED, IndexerState.INDEXING, null, 0, reason, null, null, true, null);
withIdStateAndStats(transformId, runningState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPING,
reason,
null,
stats,
TransformCheckpointingInfo.EMPTY,
TransformHealth.GREEN
)
)
);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, CHECKPOINTING_INFO),
equalTo(
new TransformStats(
transformId,
TransformStats.State.STOPPING,
reason,
null,
stats,
CHECKPOINTING_INFO,
TransformHealth.GREEN
)
)
);
// Stop at next checkpoint is false.
runningState = new TransformState(TransformTaskState.STARTED, IndexerState.INDEXING, null, 0, reason, null, null, false, null);
withIdStateAndStats(transformId, runningState, stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.INDEXING,
reason,
null,
stats,
TransformCheckpointingInfo.EMPTY,
TransformHealth.GREEN
)
)
);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, CHECKPOINTING_INFO),
equalTo(
new TransformStats(
transformId,
TransformStats.State.INDEXING,
reason,
null,
stats,
CHECKPOINTING_INFO,
TransformHealth.GREEN
)
)
);
}
public void testDeriveStatsWithIndexBlock() {
String transformId = "transform-with-stats";
String reason = "transform is paused while destination index is blocked";
TransformIndexerStats stats = TransformIndexerStatsTests.randomStats();
TransformState runningState = new TransformState(
TransformTaskState.STARTED,
IndexerState.STARTED,
null,
0,
null,
null,
null,
false,
null
);
var context = new TransformContext(TransformTaskState.STARTED, "", 0, mock());
context.setIsWaitingForIndexToUnblock(true);
var task = mock(TransformTask.class);
when(task.getContext()).thenReturn(context);
when(task.getTransformId()).thenReturn(transformId);
when(task.getState()).thenReturn(runningState);
when(task.getStats()).thenReturn(stats);
assertThat(
TransportGetTransformStatsAction.deriveStats(task, null),
equalTo(
new TransformStats(
transformId,
TransformStats.State.WAITING,
reason,
null,
stats,
TransformCheckpointingInfo.EMPTY,
TransformHealth.GREEN
)
)
);
}
private void withIdStateAndStats(String transformId, TransformState state, TransformIndexerStats stats) {
when(task.getTransformId()).thenReturn(transformId);
when(task.getState()).thenReturn(state);
when(task.getStats()).thenReturn(stats);
when(task.getContext()).thenReturn(new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)));
}
}
| TransportGetTransformStatsActionTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java | {
"start": 57638,
"end": 60130
} | class ____ implements ReplicationOperation.Replicas<ReplicaRequest> {
@Override
public void performOn(
final ShardRouting replica,
final ReplicaRequest request,
final long primaryTerm,
final long globalCheckpoint,
final long maxSeqNoOfUpdatesOrDeletes,
final ActionListener<ReplicationOperation.ReplicaResponse> listener
) {
String nodeId = replica.currentNodeId();
final DiscoveryNode node = clusterService.state().nodes().get(nodeId);
if (node == null) {
listener.onFailure(new NoNodeAvailableException("unknown node [" + nodeId + "]"));
return;
}
final ConcreteReplicaRequest<ReplicaRequest> replicaRequest = new ConcreteReplicaRequest<>(
request,
replica.allocationId().getId(),
primaryTerm,
globalCheckpoint,
maxSeqNoOfUpdatesOrDeletes
);
final ActionListenerResponseHandler<ReplicaResponse> handler = new ActionListenerResponseHandler<>(
listener,
ReplicaResponse::new,
TransportResponseHandler.TRANSPORT_WORKER
);
transportService.sendRequest(node, transportReplicaAction, replicaRequest, transportOptions, handler);
}
@Override
public void failShardIfNeeded(
ShardRouting replica,
long primaryTerm,
String message,
Exception exception,
ActionListener<Void> listener
) {
// This does not need to fail the shard. The idea is that this
// is a non-write operation (something like a refresh or a global
// checkpoint sync) and therefore the replica should still be
// "alive" if it were to fail.
listener.onResponse(null);
}
@Override
public void markShardCopyAsStaleIfNeeded(ShardId shardId, String allocationId, long primaryTerm, ActionListener<Void> listener) {
// This does not need to make the shard stale. The idea is that this
// is a non-write operation (something like a refresh or a global
// checkpoint sync) and therefore the replica should still be
// "alive" if it were to be marked as stale.
listener.onResponse(null);
}
}
/**
* a wrapper | ReplicasProxy |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVLeafFieldData.java | {
"start": 701,
"end": 1024
} | class ____ extends AbstractBinaryDVLeafFieldData {
BytesBinaryDVLeafFieldData(BinaryDocValues values) {
super(values);
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return new BinaryDocValuesField(getBytesValues(), name);
}
}
| BytesBinaryDVLeafFieldData |
java | elastic__elasticsearch | qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java | {
"start": 3209,
"end": 15650
} | class ____ extends ESClientYamlSuiteTestCase {
private static final Logger logger = LogManager.getLogger(CcsCommonYamlTestSuiteIT.class);
private static RestClient searchClient;
private static RestClient adminSearchClient;
private static List<HttpHost> clusterHosts;
private static TestCandidateAwareClient searchYamlTestClient;
// the remote cluster is the one we write index operations etc... to
private static final String REMOTE_CLUSTER_NAME = "remote_cluster";
private static final AtomicBoolean isRemoteConfigured = new AtomicBoolean(false);
private static final AtomicBoolean isCombinedComputed = new AtomicBoolean(false);
private static final AtomicReference<TestFeatureService> combinedTestFeatureServiceRef = new AtomicReference<>();
private static final AtomicReference<Set<String>> combinedOsSetRef = new AtomicReference<>();
private static final AtomicReference<Set<String>> combinedNodeVersionsRef = new AtomicReference<>();
private static LocalClusterConfigProvider commonClusterConfig = cluster -> cluster.module("x-pack-async-search")
.module("aggregations")
.module("analysis-common")
.module("mapper-extras")
.module("vector-tile")
.module("x-pack-analytics")
.module("x-pack-eql")
.module("x-pack-sql")
.setting("xpack.security.enabled", "false")
// geohex_grid requires gold license
.setting("xpack.license.self_generated.type", "trial")
.feature(FeatureFlag.TIME_SERIES_MODE)
.feature(FeatureFlag.SYNTHETIC_VECTORS)
.feature(FeatureFlag.GENERIC_VECTOR_FORMAT);
private static ElasticsearchCluster remoteCluster = ElasticsearchCluster.local()
.name(REMOTE_CLUSTER_NAME)
.nodes(2)
.setting("node.roles", "[data,ingest,master]")
.apply(commonClusterConfig)
.build();
private static ElasticsearchCluster localCluster = ElasticsearchCluster.local()
.name("local_cluster")
.setting("node.roles", "[data,ingest,master,remote_cluster_client]")
.setting("cluster.remote.remote_cluster.seeds", () -> "\"" + remoteCluster.getTransportEndpoint(0) + "\"")
.setting("cluster.remote.connections_per_cluster", "1")
.setting("cluster.remote.remote_cluster.skip_unavailable", "false")
.apply(commonClusterConfig)
.build();
@ClassRule
// Use a RuleChain to ensure that remote cluster is started before local cluster
public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster);
// the CCS api calls that we run against the "search" cluster in this test setup
static final Set<String> CCS_APIS = Set.of(
"search",
"field_caps",
"msearch",
"scroll",
"clear_scroll",
"indices.resolve_index",
"async_search.submit",
"async_search.get",
"async_search.status",
"async_search.delete",
"eql.search",
"eql.get",
"eql.get_status",
"eql.delete",
"sql.query",
"sql.clear_cursor",
"sql.translate",
"open_point_in_time",
"close_point_in_time"
);
@Override
protected String getTestRestCluster() {
return remoteCluster.getHttpAddresses();
}
/**
* initialize the search client and an additional administration client and check for an established connection
*/
@Before
public void initSearchClient() throws IOException {
if (searchClient == null) {
assert adminSearchClient == null;
assert clusterHosts == null;
String[] stringUrls = localCluster.getHttpAddresses().split(",");
List<HttpHost> hosts = new ArrayList<>(stringUrls.length);
for (String stringUrl : stringUrls) {
int portSeparator = stringUrl.lastIndexOf(':');
if (portSeparator < 0) {
throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]");
}
String host = stringUrl.substring(0, portSeparator);
int port = Integer.parseInt(stringUrl.substring(portSeparator + 1));
hosts.add(buildHttpHost(host, port));
}
clusterHosts = unmodifiableList(hosts);
logger.info("initializing REST search clients against {}", clusterHosts);
searchClient = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0]));
adminSearchClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[0]));
searchYamlTestClient = new TestCandidateAwareClient(getRestSpec(), searchClient, hosts, this::getClientBuilderWithSniffedHosts);
assert searchClient != null;
assert adminSearchClient != null;
assert clusterHosts != null;
if (isRemoteConfigured.compareAndSet(false, true)) {
// check that we have an established CCS connection
Request request = new Request("GET", "_remote/info");
Response response = adminSearchClient.performRequest(request);
assertOK(response);
ObjectPath responseObject = ObjectPath.createFromResponse(response);
assertNotNull(responseObject.evaluate(REMOTE_CLUSTER_NAME));
assertNull(responseObject.evaluate(REMOTE_CLUSTER_NAME + ".cluster_credentials"));
logger.info("Established connection to remote cluster [" + REMOTE_CLUSTER_NAME + "]");
}
}
searchYamlTestClient.setTestCandidate(getTestCandidate());
}
public CcsCommonYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) throws IOException {
super(rewrite(testCandidate));
}
/**
* we need to rewrite a few "match" sections in order to change the expected index name values
* to include the remote cluster prefix
*/
static ClientYamlTestCandidate rewrite(ClientYamlTestCandidate clientYamlTestCandidate) {
ClientYamlTestSection testSection = clientYamlTestCandidate.getTestSection();
List<ExecutableSection> executableSections = testSection.getExecutableSections();
List<ExecutableSection> modifiedExecutableSections = new ArrayList<>();
String lastAPIDoSection = "";
for (ExecutableSection section : executableSections) {
ExecutableSection rewrittenSection = section;
if (section instanceof MatchAssertion matchSection) {
Object modifiedExpectedValue = ((MatchAssertion) section).getExpectedValue();
if (matchSection.getField().endsWith("_index") || matchSection.getField().contains("fields._index")) {
modifiedExpectedValue = rewriteExpectedIndexValue(matchSection.getExpectedValue());
}
if (lastAPIDoSection.equals("indices.resolve_index") && matchSection.getField().endsWith("name")) {
// modify " indices.resolve_index" expected index names
modifiedExpectedValue = rewriteExpectedIndexValue(matchSection.getExpectedValue());
}
if (lastAPIDoSection.equals("field_caps") && matchSection.getField().endsWith("indices")) {
modifiedExpectedValue = rewriteExpectedIndexValue(matchSection.getExpectedValue());
}
rewrittenSection = new MatchAssertion(matchSection.getLocation(), matchSection.getField(), modifiedExpectedValue);
} else if (section instanceof IsFalseAssertion falseAssertion) {
if ((lastAPIDoSection.startsWith("async_") || lastAPIDoSection.equals("search"))
&& ((IsFalseAssertion) section).getField().endsWith("_clusters")) {
// in ccs scenarios, the response "_cluster" section will be there
rewrittenSection = new IsTrueAssertion(falseAssertion.getLocation(), falseAssertion.getField());
}
} else if (section instanceof DoSection) {
lastAPIDoSection = ((DoSection) section).getApiCallSection().getApi();
if (lastAPIDoSection.equals("msearch")) {
// modify "msearch" body sections so the "index" part is targeting the remote cluster
DoSection doSection = ((DoSection) section);
List<Map<String, Object>> bodies = doSection.getApiCallSection().getBodies();
for (Map<String, Object> body : bodies) {
if (body.containsKey("index")) {
String modifiedIndex = REMOTE_CLUSTER_NAME + ":" + body.get("index");
body.put("index", modifiedIndex);
} else if (body.containsKey("query") && body.containsKey("pit")) {
// search/350_point_in_time/msearch uses _index in a match query
@SuppressWarnings("unchecked")
final var query = (Map<String, Object>) body.get("query");
if (query.containsKey("match")) {
@SuppressWarnings("unchecked")
final var match = (Map<String, Object>) query.get("match");
if (match.containsKey("_index")) {
match.put("_index", REMOTE_CLUSTER_NAME + ":" + match.get("_index"));
}
}
}
}
} else if (lastAPIDoSection.equals("sql.query") || lastAPIDoSection.equals("sql.translate")) {
DoSection doSection = ((DoSection) section);
List<Map<String, Object>> bodies = doSection.getApiCallSection().getBodies();
for (Map<String, Object> body : bodies) {
if (body.containsKey("query")) {
final String query = (String) body.get("query");
// Prefix the index name after FROM with the remote cluster alias
// Split and join the old query string to take care of any excessive whitespaces
final String rewrittenQuery = Strings.arrayToDelimitedString(query.split("\\s+"), " ")
.replace("FROM ", "FROM " + REMOTE_CLUSTER_NAME + ":");
body.put("query", rewrittenQuery);
}
}
}
}
modifiedExecutableSections.add(rewrittenSection);
}
return new ClientYamlTestCandidate(
clientYamlTestCandidate.getRestTestSuite(),
new ClientYamlTestSection(
testSection.getLocation(),
testSection.getName(),
testSection.getPrerequisiteSection(),
modifiedExecutableSections
)
);
}
/**
* add the remote cluster prefix to either a single index name or a list of expected index names
*/
private static Object rewriteExpectedIndexValue(Object expectedValue) {
if (expectedValue instanceof String) {
return REMOTE_CLUSTER_NAME + ":" + expectedValue;
}
if (expectedValue instanceof List) {
@SuppressWarnings("unchecked")
List<String> expectedValues = (List<String>) expectedValue;
return expectedValues.stream().map(s -> REMOTE_CLUSTER_NAME + ":" + s).toList();
}
throw new IllegalArgumentException("Either String or List<String> expected");
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return createParameters();
}
@Override
protected ClientYamlTestExecutionContext createRestTestExecutionContext(
ClientYamlTestCandidate clientYamlTestCandidate,
ClientYamlTestClient clientYamlTestClient,
final Set<String> nodesVersions,
final TestFeatureService testFeatureService,
final Set<String> osSet
) {
try {
if (isCombinedComputed.compareAndSet(false, true)) {
// Ensure the test specific initialization is run by calling it explicitly (@Before annotations on base-derived | CcsCommonYamlTestSuiteIT |
java | apache__flink | flink-core/src/main/java/org/apache/flink/configuration/WritableConfig.java | {
"start": 1065,
"end": 1528
} | interface ____ {
/**
* Stores a given value using the metadata included in the {@link ConfigOption}. The value
* should be readable back through {@link ReadableConfig}.
*
* @param option metadata information
* @param value value to be stored
* @param <T> type of the value to be stored
* @return instance of this configuration for fluent API
*/
<T> WritableConfig set(ConfigOption<T> option, T value);
}
| WritableConfig |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/ids/embeddedid/ItemId.java | {
"start": 435,
"end": 2202
} | class ____ implements Serializable {
@Column(name = "model")
private String model;
@Column(name = "version")
private Integer version;
@ManyToOne
@JoinColumn(name = "producer", nullable = false) // NOT NULL for Sybase
private Producer producer;
public ItemId() {
}
public ItemId(String model, Integer version, Producer producer) {
this.model = model;
this.version = version;
this.producer = producer;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof ItemId) ) {
return false;
}
ItemId itemId = (ItemId) o;
if ( getModel() != null ? !getModel().equals( itemId.getModel() ) : itemId.getModel() != null ) {
return false;
}
if ( getProducer() != null ? !getProducer().equals( itemId.getProducer() ) : itemId.getProducer() != null ) {
return false;
}
if ( getVersion() != null ? !getVersion().equals( itemId.getVersion() ) : itemId.getVersion() != null ) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = model != null ? model.hashCode() : 0;
result = 31 * result + (version != null ? version.hashCode() : 0);
result = 31 * result + (producer != null ? producer.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "ItemId(model = " + model + ", version = " + version + ", producer = " + producer + ")";
}
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
public Integer getVersion() {
return version;
}
public void setVersion(Integer version) {
this.version = version;
}
public Producer getProducer() {
return producer;
}
public void setProducer(Producer producer) {
this.producer = producer;
}
}
| ItemId |
java | elastic__elasticsearch | x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/fst/Util.java | {
"start": 15488,
"end": 34943
} | class ____<T> implements Iterable<Result<T>> {
/**
* <code>true</code> iff this is a complete result ie. if the specified queue size was large
* enough to find the complete list of results. This might be <code>false</code> if the {@link
* TopNSearcher} rejected too many results.
*/
public final boolean isComplete;
/** The top results */
public final List<Result<T>> topN;
TopResults(boolean isComplete, List<Result<T>> topN) {
this.topN = topN;
this.isComplete = isComplete;
}
@Override
public Iterator<Result<T>> iterator() {
return topN.iterator();
}
}
/** Starting from node, find the top N min cost completions to a final node. */
public static <T> TopResults<T> shortestPaths(
FST<T> fst,
Arc<T> fromNode,
T startOutput,
Comparator<T> comparator,
int topN,
boolean allowEmptyString
) throws IOException {
// All paths are kept, so we can pass topN for
// maxQueueDepth and the pruning is admissible:
TopNSearcher<T> searcher = new TopNSearcher<>(fst, topN, topN, comparator);
// since this search is initialized with a single start node
// it is okay to start with an empty input path here
searcher.addStartPaths(fromNode, startOutput, allowEmptyString, new IntsRefBuilder());
return searcher.search();
}
/**
* Dumps an {@link FST} to a GraphViz's <code>dot</code> language description for visualization.
* Example of use:
*
* <pre class="prettyprint">
* PrintWriter pw = new PrintWriter("out.dot");
* Util.toDot(fst, pw, true, true);
* pw.close();
* </pre>
*
* and then, from command line:
*
* <pre>
* dot -Tpng -o out.png out.dot
* </pre>
*
* <p>Note: larger FSTs (a few thousand nodes) won't even render, don't bother.
*
* @param sameRank If <code>true</code>, the resulting <code>dot</code> file will try to order
* states in layers of breadth-first traversal. This may mess up arcs, but makes the output
* FST's structure a bit clearer.
* @param labelStates If <code>true</code> states will have labels equal to their offsets in their
* binary format. Expands the graph considerably.
* @see <a href="http://www.graphviz.org/">graphviz project</a>
*/
public static <T> void toDot(FST<T> fst, Writer out, boolean sameRank, boolean labelStates) throws IOException {
final String expandedNodeColor = "blue";
// This is the start arc in the automaton (from the epsilon state to the first state
// with outgoing transitions.
final Arc<T> startArc = fst.getFirstArc(new Arc<>());
// A queue of transitions to consider for the next level.
final List<Arc<T>> thisLevelQueue = new ArrayList<>();
// A queue of transitions to consider when processing the next level.
final List<Arc<T>> nextLevelQueue = new ArrayList<>();
nextLevelQueue.add(startArc);
// System.out.println("toDot: startArc: " + startArc);
// A list of states on the same level (for ranking).
final List<Integer> sameLevelStates = new ArrayList<>();
// A bitset of already seen states (target offset).
final BitSet seen = new BitSet();
seen.set((int) startArc.target());
// Shape for states.
final String stateShape = "circle";
final String finalStateShape = "doublecircle";
// Emit DOT prologue.
out.write("digraph FST {\n");
out.write(" rankdir = LR; splines=true; concentrate=true; ordering=out; ranksep=2.5; \n");
if (labelStates == false) {
out.write(" node [shape=circle, width=.2, height=.2, style=filled]\n");
}
emitDotState(out, "initial", "point", "white", "");
final T NO_OUTPUT = fst.outputs.getNoOutput();
final BytesReader r = fst.getBytesReader();
// final FST.Arc<T> scratchArc = new FST.Arc<>();
{
final String stateColor;
if (fst.isExpandedTarget(startArc, r)) {
stateColor = expandedNodeColor;
} else {
stateColor = null;
}
final boolean isFinal;
final T finalOutput;
if (startArc.isFinal()) {
isFinal = true;
finalOutput = startArc.nextFinalOutput() == NO_OUTPUT ? null : startArc.nextFinalOutput();
} else {
isFinal = false;
finalOutput = null;
}
emitDotState(
out,
Long.toString(startArc.target()),
isFinal ? finalStateShape : stateShape,
stateColor,
finalOutput == null ? "" : fst.outputs.outputToString(finalOutput)
);
}
out.write(" initial -> " + startArc.target() + "\n");
int level = 0;
while (nextLevelQueue.isEmpty() == false) {
// we could double buffer here, but it doesn't matter probably.
// System.out.println("next level=" + level);
thisLevelQueue.addAll(nextLevelQueue);
nextLevelQueue.clear();
level++;
out.write("\n // Transitions and states at level: " + level + "\n");
while (thisLevelQueue.isEmpty() == false) {
final Arc<T> arc = thisLevelQueue.remove(thisLevelQueue.size() - 1);
// System.out.println(" pop: " + arc);
if (FST.targetHasArcs(arc)) {
// scan all target arcs
// System.out.println(" readFirstTarget...");
final long node = arc.target();
fst.readFirstRealTargetArc(arc.target(), arc, r);
// System.out.println(" firstTarget: " + arc);
while (true) {
// System.out.println(" cycle arc=" + arc);
// Emit the unseen state and add it to the queue for the next level.
if (arc.target() >= 0 && seen.get((int) arc.target()) == false) {
/*
boolean isFinal = false;
T finalOutput = null;
fst.readFirstTargetArc(arc, scratchArc);
if (scratchArc.isFinal() && fst.targetHasArcs(scratchArc)) {
// target is final
isFinal = true;
finalOutput = scratchArc.output == NO_OUTPUT ? null : scratchArc.output;
System.out.println("dot hit final label=" + (char) scratchArc.label);
}
*/
final String stateColor;
if (fst.isExpandedTarget(arc, r)) {
stateColor = expandedNodeColor;
} else {
stateColor = null;
}
final String finalOutput;
if (arc.nextFinalOutput() != null && arc.nextFinalOutput() != NO_OUTPUT) {
finalOutput = fst.outputs.outputToString(arc.nextFinalOutput());
} else {
finalOutput = "";
}
emitDotState(out, Long.toString(arc.target()), stateShape, stateColor, finalOutput);
// To see the node address, use this instead:
// emitDotState(out, Integer.toString(arc.target), stateShape, stateColor,
// String.valueOf(arc.target));
seen.set((int) arc.target());
nextLevelQueue.add(new Arc<T>().copyFrom(arc));
sameLevelStates.add((int) arc.target());
}
String outs;
if (arc.output() != NO_OUTPUT) {
outs = "/" + fst.outputs.outputToString(arc.output());
} else {
outs = "";
}
if (FST.targetHasArcs(arc) == false && arc.isFinal() && arc.nextFinalOutput() != NO_OUTPUT) {
// Tricky special case: sometimes, due to
// pruning, the builder can [sillily] produce
// an FST with an arc into the final end state
// (-1) but also with a next final output; in
// this case we pull that output up onto this
// arc
outs = outs + "/[" + fst.outputs.outputToString(arc.nextFinalOutput()) + "]";
}
final String arcColor;
if (arc.flag(FST.BIT_TARGET_NEXT)) {
arcColor = "red";
} else {
arcColor = "black";
}
assert arc.label() != FST.END_LABEL;
out.write(
" "
+ node
+ " -> "
+ arc.target()
+ " [label=\""
+ printableLabel(arc.label())
+ outs
+ "\""
+ (arc.isFinal() ? " style=\"bold\"" : "")
+ " color=\""
+ arcColor
+ "\"]\n"
);
// Break the loop if we're on the last arc of this state.
if (arc.isLast()) {
// System.out.println(" break");
break;
}
fst.readNextRealArc(arc, r);
}
}
}
// Emit state ranking information.
if (sameRank && sameLevelStates.size() > 1) {
out.write(" {rank=same; ");
for (int state : sameLevelStates) {
out.write(state + "; ");
}
out.write(" }\n");
}
sameLevelStates.clear();
}
// Emit terminating state (always there anyway).
out.write(" -1 [style=filled, color=black, shape=doublecircle, label=\"\"]\n\n");
out.write(" {rank=sink; -1 }\n");
out.write("}\n");
out.flush();
}
/** Emit a single state in the <code>dot</code> language. */
private static void emitDotState(Writer out, String name, String shape, String color, String label) throws IOException {
out.write(
" "
+ name
+ " ["
+ (shape != null ? "shape=" + shape : "")
+ " "
+ (color != null ? "color=" + color : "")
+ " "
+ (label != null ? "label=\"" + label + "\"" : "label=\"\"")
+ " "
+ "]\n"
);
}
/** Ensures an arc's label is indeed printable (dot uses US-ASCII). */
private static String printableLabel(int label) {
// Any ordinary ascii character, except for " or \, are
// printed as the character; else, as a hex string:
if (label >= 0x20 && label <= 0x7d && label != 0x22 && label != 0x5c) { // " OR \
return Character.toString((char) label);
}
return "0x" + Integer.toHexString(label);
}
/** Just maps each UTF16 unit (char) to the ints in an IntsRef. */
public static IntsRef toUTF16(CharSequence s, IntsRefBuilder scratch) {
final int charLimit = s.length();
scratch.setLength(charLimit);
scratch.grow(charLimit);
for (int idx = 0; idx < charLimit; idx++) {
scratch.setIntAt(idx, s.charAt(idx));
}
return scratch.get();
}
/**
* Decodes the Unicode codepoints from the provided CharSequence and places them in the provided
* scratch IntsRef, which must not be null, returning it.
*/
public static IntsRef toUTF32(CharSequence s, IntsRefBuilder scratch) {
int charIdx = 0;
int intIdx = 0;
final int charLimit = s.length();
while (charIdx < charLimit) {
scratch.grow(intIdx + 1);
final int utf32 = Character.codePointAt(s, charIdx);
scratch.setIntAt(intIdx, utf32);
charIdx += Character.charCount(utf32);
intIdx++;
}
scratch.setLength(intIdx);
return scratch.get();
}
/**
* Decodes the Unicode codepoints from the provided char[] and places them in the provided scratch
* IntsRef, which must not be null, returning it.
*/
public static IntsRef toUTF32(char[] s, int offset, int length, IntsRefBuilder scratch) {
int charIdx = offset;
int intIdx = 0;
final int charLimit = offset + length;
while (charIdx < charLimit) {
scratch.grow(intIdx + 1);
final int utf32 = Character.codePointAt(s, charIdx, charLimit);
scratch.setIntAt(intIdx, utf32);
charIdx += Character.charCount(utf32);
intIdx++;
}
scratch.setLength(intIdx);
return scratch.get();
}
/** Just takes unsigned byte values from the BytesRef and converts into an IntsRef. */
public static IntsRef toIntsRef(BytesRef input, IntsRefBuilder scratch) {
scratch.clear();
for (int i = 0; i < input.length; i++) {
scratch.append(input.bytes[i + input.offset] & 0xFF);
}
return scratch.get();
}
/** Just converts IntsRef to BytesRef; you must ensure the int values fit into a byte. */
public static BytesRef toBytesRef(IntsRef input, BytesRefBuilder scratch) {
scratch.grow(input.length);
for (int i = 0; i < input.length; i++) {
int value = input.ints[i + input.offset];
// NOTE: we allow -128 to 255
assert value >= Byte.MIN_VALUE && value <= 255 : "value " + value + " doesn't fit into byte";
scratch.setByteAt(i, (byte) value);
}
scratch.setLength(input.length);
return scratch.get();
}
// Uncomment for debugging:
/*
public static <T> void dotToFile(FST<T> fst, String filePath) throws IOException {
Writer w = new OutputStreamWriter(new FileOutputStream(filePath));
toDot(fst, w, true, true);
w.close();
}
*/
/**
* Reads the first arc greater or equal than the given label into the provided arc in place and
* returns it iff found, otherwise return <code>null</code>.
*
* @param label the label to ceil on
* @param fst the fst to operate on
* @param follow the arc to follow reading the label from
* @param arc the arc to read into in place
* @param in the fst's {@link BytesReader}
*/
public static <T> Arc<T> readCeilArc(int label, FST<T> fst, Arc<T> follow, Arc<T> arc, BytesReader in) throws IOException {
if (label == FST.END_LABEL) {
return FST.readEndArc(follow, arc);
}
if (FST.targetHasArcs(follow) == false) {
return null;
}
fst.readFirstTargetArc(follow, arc, in);
if (arc.bytesPerArc() != 0 && arc.label() != FST.END_LABEL) {
if (arc.nodeFlags() == FST.ARCS_FOR_DIRECT_ADDRESSING) {
// Fixed length arcs in a direct addressing node.
int targetIndex = label - arc.label();
if (targetIndex >= arc.numArcs()) {
return null;
} else if (targetIndex < 0) {
return arc;
} else {
if (BitTable.isBitSet(targetIndex, arc, in)) {
fst.readArcByDirectAddressing(arc, in, targetIndex);
assert arc.label() == label;
} else {
int ceilIndex = BitTable.nextBitSet(targetIndex, arc, in);
assert ceilIndex != -1;
fst.readArcByDirectAddressing(arc, in, ceilIndex);
assert arc.label() > label;
}
return arc;
}
}
// Fixed length arcs in a binary search node.
int idx = binarySearch(fst, arc, label);
if (idx >= 0) {
return fst.readArcByIndex(arc, in, idx);
}
idx = -1 - idx;
if (idx == arc.numArcs()) {
// DEAD END!
return null;
}
return fst.readArcByIndex(arc, in, idx);
}
// Variable length arcs in a linear scan list,
// or special arc with label == FST.END_LABEL.
fst.readFirstRealTargetArc(follow.target(), arc, in);
while (true) {
// System.out.println(" non-bs cycle");
if (arc.label() >= label) {
// System.out.println(" found!");
return arc;
} else if (arc.isLast()) {
return null;
} else {
fst.readNextRealArc(arc, in);
}
}
}
/**
* Perform a binary search of Arcs encoded as a packed array
*
* @param fst the FST from which to read
* @param arc the starting arc; sibling arcs greater than this will be searched. Usually the first
* arc in the array.
* @param targetLabel the label to search for
* @param <T> the output type of the FST
* @return the index of the Arc having the target label, or if no Arc has the matching label,
* {@code -1 - idx)}, where {@code idx} is the index of the Arc with the next highest label,
* or the total number of arcs if the target label exceeds the maximum.
* @throws IOException when the FST reader does
*/
static <T> int binarySearch(FST<T> fst, Arc<T> arc, int targetLabel) throws IOException {
assert arc.nodeFlags() == FST.ARCS_FOR_BINARY_SEARCH
: "Arc is not encoded as packed array for binary search (nodeFlags=" + arc.nodeFlags() + ")";
BytesReader in = fst.getBytesReader();
int low = arc.arcIdx();
int mid;
int high = arc.numArcs() - 1;
while (low <= high) {
mid = (low + high) >>> 1;
in.setPosition(arc.posArcsStart());
in.skipBytes((long) arc.bytesPerArc() * mid + 1);
final int midLabel = fst.readLabel(in);
final int cmp = midLabel - targetLabel;
if (cmp < 0) {
low = mid + 1;
} else if (cmp > 0) {
high = mid - 1;
} else {
return mid;
}
}
return -1 - low;
}
}
| TopResults |
java | quarkusio__quarkus | independent-projects/qute/core/src/main/java/io/quarkus/qute/ExpressionImpl.java | {
"start": 6821,
"end": 7930
} | class ____ implements Part {
protected final String name;
protected final String typeInfo;
protected volatile ValueResolver cachedResolver;
PartImpl(String name, String typeInfo) {
this.name = name;
this.typeInfo = typeInfo;
}
public String getName() {
return name;
}
public String getTypeInfo() {
return typeInfo;
}
@Override
public int hashCode() {
return Objects.hash(name, typeInfo);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PartImpl other = (PartImpl) obj;
return Objects.equals(name, other.name) && Objects.equals(typeInfo, other.typeInfo);
}
@Override
public String toString() {
return name;
}
}
}
| PartImpl |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/runtime/datasource/AbstractStreamCommands.java | {
"start": 1263,
"end": 15910
} | class ____<K, F, V> extends AbstractRedisCommands {
AbstractStreamCommands(RedisCommandExecutor redis, Type k, Type m, Type v) {
super(redis, new Marshaller(k, m, v));
}
Uni<Response> _xack(K key, String group, String... ids) {
nonNull(key, "key");
nonNull(group, "group");
notNullOrEmpty(ids, "ids");
doesNotContainNull(ids, "ids");
RedisCommand cmd = RedisCommand.of(Command.XACK)
.put(marshaller.encode(key))
.put(group)
.putAll(ids);
return execute(cmd);
}
Uni<Response> _xadd(K key, Map<F, V> payload) {
return _xadd(key, new XAddArgs(), payload);
}
Uni<Response> _xadd(K key, XAddArgs args, Map<F, V> payload) {
nonNull(key, "key");
nonNull(args, "args");
nonNull(payload, "payload");
RedisCommand cmd = RedisCommand.of(Command.XADD)
.put(marshaller.encode(key))
.putArgs(args);
for (Map.Entry<F, V> entry : payload.entrySet()) {
cmd.put(marshaller.encode(entry.getKey()));
cmd.putNullable(marshaller.encode(entry.getValue()));
}
return execute(cmd);
}
Uni<Response> _xautoclaim(K key, String group, String consumer, Duration minIdleTime, String start, int count) {
nonNull(key, "key");
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
validateTimeout(minIdleTime, "minIdleTime");
notNullOrBlank(start, "start");
positive(count, "count");
RedisCommand cmd = RedisCommand.of(Command.XAUTOCLAIM)
.put(marshaller.encode(key))
.put(group).put(consumer).put(minIdleTime.toMillis()).put(start)
.put("COUNT").put(count);
return execute(cmd);
}
Uni<Response> _xautoclaim(K key, String group, String consumer, Duration minIdleTime, String start) {
nonNull(key, "key");
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
validateTimeout(minIdleTime, "minIdleTime");
notNullOrBlank(start, "start");
RedisCommand cmd = RedisCommand.of(Command.XAUTOCLAIM)
.put(marshaller.encode(key))
.put(group).put(consumer).put(minIdleTime.toMillis()).put(start);
return execute(cmd);
}
Uni<Response> _xautoclaim(K key, String group, String consumer, Duration minIdleTime, String start, int count,
boolean justId) {
nonNull(key, "key");
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
validateTimeout(minIdleTime, "minIdleTime");
notNullOrBlank(start, "start");
positive(count, "count");
RedisCommand cmd = RedisCommand.of(Command.XAUTOCLAIM)
.put(marshaller.encode(key))
.put(group).put(consumer).put(minIdleTime.toMillis()).put(start);
if (count > 0) {
cmd.put("COUNT").put(count);
}
if (justId) {
cmd.put("JUSTID");
}
return execute(cmd);
}
Uni<Response> _xclaim(K key, String group, String consumer, Duration minIdleTime, String... id) {
nonNull(key, "key");
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
validateTimeout(minIdleTime, "minIdleTime");
notNullOrEmpty(id, "id");
doesNotContainNull(id, "id");
RedisCommand cmd = RedisCommand.of(Command.XCLAIM)
.put(marshaller.encode(key))
.put(group)
.put(consumer)
.put(Long.toString(minIdleTime.toMillis()))
.putAll(id);
return execute(cmd);
}
Uni<Response> _xclaim(K key, String group, String consumer, Duration minIdleTime, XClaimArgs args, String... id) {
nonNull(key, "key");
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
validateTimeout(minIdleTime, "minIdleTime");
nonNull(args, "args");
notNullOrEmpty(id, "id");
doesNotContainNull(id, "id");
RedisCommand cmd = RedisCommand.of(Command.XCLAIM)
.put(marshaller.encode(key))
.put(group)
.put(consumer)
.put(Long.toString(minIdleTime.toMillis()))
.putAll(id)
.putArgs(args);
return execute(cmd);
}
Uni<Response> _xdel(K key, String... id) {
nonNull(key, "key");
notNullOrEmpty(id, "id");
doesNotContainNull(id, "id");
RedisCommand cmd = RedisCommand.of(Command.XDEL)
.put(marshaller.encode(key))
.putAll(id);
return execute(cmd);
}
Uni<Response> _xgroupCreate(K key, String groupname, String from) {
nonNull(key, "key");
notNullOrBlank(groupname, "groupname");
notNullOrBlank(from, "from");
RedisCommand cmd = RedisCommand.of(Command.XGROUP)
.put("CREATE")
.put(marshaller.encode(key))
.put(groupname)
.put(from);
return execute(cmd);
}
Uni<Response> _xgroupCreate(K key, String groupname, String from, XGroupCreateArgs args) {
nonNull(key, "key");
notNullOrBlank(groupname, "groupname");
notNullOrBlank(from, "from");
nonNull(args, "args");
RedisCommand cmd = RedisCommand.of(Command.XGROUP)
.put("CREATE")
.put(marshaller.encode(key))
.put(groupname)
.put(from)
.putArgs(args);
return execute(cmd);
}
Uni<Response> _xgroupCreateConsumer(K key, String groupname, String consumername) {
nonNull(key, "key");
notNullOrBlank(groupname, "groupname");
notNullOrBlank(consumername, "consumername");
RedisCommand cmd = RedisCommand.of(Command.XGROUP)
.put("CREATECONSUMER")
.put(marshaller.encode(key))
.put(groupname)
.put(consumername);
return execute(cmd);
}
Uni<Response> _xgroupDelConsumer(K key, String groupname, String consumername) {
nonNull(key, "key");
notNullOrBlank(groupname, "groupname");
notNullOrBlank(consumername, "consumername");
RedisCommand cmd = RedisCommand.of(Command.XGROUP)
.put("DELCONSUMER")
.put(marshaller.encode(key))
.put(groupname)
.put(consumername);
return execute(cmd);
}
Uni<Response> _xgroupDestroy(K key, String groupname) {
nonNull(key, "key");
notNullOrBlank(groupname, "groupname");
RedisCommand cmd = RedisCommand.of(Command.XGROUP)
.put("DESTROY")
.put(marshaller.encode(key))
.put(groupname);
return execute(cmd);
}
Uni<Response> _xgroupSetId(K key, String groupname, String from) {
nonNull(key, "key");
notNullOrBlank(groupname, "groupname");
notNullOrBlank(from, "from");
RedisCommand cmd = RedisCommand.of(Command.XGROUP)
.put("SETID")
.put(marshaller.encode(key))
.put(groupname)
.put(from);
return execute(cmd);
}
Uni<Response> _xgroupSetId(K key, String groupname, String from, XGroupSetIdArgs args) {
nonNull(key, "key");
notNullOrBlank(groupname, "groupname");
notNullOrBlank(from, "from");
nonNull(args, "args");
RedisCommand cmd = RedisCommand.of(Command.XGROUP)
.put("SETID")
.put(marshaller.encode(key))
.put(groupname)
.put(from)
.putArgs(args);
return execute(cmd);
}
Uni<Response> _xlen(K key) {
nonNull(key, "key");
RedisCommand cmd = RedisCommand.of(Command.XLEN)
.put(marshaller.encode(key));
return execute(cmd);
}
Uni<Response> _xrange(K key, StreamRange range, int count) {
nonNull(key, "key");
nonNull(range, "range");
positive(count, "count");
RedisCommand cmd = RedisCommand.of(Command.XRANGE)
.put(marshaller.encode(key))
.putArgs(range)
.put("COUNT").put(count);
return execute(cmd);
}
Uni<Response> _xrange(K key, StreamRange range) {
nonNull(key, "key");
nonNull(range, "range");
RedisCommand cmd = RedisCommand.of(Command.XRANGE)
.put(marshaller.encode(key))
.putArgs(range);
return execute(cmd);
}
Uni<Response> _xread(K key, String id) {
nonNull(key, "key");
notNullOrBlank(id, "id");
RedisCommand cmd = RedisCommand.of(Command.XREAD)
.put("STREAMS")
.put(marshaller.encode(key))
.put(id);
return execute(cmd);
}
Uni<Response> _xread(Map<K, String> lastIdsPerStream) {
nonNull(lastIdsPerStream, "lastIdsPerStream");
RedisCommand cmd = RedisCommand.of(Command.XREAD)
.put("STREAMS");
writeStreamsAndIds(lastIdsPerStream, cmd);
return execute(cmd);
}
Uni<Response> _xread(K key, String id, XReadArgs args) {
nonNull(key, "key");
notNullOrBlank(id, "id");
nonNull(args, "args");
RedisCommand cmd = RedisCommand.of(Command.XREAD)
.putArgs(args)
.put("STREAMS")
.put(marshaller.encode(key))
.put(id);
return execute(cmd);
}
Uni<Response> _xread(Map<K, String> lastIdsPerStream, XReadArgs args) {
nonNull(args, "args");
RedisCommand cmd = RedisCommand.of(Command.XREAD)
.putArgs(args)
.put("STREAMS");
writeStreamsAndIds(lastIdsPerStream, cmd);
return execute(cmd);
}
private void writeStreamsAndIds(Map<K, String> lastIdsPerStream, RedisCommand cmd) {
List<String> ids = new ArrayList<>();
for (Map.Entry<K, String> entry : lastIdsPerStream.entrySet()) {
cmd.put(marshaller.encode(entry.getKey()));
ids.add(entry.getValue());
}
cmd.putAll(ids);
}
Uni<Response> _xreadgroup(String group, String consumer, K key, String id) {
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
nonNull(key, "key");
notNullOrBlank(id, "id");
RedisCommand cmd = RedisCommand.of(Command.XREADGROUP)
.put("GROUP")
.put(group)
.put(consumer)
.put("STREAMS")
.put(marshaller.encode(key))
.put(id);
return execute(cmd);
}
Uni<Response> _xreadgroup(String group, String consumer, Map<K, String> lastIdsPerStream) {
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
nonNull(lastIdsPerStream, "lastIdsPerStream");
RedisCommand cmd = RedisCommand.of(Command.XREADGROUP)
.put("GROUP")
.put(group)
.put(consumer)
.put("STREAMS");
writeStreamsAndIds(lastIdsPerStream, cmd);
return execute(cmd);
}
Uni<Response> _xreadgroup(String group, String consumer, K key, String id, XReadGroupArgs args) {
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
nonNull(key, "key");
notNullOrBlank(id, "id");
nonNull(args, "args");
RedisCommand cmd = RedisCommand.of(Command.XREADGROUP)
.put("GROUP")
.put(group)
.put(consumer)
.putArgs(args)
.put("STREAMS")
.put(marshaller.encode(key))
.put(id);
return execute(cmd);
}
Uni<Response> _xreadgroup(String group, String consumer, Map<K, String> lastIdsPerStream, XReadGroupArgs args) {
notNullOrBlank(group, "group");
notNullOrBlank(consumer, "consumer");
nonNull(lastIdsPerStream, "lastIdsPerStream");
nonNull(args, "args");
RedisCommand cmd = RedisCommand.of(Command.XREADGROUP)
.put("GROUP")
.put(group)
.put(consumer)
.putArgs(args)
.put("STREAMS");
writeStreamsAndIds(lastIdsPerStream, cmd);
return execute(cmd);
}
Uni<Response> _xrevrange(K key, StreamRange range, int count) {
nonNull(key, "key");
nonNull(range, "range");
positive(count, "count");
RedisCommand cmd = RedisCommand.of(Command.XREVRANGE)
.put(marshaller.encode(key))
.putArgs(range)
.put("COUNT")
.put(count);
return execute(cmd);
}
Uni<Response> _xrevrange(K key, StreamRange range) {
nonNull(key, "key");
nonNull(range, "range");
RedisCommand cmd = RedisCommand.of(Command.XREVRANGE)
.put(marshaller.encode(key))
.putArgs(range);
return execute(cmd);
}
Uni<Response> _xtrim(K key, XTrimArgs args) {
nonNull(key, "key");
nonNull(args, "args");
RedisCommand cmd = RedisCommand.of(Command.XTRIM)
.put(marshaller.encode(key))
.putArgs(args);
return execute(cmd);
}
Uni<Response> _xpending(K key, String group) {
nonNull(key, "key");
nonNull(key, "group");
RedisCommand cmd = RedisCommand.of(Command.XPENDING)
.put(marshaller.encode(key))
.put(group);
return execute(cmd);
}
Uni<Response> _xpending(K key, String group, StreamRange range, int count, XPendingArgs args) {
nonNull(key, "key");
nonNull(key, "group");
nonNull(range, "range");
positive(count, "count");
RedisCommand cmd = RedisCommand.of(Command.XPENDING)
.put(marshaller.encode(key))
.put(group);
// IDLE must be before the range and count
if (args != null && args.idle() != null) {
cmd.put("IDLE");
cmd.put(args.idle().toMillis());
}
cmd.putArgs(range)
.put(count);
if (args != null) {
cmd.putArgs(args);
}
return execute(cmd);
}
}
| AbstractStreamCommands |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/method/DeprecatedMethodConfig.java | {
"start": 870,
"end": 1011
} | class ____ {
@TestConfigurationProperties("foo")
@Deprecated
public Foo foo() {
return new Foo();
}
public static | DeprecatedMethodConfig |
java | apache__flink | flink-core/src/test/java/org/apache/flink/testutils/TestFileSystem.java | {
"start": 3864,
"end": 5088
} | class ____ extends FSDataOutputStream {
private final LocalDataOutputStream stream;
private final Path path;
private TestOutputStream(LocalDataOutputStream stream, Path path) {
this.stream = stream;
this.path = path;
}
@Override
public long getPos() throws IOException {
return stream.getPos();
}
@Override
public void write(int b) throws IOException {
stream.write(b);
}
@Override
public void write(byte[] b, int off, int len) throws IOException {
stream.write(b, off, len);
}
@Override
public void flush() throws IOException {
stream.flush();
}
@Override
public void sync() throws IOException {
stream.sync();
}
@Override
public void close() throws IOException {
currentUnclosedOutputStream.compute(
path, (k, v) -> Preconditions.checkNotNull(v) == 1 ? null : v - 1);
stream.close();
}
}
// ------------------------------------------------------------------------
public static final | TestOutputStream |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/oracle_cursor/Author2.java | {
"start": 760,
"end": 1981
} | class ____ {
private Integer id;
private String name;
private List<String> bookNames;
public Author2() {
super();
}
public Author2(Integer id, String name, List<String> bookNames) {
super();
this.id = id;
this.name = name;
this.bookNames = bookNames;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<String> getBookNames() {
return bookNames;
}
public void setBookNames(List<String> bookNames) {
this.bookNames = bookNames;
}
@Override
public int hashCode() {
return Objects.hash(bookNames, id, name);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Author2)) {
return false;
}
Author2 other = (Author2) obj;
return Objects.equals(bookNames, other.bookNames) && Objects.equals(id, other.id)
&& Objects.equals(name, other.name);
}
@Override
public String toString() {
return "Author2 [id=" + id + ", name=" + name + ", bookNames=" + bookNames + "]";
}
}
| Author2 |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/FluxElapsedTest.java | {
"start": 1071,
"end": 2580
} | class ____ {
Flux<Tuple2<Long, String>> scenario_aFluxCanBeBenchmarked(){
return Flux.just("test")
.elapsed();
}
@Test
public void aFluxCanBeBenchmarked(){
StepVerifier.withVirtualTime(this::scenario_aFluxCanBeBenchmarked,0)
.thenAwait(Duration.ofSeconds(2))
.thenRequest(1)
.expectNextMatches(t -> t.getT1() == 2000 && t.getT2().equals("test"))
.verifyComplete();
}
@Test
public void scanOperator() {
Flux<Tuple2<Long, Integer>> test = Flux.just(1).elapsed(Schedulers.single());
assertThat(test).isInstanceOf(Scannable.class);
assertThat(from(test).scan(Scannable.Attr.RUN_ON)).isSameAs(Schedulers.single());
assertThat(from(test).scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanSubscriber() {
CoreSubscriber<Tuple2<Long, String>> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
FluxElapsed.ElapsedSubscriber<String> test = new FluxElapsed.ElapsedSubscriber<>(actual, Schedulers.single());
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.RUN_ON)).isSameAs(Schedulers.single());
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
}
| FluxElapsedTest |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlSubPartitionByValue.java | {
"start": 1111,
"end": 3185
} | class ____ extends SQLSubPartitionBy implements MySqlObject {
private List<SQLExpr> columns = new ArrayList<SQLExpr>();
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof MySqlASTVisitor) {
accept0((MySqlASTVisitor) visitor);
} else {
throw new IllegalArgumentException("not support visitor type : " + visitor.getClass().getName());
}
}
@Override
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, columns);
acceptChild(visitor, subPartitionsCount);
}
visitor.endVisit(this);
}
public List<SQLExpr> getColumns() {
return columns;
}
public void addColumn(SQLExpr column) {
if (column != null) {
column.setParent(this);
}
this.columns.add(column);
}
public void cloneTo(MySqlSubPartitionByValue x) {
super.cloneTo(x);
for (SQLExpr column : columns) {
SQLExpr c2 = column.clone();
c2.setParent(x);
x.columns.add(c2);
}
}
public MySqlSubPartitionByValue clone() {
MySqlSubPartitionByValue x = new MySqlSubPartitionByValue();
cloneTo(x);
return x;
}
public boolean isPartitionByColumn(long columnNameHashCode64) {
for (SQLExpr column : columns) {
if (column instanceof SQLName) {
if (((SQLName) column).nameHashCode64() == columnNameHashCode64) {
return true;
} else if (column instanceof SQLMethodInvokeExpr) {
List<SQLExpr> arguments = ((SQLMethodInvokeExpr) column).getArguments();
for (SQLExpr argument : arguments) {
if (((SQLName) argument).nameHashCode64() == columnNameHashCode64) {
return true;
}
}
}
}
}
return false;
}
}
| MySqlSubPartitionByValue |
java | processing__processing4 | java/src/processing/mode/java/preproc/ImportStatement.java | {
"start": 1184,
"end": 2037
} | class ____ of the import with all packages
* Ends with star for starred imports
*/
private String memberName;
/**
* Name of the package e.g. everything before last dot
*/
private String packageName;
private ImportStatement() { }
/**
* Create an import statement for a full package.
*
* @param cls The fully qualified name of the package.
* @return ImportStatement which imports all package members in a non-static context using a wildcard.
*/
public static ImportStatement wholePackage(String pckg) {
ImportStatement is = new ImportStatement();
is.packageName = pckg;
is.memberName = "*";
is.isStarred = true;
return is;
}
/**
* Create an import statement for a single class.
*
* @param cls The fully qualified name of the class.
* @return ImportStatement which imports the | name |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/MonoZipTest.java | {
"start": 1217,
"end": 17457
} | class ____ {
@Test
public void allEmpty() {
assertThat(Mono.zip(Mono.empty(), Mono.empty())
.block()).isNull();
}
@Test
public void allNonEmptyIterable() {
assertThat(Mono.zip(Arrays.asList(Mono.just(1), Mono.just(2)),
args -> (int) args[0] + (int) args[1])
.block()).isEqualTo(3);
}
@Test
public void noSourcePublisherCombined() {
assertThat(Mono.zip(args -> (int) args[0] + (int) args[1])
.block()).isNull();
}
@Test
public void oneSourcePublisherCombined() {
assertThat(Mono.zip(args -> (int) args[0], Mono.just(1))
.block()).isEqualTo(1);
}
@Test
public void allEmptyDelay() {
assertThat(Mono.zipDelayError(Mono.empty(), Mono.empty())
.block()).isNull();
}
@Test
public void noSourcePublisherCombinedDelay() {
assertThat(Mono.zipDelayError(args -> (int) args[0] + (int) args[1])
.block()).isNull();
}
@Test
public void oneSourcePublisherCombinedDelay() {
assertThat(Mono.zipDelayError(args -> (int) args[0], Mono.just(1))
.block()).isEqualTo(1);
}
@Test
public void nonEmptyPublisherCombinedDelay() {
assertThat(Mono.zipDelayError(args -> (int) args[0] + (int) args[1],
Mono.just(1),
Mono.just(2))
.block()).isEqualTo(3);
}
@Test
@Timeout(5)
public void castCheck() {
Mono<String[]> mono = Mono.zip(a -> Arrays.copyOf(a, a.length, String[].class),
Mono.just("hello"),
Mono.just("world"));
mono.subscribe(System.out::println);
}
@Test//(timeout = 5000)
public void all2NonEmpty() {
assertThat(Mono.zip(Mono.delay(Duration.ofMillis(150)), Mono.delay(Duration.ofMillis(250))).block())
.isEqualTo(Tuples.of(0L, 0L));
}
@Test
public void allNonEmpty2() {
assertThat(Mono.zip(args -> (int) args[0] + (int) args[1],
Mono.just(1),
Mono.just(2))
.block()).isEqualTo(3);
}
@Test
public void someEmpty() {
StepVerifier.withVirtualTime(() ->
Mono.zip(Mono.delay(Duration.ofMillis(150)).then(), Mono.delay(Duration
.ofMillis(250))))
.thenAwait(Duration.ofMillis(150))
.verifyComplete();
}
@Test//(timeout = 5000)
public void allNonEmpty() {
for (int i = 2; i < 7; i++) {
Long[] result = new Long[i];
Arrays.fill(result, 0L);
@SuppressWarnings("unchecked") Mono<Long>[] monos = new Mono[i];
for (int j = 0; j < i; j++) {
monos[j] = Mono.delay(Duration.ofMillis(150 + 50 * j));
}
Object[] out = Mono.zip(a -> a, monos)
.block();
assertThat(out).isEqualTo(result);
}
}
@Test
public void pairWise() {
Mono<Tuple2<Integer, String>> f = Mono.just(1)
.zipWith(Mono.just("test2"));
assertThat(f).isInstanceOf(MonoZip.class);
MonoZip<?, ?> s = (MonoZip<?, ?>) f;
assertThat(s.sources).isNotNull();
assertThat(s.sources).hasSize(2);
f.subscribeWith(AssertSubscriber.create())
.assertValues(Tuples.of(1, "test2"))
.assertComplete();
}
@Test
public void pairWise2() {
Mono<Tuple2<Tuple2<Integer, String>, String>> f =
Mono.zip(Mono.just(1), Mono.just("test"))
.zipWith(Mono.just("test2"));
assertThat(f).isInstanceOf(MonoZip.class);
MonoZip<?, ?> s = (MonoZip<?, ?>) f;
assertThat(s.sources).isNotNull();
assertThat(s.sources).hasSize(3);
Mono<Tuple2<Integer, String>> ff = f.map(t -> Tuples.of(t.getT1()
.getT1(),
t.getT1()
.getT2() + t.getT2()));
ff.subscribeWith(AssertSubscriber.create())
.assertValues(Tuples.of(1, "testtest2"))
.assertComplete();
}
@Test
public void pairWise3() {
Mono<Tuple2<Tuple2<Integer, String>, String>> f =
Mono.zip(Arrays.asList(Mono.just(1), Mono.just("test")),
obj -> Tuples.of((int) obj[0], (String) obj[1]))
.zipWith(Mono.just("test2"));
assertThat(f).isInstanceOf(MonoZip.class);
MonoZip<?, ?> s = (MonoZip<?, ?>) f;
assertThat(s.sources).isNotNull();
assertThat(s.sources).hasSize(2);
Mono<Tuple2<Integer, String>> ff = f.map(t -> Tuples.of(t.getT1()
.getT1(),
t.getT1()
.getT2() + t.getT2()));
ff.subscribeWith(AssertSubscriber.create())
.assertValues(Tuples.of(1, "testtest2"))
.assertComplete();
}
@Test
public void zipMonoJust() {
StepVerifier.create(Mono.zip(Mono.just(1), Mono.just(2)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2).isTrue())
.verifyComplete();
}
@Test
public void zipMonoJust3() {
StepVerifier.create(Mono.zip(Mono.just(1), Mono.just(2), Mono.just(3)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3).isTrue())
.verifyComplete();
}
@Test
public void zipMonoJust4() {
StepVerifier.create(Mono.zip(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4).isTrue())
.verifyComplete();
}
@Test
public void zipMonoJust5() {
StepVerifier.create(Mono.zip(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5).isTrue())
.verifyComplete();
}
@Test
public void zipMonoJust6() {
StepVerifier.create(Mono.zip(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5),
Mono.just(6)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5 && v.getT6() == 6).isTrue())
.verifyComplete();
}
@Test
public void zipMonoJust7() {
StepVerifier.create(Mono.zip(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5),
Mono.just(6),
Mono.just(7)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5 && v.getT6() == 6 && v.getT7() == 7).isTrue())
.verifyComplete();
}
@Test
public void zipMonoJust8() {
StepVerifier.create(Mono.zip(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5),
Mono.just(6),
Mono.just(7),
Mono.just(8)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5 && v.getT6() == 6 && v.getT7() == 7 && v.getT8() == 8).isTrue())
.verifyComplete();
}
@Test
public void zipMonoError() {
StepVerifier.create(Mono.zip(Mono.<Integer>error(new Exception("test1")),
Mono.<Integer>error(new Exception("test2"))))
.verifyErrorSatisfies(e -> assertThat(e).hasMessage("test1"));
}
@Test
public void zipMonoCallable() {
StepVerifier.create(Mono.zip(Mono.fromCallable(() -> 1), Mono.fromCallable(() -> 2)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2).isTrue())
.verifyComplete();
}
@Test
public void zipDelayErrorJustMono() {
StepVerifier.create(Mono.zipDelayError(Mono.just(1), Mono.just(2)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2).isTrue())
.verifyComplete();
}
@Test
public void zipDelayErrorJustMono3() {
StepVerifier.create(Mono.zipDelayError(Mono.just(1), Mono.just(2), Mono.just(3)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3).isTrue())
.verifyComplete();
}
@Test
public void zipDelayErrorMonoJust4() {
StepVerifier.create(Mono.zipDelayError(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4).isTrue())
.verifyComplete();
}
@Test
public void zipDelayErrorMonoJust5() {
StepVerifier.create(Mono.zipDelayError(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5).isTrue())
.verifyComplete();
}
@Test
public void zipDelayErrorMonoJust6() {
StepVerifier.create(Mono.zipDelayError(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5),
Mono.just(6)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5 && v.getT6() == 6).isTrue())
.verifyComplete();
}
@Test
public void zipDelayErrorMonoJust7() {
StepVerifier.create(Mono.zipDelayError(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5),
Mono.just(6),
Mono.just(7)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5 && v.getT6() == 6 && v.getT7() == 7).isTrue())
.verifyComplete();
}
@Test
public void zipDelayErrorMonoJust8() {
StepVerifier.create(Mono.zipDelayError(
Mono.just(1),
Mono.just(2),
Mono.just(3),
Mono.just(4),
Mono.just(5),
Mono.just(6),
Mono.just(7),
Mono.just(8)))
.assertNext(v -> assertThat(v.getT1() == 1 && v.getT2() == 2 && v.getT3() == 3 && v.getT4() == 4 && v.getT5() == 5 && v.getT6() == 6 && v.getT7() == 7 && v.getT8() == 8).isTrue())
.verifyComplete();
}
@Test
public void zipIterableDelayErrorCombinesErrors() {
Exception boom1 = new NullPointerException("boom1");
Exception boom2 = new IllegalArgumentException("boom2");
StepVerifier.create(Mono.zipDelayError(
Arrays.asList(Mono.just("foo"), Mono.<String>error(boom1), Mono.<String>error(boom2)),
Tuples.fn3()))
.verifyErrorMatches(e -> e.getMessage().equals("Multiple exceptions") &&
e.getSuppressed()[0] == boom1 &&
e.getSuppressed()[1] == boom2);
}
@Test
public void zipIterableDoesntCombineErrors() {
Exception boom1 = new NullPointerException("boom1");
Exception boom2 = new IllegalArgumentException("boom2");
StepVerifier.create(Mono.zip(
Arrays.asList(Mono.just("foo"), Mono.<String>error(boom1), Mono.<String>error(boom2)),
Tuples.fn3()))
.verifyErrorMatches(e -> e == boom1);
}
@Test
public void delayErrorEmptySourceErrorSource() {
Mono<String> error = Mono.error(new IllegalStateException("boom"));
Mono<String> empty = Mono.empty();
StepVerifier.create(Mono.zipDelayError(error,empty))
.expectErrorMessage("boom")
.verify();
}
@Test
public void delayErrorEmptySourceErrorTwoSource() {
final IllegalStateException e1 = new IllegalStateException("boom1");
final IllegalStateException e2 = new IllegalStateException("boom2");
Mono<String> error1 = Mono.error(e1);
Mono<String> error2 = Mono.error(e2);
Mono<String> empty = Mono.empty();
StepVerifier.create(Mono.zipDelayError(error1, empty, error2))
.expectErrorSatisfies(e -> assertThat(e)
.matches(Exceptions::isMultiple)
.hasSuppressedException(e1)
.hasSuppressedException(e2))
.verify();
}
@Test
public void delayErrorEmptySources() {
AtomicBoolean cancelled = new AtomicBoolean();
Mono<String> empty1 = Mono.empty();
Mono<String> empty2 = Mono.empty();
Mono<String> empty3 = Mono.<String>empty().delaySubscription(Duration.ofMillis(500))
.doOnCancel(() -> cancelled.set(true));
StepVerifier.create(Mono.zipDelayError(empty1, empty2, empty3))
.expectSubscription()
.expectNoEvent(Duration.ofMillis(400))
.verifyComplete();
assertThat(cancelled).isFalse();
}
@Test
public void emptySources() {
AtomicBoolean cancelled = new AtomicBoolean();
Mono<String> empty1 = Mono.empty();
Mono<String> empty2 = Mono.empty();
Mono<String> empty3 = Mono.<String>empty().delaySubscription(Duration.ofMillis(500))
.doOnCancel(() -> cancelled.set(true));
Duration d = StepVerifier.create(Mono.zip(empty1, empty2, empty3))
.verifyComplete();
assertThat(cancelled).isTrue();
assertThat(d).isLessThan(Duration.ofMillis(500));
}
@Test
public void scanOperator() {
MonoZip s = new MonoZip<>(true, z -> z);
assertThat(s.scan(Scannable.Attr.DELAY_ERROR)).as("delayError").isTrue();
assertThat(s.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanCoordinator() {
CoreSubscriber<String> actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
MonoZip.ZipCoordinator<String> test = new MonoZip.ZipCoordinator<>(new Mono[2],
actual, 2, true, a -> String.valueOf(a[0]));
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(0);
assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(2);
assertThat(test.scan(Scannable.Attr.DELAY_ERROR)).isTrue();
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
@Test
public void innerErrorIncrementsParentDone() {
CoreSubscriber<String> actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
MonoZip.ZipCoordinator<String> parent = new MonoZip.ZipCoordinator<>(new Mono[2],
actual, 2, false, a -> String.valueOf(a[0]));
MonoZip.ZipInner<String> test = new MonoZip.ZipInner<>(parent);
assertThat(parent.state).isZero();
test.onError(new IllegalStateException("boom"));
assertThat(parent.state).isEqualTo(MonoZip.ZipCoordinator.INTERRUPTED_FLAG + 2);
assertThat(parent.scan(Scannable.Attr.TERMINATED)).isTrue();
}
@Test
public void scanCoordinatorNotDoneUntilN() {
CoreSubscriber<String> actual = new LambdaMonoSubscriber<>(null, e -> {}, null, null);
MonoZip.ZipCoordinator<String> test = new MonoZip.ZipCoordinator<>(new Mono[2],
actual, 10, true, a -> String.valueOf(a[0]));
test.state = 9;
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.state = 10;
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
}
@Test
public void scanWhenInner() {
CoreSubscriber<? super String> actual = new LambdaMonoSubscriber<>(null, e ->
{}, null, null);
MonoZip.ZipCoordinator<String>
coordinator = new MonoZip.ZipCoordinator<>(new Mono[2], actual, 2,
false, a -> null);
MonoZip.ZipInner<String> test = new MonoZip.ZipInner<>(coordinator);
Subscription innerSub = Operators.cancelledSubscription();
test.onSubscribe(innerSub);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(innerSub);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(coordinator);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(coordinator.scan(Scannable.Attr.TERMINATED)).isFalse(); //done == 1
test.onError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.ERROR)).hasMessage("boom");
assertThat(coordinator.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
@Test
public void andAliasZipWith() {
Mono<Tuple2<Integer, String>> and = Mono.just(1)
.zipWith(Mono.just("B"));
Mono<Tuple2<Tuple2<Integer, String>, Integer>> zipWith = and.zipWith(Mono.just(3));
StepVerifier.create(zipWith)
.expectNext(Tuples.of(Tuples.of(1, "B"), 3))
.verifyComplete();
}
@Test
public void andCombinatorAliasZipWithCombinator() {
Mono<String> and = Mono.just(1).zipWith(Mono.just("B"), (i, s) -> i + s);
Mono<String> zipWith = and.zipWith(Mono.just(3), (s, i) -> s + i);
StepVerifier.create(zipWith)
.expectNext("1B3")
.verifyComplete();
}
}
| MonoZipTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/beanvalidation/TvOwner.java | {
"start": 399,
"end": 497
} | class ____ {
@Id
@GeneratedValue
public Integer id;
@ManyToOne
@NotNull
public Tv tv;
}
| TvOwner |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/client/InterceptingClientHttpRequestFactoryTests.java | {
"start": 1416,
"end": 8173
} | class ____ {
private RequestFactoryMock requestFactoryMock = new RequestFactoryMock();
private MockClientHttpRequest requestMock = new MockClientHttpRequest();
private MockClientHttpResponse responseMock = new MockClientHttpResponse();
private InterceptingClientHttpRequestFactory requestFactory;
@BeforeEach
void beforeEach() {
this.requestMock.setResponse(this.responseMock);
}
@Test
void invokeInterceptors() throws Exception {
List<ClientHttpRequestInterceptor> interceptors = new ArrayList<>();
interceptors.add(new NoOpInterceptor());
interceptors.add(new NoOpInterceptor());
interceptors.add(new NoOpInterceptor());
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, interceptors);
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
ClientHttpResponse response = request.execute();
assertThat(((NoOpInterceptor) interceptors.get(0)).invocationCount).isEqualTo(1);
assertThat(((NoOpInterceptor) interceptors.get(1)).invocationCount).isEqualTo(1);
assertThat(((NoOpInterceptor) interceptors.get(2)).invocationCount).isEqualTo(1);
assertThat(requestMock.isExecuted()).isTrue();
assertThat(response).isSameAs(responseMock);
}
@Test
void skipInterceptor() throws Exception {
List<ClientHttpRequestInterceptor> interceptors = new ArrayList<>();
interceptors.add((request, body, execution) -> responseMock);
interceptors.add(new NoOpInterceptor());
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, interceptors);
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
ClientHttpResponse response = request.execute();
assertThat(((NoOpInterceptor) interceptors.get(1)).invocationCount).isZero();
assertThat(requestMock.isExecuted()).isFalse();
assertThat(response).isSameAs(responseMock);
}
@Test
void updateRequestHeader() throws Exception {
final String headerName = "Foo";
final String headerValue = "Bar";
final String otherValue = "Baz";
ClientHttpRequestInterceptor interceptor = (request, body, execution) -> {
HttpRequestWrapper wrapper = new HttpRequestWrapper(request);
wrapper.getHeaders().add(headerName, otherValue);
return execution.execute(wrapper, body);
};
requestMock = new MockClientHttpRequest() {
@Override
protected ClientHttpResponse executeInternal() {
List<String> headerValues = getHeaders().get(headerName);
assertThat(headerValues).containsExactly(headerValue, otherValue);
return responseMock;
}
};
requestMock.getHeaders().add(headerName, headerValue);
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, Collections.singletonList(interceptor));
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
request.execute();
}
@Test
void updateRequestAttribute() throws Exception {
final String attrName = "Foo";
final String attrValue = "Bar";
ClientHttpRequestInterceptor interceptor = (request, body, execution) -> {
request.getAttributes().put(attrName, attrValue);
return execution.execute(request, body);
};
requestMock = new MockClientHttpRequest() {
@Override
protected ClientHttpResponse executeInternal() {
assertThat(getAttributes()).containsEntry(attrName, attrValue);
return responseMock;
}
};
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, Collections.singletonList(interceptor));
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
request.execute();
}
@Test
void updateRequestURI() throws Exception {
final URI changedUri = URI.create("https://example.com/2");
ClientHttpRequestInterceptor interceptor = (request, body, execution) -> execution.execute(new HttpRequestWrapper(request) {
@Override
public URI getURI() {
return changedUri;
}
}, body);
requestFactoryMock = new RequestFactoryMock() {
@Override
public ClientHttpRequest createRequest(URI uri, HttpMethod httpMethod) throws IOException {
assertThat(uri).isEqualTo(changedUri);
return super.createRequest(uri, httpMethod);
}
};
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, Collections.singletonList(interceptor));
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
request.execute();
}
@Test
void updateRequestMethod() throws Exception {
final HttpMethod changedMethod = HttpMethod.POST;
ClientHttpRequestInterceptor interceptor = (request, body, execution) -> execution.execute(new HttpRequestWrapper(request) {
@Override
public HttpMethod getMethod() {
return changedMethod;
}
}, body);
requestFactoryMock = new RequestFactoryMock() {
@Override
public ClientHttpRequest createRequest(URI uri, HttpMethod httpMethod) throws IOException {
assertThat(httpMethod).isEqualTo(changedMethod);
return super.createRequest(uri, httpMethod);
}
};
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, Collections.singletonList(interceptor));
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
request.execute();
}
@Test
void updateRequestBody() throws Exception {
final byte[] changedBody = "Foo".getBytes();
ClientHttpRequestInterceptor interceptor = (request, body, execution) -> execution.execute(request, changedBody);
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, Collections.singletonList(interceptor));
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
request.execute();
assertThat(Arrays.equals(changedBody, requestMock.getBodyAsBytes())).isTrue();
assertThat(requestMock.getHeaders().getContentLength()).isEqualTo(changedBody.length);
}
@Test
void multipleExecutions() throws Exception {
List<ClientHttpRequestInterceptor> interceptors = new ArrayList<>();
interceptors.add(new MultipleExecutionInterceptor());
interceptors.add(new NoOpInterceptor());
requestFactory = new InterceptingClientHttpRequestFactory(requestFactoryMock, interceptors);
ClientHttpRequest request = requestFactory.createRequest(URI.create("https://example.com"), HttpMethod.GET);
ClientHttpResponse response = request.execute();
assertThat(((NoOpInterceptor) interceptors.get(1)).invocationCount).isEqualTo(2);
assertThat(requestMock.isExecuted()).isTrue();
assertThat(response).isSameAs(responseMock);
}
private static | InterceptingClientHttpRequestFactoryTests |
java | apache__camel | components/camel-grpc/src/main/java/org/apache/camel/component/grpc/client/GrpcStreamingExchangeForwarder.java | {
"start": 1343,
"end": 5081
} | class ____ implements GrpcExchangeForwarder {
private final GrpcConfiguration configuration;
private final Object grpcStub;
private final Lock lock = new ReentrantLock();
private volatile StreamObserver<Object> currentStream;
private volatile StreamObserver<Object> currentResponseObserver;
public GrpcStreamingExchangeForwarder(GrpcConfiguration configuration, Object grpcStub) {
this.configuration = configuration;
this.grpcStub = grpcStub;
}
@Override
public boolean forward(Exchange exchange, StreamObserver<Object> responseObserver, AsyncCallback callback) {
Message message = exchange.getIn();
StreamObserver<Object> streamObserver = checkAndRecreateStreamObserver(responseObserver);
if (message.getHeaders().containsKey(GrpcConstants.GRPC_EVENT_TYPE_HEADER)) {
switch (message.getHeader(GrpcConstants.GRPC_EVENT_TYPE_HEADER, String.class)) {
case GrpcConstants.GRPC_EVENT_TYPE_ON_NEXT -> streamObserver.onNext(message.getBody());
case GrpcConstants.GRPC_EVENT_TYPE_ON_ERROR -> streamObserver.onError((Throwable) message.getBody());
case GrpcConstants.GRPC_EVENT_TYPE_ON_COMPLETED -> streamObserver.onCompleted();
default -> {
// NO-OP
}
}
} else {
streamObserver.onNext(message.getBody());
}
callback.done(true);
return true;
}
@Override
public void forward(Exchange exchange) {
throw new UnsupportedOperationException("Synchronous call is not supported in streaming mode");
}
@Override
public void shutdown() {
if (this.currentResponseObserver != null) {
checkAndRecreateStreamObserver(this.currentResponseObserver).onCompleted();
}
doCloseStream();
}
private StreamObserver<Object> checkAndRecreateStreamObserver(StreamObserver<Object> responseObserver) {
StreamObserver<Object> curStream = this.currentStream;
if (curStream == null) {
lock.lock();
try {
if (this.currentStream == null) {
this.currentResponseObserver = responseObserver;
this.currentStream = doCreateStream(responseObserver);
}
curStream = this.currentStream;
} finally {
lock.unlock();
}
}
StreamObserver<Object> curResponseObserver = this.currentResponseObserver;
if (curResponseObserver != null && !curResponseObserver.equals(responseObserver)) {
throw new IllegalArgumentException("This forwarder must always use the same response observer");
}
return curStream;
}
private void doCloseStream() {
lock.lock();
try {
this.currentStream = null;
this.currentResponseObserver = null;
} finally {
lock.unlock();
}
}
private StreamObserver<Object> doCreateStream(StreamObserver<Object> streamObserver) {
return GrpcUtils.invokeAsyncMethodStreaming(grpcStub, configuration.getMethod(), new StreamObserver<Object>() {
@Override
public void onNext(Object o) {
streamObserver.onNext(o);
}
@Override
public void onError(Throwable throwable) {
doCloseStream();
streamObserver.onError(throwable);
}
@Override
public void onCompleted() {
doCloseStream();
streamObserver.onCompleted();
}
});
}
}
| GrpcStreamingExchangeForwarder |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/ASTHelpersFindSuperMethodsTest.java | {
"start": 1707,
"end": 1824
} | interface ____ {
void foo();
}
""");
writeFile(
"Bar.java",
"""
| Foo |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/test/java/org/apache/hadoop/yarn/appcatalog/controller/AppStoreControllerTest.java | {
"start": 1476,
"end": 3336
} | class ____ {
private AppStoreController controller;
@BeforeEach
public void setUp() throws Exception {
this.controller = new AppStoreController();
}
@Test
void testGetRecommended() throws Exception {
AppStoreController ac = Mockito.mock(AppStoreController.class);
List<AppStoreEntry> actual = new ArrayList<AppStoreEntry>();
when(ac.get()).thenReturn(actual);
final List<AppStoreEntry> result = ac.get();
assertEquals(result, actual);
}
@Test
void testSearch() throws Exception {
String keyword = "jenkins";
AppStoreController ac = Mockito.mock(AppStoreController.class);
List<AppStoreEntry> expected = new ArrayList<AppStoreEntry>();
when(ac.search(keyword)).thenReturn(expected);
final List<AppStoreEntry> actual = ac.search(keyword);
assertEquals(expected, actual);
}
@Test
void testRegister() throws Exception {
AppStoreController ac = Mockito.mock(AppStoreController.class);
Application app = new Application();
app.setName("jenkins");
app.setOrganization("jenkins.org");
app.setDescription("This is a description");
app.setIcon("/css/img/feather.png");
Response expected = Response.ok().build();
when(ac.register(app)).thenReturn(Response.ok().build());
final Response actual = ac.register(app);
assertEquals(expected.getStatus(), actual.getStatus());
}
@Test
void testPathAnnotation() throws Exception {
assertNotNull(this.controller.getClass()
.getAnnotations());
assertThat(this.controller.getClass().isAnnotationPresent(Path.class))
.as("The controller has the annotation Path")
.isTrue();
final Path path = this.controller.getClass().getAnnotation(Path.class);
assertThat(path.value())
.as("The path is /app_store")
.isEqualTo("/app_store");
}
}
| AppStoreControllerTest |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/bean/override/mockito/MockitoBeanOverrideHandler.java | {
"start": 1811,
"end": 5483
} | class ____ extends AbstractMockitoBeanOverrideHandler {
private final Set<Class<?>> extraInterfaces;
private final Answers answers;
private final boolean serializable;
MockitoBeanOverrideHandler(ResolvableType typeToMock, MockitoBean mockitoBean) {
this(null, typeToMock, mockitoBean);
}
MockitoBeanOverrideHandler(@Nullable Field field, ResolvableType typeToMock, MockitoBean mockitoBean) {
this(field, typeToMock, (!mockitoBean.name().isBlank() ? mockitoBean.name() : null),
mockitoBean.contextName(), (mockitoBean.enforceOverride() ? REPLACE : REPLACE_OR_CREATE),
mockitoBean.reset(), mockitoBean.extraInterfaces(), mockitoBean.answers(), mockitoBean.serializable());
}
private MockitoBeanOverrideHandler(@Nullable Field field, ResolvableType typeToMock, @Nullable String beanName,
String contextName, BeanOverrideStrategy strategy, MockReset reset, Class<?>[] extraInterfaces,
Answers answers, boolean serializable) {
super(field, typeToMock, beanName, contextName, strategy, reset);
Assert.notNull(typeToMock, "'typeToMock' must not be null");
this.extraInterfaces = asClassSet(extraInterfaces);
this.answers = answers;
this.serializable = serializable;
}
private static Set<Class<?>> asClassSet(Class<?>[] classes) {
if (classes.length == 0) {
return Collections.emptySet();
}
Set<Class<?>> classSet = new LinkedHashSet<>(Arrays.asList(classes));
return Collections.unmodifiableSet(classSet);
}
/**
* Return the extra interfaces.
* @return the extra interfaces or an empty set
*/
Set<Class<?>> getExtraInterfaces() {
return this.extraInterfaces;
}
/**
* Return the {@link Answers}.
* @return the answers mode
*/
Answers getAnswers() {
return this.answers;
}
/**
* Determine if the mock is serializable.
* @return {@code true} if the mock is serializable
*/
boolean isSerializable() {
return this.serializable;
}
@Override
protected Object createOverrideInstance(String beanName,
@Nullable BeanDefinition existingBeanDefinition, @Nullable Object existingBeanInstance) {
return createMock(beanName);
}
@SuppressWarnings("unchecked")
private <T> T createMock(String name) {
MockSettings settings = MockReset.withSettings(getReset());
if (StringUtils.hasLength(name)) {
settings.name(name);
}
if (!this.extraInterfaces.isEmpty()) {
settings.extraInterfaces(ClassUtils.toClassArray(this.extraInterfaces));
}
settings.defaultAnswer(this.answers);
if (this.serializable) {
settings.serializable();
}
Class<?> targetType = getBeanType().resolve();
return (T) Mockito.mock(targetType, settings);
}
@Override
public boolean equals(@Nullable Object other) {
if (other == this) {
return true;
}
if (other == null || other.getClass() != getClass()) {
return false;
}
return (other instanceof MockitoBeanOverrideHandler that && super.equals(that) &&
(this.serializable == that.serializable) && (this.answers == that.answers) &&
Objects.equals(this.extraInterfaces, that.extraInterfaces));
}
@Override
public int hashCode() {
return super.hashCode() + Objects.hash(this.extraInterfaces, this.answers, this.serializable);
}
@Override
public String toString() {
return new ToStringCreator(this)
.append("field", getField())
.append("beanType", getBeanType())
.append("beanName", getBeanName())
.append("contextName", getContextName())
.append("strategy", getStrategy())
.append("reset", getReset())
.append("extraInterfaces", getExtraInterfaces())
.append("answers", getAnswers())
.append("serializable", isSerializable())
.toString();
}
}
| MockitoBeanOverrideHandler |
java | netty__netty | handler/src/main/java/io/netty/handler/ssl/JdkDefaultApplicationProtocolNegotiator.java | {
"start": 902,
"end": 2141
} | class ____ implements JdkApplicationProtocolNegotiator {
public static final JdkDefaultApplicationProtocolNegotiator INSTANCE =
new JdkDefaultApplicationProtocolNegotiator();
private static final SslEngineWrapperFactory DEFAULT_SSL_ENGINE_WRAPPER_FACTORY = new SslEngineWrapperFactory() {
@Override
public SSLEngine wrapSslEngine(SSLEngine engine,
JdkApplicationProtocolNegotiator applicationNegotiator, boolean isServer) {
return engine;
}
};
private JdkDefaultApplicationProtocolNegotiator() {
}
@Override
public SslEngineWrapperFactory wrapperFactory() {
return DEFAULT_SSL_ENGINE_WRAPPER_FACTORY;
}
@Override
public ProtocolSelectorFactory protocolSelectorFactory() {
throw new UnsupportedOperationException("Application protocol negotiation unsupported");
}
@Override
public ProtocolSelectionListenerFactory protocolListenerFactory() {
throw new UnsupportedOperationException("Application protocol negotiation unsupported");
}
@Override
public List<String> protocols() {
return Collections.emptyList();
}
}
| JdkDefaultApplicationProtocolNegotiator |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/executor/SimpleExecutor.java | {
"start": 1290,
"end": 3425
} | class ____ extends BaseExecutor {
public SimpleExecutor(Configuration configuration, Transaction transaction) {
super(configuration, transaction);
}
@Override
public int doUpdate(MappedStatement ms, Object parameter) throws SQLException {
Statement stmt = null;
try {
Configuration configuration = ms.getConfiguration();
StatementHandler handler = configuration.newStatementHandler(this, ms, parameter, RowBounds.DEFAULT, null, null);
stmt = prepareStatement(handler, ms.getStatementLog());
return handler.update(stmt);
} finally {
closeStatement(stmt);
}
}
@Override
public <E> List<E> doQuery(MappedStatement ms, Object parameter, RowBounds rowBounds, ResultHandler resultHandler,
BoundSql boundSql) throws SQLException {
Statement stmt = null;
try {
Configuration configuration = ms.getConfiguration();
StatementHandler handler = configuration.newStatementHandler(wrapper, ms, parameter, rowBounds, resultHandler,
boundSql);
stmt = prepareStatement(handler, ms.getStatementLog());
return handler.query(stmt, resultHandler);
} finally {
closeStatement(stmt);
}
}
@Override
protected <E> Cursor<E> doQueryCursor(MappedStatement ms, Object parameter, RowBounds rowBounds, BoundSql boundSql)
throws SQLException {
Configuration configuration = ms.getConfiguration();
StatementHandler handler = configuration.newStatementHandler(wrapper, ms, parameter, rowBounds, null, boundSql);
Statement stmt = prepareStatement(handler, ms.getStatementLog());
Cursor<E> cursor = handler.queryCursor(stmt);
stmt.closeOnCompletion();
return cursor;
}
@Override
public List<BatchResult> doFlushStatements(boolean isRollback) {
return Collections.emptyList();
}
private Statement prepareStatement(StatementHandler handler, Log statementLog) throws SQLException {
Statement stmt;
Connection connection = getConnection(statementLog);
stmt = handler.prepare(connection, transaction.getTimeout());
handler.parameterize(stmt);
return stmt;
}
}
| SimpleExecutor |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fix/BindyComplexOneToManyKeyValuePairUnMarshallTest.java | {
"start": 1351,
"end": 2953
} | class ____ extends CommonBindyTest {
@Test
@DirtiesContext
public void testUnMarshallMessage() throws Exception {
String message = "8=FIX 4.19=2034=135=049=INVMGR56=BRKR"
+ "1=BE.CHM.00111=CHM0001-0158=this is a camel - bindy test" + "22=448=BE000124567854=1"
+ "22=548=BE000987654354=2" + "22=648=BE000999999954=3" + "10=220";
String message2 = "8=FIX 4.19=2034=135=049=INVMGR56=BRKR"
+ "1=BE.CHM.00111=CHM0001-0158=this is a camel - bindy test10=220";
result.expectedMessageCount(2);
template.sendBody(message);
template.sendBody(message2);
result.assertIsSatisfied();
Order order1 = result.getReceivedExchanges().get(0).getIn().getBody(Order.class);
Order order2 = result.getReceivedExchanges().get(1).getIn().getBody(Order.class);
assertTrue(order1.toString().contains("BE.CHM.001, 11: CHM0001-01, 58: this is a camel - bindy test"));
assertTrue(order1.getSecurities().get(0).toString().contains("22: 4, 48: BE0001245678, 54: 1"));
assertTrue(order1.getSecurities().get(1).toString().contains("22: 5, 48: BE0009876543, 54: 2"));
assertTrue(order1.getSecurities().get(2).toString().contains("22: 6, 48: BE0009999999, 54: 3"));
assertTrue(order2.getHeader().toString().contains("FIX 4.1, 9: 20, 34: 1 , 35: 0, 49: INVMGR, 56: BRKR"));
assertTrue(order2.getTrailer().toString().contains("10: 220"));
}
public static | BindyComplexOneToManyKeyValuePairUnMarshallTest |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/exc/InvalidDefinitionException.java | {
"start": 685,
"end": 3658
} | class ____
extends DatabindException
{
protected final JavaType _type;
protected transient BeanDescription _beanDesc;
protected transient BeanPropertyDefinition _property;
protected InvalidDefinitionException(JsonParser p, String msg,
JavaType type) {
super(p, msg);
_type = type;
_beanDesc = null;
_property = null;
}
protected InvalidDefinitionException(JsonGenerator g, String msg,
JavaType type) {
super(g, msg);
_type = type;
_beanDesc = null;
_property = null;
}
protected InvalidDefinitionException(JsonParser p, String msg,
BeanDescription bean, BeanPropertyDefinition prop) {
super(p, msg);
_type = (bean == null) ? null : bean.getType();
_beanDesc = bean;
_property = prop;
}
protected InvalidDefinitionException(JsonGenerator g, String msg,
BeanDescription bean, BeanPropertyDefinition prop) {
super(g, msg);
_type = (bean == null) ? null : bean.getType();
_beanDesc = bean;
_property = prop;
}
public static InvalidDefinitionException from(JsonParser p, String msg,
BeanDescription bean, BeanPropertyDefinition prop) {
return new InvalidDefinitionException(p, msg, bean, prop);
}
public static InvalidDefinitionException from(JsonParser p, String msg,
BeanDescription.Supplier beanDescRef, BeanPropertyDefinition prop) {
return new InvalidDefinitionException(p, msg, beanDescRef.get(), prop);
}
public static InvalidDefinitionException from(JsonParser p, String msg,
JavaType type) {
return new InvalidDefinitionException(p, msg, type);
}
public static InvalidDefinitionException from(JsonGenerator g, String msg,
BeanDescription bean, BeanPropertyDefinition prop) {
return new InvalidDefinitionException(g, msg, bean, prop);
}
public static InvalidDefinitionException from(JsonGenerator g, String msg,
JavaType type) {
return new InvalidDefinitionException(g, msg, type);
}
/**
* Accessor for type fully resolved type that had the problem; this should always
* known and available, never <code>null</code>
*/
public JavaType getType() {
return _type;
}
/**
* Accessor for type definition (class) that had the definition problem, if any; may sometimes
* be undefined or unknown; if so, returns <code>null</code>.
*/
public BeanDescription getBeanDescription() {
return _beanDesc;
}
/**
* Accessor for property that had the definition problem if any
* (none, for example if the problem relates to type in general),
* if known. If not known (or relevant), returns <code>null</code>.
*/
public BeanPropertyDefinition getProperty() {
return _property;
}
}
| InvalidDefinitionException |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/AfterReturningAdviceBindingTests.java | {
"start": 4191,
"end": 4918
} | class ____ extends AdviceBindingTestAspect {
private AfterReturningAdviceBindingCollaborator getCollaborator() {
return (AfterReturningAdviceBindingCollaborator) this.collaborator;
}
public void oneString(String name) {
getCollaborator().oneString(name);
}
public void oneTestBeanArg(TestBean bean) {
getCollaborator().oneTestBeanArg(bean);
}
public void testBeanArrayArg(ITestBean[] beans) {
getCollaborator().testBeanArrayArg(beans);
}
public void objectMatchNoArgs() {
getCollaborator().objectMatchNoArgs();
}
public void stringMatchNoArgs() {
getCollaborator().stringMatchNoArgs();
}
public void oneInt(int result) {
getCollaborator().oneInt(result);
}
| AfterReturningAdviceBindingTestAspect |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest_ifnull_2.java | {
"start": 893,
"end": 1248
} | class ____ extends TestCase {
public void test_false() throws Exception {
WallProvider provider = new MySqlWallProvider();
assertFalse(provider.checkValid(//
"SELECT * FROM T WHERE FID = ? OR IFNULL(CAST(CURRENT_USER() AS CHAR))"));
assertEquals(1, provider.getTableStats().size());
}
}
| MySqlWallTest_ifnull_2 |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineCompareFilter.java | {
"start": 1207,
"end": 4036
} | class ____ extends TimelineFilter {
private TimelineCompareOp compareOp;
private String key;
private Object value;
// If comparison operator is NOT_EQUAL, this flag decides if we should return
// the entity if key does not exist.
private boolean keyMustExist = true;
public TimelineCompareFilter() {
}
public TimelineCompareFilter(TimelineCompareOp op, String key, Object val,
boolean keyMustExistFlag) {
this.compareOp = op;
this.key = key;
this.value = val;
if (op == TimelineCompareOp.NOT_EQUAL) {
this.keyMustExist = keyMustExistFlag;
} else {
this.keyMustExist = true;
}
}
public TimelineCompareFilter(TimelineCompareOp op, String key, Object val) {
this(op, key, val, true);
}
@Override
public TimelineFilterType getFilterType() {
return TimelineFilterType.COMPARE;
}
public TimelineCompareOp getCompareOp() {
return compareOp;
}
public String getKey() {
return key;
}
public void setKey(String keyToBeSet) {
key = keyToBeSet;
}
public Object getValue() {
return value;
}
public void setCompareOp(TimelineCompareOp timelineCompareOp,
boolean keyExistFlag) {
this.compareOp = timelineCompareOp;
if (timelineCompareOp == TimelineCompareOp.NOT_EQUAL) {
this.keyMustExist = keyExistFlag;
}
}
public void setValue(Object val) {
value = val;
}
public boolean getKeyMustExist() {
return keyMustExist;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((compareOp == null) ? 0 : compareOp.hashCode());
result = prime * result + ((key == null) ? 0 : key.hashCode());
result = prime * result + (keyMustExist ? 1231 : 1237);
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
TimelineCompareFilter other = (TimelineCompareFilter) obj;
if (compareOp != other.compareOp) {
return false;
}
if (key == null) {
if (other.key != null) {
return false;
}
} else if (!key.equals(other.key)) {
return false;
}
if (keyMustExist != other.keyMustExist) {
return false;
}
if (value == null) {
if (other.value != null) {
return false;
}
} else if (!value.equals(other.value)) {
return false;
}
return true;
}
@Override
public String toString() {
return String.format("%s (%s, %s:%s:%b)",
this.getClass().getSimpleName(), this.compareOp.name(),
this.key, this.value, this.keyMustExist);
}
} | TimelineCompareFilter |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1900/Issue1933.java | {
"start": 117,
"end": 981
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
OrderInfoVO v0 = JSON.parseObject("{\"orderStatus\":1}", OrderInfoVO.class);
assertEquals(1, v0.orderStatus);
assertEquals(0, v0.oldStatus);
assertEquals(0, v0.oldOrderStatus);
}
public void test_for_issue_1() throws Exception {
OrderInfoVO v0 = JSON.parseObject("{\"oldStatus\":1}", OrderInfoVO.class);
assertEquals(0, v0.orderStatus);
assertEquals(1, v0.oldStatus);
assertEquals(0, v0.oldOrderStatus);
}
public void test_for_issue_2() throws Exception {
OrderInfoVO v0 = JSON.parseObject("{\"oldOrderStatus\":1}", OrderInfoVO.class);
assertEquals(0, v0.orderStatus);
assertEquals(0, v0.oldStatus);
assertEquals(1, v0.oldOrderStatus);
}
public static | Issue1933 |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NonCanonicalTypeTest.java | {
"start": 4552,
"end": 4606
} | interface ____ extends D.N {}
}
| N |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/persister/entity/mutation/EntityTableMapping.java | {
"start": 10338,
"end": 11291
} | class ____ extends SelectableMappingImpl implements TableDetails.KeyColumn {
public KeyColumn(String tableName, SelectableMapping originalMapping) {
super(
tableName,
originalMapping.getSelectionExpression(),
null, // Leads to construction of a fresh path based on selection expression
originalMapping.getCustomReadExpression(),
originalMapping.getCustomWriteExpression(),
originalMapping.getColumnDefinition(),
originalMapping.getLength(),
originalMapping.getPrecision(),
originalMapping.getScale(),
originalMapping.getTemporalPrecision(),
originalMapping.isLob(),
originalMapping.isNullable(),
originalMapping.isInsertable(),
originalMapping.isUpdateable(),
originalMapping.isPartitioned(),
originalMapping.isFormula(),
originalMapping.getJdbcMapping()
);
}
@Override
public String getColumnName() {
return getSelectionExpression();
}
}
}
| KeyColumn |
java | apache__camel | components/camel-spring-parent/camel-spring-redis/src/generated/java/org/apache/camel/component/redis/RedisProducerInvokeOnHeaderFactory.java | {
"start": 391,
"end": 9971
} | class ____ implements InvokeOnHeaderStrategy {
@Override
public Object invoke(Object obj, String key, Exchange exchange, AsyncCallback callback) throws Exception {
org.apache.camel.component.redis.RedisProducer target = (org.apache.camel.component.redis.RedisProducer) obj;
switch (key) {
case "append":
case "APPEND": return target.invokeAppend(exchange);
case "blpop":
case "BLPOP": return target.invokeBlpop(exchange);
case "brpop":
case "BRPOP": return target.invokeBrpop(exchange);
case "brpoplpush":
case "BRPOPLPUSH": return target.invokeBrpoplpush(exchange);
case "decr":
case "DECR": return target.invokeDecr(exchange);
case "decrby":
case "DECRBY": return target.invokeDecrby(exchange);
case "del":
case "DEL": target.invokeDel(exchange); return null;
case "discard":
case "DISCARD": target.invokeDiscard(exchange); return null;
case "echo":
case "ECHO": return target.invokeEcho(exchange);
case "exec":
case "EXEC": target.invokeExec(exchange); return null;
case "exists":
case "EXISTS": return target.invokeExists(exchange);
case "expire":
case "EXPIRE": return target.invokeExpire(exchange);
case "expireat":
case "EXPIREAT": return target.invokeExpireat(exchange);
case "geoadd":
case "GEOADD": return target.invokeGeoadd(exchange);
case "geodist":
case "GEODIST": return target.invokeGeodist(exchange);
case "geohash":
case "GEOHASH": return target.invokeGeohash(exchange);
case "geopos":
case "GEOPOS": return target.invokeGeopos(exchange);
case "georadius":
case "GEORADIUS": return target.invokeGeoradius(exchange);
case "georadiusbymember":
case "GEORADIUSBYMEMBER": return target.invokeGeoradiusbymember(exchange);
case "get":
case "GET": return target.invokeGet(exchange);
case "getbit":
case "GETBIT": return target.invokeGetbit(exchange);
case "getrange":
case "GETRANGE": return target.invokeGetrange(exchange);
case "getset":
case "GETSET": return target.invokeGetset(exchange);
case "hdel":
case "HDEL": target.invokeHdel(exchange); return null;
case "hexists":
case "HEXISTS": return target.invokeHexists(exchange);
case "hget":
case "HGET": return target.invokeHget(exchange);
case "hgetall":
case "HGETALL": return target.invokeHgetAll(exchange);
case "hincrby":
case "HINCRBY": return target.invokeHincrBy(exchange);
case "hkeys":
case "HKEYS": return target.invokeHkeys(exchange);
case "hlen":
case "HLEN": return target.invokeHlen(exchange);
case "hmget":
case "HMGET": return target.invokeHmget(exchange);
case "hmset":
case "HMSET": target.invokeHmset(exchange); return null;
case "hset":
case "HSET": target.invokeHset(exchange); return null;
case "hsetnx":
case "HSETNX": return target.invokeHsetnx(exchange);
case "hvals":
case "HVALS": return target.invokeHvals(exchange);
case "incr":
case "INCR": return target.invokeIncr(exchange);
case "incrby":
case "INCRBY": return target.invokeIncrby(exchange);
case "keys":
case "KEYS": return target.invokeKeys(exchange);
case "lindex":
case "LINDEX": return target.invokeLindex(exchange);
case "linsert":
case "LINSERT": return target.invokeLinsert(exchange);
case "llen":
case "LLEN": return target.invokeLlen(exchange);
case "lpop":
case "LPOP": return target.invokeLpop(exchange);
case "lpush":
case "LPUSH": return target.invokeLpush(exchange);
case "lpushx":
case "LPUSHX": return target.invokeLpushx(exchange);
case "lrange":
case "LRANGE": return target.invokeLrange(exchange);
case "lrem":
case "LREM": return target.invokeLrem(exchange);
case "lset":
case "LSET": target.invokeLset(exchange); return null;
case "ltrim":
case "LTRIM": target.invokeLtrim(exchange); return null;
case "mget":
case "MGET": return target.invokeMget(exchange);
case "move":
case "MOVE": return target.invokeMove(exchange);
case "mset":
case "MSET": target.invokeMset(exchange); return null;
case "msetnx":
case "MSETNX": target.invokeMsetnx(exchange); return null;
case "multi":
case "MULTI": target.invokeMulti(exchange); return null;
case "persist":
case "PERSIST": return target.invokePersist(exchange);
case "pexpire":
case "PEXPIRE": return target.invokePexpire(exchange);
case "pexpireat":
case "PEXPIREAT": return target.invokePexpireat(exchange);
case "ping":
case "PING": return target.invokePing(exchange);
case "publish":
case "PUBLISH": target.invokePublish(exchange); return null;
case "quit":
case "QUIT": target.invokeQuit(exchange); return null;
case "randomkey":
case "RANDOMKEY": return target.invokeRandomkey(exchange);
case "rename":
case "RENAME": target.invokeRename(exchange); return null;
case "renamenx":
case "RENAMENX": return target.invokeRenamenx(exchange);
case "rpop":
case "RPOP": return target.invokeRpop(exchange);
case "rpoplpush":
case "RPOPLPUSH": return target.invokeRpoplpush(exchange);
case "rpush":
case "RPUSH": return target.invokeRpush(exchange);
case "rpushx":
case "RPUSHX": return target.invokeRpushx(exchange);
case "sadd":
case "SADD": return target.invokeSadd(exchange);
case "scard":
case "SCARD": return target.invokeScard(exchange);
case "sdiff":
case "SDIFF": return target.invokeSdiff(exchange);
case "sdiffstore":
case "SDIFFSTORE": target.invokeSdiffstore(exchange); return null;
case "set":
case "SET": target.invokeSet(exchange); return null;
case "setbit":
case "SETBIT": target.invokeSetbit(exchange); return null;
case "setex":
case "SETEX": target.invokeSetex(exchange); return null;
case "setnx":
case "SETNX": return target.invokeSetnx(exchange);
case "setrange":
case "SETRANGE": target.invokeSetrange(exchange); return null;
case "sinter":
case "SINTER": return target.invokeSinter(exchange);
case "sinterstore":
case "SINTERSTORE": target.invokeSinterstore(exchange); return null;
case "sismember":
case "SISMEMBER": return target.invokeSismember(exchange);
case "smembers":
case "SMEMBERS": return target.invokeSmembers(exchange);
case "smove":
case "SMOVE": return target.invokeSmove(exchange);
case "sort":
case "SORT": return target.invokeSort(exchange);
case "spop":
case "SPOP": return target.invokeSpop(exchange);
case "srandmember":
case "SRANDMEMBER": return target.invokeSrandmember(exchange);
case "srem":
case "SREM": return target.invokeSrem(exchange);
case "strlen":
case "STRLEN": return target.invokeStrlen(exchange);
case "sunion":
case "SUNION": return target.invokeSunion(exchange);
case "sunionstore":
case "SUNIONSTORE": target.invokeSunionstore(exchange); return null;
case "ttl":
case "TTL": return target.invokeTtl(exchange);
case "type":
case "TYPE": return target.invokeType(exchange);
case "unwatch":
case "UNWATCH": target.invokeUnwatch(exchange); return null;
case "watch":
case "WATCH": target.invokeWatch(exchange); return null;
case "zadd":
case "ZADD": return target.invokeZadd(exchange);
case "zcard":
case "ZCARD": return target.invokeZcard(exchange);
case "zcount":
case "ZCOUNT": return target.invokeZcount(exchange);
case "zincrby":
case "ZINCRBY": return target.invokeZincrby(exchange);
case "zinterstore":
case "ZINTERSTORE": target.invokeZinterstore(exchange); return null;
case "zrange":
case "ZRANGE": return target.invokeZrange(exchange);
case "zrangebyscore":
case "ZRANGEBYSCORE": return target.invokeZrangebyscore(exchange);
case "zrank":
case "ZRANK": return target.invokeZrank(exchange);
case "zrem":
case "ZREM": return target.invokeZrem(exchange);
case "zremrangebyrank":
case "ZREMRANGEBYRANK": target.invokeZremrangebyrank(exchange); return null;
case "zremrangebyscore":
case "ZREMRANGEBYSCORE": target.invokeZremrangebyscore(exchange); return null;
case "zrevrange":
case "ZREVRANGE": return target.invokeZrevrange(exchange);
case "zrevrangebyscore":
case "ZREVRANGEBYSCORE": return target.invokeZrevrangebyscore(exchange);
case "zrevrank":
case "ZREVRANK": return target.invokeZrevrank(exchange);
case "zunionstore":
case "ZUNIONSTORE": target.invokeZunionstore(exchange); return null;
default: return null;
}
}
}
| RedisProducerInvokeOnHeaderFactory |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/runners/util/TestMethodsFinderTest.java | {
"start": 316,
"end": 382
} | class ____ extends TestBase {
public static | TestMethodsFinderTest |
java | google__dagger | javatests/dagger/internal/codegen/ModuleFactoryGeneratorTest.java | {
"start": 1523,
"end": 7392
} | interface ____ {}");
@Parameters(name = "{0}")
public static ImmutableList<Object[]> parameters() {
return CompilerMode.TEST_PARAMETERS;
}
private final CompilerMode compilerMode;
public ModuleFactoryGeneratorTest(CompilerMode compilerMode) {
this.compilerMode = compilerMode;
}
private DaggerModuleMethodSubject assertThatMethodInUnannotatedClass(String method) {
return DaggerModuleMethodSubject.Factory.assertThatMethodInUnannotatedClass(method)
.withProcessorOptions(compilerMode.processorOptions());
}
private DaggerModuleMethodSubject assertThatModuleMethod(String method) {
return DaggerModuleMethodSubject.Factory.assertThatModuleMethod(method)
.withProcessorOptions(compilerMode.processorOptions());
}
private DaggerCompiler daggerCompiler(Source... sources) {
return CompilerTests.daggerCompiler(sources)
.withProcessingOptions(compilerMode.processorOptions());
}
@Rule public GoldenFileRule goldenFileRule = new GoldenFileRule();
// TODO(gak): add tests for invalid combinations of scope and qualifier annotations like we have
// for @Inject
@Test public void providesMethodNotInModule() {
assertThatMethodInUnannotatedClass("@Provides String provideString() { return null; }")
.hasError("@Provides methods can only be present within a @Module or @ProducerModule");
}
@Test public void providesMethodAbstract() {
assertThatModuleMethod("@Provides abstract String abstractMethod();")
.hasError("@Provides methods cannot be abstract");
}
@Test public void providesMethodPrivate() {
assertThatModuleMethod("@Provides private String privateMethod() { return null; }")
.hasError("@Provides methods cannot be private");
}
@Test public void providesMethodReturnVoid() {
assertThatModuleMethod("@Provides void voidMethod() {}")
.hasError("@Provides methods must return a value (not void)");
}
@Test
public void providesMethodReturnsProvider() {
assertThatModuleMethod("@Provides Provider<String> provideProvider() {}")
.hasError("@Provides methods must not return framework types");
}
@Test
public void providesMethodReturnsJakartaProvider() {
assertThatModuleMethod("@Provides jakarta.inject.Provider<String> provideProvider() {}")
.hasError("@Provides methods must not return framework types");
}
@Test
public void providesMethodReturnsDaggerInternalProvider() {
assertThatModuleMethod("@Provides dagger.internal.Provider<String> provideProvider() {}")
.hasError("@Provides methods must not return disallowed types");
}
@Test
public void providesIntoSetMethodReturnsDaggerInternalProvider() {
assertThatModuleMethod(
"@Provides @IntoSet dagger.internal.Provider<String> provideProvider() {}")
.hasError("@Provides methods must not return disallowed types");
}
@Test
public void providesMethodReturnsLazy() {
assertThatModuleMethod("@Provides Lazy<String> provideLazy() {}")
.hasError("@Provides methods must not return framework types");
}
@Test
public void providesMethodReturnsMembersInjector() {
assertThatModuleMethod("@Provides MembersInjector<String> provideMembersInjector() {}")
.hasError("@Provides methods must not return framework types");
}
@Test
public void providesMethodReturnsProducer() {
assertThatModuleMethod("@Provides Producer<String> provideProducer() {}")
.hasError("@Provides methods must not return framework types");
}
@Test
public void providesMethodReturnsProduced() {
assertThatModuleMethod("@Provides Produced<String> provideProduced() {}")
.hasError("@Provides methods must not return framework types");
}
@Test public void providesMethodWithTypeParameter() {
assertThatModuleMethod("@Provides <T> String typeParameter() { return null; }")
.hasError("@Provides methods may not have type parameters");
}
@Test public void providesMethodSetValuesWildcard() {
assertThatModuleMethod("@Provides @ElementsIntoSet Set<?> provideWildcard() { return null; }")
.hasError(
"@Provides methods must return a primitive, an array, a type variable, "
+ "or a declared type");
}
@Test public void providesMethodSetValuesRawSet() {
assertThatModuleMethod("@Provides @ElementsIntoSet Set provideSomething() { return null; }")
.hasError("@Provides methods annotated with @ElementsIntoSet cannot return a raw Set");
}
@Test public void providesElementsIntoSetMethodReturnsSetDaggerProvider() {
assertThatModuleMethod(
"@Provides @ElementsIntoSet Set<dagger.internal.Provider<String>> provideProvider() {}")
.hasError("@Provides methods must not return disallowed types");
}
@Test public void providesMethodSetValuesNotASet() {
assertThatModuleMethod(
"@Provides @ElementsIntoSet List<String> provideStrings() { return null; }")
.hasError("@Provides methods annotated with @ElementsIntoSet must return a Set");
}
@Test
public void bindsMethodReturnsProvider() {
assertThatModuleMethod("@Binds abstract Provider<Number> bindsProvider(Provider<Long> impl);")
.hasError("@Binds methods must not return framework types");
}
@Test
public void bindsMethodReturnsDaggerProvider() {
assertThatModuleMethod("@Binds abstract dagger.internal.Provider<Number> "
+ "bindsProvider(dagger.internal.Provider<Long> impl);")
.hasError("@Binds methods must not return disallowed types");
}
@Test public void modulesWithTypeParamsMustBeAbstract() {
Source moduleFile =
CompilerTests.javaSource(
"test.TestModule",
"package test;",
"",
"import dagger.Module;",
"",
"@Module",
"final | Nullable |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RTimeSeriesRx.java | {
"start": 1079,
"end": 13983
} | interface ____<V, L> extends RExpirableRx {
/**
* Returns iterator over collection elements
*
* @return iterator
*/
Flowable<V> iterator();
/**
* Adds element to this time-series collection
* by specified <code>timestamp</code>.
*
* @param timestamp object timestamp
* @param object object itself
* @return void
*/
Completable add(long timestamp, V object);
/**
* Adds element with <code>label</code> to this time-series collection
* by specified <code>timestamp</code>.
*
* @param timestamp object timestamp
* @param object object itself
* @param label object label
*/
Completable add(long timestamp, V object, L label);
/**
* Adds all elements contained in the specified map to this time-series collection.
* Map contains of timestamp mapped by object.
*
* @param objects - map of elements to add
* @return void
*/
Completable addAll(Map<Long, V> objects);
/**
* Adds all entries collection to this time-series collection.
*
* @param entries collection of time series entries
*/
Completable addAll(Collection<TimeSeriesEntry<V, L>> entries);
/**
* Use {@link #add(long, Object, Duration)} instead
*
* @param timestamp - object timestamp
* @param object - object itself
* @param timeToLive - time to live interval
* @param timeUnit - unit of time to live interval
* @return void
*/
@Deprecated
Completable add(long timestamp, V object, long timeToLive, TimeUnit timeUnit);
/**
* Adds element to this time-series collection
* by specified <code>timestamp</code>.
*
* @param timestamp object timestamp
* @param object object itself
* @param timeToLive time to live interval
*/
Completable add(long timestamp, V object, Duration timeToLive);
/**
 * Adds element with <code>label</code> to this time-series collection
 * by specified <code>timestamp</code> with defined time to live interval.
 *
 * @param timestamp object timestamp
 * @param object object itself
 * @param label object label
 * @param timeToLive time to live interval
 */
Completable add(long timestamp, V object, L label, Duration timeToLive);
/**
 * Adds all elements contained in the specified map to this time-series collection
 * with defined time to live interval.
 *
 * @deprecated use {@link #addAll(Map, Duration)} instead
 *
 * @param objects map of elements to add
 * @param timeToLive time to live interval
 * @param timeUnit unit of time to live interval
 */
@Deprecated
Completable addAll(Map<Long, V> objects, long timeToLive, TimeUnit timeUnit);
/**
 * Adds all elements contained in the specified map to this time-series collection.
 * Map contains objects mapped by timestamp.
 *
 * @param objects map of elements to add
 * @param timeToLive time to live interval
 */
Completable addAll(Map<Long, V> objects, Duration timeToLive);
/**
 * Adds all time series entries collection to this time-series collection.
 * Specified time to live interval applied to all entries defined in collection.
 *
 * @param entries collection of time series entries
 * @param timeToLive time to live interval
 */
Completable addAll(Collection<TimeSeriesEntry<V, L>> entries, Duration timeToLive);
/**
 * Returns size of this time-series collection.
 *
 * @return size
 */
Single<Integer> size();
/**
 * Returns object by specified <code>timestamp</code> or <code>null</code> if it doesn't exist.
 *
 * @param timestamp object timestamp
 * @return object
 */
Maybe<V> get(long timestamp);
/**
 * Returns time series entry by specified <code>timestamp</code> or <code>null</code> if it doesn't exist.
 *
 * @param timestamp object timestamp
 * @return time series entry
 */
Maybe<TimeSeriesEntry<V, L>> getEntry(long timestamp);
/**
 * Removes object by specified <code>timestamp</code>.
 *
 * @param timestamp object timestamp
 * @return <code>true</code> if an element was removed as a result of this call
 */
Single<Boolean> remove(long timestamp);
/**
 * Removes and returns object by specified <code>timestamp</code>.
 *
 * @param timestamp object timestamp
 * @return object or <code>null</code> if it doesn't exist
 */
Maybe<V> getAndRemove(long timestamp);
/**
 * Removes and returns entry by specified <code>timestamp</code>.
 *
 * @param timestamp object timestamp
 * @return entry or <code>null</code> if it doesn't exist
 */
Maybe<TimeSeriesEntry<V, L>> getAndRemoveEntry(long timestamp);
/**
 * Removes and returns the head elements of this time-series collection.
 *
 * @param count elements amount
 * @return collection of removed head elements
 */
Single<Collection<V>> pollFirst(int count);
/**
 * Removes and returns head entries of this time-series collection.
 *
 * @param count entries amount
 * @return collection of removed head entries
 */
Single<Collection<TimeSeriesEntry<V, L>>> pollFirstEntries(int count);
/**
 * Removes and returns the tail elements of this time-series collection.
 *
 * @param count elements amount
 * @return collection of removed tail elements
 */
Single<Collection<V>> pollLast(int count);
/**
 * Removes and returns tail entries of this time-series collection.
 *
 * @param count entries amount
 * @return collection of removed tail entries
 */
Single<Collection<TimeSeriesEntry<V, L>>> pollLastEntries(int count);
/**
 * Removes and returns the head element or {@code null} if this time-series collection is empty.
 *
 * @return the head element,
 *         or {@code null} if this time-series collection is empty
 */
Maybe<V> pollFirst();
/**
 * Removes and returns head entry or {@code null} if this time-series collection is empty.
 *
 * @return the head entry,
 *         or {@code null} if this time-series collection is empty
 */
Maybe<TimeSeriesEntry<V, L>> pollFirstEntry();
/**
 * Removes and returns the tail element or {@code null} if this time-series collection is empty.
 *
 * @return the tail element or {@code null} if this time-series collection is empty
 */
Maybe<V> pollLast();
/**
 * Removes and returns the tail entry or {@code null} if this time-series collection is empty.
 *
 * @return the tail entry or {@code null} if this time-series collection is empty
 */
Maybe<TimeSeriesEntry<V, L>> pollLastEntry();
/**
 * Returns the tail element or {@code null} if this time-series collection is empty.
 *
 * @return the tail element or {@code null} if this time-series collection is empty
 */
Maybe<V> last();
/**
 * Returns the tail entry or {@code null} if this time-series collection is empty.
 *
 * @return the tail entry or {@code null} if this time-series collection is empty
 */
Maybe<TimeSeriesEntry<V, L>> lastEntry();
/**
 * Returns the head element or {@code null} if this time-series collection is empty.
 *
 * @return the head element or {@code null} if this time-series collection is empty
 */
Maybe<V> first();
/**
 * Returns the head entry or {@code null} if this time-series collection is empty.
 *
 * @return the head entry or {@code null} if this time-series collection is empty
 */
Maybe<TimeSeriesEntry<V, L>> firstEntry();
/**
 * Returns timestamp of the head element or {@code null} if this time-series collection is empty.
 *
 * @return timestamp or {@code null} if this time-series collection is empty
 */
Single<Long> firstTimestamp();
/**
 * Returns timestamp of the tail element or {@code null} if this time-series collection is empty.
 *
 * @return timestamp or {@code null} if this time-series collection is empty
 */
Single<Long> lastTimestamp();
/**
 * Returns the tail elements of this time-series collection.
 *
 * @param count elements amount
 * @return the tail elements
 */
Single<Collection<V>> last(int count);
/**
 * Returns the tail entries of this time-series collection.
 *
 * @param count entries amount
 * @return the tail entries
 */
Single<Collection<TimeSeriesEntry<V, L>>> lastEntries(int count);
/**
 * Returns the head elements of this time-series collection.
 *
 * @param count elements amount
 * @return the head elements
 */
Single<Collection<V>> first(int count);
/**
 * Returns the head entries of this time-series collection.
 *
 * @param count entries amount
 * @return the head entries
 */
Single<Collection<TimeSeriesEntry<V, L>>> firstEntries(int count);
/**
 * Removes values within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @return number of removed elements
 */
Single<Integer> removeRange(long startTimestamp, long endTimestamp);
/**
 * Returns ordered elements of this time-series collection within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @return elements collection
 */
Single<Collection<V>> range(long startTimestamp, long endTimestamp);
/**
 * Returns ordered elements of this time-series collection within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @param limit result size limit
 * @return elements collection
 */
Single<Collection<V>> range(long startTimestamp, long endTimestamp, int limit);
/**
 * Returns elements of this time-series collection in reverse order within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @return elements collection
 */
Single<Collection<V>> rangeReversed(long startTimestamp, long endTimestamp);
/**
 * Returns elements of this time-series collection in reverse order within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @param limit result size limit
 * @return elements collection
 */
Single<Collection<V>> rangeReversed(long startTimestamp, long endTimestamp, int limit);
/**
 * Returns ordered entries of this time-series collection within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @return entries collection
 */
Single<Collection<TimeSeriesEntry<V, L>>> entryRange(long startTimestamp, long endTimestamp);
/**
 * Returns ordered entries of this time-series collection within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @param limit result size limit
 * @return entries collection
 */
Single<Collection<TimeSeriesEntry<V, L>>> entryRange(long startTimestamp, long endTimestamp, int limit);
/**
 * Returns entries of this time-series collection in reverse order within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @return entries collection
 */
Single<Collection<TimeSeriesEntry<V, L>>> entryRangeReversed(long startTimestamp, long endTimestamp);
/**
 * Returns entries of this time-series collection in reverse order within timestamp range. Including boundary values.
 *
 * @param startTimestamp start timestamp
 * @param endTimestamp end timestamp
 * @param limit result size limit
 * @return entries collection
 */
Single<Collection<TimeSeriesEntry<V, L>>> entryRangeReversed(long startTimestamp, long endTimestamp, int limit);
/**
 * Adds object event listener.
 *
 * @see org.redisson.api.listener.TrackingListener
 * @see org.redisson.api.listener.ScoredSortedSetAddListener
 * @see org.redisson.api.listener.ScoredSortedSetRemoveListener
 * @see org.redisson.api.ExpiredObjectListener
 * @see org.redisson.api.DeletedObjectListener
 *
 * @param listener object event listener
 * @return listener id
 */
@Override
Single<Integer> addListener(ObjectListener listener);
}
| RTimeSeriesRx |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/jdk8/OptionalUnwrappedTest.java | {
"start": 1435,
"end": 2974
} | class ____ {
public String name;
}
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testUntypedWithOptionalsNotNulls() throws Exception
{
String jsonExp = a2q("{'XX.name':'Bob'}");
String jsonAct = MAPPER.writeValueAsString(new OptionalParent());
assertEquals(jsonExp, jsonAct);
}
// for [datatype-jdk8#20]
@Test
public void testShouldSerializeUnwrappedOptional() throws Exception {
assertEquals("{\"id\":\"foo\"}",
MAPPER.writeValueAsString(new Bean("foo", Optional.<Bean2>empty())));
}
// for [datatype-jdk8#26]
@Test
public void testPropogatePrefixToSchema() throws Exception {
final AtomicReference<String> propertyName = new AtomicReference<>();
MAPPER.acceptJsonFormatVisitor(OptionalParent.class, new JsonFormatVisitorWrapper.Base(
new SerializationContextExt.Impl(new JsonFactory(),
MAPPER.serializationConfig(), null,
BeanSerializerFactory.instance, new SerializerCache())) {
@Override
public JsonObjectFormatVisitor expectObjectFormat(JavaType type) {
return new JsonObjectFormatVisitor.Base(getContext()) {
@Override
public void optionalProperty(BeanProperty prop) {
propertyName.set(prop.getName());
}
};
}
});
assertEquals("XX.name", propertyName.get());
}
}
| Bean2 |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/naturalid/mutable/cached/CachedMutableNaturalIdNonStrictReadWriteTest.java | {
"start": 963,
"end": 1054
} | class ____ extends CachedMutableNaturalIdTest {
}
| CachedMutableNaturalIdNonStrictReadWriteTest |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/HttpProducerContentTypeWithCharsetTest.java | {
"start": 1372,
"end": 4025
} | class ____ extends BaseHttpTest {
private static final String CONTENT_TYPE_WITHOUT_CHARSET
= "multipart/form-data;boundary=---------------------------j2radvtrk";
private static final String CONTENT_TYPE_WITH_CHARSET = CONTENT_TYPE_WITHOUT_CHARSET + ";charset=utf-8";
private HttpServer localServer;
private String endpointUrl;
@Override
public void setupResources() throws Exception {
localServer = ServerBootstrap.bootstrap()
.setCanonicalHostName("localhost").setHttpProcessor(getBasicHttpProcessor())
.setConnectionReuseStrategy(getConnectionReuseStrategy()).setResponseFactory(getHttpResponseFactory())
.setSslContext(getSSLContext())
.register("/content", (request, response, context) -> {
String contentType = request.getFirstHeader(Exchange.CONTENT_TYPE).getValue();
assertEquals(CONTENT_TYPE_WITH_CHARSET.replace(";", "; "), contentType);
assertFalse(request.containsHeader(Exchange.CONTENT_ENCODING));
response.setEntity(new StringEntity(contentType, StandardCharsets.US_ASCII));
response.setCode(HttpStatus.SC_OK);
}).create();
localServer.start();
endpointUrl = "http://localhost:" + localServer.getLocalPort();
}
@Override
public void cleanupResources() throws Exception {
if (localServer != null) {
localServer.stop();
}
}
@Test
void testContentTypeWithCharsetInCharsetHeader() {
Exchange out = template.request(endpointUrl + "/content", exchange -> {
exchange.getIn().setHeader(Exchange.CONTENT_TYPE, CONTENT_TYPE_WITHOUT_CHARSET);
exchange.getIn().setHeader(Exchange.CHARSET_NAME, "utf-8");
exchange.getIn().setBody("This is content");
});
assertNotNull(out);
assertFalse(out.isFailed(), "Should not fail");
assertEquals(CONTENT_TYPE_WITH_CHARSET.replace(";", "; "), out.getMessage().getBody(String.class));
}
@Test
void testContentTypeWithCharsetInContentTypeHeader() {
Exchange out = template.request(endpointUrl + "/content", exchange -> {
exchange.getIn().setHeader(Exchange.CONTENT_TYPE, CONTENT_TYPE_WITH_CHARSET);
exchange.getIn().setBody("This is content");
});
assertNotNull(out);
assertFalse(out.isFailed(), "Should not fail");
assertEquals(CONTENT_TYPE_WITH_CHARSET.replace(";", "; "), out.getMessage().getBody(String.class));
}
}
| HttpProducerContentTypeWithCharsetTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/naming/ImplicitEntityNameSource.java | {
"start": 254,
"end": 453
} | interface ____ extends ImplicitNameSource {
/**
* Access to the entity's name information
*
* @return The entity's name information
*/
EntityNaming getEntityNaming();
}
| ImplicitEntityNameSource |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/StreamCachingRecipientListTest.java | {
"start": 1011,
"end": 1876
} | class ____ extends ContextTestSupport {
@Test
public void testByteArrayInputStream() throws Exception {
getMockEndpoint("mock:foo").expectedBodiesReceived("<hello/>");
getMockEndpoint("mock:bar").expectedBodiesReceived("<hello/>");
getMockEndpoint("mock:baz").expectedBodiesReceived("<hello/>");
template.sendBodyAndHeader("direct:a", new ByteArrayInputStream("<hello/>".getBytes()), "mySlip",
"mock:foo,mock:bar,mock:baz");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.setStreamCaching(true);
from("direct:a").recipientList(header("mySlip"));
}
};
}
}
| StreamCachingRecipientListTest |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/jakartaData/java/org/hibernate/processor/test/data/innerclass/Person.java | {
"start": 420,
"end": 707
} | class ____ {
private String name;
private String snn;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getSnn() {
return snn;
}
public void setSnn(String snn) {
this.snn = snn;
}
}
}
| PersonId |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/ttl/TtlNonFixedLenElemListStateTestContext.java | {
"start": 1083,
"end": 1764
} | class ____ extends TtlListStateTestContextBase<String> {
TtlNonFixedLenElemListStateTestContext() {
super(StringSerializer.INSTANCE);
}
@Override
void initTestValues() {
emptyValue = Collections.emptyList();
updateEmpty = Arrays.asList("5", "7", "10");
updateUnexpired = Arrays.asList("8", "9", "11");
updateExpired = Arrays.asList("1", "4");
getUpdateEmpty = updateEmpty;
getUnexpired = updateUnexpired;
getUpdateExpired = updateExpired;
}
@Override
String generateRandomElement(int i) {
return Integer.toString(RANDOM.nextInt(100));
}
}
| TtlNonFixedLenElemListStateTestContext |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/UnremovedConfigMappingTest.java | {
"start": 2028,
"end": 2091
} | class ____ {
String prop;
}
}
| UnremovedConfigProperties |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/client/RestTestClient.java | {
"start": 3002,
"end": 6777
} | interface ____ {
/**
* The name of a request header used to assign a unique ID to every request
* performed through the {@code RestTestClient}. This can be useful to
* store contextual information under that ID at all phases of request
* processing (for example, from a server-side component) and later look up
* that information once an {@link ExchangeResult} is available.
*/
String RESTTESTCLIENT_REQUEST_ID = "RestTestClient-Request-Id";
/**
* Prepare an HTTP GET request.
* @return a spec for specifying the target URL
*/
RequestHeadersUriSpec<?> get();
/**
* Prepare an HTTP HEAD request.
* @return a spec for specifying the target URL
*/
RequestHeadersUriSpec<?> head();
/**
* Prepare an HTTP POST request.
* @return a spec for specifying the target URL
*/
RequestBodyUriSpec post();
/**
* Prepare an HTTP PUT request.
* @return a spec for specifying the target URL
*/
RequestBodyUriSpec put();
/**
* Prepare an HTTP PATCH request.
* @return a spec for specifying the target URL
*/
RequestBodyUriSpec patch();
/**
* Prepare an HTTP DELETE request.
* @return a spec for specifying the target URL
*/
RequestHeadersUriSpec<?> delete();
/**
* Prepare an HTTP OPTIONS request.
* @return a spec for specifying the target URL
*/
RequestHeadersUriSpec<?> options();
/**
* Prepare a request for the specified {@code HttpMethod}.
* @return a spec for specifying the target URL
*/
RequestBodyUriSpec method(HttpMethod method);
/**
* Return a builder to mutate properties of this test client.
*/
<B extends Builder<B>> Builder<B> mutate();
/**
* Begin creating a {@link RestTestClient} with a {@linkplain MockMvcBuilders#standaloneSetup
* Standalone MockMvc setup}.
*/
static StandaloneSetupBuilder bindToController(Object... controllers) {
return new DefaultRestTestClientBuilder.DefaultStandaloneSetupBuilder(controllers);
}
/**
* Begin creating a {@link RestTestClient} with a {@linkplain MockMvcBuilders#routerFunctions
* RouterFunction MockMvc setup}.
*/
static RouterFunctionSetupBuilder bindToRouterFunction(RouterFunction<?>... routerFunctions) {
return new DefaultRestTestClientBuilder.DefaultRouterFunctionSetupBuilder(routerFunctions);
}
/**
* Begin creating a {@link RestTestClient} with a {@linkplain MockMvcBuilders#webAppContextSetup
* WebApplicationContext MockMvc setup}.
*/
static WebAppContextSetupBuilder bindToApplicationContext(WebApplicationContext context) {
return new DefaultRestTestClientBuilder.DefaultWebAppContextSetupBuilder(context);
}
/**
* Begin creating a {@link RestTestClient} by providing an already
* initialized {@link MockMvc} instance to use as the server.
*/
static Builder<?> bindTo(MockMvc mockMvc) {
ClientHttpRequestFactory requestFactory = new MockMvcClientHttpRequestFactory(mockMvc);
return RestTestClient.bindToServer(requestFactory);
}
/**
* This server setup option allows you to connect to a live server.
* <p><pre class="code">
* RestTestClient client = RestTestClient.bindToServer()
* .baseUrl("http://localhost:8080")
* .build();
* </pre>
* @return chained API to customize client config
*/
static Builder<?> bindToServer() {
return new DefaultRestTestClientBuilder<>();
}
/**
* A variant of {@link #bindToServer()} with a pre-configured request factory.
* @return chained API to customize client config
*/
static Builder<?> bindToServer(ClientHttpRequestFactory requestFactory) {
return new DefaultRestTestClientBuilder<>(RestClient.builder().requestFactory(requestFactory));
}
/**
* Steps to customize the underlying {@link RestClient} via {@link RestClient.Builder}.
* @param <B> the type of builder
*/
| RestTestClient |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/reflect/InheritanceUtils.java | {
"start": 1370,
"end": 2014
} | class ____ null)
* @since 3.2
*/
public static int distance(final Class<?> child, final Class<?> parent) {
if (child == null || parent == null) {
return -1;
}
if (child.equals(parent)) {
return 0;
}
final Class<?> cParent = child.getSuperclass();
int d = BooleanUtils.toInteger(parent.equals(cParent));
if (d == 1) {
return d;
}
d += distance(cParent, parent);
return d > 0 ? d + 1 : -1;
}
/**
* {@link InheritanceUtils} instances should NOT be constructed in standard programming.
* Instead, the | is |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/project/AbstractProjectResolver.java | {
"start": 1153,
"end": 4254
} | class ____ implements ProjectResolver {
private final Supplier<ThreadContext> threadContext;
public AbstractProjectResolver(Supplier<ThreadContext> threadContext) {
this.threadContext = threadContext;
}
/**
* Subclasses should override this method to handle the case where no project id is specified in the thread context.
* This may return a default project id or throw an exception
*/
protected abstract ProjectId getFallbackProjectId();
/**
* Returns {@code true} if the current request is permitted to perform operations on all projects, {@code false} otherwise.
*/
protected abstract boolean allowAccessToAllProjects(ThreadContext threadContext);
@Override
public ProjectId getProjectId() {
final String headerValue = getProjectIdFromThreadContext();
if (headerValue == null) {
return getFallbackProjectId();
}
return ProjectId.fromId(headerValue);
}
@Override
public Collection<ProjectId> getProjectIds(ClusterState clusterState) {
var headerValue = getProjectIdFromThreadContext();
if (headerValue == null) {
if (allowAccessToAllProjects(threadContext.get())) {
return clusterState.metadata().projects().keySet();
} else {
throw new ElasticsearchSecurityException("No project id supplied, and not permitted to access all projects");
}
}
return Set.of(findProject(clusterState.metadata(), headerValue).id());
}
@Override
public <E extends Exception> void executeOnProject(ProjectId projectId, CheckedRunnable<E> body) throws E {
final ThreadContext threadContext = this.threadContext.get();
final String existingProjectId = threadContext.getHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER);
if (existingProjectId != null) {
// We intentionally do not allow callers to override an existing project-id
// This method may only be called from a non-project context (e.g. a cluster state listener)
throw new IllegalStateException(
"There is already a project-id [" + existingProjectId + "] in the thread-context, cannot set it to [" + projectId + "]"
);
}
try (var ignoreAndRestore = threadContext.newStoredContext()) {
threadContext.putHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER, projectId.id());
body.run();
}
}
@Override
public boolean supportsMultipleProjects() {
return true;
}
protected static ProjectMetadata findProject(Metadata metadata, String headerValue) {
var project = metadata.projects().get(ProjectId.fromId(headerValue));
if (project == null) {
throw new IllegalArgumentException("Could not find project with id [" + headerValue + "]");
}
return project;
}
protected String getProjectIdFromThreadContext() {
return threadContext.get().getHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER);
}
}
| AbstractProjectResolver |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/BatchPreparedStatementSetter.java | {
"start": 1274,
"end": 1516
} | class ____
* catch and handle SQLExceptions appropriately.
*
* @author Rod Johnson
* @since March 2, 2003
* @see JdbcTemplate#batchUpdate(String, BatchPreparedStatementSetter)
* @see InterruptibleBatchPreparedStatementSetter
*/
public | will |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/autoflush/HibernateAutoflushTest.java | {
"start": 3805,
"end": 3964
} | class ____ {
@Id
long id;
String typeOfThing;
public Thing(String typeOfThing) {
this.typeOfThing = typeOfThing;
}
public Thing() {
}
}
}
| Thing |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scripting/groovy/GroovyScriptFactoryTests.java | {
"start": 16555,
"end": 26121
} | class ____ {}");
given(script.suggestedClassName()).willReturn("someName");
GroovyScriptFactory factory = new GroovyScriptFactory("a script source locator (doesn't matter here)");
Object scriptedObject = factory.getScriptedObject(script);
assertThat(scriptedObject).isNotNull();
}
@Test
void testGetScriptedObjectDoesChokeOnNullScriptSourceBeingPassedIn() {
GroovyScriptFactory factory = new GroovyScriptFactory("a script source locator (doesn't matter here)");
assertThatNullPointerException().as("NullPointerException as per contract ('null' ScriptSource supplied)")
.isThrownBy(() -> factory.getScriptedObject(null));
}
@Test
void testResourceScriptFromTag() {
ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd.xml", getClass());
Messenger messenger = (Messenger) ctx.getBean("messenger");
CallCounter countingAspect = (CallCounter) ctx.getBean("getMessageAspect");
assertThat(AopUtils.isAopProxy(messenger)).isTrue();
boolean condition = messenger instanceof Refreshable;
assertThat(condition).isFalse();
assertThat(countingAspect.getCalls()).isEqualTo(0);
assertThat(messenger.getMessage()).isEqualTo("Hello World!");
assertThat(countingAspect.getCalls()).isEqualTo(1);
ctx.close();
assertThat(countingAspect.getCalls()).isEqualTo(-200);
}
@Test
void testPrototypeScriptFromTag() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd.xml", getClass());
ConfigurableMessenger messenger = (ConfigurableMessenger) ctx.getBean("messengerPrototype");
ConfigurableMessenger messenger2 = (ConfigurableMessenger) ctx.getBean("messengerPrototype");
assertThat(messenger2).isNotSameAs(messenger);
assertThat(messenger2.getClass()).isSameAs(messenger.getClass());
assertThat(messenger.getMessage()).isEqualTo("Hello World!");
assertThat(messenger2.getMessage()).isEqualTo("Hello World!");
messenger.setMessage("Bye World!");
messenger2.setMessage("Byebye World!");
assertThat(messenger.getMessage()).isEqualTo("Bye World!");
assertThat(messenger2.getMessage()).isEqualTo("Byebye World!");
}
@Test
void testInlineScriptFromTag() {
ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd.xml", getClass());
BeanDefinition bd = ctx.getBeanFactory().getBeanDefinition("calculator");
assertThat(ObjectUtils.containsElement(bd.getDependsOn(), "messenger")).isTrue();
Calculator calculator = (Calculator) ctx.getBean("calculator");
assertThat(calculator).isNotNull();
boolean condition = calculator instanceof Refreshable;
assertThat(condition).isFalse();
}
@Test
void testRefreshableFromTag() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd.xml", getClass());
assertThat(Arrays.asList(ctx.getBeanNamesForType(Messenger.class))).contains("refreshableMessenger");
Messenger messenger = (Messenger) ctx.getBean("refreshableMessenger");
CallCounter countingAspect = (CallCounter) ctx.getBean("getMessageAspect");
assertThat(AopUtils.isAopProxy(messenger)).isTrue();
boolean condition = messenger instanceof Refreshable;
assertThat(condition).isTrue();
assertThat(countingAspect.getCalls()).isEqualTo(0);
assertThat(messenger.getMessage()).isEqualTo("Hello World!");
assertThat(countingAspect.getCalls()).isEqualTo(1);
assertThat(ctx.getBeansOfType(Messenger.class)).containsValue(messenger);
}
@Test // SPR-6268
public void testRefreshableFromTagProxyTargetClass() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-proxy-target-class.xml",
getClass());
assertThat(Arrays.asList(ctx.getBeanNamesForType(Messenger.class))).contains("refreshableMessenger");
Messenger messenger = (Messenger) ctx.getBean("refreshableMessenger");
assertThat(AopUtils.isAopProxy(messenger)).isTrue();
boolean condition = messenger instanceof Refreshable;
assertThat(condition).isTrue();
assertThat(messenger.getMessage()).isEqualTo("Hello World!");
assertThat(ctx.getBeansOfType(ConcreteMessenger.class)).containsValue((ConcreteMessenger) messenger);
// Check that AnnotationUtils works with concrete proxied script classes
assertThat(AnnotationUtils.findAnnotation(messenger.getClass(), Component.class)).isNotNull();
}
@Test // SPR-6268
public void testProxyTargetClassNotAllowedIfNotGroovy() {
try {
new ClassPathXmlApplicationContext("groovy-with-xsd-proxy-target-class.xml", getClass());
}
catch (BeanCreationException ex) {
assertThat(ex.getMessage()).contains("Cannot use proxyTargetClass=true");
}
}
@Test
void testAnonymousScriptDetected() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd.xml", getClass());
Map<?, Messenger> beans = ctx.getBeansOfType(Messenger.class);
assertThat(beans).hasSize(4);
assertThat(ctx.getBean(MyBytecodeProcessor.class).processed.contains(
"org.springframework.scripting.groovy.GroovyMessenger2")).isTrue();
}
@Test
void testJsr223FromTag() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
assertThat(Arrays.asList(ctx.getBeanNamesForType(Messenger.class))).contains("messenger");
Messenger messenger = (Messenger) ctx.getBean("messenger");
assertThat(AopUtils.isAopProxy(messenger)).isFalse();
assertThat(messenger.getMessage()).isEqualTo("Hello World!");
}
@Test
void testJsr223FromTagWithInterface() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
assertThat(Arrays.asList(ctx.getBeanNamesForType(Messenger.class))).contains("messengerWithInterface");
Messenger messenger = (Messenger) ctx.getBean("messengerWithInterface");
assertThat(AopUtils.isAopProxy(messenger)).isFalse();
}
@Test
void testRefreshableJsr223FromTag() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
assertThat(Arrays.asList(ctx.getBeanNamesForType(Messenger.class))).contains("refreshableMessenger");
Messenger messenger = (Messenger) ctx.getBean("refreshableMessenger");
assertThat(AopUtils.isAopProxy(messenger)).isTrue();
boolean condition = messenger instanceof Refreshable;
assertThat(condition).isTrue();
assertThat(messenger.getMessage()).isEqualTo("Hello World!");
}
@Test
void testInlineJsr223FromTag() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
assertThat(Arrays.asList(ctx.getBeanNamesForType(Messenger.class))).contains("inlineMessenger");
Messenger messenger = (Messenger) ctx.getBean("inlineMessenger");
assertThat(AopUtils.isAopProxy(messenger)).isFalse();
}
@Test
void testInlineJsr223FromTagWithInterface() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
assertThat(Arrays.asList(ctx.getBeanNamesForType(Messenger.class))).contains("inlineMessengerWithInterface");
Messenger messenger = (Messenger) ctx.getBean("inlineMessengerWithInterface");
assertThat(AopUtils.isAopProxy(messenger)).isFalse();
}
/**
* Tests the SPR-2098 bug whereby no more than 1 property element could be
* passed to a scripted bean :(
*/
@Test
void testCanPassInMoreThanOneProperty() {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-multiple-properties.xml", getClass());
TestBean tb = (TestBean) ctx.getBean("testBean");
ContextScriptBean bean = (ContextScriptBean) ctx.getBean("bean");
assertThat(bean.getName()).as("The first property ain't bein' injected.").isEqualTo("Sophie Marceau");
assertThat(bean.getAge()).as("The second property ain't bein' injected.").isEqualTo(31);
assertThat(bean.getTestBean()).isEqualTo(tb);
assertThat(bean.getApplicationContext()).isEqualTo(ctx);
ContextScriptBean bean2 = (ContextScriptBean) ctx.getBean("bean2");
assertThat(bean2.getTestBean()).isEqualTo(tb);
assertThat(bean2.getApplicationContext()).isEqualTo(ctx);
}
@Test
void testMetaClassWithBeans() {
testMetaClass("org/springframework/scripting/groovy/calculators.xml");
}
@Test
void testMetaClassWithXsd() {
testMetaClass("org/springframework/scripting/groovy/calculators-with-xsd.xml");
}
private void testMetaClass(String xmlFile) {
// expect the exception we threw in the custom metaclass to show it got invoked
ApplicationContext ctx = new ClassPathXmlApplicationContext(xmlFile);
Calculator calc = (Calculator) ctx.getBean("delegatingCalculator");
assertThatIllegalStateException().isThrownBy(() ->
calc.add(1, 2))
.withMessage("Gotcha");
}
@Test
void testFactoryBean() {
ApplicationContext context = new ClassPathXmlApplicationContext("groovyContext.xml", getClass());
Object factory = context.getBean("&factory");
boolean condition1 = factory instanceof FactoryBean;
assertThat(condition1).isTrue();
Object result = context.getBean("factory");
boolean condition = result instanceof String;
assertThat(condition).isTrue();
assertThat(result).isEqualTo("test");
}
@Test
void testRefreshableFactoryBean() {
ApplicationContext context = new ClassPathXmlApplicationContext("groovyContext.xml", getClass());
Object factory = context.getBean("&refreshableFactory");
boolean condition1 = factory instanceof FactoryBean;
assertThat(condition1).isTrue();
Object result = context.getBean("refreshableFactory");
boolean condition = result instanceof String;
assertThat(condition).isTrue();
assertThat(result).isEqualTo("test");
}
public static | Bar |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/StateSnapshotTransformer.java | {
"start": 1989,
"end": 2751
} | enum ____ {
/** Transform all entries. */
TRANSFORM_ALL,
/**
* Skip first null entries.
*
* <p>While traversing collection entries, as optimisation, stops transforming if
* encounters first non-null included entry and returns it plus the rest untouched.
*/
STOP_ON_FIRST_INCLUDED
}
default TransformStrategy getFilterStrategy() {
return TransformStrategy.TRANSFORM_ALL;
}
}
/**
* This factory creates state transformers depending on the form of values to transform.
*
* <p>If there is no transforming needed, the factory methods return {@code Optional.empty()}.
*/
| TransformStrategy |
java | apache__kafka | clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/security/GroupAuthorizerIntegrationTest.java | {
"start": 6158,
"end": 21349
} | class ____ extends DefaultKafkaPrincipalBuilder {
public GroupPrincipalBuilder() {
super(null, null);
}
@Override
public KafkaPrincipal build(AuthenticationContext context) {
String listenerName = context.listenerName();
return switch (listenerName) {
case BROKER_LISTENER_NAME, CONTROLLER_LISTENER_NAME -> BROKER_PRINCIPAL;
case CLIENT_LISTENER_NAME -> CLIENT_PRINCIPAL;
default -> throw new IllegalArgumentException("No principal mapped to listener " + listenerName);
};
}
}
private AccessControlEntry createAcl(AclOperation aclOperation, AclPermissionType aclPermissionType, KafkaPrincipal principal) {
return new AccessControlEntry(
principal.toString(),
WILDCARD_HOST,
aclOperation,
aclPermissionType
);
}
private void addAndVerifyAcls(Set<AccessControlEntry> acls, ResourcePattern resource, ClusterInstance clusterInstance) throws InterruptedException {
List<AclBinding> aclBindings = acls.stream()
.map(acl -> new AclBinding(resource, acl))
.toList();
Authorizer authorizer = getAuthorizer(clusterInstance);
authorizer.createAcls(ANONYMOUS_CONTEXT, aclBindings)
.forEach(future -> {
try {
future.toCompletableFuture().get();
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException("Failed to create ACLs", e);
}
});
AclBindingFilter aclBindingFilter = new AclBindingFilter(resource.toFilter(), AccessControlEntryFilter.ANY);
clusterInstance.waitAcls(aclBindingFilter, acls);
}
private void removeAndVerifyAcls(Set<AccessControlEntry> deleteAcls, ResourcePattern resource, ClusterInstance clusterInstance) throws InterruptedException {
List<AclBindingFilter> aclBindingFilters = deleteAcls.stream()
.map(acl -> new AclBindingFilter(resource.toFilter(), acl.toFilter()))
.toList();
Authorizer authorizer = getAuthorizer(clusterInstance);
authorizer.deleteAcls(ANONYMOUS_CONTEXT, aclBindingFilters)
.forEach(future -> {
try {
future.toCompletableFuture().get();
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException("Failed to delete ACLs", e);
}
});
AclBindingFilter aclBindingFilter = new AclBindingFilter(resource.toFilter(), AccessControlEntryFilter.ANY);
TestUtils.waitForCondition(() -> {
Set<AccessControlEntry> remainingAclEntries = new HashSet<>();
authorizer.acls(aclBindingFilter).forEach(aclBinding -> remainingAclEntries.add(aclBinding.entry()));
return deleteAcls.stream().noneMatch(remainingAclEntries::contains);
}, "Failed to verify ACLs deletion");
}
static final AuthorizableRequestContext ANONYMOUS_CONTEXT = new AuthorizableRequestContext() {
@Override
public String listenerName() {
return "";
}
@Override
public SecurityProtocol securityProtocol() {
return SecurityProtocol.PLAINTEXT;
}
@Override
public KafkaPrincipal principal() {
return KafkaPrincipal.ANONYMOUS;
}
@Override
public InetAddress clientAddress() {
return null;
}
@Override
public int requestType() {
return 0;
}
@Override
public int requestVersion() {
return 0;
}
@Override
public String clientId() {
return "";
}
@Override
public int correlationId() {
return 0;
}
};
@ClusterTest
public void testUnauthorizedProduceAndConsumeWithClassicConsumer(ClusterInstance clusterInstance) throws InterruptedException {
testUnauthorizedProduceAndConsume(clusterInstance, GroupProtocol.CLASSIC);
}
@ClusterTest
public void testUnauthorizedProduceAndConsumeWithAsyncConsumer(ClusterInstance clusterInstance) throws InterruptedException {
testUnauthorizedProduceAndConsume(clusterInstance, GroupProtocol.CONSUMER);
}
public void testUnauthorizedProduceAndConsume(ClusterInstance clusterInstance, GroupProtocol groupProtocol) throws InterruptedException {
setup(clusterInstance);
String topic = "topic";
String group = "group";
addAndVerifyAcls(
Set.of(createAcl(AclOperation.CREATE, AclPermissionType.ALLOW, CLIENT_PRINCIPAL)),
new ResourcePattern(ResourceType.TOPIC, topic, PatternType.LITERAL),
clusterInstance
);
addAndVerifyAcls(
Set.of(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL)),
new ResourcePattern(ResourceType.GROUP, group, PatternType.LITERAL),
clusterInstance
);
Producer<byte[], byte[]> producer = clusterInstance.producer();
Consumer<byte[], byte[]> consumer = clusterInstance.consumer(Map.of(
GROUP_PROTOCOL_CONFIG, groupProtocol.name.toLowerCase(Locale.ROOT),
ConsumerConfig.GROUP_ID_CONFIG, group
));
try {
clusterInstance.createTopic(topic, 1, (short) 1);
ExecutionException produceException = assertThrows(
ExecutionException.class,
() -> producer.send(new ProducerRecord<>(topic, "message".getBytes())).get()
);
Throwable cause = produceException.getCause();
assertInstanceOf(TopicAuthorizationException.class, cause);
TopicAuthorizationException topicAuthException = (TopicAuthorizationException) cause;
assertEquals(Set.of(topic), topicAuthException.unauthorizedTopics());
TopicPartition topicPartition = new TopicPartition(topic, 0);
consumer.assign(Collections.singletonList(topicPartition));
TopicAuthorizationException consumeException = assertThrows(
TopicAuthorizationException.class,
() -> consumer.poll(Duration.ofSeconds(15))
);
assertEquals(consumeException.unauthorizedTopics(), topicAuthException.unauthorizedTopics());
} finally {
producer.close(Duration.ZERO);
consumer.close();
}
}
@ClusterTest
public void testClassicConsumeUnsubscribeWithGroupPermission(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testConsumeUnsubscribeWithOrWithoutGroupPermission(clusterInstance, GroupProtocol.CLASSIC, true);
}
@ClusterTest
public void testAsyncConsumeUnsubscribeWithGroupPermission(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testConsumeUnsubscribeWithOrWithoutGroupPermission(clusterInstance, GroupProtocol.CONSUMER, true);
}
@ClusterTest
public void testClassicConsumeUnsubscribeWithoutGroupPermission(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testConsumeUnsubscribeWithOrWithoutGroupPermission(clusterInstance, GroupProtocol.CLASSIC, false);
}
@ClusterTest
public void testAsyncConsumeUnsubscribeWithoutGroupPermission(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testConsumeUnsubscribeWithOrWithoutGroupPermission(clusterInstance, GroupProtocol.CONSUMER, false);
}
private void testConsumeUnsubscribeWithOrWithoutGroupPermission(ClusterInstance clusterInstance, GroupProtocol groupProtocol, boolean withGroupPermission) throws InterruptedException, ExecutionException {
setup(clusterInstance);
String topic = "topic";
String group = "group";
// allow topic read/write permission to poll/send record
Set<AccessControlEntry> acls = new HashSet<>();
acls.add(createAcl(AclOperation.CREATE, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
acls.add(createAcl(AclOperation.WRITE, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
acls.add(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
addAndVerifyAcls(
acls,
new ResourcePattern(ResourceType.TOPIC, topic, PatternType.LITERAL),
clusterInstance
);
addAndVerifyAcls(
Set.of(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL)),
new ResourcePattern(ResourceType.GROUP, group, PatternType.LITERAL),
clusterInstance
);
try (Producer<byte[], byte[]> producer = clusterInstance.producer();
Consumer<byte[], byte[]> consumer = clusterInstance.consumer(Map.of(
ConsumerConfig.GROUP_ID_CONFIG, group,
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false",
GROUP_PROTOCOL_CONFIG, groupProtocol.name.toLowerCase(Locale.ROOT)))
) {
clusterInstance.createTopic(topic, 1, (short) 1);
producer.send(new ProducerRecord<>(topic, "message".getBytes())).get();
consumer.subscribe(Collections.singletonList(topic));
TestUtils.waitForCondition(() -> {
ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofSeconds(15));
return records.count() == 1;
}, "consumer failed to receive message");
if (!withGroupPermission) {
removeAndVerifyAcls(
Set.of(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL)),
new ResourcePattern(ResourceType.GROUP, group, PatternType.LITERAL),
clusterInstance
);
}
assertDoesNotThrow(consumer::unsubscribe);
}
}
@ClusterTest
public void testClassicConsumeCloseWithGroupPermission(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testConsumeCloseWithGroupPermission(clusterInstance, GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumeCloseWithGroupPermission(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testConsumeCloseWithGroupPermission(clusterInstance, GroupProtocol.CONSUMER);
}
private void testConsumeCloseWithGroupPermission(ClusterInstance clusterInstance, GroupProtocol groupProtocol) throws InterruptedException, ExecutionException {
setup(clusterInstance);
String topic = "topic";
String group = "group";
// allow topic read/write permission to poll/send record
Set<AccessControlEntry> acls = new HashSet<>();
acls.add(createAcl(AclOperation.CREATE, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
acls.add(createAcl(AclOperation.WRITE, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
acls.add(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
addAndVerifyAcls(
acls,
new ResourcePattern(ResourceType.TOPIC, topic, PatternType.LITERAL),
clusterInstance
);
addAndVerifyAcls(
Set.of(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL)),
new ResourcePattern(ResourceType.GROUP, group, PatternType.LITERAL),
clusterInstance
);
Producer<Object, Object> producer = clusterInstance.producer();
Consumer<byte[], byte[]> consumer = clusterInstance.consumer(Map.of(
ConsumerConfig.GROUP_ID_CONFIG, group,
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false",
GROUP_PROTOCOL_CONFIG, groupProtocol.name.toLowerCase(Locale.ROOT)));
try {
clusterInstance.createTopic(topic, 1, (short) 1);
producer.send(new ProducerRecord<>(topic, "message".getBytes())).get();
consumer.subscribe(List.of(topic));
TestUtils.waitForCondition(() -> {
ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofSeconds(15));
return records.count() == 1;
}, "consumer failed to receive message");
} finally {
producer.close();
assertDoesNotThrow(() -> consumer.close());
}
}
@ClusterTest
public void testAuthorizedProduceAndConsumeWithClassic(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testAuthorizedProduceAndConsume(clusterInstance, GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAuthorizedProduceAndConsumeWithAsync(ClusterInstance clusterInstance) throws ExecutionException, InterruptedException {
testAuthorizedProduceAndConsume(clusterInstance, GroupProtocol.CONSUMER);
}
private void testAuthorizedProduceAndConsume(ClusterInstance clusterInstance, GroupProtocol groupProtocol) throws InterruptedException, ExecutionException {
setup(clusterInstance);
String topic = "topic";
String group = "group";
Set<AccessControlEntry> acls = new HashSet<>();
acls.add(createAcl(AclOperation.CREATE, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
acls.add(createAcl(AclOperation.WRITE, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
acls.add(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL));
addAndVerifyAcls(
acls,
new ResourcePattern(ResourceType.TOPIC, topic, PatternType.LITERAL),
clusterInstance
);
addAndVerifyAcls(
Set.of(createAcl(AclOperation.READ, AclPermissionType.ALLOW, CLIENT_PRINCIPAL)),
new ResourcePattern(ResourceType.GROUP, group, PatternType.LITERAL),
clusterInstance
);
try (Producer<byte[], byte[]> producer = clusterInstance.producer();
Consumer<byte[], byte[]> consumer = clusterInstance.consumer(Map.of(
ConsumerConfig.GROUP_ID_CONFIG, group,
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false",
GROUP_PROTOCOL_CONFIG, groupProtocol.name.toLowerCase(Locale.ROOT)))
) {
clusterInstance.createTopic(topic, 1, (short) 1);
producer.send(new ProducerRecord<>(topic, "message".getBytes())).get();
TopicPartition topicPartition = new TopicPartition(topic, 0);
consumer.assign(List.of(topicPartition));
TestUtils.waitForCondition(() -> {
ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofSeconds(15));
return records.count() == 1;
}, "consumer failed to receive message");
}
}
}
| GroupPrincipalBuilder |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestDeleteForecastAction.java | {
"start": 1261,
"end": 2407
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(
new Route(DELETE, BASE_PATH + "anomaly_detectors/{" + ID + "}/_forecast/"),
new Route(DELETE, BASE_PATH + "anomaly_detectors/{" + ID + "}/_forecast/{" + FORECAST_ID + "}")
);
}
@Override
public String getName() {
return "ml_delete_forecast_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String jobId = restRequest.param(Job.ID.getPreferredName());
String forecastId = restRequest.param(Forecast.FORECAST_ID.getPreferredName(), Metadata.ALL);
final DeleteForecastAction.Request request = new DeleteForecastAction.Request(jobId, forecastId);
request.ackTimeout(getAckTimeout(restRequest));
request.setAllowNoForecasts(restRequest.paramAsBoolean("allow_no_forecasts", request.isAllowNoForecasts()));
return channel -> client.execute(DeleteForecastAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}
| RestDeleteForecastAction |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GitHubEndpointBuilderFactory.java | {
"start": 32230,
"end": 35303
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedGitHubEndpointProducerBuilder advanced() {
return (AdvancedGitHubEndpointProducerBuilder) this;
}
/**
* GitHub repository name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: common
*
* @param repoName the value to set
* @return the dsl builder
*/
default GitHubEndpointProducerBuilder repoName(String repoName) {
doSetProperty("repoName", repoName);
return this;
}
/**
* GitHub repository owner (organization).
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: common
*
* @param repoOwner the value to set
* @return the dsl builder
*/
default GitHubEndpointProducerBuilder repoOwner(String repoOwner) {
doSetProperty("repoOwner", repoOwner);
return this;
}
/**
* To use the given encoding when getting a git commit file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param encoding the value to set
* @return the dsl builder
*/
default GitHubEndpointProducerBuilder encoding(String encoding) {
doSetProperty("encoding", encoding);
return this;
}
/**
* To set git commit status state.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param state the value to set
* @return the dsl builder
*/
default GitHubEndpointProducerBuilder state(String state) {
doSetProperty("state", state);
return this;
}
/**
* To set git commit status target url.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param targetUrl the value to set
* @return the dsl builder
*/
default GitHubEndpointProducerBuilder targetUrl(String targetUrl) {
doSetProperty("targetUrl", targetUrl);
return this;
}
/**
* GitHub OAuth token. Must be configured on either component or
* endpoint.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param oauthToken the value to set
* @return the dsl builder
*/
default GitHubEndpointProducerBuilder oauthToken(String oauthToken) {
doSetProperty("oauthToken", oauthToken);
return this;
}
}
/**
* Advanced builder for endpoint producers for the GitHub component.
*/
public | GitHubEndpointProducerBuilder |
java | junit-team__junit5 | junit-platform-console/src/main/java/org/junit/platform/console/output/ColorPalette.java | {
"start": 864,
"end": 4601
} | class ____ {
public static final ColorPalette SINGLE_COLOR = new ColorPalette(singleColorPalette(), false);
public static final ColorPalette DEFAULT = new ColorPalette(defaultPalette(), false);
public static final ColorPalette NONE = new ColorPalette(new EnumMap<>(Style.class), true);
private final Map<Style, String> colorsToAnsiSequences;
private final boolean disableAnsiColors;
// https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters
private static Map<Style, String> defaultPalette() {
Map<Style, String> colorsToAnsiSequences = new EnumMap<>(Style.class);
colorsToAnsiSequences.put(Style.NONE, "0");
colorsToAnsiSequences.put(Style.SUCCESSFUL, "32");
colorsToAnsiSequences.put(Style.ABORTED, "33");
colorsToAnsiSequences.put(Style.FAILED, "31");
colorsToAnsiSequences.put(Style.SKIPPED, "35");
colorsToAnsiSequences.put(Style.CONTAINER, "36");
colorsToAnsiSequences.put(Style.TEST, "34");
colorsToAnsiSequences.put(Style.DYNAMIC, "35");
colorsToAnsiSequences.put(Style.REPORTED, "37");
return colorsToAnsiSequences;
}
private static Map<Style, String> singleColorPalette() {
Map<Style, String> colorsToAnsiSequences = new EnumMap<>(Style.class);
colorsToAnsiSequences.put(Style.NONE, "0");
colorsToAnsiSequences.put(Style.SUCCESSFUL, "1");
colorsToAnsiSequences.put(Style.ABORTED, "4");
colorsToAnsiSequences.put(Style.FAILED, "7");
colorsToAnsiSequences.put(Style.SKIPPED, "9");
colorsToAnsiSequences.put(Style.CONTAINER, "1");
colorsToAnsiSequences.put(Style.TEST, "0");
colorsToAnsiSequences.put(Style.DYNAMIC, "0");
colorsToAnsiSequences.put(Style.REPORTED, "2");
return colorsToAnsiSequences;
}
ColorPalette(Map<Style, String> overrides) {
this(defaultPalette(), false);
if (overrides.containsKey(Style.NONE)) {
throw new IllegalArgumentException("Cannot override the standard style 'NONE'");
}
this.colorsToAnsiSequences.putAll(overrides);
}
ColorPalette(Properties properties) {
this(toOverrideMap(properties));
}
ColorPalette(Reader reader) {
this(getProperties(reader));
}
public ColorPalette(Path path) {
this(getProperties(path));
}
private ColorPalette(Map<Style, String> colorsToAnsiSequences, boolean disableAnsiColors) {
this.colorsToAnsiSequences = colorsToAnsiSequences;
this.disableAnsiColors = disableAnsiColors;
}
private static Map<Style, String> toOverrideMap(Properties properties) {
Map<String, String> upperCaseProperties = properties.entrySet().stream().collect(Collectors.toMap(
entry -> ((String) entry.getKey()).toUpperCase(Locale.ROOT), entry -> (String) entry.getValue()));
return Arrays.stream(Style.values()).filter(style -> upperCaseProperties.containsKey(style.name())).collect(
Collectors.toMap(Function.identity(), style -> upperCaseProperties.get(style.name())));
}
private static Properties getProperties(Reader reader) {
Properties properties = new Properties();
try {
properties.load(reader);
}
catch (IOException e) {
throw new IllegalArgumentException("Could not read color palette properties", e);
}
return properties;
}
private static Properties getProperties(Path path) {
try (FileReader fileReader = new FileReader(path.toFile(), StandardCharsets.UTF_8)) {
return getProperties(fileReader);
}
catch (IOException e) {
throw new IllegalArgumentException("Could not open color palette properties file", e);
}
}
public String paint(Style style, String text) {
return this.disableAnsiColors || style == Style.NONE ? text
: getAnsiFormatter(style) + text + getAnsiFormatter(Style.NONE);
}
private String getAnsiFormatter(Style style) {
return "\u001B[%sm".formatted(this.colorsToAnsiSequences.get(style));
}
}
| ColorPalette |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/patterntext/PatternTextIndexFieldData.java | {
"start": 1362,
"end": 1500
} | class ____ implements IndexFieldData<LeafFieldData> {
private final PatternTextFieldType fieldType;
static | PatternTextIndexFieldData |
java | spring-projects__spring-boot | module/spring-boot-pulsar/src/test/java/org/springframework/boot/pulsar/autoconfigure/PulsarPropertiesTests.java | {
"start": 2401,
"end": 2628
} | class ____ {
private PulsarProperties bindProperties(Map<String, String> map) {
return new Binder(new MapConfigurationPropertySource(map)).bind("spring.pulsar", PulsarProperties.class).get();
}
@Nested
| PulsarPropertiesTests |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ResponseBodyEmitterReturnValueHandlerTests.java | {
"start": 14431,
"end": 15149
} | class ____ {
private ResponseBodyEmitter h1() { return null; }
private ResponseEntity<ResponseBodyEmitter> h2() { return null; }
private SseEmitter h3() { return null; }
private ResponseEntity<SseEmitter> h4() { return null; }
private ResponseEntity<String> h5() { return null; }
private ResponseEntity<AtomicReference<String>> h6() { return null; }
private ResponseEntity<?> h7() { return null; }
private Flux<String> h8() { return null; }
private ResponseEntity<Flux<String>> h9() { return null; }
private ResponseEntity<Flux<SimpleBean>> h10() { return null; }
private ResponseEntity<Publisher<?>> h11() { return null; }
}
@SuppressWarnings("unused")
private static | TestController |
java | google__jimfs | jimfs/src/test/java/com/google/common/jimfs/RegularFileTest.java | {
"start": 6108,
"end": 33617
} | class ____ extends TestCase {
private final TestConfiguration configuration;
private final FakeFileTimeSource fileTimeSource = new FakeFileTimeSource();
protected RegularFile file;
public RegularFileTestRunner(String methodName, TestConfiguration configuration) {
super(methodName);
this.configuration = configuration;
}
@Override
public String getName() {
return super.getName() + " [" + configuration + "]";
}
@Override
public void setUp() {
file = configuration.createRegularFile();
}
@Override
public void tearDown() {
configuration.tearDown(file);
}
private void fillContent(String fill) throws IOException {
file.write(0, buffer(fill));
}
public void testEmpty() {
assertEquals(0, file.size());
assertContentEquals("", file);
}
public void testEmpty_read_singleByte() {
assertEquals(-1, file.read(0));
assertEquals(-1, file.read(1));
}
public void testEmpty_read_byteArray() {
byte[] array = new byte[10];
assertEquals(-1, file.read(0, array, 0, array.length));
assertArrayEquals(bytes("0000000000"), array);
}
public void testEmpty_read_singleBuffer() {
ByteBuffer buffer = ByteBuffer.allocate(10);
int read = file.read(0, buffer);
assertEquals(-1, read);
assertEquals(0, buffer.position());
}
public void testEmpty_read_multipleBuffers() {
ByteBuffer buf1 = ByteBuffer.allocate(5);
ByteBuffer buf2 = ByteBuffer.allocate(5);
long read = file.read(0, ImmutableList.of(buf1, buf2));
assertEquals(-1, read);
assertEquals(0, buf1.position());
assertEquals(0, buf2.position());
}
public void testEmpty_write_singleByte_atStart() throws IOException {
file.write(0, (byte) 1);
assertContentEquals("1", file);
}
public void testEmpty_write_byteArray_atStart() throws IOException {
byte[] bytes = bytes("111111");
file.write(0, bytes, 0, bytes.length);
assertContentEquals(bytes, file);
}
public void testEmpty_write_partialByteArray_atStart() throws IOException {
byte[] bytes = bytes("2211111122");
file.write(0, bytes, 2, 6);
assertContentEquals("111111", file);
}
public void testEmpty_write_singleBuffer_atStart() throws IOException {
file.write(0, buffer("111111"));
assertContentEquals("111111", file);
}
public void testEmpty_write_multipleBuffers_atStart() throws IOException {
file.write(0, buffers("111", "111"));
assertContentEquals("111111", file);
}
public void testEmpty_write_singleByte_atNonZeroPosition() throws IOException {
file.write(5, (byte) 1);
assertContentEquals("000001", file);
}
public void testEmpty_write_byteArray_atNonZeroPosition() throws IOException {
byte[] bytes = bytes("111111");
file.write(5, bytes, 0, bytes.length);
assertContentEquals("00000111111", file);
}
public void testEmpty_write_partialByteArray_atNonZeroPosition() throws IOException {
byte[] bytes = bytes("2211111122");
file.write(5, bytes, 2, 6);
assertContentEquals("00000111111", file);
}
public void testEmpty_write_singleBuffer_atNonZeroPosition() throws IOException {
file.write(5, buffer("111"));
assertContentEquals("00000111", file);
}
public void testEmpty_write_multipleBuffers_atNonZeroPosition() throws IOException {
file.write(5, buffers("111", "222"));
assertContentEquals("00000111222", file);
}
public void testEmpty_write_noBytesArray_atStart() throws IOException {
file.write(0, bytes(), 0, 0);
assertContentEquals(bytes(), file);
}
public void testEmpty_write_noBytesArray_atNonZeroPosition() throws IOException {
file.write(5, bytes(), 0, 0);
assertContentEquals(bytes("00000"), file);
}
public void testEmpty_write_noBytesBuffer_atStart() throws IOException {
file.write(0, buffer(""));
assertContentEquals(bytes(), file);
}
public void testEmpty_write_noBytesBuffer_atNonZeroPosition() throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(0);
file.write(5, buffer);
assertContentEquals(bytes("00000"), file);
}
public void testEmpty_write_noBytesBuffers_atStart() throws IOException {
file.write(0, ImmutableList.of(buffer(""), buffer(""), buffer("")));
assertContentEquals(bytes(), file);
}
public void testEmpty_write_noBytesBuffers_atNonZeroPosition() throws IOException {
file.write(5, ImmutableList.of(buffer(""), buffer(""), buffer("")));
assertContentEquals(bytes("00000"), file);
}
public void testEmpty_transferFrom_fromStart_countEqualsSrcSize() throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("111111")), 0, 6);
assertEquals(6, transferred);
assertContentEquals("111111", file);
}
public void testEmpty_transferFrom_fromStart_countLessThanSrcSize() throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("111111")), 0, 3);
assertEquals(3, transferred);
assertContentEquals("111", file);
}
public void testEmpty_transferFrom_fromStart_countGreaterThanSrcSize() throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("111111")), 0, 12);
assertEquals(6, transferred);
assertContentEquals("111111", file);
}
public void testEmpty_transferFrom_positionGreaterThanSize() throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("111111")), 4, 6);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferFrom_positionGreaterThanSize_countEqualsSrcSize()
throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("111111")), 4, 6);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferFrom_positionGreaterThanSize_countLessThanSrcSize()
throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("111111")), 4, 3);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferFrom_positionGreaterThanSize_countGreaterThanSrcSize()
throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("111111")), 4, 12);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferFrom_fromStart_noBytes_countEqualsSrcSize() throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("")), 0, 0);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferFrom_fromStart_noBytes_countGreaterThanSrcSize()
throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("")), 0, 10);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferFrom_postionGreaterThanSrcSize_noBytes_countEqualsSrcSize()
throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("")), 5, 0);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferFrom_postionGreaterThanSrcSize_noBytes_countGreaterThanSrcSize()
throws IOException {
long transferred = file.transferFrom(new ByteBufferChannel(buffer("")), 5, 10);
assertEquals(0, transferred);
assertContentEquals(bytes(), file);
}
public void testEmpty_transferTo() throws IOException {
ByteBufferChannel channel = new ByteBufferChannel(100);
assertEquals(0, file.transferTo(0, 100, channel));
}
public void testEmpty_copy() throws IOException {
RegularFile copy = file.copyWithoutContent(1, fileTimeSource.now());
assertContentEquals("", copy);
}
public void testEmpty_truncate_toZero() throws IOException {
file.truncate(0);
assertContentEquals("", file);
}
public void testEmpty_truncate_sizeUp() throws IOException {
file.truncate(10);
assertContentEquals("", file);
}
public void testNonEmpty() throws IOException {
fillContent("222222");
assertContentEquals("222222", file);
}
public void testNonEmpty_read_singleByte() throws IOException {
fillContent("123456");
assertEquals(1, file.read(0));
assertEquals(2, file.read(1));
assertEquals(6, file.read(5));
assertEquals(-1, file.read(6));
assertEquals(-1, file.read(100));
}
public void testNonEmpty_read_all_byteArray() throws IOException {
fillContent("222222");
byte[] array = new byte[6];
assertEquals(6, file.read(0, array, 0, array.length));
assertArrayEquals(bytes("222222"), array);
}
public void testNonEmpty_read_all_singleBuffer() throws IOException {
fillContent("222222");
ByteBuffer buffer = ByteBuffer.allocate(6);
assertEquals(6, file.read(0, buffer));
assertBufferEquals("222222", 0, buffer);
}
public void testNonEmpty_read_all_multipleBuffers() throws IOException {
fillContent("223334");
ByteBuffer buf1 = ByteBuffer.allocate(3);
ByteBuffer buf2 = ByteBuffer.allocate(3);
assertEquals(6, file.read(0, ImmutableList.of(buf1, buf2)));
assertBufferEquals("223", 0, buf1);
assertBufferEquals("334", 0, buf2);
}
// --- Reads into sinks larger than the content: the read stops at EOF and the
// remainder of the sink is left untouched (still zero). ---

// Oversized byte[]: only the content-length prefix (or the given offset window) is written.
public void testNonEmpty_read_all_byteArray_largerThanContent() throws IOException {
fillContent("222222");
byte[] array = new byte[10];
assertEquals(6, file.read(0, array, 0, array.length));
assertArrayEquals(bytes("2222220000"), array);
array = new byte[10];
// Non-zero destination offset: bytes land at [2, 8), edges stay zero.
assertEquals(6, file.read(0, array, 2, 6));
assertArrayEquals(bytes("0022222200"), array);
}
// Oversized ByteBuffer: position advances only by the bytes actually read.
public void testNonEmpty_read_all_singleBuffer_largerThanContent() throws IOException {
fillContent("222222");
ByteBuffer buffer = ByteBuffer.allocate(16);
// Sanity-check the freshly allocated buffer before reading into it.
assertBufferEquals("0000000000000000", 16, buffer);
assertEquals(6, file.read(0, buffer));
assertBufferEquals("2222220000000000", 10, buffer);
}
// Scattering read with more total capacity than content: later buffer is only partially filled.
public void testNonEmpty_read_all_multipleBuffers_largerThanContent() throws IOException {
fillContent("222222");
ByteBuffer buf1 = ByteBuffer.allocate(4);
ByteBuffer buf2 = ByteBuffer.allocate(8);
assertEquals(6, file.read(0, ImmutableList.of(buf1, buf2)));
assertBufferEquals("2222", 0, buf1);
assertBufferEquals("22000000", 6, buf2);
}
// Scattering read with surplus buffers: buffers past the content are never touched.
public void testNonEmpty_read_all_multipleBuffers_extraBuffers() throws IOException {
fillContent("222222");
ByteBuffer buf1 = ByteBuffer.allocate(4);
ByteBuffer buf2 = ByteBuffer.allocate(8);
ByteBuffer buf3 = ByteBuffer.allocate(4);
assertEquals(6, file.read(0, ImmutableList.of(buf1, buf2, buf3)));
assertBufferEquals("2222", 0, buf1);
assertBufferEquals("22000000", 6, buf2);
assertBufferEquals("0000", 4, buf3);
}
// --- Partial reads: from the start, middle, and near the end of the content,
// into byte[] sinks (with and without a destination offset) and ByteBuffers. ---

public void testNonEmpty_read_partial_fromStart_byteArray() throws IOException {
fillContent("222222");
byte[] array = new byte[3];
assertEquals(3, file.read(0, array, 0, array.length));
assertArrayEquals(bytes("222"), array);
array = new byte[10];
assertEquals(3, file.read(0, array, 1, 3));
assertArrayEquals(bytes("0222000000"), array);
}
public void testNonEmpty_read_partial_fromMiddle_byteArray() throws IOException {
fillContent("22223333");
byte[] array = new byte[3];
assertEquals(3, file.read(3, array, 0, array.length));
assertArrayEquals(bytes("233"), array);
array = new byte[10];
assertEquals(3, file.read(3, array, 1, 3));
assertArrayEquals(bytes("0233000000"), array);
}
// Near EOF only 2 of the 3 requested bytes exist; the short count is returned
// and the rest of the destination stays zero.
public void testNonEmpty_read_partial_fromEnd_byteArray() throws IOException {
fillContent("2222222222");
byte[] array = new byte[3];
assertEquals(2, file.read(8, array, 0, array.length));
assertArrayEquals(bytes("220"), array);
array = new byte[10];
assertEquals(2, file.read(8, array, 1, 3));
assertArrayEquals(bytes("0220000000"), array);
}
public void testNonEmpty_read_partial_fromStart_singleBuffer() throws IOException {
fillContent("222222");
ByteBuffer buffer = ByteBuffer.allocate(3);
assertEquals(3, file.read(0, buffer));
assertBufferEquals("222", 0, buffer);
}
public void testNonEmpty_read_partial_fromMiddle_singleBuffer() throws IOException {
fillContent("22223333");
ByteBuffer buffer = ByteBuffer.allocate(3);
assertEquals(3, file.read(3, buffer));
assertBufferEquals("233", 0, buffer);
}
// Short read near EOF: the buffer keeps 1 remaining byte of capacity.
public void testNonEmpty_read_partial_fromEnd_singleBuffer() throws IOException {
fillContent("2222222222");
ByteBuffer buffer = ByteBuffer.allocate(3);
assertEquals(2, file.read(8, buffer));
assertBufferEquals("220", 1, buffer);
}
// --- Partial scattering reads, and reads starting at or past EOF.
// A read positioned at/after the end returns -1 and leaves every sink untouched. ---

public void testNonEmpty_read_partial_fromStart_multipleBuffers() throws IOException {
fillContent("12345678");
ByteBuffer buf1 = ByteBuffer.allocate(2);
ByteBuffer buf2 = ByteBuffer.allocate(2);
assertEquals(4, file.read(0, ImmutableList.of(buf1, buf2)));
assertBufferEquals("12", 0, buf1);
assertBufferEquals("34", 0, buf2);
}
public void testNonEmpty_read_partial_fromMiddle_multipleBuffers() throws IOException {
fillContent("12345678");
ByteBuffer buf1 = ByteBuffer.allocate(2);
ByteBuffer buf2 = ByteBuffer.allocate(2);
assertEquals(4, file.read(3, ImmutableList.of(buf1, buf2)));
assertBufferEquals("45", 0, buf1);
assertBufferEquals("67", 0, buf2);
}
// Only 3 of 4 requested bytes exist; the second buffer is left with 1 remaining.
public void testNonEmpty_read_partial_fromEnd_multipleBuffers() throws IOException {
fillContent("123456789");
ByteBuffer buf1 = ByteBuffer.allocate(2);
ByteBuffer buf2 = ByteBuffer.allocate(2);
assertEquals(3, file.read(6, ImmutableList.of(buf1, buf2)));
assertBufferEquals("78", 0, buf1);
assertBufferEquals("90", 1, buf2);
}
public void testNonEmpty_read_fromPastEnd_byteArray() throws IOException {
fillContent("123");
byte[] array = new byte[3];
assertEquals(-1, file.read(3, array, 0, array.length));
assertArrayEquals(bytes("000"), array);
assertEquals(-1, file.read(3, array, 0, 2));
assertArrayEquals(bytes("000"), array);
}
public void testNonEmpty_read_fromPastEnd_singleBuffer() throws IOException {
fillContent("123");
ByteBuffer buffer = ByteBuffer.allocate(3);
assertEquals(-1, file.read(3, buffer));
assertBufferEquals("000", 3, buffer);
}
public void testNonEmpty_read_fromPastEnd_multipleBuffers() throws IOException {
fillContent("123");
ByteBuffer buf1 = ByteBuffer.allocate(2);
ByteBuffer buf2 = ByteBuffer.allocate(2);
assertEquals(-1, file.read(6, ImmutableList.of(buf1, buf2)));
assertBufferEquals("00", 2, buf1);
assertBufferEquals("00", 2, buf2);
}
// --- Single-byte writes and byte[] writes from the start of the content.
// Writes return the number of bytes written; writing at the current size appends. ---

public void testNonEmpty_write_partial_fromStart_singleByte() throws IOException {
fillContent("222222");
assertEquals(1, file.write(0, (byte) 1));
assertContentEquals("122222", file);
}
public void testNonEmpty_write_partial_fromMiddle_singleByte() throws IOException {
fillContent("222222");
assertEquals(1, file.write(3, (byte) 1));
assertContentEquals("222122", file);
}
// Position 6 is exactly the current size, so the byte is appended.
public void testNonEmpty_write_partial_fromEnd_singleByte() throws IOException {
fillContent("222222");
assertEquals(1, file.write(6, (byte) 1));
assertContentEquals("2222221", file);
}
// Second write uses a source-array window (offset 0, length 2) shorter than the array.
public void testNonEmpty_write_partial_fromStart_byteArray() throws IOException {
fillContent("222222");
assertEquals(3, file.write(0, bytes("111"), 0, 3));
assertContentEquals("111222", file);
assertEquals(2, file.write(0, bytes("333333"), 0, 2));
assertContentEquals("331222", file);
}
/**
 * Overwrites a three-byte span in the middle of the content, then two more bytes
 * from a non-zero source-array offset, verifying surrounding bytes are untouched.
 */
public void testNonEmpty_write_partial_fromMiddle_byteArray() throws IOException {
fillContent("22222222");
// Use the byte[] overload here to match every other *_byteArray test in this
// suite; the original used buffer("111"), which exercises the ByteBuffer
// overload instead. Same bytes {1,1,1} written at position 3 either way.
assertEquals(3, file.write(3, bytes("111"), 0, 3));
assertContentEquals("22211122", file);
assertEquals(2, file.write(5, bytes("333333"), 1, 2));
assertContentEquals("22211332", file);
}
// --- byte[] writes that reach, hit, or pass the end of the content.
// Writing past the current size extends the file, zero-filling any gap. ---

// Write starting inside the content but running past its end extends the file.
public void testNonEmpty_write_partial_fromBeforeEnd_byteArray() throws IOException {
fillContent("22222222");
assertEquals(3, file.write(6, bytes("111"), 0, 3));
assertContentEquals("222222111", file);
assertEquals(2, file.write(8, bytes("333333"), 2, 2));
assertContentEquals("2222221133", file);
}
// Write positioned exactly at the current size appends.
public void testNonEmpty_write_partial_fromEnd_byteArray() throws IOException {
fillContent("222222");
assertEquals(3, file.write(6, bytes("111"), 0, 3));
assertContentEquals("222222111", file);
assertEquals(2, file.write(9, bytes("333333"), 3, 2));
assertContentEquals("22222211133", file);
}
// Write positioned beyond the current size leaves a zero-filled gap ("00") before the data.
public void testNonEmpty_write_partial_fromPastEnd_byteArray() throws IOException {
fillContent("222222");
assertEquals(3, file.write(8, bytes("111"), 0, 3));
assertContentEquals("22222200111", file);
assertEquals(2, file.write(13, bytes("333333"), 4, 2));
assertContentEquals("222222001110033", file);
}
// --- ByteBuffer and gathering (multi-buffer) writes, mirroring the byte[] cases:
// in-place overwrite, append at the end, and zero-filled gap when writing past it. ---

public void testNonEmpty_write_partial_fromStart_singleBuffer() throws IOException {
fillContent("222222");
assertEquals(3, file.write(0, buffer("111")));
assertContentEquals("111222", file);
}
public void testNonEmpty_write_partial_fromMiddle_singleBuffer() throws IOException {
fillContent("22222222");
assertEquals(3, file.write(3, buffer("111")));
assertContentEquals("22211122", file);
}
public void testNonEmpty_write_partial_fromBeforeEnd_singleBuffer() throws IOException {
fillContent("22222222");
assertEquals(3, file.write(6, buffer("111")));
assertContentEquals("222222111", file);
}
public void testNonEmpty_write_partial_fromEnd_singleBuffer() throws IOException {
fillContent("222222");
assertEquals(3, file.write(6, buffer("111")));
assertContentEquals("222222111", file);
}
// Gap bytes between old size (6) and write position (8) read back as zero.
public void testNonEmpty_write_partial_fromPastEnd_singleBuffer() throws IOException {
fillContent("222222");
assertEquals(3, file.write(8, buffer("111")));
assertContentEquals("22222200111", file);
}
public void testNonEmpty_write_partial_fromStart_multipleBuffers() throws IOException {
fillContent("222222");
assertEquals(4, file.write(0, buffers("11", "33")));
assertContentEquals("113322", file);
}
public void testNonEmpty_write_partial_fromMiddle_multipleBuffers() throws IOException {
fillContent("22222222");
assertEquals(4, file.write(2, buffers("11", "33")));
assertContentEquals("22113322", file);
}
public void testNonEmpty_write_partial_fromBeforeEnd_multipleBuffers() throws IOException {
fillContent("22222222");
assertEquals(6, file.write(6, buffers("111", "333")));
assertContentEquals("222222111333", file);
}
public void testNonEmpty_write_partial_fromEnd_multipleBuffers() throws IOException {
fillContent("222222");
assertEquals(6, file.write(6, buffers("111", "333")));
assertContentEquals("222222111333", file);
}
public void testNonEmpty_write_partial_fromPastEnd_multipleBuffers() throws IOException {
fillContent("222222");
assertEquals(4, file.write(10, buffers("11", "33")));
assertContentEquals("22222200001133", file);
}
// Overwrite of the entire content with data of equal length.
public void testNonEmpty_write_overwrite_sameLength() throws IOException {
fillContent("2222");
assertEquals(4, file.write(0, buffer("1234")));
assertContentEquals("1234", file);
}
// Overwrite that also grows the file beyond its previous size.
public void testNonEmpty_write_overwrite_greaterLength() throws IOException {
fillContent("2222");
assertEquals(8, file.write(0, buffer("12345678")));
assertContentEquals("12345678", file);
}
// --- transferTo (file -> channel) and transferFrom (channel -> file).
// Both return the number of bytes actually moved; counts larger than the available
// source data are clamped, and transferFrom past the file's end is a no-op. ---

public void testNonEmpty_transferTo_fromStart_countEqualsSize() throws IOException {
fillContent("123456");
ByteBufferChannel channel = new ByteBufferChannel(10);
assertEquals(6, file.transferTo(0, 6, channel));
assertBufferEquals("1234560000", 4, channel.buffer());
}
public void testNonEmpty_transferTo_fromStart_countLessThanSize() throws IOException {
fillContent("123456");
ByteBufferChannel channel = new ByteBufferChannel(10);
assertEquals(4, file.transferTo(0, 4, channel));
assertBufferEquals("1234000000", 6, channel.buffer());
}
// Only 2 bytes remain after position 4; the transfer is clamped to that.
public void testNonEmpty_transferTo_fromMiddle_countEqualsSize() throws IOException {
fillContent("123456");
ByteBufferChannel channel = new ByteBufferChannel(10);
assertEquals(2, file.transferTo(4, 6, channel));
assertBufferEquals("5600000000", 8, channel.buffer());
}
public void testNonEmpty_transferTo_fromMiddle_countLessThanSize() throws IOException {
fillContent("12345678");
ByteBufferChannel channel = new ByteBufferChannel(10);
assertEquals(4, file.transferTo(3, 4, channel));
assertBufferEquals("4567000000", 6, channel.buffer());
}
public void testNonEmpty_transferFrom_toStart_countEqualsSrcSize() throws IOException {
fillContent("22222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("11111"));
assertEquals(5, file.transferFrom(channel, 0, 5));
assertContentEquals("11111222", file);
}
public void testNonEmpty_transferFrom_toStart_countLessThanSrcSize() throws IOException {
fillContent("22222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("11111"));
assertEquals(3, file.transferFrom(channel, 0, 3));
assertContentEquals("11122222", file);
}
// Requested count (10) exceeds the channel's 5 bytes; only 5 are transferred.
public void testNonEmpty_transferFrom_toStart_countGreaterThanSrcSize() throws IOException {
fillContent("22222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("11111"));
assertEquals(5, file.transferFrom(channel, 0, 10));
assertContentEquals("11111222", file);
}
public void testNonEmpty_transferFrom_toMiddle_countEqualsSrcSize() throws IOException {
fillContent("22222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("1111"));
assertEquals(4, file.transferFrom(channel, 2, 4));
assertContentEquals("22111122", file);
}
public void testNonEmpty_transferFrom_toMiddle_countLessThanSrcSize() throws IOException {
fillContent("22222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("11111"));
assertEquals(3, file.transferFrom(channel, 2, 3));
assertContentEquals("22111222", file);
}
public void testNonEmpty_transferFrom_toMiddle_countGreaterThanSrcSize() throws IOException {
fillContent("22222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("1111"));
assertEquals(4, file.transferFrom(channel, 2, 100));
assertContentEquals("22111122", file);
}
// Transfer starting inside the content but extending past its end grows the file.
public void testNonEmpty_transferFrom_toMiddle_transferGoesBeyondContentSize()
throws IOException {
fillContent("222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("111111"));
assertEquals(6, file.transferFrom(channel, 4, 6));
assertContentEquals("2222111111", file);
}
public void testNonEmpty_transferFrom_toEnd() throws IOException {
fillContent("222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("111111"));
assertEquals(6, file.transferFrom(channel, 6, 6));
assertContentEquals("222222111111", file);
}
// Unlike write(), transferFrom positioned past the end transfers nothing
// and leaves the file unchanged.
public void testNonEmpty_transferFrom_positionGreaterThanSize() throws IOException {
fillContent("222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("111111"));
assertEquals(0, file.transferFrom(channel, 10, 6));
assertContentEquals("222222", file);
}
// A wildly overestimated count is still clamped to the source's actual size.
public void testNonEmpty_transferFrom_hugeOverestimateCount() throws IOException {
fillContent("222222");
ByteBufferChannel channel = new ByteBufferChannel(buffer("111111"));
assertEquals(6, file.transferFrom(channel, 6, 1024 * 1024 * 10));
assertContentEquals("222222111111", file);
}
// --- Copying, truncation, and behavior of a deleted-but-still-open file. ---

// copyWithoutContent + copyContentTo together produce an identical copy.
public void testNonEmpty_copy() throws IOException {
fillContent("123456");
RegularFile copy = file.copyWithoutContent(1, fileTimeSource.now());
file.copyContentTo(copy);
assertContentEquals("123456", copy);
}
// Copying a copy preserves the content through the chain.
public void testNonEmpty_copy_multipleTimes() throws IOException {
fillContent("123456");
RegularFile copy = file.copyWithoutContent(1, fileTimeSource.now());
file.copyContentTo(copy);
RegularFile copy2 = copy.copyWithoutContent(2, fileTimeSource.now());
copy.copyContentTo(copy2);
assertContentEquals("123456", copy);
}
public void testNonEmpty_truncate_toZero() throws IOException {
fillContent("123456");
file.truncate(0);
assertContentEquals("", file);
}
// Truncation keeps only the prefix up to the new size.
public void testNonEmpty_truncate_partial() throws IOException {
fillContent("12345678");
file.truncate(5);
assertContentEquals("12345", file);
}
// Truncating to a larger size never extends the file.
public void testNonEmpty_truncate_sizeUp() throws IOException {
fillContent("123456");
file.truncate(12);
assertContentEquals("123456", file);
}
// A deleted file must remain fully readable and writable while any open handle
// remains (two opened() calls here), and only becomes undefined once the last
// handle is closed.
public void testDeletedStoreRemainsUsableWhileOpen() throws IOException {
byte[] bytes = bytes("1234567890");
file.write(0, bytes, 0, bytes.length);
file.opened();
file.opened();
file.deleted();
assertContentEquals(bytes, file);
byte[] moreBytes = bytes("1234");
file.write(bytes.length, moreBytes, 0, 4);
byte[] totalBytes = concat(bytes, bytes("1234"));
assertContentEquals(totalBytes, file);
file.closed();
// One handle still open: content must still be intact.
assertContentEquals(totalBytes, file);
file.closed();
// don't check anything else; no guarantee of what if anything will happen once the file is
// deleted and completely closed
}
/**
 * Asserts that {@code buffer}'s capacity and entire backing array match the
 * encoded digits of {@code expectedContent}.
 */
private static void assertBufferEquals(String expectedContent, ByteBuffer buffer) {
  assertEquals(expectedContent.length(), buffer.capacity());
  assertArrayEquals(bytes(expectedContent), buffer.array());
}

/**
 * Asserts buffer content as above and additionally that the buffer has exactly
 * {@code expectedRemaining} bytes remaining.
 */
private static void assertBufferEquals(String expectedContent, int expectedRemaining, ByteBuffer buffer) {
  assertBufferEquals(expectedContent, buffer);
  assertEquals(expectedRemaining, buffer.remaining());
}

/** Asserts that the file's full content equals the encoded digits of {@code expectedContent}. */
private static void assertContentEquals(String expectedContent, RegularFile actualFile) {
  assertContentEquals(bytes(expectedContent), actualFile);
}

/** Asserts that the file's size and full content equal {@code expectedContent}. */
protected static void assertContentEquals(byte[] expectedContent, RegularFile actualFile) {
  assertEquals(expectedContent.length, actualFile.sizeWithoutLocking());
  byte[] readBack = new byte[(int) actualFile.sizeWithoutLocking()];
  int unused = actualFile.read(0, ByteBuffer.wrap(readBack));
  assertArrayEquals(expectedContent, readBack);
}
}
}
| RegularFileTestRunner |
java | elastic__elasticsearch | libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsTreeDigestTests.java | {
"start": 978,
"end": 1177
} | class ____ extends BigCountTests {
@Override
public TDigest createDigest(int compression) {
return TDigest.createAvlTreeDigest(arrays(), compression);
}
}
| BigCountTestsTreeDigestTests |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/core/GrantedAuthorityDefaults.java | {
"start": 860,
"end": 1209
} | class ____ {
private final String rolePrefix;
public GrantedAuthorityDefaults(String rolePrefix) {
this.rolePrefix = rolePrefix;
}
/**
* The default prefix used with role based authorization. Default is "ROLE_".
* @return the default role prefix
*/
public String getRolePrefix() {
return this.rolePrefix;
}
}
| GrantedAuthorityDefaults |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/message/AbstractMessageFactory.java | {
"start": 1458,
"end": 5574
} | class ____ implements MessageFactory2, Serializable {
private static final long serialVersionUID = -1307891137684031187L;
@Override
public Message newMessage(final CharSequence message) {
return new SimpleMessage(message);
}
/*
* (non-Javadoc)
*
* @see org.apache.logging.log4j.message.MessageFactory#newMessage(java.lang.Object)
*/
@Override
public Message newMessage(final Object message) {
return new ObjectMessage(message);
}
/*
* (non-Javadoc)
*
* @see org.apache.logging.log4j.message.MessageFactory#newMessage(java.lang.String)
*/
@Override
public Message newMessage(final String message) {
return new SimpleMessage(message);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(final String message, final Object p0) {
return newMessage(message, new Object[] {p0});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(final String message, final Object p0, final Object p1) {
return newMessage(message, new Object[] {p0, p1});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(final String message, final Object p0, final Object p1, final Object p2) {
return newMessage(message, new Object[] {p0, p1, p2});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message, final Object p0, final Object p1, final Object p2, final Object p3) {
return newMessage(message, new Object[] {p0, p1, p2, p3});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4) {
return newMessage(message, new Object[] {p0, p1, p2, p3, p4});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5) {
return newMessage(message, new Object[] {p0, p1, p2, p3, p4, p5});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6) {
return newMessage(message, new Object[] {p0, p1, p2, p3, p4, p5, p6});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7) {
return newMessage(message, new Object[] {p0, p1, p2, p3, p4, p5, p6, p7});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8) {
return newMessage(message, new Object[] {p0, p1, p2, p3, p4, p5, p6, p7, p8});
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8,
final Object p9) {
return newMessage(message, new Object[] {p0, p1, p2, p3, p4, p5, p6, p7, p8, p9});
}
}
| AbstractMessageFactory |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.