Dataset columns:

  language    stringclasses   1 value
  repo        stringclasses   60 values
  path        stringlengths   22 to 294
  class_span  dict
  source      stringlengths   13 to 1.16M
  target      stringlengths   1 to 113
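
Each row pairs a code excerpt (source) in which one token is masked as ____ with the token (target) that fills the blank; class_span appears to give the character offsets of the excerpt within the original file. A minimal sketch of how the two fields recombine (the MaskedRecord name and its fields are illustrative, not part of the dataset):

```java
// Illustrative holder for one dataset row; only the fields needed to
// demonstrate reconstruction are shown.
public final class MaskedRecord {
    String source; // excerpt with the masked token shown as "____"
    String target; // the token that was masked out

    // Fill the blank to recover the original excerpt. The mask is assumed
    // to occur exactly once per excerpt.
    String reconstruct() {
        return source.replace("____", target);
    }

    public static void main(String[] args) {
        MaskedRecord r = new MaskedRecord();
        r.source = "class ____ extends SimpleBuildItem { /* ... */ }";
        r.target = "KubernetesOutputDirectoryBuildItem";
        // prints: class KubernetesOutputDirectoryBuildItem extends SimpleBuildItem { /* ... */ }
        System.out.println(r.reconstruct());
    }
}
```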

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/QueryParametersValidationTest.java
class_span: { "start": 2991, "end": 4329 }
source:
class ____ implements UserType<Boolean> { @Override public int getSqlType() { return Types.CHAR; } @Override public Class returnedClass() { return boolean.class; } @Override public boolean equals(Boolean x, Boolean y) throws HibernateException { return Objects.equals( x, y); } @Override public int hashCode(Boolean x) throws HibernateException { return Objects.hashCode(x); } @Override public Boolean nullSafeGet(ResultSet rs, int position, WrapperOptions options) throws SQLException { return "Y".equals( rs.getString( position ) ); } @Override public void nullSafeSet( PreparedStatement st, Boolean value, int index, WrapperOptions options) throws SQLException { st.setString(index, value ? "Y" : "N"); } @Override public Boolean deepCopy(Boolean value) throws HibernateException { return value; } @Override public boolean isMutable() { return false; } @Override public Serializable disassemble(Boolean value) throws HibernateException { return null; } @Override public Boolean assemble(Serializable cached, Object owner) throws HibernateException { return null; } @Override public Boolean replace(Boolean original, Boolean target, Object owner) throws HibernateException { return null; } } }
target: BooleanUserType
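
For context, a UserType implementation like the one above is typically attached to an entity attribute. A minimal sketch assuming Hibernate 6's org.hibernate.annotations.Type annotation (the entity and field names are illustrative):

```java
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.annotations.Type;

// Illustrative entity; BooleanUserType is the class named in the row above.
@Entity
public class FeatureFlag {
    @Id
    Long id;

    // Persisted as 'Y'/'N' in a CHAR column by BooleanUserType.
    @Type(BooleanUserType.class)
    Boolean enabled;
}
```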

language: java
repo: spring-projects__spring-security
path: acl/src/test/java/org/springframework/security/acls/jdbc/JdbcMutableAclServiceTestsWithAclClassId.java
class_span: { "start": 1427, "end": 2831 }
source:
class ____ extends JdbcMutableAclServiceTests { private static final String TARGET_CLASS_WITH_UUID = TargetObjectWithUUID.class.getName(); private final ObjectIdentity topParentOid = new ObjectIdentityImpl(TARGET_CLASS_WITH_UUID, UUID.randomUUID()); private final ObjectIdentity middleParentOid = new ObjectIdentityImpl(TARGET_CLASS_WITH_UUID, UUID.randomUUID()); private final ObjectIdentity childOid = new ObjectIdentityImpl(TARGET_CLASS_WITH_UUID, UUID.randomUUID()); @Override protected String getSqlClassPathResource() { return "createAclSchemaWithAclClassIdType.sql"; } @Override protected ObjectIdentity getTopParentOid() { return this.topParentOid; } @Override protected ObjectIdentity getMiddleParentOid() { return this.middleParentOid; } @Override protected ObjectIdentity getChildOid() { return this.childOid; } @Override protected String getTargetClass() { return TARGET_CLASS_WITH_UUID; } @Test @Transactional public void identityWithUuidIdIsSupportedByCreateAcl() { SecurityContextHolder.getContext().setAuthentication(getAuth()); UUID id = UUID.randomUUID(); ObjectIdentity oid = new ObjectIdentityImpl(TARGET_CLASS_WITH_UUID, id); getJdbcMutableAclService().createAcl(oid); assertThat(getJdbcMutableAclService().readAclById(new ObjectIdentityImpl(TARGET_CLASS_WITH_UUID, id))) .isNotNull(); } }
target: JdbcMutableAclServiceTestsWithAclClassId

language: java
repo: grpc__grpc-java
path: s2a/src/main/java/io/grpc/s2a/internal/channel/S2AHandshakerServiceChannel.java
class_span: { "start": 1297, "end": 1903 }
source:
interface ____ a {@code Resource<Channel>}. A {@code * Resource<Channel>} is a factory for creating gRPC channels to the S2A server at a given address, * and a channel must be returned to the {@code Resource<Channel>} when it is no longer needed. * * <p>Typical usage pattern is below: * * <pre>{@code * Resource<Channel> resource = S2AHandshakerServiceChannel.getChannelResource("localhost:1234", * creds); * Channel channel = resource.create(); * // Send an RPC over the channel to the S2A server running at localhost:1234. * resource.close(channel); * }</pre> */ @ThreadSafe public final
target: called

language: java
repo: reactor__reactor-core
path: reactor-test/src/test/java/reactor/test/scheduler/VirtualTimeSchedulerTests.java
class_span: { "start": 1419, "end": 15672 }
source:
class ____ { @Test public void cancelledAndEmptyConstantsAreNotSame() { assertThat(VirtualTimeScheduler.CANCELLED).isNotSameAs(VirtualTimeScheduler.EMPTY); assertThat(VirtualTimeScheduler.CANCELLED.isDisposed()).isTrue(); assertThat(VirtualTimeScheduler.EMPTY.isDisposed()).isFalse(); } @Test public void allEnabled() { assertThat(Schedulers.newParallel("")).isNotInstanceOf(VirtualTimeScheduler.class); assertThat(Schedulers.newBoundedElastic(4, Integer.MAX_VALUE, "")).isNotInstanceOf(VirtualTimeScheduler.class); assertThat(Schedulers.newSingle("")).isNotInstanceOf(VirtualTimeScheduler.class); VirtualTimeScheduler.getOrSet(); assertThat(Schedulers.newParallel("")).isInstanceOf(VirtualTimeScheduler.class); assertThat(Schedulers.newBoundedElastic(4, Integer.MAX_VALUE, "")).isInstanceOf(VirtualTimeScheduler.class); assertThat(Schedulers.newSingle("")).isInstanceOf(VirtualTimeScheduler.class); VirtualTimeScheduler t = VirtualTimeScheduler.get(); assertThat(Schedulers.newParallel("")).isSameAs(t); assertThat(Schedulers.newBoundedElastic(5, Integer.MAX_VALUE, "")).isSameAs(t); //same even though different parameter assertThat(Schedulers.newSingle("")).isSameAs(t); } @Test public void enableProvidedAllSchedulerIdempotent() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(); VirtualTimeScheduler.getOrSet(vts); assertThat(vts).isSameAs(uncache(Schedulers.single())); assertThat(vts.shutdown).isFalse(); VirtualTimeScheduler.getOrSet(vts); assertThat(vts).isSameAs(uncache(Schedulers.single())); assertThat(vts.shutdown).isFalse(); } @Test public void enableTwoSimilarSchedulersUsesFirst() { VirtualTimeScheduler vts1 = VirtualTimeScheduler.create(); VirtualTimeScheduler vts2 = VirtualTimeScheduler.create(); VirtualTimeScheduler firstEnableResult = VirtualTimeScheduler.getOrSet(vts1); VirtualTimeScheduler secondEnableResult = VirtualTimeScheduler.getOrSet(vts2); assertThat(vts1).isSameAs(firstEnableResult); assertThat(vts1).isSameAs(secondEnableResult); assertThat(vts1).isSameAs(uncache(Schedulers.single())); assertThat(vts1.shutdown).isFalse(); } @Test public void disposedSchedulerIsStillCleanedUp() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(); vts.dispose(); assertThat(VirtualTimeScheduler.isFactoryEnabled()).isFalse(); StepVerifier.withVirtualTime(() -> Mono.just("foo"), () -> vts, Long.MAX_VALUE) .then(() -> assertThat(VirtualTimeScheduler.isFactoryEnabled()).isTrue()) .then(() -> assertThat(VirtualTimeScheduler.get()).isSameAs(vts)) .expectNext("foo") .verifyComplete(); assertThat(VirtualTimeScheduler.isFactoryEnabled()).isFalse(); StepVerifier.withVirtualTime(() -> Mono.just("foo")) .then(() -> assertThat(VirtualTimeScheduler.isFactoryEnabled()).isTrue()) .then(() -> assertThat(VirtualTimeScheduler.get()).isNotSameAs(vts)) .expectNext("foo") .verifyComplete(); assertThat(VirtualTimeScheduler.isFactoryEnabled()).isFalse(); } @Test public void captureNowInScheduledTask() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(true); List<Long> singleExecutionsTimestamps = new ArrayList<>(); List<Long> periodicExecutionTimestamps = new ArrayList<>(); try { vts.advanceTimeBy(Duration.ofMillis(100)); vts.schedule(() -> singleExecutionsTimestamps.add(vts.now(TimeUnit.MILLISECONDS)), 100, TimeUnit.MILLISECONDS); vts.schedule(() -> singleExecutionsTimestamps.add(vts.now(TimeUnit.MILLISECONDS)), 456, TimeUnit.MILLISECONDS); vts.schedulePeriodically(() -> periodicExecutionTimestamps.add(vts.now(TimeUnit.MILLISECONDS)), 0, 100, TimeUnit.MILLISECONDS); 
vts.advanceTimeBy(Duration.ofMillis(1000)); assertThat(singleExecutionsTimestamps) .as("single executions") .containsExactly(100L, 456L + 100L); assertThat(periodicExecutionTimestamps) .as("periodic executions") .containsExactly(100L, 200L, 300L, 400L, 500L, 600L, 700L, 800L, 900L, 1000L, 1100L); } finally { vts.dispose(); } } @Test public void nestedSchedule() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(); List<Long> singleExecutionsTimestamps = new ArrayList<>(); try { vts.schedule(() -> vts.schedule( () -> singleExecutionsTimestamps.add(vts.now(TimeUnit.MILLISECONDS)), 100, TimeUnit.MILLISECONDS ), 300, TimeUnit.MILLISECONDS); vts.advanceTimeBy(Duration.ofMillis(1000)); assertThat(singleExecutionsTimestamps) .as("single executions") .containsExactly(400L); } finally { vts.dispose(); } } @Test public void racingAdvanceTimeOnEmptyQueue() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(); try { for (int i = 1; i <= 100; i++) { RaceTestUtils.race( () -> vts.advanceTimeBy(Duration.ofSeconds(10)), () -> vts.advanceTimeBy(Duration.ofSeconds(3))); assertThat(vts.now(TimeUnit.MILLISECONDS)) .as("iteration " + i) .isEqualTo(13_000 * i); assertThat(vts.nanoTime) .as("now() == nanoTime in iteration " + i) .isEqualTo(vts.now(TimeUnit.NANOSECONDS)); } } finally { vts.dispose(); } } @Test public void racingAdvanceTimeOnFullQueue() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(); try { vts.schedule(() -> {}, 10, TimeUnit.HOURS); for (int i = 1; i <= 100; i++) { reactor.test.util.RaceTestUtils.race( () -> vts.advanceTimeBy(Duration.ofSeconds(10)), () -> vts.advanceTimeBy(Duration.ofSeconds(3))); assertThat(vts.now(TimeUnit.MILLISECONDS)) .as("now() iteration " + i) .isEqualTo(13_000 * i); assertThat(vts.nanoTime) .as("now() == nanoTime in iteration " + i) .isEqualTo(vts.now(TimeUnit.NANOSECONDS)); } } finally { vts.dispose(); } } @Test public void racingAdvanceTimeOnVaryingQueue() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(true); AtomicInteger count = new AtomicInteger(); try { for (int i = 1; i <= 100; i++) { RaceTestUtils.race( () -> vts.advanceTimeBy(Duration.ofSeconds(10)), () -> vts.advanceTimeBy(Duration.ofSeconds(3))); if (i % 10 == 0) { vts.schedule(count::incrementAndGet, 14, TimeUnit.SECONDS); } assertThat(vts.now(TimeUnit.MILLISECONDS)) .as("now() iteration " + i) .isEqualTo(13_000 * i); } assertThat(count).as("scheduled task run").hasValue(10); assertThat(vts.nanoTime) .as("now() == nanoTime") .isEqualTo(vts.now(TimeUnit.NANOSECONDS)); assertThat(vts.deferredNanoTime).as("cleared deferredNanoTime").isZero(); } finally { vts.dispose(); } } @Test public void scheduledTaskCount() { VirtualTimeScheduler vts = VirtualTimeScheduler.create(); assertThat(vts.getScheduledTaskCount()).as("initial value").isEqualTo(0); vts.schedule(() -> { }); assertThat(vts.getScheduledTaskCount()).as("a task scheduled").isEqualTo(1); } @Test public void scheduledTaskCountWithInitialDelay() { // schedule with delay VirtualTimeScheduler vts = VirtualTimeScheduler.create(); vts.schedule(() -> { }, 10, TimeUnit.DAYS); assertThat(vts.getScheduledTaskCount()).as("scheduled in future").isEqualTo(1); vts.advanceTimeBy(Duration.ofDays(11)); assertThat(vts.getScheduledTaskCount()).as("time advanced").isEqualTo(1); } @Test public void scheduledTaskCountWithNoInitialDelay() { // schedulePeriodically with no initial delay VirtualTimeScheduler vts = VirtualTimeScheduler.create(); vts.schedulePeriodically(() -> { }, 0, 5, TimeUnit.DAYS); assertThat(vts.getScheduledTaskCount()) 
.as("initial delay task performed and scheduled for the first periodical task") .isEqualTo(2); vts.advanceTimeBy(Duration.ofDays(5)); assertThat(vts.getScheduledTaskCount()) .as("scheduled for the second periodical task") .isEqualTo(3); } @Test public void scheduledTaskCountBySchedulePeriodically() { // schedulePeriodically with initial delay VirtualTimeScheduler vts = VirtualTimeScheduler.create(); vts.schedulePeriodically(() -> { }, 10, 5, TimeUnit.DAYS); assertThat(vts.getScheduledTaskCount()) .as("scheduled for initial delay task") .isEqualTo(1); vts.advanceTimeBy(Duration.ofDays(1)); assertThat(vts.getScheduledTaskCount()) .as("Still on initial delay") .isEqualTo(1); vts.advanceTimeBy(Duration.ofDays(10)); assertThat(vts.getScheduledTaskCount()) .as("first periodical task scheduled after initial one") .isEqualTo(2); vts.advanceTimeBy(Duration.ofDays(5)); assertThat(vts.getScheduledTaskCount()) .as("second periodical task scheduled") .isEqualTo(3); } @Test public void getOrSetWithDefer() { AtomicReference<VirtualTimeScheduler> vts1 = new AtomicReference<>(); AtomicReference<VirtualTimeScheduler> vts2 = new AtomicReference<>(); RaceTestUtils.race( () -> vts1.set(VirtualTimeScheduler.getOrSet(true)), () -> vts2.set(VirtualTimeScheduler.getOrSet(true)) ); assertThat(vts1.get().defer).isTrue(); assertThat(vts2.get()).isSameAs(vts1.get()); } @Test public void resetRestoresSnapshotOfSchedulers() { AtomicInteger singleCreated = new AtomicInteger(); Schedulers.Factory customFactory = new Schedulers.Factory() { @Override public Scheduler newSingle(ThreadFactory threadFactory) { singleCreated.incrementAndGet(); return Schedulers.Factory.super.newSingle(threadFactory); } }; Schedulers.setFactory(customFactory); Scheduler originalScheduler = Schedulers.single(); assertThat(singleCreated).as("created custom pre VTS").hasValue(1); //replace custom factory with VTS factory VirtualTimeScheduler.getOrSet(); // trigger cache of VTS in CACHED_SINGLE Scheduler vtsScheduler = Schedulers.single(); assertThat(singleCreated).as("after VTS setup").hasValue(1); assertThat(vtsScheduler).as("shared scheduler replaced").isNotSameAs(originalScheduler); assertThat(originalScheduler.isDisposed()).as("original isDisposed").isFalse(); //attempt to restore the original schedulers and factory VirtualTimeScheduler.reset(); Scheduler postResetSharedScheduler = Schedulers.single(); Scheduler postResetNewScheduler = Schedulers.newSingle("ignored"); postResetNewScheduler.dispose(); assertThat(singleCreated).as("total custom created").hasValue(2); assertThat(postResetSharedScheduler).as("shared restored").isSameAs(originalScheduler); assertThat(postResetNewScheduler).as("new from restoredgt").isNotInstanceOf(VirtualTimeScheduler.class); } @Test public void doubleCreationOfVtsCorrectlyResetsOriginalCustomFactory() { AtomicInteger singleCreated = new AtomicInteger(); Schedulers.Factory customFactory = new Schedulers.Factory() { @Override public Scheduler newSingle(ThreadFactory threadFactory) { singleCreated.incrementAndGet(); return Schedulers.Factory.super.newSingle(threadFactory); } }; Schedulers.setFactory(customFactory); Scheduler originalScheduler = Schedulers.single(); assertThat(singleCreated).as("created custom pre VTS").hasValue(1); //replace custom factory with VTS factory VirtualTimeScheduler.getOrSet(); // trigger cache of VTS in CACHED_SINGLE Scheduler vtsScheduler = Schedulers.single(); assertThat(singleCreated).as("after 1st VTS setup").hasValue(1); assertThat(vtsScheduler).as("shared scheduler 1st 
replaced").isNotSameAs(originalScheduler); assertThat(originalScheduler.isDisposed()).as("original isDisposed").isFalse(); //force replacing VTS factory by another VTS factory VirtualTimeScheduler.set(VirtualTimeScheduler.create()); // trigger cache of VTS in CACHED_SINGLE Scheduler vtsScheduler2 = Schedulers.single(); assertThat(singleCreated).as("after 2nd VTS setup").hasValue(1); assertThat(vtsScheduler2).as("shared scheduler 2nd replaced") .isNotSameAs(originalScheduler) .isNotSameAs(vtsScheduler); assertThat(originalScheduler.isDisposed()).as("original isDisposed").isFalse(); //attempt to restore the original schedulers and factory VirtualTimeScheduler.reset(); Scheduler postResetSharedScheduler = Schedulers.single(); Scheduler postResetNewScheduler = Schedulers.newSingle("ignored"); postResetNewScheduler.dispose(); assertThat(singleCreated).as("total custom created").hasValue(2); assertThat(postResetSharedScheduler).as("shared restored").isSameAs(originalScheduler); assertThat(postResetNewScheduler).as("new from restoredgt").isNotInstanceOf(VirtualTimeScheduler.class); } @Test void scheduledTaskShouldBeDisposedAfterExecution() { VirtualTimeScheduler vtScheduler = VirtualTimeScheduler.create(); Disposable immediateDisposable = vtScheduler.schedule(() -> { }); assertThat(immediateDisposable.isDisposed()).isTrue(); Duration scheduleDelayDuration = Duration.ofSeconds(10); Disposable scheduledDisposable = vtScheduler.schedule(() -> { }, scheduleDelayDuration.getSeconds(), TimeUnit.SECONDS); assertThat(scheduledDisposable.isDisposed()).isFalse(); vtScheduler.advanceTimeBy(scheduleDelayDuration); assertThat(scheduledDisposable.isDisposed()).isTrue(); } @Test void scheduledTaskShouldNotBeExecutedIfDisposed() { VirtualTimeScheduler vtScheduler = VirtualTimeScheduler.create(); Duration scheduleDelayDuration = Duration.ofSeconds(10); Disposable scheduledDisposable = vtScheduler.schedule(() -> Assertions.fail( "This task should not be executed, because it was disposed of" + " beforehand."), scheduleDelayDuration.getSeconds(), TimeUnit.SECONDS); scheduledDisposable.dispose(); assertThat(scheduledDisposable.isDisposed()).isTrue(); vtScheduler.advanceTimeBy(scheduleDelayDuration); assertThat(scheduledDisposable.isDisposed()).isTrue(); } @SuppressWarnings("unchecked") private static Scheduler uncache(Scheduler potentialCached) { if (potentialCached instanceof Supplier) { return ((Supplier<Scheduler>) potentialCached).get(); } return potentialCached; } @AfterEach public void cleanup() { VirtualTimeScheduler.reset(); } }
target: VirtualTimeSchedulerTests
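
The VirtualTimeScheduler exercised above is normally consumed through StepVerifier.withVirtualTime, which installs a virtual-time scheduler before the tested publisher is created (the same API the disposedSchedulerIsStillCleanedUp test uses). A minimal sketch:

```java
import java.time.Duration;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

public class VirtualTimeExample {
    public static void main(String[] args) {
        // The supplier runs after the virtual-time scheduler is installed,
        // so the one-hour delay below is simulated instead of actually elapsing.
        StepVerifier.withVirtualTime(() -> Mono.just("tick").delayElement(Duration.ofHours(1)))
                .expectSubscription()
                .expectNoEvent(Duration.ofMinutes(59))
                .thenAwait(Duration.ofMinutes(1))
                .expectNext("tick")
                .verifyComplete();
    }
}
```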

language: java
repo: google__gson
path: test-shrinker/src/main/java/com/example/ClassWithJsonAdapterAnnotation.java
class_span: { "start": 3898, "end": 4081 }
source:
class ____ to work around incorrect delegation behavior for JsonSerializer // and JsonDeserializer used with @JsonAdapter, see https://github.com/google/gson/issues/1783 static
target: mainly

language: java
repo: apache__flink
path: flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/Canceling.java
class_span: { "start": 1447, "end": 2788 }
source:
class ____ extends StateWithExecutionGraph { private final Context context; Canceling( Context context, ExecutionGraph executionGraph, ExecutionGraphHandler executionGraphHandler, OperatorCoordinatorHandler operatorCoordinatorHandler, Logger logger, ClassLoader userCodeClassLoader, List<ExceptionHistoryEntry> failureCollection) { super( context, executionGraph, executionGraphHandler, operatorCoordinatorHandler, logger, userCodeClassLoader, failureCollection); this.context = context; getExecutionGraph().cancel(); } @Override public JobStatus getJobStatus() { return JobStatus.CANCELLING; } @Override public void cancel() { // we are already in the state canceling } @Override void onFailure(Throwable failure, CompletableFuture<Map<String, String>> failureLabels) { // Execution graph is already cancelling, so there is nothing more we can do. } @Override void onGloballyTerminalState(JobStatus globallyTerminalState) { context.goToFinished(ArchivedExecutionGraph.createFrom(getExecutionGraph())); } static
target: Canceling

language: java
repo: apache__flink
path: flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/data/util/DataFormatConverters.java
class_span: { "start": 41509, "end": 42326 }
source:
class ____ extends DataFormatConverter<ArrayData, float[]> { private static final long serialVersionUID = -3237695040861141459L; public static final PrimitiveFloatArrayConverter INSTANCE = new PrimitiveFloatArrayConverter(); private PrimitiveFloatArrayConverter() {} @Override ArrayData toInternalImpl(float[] value) { return new GenericArrayData(value); } @Override float[] toExternalImpl(ArrayData value) { return value.toFloatArray(); } @Override float[] toExternalImpl(RowData row, int column) { return toExternalImpl(row.getArray(column)); } } /** Converter for primitive double array. */ public static final
target: PrimitiveFloatArrayConverter

language: java
repo: quarkusio__quarkus
path: extensions/kubernetes/spi/src/main/java/io/quarkus/kubernetes/spi/KubernetesOutputDirectoryBuildItem.java
class_span: { "start": 227, "end": 539 }
source:
class ____ extends SimpleBuildItem { private final Path outputDirectory; public KubernetesOutputDirectoryBuildItem(Path outputDirectory) { this.outputDirectory = outputDirectory; } public Path getOutputDirectory() { return outputDirectory; } }
target: KubernetesOutputDirectoryBuildItem
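
Build items such as the one above are produced by one build step and consumed by another through method parameters, as the KubernetesDeployer row further down illustrates. A minimal sketch of a consuming step (the processor class and method names are illustrative):

```java
import java.nio.file.Path;

import io.quarkus.deployment.annotations.BuildStep;

// Illustrative processor; Quarkus injects build items declared as parameters.
class OutputDirLoggingProcessor {

    @BuildStep
    void logOutputDirectory(KubernetesOutputDirectoryBuildItem outputDir) {
        Path dir = outputDir.getOutputDirectory();
        System.out.println("Kubernetes manifests will be written to " + dir);
    }
}
```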

language: java
repo: quarkusio__quarkus
path: extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/proxy/fakedns/FakeDNSServer.java
class_span: { "start": 17364, "end": 18200 }
source:
class ____ implements ProtocolCodecFactory { @Override public ProtocolEncoder getEncoder(IoSession session) throws Exception { return new DnsUdpEncoder() { @Override public void encode(IoSession session, Object message, ProtocolEncoderOutput out) { IoBuffer buf = IoBuffer.allocate(1024); FakeDNSServer.this.encode((DnsMessage) message, buf); buf.flip(); out.write(buf); } }; } @Override public ProtocolDecoder getDecoder(IoSession session) throws Exception { return new DnsUdpDecoder(); } } /** * ProtocolCodecFactory which allows to test AAAA resolution */ private final
target: TestDnsProtocolUdpCodecFactory

language: java
repo: apache__flink
path: flink-core/src/main/java/org/apache/flink/core/io/IOReadableWritable.java
class_span: { "start": 1376, "end": 1447 }
source:
class ____ a default * (zero-argument) constructor! */ @Public public
target: has

language: java
repo: elastic__elasticsearch
path: server/src/internalClusterTest/java/org/elasticsearch/index/IndexingPressureIT.java
class_span: { "start": 35860, "end": 36681 }
source:
class ____ extends Plugin { public static AtomicReference<CheckedBiConsumer<ShardId, Engine.Index, Exception>> PRE_INDEX_CHECK_REF = new AtomicReference<>( ((shardId, index) -> {}) ); public PreIndexListenerInstallerPlugin() {} @Override public void onIndexModule(IndexModule indexModule) { indexModule.addIndexOperationListener(new InjectablePreIndexOperationListener(PRE_INDEX_CHECK_REF)); } public static void installPreIndexListener(CheckedBiConsumer<ShardId, Engine.Index, Exception> preIndexCheck) { PRE_INDEX_CHECK_REF.set(preIndexCheck); } public static void resetPreIndexListener() { PRE_INDEX_CHECK_REF.set((shardId, index) -> {}); } } static
target: PreIndexListenerInstallerPlugin

language: java
repo: elastic__elasticsearch
path: x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/RewriteableAware.java
class_span: { "start": 499, "end": 1023 }
source:
interface ____ extends TranslationAware { /** * @return The current active query builder. */ QueryBuilder queryBuilder(); /** * Replaces the current query builder with a rewritten iteration. This happens multiple times through the rewrite phase until * the final iteration of the query builder is stored. * @param queryBuilder QueryBuilder * @return Expression defining the active QueryBuilder */ Expression replaceQueryBuilder(QueryBuilder queryBuilder); }
target: RewriteableAware

language: java
repo: apache__dubbo
path: dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/reference/javaconfig/JavaConfigReferenceBeanTest.java
class_span: { "start": 18639, "end": 18816 }
source:
class ____ { @DubboReference(group = "${myapp.group}") private HelloService helloService; } @Configuration public static
target: AnnotationAtFieldConfiguration

language: java
repo: apache__dubbo
path: dubbo-remoting/dubbo-remoting-netty/src/test/java/org/apache/dubbo/remoting/transport/netty/ThreadNameTest.java
class_span: { "start": 1605, "end": 3922 }
source:
class ____ { private static final Logger logger = LoggerFactory.getLogger(ThreadNameTest.class); private NettyServer server; private NettyClient client; private URL serverURL; private URL clientURL; private ThreadNameVerifyHandler serverHandler; private ThreadNameVerifyHandler clientHandler; private static String serverRegex = "DubboServerHandler\\-localhost:(\\d+)\\-thread\\-(\\d+)"; private static String clientRegex = "DubboClientHandler\\-thread\\-(\\d+)"; private final CountDownLatch serverLatch = new CountDownLatch(1); private final CountDownLatch clientLatch = new CountDownLatch(1); @BeforeEach public void before() throws Exception { int port = NetUtils.getAvailablePort(20880 + new Random().nextInt(10000)); serverURL = URL.valueOf("telnet://localhost?side=provider&codec=telnet").setPort(port); ApplicationModel applicationModel = ApplicationModel.defaultModel(); ApplicationConfig applicationConfig = new ApplicationConfig("provider-app"); applicationConfig.setExecutorManagementMode(EXECUTOR_MANAGEMENT_MODE_DEFAULT); applicationModel.getApplicationConfigManager().setApplication(applicationConfig); serverURL = serverURL.setScopeModel(applicationModel); clientURL = URL.valueOf("telnet://localhost?side=consumer&codec=telnet").setPort(port); clientURL = clientURL.setScopeModel(applicationModel); serverHandler = new ThreadNameVerifyHandler(serverRegex, false, serverLatch); clientHandler = new ThreadNameVerifyHandler(clientRegex, true, clientLatch); server = new NettyServer(serverURL, serverHandler); client = new NettyClient(clientURL, clientHandler); } @AfterEach public void after() throws Exception { if (client != null) { client.close(); client = null; } if (server != null) { server.close(); server = null; } } @Test void testThreadName() throws Exception { client.send("hello"); serverLatch.await(30, TimeUnit.SECONDS); clientLatch.await(30, TimeUnit.SECONDS); if (!serverHandler.isSuccess() || !clientHandler.isSuccess()) { Assertions.fail(); } }
target: ThreadNameTest

language: java
repo: hibernate__hibernate-orm
path: hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/AnyKeyPart.java
class_span: { "start": 1542, "end": 10654 }
source:
class ____ implements BasicValuedModelPart, FetchOptions { public static final String KEY_NAME = "{key}"; private final NavigableRole navigableRole; private final String table; private final String column; private final SelectablePath selectablePath; private final DiscriminatedAssociationModelPart anyPart; private final @Nullable String customReadExpression; private final @Nullable String customWriteExpression; private final @Nullable String columnDefinition; private final @Nullable Long length; private final @Nullable Integer arrayLength; private final @Nullable Integer precision; private final @Nullable Integer scale; private final boolean nullable; private final boolean insertable; private final boolean updateable; private final boolean partitioned; private final JdbcMapping jdbcMapping; public AnyKeyPart( NavigableRole navigableRole, DiscriminatedAssociationModelPart anyPart, String table, String column, SelectablePath selectablePath, String customReadExpression, String customWriteExpression, String columnDefinition, Long length, Integer precision, Integer scale, boolean nullable, boolean insertable, boolean updateable, boolean partitioned, JdbcMapping jdbcMapping) { this( navigableRole, anyPart, table, column, selectablePath, customReadExpression, customWriteExpression, columnDefinition, length, null, precision, scale, nullable, insertable, updateable, partitioned, jdbcMapping ); } public AnyKeyPart( NavigableRole navigableRole, DiscriminatedAssociationModelPart anyPart, String table, String column, SelectablePath selectablePath, @Nullable String customReadExpression, @Nullable String customWriteExpression, @Nullable String columnDefinition, @Nullable Long length, @Nullable Integer arrayLength, @Nullable Integer precision, @Nullable Integer scale, boolean nullable, boolean insertable, boolean updateable, boolean partitioned, JdbcMapping jdbcMapping) { this.navigableRole = navigableRole; this.table = table; this.column = column; this.selectablePath = selectablePath; this.anyPart = anyPart; this.customReadExpression = customReadExpression; this.customWriteExpression = customWriteExpression; this.columnDefinition = columnDefinition; this.length = length; this.arrayLength = arrayLength; this.precision = precision; this.scale = scale; this.nullable = nullable; this.insertable = insertable; this.updateable = updateable; this.partitioned = partitioned; this.jdbcMapping = jdbcMapping; } @Override public String getContainingTableExpression() { return table; } @Override public String getSelectionExpression() { return column; } @Override public String getSelectableName() { return selectablePath.getSelectableName(); } @Override public SelectablePath getSelectablePath() { return selectablePath; } @Override public boolean isFormula() { return false; } @Override public boolean isNullable() { return nullable; } @Override public boolean isInsertable() { return insertable; } @Override public boolean isUpdateable() { return updateable; } @Override public boolean isPartitioned() { return partitioned; } @Override public @Nullable String getCustomReadExpression() { return customReadExpression; } @Override public @Nullable String getCustomWriteExpression() { return customWriteExpression; } @Override public @Nullable String getColumnDefinition() { return columnDefinition; } @Override public @Nullable Long getLength() { return length; } @Override public @Nullable Integer getArrayLength() { return arrayLength; } @Override public @Nullable Integer getPrecision() { return precision; } @Override public @Nullable 
Integer getScale() { return scale; } @Override public @Nullable Integer getTemporalPrecision() { return null; } @Override public JdbcMapping getJdbcMapping() { return jdbcMapping; } @Override public JavaType<?> getJavaType() { return jdbcMapping.getMappedJavaType(); } @Override public String getPartName() { return KEY_NAME; } @Override public NavigableRole getNavigableRole() { return navigableRole; } @Override public EntityMappingType findContainingEntityMapping() { return anyPart.findContainingEntityMapping(); } @Override public MappingType getMappedType() { return jdbcMapping; } @Override public String getFetchableName() { return getPartName(); } @Override public int getFetchableKey() { return 1; } @Override public FetchOptions getMappedFetchOptions() { return this; } @Override public Fetch generateFetch( FetchParent fetchParent, NavigablePath fetchablePath, FetchTiming fetchTiming, boolean selected, String resultVariable, DomainResultCreationState creationState) { final var sqlAstCreationState = creationState.getSqlAstCreationState(); final var fromClauseAccess = sqlAstCreationState.getFromClauseAccess(); final var sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver(); final var tableGroup = fromClauseAccess.getTableGroup( fetchParent.getNavigablePath().getParent() ); final var tableReference = tableGroup.resolveTableReference( fetchablePath, table ); final var columnReference = sqlExpressionResolver.resolveSqlExpression( tableReference, this ); final var sqlSelection = sqlExpressionResolver.resolveSqlSelection( columnReference, jdbcMapping.getJdbcJavaType(), fetchParent, sqlAstCreationState.getCreationContext().getTypeConfiguration() ); return new BasicFetch<>( sqlSelection.getValuesArrayPosition(), fetchParent, fetchablePath, this, fetchTiming, creationState, !sqlSelection.isVirtual() ); } @Override public FetchStyle getStyle() { return FetchStyle.SELECT; } @Override public FetchTiming getTiming() { return FetchTiming.IMMEDIATE; } @Override public <X, Y> int breakDownJdbcValues( Object domainValue, int offset, X x, Y y, JdbcValueBiConsumer<X, Y> valueConsumer, SharedSessionContractImplementor session) { valueConsumer.consume( offset, x, y, domainValue, this ); return getJdbcTypeCount(); } @Override public int forEachJdbcType(int offset, IndexedConsumer<JdbcMapping> action) { action.accept( offset, jdbcMapping ); return getJdbcTypeCount(); } @Override public <X, Y> int forEachJdbcValue( Object value, int offset, X x, Y y, JdbcValuesBiConsumer<X, Y> valuesConsumer, SharedSessionContractImplementor session) { valuesConsumer.consume( offset, x, y, value, jdbcMapping ); return getJdbcTypeCount(); } @Override public JdbcMapping getJdbcMapping(int index) { if ( index != 0 ) { throw new IndexOutOfBoundsException( index ); } return jdbcMapping; } @Override public JdbcMapping getSingleJdbcMapping() { return jdbcMapping; } @Override public Object disassemble(Object value, SharedSessionContractImplementor session) { return value; } @Override public <X, Y> int forEachDisassembledJdbcValue( Object value, int offset, X x, Y y, JdbcValuesBiConsumer<X, Y> valuesConsumer, SharedSessionContractImplementor session) { valuesConsumer.consume( offset, x, y, value, jdbcMapping ); return 1; } @Override public <T> DomainResult<T> createDomainResult( NavigablePath navigablePath, TableGroup tableGroup, String resultVariable, DomainResultCreationState creationState) { final var sqlSelection = resolveSqlSelection( navigablePath, tableGroup, creationState ); return new BasicResult<>( 
sqlSelection.getValuesArrayPosition(), resultVariable, jdbcMapping, navigablePath, false, !sqlSelection.isVirtual() ); } @Override public void applySqlSelections( NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState) { resolveSqlSelection( navigablePath, tableGroup, creationState ); } @Override public void applySqlSelections( NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState, BiConsumer<SqlSelection, JdbcMapping> selectionConsumer) { selectionConsumer.accept( resolveSqlSelection( navigablePath, tableGroup, creationState ), getJdbcMapping() ); } private SqlSelection resolveSqlSelection( NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState) { final var tableReference = tableGroup.resolveTableReference( navigablePath, this, getContainingTableExpression() ); final var sqlAstCreationState = creationState.getSqlAstCreationState(); final var expressionResolver = sqlAstCreationState.getSqlExpressionResolver(); return expressionResolver.resolveSqlSelection( expressionResolver.resolveSqlExpression( tableReference, this ), jdbcMapping.getJdbcJavaType(), null, sqlAstCreationState.getCreationContext().getTypeConfiguration() ); } }
target: AnyKeyPart

language: java
repo: elastic__elasticsearch
path: modules/lang-painless/src/test/java/org/elasticsearch/painless/NumberTests.java
class_span: { "start": 539, "end": 8439 }
source:
class ____ extends ScriptTestCase { public void testAssignmentAdditionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;")); // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; x += -1.7976931348623157E308; return x;")); } public void testAssignmentSubtractionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;")); // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; x -= 1.7976931348623157E308; return x;")); } public void testAssignmentMultiplicationOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= -3.4028234663852886E38f; return x;")); // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= -1.7976931348623157E308; return x;")); } public void testAssignmentDivisionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= 1.401298464324817E-45f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= -1.401298464324817E-45f; return x;")); assertEquals(Float.POSITIVE_INFINITY, exec("float x = 1.0f; x /= 0.0f; return x;")); // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= -4.9E-324; return x;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.0f; x /= 0.0; return x;")); } public void testAddition() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x + y;")); } public void testAdditionConst() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 + 1.7976931348623157E308;")); } public void testSubtraction() throws Exception { assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;")); assertEquals( Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;") ); } public void testSubtractionConst() throws Exception { assertEquals(Float.NEGATIVE_INFINITY, exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;")); assertEquals(Double.NEGATIVE_INFINITY, exec("return -1.7976931348623157E308 - 
1.7976931348623157E308;")); } public void testMultiplication() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x * y;")); } public void testMultiplicationConst() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 * 1.7976931348623157E308;")); } public void testDivision() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 1.401298464324817E-45f; return x / y;")); assertEquals(Float.POSITIVE_INFINITY, exec("float x = 1.0f; float y = 0.0f; return x / y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 4.9E-324; return x / y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.0; double y = 0.0; return x / y;")); } public void testDivisionConst() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;")); assertEquals(Float.POSITIVE_INFINITY, exec("return 1.0f / 0.0f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 / 4.9E-324;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.0 / 0.0;")); } public void testDivisionNaN() throws Exception { // float division, constant division, and assignment assertTrue(Float.isNaN((Float) exec("float x = 0f; float y = 0f; return x / y;"))); assertTrue(Float.isNaN((Float) exec("return 0f / 0f;"))); assertTrue(Float.isNaN((Float) exec("float x = 0f; x /= 0f; return x;"))); // double division, constant division, and assignment assertTrue(Double.isNaN((Double) exec("double x = 0.0; double y = 0.0; return x / y;"))); assertTrue(Double.isNaN((Double) exec("return 0.0 / 0.0;"))); assertTrue(Double.isNaN((Double) exec("double x = 0.0; x /= 0.0; return x;"))); } public void testRemainderNaN() throws Exception { // float division, constant division, and assignment assertTrue(Float.isNaN((Float) exec("float x = 1f; float y = 0f; return x % y;"))); assertTrue(Float.isNaN((Float) exec("return 1f % 0f;"))); assertTrue(Float.isNaN((Float) exec("float x = 1f; x %= 0f; return x;"))); // double division, constant division, and assignment assertTrue(Double.isNaN((Double) exec("double x = 1.0; double y = 0.0; return x % y;"))); assertTrue(Double.isNaN((Double) exec("return 1.0 % 0.0;"))); assertTrue(Double.isNaN((Double) exec("double x = 1.0; x %= 0.0; return x;"))); } public void testHexCollisionDouble() { assertEquals(0xd, exec("return 0xd")); assertEquals(0x0d, exec("return 0x0d")); assertEquals(0x1d, exec("return 0x1d")); assertEquals(0xdd, exec("return 0xdd")); assertEquals(1d, exec("return 1d")); } public void testHexCollisionFloat() { assertEquals(0xf, exec("return 0xf")); assertEquals(0x0f, exec("return 0x0f")); assertEquals(0x1f, exec("return 0x1f")); assertEquals(0xff, exec("return 0xff")); assertEquals(1f, exec("return 1f")); } public void testHex() { for (int i = 0; i <= 0xf; i++) { String hex = Integer.toHexString(i); assertEquals(i, exec("return 0x" + hex)); assertEquals(i, exec("return 0x" + hex.toUpperCase(Locale.ROOT))); assertEquals(i, exec("return 0X" + hex)); assertEquals(i, exec("return 0X" + hex.toUpperCase(Locale.ROOT))); } } }
target: NumberTests
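
The overflow and NaN semantics asserted above match plain Java IEEE-754 arithmetic, which Painless inherits. A minimal Java equivalent:

```java
public class Ieee754Demo {
    public static void main(String[] args) {
        float x = 3.4028234663852886E38f;  // Float.MAX_VALUE
        x += x;
        System.out.println(x);             // Infinity

        double d = 1.7976931348623157E308; // Double.MAX_VALUE
        System.out.println(d / 4.9E-324);  // Infinity (dividing by Double.MIN_VALUE)

        System.out.println(0.0 / 0.0);     // NaN
        System.out.println(1f % 0f);       // NaN
    }
}
```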

language: java
repo: netty__netty
path: resolver/src/main/java/io/netty/resolver/RoundRobinInetAddressResolver.java
class_span: { "start": 1416, "end": 4674 }
source:
class ____ extends InetNameResolver { private final NameResolver<InetAddress> nameResolver; /** * @param executor the {@link EventExecutor} which is used to notify the listeners of the {@link Future} returned by * {@link #resolve(String)} * @param nameResolver the {@link NameResolver} used for name resolution */ public RoundRobinInetAddressResolver(EventExecutor executor, NameResolver<InetAddress> nameResolver) { super(executor); this.nameResolver = nameResolver; } @Override protected void doResolve(final String inetHost, final Promise<InetAddress> promise) throws Exception { // hijack the doResolve request, but do a doResolveAll request under the hood. // Note that InetSocketAddress.getHostName() will never incur a reverse lookup here, // because an unresolved address always has a host name. nameResolver.resolveAll(inetHost).addListener(new FutureListener<List<InetAddress>>() { @Override public void operationComplete(Future<List<InetAddress>> future) throws Exception { if (future.isSuccess()) { List<InetAddress> inetAddresses = future.getNow(); int numAddresses = inetAddresses.size(); if (numAddresses > 0) { // if there are multiple addresses: we shall pick one by one // to support the round robin distribution promise.setSuccess(inetAddresses.get(randomIndex(numAddresses))); } else { promise.setFailure(new UnknownHostException(inetHost)); } } else { promise.setFailure(future.cause()); } } }); } @Override protected void doResolveAll(String inetHost, final Promise<List<InetAddress>> promise) throws Exception { nameResolver.resolveAll(inetHost).addListener(new FutureListener<List<InetAddress>>() { @Override public void operationComplete(Future<List<InetAddress>> future) throws Exception { if (future.isSuccess()) { List<InetAddress> inetAddresses = future.getNow(); if (!inetAddresses.isEmpty()) { // create a copy to make sure that it's modifiable random access collection List<InetAddress> result = new ArrayList<InetAddress>(inetAddresses); // rotate by different distance each time to force round robin distribution Collections.rotate(result, randomIndex(inetAddresses.size())); promise.setSuccess(result); } else { promise.setSuccess(inetAddresses); } } else { promise.setFailure(future.cause()); } } }); } private static int randomIndex(int numAddresses) { return numAddresses == 1 ? 0 : ThreadLocalRandom.current().nextInt(numAddresses); } @Override public void close() { nameResolver.close(); } }
target: RoundRobinInetAddressResolver
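
A round-robin resolver like the one above wraps another NameResolver that performs the actual lookups. A minimal sketch assuming Netty's DefaultNameResolver as the delegate (the host name is illustrative):

```java
import java.net.InetAddress;

import io.netty.resolver.DefaultNameResolver;
import io.netty.resolver.RoundRobinInetAddressResolver;
import io.netty.util.concurrent.DefaultEventExecutor;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.Future;

public class RoundRobinExample {
    public static void main(String[] args) throws Exception {
        EventExecutor executor = new DefaultEventExecutor();
        try {
            // Delegate that performs the underlying name resolution.
            DefaultNameResolver delegate = new DefaultNameResolver(executor);
            // Each resolve(...) picks one of the delegate's results,
            // spreading repeated lookups across all returned addresses.
            RoundRobinInetAddressResolver resolver =
                    new RoundRobinInetAddressResolver(executor, delegate);
            Future<InetAddress> address = resolver.resolve("example.com");
            System.out.println(address.sync().getNow());
            resolver.close();
        } finally {
            executor.shutdownGracefully();
        }
    }
}
```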

language: java
repo: elastic__elasticsearch
path: server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java
class_span: { "start": 3144, "end": 6663 }
source:
class ____ { /* * We want to detect situations where we touch logging before the configuration is loaded. If we do this, Log4j will status log an error * message at the error level. With this error listener, we can capture if this happens. More broadly, we can detect any error-level * status log message which likely indicates that something is broken. The listener is installed immediately on startup, and then when * we get around to configuring logging we check that no error-level log messages have been logged by the status logger. If they have we * fail startup and any such messages can be seen on the console. */ private static final AtomicBoolean error = new AtomicBoolean(); private static final StatusListener ERROR_LISTENER = new StatusConsoleListener(Level.ERROR) { @Override public void log(StatusData data) { error.set(true); super.log(data); } }; private static Appender consoleAppender; /** * Registers a listener for status logger errors. This listener should be registered as early as possible to ensure that no errors are * logged by the status logger before logging is configured. */ public static void registerErrorListener() { error.set(false); StatusLogger.getLogger().registerListener(ERROR_LISTENER); } /** * Configure logging without reading a log4j2.properties file, effectively configuring the * status logger and all loggers to the console. * * @param settings for configuring logger.level and individual loggers */ public static void configureWithoutConfig(final Settings settings) { Objects.requireNonNull(settings); configureESLogging(); // we initialize the status logger immediately otherwise Log4j will complain when we try to get the context configureStatusLogger(); configureLoggerLevels(settings); } /** * Configure logging reading from any log4j2.properties found in the config directory and its * subdirectories from the specified environment. Will also configure logging to point the logs * directory from the specified environment. * * @param environment the environment for reading configs and the logs path * @param useConsole whether a console appender should exist * @throws IOException if there is an issue readings any log4j2.properties in the config * directory */ public static void configure(final Environment environment, boolean useConsole) throws IOException { Objects.requireNonNull(environment); try { // we are about to configure logging, check that the status logger did not log any error-level messages checkErrorListener(); } finally { // whether or not the error listener check failed we can remove the listener now StatusLogger.getLogger().removeListener(ERROR_LISTENER); } configureESLogging(); configure(environment.settings(), environment.configDir(), environment.logsDir(), useConsole); initializeStatics(); // creates a permanent status logger that can watch for StatusLogger events and forward to a real logger configureStatusLoggerForwarder(); } private static void configureStatusLoggerForwarder() { // the real logger is lazily retrieved here since logging won't yet be setup during clinit of this
target: LogConfigurator

language: java
repo: quarkusio__quarkus
path: test-framework/common/src/main/java/io/quarkus/test/common/RestAssuredURLManager.java
class_span: { "start": 1044, "end": 5618 }
source:
class ____ being loaded if RestAssured is not present private static Object oldRequestSpecification; private static final boolean REST_ASSURED_PRESENT; static { boolean present = false; try { Class.forName("io.restassured.RestAssured"); present = true; } catch (ClassNotFoundException ignored) { } REST_ASSURED_PRESENT = present; } private RestAssuredURLManager() { } private static int getPortFromConfig(int defaultValue, String... keys) { for (String key : keys) { Optional<Integer> port = ConfigProvider.getConfig().getOptionalValue(key, Integer.class); if (port.isPresent()) return port.get(); } return defaultValue; } public static void setURL(boolean useSecureConnection) { setURL(useSecureConnection, null, null); } public static void setURL(boolean useSecureConnection, String additionalPath) { setURL(useSecureConnection, null, additionalPath); } public static void setURL(boolean useSecureConnection, Integer port) { setURL(useSecureConnection, port, null); } public static void setURL(boolean useSecureConnection, Integer port, String additionalPath) { if (!REST_ASSURED_PRESENT) { return; } oldPort = RestAssured.port; if (port == null) { port = useSecureConnection ? getPortFromConfig(DEFAULT_HTTPS_PORT, "quarkus.http.test-ssl-port") : getPortFromConfig(DEFAULT_HTTP_PORT, "quarkus.lambda.mock-event-server.test-port", "quarkus.http.test-port"); } RestAssured.port = port; oldBaseURI = RestAssured.baseURI; final String protocol = useSecureConnection ? "https://" : "http://"; String host = ConfigProvider.getConfig().getOptionalValue("quarkus.http.host", String.class) .orElse("localhost"); if (host.equals("0.0.0.0")) { host = "localhost"; } RestAssured.baseURI = protocol + host; oldBasePath = RestAssured.basePath; Optional<String> basePath = ConfigProvider.getConfig().getOptionalValue("quarkus.http.root-path", String.class); if (basePath.isPresent() || additionalPath != null) { StringBuilder bp = new StringBuilder(); if (basePath.isPresent()) { if (basePath.get().startsWith("/")) { bp.append(basePath.get().substring(1)); } else { bp.append(basePath.get()); } if (bp.toString().endsWith("/")) { bp.setLength(bp.length() - 1); } } if (additionalPath != null) { if (!additionalPath.startsWith("/")) { bp.append("/"); } bp.append(additionalPath); if (bp.toString().endsWith("/")) { bp.setLength(bp.length() - 1); } } RestAssured.basePath = bp.toString(); } oldRestAssuredConfig = RestAssured.config(); Duration timeout = ConfigProvider.getConfig() .getOptionalValue("quarkus.http.test-timeout", Duration.class).orElse(Duration.ofSeconds(30)); configureTimeouts(timeout); oldRequestSpecification = RestAssured.requestSpecification; if (ConfigProvider.getConfig() .getOptionalValue("quarkus.test.rest-assured.enable-logging-on-failure", Boolean.class).orElse(true)) { RestAssured.enableLoggingOfRequestAndResponseIfValidationFails(); } } private static void configureTimeouts(Duration d) { RestAssured.config = RestAssured.config().httpClient(new HttpClientConfig() .setParam("http.conn-manager.timeout", d.toMillis()) // this needs to be long .setParam("http.connection.timeout", (int) d.toMillis()) // this needs to be int .setParam("http.socket.timeout", (int) d.toMillis())); // same here } public static void clearURL() { if (!REST_ASSURED_PRESENT) { return; } RestAssured.port = oldPort; RestAssured.baseURI = oldBaseURI; RestAssured.basePath = oldBasePath; RestAssured.config = (RestAssuredConfig) oldRestAssuredConfig; RestAssured.requestSpecification = (RequestSpecification) oldRequestSpecification; } }
target: for

language: java
repo: quarkusio__quarkus
path: extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeployer.java
class_span: { "start": 3155, "end": 24283 }
source:
class ____ { private static final Logger log = Logger.getLogger(KubernetesDeployer.class); private static final String CONTAINER_IMAGE_EXTENSIONS_STR = ContainerImageCapabilitiesUtil.CAPABILITY_TO_EXTENSION_NAME .values().stream() .map(s -> "\"" + s + "\"").collect(Collectors.joining(", ")); @BuildStep(onlyIf = IsNormalNotRemoteDev.class) public void selectDeploymentTarget(ContainerImageInfoBuildItem containerImageInfo, EnabledKubernetesDeploymentTargetsBuildItem targets, Capabilities capabilities, ContainerImageConfig containerImageConfig, BuildProducer<SelectedKubernetesDeploymentTargetBuildItem> selectedDeploymentTarget, BuildProducer<PreventImplicitContainerImagePushBuildItem> preventImplicitContainerImagePush) { Optional<String> activeContainerImageCapability = ContainerImageCapabilitiesUtil .getActiveContainerImageCapability(capabilities); //If container image actions are explicitly disabled block deployment even if a container image capability is not present. if (!containerImageConfig.isBuildExplicitlyDisabled() && !containerImageConfig.isPushExplicitlyDisabled() && activeContainerImageCapability.isEmpty()) { // we can't throw an exception here, because it could prevent the Kubernetes resources from being generated return; } final DeploymentTargetEntry selectedTarget = determineDeploymentTarget(containerImageInfo, targets, containerImageConfig); selectedDeploymentTarget.produce(new SelectedKubernetesDeploymentTargetBuildItem(selectedTarget)); if (MINIKUBE.equals(selectedTarget.getName()) || KIND.equals(selectedTarget.getName())) { preventImplicitContainerImagePush.produce(new PreventImplicitContainerImagePushBuildItem()); } } @BuildStep public void checkEnvironment(Optional<SelectedKubernetesDeploymentTargetBuildItem> selectedDeploymentTarget, KubernetesClientBuildItem kubernetesClientBuilder, List<GeneratedKubernetesResourceBuildItem> resources, BuildProducer<KubernetesDeploymentClusterBuildItem> deploymentCluster) { if (!KubernetesDeploy.INSTANCE.checkSilently(kubernetesClientBuilder)) { return; } String target = selectedDeploymentTarget.map(s -> s.getEntry().getName()).orElse(KUBERNETES); if (target.equals(KUBERNETES)) { deploymentCluster.produce(new KubernetesDeploymentClusterBuildItem(KUBERNETES)); } } @BuildStep(onlyIf = IsNormalNotRemoteDev.class) public void deploy(KubernetesClientBuildItem kubernetesClientBuilder, Capabilities capabilities, List<KubernetesDeploymentClusterBuildItem> deploymentClusters, Optional<SelectedKubernetesDeploymentTargetBuildItem> selectedDeploymentTarget, OutputTargetBuildItem outputTarget, KubernetesOutputDirectoryBuildItem outputDirectoryBuildItem, OpenShiftConfig openshiftConfig, ContainerImageConfig containerImageConfig, ApplicationInfoBuildItem applicationInfo, List<KubernetesOptionalResourceDefinitionBuildItem> optionalResourceDefinitions, BuildProducer<DeploymentResultBuildItem> deploymentResult, // needed to ensure that this step runs after the container image has been built @SuppressWarnings("unused") List<ArtifactResultBuildItem> artifactResults) { if (!KubernetesDeploy.INSTANCE.check(kubernetesClientBuilder)) { return; } if (selectedDeploymentTarget.isEmpty()) { if (!containerImageConfig.isBuildExplicitlyDisabled() && !containerImageConfig.isPushExplicitlyDisabled() && ContainerImageCapabilitiesUtil.getActiveContainerImageCapability(capabilities).isEmpty()) { throw new RuntimeException( "A Kubernetes deployment was requested but no extension was found to build a container image. 
Consider adding one of following extensions: " + CONTAINER_IMAGE_EXTENSIONS_STR + "."); } return; } try (final KubernetesClient client = kubernetesClientBuilder.buildClient()) { deploymentResult .produce(deploy(selectedDeploymentTarget.get().getEntry(), client, outputDirectoryBuildItem.getOutputDirectory(), openshiftConfig, applicationInfo, optionalResourceDefinitions)); } } /** * Determine a single deployment target out of the possible options. * * The selection is done as follows: * * If there is no target deployment at all, just use vanilla Kubernetes. This will happen in cases where the user does not * select a deployment target and no extension that specify one is present. * * If the user specifies deployment targets, pick the first one. */ private DeploymentTargetEntry determineDeploymentTarget( ContainerImageInfoBuildItem containerImageInfo, EnabledKubernetesDeploymentTargetsBuildItem targets, ContainerImageConfig containerImageConfig) { final DeploymentTargetEntry selectedTarget; List<String> userSpecifiedDeploymentTargets = KubernetesConfigUtil.getExplicitlyConfiguredDeploymentTargets(); if (userSpecifiedDeploymentTargets.isEmpty()) { selectedTarget = targets.getEntriesSortedByPriority().get(0); if (targets.getEntriesSortedByPriority().size() > 1) { log.info("Selecting target '" + selectedTarget.getName() + "' since it has the highest priority among the implicitly enabled deployment targets"); } } else { String firstUserSpecifiedDeploymentTarget = userSpecifiedDeploymentTargets.get(0); selectedTarget = targets .getEntriesSortedByPriority() .stream() .filter(d -> d.getName().equals(firstUserSpecifiedDeploymentTarget)) .findFirst() .orElseThrow(() -> new IllegalArgumentException("The specified value '" + firstUserSpecifiedDeploymentTarget + "' is not one of the allowed values of \"quarkus.kubernetes.deployment-target\"")); if (userSpecifiedDeploymentTargets.size() > 1) { log.info( "Only the first deployment target (which is '" + firstUserSpecifiedDeploymentTarget + "') selected via \"quarkus.kubernetes.deployment-target\" will be deployed"); } } return selectedTarget; } private DeploymentResultBuildItem deploy(DeploymentTargetEntry deploymentTarget, KubernetesClient client, Path outputDir, OpenShiftConfig openshiftConfig, ApplicationInfoBuildItem applicationInfo, List<KubernetesOptionalResourceDefinitionBuildItem> optionalResourceDefinitions) { String namespace = Optional.ofNullable(client.getNamespace()).orElse("default"); log.info("Deploying to " + deploymentTarget.getName().toLowerCase() + " server: " + client.getMasterUrl() + " in namespace: " + namespace + "."); File manifest = outputDir.resolve(deploymentTarget.getName().toLowerCase() + ".yml").toFile(); try (FileInputStream fis = new FileInputStream(manifest)) { KubernetesList list = Serialization.unmarshalAsList(fis); Optional<GenericKubernetesResource> conflictingResource = findConflictingResource(client, deploymentTarget, list.getItems()); if (conflictingResource.isPresent()) { String messsage = "Skipping deployment of " + deploymentTarget.getDeploymentResourceKind() + " " + conflictingResource.get().getMetadata().getName() + " because a " + conflictingResource.get().getKind() + " with the same name exists."; log.warn(messsage); log.warn("This may occur when switching deployment targets, or when the default deployment target is changed."); log.warn("Please remove conflicting resource and try again."); throw new IllegalStateException(messsage); } list.getItems().stream().filter(distinctByResourceKey()).forEach(i -> { 
Optional<HasMetadata> existing = Optional.ofNullable(client.resource(i).get()); checkLabelSelectorVersions(deploymentTarget, i, existing); }); list.getItems().stream().filter(distinctByResourceKey()).forEach(i -> { deployResource(deploymentTarget, client, i, optionalResourceDefinitions); log.info("Applied: " + i.getKind() + " " + i.getMetadata().getName() + "."); }); printExposeInformation(client, list, openshiftConfig, applicationInfo); HasMetadata m = list.getItems().stream() .filter(r -> deploymentTarget.getDeploymentResourceKind().matches(r)) .findFirst().orElseThrow(() -> new IllegalStateException( "No " + deploymentTarget.getDeploymentResourceKind() + " found under: " + manifest.getAbsolutePath())); return new DeploymentResultBuildItem(m.getMetadata().getName(), m.getMetadata().getLabels()); } catch (FileNotFoundException e) { throw new IllegalStateException("Can't find generated kubernetes manifest: " + manifest.getAbsolutePath()); } catch (KubernetesClientException e) { KubernetesClientErrorHandler.handle(e); throw e; } catch (IOException e) { throw new RuntimeException("Error closing file: " + manifest.getAbsolutePath()); } } private void deployResource(DeploymentTargetEntry deploymentTarget, KubernetesClient client, HasMetadata metadata, List<KubernetesOptionalResourceDefinitionBuildItem> optionalResourceDefinitions) { var r = findResource(client, metadata); Optional<HasMetadata> existing = Optional.ofNullable(client.resource(metadata).get()); if (shouldDeleteExisting(deploymentTarget, metadata)) { deleteResource(metadata, r); } try { switch (deploymentTarget.getDeployStrategy()) { case Create: r.create(); break; case Replace: r.replace(); break; case ServerSideApply: r.patch(PatchContext.of(PatchType.SERVER_SIDE_APPLY)); break; default: r.createOrReplace(); break; } } catch (Exception e) { if (e instanceof InterruptedException) { throw e; } else if (isOptional(optionalResourceDefinitions, metadata)) { log.warn("Failed to apply: " + metadata.getKind() + " " + metadata.getMetadata().getName() + ", possibly due to a missing CRD for apiVersion: " + metadata.getApiVersion() + " and kind: " + metadata.getKind() + "."); } else { throw e; } } } private Optional<GenericKubernetesResource> findConflictingResource(KubernetesClient client, DeploymentTargetEntry deploymentTarget, List<HasMetadata> generated) { HasMetadata deploymentResource = generated.stream() .filter(r -> deploymentTarget.getDeploymentResourceKind().matches(r)) .findFirst() .orElseThrow(() -> new IllegalStateException( "No " + deploymentTarget.getDeploymentResourceKind() + " found under: " + deploymentTarget.getName())); String name = deploymentResource.getMetadata().getName(); for (DeploymentResourceKind deploymentKind : DeploymentResourceKind.values()) { if (deploymentKind.matches(deploymentResource)) { continue; } try { GenericKubernetesResource resource = client .genericKubernetesResources(deploymentKind.getApiVersion(), deploymentKind.getKind()).withName(name) .get(); if (resource != null) { log.warn("Found conflicting resource: " + resource.getApiVersion() + "/" + resource.getKind() + ":" + resource.getMetadata().getName()); return Optional.of(resource); } } catch (KubernetesClientException e) { // ignore } } return Optional.empty(); } private void deleteResource(HasMetadata metadata, Resource<?> r) { r.delete(); try { r.waitUntilCondition(Objects::isNull, 10, TimeUnit.SECONDS); } catch (Exception e) { if (e instanceof InterruptedException) { throw e; } //This is something that should not really happen. 
It's not a fatal condition, so let's just log. log.warn("Failed to wait for the deletion of: " + metadata.getApiVersion() + " " + metadata.getKind() + " " + metadata.getMetadata().getName() + ". Is the resource waitable?"); } } private Resource<?> findResource(KubernetesClient client, HasMetadata metadata) { if (metadata instanceof GenericKubernetesResource) { GenericKubernetesResource genericResource = (GenericKubernetesResource) metadata; ResourceDefinitionContext context = getGenericResourceContext(client, genericResource) .orElseThrow(() -> new IllegalStateException("Could not retrieve API resource information for: " + metadata.getApiVersion() + " " + metadata.getKind() + ". Is the CRD for the resource available?")); return client.genericKubernetesResources(context).resource(genericResource); } return client.resource(metadata); } private void printExposeInformation(KubernetesClient client, KubernetesList list, OpenShiftConfig openshiftConfig, ApplicationInfoBuildItem applicationInfo) { String generatedRouteName = ResourceNameUtil.getResourceName(openshiftConfig, applicationInfo); List<HasMetadata> items = list.getItems(); for (HasMetadata item : items) { if (Constants.ROUTE_API_GROUP.equals(item.getApiVersion()) && Constants.ROUTE.equals(item.getKind()) && generatedRouteName.equals(item.getMetadata().getName())) { try { OpenShiftClient openShiftClient = client.adapt(OpenShiftClient.class); Route route = openShiftClient.routes().withName(generatedRouteName).get(); boolean isTLS = (route.getSpec().getTls() != null); String host = route.getSpec().getHost(); log.infov("The deployed application can be accessed at: http{0}://{1}", isTLS ? "s" : "", host); } catch (KubernetesClientException ignored) { } break; } } } /** * Obtain the APIResourceList from the server and extract all the info we need in order to know how to create / * delete the specified generic resource. * * @param client the client instance to use to query the server. * @param resource the generic resource. * @return an optional {@link ResourceDefinitionContext} with the resource info or empty if the resource could not be matched. 
*/ private static Optional<ResourceDefinitionContext> getGenericResourceContext(KubernetesClient client, GenericKubernetesResource resource) { APIResourceList apiResourceList = client.getApiResources(resource.getApiVersion()); if (apiResourceList == null || apiResourceList.getResources() == null || apiResourceList.getResources().isEmpty()) { return Optional.empty(); } return client.getApiResources(resource.getApiVersion()).getResources().stream() .filter(r -> r.getKind().equals(resource.getKind())) .map(r -> new ResourceDefinitionContext.Builder() .withGroup(ApiVersionUtil.trimGroup(resource.getApiVersion())) .withVersion(ApiVersionUtil.trimVersion(resource.getApiVersion())) .withKind(r.getKind()) .withNamespaced(r.getNamespaced()) .withPlural(r.getName()) .build()) .findFirst(); } private static boolean isOptional(List<KubernetesOptionalResourceDefinitionBuildItem> optionalResourceDefinitions, HasMetadata resource) { return optionalResourceDefinitions.stream() .anyMatch(t -> t.getApiVersion().equals(resource.getApiVersion()) && t.getKind().equals(resource.getKind())); } private static boolean shouldDeleteExisting(DeploymentTargetEntry deploymentTarget, HasMetadata resource) { if (deploymentTarget.getDeployStrategy() != DeployStrategy.CreateOrUpdate) { return false; } return KNATIVE.equalsIgnoreCase(deploymentTarget.getName()) || resource instanceof Service || (Objects.equals("v1", resource.getApiVersion()) && Objects.equals("Service", resource.getKind())) || resource instanceof Job || (Objects.equals("batch/v1", resource.getApiVersion()) && Objects.equals("Job", resource.getKind())); } private static Predicate<HasMetadata> distinctByResourceKey() { Map<Object, Boolean> seen = new ConcurrentHashMap<>(); return t -> seen.putIfAbsent(t.getApiVersion() + "/" + t.getKind() + ":" + t.getMetadata().getName(), Boolean.TRUE) == null; } private static void checkLabelSelectorVersions(DeploymentTargetEntry deploymentTarget, HasMetadata resource, Optional<HasMetadata> existing) { if (!existing.isPresent()) { return; } if (resource instanceof Deployment) { Optional<String> version = getLabelSelectorVersion(resource); Optional<String> existingVersion = getLabelSelectorVersion(existing.get()); if (version.isPresent() && existingVersion.isPresent()) { if (!version.get().equals(existingVersion.get())) { throw new IllegalStateException(String.format( "A previous Deployment with a conflicting label %s=%s was found in the label selector (current is %s=%s). As the label selector is immutable, you need to either align versions or manually delete the previous deployment.", VERSION_LABEL, existingVersion.get(), VERSION_LABEL, version.get())); } } else if (version.isPresent()) { throw new IllegalStateException(String.format( "A Deployment with a conflicting label %s=%s in the label selector was requested (the previous one had no such label). As the label selector is immutable, you need to either manually delete the previous deployment, or remove the label (consider using quarkus.%s.add-version-to-label-selectors=false).", VERSION_LABEL, version.get(), deploymentTarget.getName().toLowerCase())); } else if (existingVersion.isPresent()) { throw new IllegalStateException(String.format( "A Deployment with no label in the label selector was requested (the previous one includes %s=%s). 
As the label selector is immutable, you need to either manually delete the previous deployment, or ensure the %s label is present (consider using quarkus.%s.add-version-to-label-selectors=true).", VERSION_LABEL, existingVersion.get(), VERSION_LABEL, deploymentTarget.getName().toLowerCase())); } } } private static Optional<String> getLabelSelectorVersion(HasMetadata resource) { AtomicReference<String> version = new AtomicReference<>(); KubernetesList list = new KubernetesListBuilder().addToItems(resource).accept(new Visitor<LabelSelectorFluent<?>>() { @Override public void visit(LabelSelectorFluent<?> item) { if (item.getMatchLabels() != null) { version.set(item.getMatchLabels().get(VERSION_LABEL)); } } }).build(); return Optional.ofNullable(version.get()); } }
KubernetesDeployer
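Note: the deployer record above dedupes manifest items with its stateful distinctByResourceKey() predicate. A minimal, self-contained sketch of that idiom follows; DistinctByKeyDemo and distinctByKey are illustrative names, not part of the Quarkus API.

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Predicate;

public class DistinctByKeyDemo {

    // Stateful predicate: an element passes only the first time its (non-null)
    // key is seen; putIfAbsent returns null exactly once per distinct key,
    // even under concurrent use.
    static <T> Predicate<T> distinctByKey(Function<? super T, ?> keyExtractor) {
        Map<Object, Boolean> seen = new ConcurrentHashMap<>();
        return t -> seen.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null;
    }

    public static void main(String[] args) {
        List<String> resourceKeys = List.of(
                "v1/Service:my-app", "apps/v1/Deployment:my-app", "v1/Service:my-app");
        resourceKeys.stream()
                .filter(distinctByKey(Function.identity()))
                .forEach(System.out::println); // the duplicate Service key prints once
    }
}

The same ConcurrentHashMap trick keeps the predicate safe when the stream is parallel, which is why it appears in the deployer instead of Stream.distinct() over non-comparable resources.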
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestCombineFileInputFormat.java
{ "start": 4473, "end": 4676 }
class ____ extend CombineFileInputFormat. It allows * non-existent files to be passed into the CombineFileInputFormat, and allows * for easy testing without having to create real files. */ private
to
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/nestedbeans/mixed/FishTankMapper.java
{ "start": 560, "end": 1648 }
interface ____ { FishTankMapper INSTANCE = Mappers.getMapper( FishTankMapper.class ); @Mappings({ @Mapping(target = "fish.kind", source = "fish.type"), @Mapping(target = "fish.name", ignore = true), @Mapping(target = "ornament", source = "interior.ornament"), @Mapping(target = "material.materialType", source = "material"), @Mapping(target = "quality.report.organisation.name", source = "quality.report.organisationName") }) FishTankDto map( FishTank source ); @Mappings({ @Mapping(target = "fish.kind", source = "source.fish.type"), @Mapping(target = "fish.name", ignore = true), @Mapping(target = "ornament", source = "source.interior.ornament"), @Mapping(target = "material.materialType", source = "source.material"), @Mapping(target = "quality.report.organisation.name", source = "source.quality.report.organisationName") }) FishTankDto mapAsWell( FishTank source ); @InheritInverseConfiguration( name = "map" ) FishTank map( FishTankDto source ); }
FishTankMapper
java
apache__dubbo
dubbo-registry/dubbo-registry-api/src/test/java/org/apache/dubbo/registry/client/metadata/MetadataServiceNameMappingTest.java
{ "start": 2012, "end": 5775 }
class ____ { private MetadataServiceNameMapping mapping; private URL url; private ConfigManager configManager; private MetadataReport metadataReport; private ApplicationModel applicationModel; private Map<String, MetadataReport> metadataReportList = new HashMap<>(); @BeforeEach public void setUp() { applicationModel = ApplicationModel.defaultModel(); configManager = mock(ConfigManager.class); metadataReport = mock(MetadataReport.class); metadataReportList.put("default", metadataReport); mapping = new MetadataServiceNameMapping(applicationModel); mapping.setApplicationModel(applicationModel); url = URL.valueOf("dubbo://127.0.0.1:20880/TestService?version=1.0.0"); } @AfterEach public void teardown() { applicationModel.destroy(); } @Test void testMap() { ApplicationModel mockedApplicationModel = spy(applicationModel); when(configManager.getMetadataConfigs()).thenReturn(Collections.emptyList()); Mockito.when(mockedApplicationModel.getApplicationConfigManager()).thenReturn(configManager); Mockito.when(mockedApplicationModel.getCurrentConfig()).thenReturn(new ApplicationConfig("test")); // metadata report config not found mapping.setApplicationModel(mockedApplicationModel); boolean result = mapping.map(url); assertFalse(result); when(configManager.getMetadataConfigs()).thenReturn(Arrays.asList(new MetadataReportConfig())); MetadataReportInstance reportInstance = mock(MetadataReportInstance.class); Mockito.when(reportInstance.getMetadataReports(true)).thenReturn(metadataReportList); mapping.metadataReportInstance = reportInstance; when(metadataReport.registerServiceAppMapping(any(), any(), any())).thenReturn(true); // metadata report directly result = mapping.map(url); assertTrue(result); // metadata report using CAS and retry, succeeding after 10 retries when(metadataReport.registerServiceAppMapping(any(), any(), any())).thenReturn(false); when(metadataReport.getConfigItem(any(), any())).thenReturn(new ConfigItem()); when(metadataReport.registerServiceAppMapping(any(), any(), any(), any())) .thenAnswer(new Answer<Boolean>() { private int counter = 0; @Override public Boolean answer(InvocationOnMock invocationOnMock) { if (++counter == 10) { return true; } return false; } }); assertTrue(mapping.map(url)); // metadata report using CAS and retry, failing after 11 retries when(metadataReport.registerServiceAppMapping(any(), any(), any(), any())) .thenReturn(false); assertFalse(mapping.map(url)); } /** * This test currently doesn't make any sense */ @Test void testGet() { Set<String> set = new HashSet<>(); set.add("app1"); MetadataReportInstance reportInstance = mock(MetadataReportInstance.class); Mockito.when(reportInstance.getMetadataReport(any())).thenReturn(metadataReport); when(metadataReport.getServiceAppMapping(any(), any())).thenReturn(set); mapping.metadataReportInstance = reportInstance; Set<String> result = mapping.get(url); assertEquals(set, result); } /** * Same situation as testGet, so left empty. */ @Test void testGetAndListen() { // TODO } }
MetadataServiceNameMappingTest
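Note: the mapping test above stubs an eventually-succeeding registration with a counting Mockito Answer. The sketch below isolates that pattern so a CAS-with-retry loop can be exercised without real storage; it assumes Mockito on the classpath, and MappingStore/EventualSuccessStubDemo are hypothetical names.

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.concurrent.atomic.AtomicInteger;

public class EventualSuccessStubDemo {

    interface MappingStore {
        boolean register(String serviceKey);
    }

    public static void main(String[] args) {
        MappingStore store = mock(MappingStore.class);
        AtomicInteger calls = new AtomicInteger();

        // Fail the first nine invocations, succeed on the tenth.
        when(store.register(anyString()))
                .thenAnswer(invocation -> calls.incrementAndGet() == 10);

        boolean ok = false;
        int attempts = 0;
        while (!ok && attempts < 11) { // the retry loop under test
            ok = store.register("org.example.DemoService");
            attempts++;
        }
        System.out.println("registered after " + attempts + " attempts: " + ok);
    }
}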
java
spring-projects__spring-framework
spring-websocket/src/main/java/org/springframework/web/socket/messaging/SubProtocolWebSocketHandler.java
{ "start": 2966, "end": 17877 }
class ____ implements WebSocketHandler, SubProtocolCapable, MessageHandler, SmartLifecycle { /** The default value for {@link #setTimeToFirstMessage(int) timeToFirstMessage}. */ private static final int DEFAULT_TIME_TO_FIRST_MESSAGE = 60 * 1000; private final Log logger = LogFactory.getLog(SubProtocolWebSocketHandler.class); private final MessageChannel clientInboundChannel; private final SubscribableChannel clientOutboundChannel; private final Map<String, SubProtocolHandler> protocolHandlerLookup = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); private final Set<SubProtocolHandler> protocolHandlers = new LinkedHashSet<>(); private @Nullable SubProtocolHandler defaultProtocolHandler; private final Map<String, WebSocketSessionHolder> sessions = new ConcurrentHashMap<>(); private int sendTimeLimit = 10 * 1000; private int sendBufferSizeLimit = 512 * 1024; private int timeToFirstMessage = DEFAULT_TIME_TO_FIRST_MESSAGE; private volatile long lastSessionCheckTime = System.currentTimeMillis(); private final Lock sessionCheckLock = new ReentrantLock(); private final DefaultStats stats = new DefaultStats(); private @Nullable Integer phase; private volatile boolean running; private final Object lifecycleMonitor = new Object(); /** * Create a new {@code SubProtocolWebSocketHandler} for the given inbound and outbound channels. * @param clientInboundChannel the inbound {@code MessageChannel} * @param clientOutboundChannel the outbound {@code MessageChannel} */ public SubProtocolWebSocketHandler(MessageChannel clientInboundChannel, SubscribableChannel clientOutboundChannel) { Assert.notNull(clientInboundChannel, "Inbound MessageChannel must not be null"); Assert.notNull(clientOutboundChannel, "Outbound MessageChannel must not be null"); this.clientInboundChannel = clientInboundChannel; this.clientOutboundChannel = clientOutboundChannel; } /** * Configure one or more handlers to use depending on the sub-protocol requested by * the client in the WebSocket handshake request. * @param protocolHandlers the sub-protocol handlers to use */ public void setProtocolHandlers(List<SubProtocolHandler> protocolHandlers) { this.protocolHandlerLookup.clear(); this.protocolHandlers.clear(); for (SubProtocolHandler handler : protocolHandlers) { addProtocolHandler(handler); } } public List<SubProtocolHandler> getProtocolHandlers() { return new ArrayList<>(this.protocolHandlers); } /** * Register a sub-protocol handler. */ public void addProtocolHandler(SubProtocolHandler handler) { List<String> protocols = handler.getSupportedProtocols(); if (CollectionUtils.isEmpty(protocols)) { if (logger.isErrorEnabled()) { logger.error("No sub-protocols for " + handler); } return; } for (String protocol : protocols) { SubProtocolHandler replaced = this.protocolHandlerLookup.put(protocol, handler); if (replaced != null && replaced != handler) { throw new IllegalStateException("Cannot map " + handler + " to protocol '" + protocol + "': already mapped to " + replaced + "."); } } this.protocolHandlers.add(handler); } /** * Return the sub-protocols keyed by protocol name. */ public Map<String, SubProtocolHandler> getProtocolHandlerMap() { return this.protocolHandlerLookup; } /** * Set the {@link SubProtocolHandler} to use when the client did not request a * sub-protocol. 
* @param defaultProtocolHandler the default handler */ public void setDefaultProtocolHandler(@Nullable SubProtocolHandler defaultProtocolHandler) { this.defaultProtocolHandler = defaultProtocolHandler; if (this.protocolHandlerLookup.isEmpty()) { setProtocolHandlers(Collections.singletonList(defaultProtocolHandler)); } } /** * Return the default sub-protocol handler to use. */ public @Nullable SubProtocolHandler getDefaultProtocolHandler() { return this.defaultProtocolHandler; } /** * Return all supported protocols. */ @Override public List<String> getSubProtocols() { return new ArrayList<>(this.protocolHandlerLookup.keySet()); } /** * Specify the send-time limit (milliseconds). * @see ConcurrentWebSocketSessionDecorator */ public void setSendTimeLimit(int sendTimeLimit) { this.sendTimeLimit = sendTimeLimit; } /** * Return the send-time limit (milliseconds). */ public int getSendTimeLimit() { return this.sendTimeLimit; } /** * Specify the buffer-size limit (number of bytes). * @see ConcurrentWebSocketSessionDecorator */ public void setSendBufferSizeLimit(int sendBufferSizeLimit) { this.sendBufferSizeLimit = sendBufferSizeLimit; } /** * Return the buffer-size limit (number of bytes). */ public int getSendBufferSizeLimit() { return this.sendBufferSizeLimit; } /** * Set the maximum time allowed in milliseconds after the WebSocket connection * is established and before the first sub-protocol message is received. * <p>This handler is for WebSocket connections that use a sub-protocol. * Therefore, we expect the client to send at least one sub-protocol message * in the beginning, or else we assume the connection isn't doing well, for example, * proxy issue, slow network, and can be closed. * <p>By default this is set to {@code 60,000} (1 minute). * @param timeToFirstMessage the maximum time allowed in milliseconds * @since 5.1 * @see #checkSessions() */ public void setTimeToFirstMessage(int timeToFirstMessage) { this.timeToFirstMessage = timeToFirstMessage; } /** * Return the maximum time allowed after the WebSocket connection is * established and before the first sub-protocol message. * @since 5.1 */ public int getTimeToFirstMessage() { return this.timeToFirstMessage; } /** * Set the phase that this handler should run in. * <p>By default, this is {@link SmartLifecycle#DEFAULT_PHASE}, but with * {@code @EnableWebSocketMessageBroker} configuration it is set to 0. * @since 6.1.4 */ public void setPhase(int phase) { this.phase = phase; } @Override public int getPhase() { return (this.phase != null ? this.phase : SmartLifecycle.super.getPhase()); } /** * Return a String describing internal state and counters. * Effectively {@code toString()} on {@link #getStats() getStats()}. */ public String getStatsInfo() { return this.stats.toString(); } /** * Return a structured object with various session counters. 
* @since 5.2 */ public Stats getStats() { return this.stats; } @Override public final void start() { Assert.state(this.defaultProtocolHandler != null || !this.protocolHandlers.isEmpty(), "No handlers"); synchronized (this.lifecycleMonitor) { this.clientOutboundChannel.subscribe(this); this.running = true; } } @Override public final void stop() { synchronized (this.lifecycleMonitor) { this.running = false; this.clientOutboundChannel.unsubscribe(this); } // Proactively notify all active WebSocket sessions for (WebSocketSessionHolder holder : this.sessions.values()) { try { holder.getSession().close(CloseStatus.GOING_AWAY); } catch (Throwable ex) { if (logger.isWarnEnabled()) { logger.warn("Failed to close '" + holder.getSession() + "': " + ex); } } } } @Override public final void stop(Runnable callback) { synchronized (this.lifecycleMonitor) { stop(); callback.run(); } } @Override public final boolean isRunning() { return this.running; } @Override public void afterConnectionEstablished(WebSocketSession session) throws Exception { // WebSocketHandlerDecorator could close the session if (!session.isOpen()) { return; } checkSessions(); this.stats.incrementSessionCount(session); session = decorateSession(session); this.sessions.put(session.getId(), new WebSocketSessionHolder(session)); findProtocolHandler(session).afterSessionStarted(session, this.clientInboundChannel); } /** * Handle an inbound message from a WebSocket client. */ @Override public void handleMessage(WebSocketSession session, WebSocketMessage<?> message) throws Exception { WebSocketSessionHolder holder = this.sessions.get(session.getId()); if (holder != null) { session = holder.getSession(); } SubProtocolHandler protocolHandler = findProtocolHandler(session); protocolHandler.handleMessageFromClient(session, message, this.clientInboundChannel); if (holder != null) { holder.setHasHandledMessages(); } } /** * Handle an outbound Spring Message to a WebSocket client. 
*/ @Override public void handleMessage(Message<?> message) throws MessagingException { String sessionId = resolveSessionId(message); if (sessionId == null) { if (logger.isErrorEnabled()) { logger.error("Could not find session id in " + message); } return; } WebSocketSessionHolder holder = this.sessions.get(sessionId); if (holder == null) { if (logger.isDebugEnabled()) { // The broker may not have removed the session yet logger.debug("No session for " + message); } return; } WebSocketSession session = holder.getSession(); try { findProtocolHandler(session).handleMessageToClient(session, message); } catch (SessionLimitExceededException ex) { try { if (logger.isDebugEnabled()) { logger.debug("Terminating '" + session + "'", ex); } else if (logger.isWarnEnabled()) { logger.warn("Terminating '" + session + "': " + ex.getMessage()); } this.stats.incrementLimitExceededCount(); clearSession(session, ex.getStatus()); // clear first, session may be unresponsive session.close(ex.getStatus()); } catch (Exception secondException) { logger.debug("Failure while closing session " + sessionId + ".", secondException); } } catch (Exception ex) { // Could be part of normal workflow (for example, browser tab closed) if (logger.isDebugEnabled()) { logger.debug("Failed to send message to client in " + session + ": " + message, ex); } } } @Override public void handleTransportError(WebSocketSession session, Throwable exception) throws Exception { this.stats.incrementTransportError(); } @Override public void afterConnectionClosed(WebSocketSession session, CloseStatus closeStatus) throws Exception { clearSession(session, closeStatus); } @Override public boolean supportsPartialMessages() { return false; } /** * Decorate the given {@link WebSocketSession}, if desired. * <p>The default implementation builds a {@link ConcurrentWebSocketSessionDecorator} * with the configured {@link #getSendTimeLimit() send-time limit} and * {@link #getSendBufferSizeLimit() buffer-size limit}. * @param session the original {@code WebSocketSession} * @return the decorated {@code WebSocketSession}, or potentially the given session as-is * @since 4.3.13 */ protected WebSocketSession decorateSession(WebSocketSession session) { return new ConcurrentWebSocketSessionDecorator(session, getSendTimeLimit(), getSendBufferSizeLimit()); } /** * Find a {@link SubProtocolHandler} for the given session. * @param session the {@code WebSocketSession} to find a handler for */ protected final SubProtocolHandler findProtocolHandler(WebSocketSession session) { String protocol = null; try { protocol = session.getAcceptedProtocol(); } catch (Exception ex) { // Shouldn't happen logger.error("Failed to obtain session.getAcceptedProtocol(): " + "will use the default protocol handler (if configured).", ex); } SubProtocolHandler handler; if (StringUtils.hasLength(protocol)) { handler = this.protocolHandlerLookup.get(protocol); if (handler == null) { throw new IllegalStateException( "No handler for '" + protocol + "' among " + this.protocolHandlerLookup); } } else { if (this.defaultProtocolHandler != null) { handler = this.defaultProtocolHandler; } else if (this.protocolHandlers.size() == 1) { handler = this.protocolHandlers.iterator().next(); } else { throw new IllegalStateException("Multiple protocol handlers configured and " + "no protocol was negotiated. 
Consider configuring a default SubProtocolHandler."); } } return handler; } private @Nullable String resolveSessionId(Message<?> message) { for (SubProtocolHandler handler : this.protocolHandlerLookup.values()) { String sessionId = handler.resolveSessionId(message); if (sessionId != null) { return sessionId; } } if (this.defaultProtocolHandler != null) { String sessionId = this.defaultProtocolHandler.resolveSessionId(message); if (sessionId != null) { return sessionId; } } return null; } /** * A higher-level protocol can use heartbeats to detect sessions that need to * be cleaned up. However, if a WebSocket session is established, but messages * can't flow (for example, due to a proxy issue), then the higher level protocol is * never successfully negotiated, and without heartbeats, sessions can hang. * The method checks for sessions that have not received any messages 60 * seconds after the WebSocket session was established, and closes them. */ private void checkSessions() { long currentTime = System.currentTimeMillis(); long timeSinceLastCheck = currentTime - this.lastSessionCheckTime; if (!isRunning() || timeSinceLastCheck < getTimeToFirstMessage() / 2) { return; } if (this.sessionCheckLock.tryLock()) { try { for (WebSocketSessionHolder holder : this.sessions.values()) { if (holder.hasHandledMessages()) { continue; } long timeSinceCreated = currentTime - holder.getCreateTime(); if (timeSinceCreated < getTimeToFirstMessage()) { continue; } WebSocketSession session = holder.getSession(); if (logger.isInfoEnabled()) { logger.info("No messages received after " + timeSinceCreated + " ms. " + "Closing " + holder.getSession() + "."); } try { this.stats.incrementNoMessagesReceivedCount(); session.close(CloseStatus.SESSION_NOT_RELIABLE); } catch (Throwable ex) { if (logger.isWarnEnabled()) { logger.warn("Failed to close unreliable " + session, ex); } } } } finally { this.lastSessionCheckTime = currentTime; this.sessionCheckLock.unlock(); } } } private void clearSession(WebSocketSession session, CloseStatus closeStatus) throws Exception { if (logger.isDebugEnabled()) { logger.debug("Clearing session " + session.getId()); } if (this.sessions.remove(session.getId()) != null) { this.stats.decrementSessionCount(session); } findProtocolHandler(session).afterSessionEnded(session, closeStatus, this.clientInboundChannel); } @Override public String toString() { return "SubProtocolWebSocketHandler" + this.protocolHandlers; } private static
SubProtocolWebSocketHandler
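Note: the handler record above closes WebSocket sessions that never deliver a first sub-protocol message within a deadline. A plain-JDK sketch of that sweep, with a hypothetical Holder record standing in for the session wrapper (not Spring's API):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class FirstMessageTimeoutDemo {

    // Minimal stand-in for a session holder: creation time plus a flag that
    // flips once the first sub-protocol message arrives.
    record Holder(long createTime, boolean hasHandledMessages) {}

    static final long TIME_TO_FIRST_MESSAGE_MS = 60_000;

    // Sweep: drop sessions that are past the deadline and still silent.
    static void checkSessions(Map<String, Holder> sessions, long now) {
        sessions.forEach((id, holder) -> {
            if (!holder.hasHandledMessages()
                    && now - holder.createTime() >= TIME_TO_FIRST_MESSAGE_MS) {
                System.out.println("closing session with no first message: " + id);
                sessions.remove(id); // safe on ConcurrentHashMap during forEach
            }
        });
    }

    public static void main(String[] args) {
        Map<String, Holder> sessions = new ConcurrentHashMap<>();
        long now = System.currentTimeMillis();
        sessions.put("fresh", new Holder(now, false));
        sessions.put("active", new Holder(now - 120_000, true));
        sessions.put("stale", new Holder(now - 120_000, false));
        checkSessions(sessions, now);
        System.out.println("remaining: " + sessions.keySet()); // fresh, active
    }
}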
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/beans/factory/support/QualifierAnnotationAutowireContextTests.java
{ "start": 30152, "end": 30397 }
class ____ { private Person person; @Autowired public void setPerson(@TestQualifier Person person) { this.person = person; } public Person getPerson() { return this.person; } } private static
QualifiedMethodParameterTestBean
java
spring-projects__spring-boot
module/spring-boot-artemis/src/main/java/org/springframework/boot/artemis/autoconfigure/ArtemisConnectionDetails.java
{ "start": 939, "end": 1509 }
interface ____ extends ConnectionDetails { /** * Artemis deployment mode, auto-detected by default. * @return the Artemis deployment mode, auto-detected by default */ @Nullable ArtemisMode getMode(); /** * Artemis broker url. * @return the Artemis broker url */ @Nullable String getBrokerUrl(); /** * Login user of the broker. * @return the login user of the broker */ @Nullable String getUser(); /** * Login password of the broker. * @return the login password of the broker */ @Nullable String getPassword(); }
ArtemisConnectionDetails
java
apache__maven
impl/maven-impl/src/test/java/org/apache/maven/impl/model/DefaultModelBuilderResultTest.java
{ "start": 1538, "end": 4121 }
class ____ { private ModelBuilderRequest request; private ProblemCollector<ModelProblem> problemCollector; private DefaultModelBuilderResult result; private ModelSource source; private Model fileModel; private Model rawModel; private Model effectiveModel; @BeforeEach void setUp() { request = mock(ModelBuilderRequest.class); problemCollector = ProblemCollector.create(10); result = new DefaultModelBuilderResult(request, problemCollector); source = mock(ModelSource.class); fileModel = mock(Model.class); rawModel = mock(Model.class); effectiveModel = mock(Model.class); } @Test void testModelLifecycle() { // Test initial state assertNull(result.getSource()); assertNull(result.getFileModel()); assertNull(result.getRawModel()); assertNull(result.getEffectiveModel()); assertEquals(0L, result.getProblemCollector().problems().count()); // Set and verify source result.setSource(source); assertSame(source, result.getSource()); // Set and verify file model result.setFileModel(fileModel); assertSame(fileModel, result.getFileModel()); // Set and verify raw model result.setRawModel(rawModel); assertSame(rawModel, result.getRawModel()); // Set and verify effective model result.setEffectiveModel(effectiveModel); assertSame(effectiveModel, result.getEffectiveModel()); } @Test void testProblemCollection() { ModelProblem problem = mock(ModelProblem.class); Mockito.when(problem.getSeverity()).thenReturn(BuilderProblem.Severity.ERROR); problemCollector.reportProblem(problem); assertEquals(1, result.getProblemCollector().problems().count()); assertSame(problem, result.getProblemCollector().problems().findFirst().get()); } @Test void testChildrenManagement() { DefaultModelBuilderResult child1 = new DefaultModelBuilderResult(request, problemCollector); DefaultModelBuilderResult child2 = new DefaultModelBuilderResult(request, problemCollector); result.getChildren().add(child1); result.getChildren().add(child2); assertEquals(2, result.getChildren().size()); assertTrue(result.getChildren().contains(child1)); assertTrue(result.getChildren().contains(child2)); } @Test void testRequestAssociation() { assertSame(request, result.getRequest()); } }
DefaultModelBuilderResultTest
java
apache__kafka
clients/src/test/java/org/apache/kafka/common/requests/LeaveGroupRequestTest.java
{ "start": 1607, "end": 5422 }
class ____ { private final String groupId = "group_id"; private final String memberIdOne = "member_1"; private final int throttleTimeMs = 10; private LeaveGroupRequest.Builder builder; private List<MemberIdentity> members; @BeforeEach public void setUp() { members = Arrays.asList(new MemberIdentity() .setMemberId(memberIdOne) .setGroupInstanceId("instance_1"), new MemberIdentity() .setMemberId("member_2") .setGroupInstanceId("instance_2")); builder = new LeaveGroupRequest.Builder( groupId, members ); } @Test public void testMultiLeaveConstructor() { final LeaveGroupRequestData expectedData = new LeaveGroupRequestData() .setGroupId(groupId) .setMembers(members); for (short version : ApiKeys.LEAVE_GROUP.allVersions()) { try { LeaveGroupRequest request = builder.build(version); if (version <= 2) { fail("Older version " + version + " request data should not be created due to non-single members"); } assertEquals(expectedData, request.data()); assertEquals(members, request.members()); LeaveGroupResponse expectedResponse = new LeaveGroupResponse( Collections.emptyList(), Errors.COORDINATOR_LOAD_IN_PROGRESS, throttleTimeMs, version ); assertEquals(expectedResponse, request.getErrorResponse(throttleTimeMs, Errors.COORDINATOR_LOAD_IN_PROGRESS.exception())); } catch (UnsupportedVersionException e) { assertTrue(e.getMessage().contains("leave group request only supports single member instance")); } } } @Test public void testSingleLeaveConstructor() { final LeaveGroupRequestData expectedData = new LeaveGroupRequestData() .setGroupId(groupId) .setMemberId(memberIdOne); List<MemberIdentity> singleMember = Collections.singletonList( new MemberIdentity() .setMemberId(memberIdOne)); builder = new LeaveGroupRequest.Builder(groupId, singleMember); for (short version = 0; version <= 2; version++) { LeaveGroupRequest request = builder.build(version); assertEquals(expectedData, request.data()); assertEquals(singleMember, request.members()); int expectedThrottleTime = version >= 1 ? throttleTimeMs : AbstractResponse.DEFAULT_THROTTLE_TIME; LeaveGroupResponse expectedResponse = new LeaveGroupResponse( new LeaveGroupResponseData() .setErrorCode(Errors.NOT_CONTROLLER.code()) .setThrottleTimeMs(expectedThrottleTime) ); assertEquals(expectedResponse, request.getErrorResponse(throttleTimeMs, Errors.NOT_CONTROLLER.exception())); } } @Test public void testBuildEmptyMembers() { assertThrows(IllegalArgumentException.class, () -> new LeaveGroupRequest.Builder(groupId, Collections.emptyList())); } }
LeaveGroupRequestTest
java
quarkusio__quarkus
extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/proxies/PreGeneratedProxies.java
{ "start": 439, "end": 480 }
class ____ bytecode recordable. */ public
is
java
alibaba__druid
core/src/test/java/com/alibaba/druid/pvt/pool/TestIdle3_Concurrent.java
{ "start": 1032, "end": 5145 }
class ____ extends TestCase { protected void setUp() throws Exception { DruidDataSourceStatManager.clear(); } protected void tearDown() throws Exception { Assert.assertEquals(0, DruidDataSourceStatManager.getInstance().getDataSourceList().size()); } public void test_idle2() throws Exception { MockDriver driver = new MockDriver(); DruidDataSource dataSource = new DruidDataSource(); dataSource.setUrl("jdbc:mock:xxx"); dataSource.setDriver(driver); dataSource.setInitialSize(1); dataSource.setMaxActive(14); dataSource.setMaxIdle(14); dataSource.setMinIdle(1); dataSource.setMinEvictableIdleTimeMillis(30 * 10); // 300 / 10 dataSource.setTimeBetweenEvictionRunsMillis(18 * 10); // 180 / 10 dataSource.setTestWhileIdle(true); dataSource.setTestOnBorrow(false); dataSource.setValidationQuery("SELECT 1"); dataSource.setFilters("stat"); // ManagementFactory.getPlatformMBeanServer().registerMBean(dataSource, new // ObjectName("com.alibaba:type=DataSource")); // create the first connection { Assert.assertEquals(0, dataSource.getCreateCount()); Assert.assertEquals(0, dataSource.getActiveCount()); Connection conn = dataSource.getConnection(); Assert.assertEquals(dataSource.getInitialSize(), dataSource.getCreateCount()); Assert.assertEquals(dataSource.getInitialSize(), driver.getConnections().size()); Assert.assertEquals(1, dataSource.getActiveCount()); conn.close(); Assert.assertEquals(0, dataSource.getDestroyCount()); Assert.assertEquals(1, driver.getConnections().size()); Assert.assertEquals(1, dataSource.getCreateCount()); Assert.assertEquals(0, dataSource.getActiveCount()); } { // create 14 connections concurrently concurrent(driver, dataSource, 30); } // repeatedly open and close a single connection for (int i = 0; i < 1000; ++i) { Assert.assertEquals(0, dataSource.getActiveCount()); Connection conn = dataSource.getConnection(); Assert.assertEquals(1, dataSource.getActiveCount()); conn.close(); } Thread.sleep(1000); Assert.assertEquals(1, dataSource.getPoolingCount()); dataSource.close(); } private void concurrent(final MockDriver driver, final DruidDataSource dataSource, final int count) throws Exception { final int LOOP_COUNT = 1000; Thread[] threads = new Thread[count]; final CyclicBarrier barrier = new CyclicBarrier(count); final CountDownLatch endLatch = new CountDownLatch(count); for (int i = 0; i < count; ++i) { threads[i] = new Thread("thread-" + i) { public void run() { try { for (int i = 0; i < LOOP_COUNT; ++i) { barrier.await(); Connection conn = dataSource.getConnection(); { AtomicInteger c = new AtomicInteger(); for (int j = 0; j < 1000 * 1; ++j) { c.incrementAndGet(); } c.set(0); Thread.sleep(1); } conn.close(); } } catch (Exception e) { e.printStackTrace(); } finally { endLatch.countDown(); } } }; } for (int i = 0; i < count; ++i) { threads[i].start(); } endLatch.await(); System.out.println("concurrent end"); int max = count > dataSource.getMaxActive() ? dataSource.getMaxActive() : count; Assert.assertEquals(max, driver.getConnections().size()); } }
TestIdle3_Concurrent
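Note: the pool test above coordinates its workers with a CyclicBarrier (lock-step rounds) plus a CountDownLatch (completion). A stripped-down, runnable version of that harness on the plain JDK; BarrierHarnessDemo and the counter workload are illustrative only.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;

public class BarrierHarnessDemo {

    public static void main(String[] args) throws InterruptedException {
        final int threads = 8;
        final int iterations = 100;
        final CyclicBarrier barrier = new CyclicBarrier(threads);
        final CountDownLatch done = new CountDownLatch(threads);
        final AtomicInteger counter = new AtomicInteger();

        for (int i = 0; i < threads; i++) {
            new Thread(() -> {
                try {
                    for (int j = 0; j < iterations; j++) {
                        barrier.await();           // all threads start each round together
                        counter.incrementAndGet(); // the contended operation under test
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    done.countDown();
                }
            }).start();
        }
        done.await();
        System.out.println(counter.get()); // expect threads * iterations = 800
    }
}

The barrier maximizes contention at the start of every round, which is what makes this shape useful for flushing out pool races that a plain thread-per-loop test rarely hits.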
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
{ "start": 2816, "end": 3039 }
class ____ { DiskErrorCause cause; String message; DiskErrorInformation(DiskErrorCause cause, String message) { this.cause = cause; this.message = message; } } /** * The
DiskErrorInformation
java
apache__hadoop
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/StubDelegationTokenManager.java
{ "start": 1532, "end": 3068 }
class ____ extends ClassicDelegationTokenManager implements BoundDTExtension { private static final Logger LOG = LoggerFactory.getLogger( StubDelegationTokenManager.class); /** * Classname. */ public static final String NAME = "org.apache.hadoop.fs.azurebfs.extensions.StubDelegationTokenManager"; /** * Instantiate. */ public StubDelegationTokenManager() { } @Override public void bind(final URI uri, final Configuration conf) throws IOException { super.innerBind(uri, conf); } /** * Create a token. * * @param sequenceNumber sequence number. * @param uri FS URI * @param owner FS owner * @param renewer renewer * @return a token. */ public static Token<DelegationTokenIdentifier> createToken( final int sequenceNumber, final URI uri, final Text owner, final Text renewer) { return ClassicDelegationTokenManager.createToken(sequenceNumber, uri, owner, renewer); } /** * Patch a configuration to declare this the DT provider for a filesystem * built off the given configuration. * The ABFS Filesystem still needs to come up with security enabled. * @param conf configuration. * @return the patched configuration. */ public static Configuration useStubDTManager(Configuration conf) { conf.setBoolean(FS_AZURE_ENABLE_DELEGATION_TOKEN, true); conf.set(FS_AZURE_DELEGATION_TOKEN_PROVIDER_TYPE, StubDelegationTokenManager.NAME); return conf; } }
StubDelegationTokenManager
java
dropwizard__dropwizard
dropwizard-testing/src/test/java/io/dropwizard/testing/junit5/DropwizardAppExtensionRegisterExtensionTest.java
{ "start": 282, "end": 745 }
class ____ extends AbstractDropwizardAppExtensionTest { @RegisterExtension public static final DropwizardAppExtension<TestConfiguration> EXTENSION = new DropwizardAppExtension<>(DropwizardTestApplication.class, "test-config.yaml", new ResourceConfigurationSourceProvider()); @Override DropwizardAppExtension<TestConfiguration> getExtension() { return EXTENSION; } }
DropwizardAppExtensionRegisterExtensionTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java
{ "start": 23062, "end": 23318 }
class ____ implements Iterable<FieldMapper>, ToXContent { private static final MultiFields EMPTY = new MultiFields(new FieldMapper[0]); public static MultiFields empty() { return EMPTY; } public static
MultiFields
java
apache__rocketmq
broker/src/test/java/org/apache/rocketmq/broker/offset/ConsumerOrderInfoManagerLockFreeNotifyTest.java
{ "start": 1603, "end": 6231 }
class ____ { private static final String TOPIC = "topic"; private static final String GROUP = "group"; private static final int QUEUE_ID_0 = 0; private long popTime; private ConsumerOrderInfoManager consumerOrderInfoManager; private AtomicBoolean notified; private final BrokerConfig brokerConfig = new BrokerConfig(); private final PopMessageProcessor popMessageProcessor = mock(PopMessageProcessor.class); private final BrokerController brokerController = mock(BrokerController.class); @Before public void before() throws ConsumeQueueException { notified = new AtomicBoolean(false); brokerConfig.setEnableNotifyAfterPopOrderLockRelease(true); when(brokerController.getBrokerConfig()).thenReturn(brokerConfig); when(brokerController.getPopMessageProcessor()).thenReturn(popMessageProcessor); doAnswer((Answer<Void>) mock -> { notified.set(true); return null; }).when(popMessageProcessor).notifyLongPollingRequestIfNeed(anyString(), anyString(), anyInt()); consumerOrderInfoManager = new ConsumerOrderInfoManager(brokerController); popTime = System.currentTimeMillis(); } @Test public void testConsumeMessageThenNoAck() { consumerOrderInfoManager.update( null, false, TOPIC, GROUP, QUEUE_ID_0, popTime, 3000, Lists.newArrayList(1L), new StringBuilder() ); await().atLeast(Duration.ofSeconds(2)).atMost(Duration.ofSeconds(4)).until(notified::get); assertTrue(consumerOrderInfoManager.getConsumerOrderInfoLockManager().getTimeoutMap().isEmpty()); } @Test public void testConsumeMessageThenAck() { consumerOrderInfoManager.update( null, false, TOPIC, GROUP, QUEUE_ID_0, popTime, 3000, Lists.newArrayList(1L), new StringBuilder() ); consumerOrderInfoManager.commitAndNext( TOPIC, GROUP, QUEUE_ID_0, 1, popTime ); await().atMost(Duration.ofSeconds(1)).until(notified::get); assertTrue(consumerOrderInfoManager.getConsumerOrderInfoLockManager().getTimeoutMap().isEmpty()); } @Test public void testConsumeTheChangeInvisibleLonger() { consumerOrderInfoManager.update( null, false, TOPIC, GROUP, QUEUE_ID_0, popTime, 3000, Lists.newArrayList(1L), new StringBuilder() ); consumerOrderInfoManager.updateNextVisibleTime( TOPIC, GROUP, QUEUE_ID_0, 1, popTime, popTime + 5000 ); await().atLeast(Duration.ofSeconds(4)).atMost(Duration.ofSeconds(6)).until(notified::get); assertTrue(consumerOrderInfoManager.getConsumerOrderInfoLockManager().getTimeoutMap().isEmpty()); } @Test public void testConsumeTheChangeInvisibleShorter() { consumerOrderInfoManager.update( null, false, TOPIC, GROUP, QUEUE_ID_0, popTime, 3000, Lists.newArrayList(1L), new StringBuilder() ); consumerOrderInfoManager.updateNextVisibleTime( TOPIC, GROUP, QUEUE_ID_0, 1, popTime, popTime + 1000 ); await().atLeast(Duration.ofMillis(500)).atMost(Duration.ofSeconds(2)).until(notified::get); assertTrue(consumerOrderInfoManager.getConsumerOrderInfoLockManager().getTimeoutMap().isEmpty()); } @Test public void testRecover() { ConsumerOrderInfoManager savedConsumerOrderInfoManager = new ConsumerOrderInfoManager(); savedConsumerOrderInfoManager.update( null, false, TOPIC, GROUP, QUEUE_ID_0, popTime, 3000, Lists.newArrayList(1L), new StringBuilder() ); String encodedData = savedConsumerOrderInfoManager.encode(); consumerOrderInfoManager.decode(encodedData); await().atLeast(Duration.ofSeconds(2)).atMost(Duration.ofSeconds(4)).until(notified::get); assertTrue(consumerOrderInfoManager.getConsumerOrderInfoLockManager().getTimeoutMap().isEmpty()); } }
ConsumerOrderInfoManagerLockFreeNotifyTest
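Note: the order-info test above asserts that a notification fires inside a time window via Awaitility's atLeast/atMost bounds. A self-contained sketch of that assertion style, assuming Awaitility 4.x on the classpath; AwaitWindowDemo and the sleeping thread are illustrative.

import static org.awaitility.Awaitility.await;

import java.time.Duration;
import java.util.concurrent.atomic.AtomicBoolean;

public class AwaitWindowDemo {

    public static void main(String[] args) {
        AtomicBoolean notified = new AtomicBoolean(false);
        // Simulate an event that fires roughly three seconds from now.
        new Thread(() -> {
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            notified.set(true);
        }).start();

        // Assert the flag flips no earlier than 2s and no later than 4s,
        // i.e. the notification lands inside the expected time window.
        await().atLeast(Duration.ofSeconds(2))
               .atMost(Duration.ofSeconds(4))
               .until(notified::get);
        System.out.println("notified inside the expected window");
    }
}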
java
spring-projects__spring-boot
loader/spring-boot-loader/src/test/java/org/springframework/boot/loader/zip/VirtualZipDataBlockTests.java
{ "start": 1435, "end": 5561 }
class ____ { @TempDir File tempDir; private File file; @BeforeEach void setup() throws Exception { this.file = new File(this.tempDir, "test.jar"); TestJar.create(this.file); } @Test void createContainsValidZipContent() throws IOException { FileDataBlock data = new FileDataBlock(this.file.toPath()); data.open(); List<ZipCentralDirectoryFileHeaderRecord> centralRecords = new ArrayList<>(); List<Long> centralRecordPositions = new ArrayList<>(); ZipEndOfCentralDirectoryRecord eocd = ZipEndOfCentralDirectoryRecord.load(data).endOfCentralDirectoryRecord(); long pos = eocd.offsetToStartOfCentralDirectory(); for (int i = 0; i < eocd.totalNumberOfCentralDirectoryEntries(); i++) { ZipCentralDirectoryFileHeaderRecord centralRecord = ZipCentralDirectoryFileHeaderRecord.load(data, pos); String name = ZipString.readString(data, pos + ZipCentralDirectoryFileHeaderRecord.FILE_NAME_OFFSET, centralRecord.fileNameLength()); if (name.endsWith(".jar")) { centralRecords.add(centralRecord); centralRecordPositions.add(pos); } pos += centralRecord.size(); } NameOffsetLookups nameOffsetLookups = new NameOffsetLookups(2, centralRecords.size()); for (int i = 0; i < centralRecords.size(); i++) { nameOffsetLookups.enable(i, true); } nameOffsetLookups.enable(0, true); File outputFile = new File(this.tempDir, "out.jar"); try (VirtualZipDataBlock block = new VirtualZipDataBlock(data, nameOffsetLookups, centralRecords.toArray(ZipCentralDirectoryFileHeaderRecord[]::new), centralRecordPositions.stream().mapToLong(Long::longValue).toArray())) { try (FileOutputStream out = new FileOutputStream(outputFile)) { block.asInputStream().transferTo(out); } } try (FileSystem fileSystem = FileSystems.newFileSystem(outputFile.toPath())) { assertThatExceptionOfType(NoSuchFileException.class) .isThrownBy(() -> Files.size(fileSystem.getPath("nessted.jar"))); assertThat(Files.size(fileSystem.getPath("sted.jar"))).isGreaterThan(0); assertThat(Files.size(fileSystem.getPath("other-nested.jar"))).isGreaterThan(0); assertThat(Files.size(fileSystem.getPath("ace nested.jar"))).isGreaterThan(0); assertThat(Files.size(fileSystem.getPath("lti-release.jar"))).isGreaterThan(0); } } @Test // gh-38063 void createWithDescriptorRecordContainsValidZipContent() throws Exception { try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream(this.file))) { ZipEntry entry = new ZipEntry("META-INF/"); entry.setMethod(ZipEntry.DEFLATED); zip.putNextEntry(entry); zip.write(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8 }); zip.closeEntry(); } byte[] bytes = Files.readAllBytes(this.file.toPath()); CloseableDataBlock data = new ByteArrayDataBlock(bytes); List<ZipCentralDirectoryFileHeaderRecord> centralRecords = new ArrayList<>(); List<Long> centralRecordPositions = new ArrayList<>(); ZipEndOfCentralDirectoryRecord eocd = ZipEndOfCentralDirectoryRecord.load(data).endOfCentralDirectoryRecord(); long pos = eocd.offsetToStartOfCentralDirectory(); for (int i = 0; i < eocd.totalNumberOfCentralDirectoryEntries(); i++) { ZipCentralDirectoryFileHeaderRecord centralRecord = ZipCentralDirectoryFileHeaderRecord.load(data, pos); centralRecords.add(centralRecord); centralRecordPositions.add(pos); pos += centralRecord.size(); } NameOffsetLookups nameOffsetLookups = new NameOffsetLookups(0, centralRecords.size()); for (int i = 0; i < centralRecords.size(); i++) { nameOffsetLookups.enable(i, true); } nameOffsetLookups.enable(0, true); File outputFile = new File(this.tempDir, "out.jar"); try (VirtualZipDataBlock block = new VirtualZipDataBlock(data, nameOffsetLookups, 
centralRecords.toArray(ZipCentralDirectoryFileHeaderRecord[]::new), centralRecordPositions.stream().mapToLong(Long::longValue).toArray())) { try (FileOutputStream out = new FileOutputStream(outputFile)) { block.asInputStream().transferTo(out); } } byte[] virtualBytes = Files.readAllBytes(outputFile.toPath()); assertThat(bytes).isEqualTo(virtualBytes); } }
VirtualZipDataBlockTests
java
spring-projects__spring-framework
spring-beans/src/test/java/org/springframework/beans/BeanWrapperTests.java
{ "start": 13669, "end": 13770 }
class ____ { public void setMap(TypedReadOnlyMap map) { } } public static
TypedReadOnlyMapClient
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/inject/guice/ProvidesMethodOutsideOfModuleTest.java
{ "start": 3812, "end": 4111 }
class ____ implements Module { @Override public void configure(Binder binder) {} @Provides int providesFoo() { return 42; } } }\ """) .doTest(); } }
Module2
java
FasterXML__jackson-databind
src/test/java/tools/jackson/databind/struct/FormatFeatureAcceptSingleTest.java
{ "start": 3219, "end": 3435 }
class ____ { @JsonFormat(with = JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY) public List<Role> roles; } @JsonDeserialize(builder = RolesInListWithBuilder.Builder.class) static
RolesInList
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/pool/DruidConnectionHolderTest2.java
{ "start": 368, "end": 1385 }
class ____ extends PoolTestCase { private DruidDataSource dataSource; protected void setUp() throws Exception { super.setUp(); dataSource = new DruidDataSource(); dataSource.setUrl("jdbc:mock:xxx"); dataSource.setTestOnBorrow(false); dataSource.setPoolPreparedStatements(true); dataSource.getProxyFilters().add(new FilterAdapter() { public int connection_getTransactionIsolation(FilterChain chain, ConnectionProxy connection) throws SQLException { throw new SQLException(); } }); } protected void tearDown() throws Exception { dataSource.close(); super.tearDown(); } public void test_mysqlSyntaxError() throws Exception { Exception error = null; try { dataSource.getConnection(100); } catch (GetConnectionTimeoutException e) { error = e; } assertNotNull(error); } }
DruidConnectionHolderTest2
java
mapstruct__mapstruct
processor/src/test/java/org/mapstruct/ap/test/bugs/_3884/Issue3884Mapper.java
{ "start": 537, "end": 723 }
interface ____ { Issue3884Mapper INSTANCE = Mappers.getMapper( Issue3884Mapper.class ); void update(@MappingTarget DestinationType destination, SourceType source); }
Issue3884Mapper
java
apache__camel
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
{ "start": 1311347, "end": 1314217 }
class ____ extends YamlDeserializerBase<ZooKeeperServiceCallServiceDiscoveryConfiguration> { public ZooKeeperServiceCallServiceDiscoveryConfigurationDeserializer() { super(ZooKeeperServiceCallServiceDiscoveryConfiguration.class); } @Override protected ZooKeeperServiceCallServiceDiscoveryConfiguration newInstance() { return new ZooKeeperServiceCallServiceDiscoveryConfiguration(); } @Override protected boolean setProperty(ZooKeeperServiceCallServiceDiscoveryConfiguration target, String propertyKey, String propertyName, Node node) { propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey); switch(propertyKey) { case "basePath": { String val = asText(node); target.setBasePath(val); break; } case "connectionTimeout": { String val = asText(node); target.setConnectionTimeout(val); break; } case "id": { String val = asText(node); target.setId(val); break; } case "namespace": { String val = asText(node); target.setNamespace(val); break; } case "nodes": { String val = asText(node); target.setNodes(val); break; } case "properties": { java.util.List<org.apache.camel.model.PropertyDefinition> val = asFlatList(node, org.apache.camel.model.PropertyDefinition.class); target.setProperties(val); break; } case "reconnectBaseSleepTime": { String val = asText(node); target.setReconnectBaseSleepTime(val); break; } case "reconnectMaxRetries": { String val = asText(node); target.setReconnectMaxRetries(val); break; } case "reconnectMaxSleepTime": { String val = asText(node); target.setReconnectMaxSleepTime(val); break; } case "sessionTimeout": { String val = asText(node); target.setSessionTimeout(val); break; } default: { return false; } } return true; } } }
ZooKeeperServiceCallServiceDiscoveryConfigurationDeserializer
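Note: the generated deserializer above normalizes property keys with StringHelper.dashToCamelCase before the switch. A minimal stand-alone version of such a conversion, illustrative only and not Camel's implementation:

public class DashToCamelDemo {

    // Convert dash-separated keys to camelCase: drop each '-' and
    // upper-case the character that follows it.
    static String dashToCamelCase(String text) {
        StringBuilder sb = new StringBuilder(text.length());
        boolean upper = false;
        for (char c : text.toCharArray()) {
            if (c == '-') {
                upper = true;
            } else {
                sb.append(upper ? Character.toUpperCase(c) : c);
                upper = false;
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(dashToCamelCase("connection-timeout"));        // connectionTimeout
        System.out.println(dashToCamelCase("reconnect-base-sleep-time")); // reconnectBaseSleepTime
    }
}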
java
quarkusio__quarkus
integration-tests/mongodb-rest-data-panache/src/main/java/io/quarkus/it/mongodb/rest/data/panache/Book.java
{ "start": 174, "end": 676 }
class ____ { @BsonId private String id; private String title; private Author author; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public Author getAuthor() { return author; } public void setAuthor(Author author) { this.author = author; } }
Book
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/observers/CallbackCompletableObserverTest.java
{ "start": 831, "end": 1408 }
class ____ extends RxJavaTest { @Test public void emptyActionShouldReportNoCustomOnError() { CallbackCompletableObserver o = new CallbackCompletableObserver(Functions.ON_ERROR_MISSING, Functions.EMPTY_ACTION); assertFalse(o.hasCustomOnError()); } @Test public void customOnErrorShouldReportCustomOnError() { CallbackCompletableObserver o = new CallbackCompletableObserver(Functions.<Throwable>emptyConsumer(), Functions.EMPTY_ACTION); assertTrue(o.hasCustomOnError()); } }
CallbackCompletableObserverTest
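Note: the RxJava test above distinguishes observers with and without a custom onError. A small runnable example of supplying both callbacks when subscribing to a Completable, assuming RxJava 3 on the classpath; CompletableCallbacksDemo is an illustrative name.

import io.reactivex.rxjava3.core.Completable;

public class CompletableCallbacksDemo {

    public static void main(String[] args) {
        // Completing case: both callbacks supplied, so any error would be
        // routed to the custom handler instead of the global missing-onError hook.
        Completable.fromAction(() -> System.out.println("work ran"))
                .subscribe(
                        () -> System.out.println("completed"),
                        error -> System.err.println("failed: " + error));

        // Failing case: the custom onError consumer receives the exception.
        Completable.error(new IllegalStateException("boom"))
                .subscribe(
                        () -> System.out.println("completed"),
                        error -> System.err.println("failed: " + error));
    }
}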
java
apache__camel
components/camel-velocity/src/test/java/org/apache/camel/component/velocity/VelocityBodyAsDomainObjectTest.java
{ "start": 1100, "end": 2067 }
class ____ extends CamelTestSupport { @Test public void testWithObject() throws Exception { String msg = String.format("Hi Claus how are you? Its a nice day.%sGive my regards to the family Ibsen.", "\n"); MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedMessageCount(1); mock.expectedBodiesReceived(msg); MyPerson person = new MyPerson(); person.setFamilyName("Ibsen"); person.setGivenName("Claus"); template.requestBody("direct:in", person); mock.assertIsSatisfied(); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("direct:in") .to("velocity:org/apache/camel/component/velocity/BodyAsDomainObject.vm") .to("mock:result"); } }; } public static
VelocityBodyAsDomainObjectTest
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java
{ "start": 3010, "end": 27737 }
class ____ extends HandledTransportAction<BulkRequest, BulkResponse> { private static final Logger logger = LogManager.getLogger(TransportAbstractBulkAction.class); public static final Set<String> STREAMS_ALLOWED_PARAMS = new HashSet<>(9) { { add("error_trace"); add("filter_path"); add("id"); add("index"); add("op_type"); add("pretty"); add("refresh"); add("require_data_stream"); add("timeout"); } }; protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final IndexingPressure indexingPressure; protected final SystemIndices systemIndices; protected final ProjectResolver projectResolver; private final IngestService ingestService; private final IngestActionForwarder ingestForwarder; protected final LongSupplier relativeTimeNanosProvider; protected final Executor coordinationExecutor; protected final Executor systemCoordinationExecutor; private final ActionType<BulkResponse> bulkAction; protected final FeatureService featureService; protected final SamplingService samplingService; public TransportAbstractBulkAction( ActionType<BulkResponse> action, TransportService transportService, ActionFilters actionFilters, Writeable.Reader<BulkRequest> requestReader, ThreadPool threadPool, ClusterService clusterService, IngestService ingestService, IndexingPressure indexingPressure, SystemIndices systemIndices, ProjectResolver projectResolver, LongSupplier relativeTimeNanosProvider, FeatureService featureService, SamplingService samplingService ) { super(action.name(), transportService, actionFilters, requestReader, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.threadPool = threadPool; this.clusterService = clusterService; this.ingestService = ingestService; this.indexingPressure = indexingPressure; this.systemIndices = systemIndices; this.projectResolver = projectResolver; this.coordinationExecutor = threadPool.executor(ThreadPool.Names.WRITE_COORDINATION); this.systemCoordinationExecutor = threadPool.executor(ThreadPool.Names.SYSTEM_WRITE_COORDINATION); this.ingestForwarder = new IngestActionForwarder(transportService); this.featureService = featureService; clusterService.addStateApplier(this.ingestForwarder); this.relativeTimeNanosProvider = relativeTimeNanosProvider; this.bulkAction = action; this.samplingService = samplingService; } @Override protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener<BulkResponse> listener) { /* * This is called on the Transport thread so we can check the indexing * memory pressure *quickly* but we don't want to keep the transport * thread busy. Then, as soon as we have the indexing pressure in, we fork * to the coordinator thread pool for coordination tasks. We do this because * juggling the bulk request can get expensive for a few reasons: * 1. Figuring out which shard should receive a bulk request might require * parsing the _source. * 2. When dispatching the sub-requests to shards we may have to compress * them. LZ4 is super fast, but slow enough that it's best not to do it * on the transport thread, especially for large sub-requests. * * We *could* detect these cases and only fork then, but that is complex * to get right and the fork is fairly low overhead. 
*/ final int indexingOps = bulkRequest.numberOfActions(); final long indexingBytes = bulkRequest.ramBytesUsed(); final boolean isOnlySystem = TransportBulkAction.isOnlySystem( bulkRequest, projectResolver.getProjectMetadata(clusterService.state()).getIndicesLookup(), systemIndices ); final Releasable releasable; if (bulkRequest.incrementalState().indexingPressureAccounted()) { releasable = () -> {}; } else { releasable = indexingPressure.markCoordinatingOperationStarted(indexingOps, indexingBytes, isOnlySystem); } final ActionListener<BulkResponse> releasingListener = ActionListener.runBefore(listener, releasable::close); // Use coordinationExecutor for dispatching coordination tasks final Executor executor = isOnlySystem ? systemCoordinationExecutor : coordinationExecutor; ensureClusterStateThenForkAndExecute(task, bulkRequest, executor, releasingListener); } private void ensureClusterStateThenForkAndExecute( Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> releasingListener ) { final ClusterState initialState = clusterService.state(); ProjectId projectId = projectResolver.getProjectId(); final ClusterBlockException blockException = initialState.blocks().globalBlockedException(projectId, ClusterBlockLevel.WRITE); if (blockException != null) { if (false == blockException.retryable()) { releasingListener.onFailure(blockException); return; } logger.trace("cluster is blocked, waiting for it to recover", blockException); final ClusterStateObserver clusterStateObserver = new ClusterStateObserver( initialState, clusterService, bulkRequest.timeout(), logger, threadPool.getThreadContext() ); clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { @Override public void onNewClusterState(ClusterState state) { forkAndExecute(task, bulkRequest, executor, releasingListener); } @Override public void onClusterServiceClose() { releasingListener.onFailure(new NodeClosedException(clusterService.localNode())); } @Override public void onTimeout(TimeValue timeout) { releasingListener.onFailure(blockException); } }, newState -> false == newState.blocks().hasGlobalBlockWithLevel(projectId, ClusterBlockLevel.WRITE)); } else { forkAndExecute(task, bulkRequest, executor, releasingListener); } } private void forkAndExecute(Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> releasingListener) { executor.execute(new ActionRunnable<>(releasingListener) { @Override protected void doRun() throws IOException { applyPipelinesAndDoInternalExecute(task, bulkRequest, executor, releasingListener, false); } }); } private boolean applyPipelines( Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> listener, boolean haveRunIngestService ) throws IOException { boolean hasIndexRequestsWithPipelines = false; ClusterState state = clusterService.state(); ProjectId projectId = projectResolver.getProjectId(); final ProjectMetadata project; Map<String, ComponentTemplate> componentTemplateSubstitutions = bulkRequest.getComponentTemplateSubstitutions(); Map<String, ComposableIndexTemplate> indexTemplateSubstitutions = bulkRequest.getIndexTemplateSubstitutions(); if (bulkRequest.isSimulated() && (componentTemplateSubstitutions.isEmpty() == false || indexTemplateSubstitutions.isEmpty() == false)) { /* * If this is a simulated request, and there are template substitutions, then we want to create and use a new project that has * those templates. 
That is, we want to add the new templates (which will replace any that already existed with the same name), * and remove the indices and data streams that are referred to from the bulkRequest so that we get settings from the templates * rather than from the indices/data streams. */ ProjectMetadata originalProject = state.metadata().getProject(projectId); ProjectMetadata.Builder simulatedMetadataBuilder = ProjectMetadata.builder(originalProject); if (componentTemplateSubstitutions.isEmpty() == false) { Map<String, ComponentTemplate> updatedComponentTemplates = new HashMap<>(); updatedComponentTemplates.putAll(originalProject.componentTemplates()); updatedComponentTemplates.putAll(componentTemplateSubstitutions); simulatedMetadataBuilder.componentTemplates(updatedComponentTemplates); } if (indexTemplateSubstitutions.isEmpty() == false) { Map<String, ComposableIndexTemplate> updatedIndexTemplates = new HashMap<>(); updatedIndexTemplates.putAll(originalProject.templatesV2()); updatedIndexTemplates.putAll(indexTemplateSubstitutions); simulatedMetadataBuilder.indexTemplates(updatedIndexTemplates); } /* * We now remove the index from the simulated project to force the templates to be used. Note that simulated requests are * always index requests -- no other type of request is supported. */ for (DocWriteRequest<?> actionRequest : bulkRequest.requests) { assert actionRequest != null : "Requests cannot be null in simulate mode"; assert actionRequest instanceof IndexRequest : "Only IndexRequests are supported in simulate mode, but got " + actionRequest.getClass(); if (actionRequest != null) { IndexRequest indexRequest = (IndexRequest) actionRequest; String indexName = indexRequest.index(); if (indexName != null) { simulatedMetadataBuilder.remove(indexName); simulatedMetadataBuilder.removeDataStream(indexName); } } } project = simulatedMetadataBuilder.build(); } else { project = state.metadata().getProject(projectId); } Map<String, IngestService.Pipelines> resolvedPipelineCache = new HashMap<>(); for (DocWriteRequest<?> actionRequest : bulkRequest.requests) { IndexRequest indexRequest = getIndexWriteRequest(actionRequest); if (indexRequest != null) { if (indexRequest.isPipelineResolved() == false) { var pipeline = resolvedPipelineCache.computeIfAbsent( indexRequest.index(), // TODO perhaps this should use `threadPool.absoluteTimeInMillis()`, but leaving as is for now. (index) -> IngestService.resolvePipelines(actionRequest, indexRequest, project, System.currentTimeMillis()) ); IngestService.setPipelineOnRequest(indexRequest, pipeline); } hasIndexRequestsWithPipelines |= IngestService.hasPipeline(indexRequest); } if (actionRequest instanceof IndexRequest ir) { if (ir.getAutoGeneratedTimestamp() != IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP) { throw new IllegalArgumentException("autoGeneratedTimestamp should not be set externally"); } } } if (hasIndexRequestsWithPipelines) { // this method (doExecute) will be called again, but with the bulk requests updated from the ingest node processing but // also with IngestService.NOOP_PIPELINE_NAME on each request. This ensures that, on the second time through this method, // this path is never taken. 
ActionListener.run(listener, l -> { if (Assertions.ENABLED) { final boolean arePipelinesResolved = bulkRequest.requests() .stream() .map(TransportBulkAction::getIndexWriteRequest) .filter(Objects::nonNull) .allMatch(IndexRequest::isPipelineResolved); assert arePipelinesResolved : bulkRequest; } if (clusterService.localNode().isIngestNode()) { processBulkIndexIngestRequest(task, bulkRequest, executor, project, l); } else { ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l); } }); return true; } else if (haveRunIngestService == false && samplingService != null && samplingService.atLeastOneSampleConfigured(project)) { /* * Else, sample only if this request has not passed through IngestService::executeBulkRequest. Otherwise, some request within the * bulk had pipelines and we sampled in IngestService already. */ for (DocWriteRequest<?> actionRequest : bulkRequest.requests) { if (actionRequest instanceof IndexRequest ir) { samplingService.maybeSample(project, ir); } } } return false; } private void processBulkIndexIngestRequest( Task task, BulkRequest original, Executor executor, ProjectMetadata metadata, ActionListener<BulkResponse> listener ) { final long ingestStartTimeInNanos = relativeTimeNanos(); final BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); final Thread originalThread = Thread.currentThread(); getIngestService(original).executeBulkRequest( metadata.id(), original.numberOfActions(), () -> bulkRequestModifier, bulkRequestModifier::markItemAsDropped, (indexName) -> resolveFailureStore(indexName, metadata, threadPool.absoluteTimeInMillis()), bulkRequestModifier::markItemForFailureStore, bulkRequestModifier::markItemAsFailed, listener.delegateFailureAndWrap((l, unused) -> { long ingestTookInMillis = TimeUnit.NANOSECONDS.toMillis(relativeTimeNanos() - ingestStartTimeInNanos); BulkRequest bulkRequest = bulkRequestModifier.getBulkRequest(); ActionListener<BulkResponse> actionListener = bulkRequestModifier.wrapActionListenerIfNeeded(ingestTookInMillis, l); if (bulkRequest.requests().isEmpty()) { // at this stage, the transport bulk action can't deal with a bulk request with no requests, // so we stop and send an empty response back to the client. // (this will happen if pre-processing all items in the bulk failed) actionListener.onResponse(new BulkResponse(new BulkItemResponse[0], 0)); } else { ActionRunnable<BulkResponse> runnable = new ActionRunnable<>(actionListener) { @Override protected void doRun() throws IOException { applyPipelinesAndDoInternalExecute(task, bulkRequest, executor, actionListener, true); } @Override public boolean isForceExecution() { // If we fork back to a coordination thread we should **not** fail because the tp queue is full. // (Otherwise the work done during ingest will be lost) // It is okay to force execution here. Throttling of write requests happens prior to // ingest when a node receives a bulk request. return true; } }; // If a processor went async and returned a response on a different thread, then // before we continue the bulk request we should fork back to a coordination thread. Otherwise it is fine to perform // coordination steps on the write thread if (originalThread == Thread.currentThread()) { runnable.run(); } else { executor.execute(runnable); } } }) ); } /** * Determines if an index name is associated with either an existing data stream or a template * for one that has the failure store enabled. * * @param indexName The index name to check. * @param metadata Cluster state metadata. 
* @param epochMillis A timestamp to use when resolving date math in the index name. * @return true if this is not a simulation, and the given index name corresponds to a data stream with a failure store, or if it * matches a template that has a data stream failure store enabled, or if it matches a data stream template with no failure store * option specified and the name matches the cluster setting to enable the failure store. Returns false if the index name * corresponds to a data stream, but it doesn't have the failure store enabled by one of those conditions. Returns null when it * doesn't correspond to a data stream. */ protected abstract Boolean resolveFailureStore(String indexName, ProjectMetadata metadata, long epochMillis); /** * Retrieves the {@link IndexRequest} from the provided {@link DocWriteRequest} for index or upsert actions. Upserts are * modeled as {@link IndexRequest} inside the {@link UpdateRequest}. Ignores {@link org.elasticsearch.action.delete.DeleteRequest}'s * * @param docWriteRequest The request to find the {@link IndexRequest} * @return the found {@link IndexRequest} or {@code null} if one can not be found. */ public static IndexRequest getIndexWriteRequest(DocWriteRequest<?> docWriteRequest) { IndexRequest indexRequest = null; if (docWriteRequest instanceof IndexRequest) { indexRequest = (IndexRequest) docWriteRequest; } else if (docWriteRequest instanceof UpdateRequest updateRequest) { indexRequest = updateRequest.docAsUpsert() ? updateRequest.doc() : updateRequest.upsertRequest(); } return indexRequest; } /* * This returns the IngestService to be used for the given request. The default implementation ignores the request and always returns * the same ingestService, but child classes might use information in the request in creating an IngestService specific to that request. 
*/ protected IngestService getIngestService(BulkRequest request) { return ingestService; } protected long relativeTimeNanos() { return relativeTimeNanosProvider.getAsLong(); } protected long buildTookInMillis(long startTimeNanos) { return TimeUnit.NANOSECONDS.toMillis(relativeTimeNanos() - startTimeNanos); } private void applyPipelinesAndDoInternalExecute( Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> listener, boolean haveRunIngestService ) throws IOException { final long relativeStartTimeNanos = relativeTimeNanos(); // Validate child stream writes before processing pipelines ProjectMetadata projectMetadata = projectResolver.getProjectMetadata(clusterService.state()); BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(bulkRequest); DocWriteRequest<?> req; int i = -1; while (bulkRequestModifier.hasNext()) { req = bulkRequestModifier.next(); i++; doStreamsChecks(bulkRequest, projectMetadata, req, bulkRequestModifier, i); } var wrappedListener = bulkRequestModifier.wrapActionListenerIfNeeded(listener); if (applyPipelines(task, bulkRequestModifier.getBulkRequest(), executor, wrappedListener, haveRunIngestService) == false) { doInternalExecute(task, bulkRequestModifier.getBulkRequest(), executor, wrappedListener, relativeStartTimeNanos); } } private void doStreamsChecks( BulkRequest bulkRequest, ProjectMetadata projectMetadata, DocWriteRequest<?> req, BulkRequestModifier bulkRequestModifier, int i ) { for (StreamType streamType : StreamType.getEnabledStreamTypesForProject(projectMetadata)) { if (req instanceof IndexRequest ir && ir.isPipelineResolved() == false) { IllegalArgumentException e = null; if (streamType.matchesStreamPrefix(req.index())) { e = new IllegalArgumentException( "Direct writes to child streams are prohibited. 
Index directly into the [" + streamType.getStreamName() + "] stream instead" ); } if (e == null && streamType.getStreamName().equals(ir.index()) && ir.getPipeline() != null) { e = new IllegalArgumentException( "Cannot provide a pipeline when writing to a stream " + "however the [" + ir.getPipeline() + "] pipeline was provided when writing to the [" + streamType.getStreamName() + "] stream" ); } if (e == null && streamsRestrictedParamsUsed(bulkRequest) && req.index().equals(streamType.getStreamName())) { e = new IllegalArgumentException( "When writing to a stream, only the following parameters are allowed: [" + String.join(", ", STREAMS_ALLOWED_PARAMS) + "] however the following were used: " + bulkRequest.requestParamsUsed() ); } if (e != null) { Boolean failureStoreEnabled = resolveFailureStore(req.index(), projectMetadata, threadPool.absoluteTimeInMillis()); if (featureService.clusterHasFeature(clusterService.state(), DataStream.DATA_STREAM_FAILURE_STORE_FEATURE)) { if (Boolean.TRUE.equals(failureStoreEnabled)) { bulkRequestModifier.markItemForFailureStore(i, req.index(), e); } else if (Boolean.FALSE.equals(failureStoreEnabled)) { bulkRequestModifier.markItemAsFailed(i, e, IndexDocFailureStoreStatus.NOT_ENABLED); } else { bulkRequestModifier.markItemAsFailed(i, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN); } } else { bulkRequestModifier.markItemAsFailed(i, e, IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN); } break; } } } } private boolean streamsRestrictedParamsUsed(BulkRequest bulkRequest) { return Sets.difference(bulkRequest.requestParamsUsed(), STREAMS_ALLOWED_PARAMS).isEmpty() == false; } /** * This method creates any missing resources and actually applies the BulkRequest to the relevant indices * @param task The task in which this work is being done * @param bulkRequest The BulkRequest of changes to make to indices * @param executor The executor for the thread pool in which the work is to be done * @param listener The listener to be notified of results * @param relativeStartTimeNanos The relative start time of this bulk load, to be used in computing the time taken for the BulkResponse */ protected abstract void doInternalExecute( Task task, BulkRequest bulkRequest, Executor executor, ActionListener<BulkResponse> listener, long relativeStartTimeNanos ) throws IOException; }
TransportAbstractBulkAction
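The doExecute comment in the record above describes a pattern worth seeing in isolation: do only cheap accounting on the latency-sensitive calling thread, then fork the expensive coordination work to a pool. A minimal, self-contained sketch of that idea, with illustrative names rather than the Elasticsearch API:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class ForkAfterCheapCheck {
        private final ExecutorService coordinationPool = Executors.newFixedThreadPool(4);

        // Called on a latency-sensitive thread (the transport thread in ES terms).
        public void execute(long requestBytes, Runnable expensiveCoordination) {
            // Cheap: record indexing-pressure style accounting immediately...
            long accounted = account(requestBytes);
            // ...expensive: shard routing, _source parsing, compression go to the pool.
            coordinationPool.execute(() -> {
                try {
                    expensiveCoordination.run();
                } finally {
                    release(accounted); // mirrors the releasing listener above
                }
            });
        }

        private long account(long bytes) { return bytes; }
        private void release(long bytes) { /* give the accounted budget back */ }
    }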
java
spring-projects__spring-framework
spring-oxm/src/main/java/org/springframework/oxm/Unmarshaller.java
{ "start": 818, "end": 955 }
interface ____ deserialize a given XML Stream to an Object graph. * * @author Arjen Poutsma * @since 3.0 * @see Marshaller */ public
can
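For the Unmarshaller contract above, the call site is a one-liner; unmarshal(Source) is the real spring-oxm method, while the XML payload and the surrounding class here are purely illustrative:

    import java.io.StringReader;
    import javax.xml.transform.stream.StreamSource;
    import org.springframework.oxm.Unmarshaller;

    public class UnmarshalExample {
        // Turns an XML stream into an object graph using whichever Unmarshaller
        // implementation (e.g. a JAXB-based one) was injected.
        static Object read(Unmarshaller unmarshaller) throws Exception {
            return unmarshaller.unmarshal(new StreamSource(new StringReader("<flight/>")));
        }
    }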
java
apache__flink
flink-python/src/main/java/org/apache/flink/table/runtime/arrow/writers/RowWriter.java
{ "start": 3184, "end": 3767 }
class ____ extends RowWriter<RowData> { private RowWriterForRow( StructVector structVector, ArrowFieldWriter<RowData>[] fieldsWriters) { super(structVector, fieldsWriters); } @Override boolean isNullAt(RowData in, int ordinal) { return in.isNullAt(ordinal); } @Override RowData readRow(RowData in, int ordinal) { return in.getRow(ordinal, fieldsWriters.length); } } /** {@link RowWriter} for {@link ArrayData} input. */ public static final
RowWriterForRow
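The record above is cut off just before the ArrayData variant that its trailing javadoc announces. A hedged sketch of what that sibling subclass plausibly looks like, mirroring RowWriterForRow but reading through Flink's ArrayData accessors (the real Flink class may differ in details):

    /** Sketch only: same specialization pattern, keyed on the input container type. */
    public static final class RowWriterForArray extends RowWriter<ArrayData> {

        private RowWriterForArray(
                StructVector structVector, ArrowFieldWriter<RowData>[] fieldsWriters) {
            super(structVector, fieldsWriters);
        }

        @Override
        boolean isNullAt(ArrayData in, int ordinal) {
            return in.isNullAt(ordinal);
        }

        @Override
        RowData readRow(ArrayData in, int ordinal) {
            return in.getRow(ordinal, fieldsWriters.length);
        }
    }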
java
apache__camel
components/camel-grpc/src/main/java/org/apache/camel/component/grpc/client/GrpcResponseRouterStreamObserver.java
{ "start": 1257, "end": 4176 }
class ____ implements StreamObserver<Object> { private final Endpoint sourceEndpoint; private final GrpcConfiguration configuration; private final AsyncProducer producer; private final Exchange exchange; private final AsyncCallback callback; public GrpcResponseRouterStreamObserver(GrpcConfiguration configuration, Endpoint sourceEndpoint, AsyncProducer producer, Exchange exchange, AsyncCallback callback) { this.configuration = configuration; this.sourceEndpoint = sourceEndpoint; this.producer = producer; this.exchange = exchange; this.callback = callback; } @Override public void onNext(Object o) { Exchange newExchange = sourceEndpoint.createExchange(); inherit(newExchange); newExchange.getIn().setHeader(GrpcConstants.GRPC_EVENT_TYPE_HEADER, GrpcConstants.GRPC_EVENT_TYPE_ON_NEXT); newExchange.getIn().setBody(o); doSend(newExchange); } @Override public void onError(Throwable throwable) { if (configuration.isForwardOnError()) { Exchange newExchange = sourceEndpoint.createExchange(); inherit(newExchange); newExchange.getIn().setHeader(GrpcConstants.GRPC_EVENT_TYPE_HEADER, GrpcConstants.GRPC_EVENT_TYPE_ON_ERROR); newExchange.getIn().setBody(throwable); doSend(newExchange); } callback.done(true); } @Override public void onCompleted() { if (configuration.isForwardOnCompleted()) { Exchange newExchange = sourceEndpoint.createExchange(); inherit(newExchange); newExchange.getIn().setHeader(GrpcConstants.GRPC_EVENT_TYPE_HEADER, GrpcConstants.GRPC_EVENT_TYPE_ON_COMPLETED); doSend(newExchange); } callback.done(true); } private void doSend(Exchange newExchange) { producer.processAsync(newExchange); } private void inherit(Exchange newExchange) { if (configuration.isInheritExchangePropertiesForReplies()) { for (var entry : exchange.getProperties().entrySet()) { newExchange.setProperty(entry.getKey(), entry.getValue()); } } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GrpcResponseRouterStreamObserver that = (GrpcResponseRouterStreamObserver) o; return Objects.equals(sourceEndpoint, that.sourceEndpoint) && Objects.equals(producer, that.producer); } @Override public int hashCode() { return Objects.hash(sourceEndpoint, producer); } }
GrpcResponseRouterStreamObserver
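GrpcResponseRouterStreamObserver above adapts gRPC's streaming callback into Camel exchanges. The underlying io.grpc.stub.StreamObserver contract itself is tiny; a minimal standalone implementation (the class name and payload type are ours) looks like this:

    import io.grpc.stub.StreamObserver;

    public class LoggingObserver implements StreamObserver<String> {

        @Override
        public void onNext(String value) {
            // one callback per streamed message
            System.out.println("received: " + value);
        }

        @Override
        public void onError(Throwable t) {
            // terminal: no onNext or onCompleted will follow
            System.err.println("stream failed: " + t.getMessage());
        }

        @Override
        public void onCompleted() {
            // terminal: the peer finished the stream normally
            System.out.println("stream completed");
        }
    }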
java
redisson__redisson
redisson-spring-boot-starter/src/main/java/org/redisson/spring/starter/RedissonProperties.java
{ "start": 880, "end": 1235 }
class ____ { private String config; private String file; public String getConfig() { return config; } public void setConfig(String config) { this.config = config; } public String getFile() { return file; } public void setFile(String file) { this.file = file; } }
RedissonProperties
java
google__error-prone
core/src/main/java/com/google/errorprone/bugpatterns/JUnit4ClassUsedInJUnit3.java
{ "start": 2366, "end": 2723 }
class ____ (!ASSUME_CHECK.matches(tree, state)) { return NO_MATCH; } return makeDescription("Assume", tree); } @Override public Description matchAnnotation(AnnotationTree tree, VisitorState state) { if (!enclosingClass(isJUnit3TestClass).matches(tree, state)) { return NO_MATCH; } // If we are inside a JUnit3 test
if
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/EntityGraphAttributeNodesTest.java
{ "start": 2985, "end": 3083 }
class ____ { @Id @GeneratedValue private BigInteger id; private String street; } }
Address
java
apache__avro
lang/java/avro/src/test/java/org/apache/avro/util/springframework/TestConcurrentReferenceHashMap.java
{ "start": 1933, "end": 18703 }
class ____ { private static final Comparator<? super String> NULL_SAFE_STRING_SORT = new NullSafeComparator<>( new ComparableComparator<String>(), true); private TestWeakConcurrentCache<Integer, String> map = new TestWeakConcurrentCache<>(); @Test void shouldCreateWithDefaults() { ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(); assertThat(map.getSegmentsSize(), equalTo(16)); assertThat(map.getSegment(0).getSize(), equalTo(1)); assertThat(map.getLoadFactor(), equalTo(0.75f)); } @Test void shouldCreateWithInitialCapacity() { ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(32); assertThat(map.getSegmentsSize(), equalTo(16)); assertThat(map.getSegment(0).getSize(), equalTo(2)); assertThat(map.getLoadFactor(), equalTo(0.75f)); } @Test void shouldCreateWithInitialCapacityAndLoadFactor() { ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(32, 0.5f); assertThat(map.getSegmentsSize(), equalTo(16)); assertThat(map.getSegment(0).getSize(), equalTo(2)); assertThat(map.getLoadFactor(), equalTo(0.5f)); } @Test void shouldCreateWithInitialCapacityAndConcurrentLevel() { ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(16, 2); assertThat(map.getSegmentsSize(), equalTo(2)); assertThat(map.getSegment(0).getSize(), equalTo(8)); assertThat(map.getLoadFactor(), equalTo(0.75f)); } @Test void shouldCreateFullyCustom() { ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(5, 0.5f, 3); // concurrencyLevel of 3 ends up as 4 (nearest power of 2) assertThat(map.getSegmentsSize(), equalTo(4)); // initialCapacity is 5/4 (rounded up, to nearest power of 2) assertThat(map.getSegment(0).getSize(), equalTo(2)); assertThat(map.getLoadFactor(), equalTo(0.5f)); } @Test void shouldNeedNonNegativeInitialCapacity() { new ConcurrentReferenceHashMap<Integer, String>(0, 1); IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> new TestWeakConcurrentCache<Integer, String>(-1, 1)); assertTrue(e.getMessage().contains("Initial capacity must not be negative")); } @Test void shouldNeedPositiveLoadFactor() { new ConcurrentReferenceHashMap<Integer, String>(0, 0.1f, 1); IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> new TestWeakConcurrentCache<Integer, String>(0, 0.0f, 1)); assertTrue(e.getMessage().contains("Load factor must be positive")); } @Test void shouldNeedPositiveConcurrencyLevel() { new ConcurrentReferenceHashMap<Integer, String>(1, 1); IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> new TestWeakConcurrentCache<Integer, String>(1, 0)); assertTrue(e.getMessage().contains("Concurrency level must be positive")); } @Test void shouldPutAndGet() { // NOTE we are using mock references so we don't need to worry about GC assertEquals(0, this.map.size()); this.map.put(123, "123"); assertThat(this.map.get(123), equalTo("123")); assertEquals(1, this.map.size()); this.map.put(123, "123b"); assertEquals(1, this.map.size()); this.map.put(123, null); assertEquals(1, this.map.size()); } @Test void shouldReplaceOnDoublePut() { this.map.put(123, "321"); this.map.put(123, "123"); assertThat(this.map.get(123), equalTo("123")); } @Test void shouldPutNullKey() { assertNull(this.map.get(null)); assertThat(this.map.getOrDefault(null, "456"), equalTo("456")); this.map.put(null, "123"); assertThat(this.map.get(null), equalTo("123")); assertThat(this.map.getOrDefault(null, "456"), equalTo("123")); } 
@Test void shouldPutNullValue() { assertNull(this.map.get(123)); assertThat(this.map.getOrDefault(123, "456"), equalTo("456")); this.map.put(123, "321"); assertThat(this.map.get(123), equalTo("321")); assertThat(this.map.getOrDefault(123, "456"), equalTo("321")); this.map.put(123, null); assertNull(this.map.get(123)); assertNull(this.map.getOrDefault(123, "456")); } @Test void shouldGetWithNoItems() { assertNull(this.map.get(123)); } @Test void shouldApplySupplementalHash() { Integer key = 123; this.map.put(key, "123"); assertNotEquals(this.map.getSupplementalHash(), key.hashCode()); assertNotEquals(this.map.getSupplementalHash() >> 30 & 0xFF, 0); } @Test void shouldGetFollowingNexts() { // Use loadFactor to disable resize this.map = new TestWeakConcurrentCache<>(1, 10.0f, 1); this.map.put(1, "1"); this.map.put(2, "2"); this.map.put(3, "3"); assertThat(this.map.getSegment(0).getSize(), equalTo(1)); assertThat(this.map.get(1), equalTo("1")); assertThat(this.map.get(2), equalTo("2")); assertThat(this.map.get(3), equalTo("3")); assertNull(this.map.get(4)); } @Test void shouldResize() { this.map = new TestWeakConcurrentCache<>(1, 0.75f, 1); this.map.put(1, "1"); assertThat(this.map.getSegment(0).getSize(), equalTo(1)); assertThat(this.map.get(1), equalTo("1")); this.map.put(2, "2"); assertThat(this.map.getSegment(0).getSize(), equalTo(2)); assertThat(this.map.get(1), equalTo("1")); assertThat(this.map.get(2), equalTo("2")); this.map.put(3, "3"); assertThat(this.map.getSegment(0).getSize(), equalTo(4)); assertThat(this.map.get(1), equalTo("1")); assertThat(this.map.get(2), equalTo("2")); assertThat(this.map.get(3), equalTo("3")); this.map.put(4, "4"); assertThat(this.map.getSegment(0).getSize(), equalTo(8)); assertThat(this.map.get(4), equalTo("4")); // Putting again should not increase the count for (int i = 1; i <= 5; i++) { this.map.put(i, String.valueOf(i)); } assertThat(this.map.getSegment(0).getSize(), equalTo(8)); assertThat(this.map.get(5), equalTo("5")); } @Test void shouldPurgeOnGet() { this.map = new TestWeakConcurrentCache<>(1, 0.75f, 1); for (int i = 1; i <= 5; i++) { this.map.put(i, String.valueOf(i)); } this.map.getMockReference(1, Restructure.NEVER).queueForPurge(); this.map.getMockReference(3, Restructure.NEVER).queueForPurge(); assertNull(this.map.getReference(1, Restructure.WHEN_NECESSARY)); assertThat(this.map.get(2), equalTo("2")); assertNull(this.map.getReference(3, Restructure.WHEN_NECESSARY)); assertThat(this.map.get(4), equalTo("4")); assertThat(this.map.get(5), equalTo("5")); } @Test void shouldPurgeOnPut() { this.map = new TestWeakConcurrentCache<>(1, 0.75f, 1); for (int i = 1; i <= 5; i++) { this.map.put(i, String.valueOf(i)); } this.map.getMockReference(1, Restructure.NEVER).queueForPurge(); this.map.getMockReference(3, Restructure.NEVER).queueForPurge(); this.map.put(1, "1"); assertThat(this.map.get(1), equalTo("1")); assertThat(this.map.get(2), equalTo("2")); assertNull(this.map.getReference(3, Restructure.WHEN_NECESSARY)); assertThat(this.map.get(4), equalTo("4")); assertThat(this.map.get(5), equalTo("5")); } @Test void shouldPutIfAbsent() { assertNull(this.map.putIfAbsent(123, "123")); assertThat(this.map.putIfAbsent(123, "123b"), equalTo("123")); assertThat(this.map.get(123), equalTo("123")); } @Test void shouldPutIfAbsentWithNullValue() { assertNull(this.map.putIfAbsent(123, null)); assertNull(this.map.putIfAbsent(123, "123")); assertNull(this.map.get(123)); } @Test void shouldPutIfAbsentWithNullKey() { assertNull(this.map.putIfAbsent(null, "123")); 
assertThat(this.map.putIfAbsent(null, "123b"), equalTo("123")); assertThat(this.map.get(null), equalTo("123")); } @Test void shouldRemoveKeyAndValue() { this.map.put(123, "123"); assertFalse(this.map.remove(123, "456")); assertThat(this.map.get(123), equalTo("123")); assertTrue(this.map.remove(123, "123")); assertFalse(this.map.containsKey(123)); assertTrue(this.map.isEmpty()); } @Test void shouldRemoveKeyAndValueWithExistingNull() { this.map.put(123, null); assertFalse(this.map.remove(123, "456")); assertNull(this.map.get(123)); assertTrue(this.map.remove(123, null)); assertFalse(this.map.containsKey(123)); assertTrue(this.map.isEmpty()); } @Test void shouldReplaceOldValueWithNewValue() { this.map.put(123, "123"); assertFalse(this.map.replace(123, "456", "789")); assertThat(this.map.get(123), equalTo("123")); assertTrue(this.map.replace(123, "123", "789")); assertThat(this.map.get(123), equalTo("789")); } @Test void shouldReplaceOldNullValueWithNewValue() { this.map.put(123, null); assertFalse(this.map.replace(123, "456", "789")); assertNull(this.map.get(123)); assertTrue(this.map.replace(123, null, "789")); assertThat(this.map.get(123), equalTo("789")); } @Test void shouldReplaceValue() { this.map.put(123, "123"); assertThat(this.map.replace(123, "456"), equalTo("123")); assertThat(this.map.get(123), equalTo("456")); } @Test void shouldReplaceNullValue() { this.map.put(123, null); assertNull(this.map.replace(123, "456")); assertThat(this.map.get(123), equalTo("456")); } @Test void shouldGetSize() { assertEquals(0, this.map.size()); this.map.put(123, "123"); this.map.put(123, null); this.map.put(456, "456"); assertEquals(2, this.map.size()); } @Test void shouldSupportIsEmpty() { assertTrue(this.map.isEmpty()); this.map.put(123, "123"); this.map.put(123, null); this.map.put(456, "456"); assertFalse(this.map.isEmpty()); } @Test void shouldContainKey() { assertFalse(this.map.containsKey(123)); assertFalse(this.map.containsKey(456)); this.map.put(123, "123"); this.map.put(456, null); assertTrue(this.map.containsKey(123)); assertTrue(this.map.containsKey(456)); } @Test void shouldContainValue() { assertFalse(this.map.containsValue("123")); assertFalse(this.map.containsValue(null)); this.map.put(123, "123"); this.map.put(456, null); assertTrue(this.map.containsValue("123")); assertTrue(this.map.containsValue(null)); } @Test void shouldRemoveWhenKeyIsInMap() { this.map.put(123, null); this.map.put(456, "456"); this.map.put(null, "789"); assertNull(this.map.remove(123)); assertThat(this.map.remove(456), equalTo("456")); assertThat(this.map.remove(null), equalTo("789")); assertTrue(this.map.isEmpty()); } @Test void shouldRemoveWhenKeyIsNotInMap() { assertNull(this.map.remove(123)); assertNull(this.map.remove(null)); assertTrue(this.map.isEmpty()); } @Test void shouldPutAll() { Map<Integer, String> m = new HashMap<>(); m.put(123, "123"); m.put(456, null); m.put(null, "789"); this.map.putAll(m); assertEquals(3, this.map.size()); assertThat(this.map.get(123), equalTo("123")); assertNull(this.map.get(456)); assertThat(this.map.get(null), equalTo("789")); } @Test void shouldClear() { this.map.put(123, "123"); this.map.put(456, null); this.map.put(null, "789"); this.map.clear(); assertEquals(0, this.map.size()); assertFalse(this.map.containsKey(123)); assertFalse(this.map.containsKey(456)); assertFalse(this.map.containsKey(null)); } @Test void shouldGetKeySet() { this.map.put(123, "123"); this.map.put(456, null); this.map.put(null, "789"); Set<Integer> expected = new HashSet<>(); expected.add(123); 
expected.add(456); expected.add(null); assertThat(this.map.keySet(), equalTo(expected)); } @Test void shouldGetValues() { this.map.put(123, "123"); this.map.put(456, null); this.map.put(null, "789"); List<String> actual = new ArrayList<>(this.map.values()); List<String> expected = new ArrayList<>(); expected.add("123"); expected.add(null); expected.add("789"); actual.sort(NULL_SAFE_STRING_SORT); expected.sort(NULL_SAFE_STRING_SORT); assertThat(actual, equalTo(expected)); } @Test void shouldGetEntrySet() { this.map.put(123, "123"); this.map.put(456, null); this.map.put(null, "789"); HashMap<Integer, String> expected = new HashMap<>(); expected.put(123, "123"); expected.put(456, null); expected.put(null, "789"); assertThat(this.map.entrySet(), equalTo(expected.entrySet())); } @Test void shouldGetEntrySetFollowingNext() { // Use loadFactor to disable resize this.map = new TestWeakConcurrentCache<>(1, 10.0f, 1); this.map.put(1, "1"); this.map.put(2, "2"); this.map.put(3, "3"); HashMap<Integer, String> expected = new HashMap<>(); expected.put(1, "1"); expected.put(2, "2"); expected.put(3, "3"); assertThat(this.map.entrySet(), equalTo(expected.entrySet())); } @Test void shouldRemoveViaEntrySet() { this.map.put(1, "1"); this.map.put(2, "2"); this.map.put(3, "3"); Iterator<Map.Entry<Integer, String>> iterator = this.map.entrySet().iterator(); iterator.next(); iterator.next(); iterator.remove(); assertThrows(IllegalStateException.class, iterator::remove); iterator.next(); assertFalse(iterator.hasNext()); assertEquals(2, this.map.size()); assertFalse(this.map.containsKey(2)); } @Test void shouldSetViaEntrySet() { this.map.put(1, "1"); this.map.put(2, "2"); this.map.put(3, "3"); Iterator<Map.Entry<Integer, String>> iterator = this.map.entrySet().iterator(); iterator.next(); iterator.next().setValue("2b"); iterator.next(); assertFalse(iterator.hasNext()); assertEquals(3, this.map.size()); assertThat(this.map.get(2), equalTo("2b")); } @Test void containsViaEntrySet() { this.map.put(1, "1"); this.map.put(2, "2"); this.map.put(3, "3"); Set<Map.Entry<Integer, String>> entrySet = this.map.entrySet(); Set<Map.Entry<Integer, String>> copy = new HashMap<>(this.map).entrySet(); copy.forEach(entry -> assertTrue(entrySet.contains(entry))); this.map.put(1, "A"); this.map.put(2, "B"); this.map.put(3, "C"); copy.forEach(entry -> assertFalse(entrySet.contains(entry))); this.map.put(1, "1"); this.map.put(2, "2"); this.map.put(3, "3"); copy.forEach(entry -> assertTrue(entrySet.contains(entry))); entrySet.clear(); copy.forEach(entry -> assertFalse(entrySet.contains(entry))); } @Test @Disabled("Intended for use during development only") void shouldBeFasterThanSynchronizedMap() throws InterruptedException { Map<Integer, WeakReference<String>> synchronizedMap = Collections .synchronizedMap(new WeakHashMap<Integer, WeakReference<String>>()); StopWatch mapTime = timeMultiThreaded("SynchronizedMap", synchronizedMap, v -> new WeakReference<>(String.valueOf(v))); System.out.println(mapTime.prettyPrint()); this.map.setDisableTestHooks(true); StopWatch cacheTime = timeMultiThreaded("WeakConcurrentCache", this.map, String::valueOf); System.out.println(cacheTime.prettyPrint()); // We should be at least 4 times faster assertTrue(cacheTime.getTotalTimeSeconds() < (mapTime.getTotalTimeSeconds() / 4.0)); } @Test void shouldSupportNullReference() { // GC could happen during restructure so we must be able to create a reference // for a null entry map.createReferenceManager().createReference(null, 1234, null); } /** * Time 
multi-threaded access to a cache. * * @return the timing stopwatch */ private <V> StopWatch timeMultiThreaded(String id, final Map<Integer, V> map, ValueFactory<V> factory) throws InterruptedException { StopWatch stopWatch = new StopWatch(id); for (int i = 0; i < 500; i++) { map.put(i, factory.newValue(i)); } Thread[] threads = new Thread[30]; stopWatch.start("Running threads"); for (int threadIndex = 0; threadIndex < threads.length; threadIndex++) { threads[threadIndex] = new Thread("Cache access thread " + threadIndex) { @Override public void run() { for (int j = 0; j < 1000; j++) { for (int i = 0; i < 1000; i++) { map.get(i); } } } }; } for (Thread thread : threads) { thread.start(); } for (Thread thread : threads) { if (thread.isAlive()) { thread.join(2000); } } stopWatch.stop(); return stopWatch; } private
TestConcurrentReferenceHashMap
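The test class above exercises Avro's vendored copy of Spring's ConcurrentReferenceHashMap. As a consumer you mostly use it as a drop-in ConcurrentMap whose entries can be reclaimed by the garbage collector; a small usage sketch against the upstream Spring class (same behaviour, different package):

    import java.util.concurrent.ConcurrentMap;
    import org.springframework.util.ConcurrentReferenceHashMap;

    public class SoftValueCache {
        // Entries are held via soft references by default, so they may vanish under
        // memory pressure and get purged lazily on reads and writes, which is the
        // behaviour the shouldPurgeOnGet/shouldPurgeOnPut tests pin down above.
        private final ConcurrentMap<Integer, String> cache =
                new ConcurrentReferenceHashMap<>(16);

        String lookup(int key) {
            return cache.computeIfAbsent(key, String::valueOf);
        }
    }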
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerIdempotentRefTest.java
{ "start": 3114, "end": 3982 }
class ____ implements IdempotentRepository { @Override public boolean add(String messageId) { // will return true 1st time, and false 2nd time boolean result = invoked; invoked = true; assertEquals(testFile("report.txt").toAbsolutePath().toString(), messageId); return !result; } @Override public boolean contains(String key) { return invoked; } @Override public boolean remove(String key) { return true; } @Override public boolean confirm(String key) { return true; } @Override public void clear() { } @Override public void start() { } @Override public void stop() { } } }
MyIdempotentRepository
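A repository like MyIdempotentRepository above is only half the story; it has to be attached to the consumer. A hedged route sketch using the real Camel file-endpoint options (the #myRepo registry binding is ours):

    import org.apache.camel.builder.RouteBuilder;

    public class IdempotentFileRoute extends RouteBuilder {
        @Override
        public void configure() {
            // idempotent=true plus idempotentRepository=#myRepo makes the file
            // consumer ask the repository before (re)processing a file, which is
            // exactly the add()/contains() handshake the test's repository fakes.
            from("file:inbox?noop=true&idempotent=true&idempotentRepository=#myRepo")
                .to("mock:result");
        }
    }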
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/DoNotMockCheckerTest.java
{ "start": 10913, "end": 11233 }
class ____ {", " public abstract int getFoo();", " static DoNotMockMyAutoValue create(int foo) {", " return new AutoValue_AutoValueObjects_DoNotMockMyAutoValue(foo);", " }", " }", " @AutoValue public abstract static
DoNotMockMyAutoValue
java
apache__flink
flink-runtime/src/main/java/org/apache/flink/runtime/state/v2/ttl/TtlMapState.java
{ "start": 1872, "end": 5945 }
class ____<K, N, UK, UV> extends AbstractTtlState<K, N, UV, TtlValue<UV>, InternalMapState<K, N, UK, TtlValue<UV>>> implements InternalMapState<K, N, UK, UV> { protected TtlMapState( TtlStateContext<InternalMapState<K, N, UK, TtlValue<UV>>, UV> ttlStateContext) { super(ttlStateContext); } @Override public void clear() { original.clear(); } @Override public StateFuture<UV> asyncGet(UK key) { return original.asyncGet(key).thenApply(ttlValue -> getElementWithTtlCheck(ttlValue)); } @Override public StateFuture<Void> asyncPut(UK key, UV value) { return original.asyncPut(key, value == null ? null : wrapWithTs(value)); } @Override public StateFuture<Void> asyncPutAll(Map<UK, UV> map) { Map<UK, TtlValue<UV>> withTs = new HashMap<>(); for (Map.Entry<UK, UV> entry : map.entrySet()) { withTs.put( entry.getKey(), entry.getValue() == null ? null : wrapWithTs(entry.getValue())); } return original.asyncPutAll(withTs); } @Override public StateFuture<Void> asyncRemove(UK key) { return original.asyncRemove(key); } @Override public StateFuture<Boolean> asyncContains(UK key) { return original.asyncGet(key) .thenApply(ttlValue -> getElementWithTtlCheck(ttlValue) != null); } @Override public StateFuture<StateIterator<Map.Entry<UK, UV>>> asyncEntries() { return original.asyncEntries().thenApply(iter -> new AsyncEntriesIterator<>(iter, e -> e)); } @Override public StateFuture<StateIterator<UK>> asyncKeys() { return original.asyncEntries() .thenApply(iter -> new AsyncEntriesIterator<>(iter, e -> e.getKey())); } @Override public StateFuture<StateIterator<UV>> asyncValues() { return original.asyncEntries() .thenApply(iter -> new AsyncEntriesIterator<>(iter, e -> e.getValue())); } @Override public StateFuture<Boolean> asyncIsEmpty() { // the result may be wrong if state is expired. return original.asyncIsEmpty(); } @Override public UV get(UK key) { return getElementWithTtlCheck(original.get(key)); } @Override public void put(UK key, UV value) { original.put(key, value == null ? null : wrapWithTs(value)); } @Override public void putAll(Map<UK, UV> map) { Map<UK, TtlValue<UV>> withTs = new HashMap<>(); long currentTimestamp = timeProvider.currentTimestamp(); for (Map.Entry<UK, UV> entry : map.entrySet()) { withTs.put( entry.getKey(), entry.getValue() == null ? null : TtlUtils.wrapWithTs(entry.getValue(), currentTimestamp)); } original.putAll(withTs); } @Override public void remove(UK key) { original.remove(key); } @Override public boolean contains(UK key) { return getElementWithTtlCheck(original.get(key)) != null; } @Override public Iterable<Map.Entry<UK, UV>> entries() { return entries(e -> e); } @Override public Iterable<UK> keys() { return entries(e -> e.getKey()); } @Override public Iterable<UV> values() { return entries(e -> e.getValue()); } private <R> Iterable<R> entries(Function<Map.Entry<UK, UV>, R> resultMapper) { Iterable<Map.Entry<UK, TtlValue<UV>>> withTs = original.entries(); return () -> new EntriesIterator<>( withTs == null ? Collections.emptyList() : withTs, resultMapper); } @Override public Iterator<Map.Entry<UK, UV>> iterator() { return entries().iterator(); } @Override public boolean isEmpty() { // TODO: poor performance; returning `original.isEmpty()` directly could report non-empty when all remaining entries have expired. return !iterator().hasNext(); } private
TtlMapState
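Stripped of Flink's state machinery, the idea TtlMapState implements is simply "store the user value next to a timestamp, and treat expired entries as absent on read". A self-contained sketch (TtlValue here is a stand-in record, not Flink's class):

    import java.util.HashMap;
    import java.util.Map;

    public class TtlMapSketch {
        record TtlValue<V>(V value, long writeTimestamp) { }

        private final Map<String, TtlValue<String>> backing = new HashMap<>();
        private final long ttlMillis = 60_000;

        // wrapWithTs equivalent: stamp the value on write.
        void put(String key, String value) {
            backing.put(key, new TtlValue<>(value, System.currentTimeMillis()));
        }

        // getElementWithTtlCheck equivalent: expired entries read as null.
        String get(String key) {
            TtlValue<String> wrapped = backing.get(key);
            if (wrapped == null) {
                return null;
            }
            boolean expired =
                    System.currentTimeMillis() - wrapped.writeTimestamp() >= ttlMillis;
            return expired ? null : wrapped.value();
        }
    }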
java
quarkusio__quarkus
extensions/spring-data-jpa/deployment/src/test/java/io/quarkus/spring/data/deployment/BasicTypeData.java
{ "start": 401, "end": 2128 }
class ____ { @Id @GeneratedValue private Integer id; private Double doubleValue; private BigDecimal bigDecimalValue; @JavaType(FixedLocaleJavaType.class) // Workaround for https://hibernate.atlassian.net/browse/HHH-17466 private Locale locale; private TimeZone timeZone; private java.net.URL url; private Class clazz; @Column(length = 16) private java.util.UUID uuid; private Duration duration; public Integer getId() { return id; } public Double getDoubleValue() { return doubleValue; } public void setDoubleValue(Double doubleValue) { this.doubleValue = doubleValue; } public BigDecimal getBigDecimalValue() { return bigDecimalValue; } public void setBigDecimalValue(BigDecimal bigDecimalValue) { this.bigDecimalValue = bigDecimalValue; } public Locale getLocale() { return locale; } public void setLocale(Locale locale) { this.locale = locale; } public TimeZone getTimeZone() { return timeZone; } public void setTimeZone(TimeZone timeZone) { this.timeZone = timeZone; } public URL getUrl() { return url; } public void setUrl(URL url) { this.url = url; } public Class getClazz() { return clazz; } public void setClazz(Class clazz) { this.clazz = clazz; } public UUID getUuid() { return uuid; } public void setUuid(UUID uuid) { this.uuid = uuid; } public Duration getDuration() { return duration; } public void setDuration(Duration duration) { this.duration = duration; } }
BasicTypeData
java
google__dagger
javatests/dagger/internal/codegen/ModuleValidationTest.java
{ "start": 1189, "end": 1422 }
class ____ { @Parameterized.Parameters(name = "{0}") public static Collection<Object[]> parameters() { return Arrays.asList(new Object[][] {{ModuleType.MODULE}, {ModuleType.PRODUCER_MODULE}}); } private
ModuleValidationTest
java
quarkusio__quarkus
integration-tests/narayana-jta/src/test/java/io/quarkus/narayana/jta/JdbcObjectStoreTestProfile.java
{ "start": 140, "end": 756 }
class ____ implements QuarkusTestProfile { @Override public Map<String, String> getConfigOverrides() { HashMap<String, String> props = new HashMap<>(); props.put("quarkus.transaction-manager.object-store.type", "jdbc"); props.put("quarkus.transaction-manager.object-store.create-table", "true"); props.put("quarkus.transaction-manager.object-store.datasource", "test"); props.put("quarkus.transaction-manager.enable-recovery", "true"); props.put("quarkus.datasource.test.jdbc.url", "jdbc:h2:mem:default"); return props; } }
JdbcObjectStoreTestProfile
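To take effect, a QuarkusTestProfile such as the one above is referenced from a test class; @QuarkusTest and @TestProfile are the real Quarkus annotations, while the test class itself is illustrative:

    import io.quarkus.test.junit.QuarkusTest;
    import io.quarkus.test.junit.TestProfile;

    @QuarkusTest
    @TestProfile(JdbcObjectStoreTestProfile.class)
    class JdbcObjectStoreRecoveryTest {
        // Tests here run against a JDBC transaction object store backed by the
        // in-memory H2 "test" datasource configured in the profile overrides.
    }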
java
spring-projects__spring-framework
spring-context/src/test/java/org/springframework/context/support/PropertySourcesPlaceholderConfigurerTests.java
{ "start": 3384, "end": 23227 }
class ____ { @Test void replacementFromEnvironmentProperties() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); MockEnvironment env = new MockEnvironment(); env.setProperty("my.name", "myValue"); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setEnvironment(env); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("myValue"); assertThat(ppc.getAppliedPropertySources()).isNotNull(); } /** * Ensure that a {@link Converter} registered in the {@link ConversionService} * used by the {@code Environment} is applied during placeholder resolution * against a {@link PropertySource} registered in the {@code Environment}. */ @Test // gh-34936 void replacementFromEnvironmentPropertiesWithConversion() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); record Point(int x, int y) { } Converter<Point, String> pointToStringConverter = point -> "(%d,%d)".formatted(point.x, point.y); DefaultConversionService conversionService = new DefaultConversionService(); conversionService.addConverter(Point.class, String.class, pointToStringConverter); MockEnvironment env = new MockEnvironment(); env.setConversionService(conversionService); env.setProperty("my.name", new Point(4,5)); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setEnvironment(env); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("(4,5)"); } /** * Ensure that a {@link PropertySource} added to the {@code Environment} after context * refresh (i.e., after {@link PropertySourcesPlaceholderConfigurer#postProcessBeanFactory()} * has been invoked) can still contribute properties in late-binding scenarios. */ @Test // gh-34861 void replacementFromEnvironmentPropertiesWithLateBinding() { AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); MutablePropertySources propertySources = context.getEnvironment().getPropertySources(); propertySources.addFirst(new MockPropertySource("early properties").withProperty("foo", "bar")); context.register(PropertySourcesPlaceholderConfigurer.class); context.register(PrototypeBean.class); context.refresh(); // Verify that placeholder resolution works for early binding. PrototypeBean prototypeBean = context.getBean(PrototypeBean.class); assertThat(prototypeBean.getName()).isEqualTo("bar"); assertThat(prototypeBean.isJedi()).isFalse(); // Add new PropertySource after context refresh. propertySources.addFirst(new MockPropertySource("late properties").withProperty("jedi", "true")); // Verify that placeholder resolution works for late binding: isJedi() switches to true. prototypeBean = context.getBean(PrototypeBean.class); assertThat(prototypeBean.getName()).isEqualTo("bar"); assertThat(prototypeBean.isJedi()).isTrue(); // Add yet another PropertySource after context refresh. propertySources.addFirst(new MockPropertySource("even later properties").withProperty("foo", "enigma")); // Verify that placeholder resolution works for even later binding: getName() switches to enigma. 
prototypeBean = context.getBean(PrototypeBean.class); assertThat(prototypeBean.getName()).isEqualTo("enigma"); assertThat(prototypeBean.isJedi()).isTrue(); } @Test void localPropertiesViaResource() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); Resource resource = new ClassPathResource("PropertySourcesPlaceholderConfigurerTests.properties", getClass()); ppc.setLocation(resource); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("foo"); } @ParameterizedTest @ValueSource(booleans = {true, false}) void localPropertiesOverride(boolean override) { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${foo}") .getBeanDefinition()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setLocalOverride(override); ppc.setProperties(new Properties() {{ setProperty("foo", "local"); }}); ppc.setEnvironment(new MockEnvironment().withProperty("foo", "enclosing")); ppc.postProcessBeanFactory(bf); if (override) { assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("local"); } else { assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("enclosing"); } } @Test void explicitPropertySources() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); MutablePropertySources propertySources = new MutablePropertySources(); propertySources.addLast(new MockPropertySource().withProperty("my.name", "foo")); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setPropertySources(propertySources); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("foo"); assertThat(propertySources).containsExactlyElementsOf(ppc.getAppliedPropertySources()); } @Test void explicitPropertySourcesExcludesEnvironment() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); MutablePropertySources propertySources = new MutablePropertySources(); propertySources.addLast(new MockPropertySource()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setPropertySources(propertySources); ppc.setEnvironment(new MockEnvironment().withProperty("my.name", "env")); ppc.setIgnoreUnresolvablePlaceholders(true); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("${my.name}"); assertThat(propertySources).containsExactlyElementsOf(ppc.getAppliedPropertySources()); } @Test @SuppressWarnings("serial") public void explicitPropertySourcesExcludesLocalProperties() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); MutablePropertySources propertySources = new MutablePropertySources(); propertySources.addLast(new MockPropertySource()); PropertySourcesPlaceholderConfigurer ppc = new 
PropertySourcesPlaceholderConfigurer(); ppc.setPropertySources(propertySources); ppc.setProperties(new Properties() {{ put("my.name", "local"); }}); ppc.setIgnoreUnresolvablePlaceholders(true); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("${my.name}"); } @Test void ignoreUnresolvablePlaceholders_falseIsDefault() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); // ppc.setIgnoreUnresolvablePlaceholders(false); // the default assertThatExceptionOfType(BeanDefinitionStoreException.class) .isThrownBy(() -> ppc.postProcessBeanFactory(bf)) .havingCause() .isExactlyInstanceOf(PlaceholderResolutionException.class) .withMessage("Could not resolve placeholder 'my.name' in value \"${my.name}\""); } @Test void ignoreUnresolvablePlaceholders_true() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setIgnoreUnresolvablePlaceholders(true); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("${my.name}"); } @Test // https://github.com/spring-projects/spring-framework/issues/27947 public void ignoreUnresolvablePlaceholdersInAtValueAnnotation__falseIsDefault() { MockPropertySource mockPropertySource = new MockPropertySource("test"); mockPropertySource.setProperty("my.key", "${enigma}"); @SuppressWarnings("resource") AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); context.getEnvironment().getPropertySources().addLast(mockPropertySource); context.register(IgnoreUnresolvablePlaceholdersFalseConfig.class); assertThatExceptionOfType(BeanCreationException.class) .isThrownBy(context::refresh) .havingCause() .isExactlyInstanceOf(PlaceholderResolutionException.class) .withMessage("Could not resolve placeholder 'enigma' in value \"${enigma}\" <-- \"${my.key}\""); } @Test // https://github.com/spring-projects/spring-framework/issues/27947 public void ignoreUnresolvablePlaceholdersInAtValueAnnotation_true() { MockPropertySource mockPropertySource = new MockPropertySource("test"); mockPropertySource.setProperty("my.key", "${enigma}"); @SuppressWarnings("resource") AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(); context.getEnvironment().getPropertySources().addLast(mockPropertySource); context.register(IgnoreUnresolvablePlaceholdersTrueConfig.class); context.refresh(); IgnoreUnresolvablePlaceholdersTrueConfig config = context.getBean(IgnoreUnresolvablePlaceholdersTrueConfig.class); assertThat(config.value).isEqualTo("${enigma}"); } @Test @SuppressWarnings("serial") public void nestedUnresolvablePlaceholder() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setProperties(new Properties() {{ put("my.name", "${bogus}"); }}); assertThatExceptionOfType(BeanDefinitionStoreException.class).isThrownBy(() -> ppc.postProcessBeanFactory(bf)); } 
@Test @SuppressWarnings("serial") public void ignoredNestedUnresolvablePlaceholder() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setProperties(new Properties() {{ put("my.name", "${bogus}"); }}); ppc.setIgnoreUnresolvablePlaceholders(true); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("${bogus}"); } @Test void withNonEnumerablePropertySource() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${foo}") .getBeanDefinition()); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); PropertySource<?> ps = new PropertySource<>("simplePropertySource", new Object()) { @Override public Object getProperty(String key) { return "bar"; } }; MockEnvironment env = new MockEnvironment(); env.getPropertySources().addFirst(ps); ppc.setEnvironment(env); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("bar"); } @Test // gh-34861 void withEnumerableAndNonEnumerablePropertySourcesInTheEnvironmentAndLocalProperties() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("name", "${foo:bogus}") .addPropertyValue("jedi", "${local:false}") .getBeanDefinition()); // 1) MockPropertySource is an EnumerablePropertySource. MockPropertySource mockPropertySource = new MockPropertySource("mockPropertySource") .withProperty("foo", "${bar}"); // 2) PropertySource is not an EnumerablePropertySource. PropertySource<?> rawPropertySource = new PropertySource<>("rawPropertySource", new Object()) { @Override public Object getProperty(String key) { return ("bar".equals(key) ? "quux" : null); } }; MockEnvironment env = new MockEnvironment(); env.getPropertySources().addFirst(mockPropertySource); env.getPropertySources().addLast(rawPropertySource); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setEnvironment(env); // 3) Local properties are stored in a PropertiesPropertySource which is an EnumerablePropertySource. ppc.setProperties(new Properties() {{ setProperty("local", "true"); }}); ppc.postProcessBeanFactory(bf); // Verify all properties can be resolved via the Environment. assertThat(env.getProperty("foo")).isEqualTo("quux"); assertThat(env.getProperty("bar")).isEqualTo("quux"); // Verify that placeholder resolution works. TestBean testBean = bf.getBean(TestBean.class); assertThat(testBean.getName()).isEqualTo("quux"); assertThat(testBean.isJedi()).isTrue(); // Verify that the presence of a non-EnumerablePropertySource does not prevent // accessing EnumerablePropertySources via getAppliedPropertySources(). List<String> propertyNames = new ArrayList<>(); for (PropertySource<?> propertySource : ppc.getAppliedPropertySources()) { if (propertySource instanceof EnumerablePropertySource<?> enumerablePropertySource) { Collections.addAll(propertyNames, enumerablePropertySource.getPropertyNames()); } } // Should not contain "foo" or "bar" from the Environment. 
assertThat(propertyNames).containsOnly("local"); } @Test void customPlaceholderPrefixAndSuffix() { PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setPlaceholderPrefix("@<"); ppc.setPlaceholderSuffix(">"); DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", rootBeanDefinition(TestBean.class) .addPropertyValue("name", "@<key1>") .addPropertyValue("sex", "${key2}") .getBeanDefinition()); System.setProperty("key1", "systemKey1Value"); System.setProperty("key2", "systemKey2Value"); ppc.setEnvironment(new StandardEnvironment()); ppc.postProcessBeanFactory(bf); System.clearProperty("key1"); System.clearProperty("key2"); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("systemKey1Value"); assertThat(bf.getBean(TestBean.class).getSex()).isEqualTo("${key2}"); } @Test void nullValueIsPreserved() { PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setNullValue("customNull"); DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", rootBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); ppc.setEnvironment(new MockEnvironment().withProperty("my.name", "customNull")); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isNull(); } @Test void trimValuesIsOffByDefault() { PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", rootBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); ppc.setEnvironment(new MockEnvironment().withProperty("my.name", " myValue ")); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo(" myValue "); } @Test void trimValuesIsApplied() { PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setTrimValues(true); DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", rootBeanDefinition(TestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); ppc.setEnvironment(new MockEnvironment().withProperty("my.name", " myValue ")); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).getName()).isEqualTo("myValue"); } @Test void getAppliedPropertySourcesTooEarly() { PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); assertThatIllegalStateException().isThrownBy( ppc::getAppliedPropertySources); } @Test void multipleLocationsWithDefaultResolvedValue() { // SPR-10619 PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ClassPathResource doesNotHave = new ClassPathResource("test.properties", getClass()); ClassPathResource setToTrue = new ClassPathResource("placeholder.properties", getClass()); ppc.setLocations(doesNotHave, setToTrue); ppc.setIgnoreResourceNotFound(true); ppc.setIgnoreUnresolvablePlaceholders(true); DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.registerBeanDefinition("testBean", genericBeanDefinition(TestBean.class) .addPropertyValue("jedi", "${jedi:false}") .getBeanDefinition()); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(TestBean.class).isJedi()).isTrue(); } @Test void optionalPropertyWithValue() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); 
bf.setConversionService(new DefaultConversionService()); bf.registerBeanDefinition("testBean", genericBeanDefinition(OptionalTestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); MockEnvironment env = new MockEnvironment(); env.setProperty("my.name", "myValue"); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setEnvironment(env); ppc.setIgnoreUnresolvablePlaceholders(true); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(OptionalTestBean.class).getName()).contains("myValue"); } @Test void optionalPropertyWithoutValue() { DefaultListableBeanFactory bf = new DefaultListableBeanFactory(); bf.setConversionService(new DefaultConversionService()); bf.registerBeanDefinition("testBean", genericBeanDefinition(OptionalTestBean.class) .addPropertyValue("name", "${my.name}") .getBeanDefinition()); MockEnvironment env = new MockEnvironment(); env.setProperty("my.name", ""); PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer(); ppc.setEnvironment(env); ppc.setIgnoreUnresolvablePlaceholders(true); ppc.setNullValue(""); ppc.postProcessBeanFactory(bf); assertThat(bf.getBean(OptionalTestBean.class).getName()).isNotPresent(); } /** * Tests that use the escape character (or disable it) with nested placeholder * resolution. */ @Nested
PropertySourcesPlaceholderConfigurerTests
java
lettuce-io__lettuce-core
src/main/java/io/lettuce/core/json/DelegateJsonArray.java
{ "start": 696, "end": 2969 }
class ____ extends DelegateJsonValue implements JsonArray { DelegateJsonArray(ObjectMapper objectMapper) { super(new ArrayNode(JsonNodeFactory.instance), objectMapper); } DelegateJsonArray(JsonNode node, ObjectMapper objectMapper) { super(node, objectMapper); } @Override public JsonArray add(JsonValue element) { JsonNode newNode = null; if (element != null) { newNode = ((DelegateJsonValue) element).getNode(); } ((ArrayNode) node).add(newNode); return this; } @Override public void addAll(JsonArray element) { LettuceAssert.notNull(element, "Element must not be null"); ArrayNode otherArray = (ArrayNode) ((DelegateJsonValue) element).getNode(); ((ArrayNode) node).addAll(otherArray); } @Override public List<JsonValue> asList() { List<JsonValue> result = new ArrayList<>(); for (JsonNode jsonNode : node) { result.add(wrap(jsonNode, objectMapper)); } return result; } @Override public JsonValue get(int index) { JsonNode jsonNode = node.get(index); return jsonNode == null ? null : wrap(jsonNode, objectMapper); } @Override public JsonValue getFirst() { return get(0); } @Override public Iterator<JsonValue> iterator() { return asList().iterator(); } @Override public JsonValue remove(int index) { JsonNode jsonNode = ((ArrayNode) node).remove(index); return wrap(jsonNode, objectMapper); } @Override public JsonValue replace(int index, JsonValue newElement) { JsonNode replaceWith = ((DelegateJsonValue) newElement).getNode(); JsonNode replaced = ((ArrayNode) node).set(index, replaceWith); return wrap(replaced, objectMapper); } @Override public JsonArray swap(int index, JsonValue newElement) { JsonNode replaceWith = ((DelegateJsonValue) newElement).getNode(); ((ArrayNode) node).set(index, replaceWith); return this; } @Override public int size() { return node.size(); } @Override public JsonArray asJsonArray() { return this; } }
DelegateJsonArray
java
apache__flink
flink-runtime/src/test/java/org/apache/flink/runtime/messages/GenericMessageTester.java
{ "start": 1299, "end": 6906 }
class ____ { public static void testMessageInstance(Serializable instance) throws Exception { Serializable copy = CommonTestUtils.createCopySerializable(instance); // test equals, hash code, toString assertThat(instance).isEqualTo(copy); assertThat(copy).isEqualTo(instance); assertThat(instance).hasSameHashCodeAs(copy); assertThat(instance).hasToString(copy.toString()); } public static void testMessageInstances(Serializable instance1, Serializable instance2) throws Exception { // test equals, hash code, toString assertThat(instance1).isEqualTo(instance2); assertThat(instance2).isEqualTo(instance1); assertThat(instance1).hasSameHashCodeAs(instance2); assertThat(instance1).hasToString(instance2.toString()); // test serializability Serializable copy = CommonTestUtils.createCopySerializable(instance1); assertThat(instance1).isEqualTo(copy); assertThat(copy).isEqualTo(instance1); assertThat(instance1).hasSameHashCodeAs(copy); } // ------------------------------------------------------------------------ // Random Generators // ------------------------------------------------------------------------ @SuppressWarnings("unchecked") public static <T> T instantiateGeneric( Class<T> messageClass, Random rnd, Instantiator<?>... extraInstantiators) { try { // build the map of extra instantiators Map<Class<?>, Instantiator<?>> extraInsts = new HashMap<>(); for (Instantiator<?> inst : extraInstantiators) { Class<?> type = (Class<?>) ((ParameterizedType) inst.getClass().getGenericInterfaces()[0]) .getActualTypeArguments()[0]; assertThat(type).as("Cannot get type for extra instantiator").isNotNull(); extraInsts.put(type, inst); } Constructor<?>[] constructors = messageClass.getConstructors(); Class<?> missingType = null; outer: for (Constructor<?> constructor : constructors) { Class<?>[] paramTypes = constructor.getParameterTypes(); Object[] params = new Object[paramTypes.length]; for (int i = 0; i < paramTypes.length; i++) { Instantiator<?> inst = extraInsts.get(paramTypes[i]); if (inst == null) { inst = INSTANTIATORS.get(paramTypes[i]); } if (inst == null) { missingType = paramTypes[i]; continue outer; } params[i] = inst.instantiate(rnd); } return (T) constructor.newInstance(params); } //noinspection ConstantConditions fail("No instantiator available for type " + missingType.getCanonicalName()); throw new RuntimeException(); } catch (Exception e) { e.printStackTrace(); fail("Could not perform reflective tests: " + e.getMessage()); throw new RuntimeException(); } } public static String randomString(Random rnd) { int len = rnd.nextInt(64 + 1); char[] chars = new char[len]; for (int i = 0; i < len; i++) { chars[i] = (char) rnd.nextInt(); } return new String(chars); } public static JobID randomJobId(Random rnd) { return new JobID(rnd.nextLong(), rnd.nextLong()); } public static JobStatus randomJobStatus(Random rnd) { return JobStatus.values()[rnd.nextInt(JobStatus.values().length)]; } // ------------------------------------------------------------------------ // Map of Instantiators // ------------------------------------------------------------------------ private static final Map<Class<?>, Instantiator<?>> INSTANTIATORS = new HashMap<>(); static { INSTANTIATORS.put(boolean.class, new BooleanInstantiator()); INSTANTIATORS.put(Boolean.class, new BooleanInstantiator()); INSTANTIATORS.put(char.class, new CharInstantiator()); INSTANTIATORS.put(Character.class, new CharInstantiator()); INSTANTIATORS.put(byte.class, new ByteInstantiator()); INSTANTIATORS.put(Byte.class, new ByteInstantiator()); 
INSTANTIATORS.put(short.class, new ShortInstantiator()); INSTANTIATORS.put(Short.class, new ShortInstantiator()); INSTANTIATORS.put(int.class, new IntInstantiator()); INSTANTIATORS.put(Integer.class, new IntInstantiator()); INSTANTIATORS.put(long.class, new LongInstantiator()); INSTANTIATORS.put(Long.class, new LongInstantiator()); INSTANTIATORS.put(float.class, new FloatInstantiator()); INSTANTIATORS.put(Float.class, new FloatInstantiator()); INSTANTIATORS.put(double.class, new DoubleInstantiator()); INSTANTIATORS.put(Double.class, new DoubleInstantiator()); INSTANTIATORS.put(String.class, new StringInstantiator()); INSTANTIATORS.put(JobID.class, new JobIdInstantiator()); INSTANTIATORS.put(JobStatus.class, new JobStatusInstantiator()); } // ------------------------------------------------------------------------ // Instantiators // ------------------------------------------------------------------------ public
GenericMessageTester
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedInputStream.java
{ "start": 2657, "end": 20847 }
class ____ extends DFSInputStream { private static final ByteBufferPool BUFFER_POOL = new ElasticByteBufferPool(); private final BlockReaderInfo[] blockReaders; private final int cellSize; private final short dataBlkNum; private final short parityBlkNum; private final int groupSize; /** the buffer for a complete stripe. */ private ByteBuffer curStripeBuf; @VisibleForTesting protected ByteBuffer parityBuf; private final ErasureCodingPolicy ecPolicy; private RawErasureDecoder decoder; /** * Indicate the start/end offset of the current buffered stripe in the * block group. */ private StripeRange curStripeRange; /** * When warning the user of a lost block in striping mode, we remember the * dead nodes we've logged. All other striping blocks on these nodes can be * considered lost too, and we don't want to log a warning for each of them. * This is to prevent the log from being too verbose. Refer to HDFS-8920. * * To minimize the overhead, we only store the datanodeUuid in this set */ private final Set<String> warnedNodes = Collections.newSetFromMap(new ConcurrentHashMap<>()); DFSStripedInputStream(DFSClient dfsClient, String src, boolean verifyChecksum, ErasureCodingPolicy ecPolicy, LocatedBlocks locatedBlocks) throws IOException { super(dfsClient, src, verifyChecksum, locatedBlocks); this.readStatistics.setBlockType(BlockType.STRIPED); assert ecPolicy != null; this.ecPolicy = ecPolicy; this.cellSize = ecPolicy.getCellSize(); dataBlkNum = (short) ecPolicy.getNumDataUnits(); parityBlkNum = (short) ecPolicy.getNumParityUnits(); groupSize = dataBlkNum + parityBlkNum; blockReaders = new BlockReaderInfo[groupSize]; curStripeRange = new StripeRange(0, 0); ErasureCoderOptions coderOptions = new ErasureCoderOptions( dataBlkNum, parityBlkNum); decoder = CodecUtil.createRawDecoder(dfsClient.getConfiguration(), ecPolicy.getCodecName(), coderOptions); DFSClient.LOG.debug("Creating a striped input stream for file {}", src); } private boolean useDirectBuffer() { return decoder.preferDirectBuffer(); } private void resetCurStripeBuffer(boolean shouldAllocateBuf) { if (shouldAllocateBuf && curStripeBuf == null) { curStripeBuf = BUFFER_POOL.getBuffer(useDirectBuffer(), cellSize * dataBlkNum); } if (curStripeBuf != null) { curStripeBuf.clear(); } curStripeRange = new StripeRange(0, 0); } protected synchronized ByteBuffer getParityBuffer() { if (parityBuf == null) { parityBuf = BUFFER_POOL.getBuffer(useDirectBuffer(), cellSize * parityBlkNum); } parityBuf.clear(); return parityBuf; } protected ByteBuffer getCurStripeBuf() { return curStripeBuf; } protected ByteBufferPool getBufferPool() { return BUFFER_POOL; } protected ThreadPoolExecutor getStripedReadsThreadPool() { return dfsClient.getStripedReadsThreadPool(); } /** * When seeking into a new block group, create blockReader for each internal * block in the group. */ @VisibleForTesting synchronized void blockSeekTo(long target) throws IOException { if (target >= getFileLength()) { throw new IOException("Attempted to read past end of file"); } maybeRegisterBlockRefresh(); // Will be getting a new BlockReader. 
closeCurrentBlockReaders(); // Compute desired striped block group LocatedStripedBlock targetBlockGroup = getBlockGroupAt(target); // Update current position this.pos = target; this.blockEnd = targetBlockGroup.getStartOffset() + targetBlockGroup.getBlockSize() - 1; currentLocatedBlock = targetBlockGroup; } @Override public synchronized void close() throws IOException { try { super.close(); } finally { if (curStripeBuf != null) { BUFFER_POOL.putBuffer(curStripeBuf); curStripeBuf = null; } if (parityBuf != null) { BUFFER_POOL.putBuffer(parityBuf); parityBuf = null; } if (decoder != null) { decoder.release(); decoder = null; } } } /** * Extend the super method with the logic of switching between cells. * When reaching the end of a cell, proceed to the next cell and read it * with the next blockReader. */ @Override protected void closeCurrentBlockReaders() { resetCurStripeBuffer(false); if (blockReaders == null || blockReaders.length == 0) { return; } for (int i = 0; i < groupSize; i++) { closeReader(blockReaders[i]); blockReaders[i] = null; } blockEnd = -1; } protected void closeReader(BlockReaderInfo readerInfo) { if (readerInfo != null) { if (readerInfo.reader != null) { try { readerInfo.reader.close(); } catch (Throwable ignored) { } } readerInfo.skip(); } } private long getOffsetInBlockGroup() { return getOffsetInBlockGroup(pos); } private long getOffsetInBlockGroup(long pos) { return pos - currentLocatedBlock.getStartOffset(); } boolean createBlockReader(LocatedBlock block, long offsetInBlock, LocatedBlock[] targetBlocks, BlockReaderInfo[] readerInfos, int chunkIndex, long readTo) throws IOException { BlockReader reader = null; final ReaderRetryPolicy retry = new ReaderRetryPolicy(); DFSInputStream.DNAddrPair dnInfo = new DFSInputStream.DNAddrPair(null, null, null, null); while (true) { try { // the cached block location might have been re-fetched, so always // get it from cache. block = refreshLocatedBlock(block); targetBlocks[chunkIndex] = block; // internal block has one location, just rule out the deadNodes dnInfo = getBestNodeDNAddrPair(block, null); if (dnInfo == null) { break; } if (readTo < 0 || readTo > block.getBlockSize()) { readTo = block.getBlockSize(); } reader = getBlockReader(block, offsetInBlock, readTo - offsetInBlock, dnInfo.addr, dnInfo.storageType, dnInfo.info); DFSClientFaultInjector.get().onCreateBlockReader(block, chunkIndex, offsetInBlock, readTo - offsetInBlock); } catch (IOException e) { if (e instanceof InvalidEncryptionKeyException && retry.shouldRefetchEncryptionKey()) { DFSClient.LOG.info("Will fetch a new encryption key and retry, " + "encryption key was invalid when connecting to " + dnInfo.addr + " : " + e); dfsClient.clearDataEncryptionKey(); retry.refetchEncryptionKey(); } else if (retry.shouldRefetchToken() && tokenRefetchNeeded(e, dnInfo.addr)) { fetchBlockAt(block.getStartOffset()); retry.refetchToken(); } else { //TODO: handles connection issues DFSClient.LOG.warn("Failed to connect to " + dnInfo.addr + " for " + "block" + block.getBlock(), e); // re-fetch the block in case the block has been moved fetchBlockAt(block.getStartOffset()); addToLocalDeadNodes(dnInfo.info); } } if (reader != null) { readerInfos[chunkIndex] = new BlockReaderInfo(reader, dnInfo.info, offsetInBlock); return true; } } return false; } /** * Read a new stripe covering the current position, and store the data in the * {@link #curStripeBuf}. 
*/ private void readOneStripe(CorruptedBlocks corruptedBlocks) throws IOException { resetCurStripeBuffer(true); // compute stripe range based on pos final long offsetInBlockGroup = getOffsetInBlockGroup(); final long stripeLen = cellSize * dataBlkNum; final int stripeIndex = (int) (offsetInBlockGroup / stripeLen); final int stripeBufOffset = (int) (offsetInBlockGroup % stripeLen); final int stripeLimit = (int) Math.min(currentLocatedBlock.getBlockSize() - (stripeIndex * stripeLen), stripeLen); StripeRange stripeRange = new StripeRange(offsetInBlockGroup, stripeLimit - stripeBufOffset); LocatedStripedBlock blockGroup = (LocatedStripedBlock) currentLocatedBlock; AlignedStripe[] stripes = StripedBlockUtil.divideOneStripe(ecPolicy, cellSize, blockGroup, offsetInBlockGroup, offsetInBlockGroup + stripeRange.getLength() - 1, curStripeBuf); final LocatedBlock[] blks = StripedBlockUtil.parseStripedBlockGroup( blockGroup, cellSize, dataBlkNum, parityBlkNum); // read the whole stripe for (AlignedStripe stripe : stripes) { // Parse group to get chosen DN location StripeReader sreader = new StatefulStripeReader(stripe, ecPolicy, blks, blockReaders, corruptedBlocks, decoder, this); sreader.readStripe(); } curStripeBuf.position(stripeBufOffset); curStripeBuf.limit(stripeLimit); curStripeRange = stripeRange; } /** * Update read statistics. Note that this has to be done on the thread that * initiates the read, rather than inside each async thread, for * {@link org.apache.hadoop.fs.FileSystem.Statistics} to work correctly with * its ThreadLocal. * * @param stats striped read stats * @param readTimeMS read time metrics in ms * */ void updateReadStats(final StripedBlockUtil.BlockReadStats stats, long readTimeMS) { if (stats == null) { return; } updateReadStatistics(readStatistics, stats.getBytesRead(), stats.isShortCircuit(), stats.getNetworkDistance()); dfsClient.updateFileSystemReadStats(stats.getNetworkDistance(), stats.getBytesRead(), readTimeMS); assert readStatistics.getBlockType() == BlockType.STRIPED; dfsClient.updateFileSystemECReadStats(stats.getBytesRead()); } /** * Seek to a new arbitrary location. */ @Override public synchronized void seek(long targetPos) throws IOException { if (targetPos > getFileLength()) { throw new EOFException("Cannot seek after EOF"); } if (targetPos < 0) { throw new EOFException("Cannot seek to negative offset"); } if (closed.get()) { throw new IOException("Stream is closed!"); } if (targetPos <= blockEnd) { final long targetOffsetInBlk = getOffsetInBlockGroup(targetPos); if (curStripeRange.include(targetOffsetInBlk)) { int bufOffset = getStripedBufOffset(targetOffsetInBlk); curStripeBuf.position(bufOffset); pos = targetPos; return; } } pos = targetPos; blockEnd = -1; } private int getStripedBufOffset(long offsetInBlockGroup) { final long stripeLen = cellSize * dataBlkNum; // compute the position in the curStripeBuf based on "pos" return (int) (offsetInBlockGroup % stripeLen); } @Override public synchronized boolean seekToNewSource(long targetPos) throws IOException { return false; } @Override protected synchronized int readWithStrategy(ReaderStrategy strategy) throws IOException { dfsClient.checkOpen(); if (closed.get()) { throw new IOException("Stream closed"); } // Number of bytes already read into buffer. 
int result = 0; int len = strategy.getTargetLength(); CorruptedBlocks corruptedBlocks = new CorruptedBlocks(); if (pos < getFileLength()) { int retries = 2; boolean isRetryRead = false; while (retries > 0) { try { if (pos > blockEnd || isRetryRead) { blockSeekTo(pos); } int realLen = (int) Math.min(len, (blockEnd - pos + 1L)); synchronized (infoLock) { if (locatedBlocks.isLastBlockComplete()) { realLen = (int) Math.min(realLen, locatedBlocks.getFileLength() - pos); } } while (result < realLen) { if (!curStripeRange.include(getOffsetInBlockGroup())) { DFSClientFaultInjector.get().failWhenReadWithStrategy(isRetryRead); readOneStripe(corruptedBlocks); } int ret = copyToTargetBuf(strategy, realLen - result); result += ret; pos += ret; len -= ret; } return result; } catch (IOException ioe) { retries--; if (retries > 0) { DFSClient.LOG.info( "DFSStripedInputStream read meets exception:{}, will retry again.", ioe.toString()); isRetryRead = true; } else { throw ioe; } } finally { // Check if we need to report block replica corruption, whether the read // was successful or a ChecksumException occurred. reportCheckSumFailure(corruptedBlocks, getCurrentBlockLocationsLength(), true); } } } return -1; } /** * Copy the data from {@link #curStripeBuf} into the given buffer. * @param strategy the ReaderStrategy containing the given buffer * @param length target length * @return number of bytes copied */ private int copyToTargetBuf(ReaderStrategy strategy, int length) { final long offsetInBlk = getOffsetInBlockGroup(); int bufOffset = getStripedBufOffset(offsetInBlk); curStripeBuf.position(bufOffset); return strategy.readFromBuffer(curStripeBuf, Math.min(length, curStripeBuf.remaining())); } /** * The super method {@link DFSInputStream#refreshLocatedBlock} refreshes * cached LocatedBlock by executing {@link DFSInputStream#getBlockAt} again. * This method extends the logic by first remembering the index of the * internal block, and re-parsing the refreshed block group with the same * index. */ @Override protected LocatedBlock refreshLocatedBlock(LocatedBlock block) throws IOException { int idx = StripedBlockUtil.getBlockIndex(block.getBlock().getLocalBlock()); LocatedBlock lb = getBlockGroupAt(block.getStartOffset()); // If indexing information is returned, iterate through the index array // to find the entry for position idx in the group LocatedStripedBlock lsb = (LocatedStripedBlock) lb; int i = 0; for (; i < lsb.getBlockIndices().length; i++) { if (lsb.getBlockIndices()[i] == idx) { break; } } DFSClient.LOG.debug("refreshLocatedBlock for striped blocks, offset={}." + " Obtained block {}, idx={}", block.getStartOffset(), lb, idx); return StripedBlockUtil.constructInternalBlock( lsb, i, cellSize, dataBlkNum, idx); } private LocatedStripedBlock getBlockGroupAt(long offset) throws IOException { LocatedBlock lb = super.getBlockAt(offset); assert lb instanceof LocatedStripedBlock : "NameNode" + " should return a LocatedStripedBlock for a striped file"; return (LocatedStripedBlock) lb; } /** * Real implementation of pread. * <p> * Note: exceptionMap is not populated with IOExceptions as is done for DFSInputStream. If * you need this functionality, please implement it. 
*/ @Override protected void fetchBlockByteRange(LocatedBlock block, long start, long end, ByteBuffer buf, CorruptedBlocks corruptedBlocks, final Map<InetSocketAddress, List<IOException>> exceptionMap) throws IOException { // Refresh the striped block group LocatedStripedBlock blockGroup = getBlockGroupAt(block.getStartOffset()); AlignedStripe[] stripes = StripedBlockUtil.divideByteRangeIntoStripes( ecPolicy, cellSize, blockGroup, start, end, buf); final LocatedBlock[] blks = StripedBlockUtil.parseStripedBlockGroup( blockGroup, cellSize, dataBlkNum, parityBlkNum); final BlockReaderInfo[] preaderInfos = new BlockReaderInfo[groupSize]; long readTo = -1; for (AlignedStripe stripe : stripes) { readTo = Math.max(readTo, stripe.getOffsetInBlock() + stripe.getSpanInBlock()); } try { for (AlignedStripe stripe : stripes) { // Parse group to get chosen DN location StripeReader preader = new PositionStripeReader(stripe, ecPolicy, blks, preaderInfos, corruptedBlocks, decoder, this); preader.setReadTo(readTo); try { preader.readStripe(); } finally { preader.close(); } } buf.position(buf.position() + (int)(end - start + 1)); } finally { for (BlockReaderInfo preaderInfo : preaderInfos) { closeReader(preaderInfo); } } } @Override protected void reportLostBlock(LocatedBlock lostBlock, Collection<DatanodeInfo> ignoredNodes) { DatanodeInfo[] nodes = lostBlock.getLocations(); if (nodes != null && nodes.length > 0) { List<String> dnUUIDs = new ArrayList<>(); for (DatanodeInfo node : nodes) { dnUUIDs.add(node.getDatanodeUuid()); } if (!warnedNodes.containsAll(dnUUIDs)) { DFSClient.LOG.warn(Arrays.toString(nodes) + " are unavailable and " + "all striping blocks on them are lost. " + "IgnoredNodes = {}", ignoredNodes); warnedNodes.addAll(dnUUIDs); } } else { super.reportLostBlock(lostBlock, ignoredNodes); } } /** * May need online read recovery, zero-copy read doesn't make * sense, so don't support it. */ @Override public synchronized ByteBuffer read(ByteBufferPool bufferPool, int maxLength, EnumSet<ReadOption> opts) throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException( "Not support enhanced byte buffer access."); } @Override public synchronized void releaseBuffer(ByteBuffer buffer) { throw new UnsupportedOperationException( "Not support enhanced byte buffer access."); } @Override public synchronized void unbuffer() { super.unbuffer(); if (curStripeBuf != null) { BUFFER_POOL.putBuffer(curStripeBuf); curStripeBuf = null; } if (parityBuf != null) { BUFFER_POOL.putBuffer(parityBuf); parityBuf = null; } } }
DFSStripedInputStream
java
apache__camel
components/camel-google/camel-google-sheets/src/main/java/org/apache/camel/component/google/sheets/stream/GoogleSheetsStreamConfiguration.java
{ "start": 1202, "end": 7532 }
class ____ implements Cloneable { @UriPath @Metadata(required = true) private String spreadsheetId; @UriParam private String scopes; @UriParam private String clientId; @UriParam(label = "security", secret = true) private String clientSecret; @UriParam(label = "security", secret = true) private String accessToken; @UriParam(label = "security", secret = true) private String refreshToken; @UriParam private String applicationName; @UriParam private int maxResults; @UriParam private String range; @UriParam private boolean includeGridData; @UriParam private boolean splitResults; @UriParam(enums = "ROWS,COLUMNS,DIMENSION_UNSPECIFIED", defaultValue = "ROWS") private String majorDimension = "ROWS"; @UriParam(enums = "FORMATTED_VALUE,UNFORMATTED_VALUE,FORMULA", defaultValue = "FORMATTED_VALUE") private String valueRenderOption = "FORMATTED_VALUE"; /* Service account */ @UriParam(label = "security") private String serviceAccountKey; @UriParam private String delegate; public String getClientId() { return clientId; } /** * Client ID of the sheets application */ public void setClientId(String clientId) { this.clientId = clientId; } public String getClientSecret() { return clientSecret; } /** * Client secret of the sheets application */ public void setClientSecret(String clientSecret) { this.clientSecret = clientSecret; } public String getAccessToken() { return accessToken; } /** * OAuth 2 access token. This typically expires after an hour so refreshToken is recommended for long term usage. */ public void setAccessToken(String accessToken) { this.accessToken = accessToken; } public String getRefreshToken() { return refreshToken; } /** * OAuth 2 refresh token. Using this, the Google Sheets component can obtain a new accessToken whenever the current * one expires - a necessity if the application is long-lived. */ public void setRefreshToken(String refreshToken) { this.refreshToken = refreshToken; } public String getApplicationName() { return applicationName; } /** * Google Sheets application name. Example would be "camel-google-sheets/1.0" */ public void setApplicationName(String applicationName) { this.applicationName = applicationName; } public String getScopes() { return scopes; } public Collection<String> getScopesAsList() { if (scopes != null) { return List.of(scopes.split(",")); } else { return null; } } /** * Specifies the level of permissions you want a sheets application to have to a user account. See * https://developers.google.com/identity/protocols/googlescopes for more info. Multiple scopes can be separated by * comma. * * @see com.google.api.services.sheets.v4.SheetsScopes */ public void setScopes(String scopes) { this.scopes = scopes; } public String getSpreadsheetId() { return spreadsheetId; } /** * Specifies the spreadsheet identifier that is used to identify the target to obtain. */ public void setSpreadsheetId(String spreadsheetId) { this.spreadsheetId = spreadsheetId; } public int getMaxResults() { return maxResults; } /** * Specify the maximum number of returned results. This will limit the number of rows in a returned value range data * set or the number of returned value ranges in a batch request. */ public void setMaxResults(int maxResults) { this.maxResults = maxResults; } public String getRange() { return range; } /** * Specifies the range of rows and columns in a sheet to get data from. */ public void setRange(String range) { this.range = range; } public String getMajorDimension() { return majorDimension; } /** * Specifies the major dimension that results should use. 
*/ public void setMajorDimension(String majorDimension) { this.majorDimension = majorDimension; } public String getValueRenderOption() { return valueRenderOption; } /** * Determines how values should be rendered in the output. */ public void setValueRenderOption(String valueRenderOption) { this.valueRenderOption = valueRenderOption; } public boolean isIncludeGridData() { return includeGridData; } /** * True if grid data should be returned. */ public void setIncludeGridData(boolean includeGridData) { this.includeGridData = includeGridData; } public boolean isSplitResults() { return splitResults; } /** * True if value range result should be split into rows or columns to process each of them individually. When true * each row or column is represented with a separate exchange in batch processing. Otherwise value range object is * used as exchange chunk size. */ public void setSplitResults(boolean splitResults) { this.splitResults = splitResults; } public String getServiceAccountKey() { return serviceAccountKey; } /** * Sets "*.json" file with credentials for Service account * * @param serviceAccountKey String file, classpath, or http url */ public void setServiceAccountKey(String serviceAccountKey) { this.serviceAccountKey = serviceAccountKey; } public String getDelegate() { return delegate; } /** * Delegate for wide-domain service account */ public void setDelegate(String delegate) { this.delegate = delegate; } // ************************************************* // // ************************************************* public GoogleSheetsStreamConfiguration copy() { try { return (GoogleSheetsStreamConfiguration) super.clone(); } catch (CloneNotSupportedException e) { throw new RuntimeCamelException(e); } } }
GoogleSheetsStreamConfiguration
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/util/DummyNanoClock.java
{ "start": 895, "end": 957 }
interface ____ always returns a fixed value. */ public final
that
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/sql/postgresql/datatypes/TimestampTest.java
{ "start": 227, "end": 858 }
class ____ extends PGTest { public void test_timestamp() throws Exception { String sql = "TIMESTAMP '2004-10-19 10:23:54+02'"; PGExprParser parser = new PGExprParser(sql); SQLTimestampExpr expr = (SQLTimestampExpr) parser.expr(); System.out.println(expr.toString()); } public void test_timestamp_with_timezone() throws Exception { String sql = "TIMESTAMP WITH TIME ZONE '2004-10-19 10:23:54+02'"; PGExprParser parser = new PGExprParser(sql); SQLTimestampExpr expr = (SQLTimestampExpr) parser.expr(); System.out.println(expr.toString()); } }
TimestampTest
java
alibaba__nacos
naming/src/test/java/com/alibaba/nacos/naming/push/v2/task/PushDelayTaskTest.java
{ "start": 1383, "end": 3392 }
class ____ { private final Service service = Service.newService("N", "G", "S"); private final String singleTargetClientId = "testClientId"; private PushDelayTask pushToAllTask; private PushDelayTask singlePushTask; @BeforeEach void setUp() throws Exception { pushToAllTask = new PushDelayTask(service, 0L); singlePushTask = new PushDelayTask(service, 0L, singleTargetClientId); } @Test void testMergeAllToSingle() { PushDelayTask newTask = singlePushTask; PushDelayTask oldTask = pushToAllTask; newTask.merge(oldTask); assertTrue(newTask.isPushToAll()); assertNull(newTask.getTargetClients()); } @Test void testMergeSingleToAll() { PushDelayTask newTask = pushToAllTask; PushDelayTask oldTask = singlePushTask; newTask.merge(oldTask); assertTrue(newTask.isPushToAll()); assertNull(newTask.getTargetClients()); } @Test void testMergeSingleToSingle() { PushDelayTask oldTask = singlePushTask; PushDelayTask newTask = new PushDelayTask(service, 0L, "newClient"); newTask.merge(oldTask); assertFalse(newTask.isPushToAll()); assertNotNull(newTask.getTargetClients()); assertFalse(newTask.getTargetClients().isEmpty()); assertEquals(2, newTask.getTargetClients().size()); assertTrue(newTask.getTargetClients().contains(singleTargetClientId)); assertTrue(newTask.getTargetClients().contains("newClient")); } @Test void testMergeAllToAll() throws InterruptedException { PushDelayTask oldTask = pushToAllTask; TimeUnit.MILLISECONDS.sleep(10); PushDelayTask newTask = new PushDelayTask(service, 0L); newTask.merge(oldTask); newTask.merge(oldTask); assertTrue(newTask.isPushToAll()); assertEquals(oldTask.getLastProcessTime(), newTask.getLastProcessTime()); } }
PushDelayTaskTest
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java
{ "start": 415, "end": 745 }
class ____ extends ActionType<EsqlQueryResponse> { public static final EsqlAsyncGetResultAction INSTANCE = new EsqlAsyncGetResultAction(); public static final String NAME = EsqlAsyncActionNames.ESQL_ASYNC_GET_RESULT_ACTION_NAME; private EsqlAsyncGetResultAction() { super(NAME); } }
EsqlAsyncGetResultAction
java
quarkusio__quarkus
extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/PrometheusConfigGroup.java
{ "start": 1082, "end": 1870 }
interface ____ enabled, the value will be resolved as a path relative to * `${quarkus.management.root-path}` (`q` by default), e.g. * `http://${quarkus.management.host}:${quarkus.management.port}/${quarkus.management.root-path}/metrics`. * If an absolute path is specified (`/metrics`), the prometheus endpoint will be served from the configured path, e.g. * `http://${quarkus.management.host}:${quarkus.management.port}/metrics`. * * @asciidoclet */ @WithDefault("metrics") String path(); /** * By default, this extension will create a Prometheus MeterRegistry instance. * <p> * Use this attribute to veto the creation of the default Prometheus MeterRegistry. */ @WithDefault("true") boolean defaultRegistry(); }
is
java
ReactiveX__RxJava
src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableReduceTest.java
{ "start": 1189, "end": 12655 }
class ____ extends RxJavaTest { Observer<Object> observer; SingleObserver<Object> singleObserver; @Before public void before() { observer = TestHelper.mockObserver(); singleObserver = TestHelper.mockSingleObserver(); } BiFunction<Integer, Integer, Integer> sum = new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer t1, Integer t2) { return t1 + t2; } }; @Test public void aggregateAsIntSumObservable() { Observable<Integer> result = Observable.just(1, 2, 3, 4, 5).reduce(0, sum) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) { return v; } }).toObservable(); result.subscribe(observer); verify(observer).onNext(1 + 2 + 3 + 4 + 5); verify(observer).onComplete(); verify(observer, never()).onError(any(Throwable.class)); } @Test public void aggregateAsIntSumSourceThrowsObservable() { Observable<Integer> result = Observable.concat(Observable.just(1, 2, 3, 4, 5), Observable.<Integer> error(new TestException())) .reduce(0, sum).map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) { return v; } }).toObservable(); result.subscribe(observer); verify(observer, never()).onNext(any()); verify(observer, never()).onComplete(); verify(observer, times(1)).onError(any(TestException.class)); } @Test public void aggregateAsIntSumAccumulatorThrowsObservable() { BiFunction<Integer, Integer, Integer> sumErr = new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer t1, Integer t2) { throw new TestException(); } }; Observable<Integer> result = Observable.just(1, 2, 3, 4, 5) .reduce(0, sumErr).map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) { return v; } }).toObservable(); result.subscribe(observer); verify(observer, never()).onNext(any()); verify(observer, never()).onComplete(); verify(observer, times(1)).onError(any(TestException.class)); } @Test public void aggregateAsIntSumResultSelectorThrowsObservable() { Function<Integer, Integer> error = new Function<Integer, Integer>() { @Override public Integer apply(Integer t1) { throw new TestException(); } }; Observable<Integer> result = Observable.just(1, 2, 3, 4, 5) .reduce(0, sum).toObservable().map(error); result.subscribe(observer); verify(observer, never()).onNext(any()); verify(observer, never()).onComplete(); verify(observer, times(1)).onError(any(TestException.class)); } @Test public void backpressureWithNoInitialValueObservable() throws InterruptedException { Observable<Integer> source = Observable.just(1, 2, 3, 4, 5, 6); Observable<Integer> reduced = source.reduce(sum).toObservable(); Integer r = reduced.blockingFirst(); assertEquals(21, r.intValue()); } @Test public void backpressureWithInitialValueObservable() throws InterruptedException { Observable<Integer> source = Observable.just(1, 2, 3, 4, 5, 6); Observable<Integer> reduced = source.reduce(0, sum).toObservable(); Integer r = reduced.blockingFirst(); assertEquals(21, r.intValue()); } @Test public void aggregateAsIntSum() { Single<Integer> result = Observable.just(1, 2, 3, 4, 5).reduce(0, sum) .map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) { return v; } }); result.subscribe(singleObserver); verify(singleObserver).onSuccess(1 + 2 + 3 + 4 + 5); verify(singleObserver, never()).onError(any(Throwable.class)); } @Test public void aggregateAsIntSumSourceThrows() { Single<Integer> result = Observable.concat(Observable.just(1, 2, 3, 4, 5), Observable.<Integer> error(new TestException())) .reduce(0, sum).map(new Function<Integer, 
Integer>() { @Override public Integer apply(Integer v) { return v; } }); result.subscribe(singleObserver); verify(singleObserver, never()).onSuccess(any()); verify(singleObserver, times(1)).onError(any(TestException.class)); } @Test public void aggregateAsIntSumAccumulatorThrows() { BiFunction<Integer, Integer, Integer> sumErr = new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer t1, Integer t2) { throw new TestException(); } }; Single<Integer> result = Observable.just(1, 2, 3, 4, 5) .reduce(0, sumErr).map(new Function<Integer, Integer>() { @Override public Integer apply(Integer v) { return v; } }); result.subscribe(singleObserver); verify(singleObserver, never()).onSuccess(any()); verify(singleObserver, times(1)).onError(any(TestException.class)); } @Test public void aggregateAsIntSumResultSelectorThrows() { Function<Integer, Integer> error = new Function<Integer, Integer>() { @Override public Integer apply(Integer t1) { throw new TestException(); } }; Single<Integer> result = Observable.just(1, 2, 3, 4, 5) .reduce(0, sum).map(error); result.subscribe(singleObserver); verify(singleObserver, never()).onSuccess(any()); verify(singleObserver, times(1)).onError(any(TestException.class)); } @Test public void backpressureWithNoInitialValue() throws InterruptedException { Observable<Integer> source = Observable.just(1, 2, 3, 4, 5, 6); Maybe<Integer> reduced = source.reduce(sum); Integer r = reduced.blockingGet(); assertEquals(21, r.intValue()); } @Test public void backpressureWithInitialValue() throws InterruptedException { Observable<Integer> source = Observable.just(1, 2, 3, 4, 5, 6); Single<Integer> reduced = source.reduce(0, sum); Integer r = reduced.blockingGet(); assertEquals(21, r.intValue()); } @Test public void reduceWithSingle() { Observable.range(1, 5) .reduceWith(new Supplier<Integer>() { @Override public Integer get() throws Exception { return 0; } }, new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer a, Integer b) throws Exception { return a + b; } }) .test() .assertResult(15); } @Test public void reduceMaybeDoubleOnSubscribe() { TestHelper.checkDoubleOnSubscribeObservableToMaybe(new Function<Observable<Object>, MaybeSource<Object>>() { @Override public MaybeSource<Object> apply(Observable<Object> o) throws Exception { return o.reduce(new BiFunction<Object, Object, Object>() { @Override public Object apply(Object a, Object b) throws Exception { return a; } }); } }); } @Test public void reduceMaybeCheckDisposed() { TestHelper.checkDisposed(Observable.just(new Object()).reduce(new BiFunction<Object, Object, Object>() { @Override public Object apply(Object a, Object b) throws Exception { return a; } })); } @Test public void reduceMaybeBadSource() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new Observable<Object>() { @Override protected void subscribeActual(Observer<? 
super Object> observer) { observer.onSubscribe(Disposable.empty()); observer.onComplete(); observer.onNext(1); observer.onError(new TestException()); observer.onComplete(); } }.reduce(new BiFunction<Object, Object, Object>() { @Override public Object apply(Object a, Object b) throws Exception { return a; } }) .test() .assertResult(); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } @Test public void seedDoubleOnSubscribe() { TestHelper.checkDoubleOnSubscribeObservableToSingle(new Function<Observable<Integer>, SingleSource<Integer>>() { @Override public SingleSource<Integer> apply(Observable<Integer> o) throws Exception { return o.reduce(0, new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer a, Integer b) throws Exception { return a; } }); } }); } @Test public void seedDisposed() { TestHelper.checkDisposed(PublishSubject.<Integer>create().reduce(0, new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer a, Integer b) throws Exception { return a; } })); } @Test public void seedBadSource() { List<Throwable> errors = TestHelper.trackPluginErrors(); try { new Observable<Integer>() { @Override protected void subscribeActual(Observer<? super Integer> observer) { observer.onSubscribe(Disposable.empty()); observer.onComplete(); observer.onNext(1); observer.onError(new TestException()); observer.onComplete(); } } .reduce(0, new BiFunction<Integer, Integer, Integer>() { @Override public Integer apply(Integer a, Integer b) throws Exception { return a; } }) .test() .assertResult(0); TestHelper.assertUndeliverable(errors, 0, TestException.class); } finally { RxJavaPlugins.reset(); } } }
ObservableReduceTest
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_252.java
{ "start": 195, "end": 478 }
class ____ extends TestCase { public void test_for_issue() throws Exception { VO vo = new VO(); String text = JSON.toJSONString(vo, SerializerFeature.WriteMapNullValue); Assert.assertEquals("{\"type\":null}", text); } public static
Bug_for_issue_252
java
apache__flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/ComparableInputTypeStrategyTest.java
{ "start": 14803, "end": 23947 }
class ____ not implement Comparable", SpecificInputTypeStrategies.TWO_EQUALS_COMPARABLE) .calledWithArgumentTypes( rawType(NotComparableClass.class), rawType(NotComparableClass.class)) .expectErrorMessage( String.format( "All types in a comparison should support 'EQUALS' comparison with" + " each other. Can not compare RAW('%s', '...') with RAW('%s', '...')", NotComparableClass.class.getName(), NotComparableClass.class.getName())), TestSpec.forStrategy( "RAW types are not comparable if the types are different", SpecificInputTypeStrategies.TWO_EQUALS_COMPARABLE) .calledWithArgumentTypes( rawType(NotComparableClass.class), DataTypes.RAW( NotComparableClass.class, new PojoSerializer<>( NotComparableClass.class, new TypeSerializer[0], new Field[0], new SerializerConfigImpl()))) .expectErrorMessage( String.format( "All types in a comparison should support 'EQUALS' comparison with" + " each other. Can not compare RAW('%s', '...') with RAW('%s', '...')", NotComparableClass.class.getName(), NotComparableClass.class.getName())), TestSpec.forStrategy( "Not fully comparable structured types", SpecificInputTypeStrategies.TWO_FULLY_COMPARABLE) .calledWithArgumentTypes( structuredType( "type", singletonList(DataTypes.INT()), StructuredComparison.EQUALS), structuredType( "type", singletonList(DataTypes.INT()), StructuredComparison.EQUALS)) .expectErrorMessage( "All types in a comparison should support both 'EQUALS' and 'ORDER' comparison" + " with each other. Can not compare `cat`.`db`.`type` with `cat`.`db`.`type`"), TestSpec.forStrategy( "Two different structured types are not comparable", SpecificInputTypeStrategies.TWO_EQUALS_COMPARABLE) .calledWithArgumentTypes( structuredType( "type1", singletonList(DataTypes.INT()), StructuredComparison.EQUALS), structuredType( "type2", singletonList(DataTypes.INT()), StructuredComparison.EQUALS)) .expectErrorMessage( "All types in a comparison should support 'EQUALS' comparison with each other." + " Can not compare `cat`.`db`.`type1` with `cat`.`db`.`type2`"), TestSpec.forStrategy( "Two different distinct types are not comparable even if they point to the same type", SpecificInputTypeStrategies.TWO_EQUALS_COMPARABLE) .calledWithArgumentTypes( distinctType("type1", DataTypes.INT()), distinctType("type2", DataTypes.INT())) .expectErrorMessage( "All types in a comparison should support 'EQUALS' comparison with each other." + " Can not compare `cat`.`db`.`type1` with `cat`.`db`.`type2`"), TestSpec.forStrategy( "Not comparable array types", SpecificInputTypeStrategies.TWO_FULLY_COMPARABLE) .calledWithArgumentTypes( DataTypes.ARRAY(DataTypes.TINYINT()), DataTypes.ARRAY(DataTypes.VARCHAR(2))) .expectErrorMessage( "All types in a comparison should support both 'EQUALS' and 'ORDER' comparison" + " with each other. Can not compare ARRAY<TINYINT> with ARRAY<VARCHAR(2)>"), TestSpec.forStrategy( "Not comparable key types in map types", SpecificInputTypeStrategies.TWO_FULLY_COMPARABLE) .calledWithArgumentTypes( DataTypes.MAP(DataTypes.TINYINT(), DataTypes.TIMESTAMP()), DataTypes.MAP( DataTypes.VARCHAR(3), DataTypes.TIMESTAMP_WITH_TIME_ZONE())) .expectErrorMessage( "All types in a comparison should support both 'EQUALS' and 'ORDER' comparison" + " with each other. 
Can not compare MAP<TINYINT, TIMESTAMP(6)> with" + " MAP<VARCHAR(3), TIMESTAMP(6) WITH TIME ZONE>"), TestSpec.forStrategy( "Not comparable value types in map types", SpecificInputTypeStrategies.TWO_FULLY_COMPARABLE) .calledWithArgumentTypes( DataTypes.MAP(DataTypes.TINYINT(), DataTypes.TIMESTAMP()), DataTypes.MAP(DataTypes.DECIMAL(10, 3), DataTypes.INT())) .expectErrorMessage( "All types in a comparison should support both 'EQUALS' and 'ORDER' comparison" + " with each other. Can not compare MAP<TINYINT, TIMESTAMP(6)> with MAP<DECIMAL(10, 3), INT>"), TestSpec.forStrategy( "Not comparable types", SpecificInputTypeStrategies.TWO_FULLY_COMPARABLE) .calledWithArgumentTypes(DataTypes.TIMESTAMP(), DataTypes.BIGINT()) .expectErrorMessage( "All types in a comparison should support both 'EQUALS' and 'ORDER' comparison" + " with each other. Can not compare TIMESTAMP(6) with BIGINT")); } private static <T> DataType rawType(Class<T> clazz) { return DataTypes.RAW(clazz, new KryoSerializer<>(clazz, new SerializerConfigImpl())); } private static DataType distinctType(String typeName, DataType sourceType) { return new AtomicDataType( DistinctType.newBuilder( ObjectIdentifier.of("cat", "db", typeName), sourceType.getLogicalType()) .build(), sourceType.getConversionClass()); } private static DataType structuredType( String typeName, List<DataType> fieldDataTypes, StructuredComparison comparison) { return new FieldsDataType( StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", typeName)) .attributes( IntStream.range(0, fieldDataTypes.size()) .mapToObj( idx -> new StructuredType.StructuredAttribute( "f" + idx, fieldDataTypes .get(idx) .getLogicalType())) .collect(Collectors.toList())) .comparison(comparison) .build(), fieldDataTypes); } private static
does
java
alibaba__nacos
config/src/main/java/com/alibaba/nacos/config/server/model/ConfigAdvanceInfo.java
{ "start": 780, "end": 3505 }
class ____ implements Serializable { static final long serialVersionUID = 3148031484920416869L; private long createTime; private long modifyTime; private String createUser; private String createIp; private String desc; private String use; private String effect; private String type; private String schema; private String configTags; public long getCreateTime() { return createTime; } public void setCreateTime(long createTime) { this.createTime = createTime; } public long getModifyTime() { return modifyTime; } public void setModifyTime(long modifyTime) { this.modifyTime = modifyTime; } public String getCreateUser() { return createUser; } public void setCreateUser(String createUser) { this.createUser = createUser; } public String getCreateIp() { return createIp; } public void setCreateIp(String createIp) { this.createIp = createIp; } public String getDesc() { return desc; } public void setDesc(String desc) { this.desc = desc; } public String getUse() { return use; } public void setUse(String use) { this.use = use; } public String getEffect() { return effect; } public void setEffect(String effect) { this.effect = effect; } public String getType() { return type; } public void setType(String type) { this.type = type; } public String getSchema() { return schema; } public void setSchema(String schema) { this.schema = schema; } public String getConfigTags() { return configTags; } public void setConfigTags(String configTags) { this.configTags = configTags; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } ConfigAdvanceInfo that = (ConfigAdvanceInfo) o; return createTime == that.createTime && modifyTime == that.modifyTime && Objects.equals(createUser, that.createUser) && Objects.equals(createIp, that.createIp) && Objects.equals(desc, that.desc) && Objects.equals(use, that.use) && Objects.equals(effect, that.effect) && Objects.equals(type, that.type) && Objects.equals(schema, that.schema) && Objects.equals(configTags, that.configTags); } }
ConfigAdvanceInfo
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/FunctionalInterfaceClashTest.java
{ "start": 1831, "end": 2290 }
interface ____ { String call(); } // BUG: Diagnostic contains: foo(MyCallable) void foo(Callable<String> x) {} void foo(MyCallable c) {} } """) .doTest(); } @Test public void positiveInherited() { testHelper .addSourceLines( "Super.java", """ import java.util.function.Function;
MyCallable
java
quarkusio__quarkus
core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestTracingProcessor.java
{ "start": 2299, "end": 7153 }
class ____ { @BuildStep(onlyIfNot = IsProduction.class) LogCleanupFilterBuildItem handle() { return new LogCleanupFilterBuildItem("org.junit.platform.launcher.core.EngineDiscoveryOrchestrator", "0 containers"); } static volatile boolean testingSetup; @BuildStep TestListenerBuildItem sharedStateListener() { return new TestListenerBuildItem(new ContinuousTestingSharedStateListener()); } @BuildStep(onlyIf = IsDevelopment.class) @Produce(LogHandlerBuildItem.class) @Produce(TestSetupBuildItem.class) @Produce(ServiceStartBuildItem.class) void startTesting(TestConfig config, LiveReloadBuildItem liveReloadBuildItem, LaunchModeBuildItem launchModeBuildItem, List<TestListenerBuildItem> testListenerBuildItems) { if (TestSupport.instance().isEmpty()) { return; } TestSupport testSupport = TestSupport.instance().get(); testSupport.setConfig(config); if ((config.continuousTesting() == TestConfig.Mode.DISABLED) || config.flatClassPath()) { return; } DevModeType devModeType = launchModeBuildItem.getDevModeType().orElse(null); if (devModeType == null || !devModeType.isContinuousTestingSupported()) { return; } if (testingSetup) { return; } testingSetup = true; for (TestListenerBuildItem i : testListenerBuildItems) { testSupport.addListener(i.listener); } testSupport.setTags(config.includeTags().orElse(Collections.emptyList()), config.excludeTags().orElse(Collections.emptyList())); testSupport.setPatterns(config.includePattern().orElse(null), config.excludePattern().orElse(null)); String specificSelection = System.getProperty("quarkus-internal.test.specific-selection"); if (specificSelection != null) { testSupport.setSpecificSelection(specificSelection); } testSupport.setEngines(config.includeEngines().orElse(Collections.emptyList()), config.excludeEngines().orElse(Collections.emptyList())); testSupport.setConfiguredDisplayTestOutput(config.displayTestOutput()); testSupport.setTestType(config.type()); if (!liveReloadBuildItem.isLiveReload()) { if (config.continuousTesting() == TestConfig.Mode.ENABLED) { testSupport.start(); } else if (config.continuousTesting() == TestConfig.Mode.PAUSED) { testSupport.stop(); } } QuarkusClassLoader cl = (QuarkusClassLoader) Thread.currentThread().getContextClassLoader(); ((QuarkusClassLoader) cl.parent()).addCloseTask(ContinuousTestingSharedStateManager::reset); } @BuildStep(onlyIf = IsTest.class) public void instrumentTestClasses(CombinedIndexBuildItem combinedIndexBuildItem, LaunchModeBuildItem launchModeBuildItem, BuildProducer<BytecodeTransformerBuildItem> transformerProducer) { if (!launchModeBuildItem.isAuxiliaryApplication()) { return; } for (ClassInfo clazz : combinedIndexBuildItem.getIndex().getKnownClasses()) { String theClassName = clazz.name().toString(); if (isAppClass(theClassName)) { transformerProducer.produce(new BytecodeTransformerBuildItem.Builder() .setClassToTransform(theClassName) .setVisitorFunction( new BiFunction<String, ClassVisitor, ClassVisitor>() { @Override public ClassVisitor apply(String s, ClassVisitor classVisitor) { return new TracingClassVisitor(classVisitor, theClassName); } }) .setCacheable(true) .setContinueOnFailure(true) .build()); } } } @BuildStep(onlyIf = IsTest.class) public ServiceStartBuildItem searchForTags(CombinedIndexBuildItem combinedIndexBuildItem, LaunchModeBuildItem launchModeBuildItem) { if (!launchModeBuildItem.isAuxiliaryApplication()) { return null; } Set<String> ret = new HashSet<>(); for (AnnotationInstance clazz : combinedIndexBuildItem.getIndex() .getAnnotations(DotName.createSimple(Tag.class.getName()))) { 
ret.add(clazz.value().asString()); } KnownTags.setKnownTags(ret); return null; } public boolean isAppClass(String className) { return QuarkusClassLoader.isApplicationClass(className); } public static
TestTracingProcessor
java
spring-projects__spring-framework
spring-jdbc/src/test/java/org/springframework/jdbc/config/JdbcNamespaceIntegrationTests.java
{ "start": 2212, "end": 8143 }
class ____ { @Test @EnabledForTestGroups(LONG_RUNNING) void createEmbeddedDatabase() { assertCorrectSetup("jdbc-config.xml", "dataSource", "h2DataSource", "derbyDataSource"); } @Test @EnabledForTestGroups(LONG_RUNNING) void createEmbeddedDatabaseAgain() { // If Derby isn't cleaned up properly this will fail... assertCorrectSetup("jdbc-config.xml", "derbyDataSource"); } @Test void createWithResourcePattern() { assertCorrectSetup("jdbc-config-pattern.xml", "dataSource"); } @Test void createWithNonExistentResource() { assertThatExceptionOfType(BeanCreationException.class) .isThrownBy(() -> assertCorrectSetup("jdbc-config-nonexistent.xml", "dataSource")) .withCauseInstanceOf(CannotReadScriptException.class); } @Test void createWithAnonymousDataSourceAndDefaultDatabaseName() { assertThat(extractDataSourceUrl("jdbc-config-db-name-default-and-anonymous-datasource.xml")) .endsWith(DEFAULT_DATABASE_NAME); } @Test void createWithImplicitDatabaseName() { assertThat(extractDataSourceUrl("jdbc-config-db-name-implicit.xml")).endsWith("dataSource"); } @Test void createWithExplicitDatabaseName() { assertThat(extractDataSourceUrl("jdbc-config-db-name-explicit.xml")).endsWith("customDbName"); } @Test void createWithGeneratedDatabaseName() { assertThat(extractDataSourceUrl("jdbc-config-db-name-generated.xml")).startsWith("jdbc:hsqldb:mem:") .doesNotEndWith("dataSource").doesNotEndWith("shouldBeOverriddenByGeneratedName"); } @Test void createWithEndings() { assertCorrectSetupAndCloseContext("jdbc-initialize-endings-config.xml", 2, "dataSource"); } @Test void createWithEndingsNested() { assertCorrectSetupAndCloseContext("jdbc-initialize-endings-nested-config.xml", 2, "dataSource"); } @Test void createAndDestroy() { try (ClassPathXmlApplicationContext context = context("jdbc-destroy-config.xml")) { DataSource dataSource = context.getBean(DataSource.class); JdbcTemplate template = new JdbcTemplate(dataSource); assertNumRowsInTestTable(template, 1); context.getBean(DataSourceInitializer.class).destroy(); // Table has been dropped assertThatExceptionOfType(BadSqlGrammarException.class).isThrownBy(() -> assertNumRowsInTestTable(template, 1)); } } @Test void createAndDestroyNestedWithHsql() { try (ClassPathXmlApplicationContext context = context("jdbc-destroy-nested-config.xml")) { DataSource dataSource = context.getBean(DataSource.class); JdbcTemplate template = new JdbcTemplate(dataSource); assertNumRowsInTestTable(template, 1); context.getBean(EmbeddedDatabaseFactoryBean.class).destroy(); // Table has been dropped assertThatExceptionOfType(BadSqlGrammarException.class).isThrownBy(() -> assertNumRowsInTestTable(template, 1)); } } @Test void createAndDestroyNestedWithH2() { try (ClassPathXmlApplicationContext context = context("jdbc-destroy-nested-config-h2.xml")) { DataSource dataSource = context.getBean(DataSource.class); JdbcTemplate template = new JdbcTemplate(dataSource); assertNumRowsInTestTable(template, 1); context.getBean(EmbeddedDatabaseFactoryBean.class).destroy(); // Table has been dropped assertThatExceptionOfType(BadSqlGrammarException.class).isThrownBy(() -> assertNumRowsInTestTable(template, 1)); } } @Test void multipleDataSourcesHaveDifferentDatabaseNames() { DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); new XmlBeanDefinitionReader(factory).loadBeanDefinitions(new ClassPathResource( "jdbc-config-multiple-datasources.xml", getClass())); assertBeanPropertyValueOf("databaseName", "firstDataSource", factory); assertBeanPropertyValueOf("databaseName", "secondDataSource", factory); } @Test void initializeWithCustomSeparator() { assertCorrectSetupAndCloseContext("jdbc-initialize-custom-separator.xml", 2, "dataSource"); } @Test void embeddedWithCustomSeparator() { assertCorrectSetupAndCloseContext("jdbc-config-custom-separator.xml", 2, "dataSource"); } private ClassPathXmlApplicationContext context(String file) { return new ClassPathXmlApplicationContext(file, getClass()); } private void assertBeanPropertyValueOf(String propertyName, String expected, DefaultListableBeanFactory factory) { BeanDefinition bean = factory.getBeanDefinition(expected); PropertyValue value = bean.getPropertyValues().getPropertyValue(propertyName); assertThat(value).isNotNull(); assertThat(value.getValue().toString()).isEqualTo(expected); } private void assertNumRowsInTestTable(JdbcTemplate template, int count) { assertThat(template.queryForObject("select count(*) from T_TEST", Integer.class)).isEqualTo(count); } private void assertCorrectSetup(String file, String... dataSources) { assertCorrectSetupAndCloseContext(file, 1, dataSources); } private void assertCorrectSetupAndCloseContext(String file, int count, String... dataSources) { try (ConfigurableApplicationContext context = context(file)) { for (String dataSourceName : dataSources) { DataSource dataSource = context.getBean(dataSourceName, DataSource.class); assertNumRowsInTestTable(new JdbcTemplate(dataSource), count); assertThat(dataSource).isInstanceOf(AbstractDriverBasedDataSource.class); AbstractDriverBasedDataSource adbDataSource = (AbstractDriverBasedDataSource) dataSource; assertThat(adbDataSource.getUrl()).contains(dataSourceName); } } } private @Nullable String extractDataSourceUrl(String file) { try (ConfigurableApplicationContext context = context(file)) { DataSource dataSource = context.getBean(DataSource.class); assertNumRowsInTestTable(new JdbcTemplate(dataSource), 1); assertThat(dataSource).isInstanceOf(AbstractDriverBasedDataSource.class); AbstractDriverBasedDataSource adbDataSource = (AbstractDriverBasedDataSource) dataSource; return adbDataSource.getUrl(); } } }
JdbcNamespaceIntegrationTests
java
apache__rocketmq
tools/src/test/java/org/apache/rocketmq/tools/command/namesrv/WipeWritePermSubCommandTest.java
{ "start": 1332, "end": 2774 }
class ____ { private ServerResponseMocker brokerMocker; private ServerResponseMocker nameServerMocker; @Before public void before() { brokerMocker = startOneBroker(); nameServerMocker = startNameServer(); } @After public void after() { brokerMocker.shutdown(); nameServerMocker.shutdown(); } @Test public void testExecute() throws SubCommandException { WipeWritePermSubCommand cmd = new WipeWritePermSubCommand(); Options options = ServerUtil.buildCommandlineOptions(new Options()); String[] subargs = new String[] {"-b default-broker"}; final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser()); cmd.execute(commandLine, options, null); } private ServerResponseMocker startNameServer() { HashMap<String, String> extMap = new HashMap<>(); extMap.put("wipeTopicCount", "1"); // start name server return NameServerMocker.startByDefaultConf(brokerMocker.listenPort(), extMap); } private ServerResponseMocker startOneBroker() { // start broker HashMap<String, String> extMap = new HashMap<>(); extMap.put("wipeTopicCount", "1"); return ServerResponseMocker.startServer(new byte[0], extMap); } }
WipeWritePermSubCommandTest
java
apache__logging-log4j2
log4j-core/src/main/java/org/apache/logging/log4j/core/filter/MutableThreadContextMapFilter.java
{ "start": 19301, "end": 19390 }
interface ____ { void onEvent(); } private static
FilterConfigUpdateListener
java
hibernate__hibernate-orm
hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/userguide/PersonPhoneCount.java
{ "start": 223, "end": 564 }
class ____ { private final String name; @Column(name = "phone_count") private final Number phoneCount; public PersonPhoneCount(String name, Number phoneCount) { this.name = name; this.phoneCount = phoneCount; } public String getName() { return name; } public Number getPhoneCount() { return phoneCount; } }
PersonPhoneCount
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/Or.java
{ "start": 795, "end": 2037 }
class ____ extends BinaryLogic implements Negatable<BinaryLogic> { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Or", Or::new); public Or(Source source, Expression left, Expression right) { super(source, left, right, BinaryLogicOperation.OR); } private Or(StreamInput in) throws IOException { super(in, BinaryLogicOperation.OR); } @Override public String getWriteableName() { return ENTRY.name; } @Override protected NodeInfo<Or> info() { return NodeInfo.create(this, Or::new, left(), right()); } @Override protected Or replaceChildren(Expression newLeft, Expression newRight) { return new Or(source(), newLeft, newRight); } @Override public Or swapLeftAndRight() { return new Or(source(), right(), left()); } @Override public And negate() { return new And(source(), Not.negate(left()), Not.negate(right())); } @Override protected Expression canonicalize() { // NB: this add a circular dependency between Predicates / Logical package return Predicates.combineOr(Predicates.splitOr(super.canonicalize())); } }
Or
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/ExplicitDateConvertersTest.java
{ "start": 1846, "end": 3045 }
class ____ { @Id private Integer id; private String name; @Convert( converter = LongToDateConverter.class ) private Date theDate; public Entity1() { } public Entity1(Integer id, String name, Date theDate) { this.id = id; this.name = name; this.theDate = theDate; } } @Test public void testSimpleConvertUsage(SessionFactoryScope scope) { final EntityPersister ep = scope.getSessionFactory().getMappingMetamodel().getEntityDescriptor(Entity1.class.getName()); final Type theDatePropertyType = ep.getPropertyType( "theDate" ); final ConvertedBasicTypeImpl type = assertTyping( ConvertedBasicTypeImpl.class, theDatePropertyType ); final JpaAttributeConverter converter = (JpaAttributeConverter) type.getValueConverter(); assertTrue( LongToDateConverter.class.isAssignableFrom( converter.getConverterJavaType().getJavaTypeClass() ) ); resetFlags(); scope.inTransaction( session -> session.persist(new Entity1(1, "1", new Date())) ); assertTrue( convertToDatabaseColumnCalled ); resetFlags(); scope.inTransaction( session -> session.find( Entity1.class, 1 ) ); assertTrue( convertToEntityAttributeCalled ); scope.dropData(); } }
Entity1
java
apache__kafka
generator/src/main/java/org/apache/kafka/message/TypeClassGenerator.java
{ "start": 902, "end": 975 }
interface ____ { /** * The short name of the type
TypeClassGenerator
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/persister/entity/CustomSqlNamespaceInjectionTests.java
{ "start": 6329, "end": 6425 }
class ____ { @Id public Integer id; private String name; } public static
CustomDomainEntity
java
google__dagger
javatests/dagger/internal/codegen/AssistedFactoryTest.java
{ "start": 7483, "end": 7702 }
interface ____ {", " Foo create(Integer arg);", "}"); Source bar = CompilerTests.javaSource( "test.Bar", "package test;", "", "
FooFactory
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/domain/DomainRowKey.java
{ "start": 3078, "end": 5741 }
class ____ implements KeyConverter<DomainRowKey>, KeyConverterToString<DomainRowKey> { private DomainRowKeyConverter() { } /** * The domain row key is of the form * clusterId!domainId with each segment separated by !. * The sizes below indicate sizes of each one of * these segements in sequence. * clusterId and domainId are strings. * Strings are variable in size * (i.e. they end whenever separator is encountered). * This is used while * decoding and helps in determining where to split. */ private static final int[] SEGMENT_SIZES = { Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE}; /* * (non-Javadoc) * * Encodes DomainRowKey object into a byte array * * @see org.apache.hadoop.yarn.server.timelineservice.storage.common * .KeyConverter#encode(java.lang.Object) */ @Override public byte[] encode(DomainRowKey rowKey) { if (rowKey == null) { return Separator.EMPTY_BYTES; } byte[] cluster = Separator.encode(rowKey.getClusterId(), Separator.SPACE, Separator.TAB, Separator.QUALIFIERS); byte[] domainIdBytes = Separator.encode(rowKey.getDomainId(), Separator.SPACE, Separator.TAB, Separator.QUALIFIERS); return Separator.QUALIFIERS.join(cluster, domainIdBytes); } @Override public DomainRowKey decode(byte[] rowKey) { byte[][] rowKeyComponents = Separator.QUALIFIERS.split(rowKey, SEGMENT_SIZES); if (rowKeyComponents.length != 2) { throw new IllegalArgumentException("the row key is not valid for " + "a domain id"); } String clusterId = Separator.decode(Bytes.toString(rowKeyComponents[0]), Separator.QUALIFIERS, Separator.TAB, Separator.SPACE); String domainId = Separator.decode(Bytes.toString(rowKeyComponents[1]), Separator.QUALIFIERS, Separator.TAB, Separator.SPACE); return new DomainRowKey(clusterId, domainId); } @Override public String encodeAsString(DomainRowKey key) { return TimelineReaderUtils.joinAndEscapeStrings( new String[] {key.clusterId, key.domainId}); } @Override public DomainRowKey decodeFromString(String encodedRowKey) { List<String> split = TimelineReaderUtils.split(encodedRowKey); if (split == null || split.size() != 2) { throw new IllegalArgumentException( "Invalid row key for domain id."); } return new DomainRowKey(split.get(0), split.get(1)); } } }
DomainRowKeyConverter
java
elastic__elasticsearch
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java
{ "start": 1822, "end": 2924 }
class ____ extends HandledTransportAction< TransportRepositoryVerifyIntegrityAction.Request, RepositoryVerifyIntegrityResponse> { // NB runs on the master because that's the expected place to read metadata blobs from the repository, but not an actual // TransportMasterNodeAction since we don't want to retry on a master failover static final String ACTION_NAME = TransportRepositoryVerifyIntegrityCoordinationAction.INSTANCE.name() + "[m]"; private final RepositoriesService repositoriesService; private final TransportService transportService; private final Executor executor; TransportRepositoryVerifyIntegrityAction( TransportService transportService, RepositoriesService repositoriesService, ActionFilters actionFilters, Executor executor ) { super(ACTION_NAME, transportService, actionFilters, Request::new, executor); this.repositoriesService = repositoriesService; this.transportService = transportService; this.executor = executor; } static
TransportRepositoryVerifyIntegrityAction
java
alibaba__nacos
config/src/main/java/com/alibaba/nacos/config/server/model/event/IstioConfigChangeEvent.java
{ "start": 742, "end": 1219 }
class ____ extends ConfigDataChangeEvent { private static final long serialVersionUID = -2618455009648617192L; public final String content; public final String type; public IstioConfigChangeEvent(String dataId, String group, String tenant, long gmtModified, String content, String type) { super(dataId, group, tenant, gmtModified); this.content = content; this.type = type; } }
IstioConfigChangeEvent
java
elastic__elasticsearch
x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java
{ "start": 3012, "end": 11911 }
class ____ extends AcknowledgedTransportMasterNodeAction<Request> { private static final Logger logger = LogManager.getLogger(TransportDeleteTransformAction.class); private final TransformConfigManager transformConfigManager; private final TransformAuditor auditor; private final Client client; private final ProjectResolver projectResolver; @Inject public TransportDeleteTransformAction( TransportService transportService, ActionFilters actionFilters, ThreadPool threadPool, ClusterService clusterService, TransformServices transformServices, Client client, ProjectResolver projectResolver ) { super( DeleteTransformAction.NAME, transportService, clusterService, threadPool, actionFilters, Request::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.transformConfigManager = transformServices.configManager(); this.auditor = transformServices.auditor(); this.client = client; this.projectResolver = projectResolver; } @Override protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<AcknowledgedResponse> listener) { if (TransformMetadata.upgradeMode(state)) { listener.onFailure( new ElasticsearchStatusException( "Cannot delete any Transform while the Transform feature is upgrading.", RestStatus.CONFLICT ) ); return; } final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); final boolean transformIsRunning = TransformTask.getTransformTask(request.getId(), state) != null; if (transformIsRunning && request.isForce() == false) { listener.onFailure( new ElasticsearchStatusException( "Cannot delete transform [" + request.getId() + "] as the task is running. Stop the task first", RestStatus.CONFLICT ) ); return; } // <3> Delete transform config ActionListener<AcknowledgedResponse> deleteDestIndexListener = ActionListener.wrap( unusedAcknowledgedResponse -> transformConfigManager.deleteTransform(request.getId(), ActionListener.wrap(r -> { logger.info("[{}] deleted transform", request.getId()); auditor.info(request.getId(), "Deleted transform."); listener.onResponse(AcknowledgedResponse.of(r)); }, listener::onFailure)), listener::onFailure ); // <2> Delete destination index if requested ActionListener<StopTransformAction.Response> stopTransformActionListener = ActionListener.wrap(unusedStopResponse -> { if (request.isDeleteDestIndex()) { deleteDestinationIndex(parentTaskId, request.getId(), request.ackTimeout(), deleteDestIndexListener); } else { deleteDestIndexListener.onResponse(null); } }, listener::onFailure); // <1> Stop transform if it's currently running stopTransform(transformIsRunning, parentTaskId, request.getId(), request.ackTimeout(), stopTransformActionListener); } private void stopTransform( boolean transformIsRunning, TaskId parentTaskId, String transformId, TimeValue timeout, ActionListener<StopTransformAction.Response> listener ) { if (transformIsRunning == false) { listener.onResponse(null); return; } StopTransformAction.Request stopTransformRequest = new StopTransformAction.Request(transformId, true, true, timeout, true, false); stopTransformRequest.setParentTask(parentTaskId); executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, StopTransformAction.INSTANCE, stopTransformRequest, listener); } private void deleteDestinationIndex( TaskId parentTaskId, String transformId, TimeValue timeout, ActionListener<AcknowledgedResponse> listener ) { getTransformConfig(transformId).<AcknowledgedResponse>andThen((l, r) -> deleteDestinationIndex(r.v1(), parentTaskId, timeout, l)) .addListener(listener.delegateResponse((l, e) -> { if (e instanceof IndexNotFoundException) { l.onResponse(AcknowledgedResponse.TRUE); } else { l.onFailure(e); } })); } private SubscribableListener<Tuple<TransformConfig, SeqNoPrimaryTermAndIndex>> getTransformConfig(String transformId) { return SubscribableListener.newForked(l -> transformConfigManager.getTransformConfigurationForUpdate(transformId, l)); } /** * Delete the destination index. If the Transform is configured to write to an alias, then follow that alias to the concrete index. */ private void deleteDestinationIndex( TransformConfig config, TaskId parentTaskId, TimeValue timeout, ActionListener<AcknowledgedResponse> listener ) { SubscribableListener.<String>newForked(l -> resolveDestinationIndex(config, parentTaskId, timeout, l)) .<AcknowledgedResponse>andThen((l, destIndex) -> { DeleteIndexRequest deleteDestIndexRequest = new DeleteIndexRequest(destIndex); deleteDestIndexRequest.ackTimeout(timeout); deleteDestIndexRequest.setParentTask(parentTaskId); executeWithHeadersAsync( config.getHeaders(), TRANSFORM_ORIGIN, client, TransportDeleteIndexAction.TYPE, deleteDestIndexRequest, l ); }) .addListener(listener); } private void resolveDestinationIndex(TransformConfig config, TaskId parentTaskId, TimeValue timeout, ActionListener<String> listener) { var destIndex = config.getDestination().getIndex(); var responseListener = ActionListener.<GetAliasesResponse>wrap(r -> findDestinationIndexInAliases(r, destIndex, listener), e -> { if (e instanceof AliasesNotFoundException) { // no alias == the destIndex is our concrete index listener.onResponse(destIndex); } else { listener.onFailure(e); } }); GetAliasesRequest request = new GetAliasesRequest(timeout, destIndex); request.setParentTask(parentTaskId); executeWithHeadersAsync(config.getHeaders(), TRANSFORM_ORIGIN, client, GetAliasesAction.INSTANCE, request, responseListener); } private static void findDestinationIndexInAliases(GetAliasesResponse aliases, String destIndex, ActionListener<String> listener) { var indexToAliases = aliases.getAliases(); if (indexToAliases.isEmpty()) { // if the alias list is empty, that means the index is a concrete index listener.onResponse(destIndex); } else if (indexToAliases.size() == 1) { // if there is one value, the alias will treat it as the write index, so it's our destination index listener.onResponse(indexToAliases.keySet().iterator().next()); } else { // if there is more than one index, there may be more than one alias for each index // we have to search for the alias that matches our destination index name AND is declared the write index for that alias indexToAliases.entrySet().stream().map(entry -> { if (entry.getValue().stream().anyMatch(md -> destIndex.equals(md.getAlias()) && Boolean.TRUE.equals(md.writeIndex()))) { return entry.getKey(); } else { return null; } }).filter(Objects::nonNull).findFirst().ifPresentOrElse(listener::onResponse, () -> { listener.onFailure( new ElasticsearchStatusException( "Cannot disambiguate destination index alias [" + destIndex + "]. Alias points to many indices with no clear write alias. Retry with delete_dest_index=false and manually" + " clean up destination index.", RestStatus.CONFLICT ) ); }); } } @Override protected ClusterBlockException checkBlock(Request request, ClusterState state) { return state.blocks().globalBlockedException(projectResolver.getProjectId(), ClusterBlockLevel.METADATA_READ); } }
TransportDeleteTransformAction
java
hibernate__hibernate-orm
hibernate-core/src/main/java/org/hibernate/metamodel/internal/FullNameImplicitDiscriminatorStrategy.java
{ "start": 590, "end": 1501 }
class ____ implements ImplicitDiscriminatorStrategy { public static final FullNameImplicitDiscriminatorStrategy FULL_NAME_STRATEGY = new FullNameImplicitDiscriminatorStrategy(); @Override public Object toDiscriminatorValue(EntityMappingType entityMapping, NavigableRole discriminatorRole, MappingMetamodelImplementor mappingModel) { return entityMapping.getEntityName(); } @Override public EntityMappingType toEntityMapping(Object discriminatorValue, NavigableRole discriminatorRole, MappingMetamodelImplementor mappingModel) { if ( discriminatorValue instanceof String assumedEntityName ) { final var persister = mappingModel.findEntityDescriptor( assumedEntityName ); if ( persister != null ) { return persister; } } throw new HibernateException( "Cannot interpret discriminator value (" + discriminatorRole + ") : " + discriminatorValue ); } }
FullNameImplicitDiscriminatorStrategy
java
spring-projects__spring-security
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/oidc/authentication/OidcIdTokenDecoderFactoryTests.java
{ "start": 2248, "end": 9093 }
class ____ { // @formatter:off private ClientRegistration.Builder registration = TestClientRegistrations .clientRegistration() .scope("openid"); // @formatter:on private OidcIdTokenDecoderFactory idTokenDecoderFactory; @BeforeEach public void setUp() { this.idTokenDecoderFactory = new OidcIdTokenDecoderFactory(); } @Test public void createDefaultClaimTypeConvertersWhenCalledThenDefaultsAreCorrect() { Map<String, Converter<Object, ?>> claimTypeConverters = OidcIdTokenDecoderFactory .createDefaultClaimTypeConverters(); assertThat(claimTypeConverters).containsKey(IdTokenClaimNames.ISS); assertThat(claimTypeConverters).containsKey(IdTokenClaimNames.AUD); assertThat(claimTypeConverters).containsKey(IdTokenClaimNames.NONCE); assertThat(claimTypeConverters).containsKey(IdTokenClaimNames.EXP); assertThat(claimTypeConverters).containsKey(IdTokenClaimNames.IAT); assertThat(claimTypeConverters).containsKey(IdTokenClaimNames.AUTH_TIME); assertThat(claimTypeConverters).containsKey(IdTokenClaimNames.AMR); assertThat(claimTypeConverters).containsKey(StandardClaimNames.EMAIL_VERIFIED); assertThat(claimTypeConverters).containsKey(StandardClaimNames.PHONE_NUMBER_VERIFIED); assertThat(claimTypeConverters).containsKey(StandardClaimNames.UPDATED_AT); } @Test public void setJwtValidatorFactoryWhenNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException().isThrownBy(() -> this.idTokenDecoderFactory.setJwtValidatorFactory(null)); } @Test public void setJwsAlgorithmResolverWhenNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException().isThrownBy(() -> this.idTokenDecoderFactory.setJwsAlgorithmResolver(null)); } @Test public void setClaimTypeConverterFactoryWhenNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException() .isThrownBy(() -> this.idTokenDecoderFactory.setClaimTypeConverterFactory(null)); } @Test public void createDecoderWhenClientRegistrationNullThenThrowIllegalArgumentException() { assertThatIllegalArgumentException().isThrownBy(() -> this.idTokenDecoderFactory.createDecoder(null)); } @Test public void createDecoderWhenJwsAlgorithmDefaultAndJwkSetUriEmptyThenThrowOAuth2AuthenticationException() { assertThatExceptionOfType(OAuth2AuthenticationException.class) .isThrownBy(() -> this.idTokenDecoderFactory.createDecoder(this.registration.jwkSetUri(null).build())) .withMessage("[missing_signature_verifier] Failed to find a Signature Verifier " + "for Client Registration: 'registration-id'. " + "Check to ensure you have configured the JwkSet URI."); } @Test public void createDecoderWhenJwsAlgorithmEcAndJwkSetUriEmptyThenThrowOAuth2AuthenticationException() { this.idTokenDecoderFactory.setJwsAlgorithmResolver((clientRegistration) -> SignatureAlgorithm.ES256); assertThatExceptionOfType(OAuth2AuthenticationException.class) .isThrownBy(() -> this.idTokenDecoderFactory.createDecoder(this.registration.jwkSetUri(null).build())) .withMessage("[missing_signature_verifier] Failed to find a Signature Verifier " + "for Client Registration: 'registration-id'. " + "Check to ensure you have configured the JwkSet URI."); } @Test public void createDecoderWhenJwsAlgorithmHmacAndClientSecretNullThenThrowOAuth2AuthenticationException() { this.idTokenDecoderFactory.setJwsAlgorithmResolver((clientRegistration) -> MacAlgorithm.HS256); assertThatExceptionOfType(OAuth2AuthenticationException.class) .isThrownBy(() -> this.idTokenDecoderFactory.createDecoder(this.registration.clientSecret(null).build())) .withMessage("[missing_signature_verifier] Failed to find a Signature Verifier " + "for Client Registration: 'registration-id'. " + "Check to ensure you have configured the client secret."); } @Test public void createDecoderWhenJwsAlgorithmNullThenThrowOAuth2AuthenticationException() { this.idTokenDecoderFactory.setJwsAlgorithmResolver((clientRegistration) -> null); assertThatExceptionOfType(OAuth2AuthenticationException.class) .isThrownBy(() -> this.idTokenDecoderFactory.createDecoder(this.registration.build())) .withMessage("[missing_signature_verifier] Failed to find a Signature Verifier " + "for Client Registration: 'registration-id'. " + "Check to ensure you have configured a valid JWS Algorithm: 'null'"); } @Test public void createDecoderWhenClientRegistrationValidThenReturnDecoder() { assertThat(this.idTokenDecoderFactory.createDecoder(this.registration.build())).isNotNull(); } @Test public void createDecoderWhenCustomJwtValidatorFactorySetThenApplied() { Function<ClientRegistration, OAuth2TokenValidator<Jwt>> customJwtValidatorFactory = mock(Function.class); this.idTokenDecoderFactory.setJwtValidatorFactory(customJwtValidatorFactory); ClientRegistration clientRegistration = this.registration.build(); given(customJwtValidatorFactory.apply(same(clientRegistration))) .willReturn(new OidcIdTokenValidator(clientRegistration)); this.idTokenDecoderFactory.createDecoder(clientRegistration); verify(customJwtValidatorFactory).apply(same(clientRegistration)); } @Test public void createDecoderWhenCustomJwsAlgorithmResolverSetThenApplied() { Function<ClientRegistration, JwsAlgorithm> customJwsAlgorithmResolver = mock(Function.class); this.idTokenDecoderFactory.setJwsAlgorithmResolver(customJwsAlgorithmResolver); ClientRegistration clientRegistration = this.registration.build(); given(customJwsAlgorithmResolver.apply(same(clientRegistration))).willReturn(MacAlgorithm.HS256); this.idTokenDecoderFactory.createDecoder(clientRegistration); verify(customJwsAlgorithmResolver).apply(same(clientRegistration)); } @Test public void createDecoderWhenCustomClaimTypeConverterFactorySetThenApplied() { Function<ClientRegistration, Converter<Map<String, Object>, Map<String, Object>>> customClaimTypeConverterFactory = mock( Function.class); this.idTokenDecoderFactory.setClaimTypeConverterFactory(customClaimTypeConverterFactory); ClientRegistration clientRegistration = this.registration.build(); given(customClaimTypeConverterFactory.apply(same(clientRegistration))) .willReturn(new ClaimTypeConverter(OidcIdTokenDecoderFactory.createDefaultClaimTypeConverters())); this.idTokenDecoderFactory.createDecoder(clientRegistration); verify(customClaimTypeConverterFactory).apply(same(clientRegistration)); } // gh-16647 @Test public void createDecoderWhenCachingRemovedThenReturnNewDecoder() { ClientRegistration clientRegistration = this.registration.build(); JwtDecoder decoder1 = this.idTokenDecoderFactory.createDecoder(clientRegistration); JwtDecoder decoder2 = this.idTokenDecoderFactory.createDecoder(clientRegistration); assertThat(decoder1).isNotSameAs(decoder2); } }
OidcIdTokenDecoderFactoryTests
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/NativeQueryResultTypeAutoDiscoveryTest.java
{ "start": 24327, "end": 24671 }
class ____ extends AbstractSingleColumnStandardBasicType<byte[]> { public static final String NAME = "byte_array_as_nonvar_binary"; public ByteArrayAsNonVarBinaryType() { super( BinaryJdbcType.INSTANCE, PrimitiveByteArrayJavaType.INSTANCE ); } @Override public String getName() { return NAME; } } }
ByteArrayAsNonVarBinaryType
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/parser/EnumParserTest.java
{ "start": 2461, "end": 2512 }
enum ____ { A, B } private static
Type
java
spring-projects__spring-security
config/src/test/java/org/springframework/security/config/annotation/method/configuration/NamespaceGlobalMethodSecurityTests.java
{ "start": 17197, "end": 17362 }
class ____ extends GlobalMethodSecurityConfiguration { } @Configuration @Import(PreAuthorizeExtendsGMSCConfig.class) public static
ExtendsNoEnableAnntotationConfig
java
apache__dubbo
dubbo-compatible/src/test/java/org/apache/dubbo/metadata/annotation/processing/model/Color.java
{ "start": 921, "end": 1243 }
enum ____ { RED(1), YELLOW(2), BLUE(3); private final int value; Color(int value) { this.value = value; } @Override public String toString() { return "Color{" + "value=" + value + "} " + super.toString(); } public int getValue() { return value; } }
Color
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
{ "start": 5630, "end": 16438 }
class ____ extends ESTestCase { public static final String TEXT_FIELD_NAME = "mapped_string"; public static final String TEXT_ALIAS_FIELD_NAME = "mapped_string_alias"; protected static final String KEYWORD_FIELD_NAME = "mapped_string_2"; protected static final String INT_FIELD_NAME = "mapped_int"; protected static final String INT_ALIAS_FIELD_NAME = "mapped_int_field_alias"; protected static final String INT_RANGE_FIELD_NAME = "mapped_int_range"; protected static final String DOUBLE_FIELD_NAME = "mapped_double"; protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean"; protected static final String DATE_NANOS_FIELD_NAME = "mapped_date_nanos"; protected static final String DATE_FIELD_NAME = "mapped_date"; protected static final String DATE_ALIAS_FIELD_NAME = "mapped_date_alias"; protected static final String DATE_RANGE_FIELD_NAME = "mapped_date_range"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; protected static final String GEO_POINT_ALIAS_FIELD_NAME = "mapped_geo_point_alias"; // we don't include the binary field in the arrays below as it is not searchable protected static final String BINARY_FIELD_NAME = "mapped_binary"; protected static final String[] MAPPED_FIELD_NAMES = new String[] { TEXT_FIELD_NAME, TEXT_ALIAS_FIELD_NAME, KEYWORD_FIELD_NAME, INT_FIELD_NAME, INT_ALIAS_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_NANOS_FIELD_NAME, DATE_FIELD_NAME, DATE_ALIAS_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, BINARY_FIELD_NAME }; protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[] { TEXT_FIELD_NAME, TEXT_ALIAS_FIELD_NAME, KEYWORD_FIELD_NAME, INT_FIELD_NAME, INT_ALIAS_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_NANOS_FIELD_NAME, DATE_FIELD_NAME, DATE_ALIAS_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, BINARY_FIELD_NAME }; private static final Map<String, String> ALIAS_TO_CONCRETE_FIELD_NAME = new HashMap<>(); static { ALIAS_TO_CONCRETE_FIELD_NAME.put(TEXT_ALIAS_FIELD_NAME, TEXT_FIELD_NAME); ALIAS_TO_CONCRETE_FIELD_NAME.put(INT_ALIAS_FIELD_NAME, INT_FIELD_NAME); ALIAS_TO_CONCRETE_FIELD_NAME.put(DATE_ALIAS_FIELD_NAME, DATE_FIELD_NAME); ALIAS_TO_CONCRETE_FIELD_NAME.put(GEO_POINT_ALIAS_FIELD_NAME, GEO_POINT_FIELD_NAME); } private static ServiceHolder serviceHolder; private static ServiceHolder serviceHolderWithNoType; private static int queryNameId = 0; private static Settings nodeSettings; private static Index index; private static long nowInMillis; protected static Index getIndex() { return index; } protected Collection<Class<? extends Plugin>> getPlugins() { return Collections.emptyList(); } private TestThreadPool testThreadPool; /** * Allows additional plugins other than the required `TestGeoShapeFieldMapperPlugin` * Could probably be removed when dependencies against geo_shape is decoupled */ protected Collection<Class<? extends Plugin>> getExtraPlugins() { return Collections.emptyList(); } protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {} @BeforeClass public static void beforeClass() { nodeSettings = Settings.builder() .put("node.name", AbstractQueryTestCase.class.toString()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); index = new Index(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLength(10)); nowInMillis = randomNonNegativeLong(); } @Override protected NamedXContentRegistry xContentRegistry() { return serviceHolder.parserConfiguration.registry(); } protected NamedWriteableRegistry namedWriteableRegistry() { return serviceHolder.namedWriteableRegistry; } /** * make sure query names are unique by suffixing them with increasing counter */ protected static String createUniqueRandomName() { String queryName = randomAlphaOfLengthBetween(1, 10) + queryNameId; queryNameId++; return queryName; } protected Settings createTestIndexSettings() { // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. IndexVersion indexVersionCreated = randomBoolean() ? IndexVersion.current() : IndexVersionUtils.randomCompatibleVersion(random()); return Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, indexVersionCreated).build(); } protected static IndexSettings indexSettings() { return serviceHolder.idxSettings; } protected static MapperService mapperService() { return serviceHolder.mapperService; } protected static String expectedFieldName(String builderFieldName) { return ALIAS_TO_CONCRETE_FIELD_NAME.getOrDefault(builderFieldName, builderFieldName); } @AfterClass public static void afterClass() throws Exception { IOUtils.close(serviceHolder); IOUtils.close(serviceHolderWithNoType); serviceHolder = null; serviceHolderWithNoType = null; } @Before public void beforeTest() throws Exception { if (serviceHolder == null) { assert serviceHolderWithNoType == null; // we initialize the serviceHolder and serviceHolderWithNoType just once, but need some // calls to the randomness source during its setup. In order to not mix these calls with // the randomness source that is later used in the test method, we use the master seed during // this setup long masterSeed = SeedUtils.parseSeed(RandomizedTest.getContext().getRunnerSeedAsString()); RandomizedTest.getContext().runWithPrivateRandomness(masterSeed, (Callable<Void>) () -> { Collection<Class<? extends Plugin>> plugins = new ArrayList<>(getPlugins()); plugins.addAll(getExtraPlugins()); serviceHolder = new ServiceHolder( nodeSettings, createTestIndexSettings(), plugins, nowInMillis, AbstractBuilderTestCase.this, true ); serviceHolderWithNoType = new ServiceHolder( nodeSettings, createTestIndexSettings(), plugins, nowInMillis, AbstractBuilderTestCase.this, false ); return null; }); } serviceHolder.clientInvocationHandler.delegate = this; serviceHolderWithNoType.clientInvocationHandler.delegate = this; testThreadPool = new TestThreadPool(getTestName()); serviceHolder.clientInvocationHandler.testThreadPool = testThreadPool; serviceHolderWithNoType.clientInvocationHandler.testThreadPool = testThreadPool; } @After public void afterTest() { serviceHolder.clientInvocationHandler.delegate = null; serviceHolderWithNoType.clientInvocationHandler.delegate = null; testThreadPool.shutdown(); } /** * Override this to handle {@link Client#get(GetRequest)} calls from parsers / builders */ protected GetResponse executeGet(GetRequest getRequest) { throw new UnsupportedOperationException("this test can't handle GET requests"); } /** * Override this to handle {@link Client#multiTermVectors(MultiTermVectorsRequest, ActionListener)} * calls from parsers / builders */ protected MultiTermVectorsResponse executeMultiTermVectors(MultiTermVectorsRequest mtvRequest) { throw new UnsupportedOperationException("this test can't handle MultiTermVector requests"); } /** * Can the test simulate this {@code Method}. * If this function returns true {@link #simulateMethod(Method, Object[])} * should be implemented provide the expected response. * * @param method The method being proxied. In practice method will represent a client call. * @param args Method arguments * @return True if simulating the method call is supported */ protected boolean canSimulateMethod(Method method, Object[] args) throws NoSuchMethodException { return false; } /** * Override this to simulate client calls. */ protected Object simulateMethod(Method method, Object[] args) { throw new UnsupportedOperationException("this test can't simulate method [" + method.getName() + "]"); } /** * @return a new {@link SearchExecutionContext} with the provided searcher */ protected static SearchExecutionContext createSearchExecutionContext(IndexSearcher searcher) { return serviceHolder.createShardContext(searcher); } protected static CoordinatorRewriteContext createCoordinatorRewriteContext( DateFieldMapper.DateFieldType dateFieldType, long min, long max ) { return serviceHolder.createCoordinatorContext(dateFieldType, min, max); } protected static CoordinatorRewriteContext createCoordinatorRewriteContext( DateFieldMapper.DateFieldType dateFieldType, long min, long max, String tier ) { return serviceHolder.createCoordinatorContext(dateFieldType, min, max, tier); } protected static DataRewriteContext dataRewriteContext() { return serviceHolder.createDataContext(); } /** * @return a new {@link SearchExecutionContext} based on an index with no type registered */ protected static SearchExecutionContext createShardContextWithNoType() { return serviceHolderWithNoType.createShardContext(null); } /** * @return a new {@link SearchExecutionContext} based on the base test index and queryParserService */ protected static SearchExecutionContext createSearchExecutionContext() { return createSearchExecutionContext(null); } protected static QueryRewriteContext createQueryRewriteContext() { return serviceHolder.createQueryRewriteContext(); } private static
AbstractBuilderTestCase
java
alibaba__fastjson
src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_297.java
{ "start": 248, "end": 848 }
class ____ extends TestCase { public void test_for_issue() throws Exception { Response<User> resp = parse("{\"id\":1001,\"values\":[{}]}", User.class); Assert.assertEquals(1001, resp.id); Assert.assertEquals(1, resp.values.size()); Assert.assertEquals(User.class, resp.values.get(0).getClass()); } public <T> Response<T> parse(String text, Class<T> clazz) { ParameterizedTypeImpl type = new ParameterizedTypeImpl(new Type[] { User.class }, null, Response.class); return JSON.parseObject(text, type); } public static
Bug_for_issue_297
java
apache__dubbo
dubbo-registry/dubbo-registry-multiple/src/test/java/org/apache/dubbo/registry/multiple/MultipleRegistry2S2RTest.java
{ "start": 1417, "end": 10302 }
class ____ { private static final String SERVICE_NAME = "org.apache.dubbo.registry.MultipleService2S2R"; private static final String SERVICE2_NAME = "org.apache.dubbo.registry.MultipleService2S2R2"; private static MultipleRegistry multipleRegistry; // for test content private static ZookeeperClient zookeeperClient; private static ZookeeperClient zookeeperClient2; private static ZookeeperRegistry zookeeperRegistry; private static ZookeeperRegistry zookeeperRegistry2; private static String zookeeperConnectionAddress1, zookeeperConnectionAddress2; @BeforeAll public static void beforeAll() { zookeeperConnectionAddress1 = System.getProperty("zookeeper.connection.address.1"); zookeeperConnectionAddress2 = System.getProperty("zookeeper.connection.address.2"); URL url = URL.valueOf("multiple://127.0.0.1?application=vic&enable-empty-protection=false&" + MultipleRegistry.REGISTRY_FOR_SERVICE + "=" + zookeeperConnectionAddress1 + "," + zookeeperConnectionAddress2 + "&" + MultipleRegistry.REGISTRY_FOR_REFERENCE + "=" + zookeeperConnectionAddress1 + "," + zookeeperConnectionAddress2); multipleRegistry = (MultipleRegistry) new MultipleRegistryFactory().createRegistry(url); // for test validation zookeeperClient = new Curator5ZookeeperClient(URL.valueOf(zookeeperConnectionAddress1)); zookeeperRegistry = MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getServiceRegistries().values()); zookeeperClient2 = new Curator5ZookeeperClient(URL.valueOf(zookeeperConnectionAddress2)); zookeeperRegistry2 = MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getServiceRegistries().values()); } @Test void testParamConfig() { Assertions.assertEquals(2, multipleRegistry.origReferenceRegistryURLs.size()); Assertions.assertTrue(multipleRegistry.origReferenceRegistryURLs.contains(zookeeperConnectionAddress1)); Assertions.assertTrue(multipleRegistry.origReferenceRegistryURLs.contains(zookeeperConnectionAddress2)); Assertions.assertEquals(2, multipleRegistry.origServiceRegistryURLs.size()); Assertions.assertTrue(multipleRegistry.origServiceRegistryURLs.contains(zookeeperConnectionAddress1)); Assertions.assertTrue(multipleRegistry.origServiceRegistryURLs.contains(zookeeperConnectionAddress2)); Assertions.assertEquals(2, multipleRegistry.effectReferenceRegistryURLs.size()); Assertions.assertTrue(multipleRegistry.effectReferenceRegistryURLs.contains(zookeeperConnectionAddress1)); Assertions.assertTrue(multipleRegistry.effectReferenceRegistryURLs.contains(zookeeperConnectionAddress2)); Assertions.assertEquals(2, multipleRegistry.effectServiceRegistryURLs.size()); Assertions.assertTrue(multipleRegistry.effectServiceRegistryURLs.contains(zookeeperConnectionAddress1)); Assertions.assertTrue(multipleRegistry.effectServiceRegistryURLs.contains(zookeeperConnectionAddress2)); Assertions.assertTrue(multipleRegistry.getServiceRegistries().containsKey(zookeeperConnectionAddress1)); Assertions.assertTrue(multipleRegistry.getServiceRegistries().containsKey(zookeeperConnectionAddress2)); Assertions.assertEquals( 2, multipleRegistry.getServiceRegistries().values().size()); // java.util.Iterator<Registry> registryIterable = // multipleRegistry.getServiceRegistries().values().iterator(); // Registry firstRegistry = registryIterable.next(); // Registry secondRegistry = registryIterable.next(); Assertions.assertNotNull(MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getServiceRegistries().values())); Assertions.assertNotNull(MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getReferenceRegistries().values())); Assertions.assertEquals( MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getServiceRegistries().values()), MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getReferenceRegistries().values())); Assertions.assertEquals( MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getServiceRegistries().values()), MultipleRegistryTestUtil.getZookeeperRegistry( multipleRegistry.getReferenceRegistries().values())); Assertions.assertEquals(multipleRegistry.getApplicationName(), "vic"); Assertions.assertTrue(multipleRegistry.isAvailable()); } @Test void testRegistryAndUnRegistry() throws InterruptedException { URL serviceUrl = URL.valueOf( "http2://multiple/" + SERVICE_NAME + "?notify=false&methods=test1,test2&category=providers"); // URL serviceUrl2 = URL.valueOf("http2://multiple2/" + SERVICE_NAME + // "?notify=false&methods=test1,test2&category=providers"); multipleRegistry.register(serviceUrl); String path = "/dubbo/" + SERVICE_NAME + "/providers"; List<String> providerList = zookeeperClient.getChildren(path); Assertions.assertTrue(!providerList.isEmpty()); final List<URL> list = new ArrayList<URL>(); multipleRegistry.subscribe(serviceUrl, new NotifyListener() { @Override public void notify(List<URL> urls) { list.clear(); list.addAll(urls); } }); Thread.sleep(1500); Assertions.assertEquals(2, list.size()); multipleRegistry.unregister(serviceUrl); Thread.sleep(1500); Assertions.assertEquals(1, list.size()); List<URL> urls = MultipleRegistryTestUtil.getProviderURLsFromNotifyURLS(list); Assertions.assertEquals(1, list.size()); Assertions.assertEquals("empty", list.get(0).getProtocol()); } @Test void testSubscription() throws InterruptedException { URL serviceUrl = URL.valueOf( "http2://multiple/" + SERVICE2_NAME + "?notify=false&methods=test1,test2&category=providers"); // URL serviceUrl2 = URL.valueOf("http2://multiple2/" + SERVICE_NAME + // "?notify=false&methods=test1,test2&category=providers"); multipleRegistry.register(serviceUrl); String path = "/dubbo/" + SERVICE2_NAME + "/providers"; List<String> providerList = zookeeperClient.getChildren(path); Assumptions.assumeTrue(!providerList.isEmpty()); final List<URL> list = new ArrayList<URL>(); multipleRegistry.subscribe(serviceUrl, new NotifyListener() { @Override public void notify(List<URL> urls) { list.clear(); list.addAll(urls); } }); Thread.sleep(1500); Assertions.assertEquals(2, list.size()); List<Registry> serviceRegistries = new ArrayList<Registry>(multipleRegistry.getServiceRegistries().values()); serviceRegistries.get(0).unregister(serviceUrl); Thread.sleep(1500); Assertions.assertEquals(1, list.size()); List<URL> urls = MultipleRegistryTestUtil.getProviderURLsFromNotifyURLS(list); Assertions.assertEquals(1, list.size()); Assertions.assertTrue(!"empty".equals(list.get(0).getProtocol())); serviceRegistries.get(1).unregister(serviceUrl); Thread.sleep(1500); Assertions.assertEquals(1, list.size()); urls = MultipleRegistryTestUtil.getProviderURLsFromNotifyURLS(list); Assertions.assertEquals(1, list.size()); Assertions.assertEquals("empty", list.get(0).getProtocol()); } @Test void testAggregation() { List<URL> result = new ArrayList<URL>(); List<URL> listToAggregate = new ArrayList<URL>(); URL url1 = URL.valueOf("dubbo://127.0.0.1:20880/service1"); URL url2 = URL.valueOf("dubbo://127.0.0.1:20880/service1"); listToAggregate.add(url1); listToAggregate.add(url2); URL registryURL = URL.valueOf( "mock://127.0.0.1/RegistryService?attachments=zone=hangzhou,tag=middleware&enable-empty-protection=false"); MultipleRegistry.MultipleNotifyListenerWrapper.aggregateRegistryUrls(result, listToAggregate, registryURL); Assertions.assertEquals(2, result.size()); Assertions.assertEquals(2, result.get(0).getParameters().size()); Assertions.assertEquals("hangzhou", result.get(0).getParameter("zone")); Assertions.assertEquals("middleware", result.get(1).getParameter("tag")); } }
MultipleRegistry2S2RTest
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/dialect/functional/OracleDialectSequenceInformationTest.java
{ "start": 4802, "end": 5081 }
class ____ { private SequenceInformation sequenceInformation; public SequenceInformation get() { return sequenceInformation; } public void set(SequenceInformation sequenceInformation) { this.sequenceInformation = sequenceInformation; } } }
SequenceInformationWrapper
java
spring-cloud__spring-cloud-gateway
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/RouteToRequestUrlFilterTests.java
{ "start": 1724, "end": 7911 }
class ____ { @Test public void happyPath() { MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost/get?a=b").build(); ServerWebExchange webExchange = testFilter(request, "http://myhost/mypath"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("http").hasHost("myhost").hasPath("/get").hasParameter("a", "b"); } @Test public void happyPathLb() { MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost/getb").build(); ServerWebExchange webExchange = testFilter(request, "lb://myhost"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("lb").hasHost("myhost"); } @Test public void invalidHost() { assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> { MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost/getb").build(); testFilter(request, "lb://my_host"); }); } @Test public void happyPathLbPlusScheme() { MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost/getb").build(); ServerWebExchange webExchange = testFilter(request, "lb:http://myhost"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("http").hasHost("myhost"); String schemePrefix = webExchange.getRequiredAttribute(GATEWAY_SCHEME_PREFIX_ATTR); assertThat(schemePrefix).isEqualTo("lb"); } @Test public void noQueryParams() { MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost/get").build(); ServerWebExchange webExchange = testFilter(request, "http://myhost"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("http").hasHost("myhost"); } @Test public void encodedParameters() { URI url = UriComponentsBuilder.fromUriString("http://localhost/get?a=b&c=d[]") .buildAndExpand() .encode() .toUri(); // prove that it is encoded assertThat(url.getRawQuery()).isEqualTo("a=b&c=d%5B%5D"); assertThat(url).hasParameter("c", "d[]"); MockServerHttpRequest request = MockServerHttpRequest.method(HttpMethod.GET, url).build(); ServerWebExchange webExchange = testFilter(request, "http://myhost"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("http").hasHost("myhost").hasParameter("a", "b").hasParameter("c", "d[]"); // prove that it is not double encoded assertThat(uri.getRawQuery()).isEqualTo("a=b&c=d%5B%5D"); } @Test public void partialEncodedParameters() { URI url = UriComponentsBuilder.fromUriString("http://localhost/get?key[]=test= key&start=1533108081") .build() .toUri(); // prove that it is partial encoded assertThat(url.getRawQuery()).isEqualTo("key[]=test=%20key&start=1533108081"); assertThat(url).hasParameter("key[]", "test= key"); assertThat(url).hasParameter("start", "1533108081"); MockServerHttpRequest request = MockServerHttpRequest.method(HttpMethod.GET, url).build(); ServerWebExchange webExchange = testFilter(request, "http://myhost"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("http") .hasHost("myhost") // since https://github.com/joel-costigliola/assertj-core/issues/1699 // assertj uses raw query .hasParameter("key[]", "test=%20key") .hasParameter("start", "1533108081"); // prove that it is double encoded since partial encoded uri is treated as // unencoded. 
assertThat(uri.getRawQuery()).isEqualTo("key[]=test=%2520key&start=1533108081"); } @Test public void encodedUrl() { URI url = UriComponentsBuilder.fromUriString("http://localhost/abc def/get").buildAndExpand().encode().toUri(); // prove that it is encoded assertThat(url.getRawPath()).isEqualTo("/abc%20def/get"); assertThat(url).hasPath("/abc def/get"); MockServerHttpRequest request = MockServerHttpRequest.method(HttpMethod.GET, url).build(); ServerWebExchange webExchange = testFilter(request, "http://myhost/abc%20def/get"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("http").hasHost("myhost").hasPath("/abc def/get"); // prove that it is not double encoded assertThat(uri.getRawPath()).isEqualTo("/abc%20def/get"); } @Test public void unencodedParameters() { URI url = URI.create("http://localhost/get?a=b&c=d[]"); // prove that it is unencoded assertThat(url.getRawQuery()).isEqualTo("a=b&c=d[]"); MockServerHttpRequest request = MockServerHttpRequest.method(HttpMethod.GET, url).build(); ServerWebExchange webExchange = testFilter(request, "http://myhost"); URI uri = webExchange.getRequiredAttribute(GATEWAY_REQUEST_URL_ATTR); assertThat(uri).hasScheme("http").hasHost("myhost").hasParameter("a", "b").hasParameter("c", "d[]"); // prove that it is NOT encoded assertThat(uri.getRawQuery()).isEqualTo("a=b&c=d[]"); } @Test public void matcherWorks() { testMatcher(true, "lb:a123:stuff", "lb:abc:stuff", "lb:a.bc:stuff", "lb:a-bc:stuff", "lb:a+bc:stuff"); testMatcher(false, "lb:a", "lb:a123", "lb:123:stuff", "lb:a//:stuff"); } private void testMatcher(boolean shouldMatch, String... uris) { for (String s : uris) { URI uri = URI.create(s); boolean result = RouteToRequestUrlFilter.hasAnotherScheme(uri); assertThat(result).as("%s should match: %s", s, result).isEqualTo(shouldMatch); } } private ServerWebExchange testFilter(MockServerHttpRequest request, String routeUri) { Route value = Route.async().id("1").uri(URI.create(routeUri)).order(0).predicate(swe -> true).build(); ServerWebExchange exchange = MockServerWebExchange.from(request); exchange.getAttributes().put(GATEWAY_ROUTE_ATTR, value); GatewayFilterChain filterChain = mock(GatewayFilterChain.class); ArgumentCaptor<ServerWebExchange> captor = ArgumentCaptor.forClass(ServerWebExchange.class); when(filterChain.filter(captor.capture())).thenReturn(Mono.empty()); RouteToRequestUrlFilter filter = new RouteToRequestUrlFilter(); filter.filter(exchange, filterChain); return captor.getValue(); } }
RouteToRequestUrlFilterTests
java
quarkusio__quarkus
core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java
{ "start": 10295, "end": 10377 }
class ____<X> implements Repo<Repo<X>> { } public static
GenericCompositeRepo