language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | integration-tests/hibernate-search-orm-elasticsearch-outbox-polling/src/test/java/io/quarkus/it/hibernate/search/orm/elasticsearch/coordination/outboxpolling/HibernateSearchOutboxPollingInGraalIT.java | {
"start": 171,
"end": 261
} | class ____ extends HibernateSearchOutboxPollingTest {
}
| HibernateSearchOutboxPollingInGraalIT |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageDefaults.java | {
"start": 6221,
"end": 7402
} | class ____ {
static final String TITLE_SETTING = Field.XField.TITLE.getPreferredName();
static final String VALUE_SETTING = Field.XField.VALUE.getPreferredName();
static final String SHORT_SETTING = Field.XField.SHORT.getPreferredName();
final String title;
final String value;
final Boolean isShort;
FieldDefaults(Settings settings) {
title = settings.get(TITLE_SETTING, null);
value = settings.get(VALUE_SETTING, null);
isShort = settings.getAsBoolean(SHORT_SETTING, null);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FieldDefaults that = (FieldDefaults) o;
return Objects.equals(title, that.title) && Objects.equals(value, that.value) && Objects.equals(isShort, that.isShort);
}
@Override
public int hashCode() {
return Objects.hash(title, value, isShort);
}
}
}
}
| FieldDefaults |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/cacheable/annotation/ConfigurationTest.java | {
"start": 1083,
"end": 4510
} | class ____ {
private EntityManagerFactory emf;
@AfterEach
public void tearDown() {
if ( emf != null ) {
emf.close();
}
}
@Test
public void testSharedCacheModeNone() {
MetadataImplementor metadata = buildMetadata( SharedCacheMode.NONE );
PersistentClass pc = metadata.getEntityBinding( ExplicitlyCacheableEntity.class.getName() );
assertFalse( pc.isCached() );
pc = metadata.getEntityBinding( ExplicitlyNonCacheableEntity.class.getName() );
assertFalse( pc.isCached() );
pc = metadata.getEntityBinding( NoCacheableAnnotationEntity.class.getName() );
assertFalse( pc.isCached() );
}
@Test
public void testSharedCacheModeUnspecified() {
MetadataImplementor metadata = buildMetadata( SharedCacheMode.UNSPECIFIED );
PersistentClass pc = metadata.getEntityBinding( ExplicitlyCacheableEntity.class.getName() );
assertTrue( pc.isCached() );
pc = metadata.getEntityBinding( ExplicitlyNonCacheableEntity.class.getName() );
assertFalse( pc.isCached() );
pc = metadata.getEntityBinding( NoCacheableAnnotationEntity.class.getName() );
assertFalse( pc.isCached() );
}
@Test
public void testSharedCacheModeAll() {
MetadataImplementor metadata = buildMetadata( SharedCacheMode.ALL );
PersistentClass pc = metadata.getEntityBinding( ExplicitlyCacheableEntity.class.getName() );
assertTrue( pc.isCached() );
pc = metadata.getEntityBinding( ExplicitlyNonCacheableEntity.class.getName() );
assertTrue( pc.isCached() );
pc = metadata.getEntityBinding( NoCacheableAnnotationEntity.class.getName() );
assertTrue( pc.isCached() );
}
@Test
public void testSharedCacheModeEnable() {
MetadataImplementor metadata = buildMetadata( SharedCacheMode.ENABLE_SELECTIVE );
PersistentClass pc = metadata.getEntityBinding( ExplicitlyCacheableEntity.class.getName() );
assertTrue( pc.isCached() );
pc = metadata.getEntityBinding( ExplicitlyNonCacheableEntity.class.getName() );
assertFalse( pc.isCached() );
pc = metadata.getEntityBinding( NoCacheableAnnotationEntity.class.getName() );
assertFalse( pc.isCached() );
}
@Test
public void testSharedCacheModeDisable() {
MetadataImplementor metadata = buildMetadata( SharedCacheMode.DISABLE_SELECTIVE );
PersistentClass pc = metadata.getEntityBinding( ExplicitlyCacheableEntity.class.getName() );
assertTrue( pc.isCached() );
pc = metadata.getEntityBinding( ExplicitlyNonCacheableEntity.class.getName() );
assertFalse( pc.isCached() );
pc = metadata.getEntityBinding( NoCacheableAnnotationEntity.class.getName() );
assertTrue( pc.isCached() );
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private MetadataImplementor buildMetadata(SharedCacheMode mode) {
Map settings = ServiceRegistryUtil.createBaseSettings();
settings.put( AvailableSettings.JPA_SHARED_CACHE_MODE, mode );
settings.put( AvailableSettings.CACHE_REGION_FACTORY, CustomRegionFactory.class.getName() );
settings.put(
AvailableSettings.LOADED_CLASSES,
Arrays.asList(
ExplicitlyCacheableEntity.class,
ExplicitlyNonCacheableEntity.class,
NoCacheableAnnotationEntity.class
)
);
PersistenceUnitInfoAdapter adapter = new PersistenceUnitInfoAdapter();
final EntityManagerFactoryBuilderImpl emfb = (EntityManagerFactoryBuilderImpl) Bootstrap.getEntityManagerFactoryBuilder(
adapter,
settings
);
emf = emfb.build();
return emfb.getMetadata();
}
public static | ConfigurationTest |
java | grpc__grpc-java | cronet/src/main/java/io/grpc/cronet/CronetChannelBuilder.java | {
"start": 2054,
"end": 2219
} | class ____ building channels with the cronet transport. */
@ExperimentalApi("There is no plan to make this API stable, given transport API instability")
public final | for |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/BonitaEndpointBuilderFactory.java | {
"start": 1444,
"end": 1567
} | interface ____ {
/**
* Builder for endpoint for the Bonita component.
*/
public | BonitaEndpointBuilderFactory |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerUpgradeTestSpecifications.java | {
"start": 1664,
"end": 1993
} | class ____ {
// ----------------------------------------------------------------------------------------------
// Test types
// ----------------------------------------------------------------------------------------------
@SuppressWarnings("WeakerAccess")
public static | PojoSerializerUpgradeTestSpecifications |
java | jhy__jsoup | src/main/java/org/jsoup/helper/RequestDispatch.java | {
"start": 402,
"end": 549
} | class ____ used if it can be instantiated, unless the system property
{@link SharedConstants#UseHttpClient} is explicitly set to {@code false}.
*/
| is |
java | quarkusio__quarkus | extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientsBuildTimeConfig.java | {
"start": 2433,
"end": 3993
} | interface ____ bean of the configured scope.
*/
Optional<String> scope();
/**
* If set to true, then Quarkus will ensure that all calls from the REST client go through a local proxy
* server (that is managed by Quarkus).
* This can be very useful for capturing network traffic to a service that uses HTTPS.
* <p>
* This property is not applicable to the RESTEasy Client, only the Quarkus REST client (formerly RESTEasy Reactive
* client).
* <p>
* This property only applicable to dev and test mode.
*/
@WithDefault("false")
boolean enableLocalProxy();
/**
* This setting is used to select which proxy provider to use if there are multiple ones.
* It only applies if {@code enable-local-proxy} is true.
* <p>
* The algorithm for picking between multiple provider is the following:
* <ul>
* <li>If only the default is around, use it (its name is {@code default})</li>
* <li>If there is only one besides the default, use it</li>
* <li>If there are multiple ones, fail</li>
* </ul>
*/
Optional<String> localProxyProvider();
/**
* If true, the extension will automatically remove the trailing slash in the paths if any.
* This property is not applicable to the RESTEasy Client.
*/
@WithName("removes-trailing-slash")
@WithDefault("true")
boolean removesTrailingSlash();
}
}
| a |
java | apache__spark | core/src/main/java/org/apache/spark/shuffle/sort/ShuffleSortDataFormat.java | {
"start": 994,
"end": 2433
} | class ____ extends SortDataFormat<PackedRecordPointer, LongArray> {
private final LongArray buffer;
ShuffleSortDataFormat(LongArray buffer) {
this.buffer = buffer;
}
@Override
public PackedRecordPointer getKey(LongArray data, int pos) {
// Since we re-use keys, this method shouldn't be called.
throw new UnsupportedOperationException();
}
@Override
public PackedRecordPointer newKey() {
return new PackedRecordPointer();
}
@Override
public PackedRecordPointer getKey(LongArray data, int pos, PackedRecordPointer reuse) {
reuse.set(data.get(pos));
return reuse;
}
@Override
public void swap(LongArray data, int pos0, int pos1) {
final long temp = data.get(pos0);
data.set(pos0, data.get(pos1));
data.set(pos1, temp);
}
@Override
public void copyElement(LongArray src, int srcPos, LongArray dst, int dstPos) {
dst.set(dstPos, src.get(srcPos));
}
@Override
public void copyRange(LongArray src, int srcPos, LongArray dst, int dstPos, int length) {
Platform.copyMemory(
src.getBaseObject(),
src.getBaseOffset() + srcPos * 8L,
dst.getBaseObject(),
dst.getBaseOffset() + dstPos * 8L,
length * 8L
);
}
@Override
public LongArray allocate(int length) {
assert (length <= buffer.size()) :
"the buffer is smaller than required: " + buffer.size() + " < " + length;
return buffer;
}
}
| ShuffleSortDataFormat |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/config/DefaultConfigClassesInheritedTests.java | {
"start": 1417,
"end": 1763
} | class ____ extends DefaultConfigClassesBaseTests {
@Autowired
Pet pet;
@Test
void verifyPetSetFromExtendedContextConfig() {
assertThat(this.pet).as("The pet should have been autowired.").isNotNull();
assertThat(this.pet.getName()).isEqualTo("Fido");
}
@Configuration(proxyBeanMethods = false)
static | DefaultConfigClassesInheritedTests |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/producer/ProducerWithAbstractClassWithInterfaceInterceptionAndBindingsSourceTest.java | {
"start": 2326,
"end": 2400
} | interface ____ {
String hello1();
}
abstract static | MyNonbean |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/RequestMatcherDelegatingAuthenticationManagerResolver.java | {
"start": 4436,
"end": 5540
} | class ____ {
private final List<RequestMatcherEntry<AuthenticationManager>> entries = new ArrayList<>();
private Builder() {
}
/**
* Maps a {@link RequestMatcher} to an {@link AuthorizationManager}.
* @param matcher the {@link RequestMatcher} to use
* @param manager the {@link AuthenticationManager} to use
* @return the {@link Builder} for further
* customizationServerWebExchangeDelegatingReactiveAuthenticationManagerResolvers
*/
public Builder add(RequestMatcher matcher, AuthenticationManager manager) {
Assert.notNull(matcher, "matcher cannot be null");
Assert.notNull(manager, "manager cannot be null");
this.entries.add(new RequestMatcherEntry<>(matcher, manager));
return this;
}
/**
* Creates a {@link RequestMatcherDelegatingAuthenticationManagerResolver}
* instance.
* @return the {@link RequestMatcherDelegatingAuthenticationManagerResolver}
* instance
*/
public RequestMatcherDelegatingAuthenticationManagerResolver build() {
return new RequestMatcherDelegatingAuthenticationManagerResolver(this.entries);
}
}
}
| Builder |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/apikey/InvalidateApiKeyRequestTests.java | {
"start": 4294,
"end": 9470
} | class ____ extends LegacyActionRequest {
String realm;
String user;
String apiKeyId;
String apiKeyName;
boolean ownedByAuthenticatedUser;
Dummy(String[] a) {
realm = a[0];
user = a[1];
apiKeyId = a[2];
apiKeyName = a[3];
ownedByAuthenticatedUser = Booleans.parseBoolean(a[4]);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalString(realm);
out.writeOptionalString(user);
if (Strings.hasText(apiKeyId)) {
out.writeOptionalStringArray(new String[] { apiKeyId });
} else {
out.writeOptionalStringArray(null);
}
out.writeOptionalString(apiKeyName);
out.writeOptionalBoolean(ownedByAuthenticatedUser);
}
}
String[][] inputs = new String[][] {
{ randomNullOrEmptyString(), randomNullOrEmptyString(), randomNullOrEmptyString(), randomNullOrEmptyString(), "false" },
{ randomNullOrEmptyString(), "user", "api-kid", "api-kname", "false" },
{ "realm", randomNullOrEmptyString(), "api-kid", "api-kname", "false" },
{ "realm", "user", "api-kid", randomNullOrEmptyString(), "false" },
{ randomNullOrEmptyString(), randomNullOrEmptyString(), "api-kid", "api-kname", "false" },
{ "realm", randomNullOrEmptyString(), randomNullOrEmptyString(), randomNullOrEmptyString(), "true" },
{ randomNullOrEmptyString(), "user", randomNullOrEmptyString(), randomNullOrEmptyString(), "true" }, };
String[][] expectedErrorMessages = new String[][] {
{ "One of [api key id(s), api key name, username, realm name] must be specified if [owner] flag is false" },
{
"username or realm name must not be specified when the api key id(s) or api key name are specified",
"only one of [api key id(s), api key name] can be specified" },
{
"username or realm name must not be specified when the api key id(s) or api key name are specified",
"only one of [api key id(s), api key name] can be specified" },
{ "username or realm name must not be specified when the api key id(s) or api key name are specified" },
{ "only one of [api key id(s), api key name] can be specified" },
{ "neither username nor realm-name may be specified when invalidating owned API keys" },
{ "neither username nor realm-name may be specified when invalidating owned API keys" } };
for (int caseNo = 0; caseNo < inputs.length; caseNo++) {
try (
ByteArrayOutputStream bos = new ByteArrayOutputStream();
OutputStreamStreamOutput osso = new OutputStreamStreamOutput(bos)
) {
Dummy d = new Dummy(inputs[caseNo]);
d.writeTo(osso);
ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
InputStreamStreamInput issi = new InputStreamStreamInput(bis);
InvalidateApiKeyRequest request = new InvalidateApiKeyRequest(issi);
ActionRequestValidationException ve = request.validate();
assertNotNull(ve.getMessage(), ve);
assertEquals(expectedErrorMessages[caseNo].length, ve.validationErrors().size());
assertThat(ve.validationErrors(), containsInAnyOrder(expectedErrorMessages[caseNo]));
}
}
}
public void testSerialization() throws IOException {
final String apiKeyId = randomAlphaOfLength(5);
final boolean ownedByAuthenticatedUser = true;
InvalidateApiKeyRequest invalidateApiKeyRequest = InvalidateApiKeyRequest.usingApiKeyId(apiKeyId, ownedByAuthenticatedUser);
{
ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
out.setTransportVersion(randomVersionBetween(random(), TransportVersions.V_7_10_0, TransportVersion.current()));
invalidateApiKeyRequest.writeTo(out);
InputStreamStreamInput inputStreamStreamInput = new InputStreamStreamInput(new ByteArrayInputStream(outBuffer.toByteArray()));
inputStreamStreamInput.setTransportVersion(
randomVersionBetween(random(), TransportVersions.V_7_10_0, TransportVersion.current())
);
InvalidateApiKeyRequest requestFromInputStream = new InvalidateApiKeyRequest(inputStreamStreamInput);
assertThat(requestFromInputStream, equalTo(invalidateApiKeyRequest));
}
}
private static String randomNullOrEmptyString() {
return randomFrom(new String[] { "", null });
}
}
| Dummy |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/android/processor/internal/androidentrypoint/Generators.java | {
"start": 11539,
"end": 20484
} | class ____ for the given method signature. */
static XMethodElement nearestSuperClassMethod(
MethodSignature methodSignature, AndroidEntryPointMetadata metadata) {
ImmutableList<XMethodElement> methodOnBaseElement =
asStream(metadata.baseElement().getAllMethods())
.filter(method -> MethodSignature.of(method).equals(methodSignature))
.collect(toImmutableList());
Preconditions.checkState(methodOnBaseElement.size() >= 1);
return Iterables.getLast(methodOnBaseElement);
}
// @Override
// public FragmentComponentManager componentManager() {
// if (componentManager == null) {
// synchronize (componentManagerLock) {
// if (componentManager == null) {
// componentManager = createComponentManager();
// }
// }
// }
// return componentManager;
// }
private static void addComponentManagerMethods(
AndroidEntryPointMetadata metadata, TypeSpec.Builder typeSpecBuilder) {
if (metadata.overridesAndroidEntryPointClass()) {
// We don't need to override this method if we are extending a Hilt type.
return;
}
ParameterSpec managerParam = metadata.componentManagerParam();
typeSpecBuilder.addField(componentManagerField(metadata));
typeSpecBuilder.addMethod(createComponentManagerMethod(metadata));
MethodSpec.Builder methodSpecBuilder =
MethodSpec.methodBuilder("componentManager")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC, Modifier.FINAL)
.returns(managerParam.type)
.beginControlFlow("if ($N == null)", managerParam);
// Views do not do double-checked locking because this is called from the constructor
if (metadata.androidType() != AndroidEntryPointMetadata.AndroidType.VIEW) {
typeSpecBuilder.addField(componentManagerLockField());
methodSpecBuilder
.beginControlFlow("synchronized (componentManagerLock)")
.beginControlFlow("if ($N == null)", managerParam);
}
methodSpecBuilder
.addStatement("$N = createComponentManager()", managerParam)
.endControlFlow();
if (metadata.androidType() != AndroidEntryPointMetadata.AndroidType.VIEW) {
methodSpecBuilder
.endControlFlow()
.endControlFlow();
}
methodSpecBuilder.addStatement("return $N", managerParam);
typeSpecBuilder.addMethod(methodSpecBuilder.build());
}
// protected FragmentComponentManager createComponentManager() {
// return new FragmentComponentManager(initArgs);
// }
private static MethodSpec createComponentManagerMethod(AndroidEntryPointMetadata metadata) {
Preconditions.checkState(
metadata.componentManagerInitArgs().isPresent(),
"This method should not have been called for metadata where the init args are not"
+ " present.");
return MethodSpec.methodBuilder("createComponentManager")
.addModifiers(Modifier.PROTECTED)
.returns(metadata.componentManager())
.addStatement(
"return new $T($L)",
metadata.componentManager(),
metadata.componentManagerInitArgs().get())
.build();
}
// private volatile ComponentManager componentManager;
private static FieldSpec componentManagerField(AndroidEntryPointMetadata metadata) {
ParameterSpec managerParam = metadata.componentManagerParam();
FieldSpec.Builder builder = FieldSpec.builder(managerParam.type, managerParam.name)
.addModifiers(Modifier.PRIVATE);
// Views do not need volatile since these are set in the constructor if ever set.
if (metadata.androidType() != AndroidEntryPointMetadata.AndroidType.VIEW) {
builder.addModifiers(Modifier.VOLATILE);
}
return builder.build();
}
// private final Object componentManagerLock = new Object();
private static FieldSpec componentManagerLockField() {
return FieldSpec.builder(TypeName.get(Object.class), "componentManagerLock")
.addModifiers(Modifier.PRIVATE, Modifier.FINAL)
.initializer("new Object()")
.build();
}
// protected void inject() {
// if (!injected) {
// generatedComponent().inject$CLASS(($CLASS) this);
// injected = true;
// }
// }
private static void addInjectAndMaybeOptionalInjectMethod(
AndroidEntryPointMetadata metadata, TypeSpec.Builder typeSpecBuilder) {
MethodSpec.Builder methodSpecBuilder = MethodSpec.methodBuilder("inject")
.addModifiers(Modifier.PROTECTED);
// Check if the parent is a Hilt type. If it isn't or if it is but it
// wasn't injected by hilt, then return.
// Object parent = ...depends on type...
// if (!optionalInjectParentUsesHilt()) {
// return;
//
if (metadata.allowsOptionalInjection()) {
CodeBlock parentCodeBlock;
if (metadata.androidType() != AndroidType.BROADCAST_RECEIVER) {
parentCodeBlock = CodeBlock.of("optionalInjectGetParent()");
// Also, add the optionalInjectGetParent method we just used. This is a separate method so
// other parts of the code when dealing with @OptionalInject. BroadcastReceiver can't have
// this method since the context is only accessible as a parameter to receive()/inject().
typeSpecBuilder.addMethod(MethodSpec.methodBuilder("optionalInjectGetParent")
.addModifiers(Modifier.PRIVATE)
.returns(TypeName.OBJECT)
.addStatement("return $L", getParentCodeBlock(metadata))
.build());
} else {
// For BroadcastReceiver, use the "context" field that is on the stack.
parentCodeBlock = CodeBlock.of(
"$T.getApplication(context.getApplicationContext())", ClassNames.CONTEXTS);
}
methodSpecBuilder
.beginControlFlow("if (!optionalInjectParentUsesHilt($L))", parentCodeBlock)
.addStatement("return")
.endControlFlow();
// Add the optionalInjectParentUsesHilt used above.
typeSpecBuilder.addMethod(MethodSpec.methodBuilder("optionalInjectParentUsesHilt")
.addModifiers(Modifier.PRIVATE)
.addParameter(TypeName.OBJECT, "parent")
.returns(TypeName.BOOLEAN)
.addStatement("return (parent instanceof $T) "
+ "&& (!(parent instanceof $T) || (($T) parent).wasInjectedByHilt())",
ClassNames.GENERATED_COMPONENT_MANAGER,
AndroidClassNames.INJECTED_BY_HILT,
AndroidClassNames.INJECTED_BY_HILT)
.build());
}
// Only add @Override if an ancestor extends a generated Hilt class.
// When using bytecode injection, this isn't always guaranteed.
if (metadata.overridesAndroidEntryPointClass()
&& ancestorExtendsGeneratedHiltClass(metadata)) {
methodSpecBuilder.addAnnotation(Override.class);
}
typeSpecBuilder.addField(injectedField(metadata));
switch (metadata.androidType()) {
case ACTIVITY:
case FRAGMENT:
case VIEW:
case SERVICE:
methodSpecBuilder
.beginControlFlow("if (!injected)")
.addStatement("injected = true")
.addStatement(
"(($T) $L).$L($L)",
metadata.injectorClassName(),
generatedComponentCallBlock(metadata),
metadata.injectMethodName(),
unsafeCastThisTo(metadata.elementClassName()))
.endControlFlow();
break;
case BROADCAST_RECEIVER:
typeSpecBuilder.addField(injectedLockField());
methodSpecBuilder
.addParameter(ParameterSpec.builder(AndroidClassNames.CONTEXT, "context").build())
.beginControlFlow("if (!injected)")
.beginControlFlow("synchronized (injectedLock)")
.beginControlFlow("if (!injected)")
.addStatement(
"(($T) $T.generatedComponent(context)).$L($L)",
metadata.injectorClassName(),
metadata.componentManager(),
metadata.injectMethodName(),
unsafeCastThisTo(metadata.elementClassName()))
.addStatement("injected = true")
.endControlFlow()
.endControlFlow()
.endControlFlow();
break;
default:
throw new AssertionError();
}
// Also add a wasInjectedByHilt method if needed.
// Even if we aren't optionally injected, if we override an optionally injected Hilt class
// we also need to override the wasInjectedByHilt method.
if (metadata.allowsOptionalInjection() || metadata.baseAllowsOptionalInjection()) {
typeSpecBuilder.addMethod(
MethodSpec.methodBuilder("wasInjectedByHilt")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(boolean.class)
.addStatement("return injected")
.build());
// Only add the | method |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClientFaultInjector.java | {
"start": 1349,
"end": 2429
} | class ____ {
private static DFSClientFaultInjector instance = new DFSClientFaultInjector();
public static AtomicLong exceptionNum = new AtomicLong(0);
public static DFSClientFaultInjector get() {
return instance;
}
public static void set(DFSClientFaultInjector instance) {
DFSClientFaultInjector.instance = instance;
}
public boolean corruptPacket() {
return false;
}
public boolean uncorruptPacket() {
return false;
}
public boolean failPacket() {
return false;
}
public void startFetchFromDatanode() {}
public void fetchFromDatanodeException() {}
public void readFromDatanodeDelay() {}
public boolean skipRollingRestartWait() {
return false;
}
public void sleepBeforeHedgedGet() {}
public void delayWhenRenewLeaseTimeout() {}
public void onCreateBlockReader(LocatedBlock block, int chunkIndex, long offset, long length) {}
public void failCreateBlockReader() throws InvalidBlockTokenException {}
public void failWhenReadWithStrategy(boolean isRetryRead) throws IOException {};
}
| DFSClientFaultInjector |
java | hibernate__hibernate-orm | hibernate-community-dialects/src/test/java/org/hibernate/community/dialect/DerbyDateTimeParameterTest.java | {
"start": 8174,
"end": 8723
} | class ____ {
@Id
private Integer id;
private String body;
@Column(name = "POST_DATE")
@Temporal(TemporalType.DATE)
private Date postDate;
@Column(name = "POST_TIME")
@Temporal(TemporalType.TIME)
private Date postTime;
@Column(name = "TS")
@Temporal(TemporalType.TIMESTAMP)
private Date ts;
public Message() {
}
public Message(Integer id, String body, Date postDate, Date postTime, Date ts) {
this.id = id;
this.body = body;
this.postDate = postDate;
this.postTime = postTime;
this.ts = ts;
}
}
}
| Message |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/callback/QuarkusTestAfterConstructCallback.java | {
"start": 256,
"end": 351
} | interface ____ {
void afterConstruct(Object testInstance);
}
| QuarkusTestAfterConstructCallback |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/handler/annotation/reactive/PayloadMethodArgumentResolverTests.java | {
"start": 2253,
"end": 7202
} | class ____ {
private final List<Decoder<?>> decoders = new ArrayList<>();
private final ResolvableMethod testMethod = ResolvableMethod.on(getClass()).named("handle").build();
@Test
void supportsParameter() {
boolean useDefaultResolution = true;
PayloadMethodArgumentResolver resolver = createResolver(null, useDefaultResolution);
assertThat(resolver.supportsParameter(this.testMethod.annotPresent(Payload.class).arg())).isTrue();
assertThat(resolver.supportsParameter(this.testMethod.annotNotPresent(Payload.class).arg(String.class))).isTrue();
useDefaultResolution = false;
resolver = createResolver(null, useDefaultResolution);
assertThat(resolver.supportsParameter(this.testMethod.annotPresent(Payload.class).arg())).isTrue();
assertThat(resolver.supportsParameter(this.testMethod.annotNotPresent(Payload.class).arg(String.class))).isFalse();
}
@Test
void emptyBodyWhenRequired() {
MethodParameter param = this.testMethod.arg(ResolvableType.forClassWithGenerics(Mono.class, String.class));
Mono<Object> mono = resolveValue(param, Mono.empty(), null);
StepVerifier.create(mono)
.consumeErrorWith(ex -> {
assertThat(ex.getClass()).isEqualTo(MethodArgumentResolutionException.class);
assertThat(ex.getMessage()).as(ex.getMessage()).contains("Payload content is missing");
})
.verify();
}
@Test
void emptyBodyWhenNotRequired() {
MethodParameter param = this.testMethod.annotPresent(Payload.class).arg();
assertThat(this.<Object>resolveValue(param, Mono.empty(), null)).isNull();
}
@Test
void stringMono() {
String body = "foo";
MethodParameter param = this.testMethod.arg(ResolvableType.forClassWithGenerics(Mono.class, String.class));
Mono<Object> mono = resolveValue(param,
Mono.delay(Duration.ofMillis(10)).map(aLong -> toDataBuffer(body)), null);
assertThat(mono.block()).isEqualTo(body);
}
@Test
void stringFlux() {
List<String> body = Arrays.asList("foo", "bar");
ResolvableType type = ResolvableType.forClassWithGenerics(Flux.class, String.class);
MethodParameter param = this.testMethod.arg(type);
Flux<Object> flux = resolveValue(param,
Flux.fromIterable(body).delayElements(Duration.ofMillis(10)).map(this::toDataBuffer), null);
assertThat(flux.collectList().block()).isEqualTo(body);
}
@Test
void string() {
String body = "foo";
MethodParameter param = this.testMethod.annotNotPresent(Payload.class).arg(String.class);
Object value = resolveValue(param, Mono.just(toDataBuffer(body)), null);
assertThat(value).isEqualTo(body);
}
@Test
void validateStringMono() {
TestValidator validator = new TestValidator();
ResolvableType type = ResolvableType.forClassWithGenerics(Mono.class, String.class);
MethodParameter param = this.testMethod.arg(type);
Mono<Object> mono = resolveValue(param, Mono.just(toDataBuffer("12345")), validator);
StepVerifier.create(mono).expectNextCount(0)
.expectError(MethodArgumentNotValidException.class).verify();
}
@Test
void validateStringFlux() {
TestValidator validator = new TestValidator();
ResolvableType type = ResolvableType.forClassWithGenerics(Flux.class, String.class);
MethodParameter param = this.testMethod.arg(type);
Flux<DataBuffer> content = Flux.just(toDataBuffer("12345678"), toDataBuffer("12345"));
Flux<Object> flux = resolveValue(param, content, validator);
StepVerifier.create(flux)
.expectNext("12345678")
.expectError(MethodArgumentNotValidException.class)
.verify();
}
private DataBuffer toDataBuffer(String value) {
return DefaultDataBufferFactory.sharedInstance.wrap(value.getBytes(StandardCharsets.UTF_8));
}
@SuppressWarnings("unchecked")
private <T> @Nullable T resolveValue(MethodParameter param, Publisher<DataBuffer> content, Validator validator) {
Message<?> message = new GenericMessage<>(content,
Collections.singletonMap(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.TEXT_PLAIN));
Mono<Object> result = createResolver(validator, true).resolveArgument(param, message);
Object value = result.block(Duration.ofSeconds(5));
if (value != null) {
Class<?> expectedType = param.getParameterType();
assertThat(expectedType.isAssignableFrom(value.getClass())).as("Unexpected return value type: " + value).isTrue();
}
return (T) value;
}
private PayloadMethodArgumentResolver createResolver(@Nullable Validator validator, boolean useDefaultResolution) {
if (this.decoders.isEmpty()) {
this.decoders.add(StringDecoder.allMimeTypes());
}
List<StringDecoder> decoders = Collections.singletonList(StringDecoder.allMimeTypes());
return new PayloadMethodArgumentResolver(decoders, validator, null, useDefaultResolution) {};
}
@SuppressWarnings("unused")
private void handle(
@Validated Mono<String> valueMono,
@Validated Flux<String> valueFlux,
@Payload(required = false) String optionalValue,
String value) {
}
private static | PayloadMethodArgumentResolverTests |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/AbfsDriverException.java | {
"start": 1202,
"end": 3107
} | class ____ extends AbfsRestOperationException {
/** Default error message used when no inner exception is provided. */
private static final String ERROR_MESSAGE = "Runtime Exception Occurred In ABFS Driver";
/**
* Constructs an {@code AbfsDriverException} with the specified inner exception.
*
* @param innerException the underlying exception that caused the failure
*/
public AbfsDriverException(final Exception innerException) {
super(
AzureServiceErrorCode.UNKNOWN.getStatusCode(),
AzureServiceErrorCode.UNKNOWN.getErrorCode(),
innerException != null
? innerException.toString()
: ERROR_MESSAGE,
innerException);
}
/**
* Constructs an {@code AbfsDriverException} with the specified inner exception
* and activity ID for correlation.
*
* @param innerException the underlying exception that caused the failure
* @param activityId the request or operation ID for traceability
*/
public AbfsDriverException(final Exception innerException, final String activityId) {
super(
AzureServiceErrorCode.UNKNOWN.getStatusCode(),
AzureServiceErrorCode.UNKNOWN.getErrorCode(),
innerException != null
? innerException.toString() + ", rId: " + activityId
: ERROR_MESSAGE + ", rId: " + activityId,
null);
}
/**
* Constructs an {@code AbfsDriverException} with a custom error message and
* inner exception.
*
* @param errorMessage a custom error message describing the failure
* @param innerException the underlying exception that caused the failure
*/
public AbfsDriverException(final String errorMessage, final Exception innerException) {
super(
AzureServiceErrorCode.UNKNOWN.getStatusCode(),
AzureServiceErrorCode.UNKNOWN.getErrorCode(),
errorMessage,
innerException);
}
}
| AbfsDriverException |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/imports/nested/Source.java | {
"start": 236,
"end": 433
} | class ____ {
private Nested value;
public Nested getValue() {
return value;
}
public void setValue(Nested value) {
this.value = value;
}
public static | Source |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/FileChannelBoundedDataTest.java | {
"start": 1829,
"end": 7656
} | class ____ extends BoundedDataTestBase {
private static final String tempDir = EnvironmentInformation.getTemporaryFileDirectory();
private static FileChannelManager fileChannelManager;
@BeforeAll
static void setUp() {
fileChannelManager = new FileChannelManagerImpl(new String[] {tempDir}, "testing");
}
@AfterAll
static void shutdown() throws Exception {
fileChannelManager.close();
}
@Override
protected boolean isRegionBased() {
return false;
}
@Override
protected BoundedData createBoundedData(Path tempFilePath) throws IOException {
return FileChannelBoundedData.create(tempFilePath, BUFFER_SIZE);
}
@Override
protected BoundedData createBoundedDataWithRegion(Path tempFilePath, int regionSize) {
throw new UnsupportedOperationException();
}
@TestTemplate
void testReadNextBuffer() throws Exception {
final int numberOfBuffers = 3;
try (final BoundedData data = createBoundedData()) {
writeBuffers(data, numberOfBuffers);
final BoundedData.Reader reader = data.createReader();
final Buffer buffer1 = reader.nextBuffer();
final Buffer buffer2 = reader.nextBuffer();
assertThat(buffer1).isNotNull();
assertThat(buffer2).isNotNull();
// there are only two available memory segments for reading data
assertThat(reader.nextBuffer()).isNull();
// cleanup
buffer1.recycleBuffer();
buffer2.recycleBuffer();
}
}
@TestTemplate
void testRecycleBufferForNotifyingSubpartitionView() throws Exception {
final int numberOfBuffers = 2;
try (final BoundedData data = createBoundedData()) {
writeBuffers(data, numberOfBuffers);
final VerifyNotificationResultSubpartitionView subpartitionView =
new VerifyNotificationResultSubpartitionView();
final BoundedData.Reader reader = data.createReader(subpartitionView);
final Buffer buffer1 = reader.nextBuffer();
final Buffer buffer2 = reader.nextBuffer();
assertThat(buffer1).isNotNull();
assertThat(buffer2).isNotNull();
assertThat(subpartitionView.isAvailable).isFalse();
buffer1.recycleBuffer();
// the view is notified while recycling buffer if reader has not tagged finished
assertThat(subpartitionView.isAvailable).isTrue();
subpartitionView.resetAvailable();
assertThat(subpartitionView.isAvailable).isFalse();
// the next buffer is null to make reader tag finished
assertThat(reader.nextBuffer()).isNull();
buffer2.recycleBuffer();
// the view is not notified while recycling buffer if reader already finished
assertThat(subpartitionView.isAvailable).isFalse();
}
}
@TestTemplate
void testRecycleBufferForNotifyingBufferAvailabilityListener() throws Exception {
final ResultSubpartition subpartition = createFileBoundedBlockingSubpartition();
final int numberOfBuffers = 2;
writeBuffers(subpartition, numberOfBuffers);
final VerifyNotificationBufferAvailabilityListener listener =
new VerifyNotificationBufferAvailabilityListener();
final ResultSubpartitionView subpartitionView = createView(subpartition, listener);
assertThat(listener.isAvailable).isFalse();
final BufferAndBacklog buffer1 = subpartitionView.getNextBuffer();
final BufferAndBacklog buffer2 = subpartitionView.getNextBuffer();
assertThat(buffer1).isNotNull();
assertThat(buffer2).isNotNull();
// the next buffer is null in view because FileBufferReader has no available buffers for
// reading ahead
assertThat(subpartitionView.getAvailabilityAndBacklog(true).isAvailable()).isFalse();
// recycle a buffer to trigger notification of data available
buffer1.buffer().recycleBuffer();
assertThat(listener.isAvailable).isTrue();
// cleanup
buffer2.buffer().recycleBuffer();
subpartitionView.releaseAllResources();
subpartition.release();
}
private static ResultSubpartition createFileBoundedBlockingSubpartition() {
final BoundedBlockingResultPartition resultPartition =
(BoundedBlockingResultPartition)
new ResultPartitionBuilder()
.setNetworkBufferSize(BUFFER_SIZE)
.setResultPartitionType(ResultPartitionType.BLOCKING)
.setBoundedBlockingSubpartitionType(
BoundedBlockingSubpartitionType.FILE)
.setFileChannelManager(fileChannelManager)
.setSSLEnabled(true)
.build();
return resultPartition.subpartitions[0];
}
private static void writeBuffers(BoundedData data, int numberOfBuffers) throws IOException {
for (int i = 0; i < numberOfBuffers; i++) {
data.writeBuffer(buildSomeBuffer(BUFFER_SIZE));
}
data.finishWrite();
}
private static void writeBuffers(ResultSubpartition subpartition, int numberOfBuffers)
throws IOException {
for (int i = 0; i < numberOfBuffers; i++) {
subpartition.add(createFilledFinishedBufferConsumer(BUFFER_SIZE));
}
subpartition.finish();
}
/**
* This subpartition view is used for verifying the {@link
* ResultSubpartitionView#notifyDataAvailable()} was ever called before.
*/
private static | FileChannelBoundedDataTest |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecValues.java | {
"start": 1900,
"end": 3593
} | class ____ extends ExecNodeBase<RowData>
implements SingleTransformationTranslator<RowData> {
public static final String VALUES_TRANSFORMATION = "values";
public static final String FIELD_NAME_TUPLES = "tuples";
private final List<List<RexLiteral>> tuples;
public CommonExecValues(
int id,
ExecNodeContext context,
ReadableConfig persistedConfig,
List<List<RexLiteral>> tuples,
RowType outputType,
String description) {
super(id, context, persistedConfig, Collections.emptyList(), outputType, description);
this.tuples = tuples;
}
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
final ValuesInputFormat inputFormat =
ValuesCodeGenerator.generatorInputFormat(
config,
planner.getFlinkContext().getClassLoader(),
(RowType) getOutputType(),
tuples,
getClass().getSimpleName());
final Transformation<RowData> transformation =
planner.getExecEnv()
.createInput(inputFormat, inputFormat.getProducedType())
.getTransformation();
createTransformationMeta(VALUES_TRANSFORMATION, config).fill(transformation);
transformation.setParallelism(1);
transformation.setMaxParallelism(1);
return transformation;
}
@JsonProperty(value = FIELD_NAME_TUPLES)
public List<List<RexLiteral>> getTuples() {
return tuples;
}
}
| CommonExecValues |
java | redisson__redisson | redisson/src/test/java/org/redisson/RedissonMapReactiveTest.java | {
"start": 1908,
"end": 17592
} | class ____ implements Serializable {
private String value;
public SimpleValue() {
}
public SimpleValue(String field) {
this.value = field;
}
public void setValue(String field) {
this.value = field;
}
public String getValue() {
return value;
}
@Override
public String toString() {
return "value: " + value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SimpleValue other = (SimpleValue) obj;
if (value == null) {
if (other.value != null)
return false;
} else if (!value.equals(other.value))
return false;
return true;
}
}
@Test
public void testIteratorSequence() throws InterruptedException {
RMapReactive<Long, Long> map = redisson.getMap("map");
for (int i = 0; i < 1000; i++) {
sync(map.put(Long.valueOf(i), Long.valueOf(i)));
}
Map<Long, Long> setCopy = new HashMap<>();
for (int i = 0; i < 1000; i++) {
setCopy.put(Long.valueOf(i), Long.valueOf(i));
}
checkIterator(map, setCopy);
}
private <K, V> void checkIterator(RMapReactive<K, V> set, Map<K, V> setCopy) {
for (Iterator<Entry<K, V>> iterator = toIterator(set.entryIterator()); iterator.hasNext();) {
Entry<K, V> entry = iterator.next();
if (!setCopy.remove(entry.getKey(), entry.getValue())) {
Assertions.fail();
}
}
Assertions.assertEquals(0, setCopy.size());
}
@Test
public void testAddAndGet() throws InterruptedException {
RMapReactive<Integer, Integer> map = redisson.getMap("getAll", new CompositeCodec(redisson.getConfig().getCodec(), IntegerCodec.INSTANCE));
sync(map.put(1, 100));
Integer res = sync(map.addAndGet(1, 12));
Assertions.assertEquals(112, (int)res);
res = sync(map.get(1));
Assertions.assertEquals(112, (int)res);
RMapReactive<Integer, Double> map2 = redisson.getMap("getAll2", new CompositeCodec(redisson.getConfig().getCodec(), DoubleCodec.INSTANCE));
sync(map2.put(1, new Double(100.2)));
Double res2 = sync(map2.addAndGet(1, new Double(12.1)));
Assertions.assertTrue(new Double(112.3).compareTo(res2) == 0);
res2 = sync(map2.get(1));
Assertions.assertTrue(new Double(112.3).compareTo(res2) == 0);
RMapReactive<String, Integer> mapStr = redisson.getMap("mapStr", new CompositeCodec(redisson.getConfig().getCodec(), IntegerCodec.INSTANCE));
assertThat(sync(mapStr.put("1", 100))).isNull();
assertThat(sync(mapStr.addAndGet("1", 12))).isEqualTo(112);
assertThat(sync(mapStr.get("1"))).isEqualTo(112);
}
@Test
public void testGetAll() {
RMapReactive<Integer, Integer> map = redisson.getMap("getAll");
sync(map.put(1, 100));
sync(map.put(2, 200));
sync(map.put(3, 300));
sync(map.put(4, 400));
Map<Integer, Integer> filtered = sync(map.getAll(new HashSet<Integer>(Arrays.asList(2, 3, 5))));
Map<Integer, Integer> expectedMap = new HashMap<Integer, Integer>();
expectedMap.put(2, 200);
expectedMap.put(3, 300);
Assertions.assertEquals(expectedMap, filtered);
}
@Test
public void testGetAllWithStringKeys() {
RMapReactive<String, Integer> map = redisson.getMap("getAllStrings");
sync(map.put("A", 100));
sync(map.put("B", 200));
sync(map.put("C", 300));
sync(map.put("D", 400));
Map<String, Integer> filtered = sync(map.getAll(new HashSet<String>(Arrays.asList("B", "C", "E"))));
Map<String, Integer> expectedMap = new HashMap<String, Integer>();
expectedMap.put("B", 200);
expectedMap.put("C", 300);
Assertions.assertEquals(expectedMap, filtered);
}
@Test
public void testInteger() {
RMapReactive<Integer, Integer> map = redisson.getMap("test_int");
sync(map.put(1, 2));
sync(map.put(3, 4));
Assertions.assertEquals(2, sync(map.size()).intValue());
Integer val = sync(map.get(1));
Assertions.assertEquals(2, val.intValue());
Integer val2 = sync(map.get(3));
Assertions.assertEquals(4, val2.intValue());
}
@Test
public void testLong() {
RMapReactive<Long, Long> map = redisson.getMap("test_long");
sync(map.put(1L, 2L));
sync(map.put(3L, 4L));
Assertions.assertEquals(2, sync(map.size()).intValue());
Long val = sync(map.get(1L));
Assertions.assertEquals(2L, val.longValue());
Long val2 = sync(map.get(3L));
Assertions.assertEquals(4L, val2.longValue());
}
@Test
public void testSimpleTypes() {
RMapReactive<Integer, String> map = redisson.getMap("simple12");
sync(map.put(1, "12"));
sync(map.put(2, "33"));
sync(map.put(3, "43"));
String val = sync(map.get(2));
Assertions.assertEquals("33", val);
}
@Test
public void testRemove() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
sync(map.put(new SimpleKey("33"), new SimpleValue("44")));
sync(map.put(new SimpleKey("5"), new SimpleValue("6")));
sync(map.remove(new SimpleKey("33")));
sync(map.remove(new SimpleKey("5")));
Assertions.assertEquals(1, sync(map.size()).intValue());
}
@Test
public void testPutAll() {
RMapReactive<Integer, String> map = redisson.getMap("simple");
sync(map.put(1, "1"));
sync(map.put(2, "2"));
sync(map.put(3, "3"));
Map<Integer, String> joinMap = new HashMap<Integer, String>();
joinMap.put(4, "4");
joinMap.put(5, "5");
joinMap.put(6, "6");
sync(map.putAll(joinMap));
assertThat(toIterable(map.keyIterator())).contains(1, 2, 3, 4, 5, 6);
}
@Test
public void testContainsValue() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
sync(map.put(new SimpleKey("33"), new SimpleValue("44")));
sync(map.put(new SimpleKey("5"), new SimpleValue("6")));
Assertions.assertTrue(sync(map.containsValue(new SimpleValue("2"))));
Assertions.assertFalse(sync(map.containsValue(new SimpleValue("441"))));
Assertions.assertFalse(sync(map.containsValue(new SimpleKey("5"))));
}
@Test
public void testContainsKey() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
sync(map.put(new SimpleKey("33"), new SimpleValue("44")));
sync(map.put(new SimpleKey("5"), new SimpleValue("6")));
Assertions.assertTrue(sync(map.containsKey(new SimpleKey("33"))));
Assertions.assertFalse(sync(map.containsKey(new SimpleKey("34"))));
}
@Test
public void testRemoveValue() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
boolean size = sync(map.remove(new SimpleKey("1"), new SimpleValue("2")));
Assertions.assertTrue(size);
SimpleValue val1 = sync(map.get(new SimpleKey("1")));
Assertions.assertNull(val1);
Assertions.assertEquals(0, sync(map.size()).intValue());
}
@Test
public void testRemoveValueFail() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
boolean removed = sync(map.remove(new SimpleKey("2"), new SimpleValue("1")));
Assertions.assertFalse(removed);
boolean size2 = sync(map.remove(new SimpleKey("1"), new SimpleValue("3")));
Assertions.assertFalse(size2);
SimpleValue val1 = sync(map.get(new SimpleKey("1")));
Assertions.assertEquals("2", val1.getValue());
}
@Test
public void testReplaceOldValueFail() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
boolean res = sync(map.replace(new SimpleKey("1"), new SimpleValue("43"), new SimpleValue("31")));
Assertions.assertFalse(res);
SimpleValue val1 = sync(map.get(new SimpleKey("1")));
Assertions.assertEquals("2", val1.getValue());
}
@Test
public void testReplaceOldValueSuccess() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
boolean res = sync(map.replace(new SimpleKey("1"), new SimpleValue("2"), new SimpleValue("3")));
Assertions.assertTrue(res);
boolean res1 = sync(map.replace(new SimpleKey("1"), new SimpleValue("2"), new SimpleValue("3")));
Assertions.assertFalse(res1);
SimpleValue val1 = sync(map.get(new SimpleKey("1")));
Assertions.assertEquals("3", val1.getValue());
}
@Test
public void testReplaceValue() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
SimpleValue res = sync(map.replace(new SimpleKey("1"), new SimpleValue("3")));
Assertions.assertEquals("2", res.getValue());
SimpleValue val1 = sync(map.get(new SimpleKey("1")));
Assertions.assertEquals("3", val1.getValue());
}
@Test
public void testReplace() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
sync(map.put(new SimpleKey("33"), new SimpleValue("44")));
sync(map.put(new SimpleKey("5"), new SimpleValue("6")));
SimpleValue val1 = sync(map.get(new SimpleKey("33")));
Assertions.assertEquals("44", val1.getValue());
sync(map.put(new SimpleKey("33"), new SimpleValue("abc")));
SimpleValue val2 = sync(map.get(new SimpleKey("33")));
Assertions.assertEquals("abc", val2.getValue());
}
@Test
public void testPutGet() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
sync(map.put(new SimpleKey("33"), new SimpleValue("44")));
sync(map.put(new SimpleKey("5"), new SimpleValue("6")));
SimpleValue val1 = sync(map.get(new SimpleKey("33")));
Assertions.assertEquals("44", val1.getValue());
SimpleValue val2 = sync(map.get(new SimpleKey("5")));
Assertions.assertEquals("6", val2.getValue());
}
@Test
public void testPutIfAbsent() throws Exception {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
SimpleKey key = new SimpleKey("1");
SimpleValue value = new SimpleValue("2");
sync(map.put(key, value));
Assertions.assertEquals(value, sync(map.putIfAbsent(key, new SimpleValue("3"))));
Assertions.assertEquals(value, sync(map.get(key)));
SimpleKey key1 = new SimpleKey("2");
SimpleValue value1 = new SimpleValue("4");
Assertions.assertNull(sync(map.putIfAbsent(key1, value1)));
Assertions.assertEquals(value1, sync(map.get(key1)));
}
@Test
public void testSize() {
RMapReactive<SimpleKey, SimpleValue> map = redisson.getMap("simple");
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
sync(map.put(new SimpleKey("3"), new SimpleValue("4")));
sync(map.put(new SimpleKey("5"), new SimpleValue("6")));
Assertions.assertEquals(3, sync(map.size()).intValue());
sync(map.put(new SimpleKey("1"), new SimpleValue("2")));
sync(map.put(new SimpleKey("3"), new SimpleValue("4")));
Assertions.assertEquals(3, sync(map.size()).intValue());
sync(map.put(new SimpleKey("1"), new SimpleValue("21")));
sync(map.put(new SimpleKey("3"), new SimpleValue("41")));
Assertions.assertEquals(3, sync(map.size()).intValue());
sync(map.put(new SimpleKey("51"), new SimpleValue("6")));
Assertions.assertEquals(4, sync(map.size()).intValue());
sync(map.remove(new SimpleKey("3")));
Assertions.assertEquals(3, sync(map.size()).intValue());
}
@Test
public void testEmptyRemove() {
RMapReactive<Integer, Integer> map = redisson.getMap("simple");
assertThat(sync(map.remove(1, 3))).isFalse();
sync(map.put(4, 5));
assertThat(sync(map.remove(4, 5))).isTrue();
}
@Test
public void testFastRemoveAsync() throws InterruptedException, ExecutionException {
RMapReactive<Integer, Integer> map = redisson.getMap("simple");
sync(map.put(1, 3));
sync(map.put(3, 5));
sync(map.put(4, 6));
sync(map.put(7, 8));
Assertions.assertEquals((Long) 3L, sync(map.fastRemove(1, 3, 7)));
Assertions.assertEquals(1, sync(map.size()).intValue());
}
@Test
public void testKeyIterator() {
RMapReactive<Integer, Integer> map = redisson.getMap("simple");
sync(map.put(1, 0));
sync(map.put(3, 5));
sync(map.put(4, 6));
sync(map.put(7, 8));
List<Integer> keys = new ArrayList<Integer>(Arrays.asList(1, 3, 4, 7));
for (Iterator<Integer> iterator = toIterator(map.keyIterator()); iterator.hasNext();) {
Integer value = iterator.next();
if (!keys.remove(value)) {
Assertions.fail();
}
}
Assertions.assertEquals(0, keys.size());
}
@Test
public void testValueIterator() {
RMapReactive<Integer, Integer> map = redisson.getMap("simple");
sync(map.put(1, 0));
sync(map.put(3, 5));
sync(map.put(4, 6));
sync(map.put(7, 8));
List<Integer> values = new ArrayList<Integer>(Arrays.asList(0, 5, 6, 8));
for (Iterator<Integer> iterator = toIterator(map.valueIterator()); iterator.hasNext();) {
Integer value = iterator.next();
if (!values.remove(value)) {
Assertions.fail();
}
}
Assertions.assertEquals(0, values.size());
}
@Test
public void testFastPut() throws Exception {
RMapReactive<Integer, Integer> map = redisson.getMap("simple");
Assertions.assertTrue(sync(map.fastPut(1, 2)));
Assertions.assertFalse(sync(map.fastPut(1, 3)));
Assertions.assertEquals(1, sync(map.size()).intValue());
}
@Test
public void testFastRemoveEmpty() throws Exception {
RMapReactive<Integer, Integer> map = redisson.getMap("simple");
sync(map.put(1, 3));
Assertions.assertEquals(0, sync(map.fastRemove()).intValue());
Assertions.assertEquals(1, sync(map.size()).intValue());
}
public static | SimpleValue |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/threadpool/support/AbortPolicyWithReportTest.java | {
"start": 2245,
"end": 9038
} | class ____ {
private static final Logger logger = LoggerFactory.getLogger(AbortPolicyWithReportTest.class);
@BeforeEach
public void setUp() {
AbortPolicyWithReport.lastPrintTime = 0;
}
@Test
void jStackDumpTest() {
URL url = URL.valueOf(
"dubbo://admin:hello1234@10.20.130.230:20880/context/path?dump.directory=/tmp&version=1.0.0&application=morgan&noValue=");
AtomicReference<FileOutputStream> fileOutputStream = new AtomicReference<>();
AbortPolicyWithReport abortPolicyWithReport = new AbortPolicyWithReport("Test", url) {
@Override
protected void jstack(FileOutputStream jStackStream) {
fileOutputStream.set(jStackStream);
}
};
ExecutorService executorService = Executors.newFixedThreadPool(1);
AbortPolicyWithReport.lastPrintTime = 0;
Assertions.assertThrows(RejectedExecutionException.class, () -> {
abortPolicyWithReport.rejectedExecution(() -> logger.debug("hello"), (ThreadPoolExecutor) executorService);
});
await().until(() -> AbortPolicyWithReport.guard.availablePermits() == 1);
Assertions.assertNotNull(fileOutputStream.get());
executorService.shutdown();
}
@Test
void jStack_ConcurrencyDump_Silence_10Min() {
URL url = URL.valueOf(
"dubbo://admin:hello1234@10.20.130.230:20880/context/path?dump.directory=/tmp&version=1.0.0&application=morgan&noValue=");
AtomicInteger jStackCount = new AtomicInteger(0);
AtomicInteger failureCount = new AtomicInteger(0);
AtomicInteger finishedCount = new AtomicInteger(0);
AtomicInteger timeoutCount = new AtomicInteger(0);
AbortPolicyWithReport abortPolicyWithReport = new AbortPolicyWithReport("Test", url) {
@Override
protected void jstack(FileOutputStream jStackStream) {
jStackCount.incrementAndGet();
// try to simulate the jstack cost long time, so that AbortPolicyWithReport may jstack repeatedly.
long startTime = System.currentTimeMillis();
await().atLeast(200, TimeUnit.MILLISECONDS).until(() -> System.currentTimeMillis() - startTime >= 300);
}
};
ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(
4,
4,
0,
TimeUnit.MILLISECONDS,
new SynchronousQueue<>(),
new NamedInternalThreadFactory("jStack_ConcurrencyDump_Silence_10Min", false),
abortPolicyWithReport);
int runTimes = 100;
List<Future<?>> futureList = new LinkedList<>();
for (int i = 0; i < runTimes; i++) {
try {
futureList.add(threadPoolExecutor.submit(() -> {
finishedCount.incrementAndGet();
long start = System.currentTimeMillis();
// try to await 1s to make sure jstack dump thread scheduled
await().atLeast(300, TimeUnit.MILLISECONDS).until(() -> System.currentTimeMillis() - start >= 300);
}));
} catch (Exception ignored) {
failureCount.incrementAndGet();
}
}
futureList.forEach(f -> {
try {
f.get(500, TimeUnit.MILLISECONDS);
} catch (Exception ignored) {
timeoutCount.incrementAndGet();
}
});
logger.info(
"jStackCount: {}, finishedCount: {}, failureCount: {}, timeoutCount: {}",
jStackCount.get(),
finishedCount.get(),
failureCount.get(),
timeoutCount.get());
Assertions.assertEquals(
runTimes, finishedCount.get() + failureCount.get(), "all the test thread should be run completely");
Assertions.assertEquals(1, jStackCount.get(), "'jstack' should be called only once in 10 minutes");
threadPoolExecutor.shutdown();
}
@Test
void jStackDumpTest_dumpDirectoryNotExists_cannotBeCreatedTakeUserHome() {
final String dumpDirectory = dumpDirectoryCannotBeCreated();
URL url = URL.valueOf("dubbo://admin:hello1234@10.20.130.230:20880/context/path?dump.directory="
+ dumpDirectory
+ "&version=1.0.0&application=morgan&noValue=true");
AbortPolicyWithReport abortPolicyWithReport = new AbortPolicyWithReport("Test", url);
Assertions.assertEquals(
SystemPropertyConfigUtils.getSystemProperty(USER_HOME), abortPolicyWithReport.getDumpPath());
}
private String dumpDirectoryCannotBeCreated() {
final String os =
SystemPropertyConfigUtils.getSystemProperty(SYSTEM_OS_NAME).toLowerCase();
if (os.contains(OS_WIN_PREFIX)) {
// colon is a reserved character which could not be used in a file or directory name,
// https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
return "c:o:n";
} else {
return "/dev/full/" + UUID.randomUUID().toString();
}
}
@Test
void jStackDumpTest_dumpDirectoryNotExists_canBeCreated() {
final String dumpDirectory = UUID.randomUUID().toString();
URL url = URL.valueOf("dubbo://admin:hello1234@10.20.130.230:20880/context/path?dump.directory="
+ dumpDirectory
+ "&version=1.0.0&application=morgan&noValue=true");
AbortPolicyWithReport abortPolicyWithReport = new AbortPolicyWithReport("Test", url);
Assertions.assertNotEquals(
SystemPropertyConfigUtils.getSystemProperty(USER_HOME), abortPolicyWithReport.getDumpPath());
}
@Test
void test_dispatchThreadPoolExhaustedEvent() {
URL url = URL.valueOf(
"dubbo://admin:hello1234@10.20.130.230:20880/context/path?dump.directory=/tmp&version=1.0.0&application=morgan&noValue=");
AbortPolicyWithReport abortPolicyWithReport = new AbortPolicyWithReport("Test", url);
String msg =
"Thread pool is EXHAUSTED! Thread Name: DubboServerHandler-127.0.0.1:12345, Pool Size: 1 (active: 0, core: 1, max: 1, largest: 1), Task: 6 (completed: 6), Executor status:(isShutdown:false, isTerminated:false, isTerminating:false), in dubbo://127.0.0.1:12345!, dubbo version: 2.7.3, current host: 127.0.0.1";
MyListener listener = new MyListener();
abortPolicyWithReport.addThreadPoolExhaustedEventListener(listener);
abortPolicyWithReport.dispatchThreadPoolExhaustedEvent(msg);
assertEquals(listener.getThreadPoolExhaustedEvent().getMsg(), msg);
}
static | AbortPolicyWithReportTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/ReadFromImpl.java | {
"start": 3796,
"end": 4008
} | class ____ extends UnorderedPredicateReadFromAdapter {
public ReadFromAnyReplica() {
super(IS_REPLICA);
}
}
/**
* Read from any node in the subnets. This | ReadFromAnyReplica |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/aot/hint/PrePostAuthorizeHintsRegistrar.java | {
"start": 1424,
"end": 1892
} | class ____ implements SecurityHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, ConfigurableListableBeanFactory beanFactory) {
List<Class<?>> beans = Arrays.stream(beanFactory.getBeanDefinitionNames())
.map((beanName) -> RegisteredBean.of(beanFactory, beanName).getBeanClass())
.collect(Collectors.toList());
new PrePostAuthorizeExpressionBeanHintsRegistrar(beans).registerHints(hints, beanFactory);
}
}
| PrePostAuthorizeHintsRegistrar |
java | apache__flink | flink-core/src/test/java/org/apache/flink/core/classloading/ComponentClassLoaderTest.java | {
"start": 1477,
"end": 9688
} | class ____ {
private static final String NON_EXISTENT_CLASS_NAME = "foo.Bar";
private static final Class<?> CLASS_TO_LOAD = Class.class;
private static final Class<?> CLASS_RETURNED_BY_OWNER = ComponentClassLoaderTest.class;
private static final String NON_EXISTENT_RESOURCE_NAME = "foo/Bar";
private static final URL RESOURCE_RETURNED_BY_OWNER = createURL();
@TempDir private static java.nio.file.Path tempFolder;
// ----------------------------------------------------------------------------------------------
// Class loading
// ----------------------------------------------------------------------------------------------
@Test
void testComponentOnlyIsDefaultForClasses() throws Exception {
assertThatExceptionOfType(ClassNotFoundException.class)
.isThrownBy(
() -> {
TestUrlClassLoader owner =
new TestUrlClassLoader(
NON_EXISTENT_CLASS_NAME, CLASS_RETURNED_BY_OWNER);
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[0],
owner,
new String[0],
new String[0],
Collections.emptyMap());
componentClassLoader.loadClass(NON_EXISTENT_CLASS_NAME);
});
}
@Test
void testOwnerFirstClassFoundIgnoresComponent() throws Exception {
TestUrlClassLoader owner =
new TestUrlClassLoader(CLASS_TO_LOAD.getName(), CLASS_RETURNED_BY_OWNER);
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[0],
owner,
new String[] {CLASS_TO_LOAD.getName()},
new String[0],
Collections.emptyMap());
final Class<?> loadedClass = componentClassLoader.loadClass(CLASS_TO_LOAD.getName());
assertThat(loadedClass).isSameAs(CLASS_RETURNED_BY_OWNER);
}
@Test
void testOwnerFirstClassNotFoundFallsBackToComponent() throws Exception {
TestUrlClassLoader owner = new TestUrlClassLoader();
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[0],
owner,
new String[] {CLASS_TO_LOAD.getName()},
new String[0],
Collections.emptyMap());
final Class<?> loadedClass = componentClassLoader.loadClass(CLASS_TO_LOAD.getName());
assertThat(loadedClass).isSameAs(CLASS_TO_LOAD);
}
@Test
void testComponentFirstClassFoundIgnoresOwner() throws Exception {
TestUrlClassLoader owner =
new TestUrlClassLoader(CLASS_TO_LOAD.getName(), CLASS_RETURNED_BY_OWNER);
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[0],
owner,
new String[0],
new String[] {CLASS_TO_LOAD.getName()},
Collections.emptyMap());
final Class<?> loadedClass = componentClassLoader.loadClass(CLASS_TO_LOAD.getName());
assertThat(loadedClass).isSameAs(CLASS_TO_LOAD);
}
@Test
void testComponentFirstClassNotFoundFallsBackToOwner() throws Exception {
TestUrlClassLoader owner =
new TestUrlClassLoader(NON_EXISTENT_CLASS_NAME, CLASS_RETURNED_BY_OWNER);
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[0],
owner,
new String[0],
new String[] {NON_EXISTENT_CLASS_NAME},
Collections.emptyMap());
final Class<?> loadedClass = componentClassLoader.loadClass(NON_EXISTENT_CLASS_NAME);
assertThat(loadedClass).isSameAs(CLASS_RETURNED_BY_OWNER);
}
// ----------------------------------------------------------------------------------------------
// Resource loading
// ----------------------------------------------------------------------------------------------
@Test
void testComponentOnlyIsDefaultForResources() throws IOException {
TestUrlClassLoader owner = new TestUrlClassLoader();
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[0], owner, new String[0], new String[0], Collections.emptyMap());
assertThat(componentClassLoader.getResource(NON_EXISTENT_RESOURCE_NAME)).isNull();
assertThat(componentClassLoader.getResources(NON_EXISTENT_RESOURCE_NAME).hasMoreElements())
.isFalse();
}
@Test
void testOwnerFirstResourceFoundIgnoresComponent() throws IOException {
String resourceToLoad =
TempDirUtils.newFile(tempFolder, "tmpfile" + UUID.randomUUID()).getName();
TestUrlClassLoader owner =
new TestUrlClassLoader(resourceToLoad, RESOURCE_RETURNED_BY_OWNER);
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[] {},
owner,
new String[] {resourceToLoad},
new String[0],
Collections.emptyMap());
final URL loadedResource = componentClassLoader.getResource(resourceToLoad);
assertThat(loadedResource).isSameAs(RESOURCE_RETURNED_BY_OWNER);
}
@Test
void testOwnerFirstResourceNotFoundFallsBackToComponent() throws Exception {
String resourceToLoad = TempDirUtils.newFile(tempFolder).getName();
TestUrlClassLoader owner = new TestUrlClassLoader();
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[] {tempFolder.toUri().toURL()},
owner,
new String[] {resourceToLoad},
new String[0],
Collections.emptyMap());
final URL loadedResource = componentClassLoader.getResource(resourceToLoad);
assertThat(loadedResource.toString()).contains(resourceToLoad);
}
@Test
void testComponentFirstResourceFoundIgnoresOwner() throws Exception {
String resourceToLoad = TempDirUtils.newFile(tempFolder).getName();
TestUrlClassLoader owner =
new TestUrlClassLoader(resourceToLoad, RESOURCE_RETURNED_BY_OWNER);
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[] {tempFolder.toUri().toURL()},
owner,
new String[0],
new String[] {resourceToLoad},
Collections.emptyMap());
final URL loadedResource = componentClassLoader.getResource(resourceToLoad);
assertThat(loadedResource.toString()).contains(resourceToLoad);
}
@Test
void testComponentFirstResourceNotFoundFallsBackToOwner() {
TestUrlClassLoader owner =
new TestUrlClassLoader(NON_EXISTENT_RESOURCE_NAME, RESOURCE_RETURNED_BY_OWNER);
final ComponentClassLoader componentClassLoader =
new ComponentClassLoader(
new URL[0],
owner,
new String[0],
new String[] {NON_EXISTENT_RESOURCE_NAME},
Collections.emptyMap());
final URL loadedResource = componentClassLoader.getResource(NON_EXISTENT_RESOURCE_NAME);
assertThat(loadedResource).isSameAs(RESOURCE_RETURNED_BY_OWNER);
}
private static | ComponentClassLoaderTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineFilter.java | {
"start": 1228,
"end": 1895
} | enum ____ {
/**
* Combines multiple filters.
*/
LIST,
/**
* Filter which is used for key-value comparison.
*/
COMPARE,
/**
* Filter which is used for checking key-value equality.
*/
KEY_VALUE,
/**
* Filter which is used for checking key-multiple values equality.
*/
KEY_VALUES,
/**
* Filter which matches prefix for a config or a metric.
*/
PREFIX,
/**
* Filter which checks existence of a value.
*/
EXISTS
}
public abstract TimelineFilterType getFilterType();
public String toString() {
return this.getClass().getSimpleName();
}
} | TimelineFilterType |
java | apache__flink | flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/session/CloseSessionResponseBody.java | {
"start": 1291,
"end": 1686
} | class ____ implements ResponseBody {
private static final String FIELD_NAME_STATUS = "status";
@JsonProperty(FIELD_NAME_STATUS)
private final String status;
@JsonCreator
public CloseSessionResponseBody(@JsonProperty(FIELD_NAME_STATUS) String status) {
this.status = status;
}
public String getStatus() {
return status;
}
}
| CloseSessionResponseBody |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/config/plugins/convert/TypeConverters.java | {
"start": 2009,
"end": 2375
} | class ____ {
/**
* The {@link Plugin#category() Plugin Category} to use for {@link TypeConverter} plugins.
*
* @since 2.1
*/
public static final String CATEGORY = "TypeConverter";
/**
* Parses a {@link String} into a {@link BigDecimal}.
*/
@Plugin(name = "BigDecimal", category = CATEGORY)
public static | TypeConverters |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/ConstructorDetectorTest.java | {
"start": 2104,
"end": 2332
} | class ____ {
protected int v;
SingleArgNoMode() { v = -1; }
@JsonCreator
public SingleArgNoMode(@ImplicitName("value") int value) {
v = value;
}
}
static | SingleArgNoMode |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/AlterTableQuoteDefaultSchemaTest.java | {
"start": 1807,
"end": 5764
} | class ____ extends AbstractAlterTableQuoteSchemaTest {
@BeforeEach
protected void init() {
try {
inTransaction(
session -> session.createNativeQuery( "DROP TABLE " + quote( "default-schema", "my_entity" ) )
.executeUpdate()
);
}
catch (Exception e) {
}
try {
inTransaction(
session -> session.createNativeQuery( "DROP SCHEMA " + quote( "default-schema" ) )
.executeUpdate()
);
}
catch (Exception e) {
}
try {
inTransaction(
session -> session.createNativeQuery( "CREATE SCHEMA " + quote( "default-schema" ) )
.executeUpdate()
);
}
catch (Exception e) {
}
}
@AfterEach
protected void tearDown() {
try {
inTransaction(
session -> session.createNativeQuery( "DROP SCHEMA " + quote( "default-schema" ) )
.executeUpdate()
);
}
catch (Exception e) {
}
}
@Test
public void testDefaultSchema() throws IOException {
File output = File.createTempFile( "update_script", ".sql" );
output.deleteOnExit();
StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting( AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, Boolean.TRUE.toString() )
.build();
try {
final MetadataSources metadataSources = new MetadataSources( ssr ) {
@Override
public MetadataBuilder getMetadataBuilder() {
MetadataBuilder metadataBuilder = super.getMetadataBuilder();
metadataBuilder.applyImplicitSchemaName( "default-schema" );
return metadataBuilder;
}
};
metadataSources.addAnnotatedClass( MyEntity.class );
final MetadataImplementor metadata = (MetadataImplementor) metadataSources.buildMetadata();
metadata.orderColumns( false );
metadata.validate();
new SchemaUpdate()
.setHaltOnError( true )
.setOutputFile( output.getAbsolutePath() )
.setDelimiter( ";" )
.setFormat( true )
.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
try {
String fileContent = new String( Files.readAllBytes( output.toPath() ) );
Pattern fileContentPattern = Pattern
.compile( "create table " + regexpQuote( "default-schema", "my_entity" ) );
Matcher fileContentMatcher = fileContentPattern.matcher( fileContent.toLowerCase() );
assertThat( fileContentMatcher.find(), is( true ) );
}
catch (IOException e) {
fail( e.getMessage() );
}
ssr = ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting( AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, Boolean.TRUE.toString() )
.build();
try {
final MetadataSources metadataSources = new MetadataSources( ssr ) {
@Override
public MetadataBuilder getMetadataBuilder() {
MetadataBuilder metadataBuilder = super.getMetadataBuilder();
metadataBuilder.applyImplicitSchemaName( "default-schema" );
return metadataBuilder;
}
};
metadataSources.addAnnotatedClass( MyEntityUpdated.class );
final MetadataImplementor metadata = (MetadataImplementor) metadataSources.buildMetadata();
metadata.orderColumns( false );
metadata.validate();
new SchemaUpdate()
.setHaltOnError( true )
.setOutputFile( output.getAbsolutePath() )
.setDelimiter( ";" )
.setFormat( true )
.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
try {
String fileContent = new String( Files.readAllBytes( output.toPath() ) );
Pattern fileContentPattern = Pattern
.compile( "alter table.* " + regexpQuote( "default-schema", "my_entity" ) );
Matcher fileContentMatcher = fileContentPattern.matcher( fileContent.toLowerCase() );
assertThat( fileContentMatcher.find(), is( true ) );
}
catch (IOException e) {
fail( e.getMessage() );
}
}
@Entity(name = "MyEntity")
@Table(name = "my_entity")
public static | AlterTableQuoteDefaultSchemaTest |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/factories/LongFormatFactory.java | {
"start": 1094,
"end": 1652
} | class ____ extends AbstractFormatFactory {
private final LongFormat longFormat = new LongFormat();
{
supportedClasses.add(long.class);
supportedClasses.add(Long.class);
}
@Override
public boolean canBuild(FormattingOptions formattingOptions) {
return super.canBuild(formattingOptions)
&& ObjectHelper.isEmpty(formattingOptions.getPattern());
}
@Override
public Format<?> build(FormattingOptions formattingOptions) {
return longFormat;
}
private static | LongFormatFactory |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java | {
"start": 3524,
"end": 16524
} | interface ____ support netgroups)");
return;
}
LOG.info("Testing netgroups using: " + groupMappingClassName);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_GROUP_MAPPING,
groupMappingClassName);
Groups groups = Groups.getUserToGroupsMappingService(conf);
AccessControlList acl;
// create these ACLs to populate groups cache
acl = new AccessControlList("ja my"); // plain
acl = new AccessControlList("sinatra ratpack,@lasVegas"); // netgroup
acl = new AccessControlList(" somegroup,@someNetgroup"); // no user
// this ACL will be used for testing ACLs
acl = new AccessControlList("carlPerkins ratpack,@lasVegas");
acl.addGroup("@memphis");
// validate the netgroups before and after rehresh to make
// sure refresh works correctly
validateNetgroups(groups, acl);
groups.refresh();
validateNetgroups(groups, acl);
}
/**
* Validate the netgroups, both group membership and ACL
* functionality
*
* Note: assumes a specific acl setup done by testNetgroups
*
* @param groups group to user mapping service
* @param acl ACL set up in a specific way, see testNetgroups
*/
private void validateNetgroups(Groups groups,
AccessControlList acl) throws Exception {
// check that the netgroups are working
List<String> elvisGroups = groups.getGroups("elvis");
assertTrue(elvisGroups.contains("@lasVegas"));
assertTrue(elvisGroups.contains("@memphis"));
List<String> jerryLeeLewisGroups = groups.getGroups("jerryLeeLewis");
assertTrue(jerryLeeLewisGroups.contains("@memphis"));
// allowed because his netgroup is in ACL
UserGroupInformation elvis =
UserGroupInformation.createRemoteUser("elvis");
assertUserAllowed(elvis, acl);
// allowed because he's in ACL
UserGroupInformation carlPerkins =
UserGroupInformation.createRemoteUser("carlPerkins");
assertUserAllowed(carlPerkins, acl);
// not allowed because he's not in ACL and has no netgroups
UserGroupInformation littleRichard =
UserGroupInformation.createRemoteUser("littleRichard");
assertUserNotAllowed(littleRichard, acl);
}
@Test
public void testWildCardAccessControlList() throws Exception {
AccessControlList acl;
acl = new AccessControlList("*");
assertTrue(acl.isAllAllowed());
acl = new AccessControlList(" * ");
assertTrue(acl.isAllAllowed());
acl = new AccessControlList(" *");
assertTrue(acl.isAllAllowed());
acl = new AccessControlList("* ");
assertTrue(acl.isAllAllowed());
}
// Check if AccessControlList.toString() works as expected.
// Also validate if getAclString() for various cases.
@Test
public void testAclString() {
AccessControlList acl;
acl = new AccessControlList("*");
assertThat(acl.toString()).isEqualTo("All users are allowed");
validateGetAclString(acl);
acl = new AccessControlList(" ");
assertThat(acl.toString()).isEqualTo("No users are allowed");
acl = new AccessControlList("user1,user2");
assertThat(acl.toString()).isEqualTo("Users [user1, user2] are allowed");
validateGetAclString(acl);
acl = new AccessControlList("user1,user2 ");// with space
assertThat(acl.toString()).isEqualTo("Users [user1, user2] are allowed");
validateGetAclString(acl);
acl = new AccessControlList(" group1,group2");
assertThat(acl.toString()).isEqualTo(
"Members of the groups [group1, group2] are allowed");
validateGetAclString(acl);
acl = new AccessControlList("user1,user2 group1,group2");
assertThat(acl.toString()).isEqualTo(
"Users [user1, user2] and " +
"members of the groups [group1, group2] are allowed");
validateGetAclString(acl);
}
// Validates if getAclString() is working as expected. i.e. if we can build
// a new ACL instance from the value returned by getAclString().
private void validateGetAclString(AccessControlList acl) {
assertTrue(acl.toString().equals(
new AccessControlList(acl.getAclString()).toString()));
}
@Test
public void testAccessControlList() throws Exception {
AccessControlList acl;
Collection<String> users;
Collection<String> groups;
acl = new AccessControlList("drwho tardis");
users = acl.getUsers();
assertThat(users.size()).isOne();
assertThat(users.iterator().next()).isEqualTo("drwho");
groups = acl.getGroups();
assertThat(groups.size()).isOne();
assertThat(groups.iterator().next()).isEqualTo("tardis");
acl = new AccessControlList("drwho");
users = acl.getUsers();
assertThat(users.size()).isOne();
assertThat(users.iterator().next()).isEqualTo("drwho");
groups = acl.getGroups();
assertThat(groups.size()).isZero();
acl = new AccessControlList("drwho ");
users = acl.getUsers();
assertThat(users.size()).isOne();
assertThat(users.iterator().next()).isEqualTo("drwho");
groups = acl.getGroups();
assertThat(groups.size()).isZero();
acl = new AccessControlList(" tardis");
users = acl.getUsers();
assertThat(users.size()).isZero();
groups = acl.getGroups();
assertThat(groups.size()).isOne();
assertThat(groups.iterator().next()).isEqualTo("tardis");
Iterator<String> iter;
acl = new AccessControlList("drwho,joe tardis, users");
users = acl.getUsers();
assertThat(users.size()).isEqualTo(2);
iter = users.iterator();
assertThat(iter.next()).isEqualTo("drwho");
assertThat(iter.next()).isEqualTo("joe");
groups = acl.getGroups();
assertThat(groups.size()).isEqualTo(2);
iter = groups.iterator();
assertThat(iter.next()).isEqualTo("tardis");
assertThat(iter.next()).isEqualTo("users");
}
/**
* Test addUser/Group and removeUser/Group api.
*/
@Test
public void testAddRemoveAPI() {
AccessControlList acl;
Collection<String> users;
Collection<String> groups;
acl = new AccessControlList(" ");
assertThat(acl.getUsers().size()).isZero();
assertThat(acl.getGroups().size()).isZero();
assertThat(acl.getAclString()).isEqualTo(" ");
acl.addUser("drwho");
users = acl.getUsers();
assertThat(users.size()).isOne();
assertThat(users.iterator().next()).isEqualTo("drwho");
assertThat(acl.getAclString()).isEqualTo("drwho ");
acl.addGroup("tardis");
groups = acl.getGroups();
assertThat(groups.size()).isOne();
assertThat(groups.iterator().next()).isEqualTo("tardis");
assertThat(acl.getAclString()).isEqualTo("drwho tardis");
acl.addUser("joe");
acl.addGroup("users");
users = acl.getUsers();
assertThat(users.size()).isEqualTo(2);
Iterator<String> iter = users.iterator();
assertThat(iter.next()).isEqualTo("drwho");
assertThat(iter.next()).isEqualTo("joe");
groups = acl.getGroups();
assertThat(groups.size()).isEqualTo(2);
iter = groups.iterator();
assertThat(iter.next()).isEqualTo("tardis");
assertThat(iter.next()).isEqualTo("users");
assertThat(acl.getAclString()).isEqualTo("drwho,joe tardis,users");
acl.removeUser("joe");
acl.removeGroup("users");
users = acl.getUsers();
assertThat(users.size()).isOne();
assertFalse(users.contains("joe"));
groups = acl.getGroups();
assertThat(groups.size()).isOne();
assertFalse(groups.contains("users"));
assertThat(acl.getAclString()).isEqualTo("drwho tardis");
acl.removeGroup("tardis");
groups = acl.getGroups();
assertThat(groups.size()).isZero();
assertFalse(groups.contains("tardis"));
assertThat(acl.getAclString()).isEqualTo("drwho ");
acl.removeUser("drwho");
assertThat(users.size()).isZero();
assertFalse(users.contains("drwho"));
assertThat(acl.getGroups().size()).isZero();
assertThat(acl.getUsers().size()).isZero();
assertThat(acl.getAclString()).isEqualTo(" ");
}
/**
* Tests adding/removing wild card as the user/group.
*/
@Test
public void testAddRemoveWildCard() {
AccessControlList acl = new AccessControlList("drwho tardis");
Throwable th = null;
try {
acl.addUser(" * ");
} catch (Throwable t) {
th = t;
}
assertThat(th).isNotNull();
assertThat(th).isInstanceOf(IllegalArgumentException.class);
th = null;
try {
acl.addGroup(" * ");
} catch (Throwable t) {
th = t;
}
assertThat(th).isNotNull();
assertThat(th).isInstanceOf(IllegalArgumentException.class);
th = null;
try {
acl.removeUser(" * ");
} catch (Throwable t) {
th = t;
}
assertThat(th).isNotNull();
assertThat(th).isInstanceOf(IllegalArgumentException.class);
th = null;
try {
acl.removeGroup(" * ");
} catch (Throwable t) {
th = t;
}
assertThat(th).isNotNull();
assertThat(th).isInstanceOf(IllegalArgumentException.class);
}
/**
* Tests adding user/group to an wild card acl.
*/
@Test
public void testAddRemoveToWildCardACL() {
AccessControlList acl = new AccessControlList(" * ");
assertTrue(acl.isAllAllowed());
UserGroupInformation drwho =
UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM",
new String[] { "aliens" });
UserGroupInformation drwho2 =
UserGroupInformation.createUserForTesting("drwho2@EXAMPLE.COM",
new String[] { "tardis" });
acl.addUser("drwho");
assertTrue(acl.isAllAllowed());
assertFalse(acl.getAclString().contains("drwho"));
acl.addGroup("tardis");
assertTrue(acl.isAllAllowed());
assertFalse(acl.getAclString().contains("tardis"));
acl.removeUser("drwho");
assertTrue(acl.isAllAllowed());
assertUserAllowed(drwho, acl);
acl.removeGroup("tardis");
assertTrue(acl.isAllAllowed());
assertUserAllowed(drwho2, acl);
}
/**
* Verify the method isUserAllowed()
*/
@Test
public void testIsUserAllowed() {
AccessControlList acl;
UserGroupInformation drwho =
UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM",
new String[] { "aliens", "humanoids", "timelord" });
UserGroupInformation susan =
UserGroupInformation.createUserForTesting("susan@EXAMPLE.COM",
new String[] { "aliens", "humanoids", "timelord" });
UserGroupInformation barbara =
UserGroupInformation.createUserForTesting("barbara@EXAMPLE.COM",
new String[] { "humans", "teachers" });
UserGroupInformation ian =
UserGroupInformation.createUserForTesting("ian@EXAMPLE.COM",
new String[] { "humans", "teachers" });
acl = new AccessControlList("drwho humanoids");
assertUserAllowed(drwho, acl);
assertUserAllowed(susan, acl);
assertUserNotAllowed(barbara, acl);
assertUserNotAllowed(ian, acl);
acl = new AccessControlList("drwho");
assertUserAllowed(drwho, acl);
assertUserNotAllowed(susan, acl);
assertUserNotAllowed(barbara, acl);
assertUserNotAllowed(ian, acl);
acl = new AccessControlList("drwho ");
assertUserAllowed(drwho, acl);
assertUserNotAllowed(susan, acl);
assertUserNotAllowed(barbara, acl);
assertUserNotAllowed(ian, acl);
acl = new AccessControlList(" humanoids");
assertUserAllowed(drwho, acl);
assertUserAllowed(susan, acl);
assertUserNotAllowed(barbara, acl);
assertUserNotAllowed(ian, acl);
acl = new AccessControlList("drwho,ian aliens,teachers");
assertUserAllowed(drwho, acl);
assertUserAllowed(susan, acl);
assertUserAllowed(barbara, acl);
assertUserAllowed(ian, acl);
acl = new AccessControlList("");
UserGroupInformation spyUser = spy(drwho);
acl.isUserAllowed(spyUser);
verify(spyUser, never()).getGroupNames();
}
private void assertUserAllowed(UserGroupInformation ugi,
AccessControlList acl) {
assertTrue(acl.isUserAllowed(ugi),
"User " + ugi + " is not granted the access-control!!");
}
private void assertUserNotAllowed(UserGroupInformation ugi,
AccessControlList acl) {
assertFalse(acl.isUserAllowed(ugi), "User " + ugi
+ " is incorrectly granted the access-control!!");
}
@Test
public void testUseRealUserAclsForProxiedUser() {
String realUser = "realUser";
AccessControlList acl = new AccessControlList(realUser);
UserGroupInformation realUserUgi =
UserGroupInformation.createRemoteUser(realUser);
UserGroupInformation user1 =
UserGroupInformation.createProxyUserForTesting("regularJane",
realUserUgi, new String [] {"group1"});
assertFalse(acl.isUserAllowed(user1),
"User " + user1 + " should not have been granted access.");
acl = new AccessControlList(AccessControlList.USE_REAL_ACLS + realUser);
assertTrue(acl.isUserAllowed(user1),
"User " + user1 + " should have access but was denied.");
}
}
| and |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/BaselineSessionEventsListenerBuilder.java | {
"start": 790,
"end": 2442
} | class ____ {
private static final SessionEventListener[] EMPTY = new SessionEventListener[0];
private final Class<? extends SessionEventListener> autoListener;
public BaselineSessionEventsListenerBuilder(Class<? extends SessionEventListener> autoListener) {
this.autoListener = autoListener;
}
@SuppressWarnings("unused")
public Class<? extends SessionEventListener> getAutoListener() {
return autoListener;
}
public List<SessionEventListener> buildBaselineList() {
final SessionEventListener[] sessionEventListeners = buildBaseline();
// Capacity: needs to hold at least all elements from the baseline,
// but also expect to add a little more later.
final List<SessionEventListener> list =
new ArrayList<>( sessionEventListeners.length + 3 );
addAll( list, sessionEventListeners );
return list;
}
public SessionEventListener[] buildBaseline() {
if ( StatisticalLoggingSessionEventListener.isLoggingEnabled() ) {
return autoListener == null
? new SessionEventListener[] { statsListener() }
: new SessionEventListener[] { statsListener(), autoListener() };
}
else {
return autoListener == null
? EMPTY
: new SessionEventListener[] { autoListener() };
}
}
private SessionEventListener autoListener() {
try {
return autoListener.newInstance();
}
catch (Exception e) {
throw new HibernateException(
"Unable to instantiate specified auto SessionEventListener: " + autoListener.getName(),
e
);
}
}
private static SessionEventListener statsListener() {
return new StatisticalLoggingSessionEventListener();
}
}
| BaselineSessionEventsListenerBuilder |
java | grpc__grpc-java | api/src/main/java/io/grpc/TlsServerCredentials.java | {
"start": 13194,
"end": 15409
} | interface ____ used. So generally there will
* just be a single entry and it implements {@link javax.net.ssl.X509KeyManager}.
*/
public Builder keyManager(KeyManager... keyManagers) {
List<KeyManager> keyManagerList = Collections.unmodifiableList(new ArrayList<>(
Arrays.asList(keyManagers)));
clearKeyManagers();
this.keyManagers = keyManagerList;
return this;
}
private void clearKeyManagers() {
this.certificateChain = null;
this.privateKey = null;
this.privateKeyPassword = null;
this.keyManagers = null;
}
/**
* Indicates whether the server should expect a client's identity. Must not be {@code null}.
* Defaults to {@link ClientAuth#NONE}.
*/
public Builder clientAuth(ClientAuth clientAuth) {
Preconditions.checkNotNull(clientAuth, "clientAuth");
this.clientAuth = clientAuth;
return this;
}
/**
* Use the provided root certificates to verify the client's identity instead of the system's
* default. Generally they should be PEM-encoded with all the certificates concatenated together
* (file header has "BEGIN CERTIFICATE", and would occur once per certificate).
*/
public Builder trustManager(File rootCerts) throws IOException {
InputStream rootCertsIs = new FileInputStream(rootCerts);
try {
return trustManager(rootCertsIs);
} finally {
rootCertsIs.close();
}
}
/**
* Use the provided root certificates to verify the client's identity instead of the system's
* default. Generally they should be PEM-encoded with all the certificates concatenated together
* (file header has "BEGIN CERTIFICATE", and would occur once per certificate).
*/
public Builder trustManager(InputStream rootCerts) throws IOException {
byte[] rootCertsBytes = ByteStreams.toByteArray(rootCerts);
clearTrustManagers();
this.rootCertificates = rootCertsBytes;
return this;
}
/**
* Have the provided trust manager verify the client's identity instead of the system's default.
* Although multiple are allowed, only the first instance implementing a particular | is |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1541/Issue1541Test.java | {
"start": 517,
"end": 6560
} | class ____ {
@ProcessorTest
public void testMappingWithVarArgs() {
Target target = Issue1541Mapper.INSTANCE.mapWithVarArgs( "code", "1", "2" );
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).contains( "1", "2" );
assertThat( target.isAfterMappingWithArrayCalled() ).isFalse();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isFalse();
}
@ProcessorTest
public void testMappingWithArray() {
Target target = Issue1541Mapper.INSTANCE.mapWithArray( "code", new String[] { "1", "2" } );
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).contains( "1", "2" );
assertThat( target.getParameters2() ).isNull();
assertThat( target.isAfterMappingWithArrayCalled() ).isFalse();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isFalse();
}
@ProcessorTest
public void testMappingWithVarArgsReassignment() {
Target target = Issue1541Mapper.INSTANCE.mapWithReassigningVarArgs( "code", "1", "2" );
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).isNull();
assertThat( target.getParameters2() ).contains( "1", "2" );
assertThat( target.isAfterMappingWithArrayCalled() ).isFalse();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isFalse();
}
@ProcessorTest
public void testMappingWithArrayAndVarArgs() {
Target target = Issue1541Mapper.INSTANCE.mapWithArrayAndVarArgs( "code", new String[] { "1", "2" }, "3", "4" );
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).contains( "1", "2" );
assertThat( target.getParameters2() ).contains( "3", "4" );
assertThat( target.isAfterMappingWithArrayCalled() ).isFalse();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isFalse();
}
@ProcessorTest
public void testVarArgsInAfterMappingAsArray() {
Target target = Issue1541Mapper.INSTANCE.mapParametersAsArrayInAfterMapping( "code", "1", "2" );
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).contains( "1", "2" );
assertThat( target.getParameters2() ).isNull();
assertThat( target.isAfterMappingWithArrayCalled() ).isTrue();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isFalse();
}
@ProcessorTest
public void testVarArgsInAfterMappingAsVarArgs() {
Target target = Issue1541Mapper.INSTANCE.mapParametersAsVarArgsInAfterMapping( "code", "1", "2" );
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).contains( "1", "2" );
assertThat( target.getParameters2() ).isNull();
assertThat( target.isAfterMappingWithArrayCalled() ).isFalse();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isTrue();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isFalse();
}
@ProcessorTest
public void testVarArgsInContextWithVarArgsAfterMapping() {
Target target = Issue1541Mapper.INSTANCE.mapContextWithVarArgsInAfterMappingWithVarArgs(
"code",
new String[] { "1", "2" },
"3",
"4"
);
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).contains( "1", "2" );
assertThat( target.getParameters2() ).contains( "3", "4" );
assertThat( target.isAfterMappingWithArrayCalled() ).isFalse();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isTrue();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isFalse();
}
@ProcessorTest
public void testVarArgsInContextWithArrayAfterMapping() {
Target target = Issue1541Mapper.INSTANCE.mapContextWithVarArgsInAfterMappingWithArray(
"code",
new String[] { "1", "2" },
"3",
"4"
);
assertThat( target ).isNotNull();
assertThat( target.getCode() ).isEqualTo( "code" );
assertThat( target.getParameters() ).contains( "1", "2" );
assertThat( target.getParameters2() ).contains( "3", "4" );
assertThat( target.isAfterMappingWithArrayCalled() ).isFalse();
assertThat( target.isAfterMappingWithVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsVarArgsCalled() ).isFalse();
assertThat( target.isAfterMappingContextWithVarArgsAsArrayCalled() ).isTrue();
}
}
| Issue1541Test |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/CountWindow.java | {
"start": 2703,
"end": 4444
} | class ____ extends TypeSerializerSingleton<CountWindow> {
private static final long serialVersionUID = 1L;
@Override
public boolean isImmutableType() {
return true;
}
@Override
public CountWindow createInstance() {
return null;
}
@Override
public CountWindow copy(CountWindow from) {
return from;
}
@Override
public CountWindow copy(CountWindow from, CountWindow reuse) {
return from;
}
@Override
public int getLength() {
return Long.BYTES;
}
@Override
public void serialize(CountWindow record, DataOutputView target) throws IOException {
target.writeLong(record.id);
}
@Override
public CountWindow deserialize(DataInputView source) throws IOException {
return new CountWindow(source.readLong());
}
@Override
public CountWindow deserialize(CountWindow reuse, DataInputView source) throws IOException {
return deserialize(source);
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
target.writeLong(source.readLong());
}
// ------------------------------------------------------------------------
@Override
public TypeSerializerSnapshot<CountWindow> snapshotConfiguration() {
return new CountWindow.Serializer.CountWindowSerializerSnapshot();
}
/** Serializer configuration snapshot for compatibility and format evolution. */
@SuppressWarnings("WeakerAccess")
public static final | Serializer |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/matchers/Asserts.java | {
"start": 922,
"end": 1372
} | class ____ implements Matcher<StatementTree> {
private final Matcher<ExpressionTree> expressionMatcher;
public Asserts(Matcher<ExpressionTree> expressionMatcher) {
this.expressionMatcher = expressionMatcher;
}
@Override
public boolean matches(StatementTree statementTree, VisitorState state) {
return statementTree instanceof AssertTree assertTree
&& expressionMatcher.matches(assertTree.getCondition(), state);
}
}
| Asserts |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/ExtendedQuarkusVertxHttpMetrics.java | {
"start": 1073,
"end": 1218
} | interface ____ {
void onConnectionRejected();
void initialize(int maxConnections, AtomicInteger current);
}
}
| ConnectionTracker |
java | elastic__elasticsearch | x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java | {
"start": 2148,
"end": 14304
} | class ____ extends MapperTestCase {
@Override
protected void writeField(XContentBuilder builder) {
// do nothing
}
@Override
protected Object getSampleValueForDocument() {
throw new UnsupportedOperationException();
}
@Override
protected Object getSampleValueForQuery() {
return "test";
}
@Override
protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) {
assertThat(query, instanceOf(MatchNoDocsQuery.class));
assertNoFieldNamesField(fields);
}
@Override
protected Collection<Plugin> getPlugins() {
return List.of(new ConstantKeywordMapperPlugin());
}
@Override
protected boolean supportsStoredFields() {
return false;
}
@Override
protected boolean supportsIgnoreMalformed() {
return false;
}
public void testDefaults() throws Exception {
XContentBuilder mapping = fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo"));
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(Strings.toString(mapping), mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(source(b -> {}));
assertNull(doc.rootDoc().getField("field"));
doc = mapper.parse(source(b -> b.field("field", "foo")));
assertNull(doc.rootDoc().getField("field"));
DocumentParsingException e = expectThrows(DocumentParsingException.class, () -> mapper.parse(source(b -> b.field("field", "bar"))));
assertEquals(
"[constant_keyword] field [field] only accepts values that are equal to the value defined in the mappings [foo], "
+ "but got [bar]",
e.getCause().getMessage()
);
}
public void testDynamicValue() throws Exception {
MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "constant_keyword")));
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "foo")));
assertNull(doc.rootDoc().getField("field"));
assertNotNull(doc.dynamicMappingsUpdate());
CompressedXContent mappingUpdate = new CompressedXContent(Strings.toString(doc.dynamicMappingsUpdate()));
DocumentMapper updatedMapper = mapperService.merge("_doc", mappingUpdate, MergeReason.MAPPING_UPDATE);
String expectedMapping = Strings.toString(fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo")));
assertEquals(expectedMapping, updatedMapper.mappingSource().toString());
doc = updatedMapper.parse(source(b -> b.field("field", "foo")));
assertNull(doc.rootDoc().getField("field"));
assertNull(doc.dynamicMappingsUpdate());
}
public void testDynamicValueFieldLimit() throws Exception {
MapperService mapperService = createMapperService(
Settings.builder().put(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), 1).build(),
fieldMapping(b -> b.field("type", "constant_keyword"))
);
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("field", "foo")));
assertNull(doc.rootDoc().getField("field"));
assertNotNull(doc.dynamicMappingsUpdate());
CompressedXContent mappingUpdate = new CompressedXContent(Strings.toString(doc.dynamicMappingsUpdate()));
DocumentMapper updatedMapper = mapperService.merge("_doc", mappingUpdate, MergeReason.MAPPING_UPDATE);
String expectedMapping = Strings.toString(fieldMapping(b -> b.field("type", "constant_keyword").field("value", "foo")));
assertEquals(expectedMapping, updatedMapper.mappingSource().toString());
doc = updatedMapper.parse(source(b -> b.field("field", "foo")));
assertNull(doc.rootDoc().getField("field"));
assertNull(doc.dynamicMappingsUpdate());
}
public void testBadValues() {
{
MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
b.field("type", "constant_keyword");
b.nullField("value");
})));
assertEquals(
e.getMessage(),
"Failed to parse mapping: [value] on mapper [field] of type [constant_keyword] must not have a [null] value"
);
}
{
MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> {
b.field("type", "constant_keyword");
b.startObject("value").field("foo", "bar").endObject();
})));
assertEquals(
e.getMessage(),
"Failed to parse mapping: Property [value] on field [field] must be a number or a string, but got [{foo=bar}]"
);
}
}
public void testNumericValue() throws IOException {
MapperService mapperService = createMapperService(fieldMapping(b -> {
b.field("type", "constant_keyword");
b.field("value", 74);
}));
ConstantKeywordFieldType ft = (ConstantKeywordFieldType) mapperService.fieldType("field");
assertEquals("74", ft.value());
}
public void testUpdate() throws IOException {
MapperService mapperService = createMapperService(fieldMapping(b -> {
b.field("type", "constant_keyword");
b.field("value", "foo");
}));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, fieldMapping(b -> {
b.field("type", "constant_keyword");
b.field("value", "bar");
})));
assertEquals(
e.getMessage(),
"Mapper for [field] conflicts with existing mapper:\n" + "\tCannot update parameter [value] from [foo] to [bar]"
);
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "constant_keyword");
}
@Override
protected void registerParameters(ParameterChecker checker) throws IOException {
checker.registerUpdateCheck(b -> b.field("value", "foo"), m -> {
ConstantKeywordFieldType ft = (ConstantKeywordFieldType) m.fieldType();
assertEquals("foo", ft.value());
});
checker.registerConflictCheck("value", fieldMapping(b -> {
b.field("type", "constant_keyword");
b.field("value", "foo");
}), fieldMapping(b -> {
b.field("type", "constant_keyword");
b.field("value", "bar");
}));
}
@Override
protected String generateRandomInputValue(MappedFieldType ft) {
return ((ConstantKeywordFieldType) ft).value();
}
@Override
protected void randomFetchTestFieldConfig(XContentBuilder b) throws IOException {
b.field("type", "constant_keyword").field("value", randomAlphaOfLengthBetween(1, 10));
}
@Override
protected boolean allowsNullValues() {
return false; // null is an error for constant keyword
}
/**
* Test loading blocks when there is no defined value. This is allowed
* for newly created indices that haven't received any documents that
* contain the field.
*/
public void testNullValueBlockLoader() throws IOException {
MapperService mapper = createSytheticSourceMapperService(mapping(b -> {
b.startObject("field");
b.field("type", "constant_keyword");
b.endObject();
}));
BlockLoader loader = mapper.fieldType("field").blockLoader(new MappedFieldType.BlockLoaderContext() {
@Override
public String indexName() {
throw new UnsupportedOperationException();
}
@Override
public IndexSettings indexSettings() {
throw new UnsupportedOperationException();
}
@Override
public MappedFieldType.FieldExtractPreference fieldExtractPreference() {
return MappedFieldType.FieldExtractPreference.NONE;
}
@Override
public SearchLookup lookup() {
throw new UnsupportedOperationException();
}
@Override
public Set<String> sourcePaths(String name) {
return mapper.mappingLookup().sourcePaths(name);
}
@Override
public String parentField(String field) {
throw new UnsupportedOperationException();
}
@Override
public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
return FieldNamesFieldMapper.FieldNamesFieldType.get(true);
}
});
try (Directory directory = newDirectory()) {
RandomIndexWriter iw = new RandomIndexWriter(random(), directory);
LuceneDocument doc = mapper.documentMapper().parse(source(b -> {})).rootDoc();
iw.addDocument(doc);
iw.close();
try (DirectoryReader reader = DirectoryReader.open(directory)) {
TestBlock block = (TestBlock) loader.columnAtATimeReader(reader.leaves().get(0))
.read(TestBlock.factory(), new BlockLoader.Docs() {
@Override
public int count() {
return 1;
}
@Override
public int get(int i) {
return 0;
}
}, 0, false);
assertThat(block.get(0), nullValue());
}
}
}
@Override
protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) {
assertFalse("constant_keyword doesn't support ignore_malformed", ignoreMalformed);
String value = randomUnicodeOfLength(5);
return new SyntheticSourceSupport() {
@Override
public SyntheticSourceExample example(int maxValues) {
return new SyntheticSourceExample(value, value, b -> {
b.field("type", "constant_keyword");
b.field("value", value);
});
}
@Override
public List<SyntheticSourceInvalidExample> invalidExample() throws IOException {
throw new AssumptionViolatedException("copy_to on constant_keyword not supported");
}
};
}
@Override
protected IngestScriptSupport ingestScriptSupport() {
throw new AssumptionViolatedException("not supported");
}
public void testNullValueSyntheticSource() throws IOException {
DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> {
b.startObject("field");
b.field("type", "constant_keyword");
b.endObject();
})).documentMapper();
assertThat(syntheticSource(mapper, b -> {}), equalTo("{}"));
}
public void testNoValueInDocumentSyntheticSource() throws IOException {
DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> {
b.startObject("field");
b.field("type", "constant_keyword");
b.field("value", randomAlphaOfLength(5));
b.endObject();
})).documentMapper();
assertThat(syntheticSource(mapper, b -> {}), equalTo("{}"));
}
@Override
protected boolean supportsEmptyInputArray() {
return false;
}
@Override
protected boolean addsValueWhenNotSupplied() {
return true;
}
@Override
protected List<SortShortcutSupport> getSortShortcutSupport() {
return List.of(
// TODO this should surely be able to support pruning
new SortShortcutSupport(this::minimalMapping, this::writeField, false)
);
}
@Override
protected boolean supportsDocValuesSkippers() {
return false;
}
}
| ConstantKeywordFieldMapperTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/engine/PrunePostingsMergePolicy.java | {
"start": 2026,
"end": 6744
} | class ____ extends OneMergeWrappingMergePolicy {
PrunePostingsMergePolicy(MergePolicy in, String idField) {
super(in, toWrap -> new OneMerge(toWrap.segments) {
@Override
public CodecReader wrapForMerge(CodecReader reader) throws IOException {
CodecReader wrapped = toWrap.wrapForMerge(reader);
return wrapReader(wrapped, idField);
}
});
}
private static CodecReader wrapReader(CodecReader reader, String idField) {
Bits liveDocs = reader.getLiveDocs();
if (liveDocs == null) {
return reader; // no deleted docs - we are good!
}
final boolean fullyDeletedSegment = reader.numDocs() == 0;
return new FilterCodecReader(reader) {
@Override
public FieldsProducer getPostingsReader() {
FieldsProducer postingsReader = super.getPostingsReader();
if (postingsReader == null) {
return null;
}
return new FieldsProducer() {
@Override
public void close() throws IOException {
postingsReader.close();
}
@Override
public void checkIntegrity() throws IOException {
postingsReader.checkIntegrity();
}
@Override
public Iterator<String> iterator() {
return postingsReader.iterator();
}
@Override
public Terms terms(String field) throws IOException {
Terms in = postingsReader.terms(field);
if (idField.equals(field) && in != null) {
return new FilterLeafReader.FilterTerms(in) {
@Override
public TermsEnum iterator() throws IOException {
TermsEnum iterator = super.iterator();
return new FilteredTermsEnum(iterator, false) {
private PostingsEnum internal;
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
if (fullyDeletedSegment) {
return AcceptStatus.END; // short-cut this if we don't match anything
}
internal = postings(internal, PostingsEnum.NONE);
if (internal.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
return AcceptStatus.YES;
}
return AcceptStatus.NO;
}
@Override
public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException {
if (reuse instanceof OnlyLiveDocsPostingsEnum reuseInstance) {
reuseInstance.reset(super.postings(reuseInstance.in, flags));
return reuseInstance;
}
return new OnlyLiveDocsPostingsEnum(super.postings(null, flags), liveDocs);
}
@Override
public ImpactsEnum impacts(int flags) throws IOException {
throw new UnsupportedOperationException();
}
};
}
};
} else {
return in;
}
}
@Override
public int size() {
return postingsReader.size();
}
};
}
@Override
public CacheHelper getCoreCacheHelper() {
return null;
}
@Override
public CacheHelper getReaderCacheHelper() {
return null;
}
};
}
private static final | PrunePostingsMergePolicy |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/event/ApplicationListenerMethodAdapterTests.java | {
"start": 22903,
"end": 23175
} | class ____<V, T> extends PayloadApplicationEvent<T> {
private final V something;
public PayloadTestEvent(Object source, T payload, V something) {
super(source, payload);
this.something = something;
}
}
@SuppressWarnings({ "serial" })
static | PayloadTestEvent |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/StringFieldTest_special_singquote.java | {
"start": 221,
"end": 1988
} | class ____ extends TestCase {
public void test_special() throws Exception {
Model model = new Model();
StringBuilder buf = new StringBuilder();
for (int i = Character.MIN_VALUE; i < Character.MAX_VALUE; ++i) {
buf.append((char) i);
}
model.name = buf.toString();
StringWriter writer = new StringWriter();
JSON.writeJSONString(writer, model);
Model model2 = JSON.parseObject(writer.toString(), Model.class);
Assert.assertEquals(model.name, model2.name);
}
public void test_special_browsecue() throws Exception {
Model model = new Model();
StringBuilder buf = new StringBuilder();
for (int i = Character.MIN_VALUE; i < Character.MAX_VALUE; ++i) {
buf.append((char) i);
}
model.name = buf.toString();
StringWriter writer = new StringWriter();
JSON.writeJSONString(writer, model, SerializerFeature.UseSingleQuotes);
Model model2 = JSON.parseObject(writer.toString(), Model.class);
Assert.assertEquals(model.name, model2.name);
}
public void test_special_browsecompatible() throws Exception {
Model model = new Model();
StringBuilder buf = new StringBuilder();
for (int i = Character.MIN_VALUE; i < Character.MAX_VALUE; ++i) {
buf.append((char) i);
}
model.name = buf.toString();
StringWriter writer = new StringWriter();
JSON.writeJSONString(writer, model, SerializerFeature.UseSingleQuotes);
Model model2 = JSON.parseObject(writer.toString(), Model.class);
Assert.assertEquals(model.name, model2.name);
}
private static | StringFieldTest_special_singquote |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/StringBufferFieldTest.java | {
"start": 296,
"end": 2336
} | class ____ extends TestCase {
public void test_codec_null() throws Exception {
V0 v = new V0();
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
String text = JSON.toJSONString(v, mapping, SerializerFeature.WriteMapNullValue);
Assert.assertEquals("{\"value\":null}", text);
ParserConfig config = new ParserConfig();
config.setAsmEnable(false);
V0 v1 = JSON.parseObject(text, V0.class, config, JSON.DEFAULT_PARSER_FEATURE);
Assert.assertEquals(v1.getValue(), v.getValue());
}
public void test_codec_null_1() throws Exception {
V0 v = new V0();
SerializeConfig mapping = new SerializeConfig();
mapping.setAsmEnable(false);
String text = JSON.toJSONString(v, mapping, SerializerFeature.WriteMapNullValue,
SerializerFeature.WriteNullStringAsEmpty);
Assert.assertEquals("{\"value\":\"\"}", text);
}
public void test_deserialize_1() throws Exception {
String json = "{\"value\":\"\"}";
V0 vo = JSON.parseObject(json, V0.class);
Assert.assertNotNull(vo.getValue());
Assert.assertEquals("", vo.getValue().toString());
}
public void test_deserialize_2() throws Exception {
String json = "{\"value\":null}";
V0 vo = JSON.parseObject(json, V0.class);
Assert.assertNull(vo.getValue());
}
public void test_deserialize_3() throws Exception {
String json = "{\"value\":\"true\"}";
V0 vo = JSON.parseObject(json, V0.class);
Assert.assertNotNull(vo.getValue());
Assert.assertEquals("true", vo.getValue().toString());
}
public void test_deserialize_4() throws Exception {
String json = "{\"value\":\"123\"}";
V0 vo = JSON.parseObject(json, V0.class);
Assert.assertNotNull(vo.getValue());
Assert.assertEquals("123", vo.getValue().toString());
}
public static | StringBufferFieldTest |
java | apache__rocketmq | tieredstore/src/main/java/org/apache/rocketmq/tieredstore/provider/FileSegment.java | {
"start": 1735,
"end": 12598
} | class ____ implements Comparable<FileSegment>, FileSegmentProvider {
private static final Logger log = LoggerFactory.getLogger(MessageStoreUtil.TIERED_STORE_LOGGER_NAME);
protected static final Long GET_FILE_SIZE_ERROR = -1L;
protected final long baseOffset;
protected final String filePath;
protected final FileSegmentType fileType;
protected final MessageStoreConfig storeConfig;
protected final long maxSize;
protected final MessageStoreExecutor executor;
protected final ReentrantLock fileLock = new ReentrantLock();
protected final Semaphore commitLock = new Semaphore(1);
protected volatile boolean closed = false;
protected volatile long minTimestamp = Long.MAX_VALUE;
protected volatile long maxTimestamp = Long.MAX_VALUE;
protected volatile long commitPosition = 0L;
protected volatile long appendPosition = 0L;
protected volatile List<ByteBuffer> bufferList = new ArrayList<>();
protected volatile FileSegmentInputStream fileSegmentInputStream;
protected volatile CompletableFuture<Boolean> flightCommitRequest;
public FileSegment(MessageStoreConfig storeConfig, FileSegmentType fileType,
String filePath, long baseOffset, MessageStoreExecutor executor) {
this.storeConfig = storeConfig;
this.fileType = fileType;
this.filePath = filePath;
this.baseOffset = baseOffset;
this.executor = executor;
this.maxSize = this.getMaxSizeByFileType();
}
@Override
public int compareTo(FileSegment o) {
return Long.compare(this.baseOffset, o.baseOffset);
}
public long getBaseOffset() {
return baseOffset;
}
public void initPosition(long pos) {
fileLock.lock();
try {
this.commitPosition = pos;
this.appendPosition = pos;
} finally {
fileLock.unlock();
}
}
public long getCommitPosition() {
return commitPosition;
}
public long getAppendPosition() {
return appendPosition;
}
public long getCommitOffset() {
return baseOffset + commitPosition;
}
public long getAppendOffset() {
return baseOffset + appendPosition;
}
public FileSegmentType getFileType() {
return fileType;
}
public long getMaxSizeByFileType() {
switch (fileType) {
case COMMIT_LOG:
return storeConfig.getTieredStoreCommitLogMaxSize();
case CONSUME_QUEUE:
return storeConfig.getTieredStoreConsumeQueueMaxSize();
case INDEX:
default:
return Long.MAX_VALUE;
}
}
public long getMaxSize() {
return maxSize;
}
public long getMinTimestamp() {
return minTimestamp;
}
public void setMinTimestamp(long minTimestamp) {
this.minTimestamp = minTimestamp;
}
public long getMaxTimestamp() {
return maxTimestamp;
}
public void setMaxTimestamp(long maxTimestamp) {
this.maxTimestamp = maxTimestamp;
}
public boolean isClosed() {
return closed;
}
public void close() {
fileLock.lock();
try {
this.closed = true;
} finally {
fileLock.unlock();
}
}
protected List<ByteBuffer> borrowBuffer() {
List<ByteBuffer> temp;
fileLock.lock();
try {
temp = bufferList;
bufferList = new ArrayList<>();
} finally {
fileLock.unlock();
}
return temp;
}
@SuppressWarnings("NonAtomicOperationOnVolatileField")
protected void updateTimestamp(long timestamp) {
fileLock.lock();
try {
if (maxTimestamp == Long.MAX_VALUE && minTimestamp == Long.MAX_VALUE) {
maxTimestamp = timestamp;
minTimestamp = timestamp;
return;
}
maxTimestamp = Math.max(maxTimestamp, timestamp);
minTimestamp = Math.min(minTimestamp, timestamp);
} finally {
fileLock.unlock();
}
}
@SuppressWarnings("NonAtomicOperationOnVolatileField")
public AppendResult append(ByteBuffer buffer, long timestamp) {
fileLock.lock();
try {
if (closed) {
return AppendResult.FILE_CLOSED;
}
if (appendPosition + buffer.remaining() > maxSize) {
return AppendResult.FILE_FULL;
}
if (bufferList.size() >= storeConfig.getTieredStoreMaxGroupCommitCount()) {
return AppendResult.BUFFER_FULL;
}
this.appendPosition += buffer.remaining();
this.bufferList.add(buffer);
this.updateTimestamp(timestamp);
} finally {
fileLock.unlock();
}
return AppendResult.SUCCESS;
}
public boolean needCommit() {
return appendPosition > commitPosition;
}
@SuppressWarnings("NonAtomicOperationOnVolatileField")
public CompletableFuture<Boolean> commitAsync() {
if (closed) {
return CompletableFuture.completedFuture(false);
}
if (!needCommit()) {
return CompletableFuture.completedFuture(true);
}
// acquire lock
if (commitLock.drainPermits() <= 0) {
return CompletableFuture.completedFuture(false);
}
// handle last commit error
if (fileSegmentInputStream != null) {
long fileSize = this.getSize();
if (fileSize == GET_FILE_SIZE_ERROR) {
log.error("FileSegment correct position error, fileName={}, commit={}, append={}, buffer={}",
this.getPath(), commitPosition, appendPosition, fileSegmentInputStream.getContentLength());
releaseCommitLock();
return CompletableFuture.completedFuture(false);
}
if (correctPosition(fileSize)) {
fileSegmentInputStream = null;
}
}
int bufferSize;
if (fileSegmentInputStream != null) {
fileSegmentInputStream.rewind();
bufferSize = fileSegmentInputStream.available();
} else {
List<ByteBuffer> bufferList = this.borrowBuffer();
bufferSize = bufferList.stream().mapToInt(ByteBuffer::remaining).sum();
if (bufferSize == 0) {
releaseCommitLock();
return CompletableFuture.completedFuture(true);
}
fileSegmentInputStream = FileSegmentInputStreamFactory.build(
fileType, this.getCommitOffset(), bufferList, null, bufferSize);
}
boolean append = fileType != FileSegmentType.INDEX;
return flightCommitRequest =
this.commit0(fileSegmentInputStream, commitPosition, bufferSize, append)
.thenApply(result -> {
if (result) {
commitPosition += bufferSize;
fileSegmentInputStream = null;
return true;
} else {
fileSegmentInputStream.rewind();
return false;
}
})
.exceptionally(this::handleCommitException)
.whenComplete((result, e) -> releaseCommitLock());
}
private boolean handleCommitException(Throwable e) {
log.warn("FileSegment commit exception, filePath={}", this.filePath, e);
// Get root cause here
Throwable rootCause = e.getCause() != null ? e.getCause() : e;
long fileSize = rootCause instanceof TieredStoreException ?
((TieredStoreException) rootCause).getPosition() : this.getSize();
long expectPosition = commitPosition + fileSegmentInputStream.getContentLength();
if (fileSize == GET_FILE_SIZE_ERROR) {
log.error("Get file size error after commit, FileName: {}, Commit: {}, Content: {}, Expect: {}, Append: {}",
this.getPath(), commitPosition, fileSegmentInputStream.getContentLength(), expectPosition, appendPosition);
return false;
}
if (correctPosition(fileSize)) {
fileSegmentInputStream = null;
return true;
} else {
fileSegmentInputStream.rewind();
return false;
}
}
private void releaseCommitLock() {
if (commitLock.availablePermits() == 0) {
commitLock.release();
}
}
/**
* return true to clear buffer
*/
private boolean correctPosition(long fileSize) {
// Current we have three offsets here: commit offset, expect offset, file size.
// We guarantee that the commit offset is less than or equal to the expect offset.
// Max offset will increase because we can continuously put in new buffers
// We are believing that the file size returned by the server is correct,
// can reset the commit offset to the file size reported by the storage system.
long expectPosition = commitPosition + fileSegmentInputStream.getContentLength();
commitPosition = fileSize;
return expectPosition == fileSize;
}
public ByteBuffer read(long position, int length) {
return readAsync(position, length).join();
}
public CompletableFuture<ByteBuffer> readAsync(long position, int length) {
CompletableFuture<ByteBuffer> future = new CompletableFuture<>();
if (position < 0 || position >= commitPosition) {
future.completeExceptionally(new TieredStoreException(TieredStoreErrorCode.ILLEGAL_PARAM,
String.format("FileSegment read position illegal, filePath=%s, fileType=%s, position=%d, length=%d, commit=%d",
filePath, fileType, position, length, commitPosition)));
return future;
}
if (length <= 0) {
future.completeExceptionally(new TieredStoreException(TieredStoreErrorCode.ILLEGAL_PARAM,
String.format("FileSegment read length illegal, filePath=%s, fileType=%s, position=%d, length=%d, commit=%d",
filePath, fileType, position, length, commitPosition)));
return future;
}
int readableBytes = (int) (commitPosition - position);
if (readableBytes < length) {
length = readableBytes;
log.debug("FileSegment expect request position is greater than commit position, " +
"file: {}, request position: {}, commit position: {}, change length from {} to {}",
getPath(), position, commitPosition, length, readableBytes);
}
return this.read0(position, length);
}
}
| FileSegment |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java | {
"start": 889,
"end": 3078
} | class ____ extends ESTestCase {
private static final String JOB_ID = "foo";
private static final String INDEX_NAME = "foo-index";
private static final String QUANTILES_STATE = "someState";
private static final int BUCKET_SPAN = 600;
private static final double INITIAL_SCORE = 3.0;
private static final double FACTOR = 2.0;
private Bucket generateBucket(Date timestamp) {
return new Bucket(JOB_ID, timestamp, BUCKET_SPAN);
}
private BucketInfluencer createTimeBucketInfluencer(Date timestamp, double probability, double anomalyScore) {
BucketInfluencer influencer = new BucketInfluencer(JOB_ID, timestamp, BUCKET_SPAN);
influencer.setInfluencerFieldName(BucketInfluencer.BUCKET_TIME);
influencer.setProbability(probability);
influencer.setInitialAnomalyScore(anomalyScore);
influencer.setAnomalyScore(anomalyScore);
return influencer;
}
public void testNormalize() throws IOException, InterruptedException {
ExecutorService threadpool = Executors.newScheduledThreadPool(1);
try {
NormalizerProcessFactory processFactory = mock(NormalizerProcessFactory.class);
when(processFactory.createNormalizerProcess(eq(JOB_ID), eq(QUANTILES_STATE), eq(BUCKET_SPAN), any())).thenReturn(
new MultiplyingNormalizerProcess(FACTOR)
);
Normalizer normalizer = new Normalizer(JOB_ID, processFactory, threadpool);
Bucket bucket = generateBucket(new Date(0));
bucket.setAnomalyScore(0.0);
bucket.addBucketInfluencer(createTimeBucketInfluencer(bucket.getTimestamp(), 0.07, INITIAL_SCORE));
List<Normalizable> asNormalizables = Arrays.asList(new BucketNormalizable(bucket, INDEX_NAME));
normalizer.normalize(BUCKET_SPAN, asNormalizables, QUANTILES_STATE);
assertEquals(1, asNormalizables.size());
assertEquals(FACTOR * INITIAL_SCORE, asNormalizables.get(0).getNormalizedScore(), 0.0001);
} finally {
threadpool.shutdown();
}
assertTrue(threadpool.awaitTermination(1, TimeUnit.SECONDS));
}
}
| NormalizerTests |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/validation/support/BindingAwareConcurrentModel.java | {
"start": 1207,
"end": 1585
} | interface ____ a parameter type.
* There is typically no need to create it within user code.
* If necessary a handler method can return a regular {@code java.util.Map},
* likely a {@code java.util.ConcurrentMap}, for a pre-determined model.
*
* @author Rossen Stoyanchev
* @since 5.0
* @see BindingResult
* @see BindingAwareModelMap
*/
@SuppressWarnings("serial")
public | as |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/MultipleNullnessAnnotationsTest.java | {
"start": 4223,
"end": 4782
} | class ____<X> {
// BUG: Diagnostic contains:
@NullableDecl @NonNull X f;
@NullableDecl
// BUG: Diagnostic contains:
@NonNull X g() {
return null;
}
}
""")
.doTest();
}
@Test
public void arrayTypeAnnotation() {
testHelper
.addSourceLines(
"T.java",
"""
import javax.annotation.CheckForNull;
import org.checkerframework.checker.nullness.qual.NonNull;
| T |
java | quarkusio__quarkus | test-framework/common/src/main/java/io/quarkus/test/common/IntegrationTestStartedNotifier.java | {
"start": 755,
"end": 810
} | interface ____ {
Path logFile();
}
| Context |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/vectors/es816/ES816HnswBinaryQuantizedRWVectorsFormat.java | {
"start": 1511,
"end": 2464
} | class ____ extends ES816HnswBinaryQuantizedVectorsFormat {
private static final FlatVectorsFormat flatVectorsFormat = new ES816BinaryQuantizedRWVectorsFormat();
/** Constructs a format using default graph construction parameters */
ES816HnswBinaryQuantizedRWVectorsFormat() {
this(DEFAULT_MAX_CONN, DEFAULT_BEAM_WIDTH);
}
ES816HnswBinaryQuantizedRWVectorsFormat(int maxConn, int beamWidth) {
this(maxConn, beamWidth, DEFAULT_NUM_MERGE_WORKER, null);
}
ES816HnswBinaryQuantizedRWVectorsFormat(int maxConn, int beamWidth, int numMergeWorkers, ExecutorService mergeExec) {
super(maxConn, beamWidth, numMergeWorkers, mergeExec);
}
@Override
public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException {
return new Lucene99HnswVectorsWriter(state, maxConn, beamWidth, flatVectorsFormat.fieldsWriter(state), 1, null);
}
}
| ES816HnswBinaryQuantizedRWVectorsFormat |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/AdapterMethodsTest.java | {
"start": 16576,
"end": 19294
} | class ____ {
@ToJson
void pointToJson(JsonWriter writer, Point point) throws Exception {
if (point != null) throw new Exception("pointToJson fail!");
writer.nullValue();
}
@FromJson
Point pointFromJson(JsonReader reader) throws Exception {
if (reader.peek() == JsonReader.Token.NULL) return reader.nextNull();
throw new Exception("pointFromJson fail!");
}
}
@Test
public void adapterDoesToJsonOnly() throws Exception {
Object shapeToJsonAdapter =
new Object() {
@ToJson
String shapeToJson(Shape shape) {
throw new AssertionError();
}
};
Moshi toJsonMoshi = new Moshi.Builder().add(shapeToJsonAdapter).build();
try {
toJsonMoshi.adapter(Shape.class);
fail();
} catch (IllegalArgumentException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo(
"No @FromJson adapter for interface "
+ "com.squareup.moshi.AdapterMethodsTest$Shape (with no annotations)");
assertThat(e).hasCauseThat().isInstanceOf(IllegalArgumentException.class);
assertThat(e.getCause())
.hasMessageThat()
.isEqualTo(
"No next JsonAdapter for interface "
+ "com.squareup.moshi.AdapterMethodsTest$Shape (with no annotations)");
}
}
@Test
public void adapterDoesFromJsonOnly() throws Exception {
Object shapeFromJsonAdapter =
new Object() {
@FromJson
Shape shapeFromJson(String shape) {
throw new AssertionError();
}
};
Moshi fromJsonMoshi = new Moshi.Builder().add(shapeFromJsonAdapter).build();
try {
fromJsonMoshi.adapter(Shape.class);
fail();
} catch (IllegalArgumentException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo(
"No @ToJson adapter for interface "
+ "com.squareup.moshi.AdapterMethodsTest$Shape (with no annotations)");
assertThat(e).hasCauseThat().isInstanceOf(IllegalArgumentException.class);
assertThat(e.getCause())
.hasMessageThat()
.isEqualTo(
"No next JsonAdapter for interface "
+ "com.squareup.moshi.AdapterMethodsTest$Shape (with no annotations)");
}
}
/**
* Unfortunately in some versions of Android the implementations of {@link ParameterizedType}
* doesn't implement equals and hashCode. Confirm that we work around that.
*/
@Test
public void parameterizedTypeEqualsNotUsed() throws Exception {
Moshi moshi = new Moshi.Builder().add(new ListOfStringJsonAdapter()).build();
// This | ExceptionThrowingPointJsonAdapter |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/xmlmetacomplete/multiplepus/XmlMetaDataCompleteMultiplePersistenceUnitsTest.java | {
"start": 661,
"end": 1125
} | class ____ {
@Test
@WithClasses(Dummy.class)
@WithProcessorOption(key = HibernateProcessor.PERSISTENCE_XML_OPTION,
value = "org/hibernate/processor/test/xmlmetacomplete/multiplepus/persistence.xml")
void testMetaModelGenerated() {
// only one of the xml files in the example uses 'xml-mapping-metadata-complete', hence annotation processing
// kicks in
assertMetamodelClassGeneratedFor( Dummy.class );
}
}
| XmlMetaDataCompleteMultiplePersistenceUnitsTest |
java | apache__camel | dsl/camel-jbang/camel-jbang-core/src/main/java/org/apache/camel/dsl/jbang/core/common/SourceHelper.java | {
"start": 1511,
"end": 7684
} | class ____ {
private static final String[] ACCEPTED_FILE_EXT
= new String[] { "java", "xml", "yaml" };
public static Source resolveSource(String source) {
List<Source> resolved = resolveSources(Collections.singletonList(source));
if (resolved.isEmpty()) {
throw new RuntimeCamelException("Failed to resolve source file: " + source);
} else {
return resolved.get(0);
}
}
public static List<Source> resolveSources(List<String> sources) {
return resolveSources(sources, false);
}
public static List<Source> resolveSources(List<String> sources, boolean compression) {
List<Source> resolved = new ArrayList<>();
for (String source : sources) {
SourceScheme sourceScheme = SourceScheme.fromUri(source);
String fileExtension = FileUtil.onlyExt(source, true);
String fileName = SourceScheme.onlyName(FileUtil.onlyName(source));
if (fileExtension != null) {
fileName = fileName + "." + fileExtension;
}
try {
switch (sourceScheme) {
case GIST -> {
StringJoiner all = new StringJoiner(",");
GistHelper.fetchGistUrls(source, all);
try (ResourceResolver resolver = new GistResourceResolver()) {
for (String uri : all.toString().split(",")) {
resolved.add(new Source(
sourceScheme,
FileUtil.stripPath(uri),
IOHelper.loadText(resolver.resolve(uri).getInputStream()),
FileUtil.onlyExt(uri), compression));
}
}
}
case HTTP -> {
try (ResourceResolver resolver = new DefaultResourceResolvers.HttpResolver()) {
resolved.add(new Source(
sourceScheme,
fileName,
IOHelper.loadText(resolver.resolve(source).getInputStream()),
fileExtension, compression));
}
}
case HTTPS -> {
try (ResourceResolver resolver = new DefaultResourceResolvers.HttpsResolver()) {
resolved.add(new Source(
sourceScheme,
fileName,
IOHelper.loadText(resolver.resolve(source).getInputStream()),
fileExtension, compression));
}
}
case FILE -> {
try (ResourceResolver resolver = new DefaultResourceResolvers.FileResolver()) {
resolved.add(new Source(
sourceScheme,
fileName,
IOHelper.loadText(resolver.resolve(source).getInputStream()),
fileExtension, compression));
}
}
case CLASSPATH -> {
try (ResourceResolver resolver = new DefaultResourceResolvers.ClasspathResolver()) {
resolver.setCamelContext(new DefaultCamelContext());
resolved.add(new Source(
sourceScheme,
fileName,
IOHelper.loadText(resolver.resolve(source).getInputStream()),
fileExtension, compression));
}
}
case GITHUB, RAW_GITHUB -> {
StringJoiner all = new StringJoiner(",");
GitHubHelper.fetchGithubUrls(source, all);
try (ResourceResolver resolver = new GitHubResourceResolver()) {
for (String uri : all.toString().split(",")) {
resolved.add(new Source(
sourceScheme,
FileUtil.stripPath(uri),
IOHelper.loadText(resolver.resolve(uri).getInputStream()),
FileUtil.onlyExt(uri), compression));
}
}
}
case UNKNOWN -> {
if (isAcceptedSourceFile(fileExtension)) {
Path sourcePath = Paths.get(source);
if (!Files.exists(sourcePath)) {
throw new FileNotFoundException("Source file '%s' does not exist".formatted(source));
}
if (!Files.isDirectory(sourcePath)) {
try (var is = Files.newInputStream(sourcePath)) {
resolved.add(
new Source(
sourceScheme,
fileName,
IOHelper.loadText(is),
fileExtension, compression));
}
}
}
}
}
} catch (Exception e) {
throw new RuntimeCamelException("Failed to resolve sources", e);
}
}
return resolved;
}
public static boolean isAcceptedSourceFile(String fileExt) {
return Arrays.stream(ACCEPTED_FILE_EXT).anyMatch(e -> e.equalsIgnoreCase(fileExt));
}
}
| SourceHelper |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/bugs/creation/MockClassWithMissingStaticDepTest.java | {
"start": 2770,
"end": 3105
} | class ____.");
} catch (ExceptionInInitializerError ex) {
// Note: mock-maker-subclass will throw the ExceptionInInitializerError as is
var cause = ex.getCause();
assertThat(cause).isInstanceOf(IllegalStateException.class);
assertThat(cause.getMessage()).isEqualTo("Exception in | init |
java | apache__maven | compat/maven-compat/src/test/java/org/apache/maven/AbstractCoreMavenComponentTestCase.java | {
"start": 9629,
"end": 12348
} | class ____ {
private MavenProject project;
public ProjectBuilder(MavenProject project) {
this.project = project;
}
public ProjectBuilder(String groupId, String artifactId, String version) {
Model model = new Model();
model.setModelVersion("4.0.0");
model.setGroupId(groupId);
model.setArtifactId(artifactId);
model.setVersion(version);
model.setBuild(new Build());
project = new MavenProject(model);
}
public ProjectBuilder setGroupId(String groupId) {
project.setGroupId(groupId);
return this;
}
public ProjectBuilder setArtifactId(String artifactId) {
project.setArtifactId(artifactId);
return this;
}
public ProjectBuilder setVersion(String version) {
project.setVersion(version);
return this;
}
// Dependencies
//
public ProjectBuilder addDependency(String groupId, String artifactId, String version, String scope) {
return addDependency(groupId, artifactId, version, scope, (Exclusion) null);
}
public ProjectBuilder addDependency(
String groupId, String artifactId, String version, String scope, Exclusion exclusion) {
return addDependency(groupId, artifactId, version, scope, null, exclusion);
}
public ProjectBuilder addDependency(
String groupId, String artifactId, String version, String scope, String systemPath) {
return addDependency(groupId, artifactId, version, scope, systemPath, null);
}
public ProjectBuilder addDependency(
String groupId,
String artifactId,
String version,
String scope,
String systemPath,
Exclusion exclusion) {
Dependency d = new Dependency();
d.setGroupId(groupId);
d.setArtifactId(artifactId);
d.setVersion(version);
d.setScope(scope);
if (systemPath != null && scope.equals(Artifact.SCOPE_SYSTEM)) {
d.setSystemPath(systemPath);
}
if (exclusion != null) {
d.addExclusion(exclusion);
}
project.getDependencies().add(d);
return this;
}
// Plugins
//
public ProjectBuilder addPlugin(Plugin plugin) {
project.getBuildPlugins().add(plugin);
return this;
}
public MavenProject get() {
return project;
}
}
}
| ProjectBuilder |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedCounter.java | {
"start": 1108,
"end": 1948
} | class ____ implements ManagedCounterMBean {
protected Statistic exchangesTotal;
protected Date startTimestamp;
protected Date resetTimestamp;
public void init(ManagementStrategy strategy) {
this.exchangesTotal = new StatisticCounter();
Date now = new Date();
startTimestamp = now;
resetTimestamp = now;
}
@Override
public void reset() {
exchangesTotal.reset();
resetTimestamp = new Date();
}
@Override
public Date getStartTimestamp() {
return startTimestamp;
}
@Override
public Date getResetTimestamp() {
return resetTimestamp;
}
@Override
public long getExchangesTotal() {
return exchangesTotal.getValue();
}
public void increment() {
exchangesTotal.increment();
}
}
| ManagedCounter |
java | netty__netty | handler/src/test/java/io/netty/handler/stream/ChunkedWriteHandlerTest.java | {
"start": 26736,
"end": 27679
} | class ____ implements ChunkedInput<ByteBuf> {
private final Exception error;
private volatile boolean closed;
ThrowingChunkedInput(Exception error) {
this.error = error;
}
@Override
public boolean isEndOfInput() {
return false;
}
@Override
public void close() {
closed = true;
}
@Override
public ByteBuf readChunk(ChannelHandlerContext ctx) throws Exception {
return readChunk(ctx.alloc());
}
@Override
public ByteBuf readChunk(ByteBufAllocator allocator) throws Exception {
throw error;
}
@Override
public long length() {
return -1;
}
@Override
public long progress() {
return -1;
}
boolean isClosed() {
return closed;
}
}
}
| ThrowingChunkedInput |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedAConcurrentThrottlerTest.java | {
"start": 1571,
"end": 4664
} | class ____ extends AbstractManagedThrottlerTest {
@Test
public void testManageThrottler() throws Exception {
final Long total = super.runTestManageThrottler();
// 10 * delay (100) + tolerance (200)
assertTrue(total < 1200, "Should take at most 1.2 sec: was " + total);
}
@DisabledOnOs(OS.WINDOWS)
@Test
public void testThrottleVisibleViaJmx() throws Exception {
super.runTestThrottleAsyncVisibleViaJmx();
}
@Override
protected RouteBuilder createRouteBuilder() {
final ScheduledExecutorService badService = new ScheduledThreadPoolExecutor(1) {
@Override
public <V> ScheduledFuture<V> schedule(Callable<V> command, long delay, TimeUnit unit) {
throw new RejectedExecutionException();
}
};
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").id("route1")
.to("log:foo")
.throttle(10).mode("ConcurrentRequests").id("mythrottler")
.delay(100)
.to("mock:result");
from("seda:throttleCount").id("route2")
.throttle(1).concurrentRequestsMode().id("mythrottler2").delay(250)
.to("mock:end");
from("seda:throttleCountAsync").id("route3")
.throttle(1).concurrentRequestsMode().asyncDelayed().id("mythrottler3").delay(250)
.to("mock:endAsync");
from("seda:throttleCountAsyncException").id("route4")
.throttle(1).concurrentRequestsMode().asyncDelayed().id("mythrottler4").delay(250)
.to("mock:endAsyncException")
.process(exchange -> {
throw new RuntimeException("Fail me");
});
from("seda:throttleCountRejectExecutionCallerRuns").id("route5")
.onException(RejectedExecutionException.class).to("mock:rejectedExceptionEndpoint1").end()
.throttle(1).concurrentRequestsMode()
.asyncDelayed()
.executorService(badService)
.callerRunsWhenRejected(true)
.id("mythrottler5")
.delay(250)
.to("mock:endAsyncRejectCallerRuns");
from("seda:throttleCountRejectExecution").id("route6")
.onException(RejectedExecutionException.class).to("mock:rejectedExceptionEndpoint1").end()
.throttle(1).concurrentRequestsMode()
.asyncDelayed()
.executorService(badService)
.callerRunsWhenRejected(false)
.id("mythrottler6")
.delay(250)
.to("mock:endAsyncReject");
}
};
}
}
| ManagedAConcurrentThrottlerTest |
java | apache__camel | components/camel-mllp/src/test/java/org/apache/camel/component/mllp/MllpTcpServerConsumerTransactionTest.java | {
"start": 1856,
"end": 5402
} | class ____ extends CamelTestSupport {
@RegisterExtension
public static ArtemisService service = ArtemisServiceFactory.createVMService();
@RegisterExtension
public MllpClientResource mllpClient = new MllpClientResource();
@EndpointInject("mock://result")
MockEndpoint result;
@EndpointInject("mock://on-complete-only")
MockEndpoint complete;
@EndpointInject("mock://on-failure-only")
MockEndpoint failure;
@Override
protected CamelContext createCamelContext() throws Exception {
DefaultCamelContext context = (DefaultCamelContext) super.createCamelContext();
context.setUseMDCLogging(true);
context.getCamelContextExtension().setName(this.getClass().getSimpleName());
return context;
}
@BindToRegistry("target")
public SjmsComponent addTargetComponent() {
SjmsComponent target = new SjmsComponent();
target.setConnectionFactory(new ActiveMQConnectionFactory(service.serviceAddress()));
return target;
}
@Override
protected RouteBuilder createRouteBuilder() {
mllpClient.setMllpHost("localhost");
mllpClient.setMllpPort(AvailablePortFinder.getNextAvailable());
return new RouteBuilder() {
int connectTimeout = 500;
int responseTimeout = 5000;
@Override
public void configure() {
String routeId = "mllp-test-receiver-route";
onCompletion()
.onCompleteOnly()
.log(LoggingLevel.INFO, routeId, "Test route complete")
.to(complete);
onCompletion()
.onFailureOnly()
.log(LoggingLevel.INFO, routeId, "Test route failed")
.to(failure);
fromF("mllp://%s:%d?autoAck=true&connectTimeout=%d&receiveTimeout=%d",
mllpClient.getMllpHost(), mllpClient.getMllpPort(), connectTimeout, responseTimeout)
.routeId(routeId)
.log(LoggingLevel.INFO, routeId, "Test route received message")
.to("target://test-queue?transacted=true");
from("target://test-queue")
.routeId("jms-consumer")
.process(exchange -> System.out.println(exchange.getIn().getBody()))
.log(LoggingLevel.INFO, routeId, "Test JMS Consumer received message")
.to(result);
}
};
}
@Test
public void testReceiveSingleMessage() throws Exception {
result.expectedMessageCount(1);
complete.expectedMessageCount(2);
failure.expectedMessageCount(0);
mllpClient.connect();
mllpClient.sendMessageAndWaitForAcknowledgement(Hl7TestMessageGenerator.generateMessage(), 10000);
MockEndpoint.assertIsSatisfied(context, 10, TimeUnit.SECONDS);
}
@Test
public void testAcknowledgementWriteFailure() throws Exception {
result.expectedMessageCount(1);
result.setAssertPeriod(1000);
complete.expectedMessageCount(1);
failure.expectedMessageCount(1);
mllpClient.connect();
mllpClient.setDisconnectMethod(MllpClientResource.DisconnectMethod.RESET);
mllpClient.sendFramedData(Hl7TestMessageGenerator.generateMessage(), true);
MockEndpoint.assertIsSatisfied(context, 10, TimeUnit.SECONDS);
}
}
| MllpTcpServerConsumerTransactionTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/JoinTest.java | {
"start": 3661,
"end": 4336
} | class ____ {
@Id
private Integer id;
private String description;
@ManyToOne
private Customer customer;
@OneToMany
private Collection<LineItem> lineItems = new ArrayList<>();
public Order() {
}
public Order(Integer id, Customer customer) {
this.id = id;
this.customer = customer;
customer.addOrder( this );
}
public Integer getId() {
return id;
}
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
public void addLineItem(LineItem lineItem) {
lineItems.add( lineItem );
}
}
@Entity
@Table(name = "CUSTOMER_TABLE")
public static | Order |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeFilter.java | {
"start": 1096,
"end": 1533
} | class ____<T> extends AbstractMaybeWithUpstream<T, T> {
final Predicate<? super T> predicate;
public MaybeFilter(MaybeSource<T> source, Predicate<? super T> predicate) {
super(source);
this.predicate = predicate;
}
@Override
protected void subscribeActual(MaybeObserver<? super T> observer) {
source.subscribe(new FilterMaybeObserver<>(observer, predicate));
}
static final | MaybeFilter |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/CompositeTypeComparator.java | {
"start": 1022,
"end": 1508
} | class ____<T> extends TypeComparator<T> {
private static final long serialVersionUID = 1L;
@Override
public TypeComparator[] getFlatComparators() {
List<TypeComparator> flatComparators = new LinkedList<TypeComparator>();
this.getFlatComparator(flatComparators);
return flatComparators.toArray(new TypeComparator[flatComparators.size()]);
}
public abstract void getFlatComparator(List<TypeComparator> flatComparators);
}
| CompositeTypeComparator |
java | google__guava | android/guava-testlib/test/com/google/common/collect/testing/MinimalIterableTest.java | {
"start": 1071,
"end": 2709
} | class ____ extends TestCase {
public void testOf_empty() {
Iterable<String> iterable = MinimalIterable.<String>of();
Iterator<String> iterator = iterable.iterator();
assertFalse(iterator.hasNext());
assertThrows(NoSuchElementException.class, () -> iterator.next());
assertThrows(IllegalStateException.class, () -> iterable.iterator());
}
public void testOf_one() {
Iterable<String> iterable = MinimalIterable.of("a");
Iterator<String> iterator = iterable.iterator();
assertTrue(iterator.hasNext());
assertEquals("a", iterator.next());
assertFalse(iterator.hasNext());
assertThrows(NoSuchElementException.class, () -> iterator.next());
assertThrows(IllegalStateException.class, () -> iterable.iterator());
}
public void testFrom_empty() {
Iterable<String> iterable = MinimalIterable.from(Collections.<String>emptySet());
Iterator<String> iterator = iterable.iterator();
assertFalse(iterator.hasNext());
assertThrows(NoSuchElementException.class, () -> iterator.next());
assertThrows(IllegalStateException.class, () -> iterable.iterator());
}
public void testFrom_one() {
Iterable<String> iterable = MinimalIterable.from(singleton("a"));
Iterator<String> iterator = iterable.iterator();
assertTrue(iterator.hasNext());
assertEquals("a", iterator.next());
assertThrows(UnsupportedOperationException.class, () -> iterator.remove());
assertFalse(iterator.hasNext());
assertThrows(NoSuchElementException.class, () -> iterator.next());
assertThrows(IllegalStateException.class, () -> iterable.iterator());
}
}
| MinimalIterableTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYMinFromWKBGeoEvaluator.java | {
"start": 4715,
"end": 5308
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory wkb;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory wkb) {
this.source = source;
this.wkb = wkb;
}
@Override
public StYMinFromWKBGeoEvaluator get(DriverContext context) {
return new StYMinFromWKBGeoEvaluator(source, wkb.get(context), context);
}
@Override
public String toString() {
return "StYMinFromWKBGeoEvaluator[" + "wkb=" + wkb + "]";
}
}
}
| Factory |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ClassUtils.java | {
"start": 2676,
"end": 3206
} | class ____ is {@code null} or empty
* @see #nullSafeToString(Class...)
* @see StringUtils#nullSafeToString(Object)
*/
public static String nullSafeToString(Function<? super Class<?>, ? extends String> mapper,
@Nullable Class<?> @Nullable... classes) {
Preconditions.notNull(mapper, "Mapping function must not be null");
if (classes == null || classes.length == 0) {
return "";
}
return stream(classes) //
.map(clazz -> clazz == null ? "null" : mapper.apply(clazz)) //
.collect(joining(", "));
}
}
| array |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/nonvoidsetter/NonVoidSettersTest.java | {
"start": 565,
"end": 973
} | class ____ {
@ProcessorTest
@IssueKey("353")
public void shouldMapAttributeWithoutSetterInSourceType() {
ActorDto target = ActorMapper.INSTANCE.actorToActorDto( new Actor( 3, "Hickory Black" ) );
assertThat( target ).isNotNull();
assertThat( target.getName() ).isEqualTo( "Hickory Black" );
assertThat( target.getOscars() ).isEqualTo( 3 );
}
}
| NonVoidSettersTest |
java | google__dagger | javatests/dagger/internal/codegen/SubcomponentCreatorValidationTest.java | {
"start": 24471,
"end": 24913
} | interface ____ {",
" ChildComponent.Builder childComponentBuilder();",
"}");
Source childComponentFile =
javaFileBuilder("test.ChildComponent")
.addLines(
"package test;",
"",
"import dagger.BindsInstance;",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"abstract | ParentComponent |
java | quarkusio__quarkus | extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/AllowOverridingParameterConstraintsTest.java | {
"start": 2812,
"end": 3144
} | class ____
implements InterfaceWithNotNullMethodParameterConstraint {
/**
* Adds constraints to a constrained method from a super-type, which is not allowed.
*/
@Override
public void bar(@Size(min = 3) String s) {
}
}
}
| RealizationWithAdditionalMethodParameterConstraint |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/multi-build-mode/src/main/java/org/acme/HelloService.java | {
"start": 728,
"end": 927
} | class ____ implements HelloService{
@Override
public String name() {
return "from foo";
}
}
@IfBuildProfile("bar")
@ApplicationScoped
| HelloServiceFoo |
java | apache__camel | test-infra/camel-test-infra-solr/src/main/java/org/apache/camel/test/infra/solr/common/SolrProperties.java | {
"start": 867,
"end": 1158
} | class ____ {
public static final String SOLR_HOST = "solr.host";
public static final String SOLR_PORT = "solr.port";
public static final String SOLR_CONTAINER = "solr.container";
public static final int DEFAULT_PORT = 8983;
private SolrProperties() {
}
}
| SolrProperties |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/reservedstate/ReservedRepositoryAction.java | {
"start": 1623,
"end": 4986
} | class ____ implements ReservedProjectStateHandler<List<PutRepositoryRequest>> {
public static final String NAME = "snapshot_repositories";
private final RepositoriesService repositoriesService;
/**
* Creates a ReservedRepositoryAction
*
* @param repositoriesService requires RepositoriesService for the cluster state operations
*/
public ReservedRepositoryAction(RepositoriesService repositoriesService) {
this.repositoriesService = repositoriesService;
}
@Override
public String name() {
return NAME;
}
@SuppressWarnings("unchecked")
public Collection<PutRepositoryRequest> prepare(ProjectId projectId, Object input) {
List<PutRepositoryRequest> repositories = (List<PutRepositoryRequest>) input;
for (var repositoryRequest : repositories) {
validate(repositoryRequest);
RepositoriesService.validateRepositoryName(repositoryRequest.name());
repositoriesService.validateRepositoryCanBeCreated(projectId, repositoryRequest);
}
return repositories;
}
@Override
public TransformState transform(ProjectId projectId, List<PutRepositoryRequest> source, TransformState prevState) throws Exception {
var requests = prepare(projectId, source);
ClusterState state = prevState.state();
for (var request : requests) {
RepositoriesService.RegisterRepositoryTask task = new RepositoriesService.RegisterRepositoryTask(
repositoriesService,
projectId,
request
);
state = task.execute(state);
}
Set<String> entities = requests.stream().map(r -> r.name()).collect(Collectors.toSet());
Set<String> toDelete = new HashSet<>(prevState.keys());
toDelete.removeAll(entities);
for (var repositoryToDelete : toDelete) {
var task = new RepositoriesService.UnregisterRepositoryTask(
RESERVED_CLUSTER_STATE_HANDLER_IGNORED_TIMEOUT,
projectId,
repositoryToDelete
);
state = task.execute(state);
}
return new TransformState(state, entities);
}
@Override
public ClusterState remove(ProjectId projectId, TransformState prevState) throws Exception {
return transform(projectId, List.of(), prevState).state();
}
@Override
public List<PutRepositoryRequest> fromXContent(XContentParser parser) throws IOException {
List<PutRepositoryRequest> result = new ArrayList<>();
Map<String, ?> source = parser.map();
for (var entry : source.entrySet()) {
PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest(
RESERVED_CLUSTER_STATE_HANDLER_IGNORED_TIMEOUT,
RESERVED_CLUSTER_STATE_HANDLER_IGNORED_TIMEOUT,
entry.getKey()
);
@SuppressWarnings("unchecked")
Map<String, ?> content = (Map<String, ?>) entry.getValue();
try (XContentParser repoParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) {
putRepositoryRequest.source(repoParser.mapOrdered());
}
result.add(putRepositoryRequest);
}
return result;
}
}
| ReservedRepositoryAction |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/logging/Log.java | {
"start": 57541,
"end": 66658
} | class ____
* @param message the message
* @param params the message parameters
* @param t the throwable
*/
public static void warn(String loggerFqcn, Object message, Object[] params, Throwable t) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warn(loggerFqcn, message, params, t);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param format the message format string
* @param params the parameters
*/
public static void warnv(String format, Object... params) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(format, params);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param format the message format string
* @param param1 the sole parameter
*/
public static void warnv(String format, Object param1) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(format, param1);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
*/
public static void warnv(String format, Object param1, Object param2) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(format, param1, param2);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
* @param param3 the third parameter
*/
public static void warnv(String format, Object param1, Object param2, Object param3) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(format, param1, param2, param3);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param t the throwable
* @param format the message format string
* @param params the parameters
*/
public static void warnv(Throwable t, String format, Object... params) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(t, format, params);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param t the throwable
* @param format the message format string
* @param param1 the sole parameter
*/
public static void warnv(Throwable t, String format, Object param1) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(t, format, param1);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param t the throwable
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
*/
public static void warnv(Throwable t, String format, Object param1, Object param2) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(t, format, param1, param2);
}
/**
* Issue a log message with a level of WARN using {@link java.text.MessageFormat}-style formatting.
*
* @param t the throwable
* @param format the message format string
* @param param1 the first parameter
* @param param2 the second parameter
* @param param3 the third parameter
*/
public static void warnv(Throwable t, String format, Object param1, Object param2, Object param3) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnv(t, format, param1, param2, param3);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param format the format string as per {@link String#format(String, Object...)} or resource bundle key therefor
* @param params the parameters
*/
public static void warnf(String format, Object... params) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(format, params);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param format the format string as per {@link String#format(String, Object...)} or resource bundle key therefor
* @param param1 the sole parameter
*/
public static void warnf(String format, Object param1) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(format, param1);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param format the format string as per {@link String#format(String, Object...)} or resource bundle key therefor
* @param param1 the first parameter
* @param param2 the second parameter
*/
public static void warnf(String format, Object param1, Object param2) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(format, param1, param2);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param format the format string as per {@link String#format(String, Object...)} or resource bundle key therefor
* @param param1 the first parameter
* @param param2 the second parameter
* @param param3 the third parameter
*/
public static void warnf(String format, Object param1, Object param2, Object param3) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(format, param1, param2, param3);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param t the throwable
* @param format the format string, as per {@link String#format(String, Object...)}
* @param params the parameters
*/
public static void warnf(Throwable t, String format, Object... params) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(t, format, params);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param t the throwable
* @param format the format string, as per {@link String#format(String, Object...)}
* @param param1 the sole parameter
*/
public static void warnf(Throwable t, String format, Object param1) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(t, format, param1);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param t the throwable
* @param format the format string, as per {@link String#format(String, Object...)}
* @param param1 the first parameter
* @param param2 the second parameter
*/
public static void warnf(Throwable t, String format, Object param1, Object param2) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(t, format, param1, param2);
}
/**
* Issue a formatted log message with a level of WARN.
*
* @param t the throwable
* @param format the format string, as per {@link String#format(String, Object...)}
* @param param1 the first parameter
* @param param2 the second parameter
* @param param3 the third parameter
*/
public static void warnf(Throwable t, String format, Object param1, Object param2, Object param3) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).warnf(t, format, param1, param2, param3);
}
/**
* Issue a log message with a level of ERROR.
*
* @param message the message
*/
public static void error(Object message) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).error(message);
}
/**
* Issue a log message and throwable with a level of ERROR.
*
* @param message the message
* @param t the throwable
*/
public static void error(Object message, Throwable t) {
if (shouldFail) {
throw fail();
}
Logger.getLogger(stackWalker.getCallerClass()).error(message, t);
}
/**
* Issue a log message and throwable with a level of ERROR and a specific logger | name |
java | spring-projects__spring-security | oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/reactive/function/client/ServletOAuth2AuthorizedClientExchangeFilterFunction.java | {
"start": 6311,
"end": 26620
} | class ____ implements ExchangeFilterFunction {
// Same key as in
// SecurityReactorContextConfiguration.SecurityReactorContextSubscriber.SECURITY_CONTEXT_ATTRIBUTES
static final String SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY = "org.springframework.security.SECURITY_CONTEXT_ATTRIBUTES";
/**
* The request attribute name used to locate the {@link OAuth2AuthorizedClient}.
*/
private static final String OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME = OAuth2AuthorizedClient.class.getName();
private static final String AUTHENTICATION_ATTR_NAME = Authentication.class.getName();
private static final String HTTP_SERVLET_REQUEST_ATTR_NAME = HttpServletRequest.class.getName();
private static final String HTTP_SERVLET_RESPONSE_ATTR_NAME = HttpServletResponse.class.getName();
private static final Authentication ANONYMOUS_AUTHENTICATION = new AnonymousAuthenticationToken("anonymous",
"anonymousUser", AuthorityUtils.createAuthorityList("ROLE_ANONYMOUS"));
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
private OAuth2AuthorizedClientManager authorizedClientManager;
private boolean defaultOAuth2AuthorizedClient;
private String defaultClientRegistrationId;
private ClientResponseHandler clientResponseHandler;
public ServletOAuth2AuthorizedClientExchangeFilterFunction() {
}
/**
* Constructs a {@code ServletOAuth2AuthorizedClientExchangeFilterFunction} using the
* provided parameters.
*
* <p>
* When this constructor is used, authentication (HTTP 401) and authorization (HTTP
* 403) failures returned from an OAuth 2.0 Resource Server will <em>NOT</em> be
* forwarded to an {@link OAuth2AuthorizationFailureHandler}. Therefore, future
* requests to the Resource Server will most likely use the same (likely invalid)
* token, resulting in the same errors returned from the Resource Server. It is
* recommended to configure a
* {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler} via
* {@link #setAuthorizationFailureHandler(OAuth2AuthorizationFailureHandler)} so that
* authentication and authorization failures returned from a Resource Server will
* result in removing the authorized client, so that a new token is retrieved for
* future requests.
* @param authorizedClientManager the {@link OAuth2AuthorizedClientManager} which
* manages the authorized client(s)
* @since 5.2
*/
public ServletOAuth2AuthorizedClientExchangeFilterFunction(OAuth2AuthorizedClientManager authorizedClientManager) {
    Assert.notNull(authorizedClientManager, "authorizedClientManager cannot be null");
    this.authorizedClientManager = authorizedClientManager;
    // Pass-through response handling: 401/403 from the Resource Server is NOT forwarded
    // to a failure handler unless setAuthorizationFailureHandler(...) is called later.
    this.clientResponseHandler = (request, responseMono) -> responseMono;
}
/**
* Constructs a {@code ServletOAuth2AuthorizedClientExchangeFilterFunction} using the
* provided parameters.
*
* <p>
* Since 5.3, when this constructor is used, authentication (HTTP 401) and
* authorization (HTTP 403) failures returned from an OAuth 2.0 Resource Server will
* be forwarded to a {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler},
* which will potentially remove the {@link OAuth2AuthorizedClient} from the given
* {@link OAuth2AuthorizedClientRepository}, depending on the OAuth 2.0 error code
* returned. Authentication failures returned from an OAuth 2.0 Resource Server
* typically indicate that the token is invalid, and should not be used in future
* requests. Removing the authorized client from the repository will ensure that the
* existing token will not be sent for future requests to the Resource Server, and a
* new token is retrieved from the Authorization Server and used for future requests
* to the Resource Server.
* @param clientRegistrationRepository the repository of client registrations
* @param authorizedClientRepository the repository of authorized clients
*/
public ServletOAuth2AuthorizedClientExchangeFilterFunction(
        ClientRegistrationRepository clientRegistrationRepository,
        OAuth2AuthorizedClientRepository authorizedClientRepository) {
    // On authorization failure, evict the stored client so that a fresh token is
    // obtained for subsequent requests to the Resource Server.
    OAuth2AuthorizationFailureHandler authorizationFailureHandler = new RemoveAuthorizedClientOAuth2AuthorizationFailureHandler(
            (clientRegistrationId, principal, attributes) -> removeAuthorizedClient(authorizedClientRepository,
                    clientRegistrationId, principal, attributes));
    DefaultOAuth2AuthorizedClientManager defaultAuthorizedClientManager = new DefaultOAuth2AuthorizedClientManager(
            clientRegistrationRepository, authorizedClientRepository);
    defaultAuthorizedClientManager.setAuthorizationFailureHandler(authorizationFailureHandler);
    this.authorizedClientManager = defaultAuthorizedClientManager;
    // Forward 401/403 responses (and matching exceptions) to the same failure handler.
    this.clientResponseHandler = new AuthorizationFailureForwarder(authorizationFailureHandler);
}
// Evicts the stored authorized client, resolving the servlet request/response that
// were carried along in the attribute map so the repository can locate the entry.
private void removeAuthorizedClient(OAuth2AuthorizedClientRepository authorizedClientRepository,
        String clientRegistrationId, Authentication principal, Map<String, Object> attributes) {
    authorizedClientRepository.removeAuthorizedClient(clientRegistrationId, principal, getRequest(attributes),
            getResponse(attributes));
}
/**
* If true, a default {@link OAuth2AuthorizedClient} can be discovered from the
* current Authentication. It is recommended to be cautious with this feature since
* all HTTP requests will receive the access token if it can be resolved from the
* current Authentication.
* @param defaultOAuth2AuthorizedClient true if a default
* {@link OAuth2AuthorizedClient} should be used, else false. Default is false.
*/
public void setDefaultOAuth2AuthorizedClient(boolean defaultOAuth2AuthorizedClient) {
this.defaultOAuth2AuthorizedClient = defaultOAuth2AuthorizedClient;
}
/**
* If set, will be used as the default {@link ClientRegistration#getRegistrationId()}.
* It is recommended to be cautious with this feature since all HTTP requests will
* receive the access token.
* @param clientRegistrationId the id to use
*/
public void setDefaultClientRegistrationId(String clientRegistrationId) {
this.defaultClientRegistrationId = clientRegistrationId;
}
/**
* Sets the {@link SecurityContextHolderStrategy} to use. The default action is to use
* the {@link SecurityContextHolderStrategy} stored in {@link SecurityContextHolder}.
*
* @since 5.8
*/
public void setSecurityContextHolderStrategy(SecurityContextHolderStrategy securityContextHolderStrategy) {
Assert.notNull(securityContextHolderStrategy, "securityContextHolderStrategy cannot be null");
this.securityContextHolderStrategy = securityContextHolderStrategy;
}
/**
* Configures the builder with {@link #defaultRequest()} and adds this as a
* {@link ExchangeFilterFunction}
* @return the {@link Consumer} to configure the builder
*/
public Consumer<WebClient.Builder> oauth2Configuration() {
return (builder) -> builder.defaultRequest(defaultRequest()).filter(this);
}
/**
* Provides defaults for the {@link HttpServletRequest} and the
* {@link HttpServletResponse} using {@link RequestContextHolder}. It also provides
* defaults for the {@link Authentication} using {@link SecurityContextHolder}. It
* also can default the {@link OAuth2AuthorizedClient} using the
* {@link #clientRegistrationId(String)} or the
* {@link #authentication(Authentication)}.
* @return the {@link Consumer} to populate the attributes
*/
public Consumer<WebClient.RequestHeadersSpec<?>> defaultRequest() {
return (spec) -> spec.attributes((attrs) -> {
populateDefaultRequestResponse(attrs);
populateDefaultAuthentication(attrs);
});
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link OAuth2AuthorizedClient} to be used for providing the Bearer Token.
* @param authorizedClient the {@link OAuth2AuthorizedClient} to use.
* @return the {@link Consumer} to populate the attributes
*/
// Returns a Consumer that stores the authorized client under the well-known attribute
// key, or clears any previously stored client when null is supplied.
public static Consumer<Map<String, Object>> oauth2AuthorizedClient(OAuth2AuthorizedClient authorizedClient) {
    return (attributes) -> {
        if (authorizedClient != null) {
            attributes.put(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME, authorizedClient);
        }
        else {
            attributes.remove(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME);
        }
    };
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link ClientRegistration#getRegistrationId()} to be used to look up the
* {@link OAuth2AuthorizedClient}.
* @param clientRegistrationId the {@link ClientRegistration#getRegistrationId()} to
* be used to look up the {@link OAuth2AuthorizedClient}.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> clientRegistrationId(String clientRegistrationId) {
return ClientAttributes.clientRegistrationId(clientRegistrationId);
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link Authentication} used to look up and save the {@link OAuth2AuthorizedClient}.
* The value is defaulted in
* {@link ServletOAuth2AuthorizedClientExchangeFilterFunction#defaultRequest()}
* @param authentication the {@link Authentication} to use.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> authentication(Authentication authentication) {
return (attributes) -> attributes.put(AUTHENTICATION_ATTR_NAME, authentication);
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link HttpServletRequest} used to look up and save the
* {@link OAuth2AuthorizedClient}. The value is defaulted in
* {@link ServletOAuth2AuthorizedClientExchangeFilterFunction#defaultRequest()}
* @param request the {@link HttpServletRequest} to use.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> httpServletRequest(HttpServletRequest request) {
return (attributes) -> attributes.put(HTTP_SERVLET_REQUEST_ATTR_NAME, request);
}
/**
* Modifies the {@link ClientRequest#attributes()} to include the
* {@link HttpServletResponse} used to save the {@link OAuth2AuthorizedClient}. The
* value is defaulted in
* {@link ServletOAuth2AuthorizedClientExchangeFilterFunction#defaultRequest()}
* @param response the {@link HttpServletResponse} to use.
* @return the {@link Consumer} to populate the attributes
*/
public static Consumer<Map<String, Object>> httpServletResponse(HttpServletResponse response) {
return (attributes) -> attributes.put(HTTP_SERVLET_RESPONSE_ATTR_NAME, response);
}
/**
* Sets the {@link OAuth2AuthorizationFailureHandler} that handles authentication and
* authorization failures when communicating to the OAuth 2.0 Resource Server.
*
* <p>
* For example, a {@link RemoveAuthorizedClientOAuth2AuthorizationFailureHandler} is
* typically used to remove the cached {@link OAuth2AuthorizedClient}, so that the
* same token is no longer used in future requests to the Resource Server.
*
* <p>
* The failure handler used by default depends on which constructor was used to
* construct this {@link ServletOAuth2AuthorizedClientExchangeFilterFunction}. See the
* constructors for more details.
* @param authorizationFailureHandler the {@link OAuth2AuthorizationFailureHandler}
* that handles authentication and authorization failures
* @since 5.3
*/
public void setAuthorizationFailureHandler(OAuth2AuthorizationFailureHandler authorizationFailureHandler) {
Assert.notNull(authorizationFailureHandler, "authorizationFailureHandler cannot be null");
this.clientResponseHandler = new AuthorizationFailureForwarder(authorizationFailureHandler);
}
@Override
public Mono<ClientResponse> filter(ClientRequest request, ExchangeFunction next) {
    // Resolution order for the Bearer token:
    //  1. an OAuth2AuthorizedClient already present as a request attribute is
    //     re-authorized (refreshed if needed) and used;
    //  2. otherwise, if a clientRegistrationId can be resolved (attribute, configured
    //     default, or the current OAuth2 login when enabled), a client is authorized;
    //  3. otherwise the request is forwarded unchanged, without a token.
    // @formatter:off
    return mergeRequestAttributesIfNecessary(request)
            .filter((req) -> req.attribute(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME).isPresent())
            .flatMap((req) -> reauthorizeClient(getOAuth2AuthorizedClient(req.attributes()), req))
            .switchIfEmpty(
                    Mono.defer(() ->
                            mergeRequestAttributesIfNecessary(request)
                                    .filter((req) -> resolveClientRegistrationId(req) != null)
                                    .flatMap((req) -> authorizeClient(resolveClientRegistrationId(req), req))
                    )
            )
            .map((authorizedClient) -> bearer(request, authorizedClient))
            .flatMap((requestWithBearer) -> exchangeAndHandleResponse(requestWithBearer, next))
            .switchIfEmpty(Mono.defer(() -> exchangeAndHandleResponse(request, next)));
    // @formatter:on
}
// Performs the exchange, then routes the response Mono through the configured
// response handler (which may forward authorization failures).
private Mono<ClientResponse> exchangeAndHandleResponse(ClientRequest request, ExchangeFunction next) {
    Mono<ClientResponse> response = next.exchange(request);
    return response.transform((responseMono) -> this.clientResponseHandler.handleResponse(request, responseMono));
}
// Consults the Reactor context only when at least one of the servlet/authentication
// attributes is still missing from the request; otherwise the request is used as-is.
private Mono<ClientRequest> mergeRequestAttributesIfNecessary(ClientRequest request) {
    boolean fullyPopulated = request.attribute(HTTP_SERVLET_REQUEST_ATTR_NAME).isPresent()
            && request.attribute(HTTP_SERVLET_RESPONSE_ATTR_NAME).isPresent()
            && request.attribute(AUTHENTICATION_ATTR_NAME).isPresent();
    return fullyPopulated ? Mono.just(request) : mergeRequestAttributesFromContext(request);
}
// Copies the request and fills in any missing servlet/authentication attributes from
// the Reactor Context (populated upstream under SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY).
private Mono<ClientRequest> mergeRequestAttributesFromContext(ClientRequest request) {
    ClientRequest.Builder builder = ClientRequest.from(request);
    return Mono.deferContextual(Mono::just)
            .cast(Context.class)
            .map((ctx) -> builder.attributes((attrs) -> populateRequestAttributes(attrs, ctx)))
            .map(ClientRequest.Builder::build);
}
// Copies servlet request/response and Authentication from the Reactor context into the
// request attributes. putIfAbsent is used throughout, so explicitly supplied values win.
private void populateRequestAttributes(Map<String, Object> attrs, Context ctx) {
    // NOTE: SecurityReactorContextConfiguration.SecurityReactorContextSubscriber adds
    // this key
    if (!ctx.hasKey(SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY)) {
        return;
    }
    Map<Object, Object> contextAttributes = ctx.get(SECURITY_REACTOR_CONTEXT_ATTRIBUTES_KEY);
    HttpServletRequest servletRequest = (HttpServletRequest) contextAttributes.get(HttpServletRequest.class);
    if (servletRequest != null) {
        attrs.putIfAbsent(HTTP_SERVLET_REQUEST_ATTR_NAME, servletRequest);
    }
    HttpServletResponse servletResponse = (HttpServletResponse) contextAttributes.get(HttpServletResponse.class);
    if (servletResponse != null) {
        attrs.putIfAbsent(HTTP_SERVLET_RESPONSE_ATTR_NAME, servletResponse);
    }
    Authentication authentication = (Authentication) contextAttributes.get(Authentication.class);
    if (authentication != null) {
        attrs.putIfAbsent(AUTHENTICATION_ATTR_NAME, authentication);
    }
}
// Defaults the servlet request/response attributes from the thread-bound
// RequestContextHolder when either of them is missing.
private void populateDefaultRequestResponse(Map<String, Object> attrs) {
    boolean hasRequest = attrs.containsKey(HTTP_SERVLET_REQUEST_ATTR_NAME);
    boolean hasResponse = attrs.containsKey(HTTP_SERVLET_RESPONSE_ATTR_NAME);
    if (hasRequest && hasResponse) {
        return;
    }
    RequestAttributes requestAttributes = RequestContextHolder.getRequestAttributes();
    if (requestAttributes instanceof ServletRequestAttributes) {
        ServletRequestAttributes servletRequestAttributes = (ServletRequestAttributes) requestAttributes;
        attrs.putIfAbsent(HTTP_SERVLET_REQUEST_ATTR_NAME, servletRequestAttributes.getRequest());
        attrs.putIfAbsent(HTTP_SERVLET_RESPONSE_ATTR_NAME, servletRequestAttributes.getResponse());
    }
}
// Defaults the Authentication attribute from the configured SecurityContextHolder
// strategy when the caller did not supply one explicitly.
private void populateDefaultAuthentication(Map<String, Object> attrs) {
    if (!attrs.containsKey(AUTHENTICATION_ATTR_NAME)) {
        attrs.putIfAbsent(AUTHENTICATION_ATTR_NAME,
                this.securityContextHolderStrategy.getContext().getAuthentication());
    }
}
// Resolves the client registration id in precedence order: explicit request
// attribute, configured default id, then (when enabled) the registration id of
// the current OAuth2 login. Returns null when none applies.
private String resolveClientRegistrationId(ClientRequest request) {
    Map<String, Object> attributes = request.attributes();
    String registrationId = getClientRegistrationId(attributes);
    if (registrationId != null) {
        return registrationId;
    }
    if (this.defaultClientRegistrationId != null) {
        return this.defaultClientRegistrationId;
    }
    Authentication authentication = getAuthentication(attributes);
    if (this.defaultOAuth2AuthorizedClient && authentication instanceof OAuth2AuthenticationToken) {
        return ((OAuth2AuthenticationToken) authentication).getAuthorizedClientRegistrationId();
    }
    return null;
}
/**
 * Authorizes a client for {@code clientRegistrationId} through the configured
 * {@link OAuth2AuthorizedClientManager}; empty when no manager is configured.
 */
private Mono<OAuth2AuthorizedClient> authorizeClient(String clientRegistrationId, ClientRequest request) {
    if (this.authorizedClientManager == null) {
        return Mono.empty();
    }
    Map<String, Object> attrs = request.attributes();
    Authentication authentication = getAuthentication(attrs);
    if (authentication == null) {
        // No caller-supplied principal: authorize as the anonymous user.
        authentication = ANONYMOUS_AUTHENTICATION;
    }
    HttpServletRequest servletRequest = getRequest(attrs);
    HttpServletResponse servletResponse = getResponse(attrs);
    OAuth2AuthorizeRequest.Builder builder = OAuth2AuthorizeRequest.withClientRegistrationId(clientRegistrationId)
            .principal(authentication);
    builder.attributes((attributes) -> addToAttributes(attributes, servletRequest, servletResponse));
    OAuth2AuthorizeRequest authorizeRequest = builder.build();
    // NOTE: 'authorizedClientManager.authorize()' needs to be executed on a dedicated
    // thread via subscribeOn(Schedulers.boundedElastic()) since it performs a
    // blocking I/O operation using RestClient internally
    return Mono.fromSupplier(() -> this.authorizedClientManager.authorize(authorizeRequest))
            .subscribeOn(Schedulers.boundedElastic());
}
/**
 * Re-authorizes an existing {@link OAuth2AuthorizedClient} (e.g. to refresh its token)
 * through the configured manager; when no manager is configured the client is
 * returned unchanged.
 */
private Mono<OAuth2AuthorizedClient> reauthorizeClient(OAuth2AuthorizedClient authorizedClient,
        ClientRequest request) {
    if (this.authorizedClientManager == null) {
        return Mono.just(authorizedClient);
    }
    Map<String, Object> attrs = request.attributes();
    Authentication authentication = getAuthentication(attrs);
    if (authentication == null) {
        // No live Authentication: synthesize a placeholder principal carrying only
        // the stored client's principal name.
        authentication = createAuthentication(authorizedClient.getPrincipalName());
    }
    HttpServletRequest servletRequest = getRequest(attrs);
    HttpServletResponse servletResponse = getResponse(attrs);
    OAuth2AuthorizeRequest.Builder builder = OAuth2AuthorizeRequest.withAuthorizedClient(authorizedClient)
            .principal(authentication);
    builder.attributes((attributes) -> addToAttributes(attributes, servletRequest, servletResponse));
    OAuth2AuthorizeRequest reauthorizeRequest = builder.build();
    // NOTE: 'authorizedClientManager.authorize()' needs to be executed on a dedicated
    // thread via subscribeOn(Schedulers.boundedElastic()) since it performs a
    // blocking I/O operation using RestClient internally
    return Mono.fromSupplier(() -> this.authorizedClientManager.authorize(reauthorizeRequest))
            .subscribeOn(Schedulers.boundedElastic());
}
// Copies the servlet request/response into the authorize-request attributes,
// skipping null values so nothing absent is ever stored.
private void addToAttributes(Map<String, Object> attributes, HttpServletRequest servletRequest,
        HttpServletResponse servletResponse) {
    if (servletRequest != null) {
        attributes.put(HTTP_SERVLET_REQUEST_ATTR_NAME, servletRequest);
    }
    if (servletResponse != null) {
        attributes.put(HTTP_SERVLET_RESPONSE_ATTR_NAME, servletResponse);
    }
}
// Copies the request, setting an "Authorization: Bearer <token>" header from the
// client's access token and exposing the client itself as a request attribute.
private ClientRequest bearer(ClientRequest request, OAuth2AuthorizedClient authorizedClient) {
    // @formatter:off
    return ClientRequest.from(request)
            .headers((headers) -> headers.setBearerAuth(authorizedClient.getAccessToken().getTokenValue()))
            .attributes(oauth2AuthorizedClient(authorizedClient))
            .build();
    // @formatter:on
}
// Typed accessors for values previously stored in the request attribute map by the
// Consumer factories above; each returns null when the attribute is absent.

static OAuth2AuthorizedClient getOAuth2AuthorizedClient(Map<String, Object> attrs) {
    return (OAuth2AuthorizedClient) attrs.get(OAUTH2_AUTHORIZED_CLIENT_ATTR_NAME);
}

static String getClientRegistrationId(Map<String, Object> attrs) {
    return ClientAttributes.resolveClientRegistrationId(attrs);
}

static Authentication getAuthentication(Map<String, Object> attrs) {
    return (Authentication) attrs.get(AUTHENTICATION_ATTR_NAME);
}

static HttpServletRequest getRequest(Map<String, Object> attrs) {
    return (HttpServletRequest) attrs.get(HTTP_SERVLET_REQUEST_ATTR_NAME);
}

static HttpServletResponse getResponse(Map<String, Object> attrs) {
    return (HttpServletResponse) attrs.get(HTTP_SERVLET_RESPONSE_ATTR_NAME);
}
// Builds a minimal placeholder Authentication exposing only the principal name
// (no credentials, no authorities); used when re-authorizing a stored client
// without a live Authentication.
private static Authentication createAuthentication(final String principalName) {
    Assert.hasText(principalName, "principalName cannot be empty");
    return new AbstractAuthenticationToken((Collection<? extends GrantedAuthority>) null) {

        @Override
        public Object getCredentials() {
            return "";
        }

        @Override
        public Object getPrincipal() {
            return principalName;
        }

    };
}
@FunctionalInterface
private | ServletOAuth2AuthorizedClientExchangeFilterFunction |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/cluster/ClusterPubSubConnectionProvider.java | {
"start": 4792,
"end": 6894
/**
 * Pub/Sub listener that forwards every callback to the enclosing provider's
 * {@code notifications} delegate, adding the originating {@link RedisClusterNode}.
 * The node is identified either by its node id or by host/port, depending on
 * which constructor was used.
 */
class ____ extends RedisPubSubAdapter<K, V> {

    // Exactly one identification scheme is populated: nodeId, or host/port.
    private final String nodeId;

    private final String host;

    private final int port;

    DelegatingRedisClusterPubSubListener(String nodeId) {
        this.nodeId = nodeId;
        this.host = null;
        this.port = 0;
    }

    DelegatingRedisClusterPubSubListener(String host, int port) {
        this.nodeId = null;
        this.host = host;
        this.port = port;
    }

    // Channel and pattern message delivery.
    @Override
    public void message(K channel, V message) {
        notifications.message(getNode(), channel, message);
    }

    @Override
    public void message(K pattern, K channel, V message) {
        notifications.message(getNode(), pattern, channel, message);
    }

    // Channel/pattern subscription lifecycle events.
    @Override
    public void subscribed(K channel, long count) {
        notifications.subscribed(getNode(), channel, count);
    }

    @Override
    public void psubscribed(K pattern, long count) {
        notifications.psubscribed(getNode(), pattern, count);
    }

    @Override
    public void unsubscribed(K channel, long count) {
        notifications.unsubscribed(getNode(), channel, count);
    }

    @Override
    public void punsubscribed(K pattern, long count) {
        notifications.punsubscribed(getNode(), pattern, count);
    }

    // Sharded Pub/Sub variants.
    @Override
    public void smessage(K shardChannel, V message) {
        notifications.smessage(getNode(), shardChannel, message);
    }

    @Override
    public void ssubscribed(K channel, long count) {
        notifications.ssubscribed(getNode(), channel, count);
    }

    @Override
    public void sunsubscribed(K channel, long count) {
        notifications.sunsubscribed(getNode(), channel, count);
    }

    // Resolves the cluster node this listener belongs to from the current topology,
    // via node id when known, otherwise via host/port.
    private RedisClusterNode getNode() {
        return nodeId != null ? getPartitions().getPartitionByNodeId(nodeId) : getPartitions().getPartition(host, port);
    }

}
}
| DelegatingRedisClusterPubSubListener |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProxyCombiner.java | {
"start": 1677,
"end": 2069
} | interface ____ {@code extends}
* multiple other protocol interfaces. The returned proxy will implement
* all of the methods of the combined proxy interface, delegating calls
* to which proxy implements that method. If multiple proxies implement the
* same method, the first in the list will be used for delegation.
* <p>
* This will check that every method on the combined | which |
java | quarkusio__quarkus | extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/NamedEncryptedPemKeyStoreTest.java | {
"start": 885,
"end": 2357
/**
 * Verifies that an encrypted PEM certificate/key pair registered under the named TLS
 * configuration "http" is decrypted and loaded into a usable key store, while the
 * default (unnamed) TLS configuration stays empty.
 */
class ____ {

    // Named ("http") key store pointing at an encrypted PEM pair; the password is
    // required to decrypt the private key.
    private static final String configuration = """
            quarkus.tls.http.key-store.pem.foo.cert=target/certs/test-formats-encrypted-pem.crt
            quarkus.tls.http.key-store.pem.foo.key=target/certs/test-formats-encrypted-pem.key
            quarkus.tls.http.key-store.pem.foo.password=password
            """;

    @RegisterExtension
    static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
            () -> ShrinkWrap.create(JavaArchive.class)
                    .add(new StringAsset(configuration), "application.properties"));

    @Inject
    TlsConfigurationRegistry certificates;

    @Test
    void test() throws KeyStoreException, CertificateParsingException {
        TlsConfiguration def = certificates.getDefault().orElseThrow();
        TlsConfiguration named = certificates.get("http").orElseThrow();
        // The default configuration was never populated...
        assertThat(def.getKeyStoreOptions()).isNull();
        assertThat(def.getKeyStore()).isNull();
        // ...while the named configuration exposes the decrypted key material.
        assertThat(named.getKeyStoreOptions()).isNotNull();
        assertThat(named.getKeyStore()).isNotNull();
        X509Certificate certificate = (X509Certificate) named.getKeyStore().getCertificate("dummy-entry-0");
        assertThat(certificate).isNotNull();
        // The SAN list must contain a DNS entry (GeneralName type 2) for "localhost".
        assertThat(certificate.getSubjectAlternativeNames()).anySatisfy(l -> {
            assertThat(l.get(0)).isEqualTo(2);
            assertThat(l.get(1)).isEqualTo("localhost");
        });
    }
}
| NamedEncryptedPemKeyStoreTest |
java | qos-ch__slf4j | slf4j-reload4j/src/main/java/org/slf4j/reload4j/Reload4jMDCAdapter.java | {
"start": 1414,
"end": 4078
/**
 * SLF4J {@link MDCAdapter} backed by reload4j's (log4j 1.x) {@code org.apache.log4j.MDC}.
 * Plain key/value operations delegate directly to log4j's MDC; the deque-based
 * operations ({@code pushByKey}/{@code popByKey}) are kept in a separate thread-local
 * map of stacks, since log4j's MDC has no per-key stack support of its own.
 */
class ____ implements MDCAdapter {

    // Backs the pushByKey/popByKey API; independent of log4j's own MDC storage.
    private final ThreadLocalMapOfStacks threadLocalMapOfDeques = new ThreadLocalMapOfStacks();

    // Clears log4j's per-thread context map in place, if one exists.
    @Override
    public void clear() {
        @SuppressWarnings("rawtypes")
        Map map = org.apache.log4j.MDC.getContext();
        if (map != null) {
            map.clear();
        }
    }

    @Override
    public String get(String key) {
        return (String) org.apache.log4j.MDC.get(key);
    }

    /**
     * Put a context value (the <code>val</code> parameter) as identified with
     * the <code>key</code> parameter into the current thread's context map. The
     * <code>key</code> parameter cannot be null. Log4j does <em>not</em>
     * support null for the <code>val</code> parameter.
     *
     * <p>
     * This method delegates all work to log4j's MDC.
     *
     * @throws IllegalArgumentException
     *             in case the "key" or <b>"val"</b> parameter is null
     */
    @Override
    public void put(String key, String val) {
        org.apache.log4j.MDC.put(key, val);
    }

    @Override
    public void remove(String key) {
        org.apache.log4j.MDC.remove(key);
    }

    // Returns a defensive copy of log4j's context map, or null when none exists.
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public Map getCopyOfContextMap() {
        Map old = org.apache.log4j.MDC.getContext();
        if (old != null) {
            return new HashMap(old);
        } else {
            return null;
        }
    }

    // Replaces the current context with the given map. A null argument clears the
    // existing context. When no context exists yet, entries are added one by one via
    // MDC.put; otherwise the existing map is mutated in place.
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Override
    public void setContextMap(Map<String, String> contextMap) {
        Map old = org.apache.log4j.MDC.getContext();
        // we must cater for the case where the contextMap argument is null
        if (contextMap == null) {
            if (old != null) {
                old.clear();
            }
            return;
        }
        if (old == null) {
            for (Map.Entry<String, String> mapEntry : contextMap.entrySet()) {
                org.apache.log4j.MDC.put(mapEntry.getKey(), mapEntry.getValue());
            }
        } else {
            old.clear();
            old.putAll(contextMap);
        }
    }

    // Deque-based operations: handled entirely by the thread-local map of stacks,
    // never visible through log4j's MDC.
    @Override
    public void pushByKey(String key, String value) {
        threadLocalMapOfDeques.pushByKey(key, value);
    }

    @Override
    public String popByKey(String key) {
        return threadLocalMapOfDeques.popByKey(key);
    }

    @Override
    public Deque<String> getCopyOfDequeByKey(String key) {
        return threadLocalMapOfDeques.getCopyOfDequeByKey(key);
    }

    @Override
    public void clearDequeByKey(String key) {
        threadLocalMapOfDeques.clearDequeByKey(key);
    }
}
| Reload4jMDCAdapter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RxReturnValueIgnoredTest.java | {
"start": 4336,
"end": 4438
// Test fixture: parent type whose method is annotated @CanIgnoreReturnValue,
// exempting callers (including overriders) from return-value checking.
class ____<T> {
    @CanIgnoreReturnValue
    abstract T ignoringFunction();
}
private | IgnoringParent |
java | netty__netty | handler/src/main/java/io/netty/handler/traffic/GlobalChannelTrafficShapingHandler.java | {
"start": 6459,
"end": 23922
// Per-channel traffic-shaping state tracked by the handler.
class ____ {
    // Writes queued for delayed sending on this channel (FIFO).
    ArrayDeque<ToSend> messagesQueue;
    // Per-channel statistics; not self-scheduled — driven by the global counter.
    TrafficCounter channelTrafficCounter;
    // Running size of the queued data for this channel (presumably bytes,
    // maintained elsewhere in the handler — confirm against write path).
    long queueSize;
    // Timestamps (ms) of the most recent write/read, used for traffic accounting.
    long lastWriteTimestamp;
    long lastReadTimestamp;
}
/**
* Create the global TrafficCounter
*/
/**
 * Creates and starts the global {@link GlobalChannelTrafficCounter} shared by all
 * channels, after applying the default deviation/slow-down/acceleration settings.
 */
void createGlobalTrafficCounter(ScheduledExecutorService executor) {
    // Default
    setMaxDeviation(DEFAULT_DEVIATION, DEFAULT_SLOWDOWN, DEFAULT_ACCELERATION);
    checkNotNullWithIAE(executor, "executor");
    TrafficCounter tc = new GlobalChannelTrafficCounter(this, executor, "GlobalChannelTC", checkInterval);
    setTrafficCounter(tc);
    tc.start();
}

@Override
protected int userDefinedWritabilityIndex() {
    // Use the writability index reserved for the global-channel shaping variant.
    return AbstractTrafficShapingHandler.GLOBALCHANNEL_DEFAULT_USER_DEFINED_WRITABILITY_INDEX;
}
/**
* Create a new instance.
*
* @param executor
* the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}.
* @param writeGlobalLimit
* 0 or a limit in bytes/s
* @param readGlobalLimit
* 0 or a limit in bytes/s
* @param writeChannelLimit
* 0 or a limit in bytes/s
* @param readChannelLimit
* 0 or a limit in bytes/s
* @param checkInterval
* The delay between two computations of performances for
* channels or 0 if no stats are to be computed.
* @param maxTime
* The maximum delay to wait in case of traffic excess.
*/
public GlobalChannelTrafficShapingHandler(ScheduledExecutorService executor,
long writeGlobalLimit, long readGlobalLimit,
long writeChannelLimit, long readChannelLimit,
long checkInterval, long maxTime) {
super(writeGlobalLimit, readGlobalLimit, checkInterval, maxTime);
createGlobalTrafficCounter(executor);
this.writeChannelLimit = writeChannelLimit;
this.readChannelLimit = readChannelLimit;
}
/**
* Create a new instance.
*
* @param executor
* the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}.
* @param writeGlobalLimit
* 0 or a limit in bytes/s
* @param readGlobalLimit
* 0 or a limit in bytes/s
* @param writeChannelLimit
* 0 or a limit in bytes/s
* @param readChannelLimit
* 0 or a limit in bytes/s
* @param checkInterval
* The delay between two computations of performances for
* channels or 0 if no stats are to be computed.
*/
public GlobalChannelTrafficShapingHandler(ScheduledExecutorService executor,
long writeGlobalLimit, long readGlobalLimit,
long writeChannelLimit, long readChannelLimit,
long checkInterval) {
super(writeGlobalLimit, readGlobalLimit, checkInterval);
this.writeChannelLimit = writeChannelLimit;
this.readChannelLimit = readChannelLimit;
createGlobalTrafficCounter(executor);
}
/**
* Create a new instance.
*
* @param executor
* the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}.
* @param writeGlobalLimit
* 0 or a limit in bytes/s
* @param readGlobalLimit
* 0 or a limit in bytes/s
* @param writeChannelLimit
* 0 or a limit in bytes/s
* @param readChannelLimit
* 0 or a limit in bytes/s
*/
public GlobalChannelTrafficShapingHandler(ScheduledExecutorService executor,
long writeGlobalLimit, long readGlobalLimit,
long writeChannelLimit, long readChannelLimit) {
super(writeGlobalLimit, readGlobalLimit);
this.writeChannelLimit = writeChannelLimit;
this.readChannelLimit = readChannelLimit;
createGlobalTrafficCounter(executor);
}
/**
* Create a new instance.
*
* @param executor
* the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}.
* @param checkInterval
* The delay between two computations of performances for
* channels or 0 if no stats are to be computed.
*/
public GlobalChannelTrafficShapingHandler(ScheduledExecutorService executor, long checkInterval) {
super(checkInterval);
createGlobalTrafficCounter(executor);
}
/**
* Create a new instance.
*
* @param executor
* the {@link ScheduledExecutorService} to use for the {@link TrafficCounter}.
*/
public GlobalChannelTrafficShapingHandler(ScheduledExecutorService executor) {
createGlobalTrafficCounter(executor);
}
/**
* @return the current max deviation
*/
public float maxDeviation() {
return maxDeviation;
}
/**
* @return the current acceleration factor
*/
public float accelerationFactor() {
return accelerationFactor;
}
/**
* @return the current slow down factor
*/
public float slowDownFactor() {
return slowDownFactor;
}
/**
* @param maxDeviation
* the maximum deviation to allow during computation of average, default deviation
* being 0.1, so +/-10% of the desired bandwidth. Maximum being 0.4.
* @param slowDownFactor
* the factor set as +x% to the too fast client (minimal value being 0, meaning no
* slow down factor), default being 40% (0.4).
* @param accelerationFactor
* the factor set as -x% to the too slow client (maximal value being 0, meaning no
* acceleration factor), default being -10% (-0.1).
*/
public void setMaxDeviation(float maxDeviation, float slowDownFactor, float accelerationFactor) {
    if (maxDeviation > MAX_DEVIATION) {
        throw new IllegalArgumentException("maxDeviation must be <= " + MAX_DEVIATION);
    }
    checkPositiveOrZero(slowDownFactor, "slowDownFactor");
    if (accelerationFactor > 0) {
        throw new IllegalArgumentException("accelerationFactor must be <= 0");
    }
    this.maxDeviation = maxDeviation;
    // Stored as multiplicative factors (1 + x) that computeBalancedWait applies
    // to the computed wait time.
    this.accelerationFactor = 1 + accelerationFactor;
    this.slowDownFactor = 1 + slowDownFactor;
}
/**
 * Scans every per-channel counter to find the min/max cumulative read and written
 * byte counts, activates read/write deviation balancing when there is more than one
 * channel and the slowest is below half of the fastest, and records the maxima as
 * the handler-wide cumulative totals.
 */
private void computeDeviationCumulativeBytes() {
    // compute the maximum cumulativeXxxxBytes among still connected Channels
    long maxWrittenBytes = 0;
    long maxReadBytes = 0;
    long minWrittenBytes = Long.MAX_VALUE;
    long minReadBytes = Long.MAX_VALUE;
    for (PerChannel perChannel : channelQueues.values()) {
        long value = perChannel.channelTrafficCounter.cumulativeWrittenBytes();
        if (maxWrittenBytes < value) {
            maxWrittenBytes = value;
        }
        if (minWrittenBytes > value) {
            minWrittenBytes = value;
        }
        value = perChannel.channelTrafficCounter.cumulativeReadBytes();
        if (maxReadBytes < value) {
            maxReadBytes = value;
        }
        if (minReadBytes > value) {
            minReadBytes = value;
        }
    }
    // Deviation balancing only makes sense with at least two channels.
    boolean multiple = channelQueues.size() > 1;
    readDeviationActive = multiple && minReadBytes < maxReadBytes / 2;
    writeDeviationActive = multiple && minWrittenBytes < maxWrittenBytes / 2;
    cumulativeWrittenBytes.set(maxWrittenBytes);
    cumulativeReadBytes.set(maxReadBytes);
}

@Override
protected void doAccounting(TrafficCounter counter) {
    // Refresh deviation state before the standard accounting pass.
    computeDeviationCumulativeBytes();
    super.doAccounting(counter);
}
/**
 * Rebalances {@code wait} for one channel according to its share of the global
 * traffic. A channel whose ratio lies inside the tolerated band
 * (maxDeviation, 1 - maxDeviation) keeps its wait unchanged; a channel at or above
 * (1 - maxDeviation) of the global maximum is slowed down by {@code slowDownFactor};
 * a channel at or below {@code maxDeviation} is sped up by {@code accelerationFactor}.
 */
private long computeBalancedWait(float maxLocal, float maxGlobal, long wait) {
    if (maxGlobal == 0) {
        // no change
        return wait;
    }
    float ratio = maxLocal / maxGlobal;
    // if in the boundaries, same value
    if (ratio > maxDeviation) {
        if (ratio < 1 - maxDeviation) {
            return wait;
        } else {
            ratio = slowDownFactor;
            // Enforce a floor so the slow-down factor has something to scale.
            if (wait < MINIMAL_WAIT) {
                wait = MINIMAL_WAIT;
            }
        }
    } else {
        ratio = accelerationFactor;
    }
    return (long) (wait * ratio);
}
/**
* @return the maxGlobalWriteSize
*/
public long getMaxGlobalWriteSize() {
return maxGlobalWriteSize;
}
/**
* Note the change will be taken as best effort, meaning
* that all already scheduled traffics will not be
* changed, but only applied to new traffics.<br>
* So the expected usage of this method is to be used not too often,
* accordingly to the traffic shaping configuration.
*
* @param maxGlobalWriteSize the maximum Global Write Size allowed in the buffer
* globally for all channels before write suspended is set.
*/
public void setMaxGlobalWriteSize(long maxGlobalWriteSize) {
this.maxGlobalWriteSize = checkPositive(maxGlobalWriteSize, "maxGlobalWriteSize");
}
/**
* @return the global size of the buffers for all queues.
*/
public long queuesSize() {
return queuesSize.get();
}
/**
* @param newWriteLimit Channel write limit
* @param newReadLimit Channel read limit
*/
public void configureChannel(long newWriteLimit, long newReadLimit) {
writeChannelLimit = newWriteLimit;
readChannelLimit = newReadLimit;
long now = TrafficCounter.milliSecondFromNano();
for (PerChannel perChannel : channelQueues.values()) {
perChannel.channelTrafficCounter.resetAccounting(now);
}
}
/**
* @return Channel write limit
*/
public long getWriteChannelLimit() {
return writeChannelLimit;
}
/**
* @param writeLimit Channel write limit
*/
public void setWriteChannelLimit(long writeLimit) {
writeChannelLimit = writeLimit;
long now = TrafficCounter.milliSecondFromNano();
for (PerChannel perChannel : channelQueues.values()) {
perChannel.channelTrafficCounter.resetAccounting(now);
}
}
/**
* @return Channel read limit
*/
public long getReadChannelLimit() {
return readChannelLimit;
}
/**
* @param readLimit Channel read limit
*/
public void setReadChannelLimit(long readLimit) {
readChannelLimit = readLimit;
long now = TrafficCounter.milliSecondFromNano();
for (PerChannel perChannel : channelQueues.values()) {
perChannel.channelTrafficCounter.resetAccounting(now);
}
}
/**
* Release all internal resources of this instance.
*/
public final void release() {
trafficCounter.stop();
}
private PerChannel getOrSetPerChannel(ChannelHandlerContext ctx) {
// ensure creation is limited to one thread per channel
Channel channel = ctx.channel();
Integer key = channel.hashCode();
PerChannel perChannel = channelQueues.get(key);
if (perChannel == null) {
perChannel = new PerChannel();
perChannel.messagesQueue = new ArrayDeque<ToSend>();
// Don't start it since managed through the Global one
perChannel.channelTrafficCounter = new TrafficCounter(this, null, "ChannelTC" +
ctx.channel().hashCode(), checkInterval);
perChannel.queueSize = 0L;
perChannel.lastReadTimestamp = TrafficCounter.milliSecondFromNano();
perChannel.lastWriteTimestamp = perChannel.lastReadTimestamp;
channelQueues.put(key, perChannel);
}
return perChannel;
}
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
getOrSetPerChannel(ctx);
trafficCounter.resetCumulativeTime();
super.handlerAdded(ctx);
}
@Override
public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
trafficCounter.resetCumulativeTime();
Channel channel = ctx.channel();
Integer key = channel.hashCode();
PerChannel perChannel = channelQueues.remove(key);
if (perChannel != null) {
// write operations need synchronization
synchronized (perChannel) {
if (channel.isActive()) {
for (ToSend toSend : perChannel.messagesQueue) {
long size = calculateSize(toSend.toSend);
trafficCounter.bytesRealWriteFlowControl(size);
perChannel.channelTrafficCounter.bytesRealWriteFlowControl(size);
perChannel.queueSize -= size;
queuesSize.addAndGet(-size);
ctx.write(toSend.toSend, toSend.promise);
}
} else {
queuesSize.addAndGet(-perChannel.queueSize);
for (ToSend toSend : perChannel.messagesQueue) {
if (toSend.toSend instanceof ByteBuf) {
((ByteBuf) toSend.toSend).release();
}
}
}
perChannel.messagesQueue.clear();
}
}
releaseWriteSuspended(ctx);
releaseReadSuspended(ctx);
super.handlerRemoved(ctx);
}
@Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) throws Exception {
long size = calculateSize(msg);
long now = TrafficCounter.milliSecondFromNano();
if (size > 0) {
// compute the number of ms to wait before reopening the channel
long waitGlobal = trafficCounter.readTimeToWait(size, getReadLimit(), maxTime, now);
Integer key = ctx.channel().hashCode();
PerChannel perChannel = channelQueues.get(key);
long wait = 0;
if (perChannel != null) {
wait = perChannel.channelTrafficCounter.readTimeToWait(size, readChannelLimit, maxTime, now);
if (readDeviationActive) {
// now try to balance between the channels
long maxLocalRead;
maxLocalRead = perChannel.channelTrafficCounter.cumulativeReadBytes();
long maxGlobalRead = cumulativeReadBytes.get();
if (maxLocalRead <= 0) {
maxLocalRead = 0;
}
if (maxGlobalRead < maxLocalRead) {
maxGlobalRead = maxLocalRead;
}
wait = computeBalancedWait(maxLocalRead, maxGlobalRead, wait);
}
}
if (wait < waitGlobal) {
wait = waitGlobal;
}
wait = checkWaitReadTime(ctx, wait, now);
if (wait >= MINIMAL_WAIT) { // At least 10ms seems a minimal
// time in order to try to limit the traffic
// Only AutoRead AND HandlerActive True means Context Active
Channel channel = ctx.channel();
ChannelConfig config = channel.config();
if (logger.isDebugEnabled()) {
logger.debug("Read Suspend: " + wait + ':' + config.isAutoRead() + ':'
+ isHandlerActive(ctx));
}
if (config.isAutoRead() && isHandlerActive(ctx)) {
config.setAutoRead(false);
channel.attr(READ_SUSPENDED).set(true);
// Create a Runnable to reactive the read if needed. If one was create before it will just be
// reused to limit object creation
Attribute<Runnable> attr = channel.attr(REOPEN_TASK);
Runnable reopenTask = attr.get();
if (reopenTask == null) {
reopenTask = new ReopenReadTimerTask(ctx);
attr.set(reopenTask);
}
ctx.executor().schedule(reopenTask, wait, TimeUnit.MILLISECONDS);
if (logger.isDebugEnabled()) {
logger.debug("Suspend final status => " + config.isAutoRead() + ':'
+ isHandlerActive(ctx) + " will reopened at: " + wait);
}
}
}
}
informReadOperation(ctx, now);
ctx.fireChannelRead(msg);
}
@Override
protected long checkWaitReadTime(final ChannelHandlerContext ctx, long wait, final long now) {
Integer key = ctx.channel().hashCode();
PerChannel perChannel = channelQueues.get(key);
if (perChannel != null) {
if (wait > maxTime && now + wait - perChannel.lastReadTimestamp > maxTime) {
wait = maxTime;
}
}
return wait;
}
@Override
protected void informReadOperation(final ChannelHandlerContext ctx, final long now) {
Integer key = ctx.channel().hashCode();
PerChannel perChannel = channelQueues.get(key);
if (perChannel != null) {
perChannel.lastReadTimestamp = now;
}
}
private static final | PerChannel |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/function/FailableToDoubleBiFunction.java | {
"start": 916,
"end": 1219
} | interface ____ {@link ToDoubleBiFunction} that declares a {@link Throwable}.
*
* @param <T> the type of the first argument to the function
* @param <U> the type of the second argument to the function
* @param <E> The kind of thrown exception or error.
* @since 3.11
*/
@FunctionalInterface
public | like |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/webmonitor/threadinfo/VertexThreadInfoTracker.java | {
"start": 2770,
"end": 15925
} | class ____ implements VertexStatsTracker<VertexThreadInfoStats> {
private static final Logger LOG = LoggerFactory.getLogger(VertexThreadInfoTracker.class);
/** Lock guarding trigger operations. */
private final Object lock = new Object();
@GuardedBy("lock")
private final ThreadInfoRequestCoordinator coordinator;
private final ExecutorService executor;
private final GatewayRetriever<ResourceManagerGateway> resourceManagerGatewayRetriever;
@GuardedBy("lock")
private final Cache<JobVertexKey, VertexThreadInfoStats> jobVertexStatsCache;
@GuardedBy("lock")
private final Set<JobVertexKey> pendingJobVertexStats = new HashSet<>();
/**
* The stats collected for job vertex and execution vertex will be collected into the
* executionVertexStatsCache.
*/
@GuardedBy("lock")
private final Cache<ExecutionVertexKey, VertexThreadInfoStats> executionVertexStatsCache;
@GuardedBy("lock")
private final Set<ExecutionVertexKey> pendingExecutionVertexStats = new HashSet<>();
private final int numSamples;
private final Duration statsRefreshInterval;
private final Duration delayBetweenSamples;
private final int maxThreadInfoDepth;
// Used for testing purposes
private final CompletableFuture<Void> resultAvailableFuture = new CompletableFuture<>();
/** Flag indicating whether the stats tracker has been shut down. */
private boolean shutDown;
private final Duration rpcTimeout;
VertexThreadInfoTracker(
ThreadInfoRequestCoordinator coordinator,
GatewayRetriever<ResourceManagerGateway> resourceManagerGatewayRetriever,
ScheduledExecutorService executor,
Duration cleanUpInterval,
int numSamples,
Duration statsRefreshInterval,
Duration delayBetweenSamples,
int maxStackTraceDepth,
Duration rpcTimeout,
Cache<JobVertexKey, VertexThreadInfoStats> jobVertexStatsCache,
Cache<ExecutionVertexKey, VertexThreadInfoStats> executionVertexStatsCache) {
this.coordinator = checkNotNull(coordinator, "Thread info samples coordinator");
this.resourceManagerGatewayRetriever =
checkNotNull(resourceManagerGatewayRetriever, "Gateway retriever");
this.executor = checkNotNull(executor, "Scheduled executor");
this.statsRefreshInterval =
checkNotNull(statsRefreshInterval, "Statistics refresh interval");
this.rpcTimeout = rpcTimeout;
checkArgument(cleanUpInterval.toMillis() > 0, "Clean up interval must be greater than 0");
checkArgument(numSamples >= 1, "Number of samples");
this.numSamples = numSamples;
checkArgument(
statsRefreshInterval.toMillis() > 0,
"Stats refresh interval must be greater than 0");
this.delayBetweenSamples = checkNotNull(delayBetweenSamples, "Delay between samples");
checkArgument(maxStackTraceDepth > 0, "Max stack trace depth must be greater than 0");
this.maxThreadInfoDepth = maxStackTraceDepth;
this.jobVertexStatsCache = checkNotNull(jobVertexStatsCache, "Job vertex stats cache");
this.executionVertexStatsCache =
checkNotNull(executionVertexStatsCache, "Execution vertex stats cache");
executor.scheduleWithFixedDelay(
this::cleanUpStatsCache,
cleanUpInterval.toMillis(),
cleanUpInterval.toMillis(),
TimeUnit.MILLISECONDS);
}
@Override
public Optional<VertexThreadInfoStats> getJobVertexStats(
JobID jobId, AccessExecutionJobVertex vertex) {
synchronized (lock) {
final JobVertexKey jobVertexKey = getKey(jobId, vertex);
final VertexThreadInfoStats stats = jobVertexStatsCache.getIfPresent(jobVertexKey);
if (stats == null
|| System.currentTimeMillis()
>= stats.getEndTime() + statsRefreshInterval.toMillis()) {
triggerThreadInfoSampleInternal(jobVertexKey, vertex);
}
return Optional.ofNullable(stats);
}
}
@Override
public Optional<VertexThreadInfoStats> getExecutionVertexStats(
JobID jobId, AccessExecutionJobVertex vertex, int subtaskIndex) {
synchronized (lock) {
ExecutionVertexKey executionVertexKey = getKey(jobId, vertex, subtaskIndex);
final VertexThreadInfoStats stats =
executionVertexStatsCache.getIfPresent(executionVertexKey);
if (stats == null
|| System.currentTimeMillis()
>= stats.getEndTime() + statsRefreshInterval.toMillis()) {
triggerThreadInfoSampleInternal(executionVertexKey, vertex);
}
return Optional.ofNullable(stats);
}
}
/**
* Triggers a request for a job vertex to gather the thread info statistics. If there is a
* sample in progress for the vertex, the call is ignored.
*
* @param jobVertexKey cache key
* @param vertex Vertex to get the stats for.
*/
private void triggerThreadInfoSampleInternal(
final JobVertexKey jobVertexKey, final AccessExecutionJobVertex vertex) {
assert (Thread.holdsLock(lock));
if (shutDown) {
return;
}
if (pendingJobVertexStats.contains(jobVertexKey)) {
return;
}
pendingJobVertexStats.add(jobVertexKey);
triggerThreadInfoRequestForVertices(
new JobVertexThreadInfoSampleCompletionCallback(jobVertexKey, vertex.getName()),
vertex.getTaskVertices());
}
/**
* Triggers a request for a execution vertex to gather the thread info statistics. If there is a
* sample in progress for the execution vertex or job vertex, the call is ignored.
*
* @param executionVertexKey cache key
* @param vertex Vertex to get the stats for.
*/
private void triggerThreadInfoSampleInternal(
final ExecutionVertexKey executionVertexKey, final AccessExecutionJobVertex vertex) {
assert (Thread.holdsLock(lock));
if (shutDown) {
return;
}
if (pendingJobVertexStats.contains(executionVertexKey.getJobVertexKey())
|| pendingExecutionVertexStats.contains(executionVertexKey)) {
return;
}
pendingExecutionVertexStats.add(executionVertexKey);
final AccessExecutionVertex[] executionVertices =
Arrays.stream(vertex.getTaskVertices())
.filter(
executionVertex ->
executionVertex.getParallelSubtaskIndex()
== executionVertexKey.subtaskIndex)
.toArray(AccessExecutionVertex[]::new);
if (executionVertices.length == 0) {
return;
}
triggerThreadInfoRequestForVertices(
new ExecutionVertexThreadInfoSampleCompletionCallback(
executionVertexKey, executionVertices[0].getTaskNameWithSubtaskIndex()),
executionVertices);
}
private void triggerThreadInfoRequestForVertices(
ThreadInfoSampleCompletionCallback completionCallback,
AccessExecutionVertex[] executionVertices) {
if (LOG.isDebugEnabled()) {
LOG.debug(
"Triggering thread info sample for tasks: {}",
Arrays.toString(executionVertices));
}
final CompletableFuture<ResourceManagerGateway> gatewayFuture =
resourceManagerGatewayRetriever.getFuture();
gatewayFuture
.thenCompose(
(ResourceManagerGateway resourceManagerGateway) ->
coordinator.triggerThreadInfoRequest(
matchExecutionsWithGateways(
executionVertices, resourceManagerGateway),
numSamples,
delayBetweenSamples,
maxThreadInfoDepth))
.whenCompleteAsync(completionCallback, executor);
}
private Map<ImmutableSet<ExecutionAttemptID>, CompletableFuture<TaskExecutorThreadInfoGateway>>
matchExecutionsWithGateways(
AccessExecutionVertex[] executionVertices,
ResourceManagerGateway resourceManagerGateway) {
// Group executions by their TaskManagerLocation to be able to issue one sampling
// request per TaskManager for all relevant tasks at once
final Map<TaskManagerLocation, ImmutableSet<ExecutionAttemptID>> executionsByLocation =
groupExecutionsByLocation(executionVertices);
return mapExecutionsToGateways(resourceManagerGateway, executionsByLocation);
}
private Map<ImmutableSet<ExecutionAttemptID>, CompletableFuture<TaskExecutorThreadInfoGateway>>
mapExecutionsToGateways(
ResourceManagerGateway resourceManagerGateway,
Map<TaskManagerLocation, ImmutableSet<ExecutionAttemptID>> verticesByLocation) {
final Map<
ImmutableSet<ExecutionAttemptID>,
CompletableFuture<TaskExecutorThreadInfoGateway>>
executionsWithGateways = new HashMap<>();
for (Map.Entry<TaskManagerLocation, ImmutableSet<ExecutionAttemptID>> entry :
verticesByLocation.entrySet()) {
TaskManagerLocation tmLocation = entry.getKey();
ImmutableSet<ExecutionAttemptID> attemptIds = entry.getValue();
CompletableFuture<TaskExecutorThreadInfoGateway> taskExecutorGatewayFuture =
resourceManagerGateway.requestTaskExecutorThreadInfoGateway(
tmLocation.getResourceID(), rpcTimeout);
executionsWithGateways.put(attemptIds, taskExecutorGatewayFuture);
}
return executionsWithGateways;
}
private Map<TaskManagerLocation, ImmutableSet<ExecutionAttemptID>> groupExecutionsByLocation(
AccessExecutionVertex[] executionVertices) {
final Map<TaskManagerLocation, Set<ExecutionAttemptID>> executionAttemptsByLocation =
new HashMap<>();
for (AccessExecutionVertex executionVertex : executionVertices) {
if (executionVertex.getExecutionState() != ExecutionState.RUNNING
&& executionVertex.getExecutionState() != ExecutionState.INITIALIZING) {
LOG.trace(
"{} not running or initializing, but {}; not sampling",
executionVertex.getTaskNameWithSubtaskIndex(),
executionVertex.getExecutionState());
continue;
}
for (AccessExecution execution : executionVertex.getCurrentExecutions()) {
TaskManagerLocation tmLocation = execution.getAssignedResourceLocation();
if (tmLocation == null) {
LOG.trace("ExecutionVertex {} is currently not assigned", executionVertex);
continue;
}
Set<ExecutionAttemptID> groupedAttemptIds =
executionAttemptsByLocation.getOrDefault(tmLocation, new HashSet<>());
ExecutionAttemptID attemptId = execution.getAttemptId();
groupedAttemptIds.add(attemptId);
executionAttemptsByLocation.put(tmLocation, groupedAttemptIds);
}
}
return executionAttemptsByLocation.entrySet().stream()
.collect(
Collectors.toMap(
Map.Entry::getKey, e -> ImmutableSet.copyOf(e.getValue())));
}
@VisibleForTesting
void cleanUpStatsCache() {
jobVertexStatsCache.cleanUp();
executionVertexStatsCache.cleanUp();
}
@Override
public void shutDown() {
synchronized (lock) {
if (!shutDown) {
jobVertexStatsCache.invalidateAll();
pendingJobVertexStats.clear();
executionVertexStatsCache.invalidateAll();
pendingExecutionVertexStats.clear();
shutDown = true;
}
}
}
@VisibleForTesting
CompletableFuture<Void> getResultAvailableFuture() {
return resultAvailableFuture;
}
private static JobVertexKey getKey(JobID jobId, AccessExecutionJobVertex vertex) {
return new JobVertexKey(jobId, vertex.getJobVertexId());
}
private static ExecutionVertexKey getKey(
JobID jobId, AccessExecutionJobVertex vertex, int subtaskIndex) {
return new ExecutionVertexKey(jobId, vertex.getJobVertexId(), subtaskIndex);
}
static | VertexThreadInfoTracker |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java | {
"start": 1669,
"end": 5942
} | class ____ extends DummyTotalHitCountCollector {
private final int id;
TestCollector(int id) {
this.id = id;
}
}
@Override
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
try (RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig())) {
numDocs = randomIntBetween(900, 1000);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
doc.add(new StringField("field1", "value", Field.Store.NO));
writer.addDocument(doc);
}
writer.flush();
}
reader = DirectoryReader.open(directory);
searcher = newSearcher(reader);
}
@Override
public void tearDown() throws Exception {
super.tearDown();
reader.close();
directory.close();
}
/**
* This test checks that each new collector is a different instance on each call and that
* the call to reduce() is forwarded to the wrapped collector manager.
*/
public void testBasic() throws IOException {
final SetOnce<Boolean> reduceCalled = new SetOnce<>();
ProfileCollectorManager<Integer> pcm = new ProfileCollectorManager<>(new CollectorManager<TestCollector, Integer>() {
private int counter = 0;
@Override
public TestCollector newCollector() {
return new TestCollector(counter++);
}
@Override
public Integer reduce(Collection<TestCollector> collectors) {
reduceCalled.set(true);
return counter;
}
}, "test_reason");
int runs = randomIntBetween(5, 10);
List<InternalProfileCollector> collectors = new ArrayList<>();
for (int i = 0; i < runs; i++) {
collectors.add(pcm.newCollector());
assertEquals(i, ((TestCollector) collectors.get(i).getWrappedCollector()).id);
}
long totalTime = 0;
LeafReaderContext leafReaderContext = reader.leaves().get(0);
for (InternalProfileCollector collector : collectors) {
LeafCollector leafCollector = collector.getLeafCollector(leafReaderContext);
leafCollector.collect(0);
totalTime += collector.getTime();
}
Integer returnValue = pcm.reduce(collectors);
assertEquals(runs, returnValue.intValue());
assertTrue(reduceCalled.get());
assertEquals(totalTime, pcm.getCollectorTree().getTime());
assertEquals("test_reason", pcm.getCollectorTree().getReason());
assertEquals("TestCollector", pcm.getCollectorTree().getName());
assertEquals(0, pcm.getCollectorTree().getProfiledChildren().size());
}
/**
* This test checks functionality with potentially more than one slice on a real searcher,
* wrapping a {@link TopScoreDocCollector} into {@link ProfileCollectorManager} and checking the
* result from calling the collector tree contains profile results for each slice.
*/
public void testManagerWithSearcher() throws IOException {
{
CollectorManager<TopScoreDocCollector, TopDocs> topDocsManager = new TopScoreDocCollectorManager(10, null, 1000);
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topDocsManager);
assertEquals(numDocs, topDocs.totalHits.value());
}
{
CollectorManager<TopScoreDocCollector, TopDocs> topDocsManager = new TopScoreDocCollectorManager(10, null, 1000);
String profileReason = "profiler_reason";
ProfileCollectorManager<TopDocs> profileCollectorManager = new ProfileCollectorManager<>(topDocsManager, profileReason);
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), profileCollectorManager);
assertEquals(numDocs, topDocs.totalHits.value());
CollectorResult result = profileCollectorManager.getCollectorTree();
assertEquals("profiler_reason", result.getReason());
assertEquals("TopScoreDocCollector", result.getName());
assertTrue(result.getTime() > 0);
}
}
}
| TestCollector |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/AbstractRegistryTest.java | {
"start": 1408,
"end": 3737
} | class ____ extends AbstractZKRegistryTest {
private static final Logger LOG =
LoggerFactory.getLogger(AbstractRegistryTest.class);
protected RegistryAdminService registry;
protected RegistryOperations operations;
@BeforeEach
public void setupRegistry() throws IOException {
registry = new RegistryAdminService("yarnRegistry");
operations = registry;
registry.init(createRegistryConfiguration());
registry.start();
operations.delete("/", true);
registry.createRootRegistryPaths();
addToTeardown(registry);
}
/**
* Create a service entry with the sample endpoints, and put it
* at the destination
* @param path path
* @param createFlags flags
* @return the record
* @throws IOException on a failure
*/
protected ServiceRecord putExampleServiceEntry(String path, int createFlags) throws
IOException,
URISyntaxException {
return putExampleServiceEntry(path, createFlags, PersistencePolicies.PERMANENT);
}
/**
* Create a service entry with the sample endpoints, and put it
* at the destination
* @param path path
* @param createFlags flags
* @return the record
* @throws IOException on a failure
*/
protected ServiceRecord putExampleServiceEntry(String path,
int createFlags,
String persistence)
throws IOException, URISyntaxException {
ServiceRecord record = buildExampleServiceEntry(persistence);
registry.mknode(RegistryPathUtils.parentOf(path), true);
operations.bind(path, record, createFlags);
return record;
}
/**
* Assert a path exists
* @param path path in the registry
* @throws IOException
*/
public void assertPathExists(String path) throws IOException {
operations.stat(path);
}
/**
* assert that a path does not exist
* @param path path in the registry
* @throws IOException
*/
public void assertPathNotFound(String path) throws IOException {
try {
operations.stat(path);
fail("Path unexpectedly found: " + path);
} catch (PathNotFoundException e) {
}
}
/**
* Assert that a path resolves to a service record
* @param path path in the registry
* @throws IOException
*/
public void assertResolves(String path) throws IOException {
operations.resolve(path);
}
}
| AbstractRegistryTest |
java | elastic__elasticsearch | modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4EmptyChunkHandler.java | {
"start": 811,
"end": 2043
} | class ____ extends ChannelInboundHandlerAdapter {
private HttpRequest currentRequest;
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
switch (msg) {
case HttpRequest request -> {
if (request.decoderResult().isSuccess() && HttpUtil.isTransferEncodingChunked(request)) {
currentRequest = request;
ctx.read();
} else {
currentRequest = null;
ctx.fireChannelRead(request);
}
}
case HttpContent content -> {
if (currentRequest != null) {
if (content instanceof LastHttpContent && content.content().readableBytes() == 0) {
HttpUtil.setTransferEncodingChunked(currentRequest, false);
}
ctx.fireChannelRead(currentRequest);
ctx.fireChannelRead(content);
currentRequest = null;
} else {
ctx.fireChannelRead(content);
}
}
default -> ctx.fireChannelRead(msg);
}
}
}
| Netty4EmptyChunkHandler |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeAssert_isEmpty_Test.java | {
"start": 1021,
"end": 1966
} | class ____ {
@Test
public void should_fail_if_actual_is_null() {
// GIVEN
Range<Integer> actual = null;
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(actualIsNull());
}
@Test
public void should_fail_when_range_is_not_empty() {
// GIVEN
final Range<Integer> actual = Range.openClosed(1, 10);
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage("%nExpecting empty but was: (1..10]".formatted());
}
@Test
public void should_pass_if_range_is_empty() throws Exception {
// GIVEN
final Range<Integer> actual = Range.openClosed(1, 1);
// THEN
assertThat(actual).isEmpty();
}
}
| RangeAssert_isEmpty_Test |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/annotation/EnableTransactionManagement.java | {
"start": 4269,
"end": 4485
} | interface ____ be implemented -
* notice the {@code implements} clause and the {@code @Override}-annotated method below:
*
* <pre class="code">
* @Configuration
* @EnableTransactionManagement
* public | may |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/internal/entities/mapper/relation/query/OneEntityQueryGenerator.java | {
"start": 1121,
"end": 4947
} | class ____ extends AbstractRelationQueryGenerator {
private final MiddleComponentData[] componentDatas;
public OneEntityQueryGenerator(
Configuration configuration,
String versionsMiddleEntityName,
MiddleIdData referencingIdData,
boolean revisionTypeInId,
MiddleComponentData... componentData) {
super(
configuration,
versionsMiddleEntityName,
referencingIdData,
revisionTypeInId,
null
);
this.componentDatas = componentData;
/*
* The valid query that we need to create:
* SELECT ee FROM middleEntity ee WHERE
* (only entities referenced by the association; id_ref_ing = id of the referencing entity)
* ee.originalId.id_ref_ing = :id_ref_ing AND
*
* (the association at revision :revision)
* --> for DefaultAuditStrategy:
* ee.revision = (SELECT max(ee2.revision) FROM middleEntity ee2
* WHERE ee2.revision <= :revision AND ee2.originalId.* = ee.originalId.*)
*
* --> for ValidityAuditStrategy:
* ee.revision <= :revision and (ee.endRevision > :revision or ee.endRevision is null)
*
* AND
*
* (only non-deleted entities and associations)
* ee.revision_type != DEL
*/
}
@Override
protected QueryBuilder buildQueryBuilderCommon(SessionFactoryImplementor sessionFactory) {
// SELECT ee FROM middleEntity ee
final QueryBuilder qb = new QueryBuilder( entityName, MIDDLE_ENTITY_ALIAS, sessionFactory );
qb.addProjection( null, MIDDLE_ENTITY_ALIAS, null, false );
// WHERE
// ee.originalId.id_ref_ing = :id_ref_ing
referencingIdData.getPrefixedMapper().addNamedIdEqualsToQuery(
qb.getRootParameters(),
configuration.getOriginalIdPropertyName(),
true
);
// NOTE:
// No `orderBy` fragment is specified because this generator is used for
// embeddables and enumerations where either a Set-based container will
// force the SETORDINAL property to give us a unique primary key tuple
// or an @IndexColumn/@OrderColumn must be specified that takes priority
// over an @OrderBy fragment.
return qb;
}
@Override
protected void applyValidPredicates(QueryBuilder qb, Parameters rootParameters, boolean inclusive) {
final String revisionPropertyPath = configuration.getRevisionNumberPath();
final String originalIdPropertyName = configuration.getOriginalIdPropertyName();
final String eeOriginalIdPropertyPath = MIDDLE_ENTITY_ALIAS + "." + originalIdPropertyName;
// (with ee association at revision :revision)
// --> based on auditStrategy (see above)
auditStrategy.addAssociationAtRevisionRestriction(
qb,
rootParameters,
revisionPropertyPath,
configuration.getRevisionEndFieldName(),
true,
referencingIdData,
entityName,
eeOriginalIdPropertyPath,
revisionPropertyPath,
originalIdPropertyName,
MIDDLE_ENTITY_ALIAS,
inclusive,
componentDatas
);
// ee.revision_type != DEL
rootParameters.addWhereWithNamedParam( getRevisionTypePath(), "!=", DEL_REVISION_TYPE_PARAMETER );
}
@Override
protected void applyValidAndRemovePredicates(QueryBuilder remQb) {
final Parameters disjoint = remQb.getRootParameters().addSubParameters( "or" );
// Restrictions to match all valid rows.
final Parameters valid = disjoint.addSubParameters( "and" );
// Restrictions to match all rows deleted at exactly given revision.
final Parameters removed = disjoint.addSubParameters( "and" );
// Excluding current revision, because we need to match data valid at the previous one.
applyValidPredicates( remQb, valid, false );
// ee.revision = :revision
removed.addWhereWithNamedParam( configuration.getRevisionNumberPath(), "=", REVISION_PARAMETER );
// ee.revision_type = DEL
removed.addWhereWithNamedParam( getRevisionTypePath(), "=", DEL_REVISION_TYPE_PARAMETER );
}
}
| OneEntityQueryGenerator |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/FactoryProvider2Test.java | {
"start": 19979,
"end": 21609
} | class ____ implements Car {
@Inject @Assisted Provider<Color> colorProvider;
}
@Test
public void testInjectingProviderOfParameter() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Subaru.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Subaru subaru = (Subaru) carFactory.create(Color.RED);
assertSame(Color.RED, subaru.colorProvider.get());
assertSame(Color.RED, subaru.colorProvider.get());
Subaru sedan = (Subaru) carFactory.create(Color.BLUE);
assertSame(Color.BLUE, sedan.colorProvider.get());
assertSame(Color.BLUE, sedan.colorProvider.get());
// and make sure the subaru is still red
assertSame(Color.RED, subaru.colorProvider.get());
}
@Test
public void testInjectingNullParameter() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ColoredCarFactory.class)
.toProvider(FactoryProvider.newFactory(ColoredCarFactory.class, Subaru.class));
}
});
ColoredCarFactory carFactory = injector.getInstance(ColoredCarFactory.class);
Subaru subaru = (Subaru) carFactory.create(null);
assertNull(subaru.colorProvider.get());
assertNull(subaru.colorProvider.get());
}
| Subaru |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/eventbus/ClusteredEventBusTest.java | {
"start": 18626,
"end": 27029
} | class ____ implements Handler<Message<Object>> {
AtomicInteger counter = new AtomicInteger();
@Override
public void handle(Message<Object> msg) {
assertTrue(counter.incrementAndGet() <= maxMessages);
complete();
}
}
CountDownLatch completionLatch = new CountDownLatch(8);
EventBus eb0 = vertices[0].eventBus();
String firstAddress = "foo";
for (int i = 0; i < 2; i++) {
eb0.localConsumer(firstAddress, message -> fail()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
eb0.consumer(firstAddress, new CountingHandler()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
}
String secondAddress = "bar";
for (int i = 0; i < 2; i++) {
eb0.consumer(secondAddress, new CountingHandler()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
eb0.localConsumer(secondAddress, message -> fail()).completion().onComplete(onSuccess(v -> completionLatch.countDown()));
}
awaitLatch(completionLatch);
EventBus eb1 = vertices[1].eventBus();
String[] addresses = {firstAddress, secondAddress};
for (int i = 0; i < 2 * maxMessages; i++) {
for (String address : addresses) {
eb1.send(address, "content");
}
}
await();
}
@Test
public void testRejectedClusterSerializableNotSent() {
testRejectedNotSent(SomeClusterSerializableObject.class, new SomeClusterSerializableObject("bar"));
}
@Test
public void testRejectedSerializableNotSent() {
testRejectedNotSent(SomeSerializableObject.class, new SomeSerializableObject("bar"));
}
private <T> void testRejectedNotSent(Class<T> clazz, T message) {
startNodes(2);
vertices[0].eventBus()
.clusterSerializableChecker(s -> Boolean.FALSE)
.serializableChecker(s -> Boolean.FALSE);
vertices[1].eventBus().consumer("foo", msg -> fail()).completion().onComplete(onSuccess(reg -> {
try {
vertices[0].eventBus().send("foo", message);
fail();
} catch (IllegalArgumentException e) {
assertEquals("No message codec for type: class " + clazz.getName(), e.getMessage());
testComplete();
}
}));
await();
}
@Test
public void testRejectedClusterSerializableNotReceived() {
testRejectedNotReceived(SomeClusterSerializableObject.class, new SomeClusterSerializableObject("bar"));
}
@Test
public void testRejectedSerializableNotReceived() {
testRejectedNotReceived(SomeSerializableObject.class, new SomeSerializableObject("bar"));
}
private <T> void testRejectedNotReceived(Class<T> clazz, T message) {
startNodes(2);
vertices[1].eventBus()
.clusterSerializableChecker(s -> Boolean.FALSE)
.serializableChecker(s -> Boolean.FALSE);
vertices[1].eventBus().consumer("foo", msg -> {
try {
Object body = msg.body();
fail(String.valueOf(body));
} catch (RuntimeException e) {
Throwable cause = e.getCause();
String exceptionMsg = cause instanceof InvalidClassException ? cause.getMessage() : e.getMessage();
assertEquals("Class not allowed: " + clazz.getName(), exceptionMsg);
testComplete();
}
}).completion().onComplete(onSuccess(reg -> {
vertices[0].eventBus().send("foo", message);
}));
await();
}
@Test
public void testMultiHeaders() {
startNodes(2);
waitFor(1);
MultiMap expectedHeaders = MultiMap.caseInsensitiveMultiMap()
.add("a", "1")
.add("c", "2")
.add("b", "3")
.add("d", "4")
.add("a", "5")
.add("a", "6")
.add("a", "7")
.add("b", "8")
.add("b", "9")
.add("c", "10");
vertices[1].eventBus().consumer(ADDRESS1, msg -> {
MultiMap headers = msg.headers();
assertEquals("headers should have expected size", 4, headers.size());
assertEquals("headers should have expected number of entries", 10, headers.entries().size());
assertEquals("entry 'a' should have 4 elements", Arrays.asList("1", "5", "6", "7"), headers.getAll("a"));
assertEquals("entry 'b' should have 3 elements", Arrays.asList("3", "8", "9"), headers.getAll("b"));
assertEquals("entry 'c' should have 2 elements", Arrays.asList("2", "10"), headers.getAll("c"));
assertEquals("entry 'd' should have 1 element", Collections.singletonList("4"), headers.getAll("d"));
complete();
}).completion().onComplete(v1 -> {
vertices[0].eventBus().send(ADDRESS1, "foo", new DeliveryOptions().setHeaders(expectedHeaders));
});
await();
}
@Test
public void testPreserveMessageOrderingOnContext() {
int num = 256;
startNodes(2);
ClusterManager clusterManager = ((VertxInternal) vertices[0]).clusterManager();
if (clusterManager instanceof FakeClusterManager) {
// Other CM will exhibit latency for this one we must fake it
FakeClusterManager fakeClusterManager = (FakeClusterManager) clusterManager;
fakeClusterManager.getRegistrationsLatency(500);
}
AtomicInteger received = new AtomicInteger();
vertices[1].eventBus().consumer(ADDRESS1, msg -> {
int val = received.getAndIncrement();
assertEquals(val, msg.body());
if (val == num - 1) {
testComplete();
}
}).completion().await();
Context ctx = vertices[0].getOrCreateContext();
ctx.runOnContext(v -> {
for (int i = 0;i < num;i++) {
vertices[0].eventBus().send(ADDRESS1, i);
}
});
await();
}
@Test
public void testSocketCleanup() {
startNodes(1);
vertices[0].eventBus().consumer(ADDRESS1, msg -> {
msg.reply("pong");
});
AtomicInteger numberOfOutboundConnections = new AtomicInteger();
AtomicInteger numberOfInboundConnections = new AtomicInteger();
Vertx vertx = vertx(() -> Vertx.builder()
.withClusterManager(getClusterManager())
.withMetrics(options -> new VertxMetrics() {
@Override
public TCPMetrics<?> createNetClientMetrics(NetClientOptions options) {
return new TCPMetrics<>() {
@Override
public Object connected(SocketAddress remoteAddress, String remoteName) {
numberOfOutboundConnections.incrementAndGet();
return null;
}
@Override
public void disconnected(Object socketMetric, SocketAddress remoteAddress) {
numberOfOutboundConnections.decrementAndGet();
}
};
}
@Override
public TCPMetrics<?> createNetServerMetrics(NetServerOptions options, SocketAddress localAddress) {
return new TCPMetrics<>() {
@Override
public Object connected(SocketAddress remoteAddress, String remoteName) {
numberOfInboundConnections.incrementAndGet();
return null;
}
@Override
public void disconnected(Object socketMetric, SocketAddress remoteAddress) {
numberOfInboundConnections.decrementAndGet();
}
};
}
})
.buildClustered()
.await());
vertx.eventBus().request(ADDRESS1, "ping").await();
assertWaitUntil(() -> numberOfOutboundConnections.get() == 1 && numberOfInboundConnections.get() == 1);
ClusteredEventBus eventBus = (ClusteredEventBus) vertices[0].eventBus();
Future.future(eventBus::close).await();
assertWaitUntil(() -> numberOfOutboundConnections.get() == 0 && numberOfInboundConnections.get() == 0);
}
@Test
public void testHandleCloseRemovesStaleOutboundConnectionOnConnectFailure() {
AtomicInteger idx = new AtomicInteger();
startNodes(2, () -> new WrappedClusterManager(getClusterManager()) {
@Override
public void getNodeInfo(String nodeId, Completable<io.vertx.core.spi.cluster.NodeInfo> promise) {
if (idx.getAndIncrement() == 0) {
promise.fail("induced failure");
} else {
super.getNodeInfo(nodeId, promise);
}
}
});
vertices[1].eventBus().consumer(ADDRESS1, msg -> {
testComplete();
}).completion().await();
try {
vertices[0].eventBus().sender(ADDRESS1).write("will fail").await();
fail("Should have failed");
} catch (Exception e) {
vertices[0].eventBus().request(ADDRESS1, "will succeed");
}
await();
}
}
| CountingHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedMethodTest.java | {
"start": 11679,
"end": 12021
} | class ____ {
// BUG: Diagnostic contains: Constructor 'Test'
private Test(int a) {}
}
""")
.doTest();
}
@Test
public void unusedConstructor_refactoredToPrivateNoArgVersion() {
refactoringHelper
.addInputLines(
"Test.java",
"""
| Test |
java | quarkusio__quarkus | devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/devtools/project/buildfile/GradleKotlinProjectBuildFile.java | {
"start": 238,
"end": 1695
} | class ____ extends GradleProjectBuildFile {
static final String BUILD_GRADLE_PATH = "build.gradle.kts";
static final String SETTINGS_GRADLE_PATH = "settings.gradle.kts";
public GradleKotlinProjectBuildFile(Project project, ExtensionCatalog catalog) {
super(project, catalog);
}
@Override
String getSettingsGradlePath() {
return SETTINGS_GRADLE_PATH;
}
@Override
String getBuildGradlePath() {
return BUILD_GRADLE_PATH;
}
@Override
protected boolean importBom(ArtifactCoords coords) {
return importBomInModel(getModel(), toBomImportCoords(coords));
}
@Override
protected boolean addDependency(ArtifactCoords coords, boolean managed) {
return addDependencyInModel(getModel(), coords, managed);
}
@Override
public BuildTool getBuildTool() {
return BuildTool.GRADLE_KOTLIN_DSL;
}
static boolean importBomInModel(Model model, ArtifactCoords coords) {
return addDependencyInModel(model,
String.format(" implementation(%s)%n",
createDependencyCoordinatesString(coords, false, '"')));
}
static boolean addDependencyInModel(Model model, ArtifactCoords coords, boolean managed) {
return addDependencyInModel(model,
String.format(" implementation(%s)%n", createDependencyCoordinatesString(coords, managed, '"')));
}
}
| GradleKotlinProjectBuildFile |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/rest/messages/JobAccumulatorsInfoTest.java | {
"start": 1198,
"end": 2225
} | class ____ extends RestResponseMarshallingTestBase<JobAccumulatorsInfo> {
@Override
protected Class<JobAccumulatorsInfo> getTestResponseClass() {
return JobAccumulatorsInfo.class;
}
@Override
protected JobAccumulatorsInfo getTestResponseInstance() throws Exception {
List<JobAccumulatorsInfo.UserTaskAccumulator> userAccumulatorList = new ArrayList<>(3);
userAccumulatorList.add(
new JobAccumulatorsInfo.UserTaskAccumulator(
"uta1.name", "uta1.type", "uta1.value"));
userAccumulatorList.add(
new JobAccumulatorsInfo.UserTaskAccumulator(
"uta2.name", "uta2.type", "uta2.value"));
userAccumulatorList.add(
new JobAccumulatorsInfo.UserTaskAccumulator(
"uta3.name", "uta3.type", "uta3.value"));
return new JobAccumulatorsInfo(
Collections.emptyList(), userAccumulatorList, Collections.emptyMap());
}
}
| JobAccumulatorsInfoTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/FormatFunction.java | {
"start": 3249,
"end": 6350
} | class ____ extends AbstractSqmFunctionDescriptor implements FunctionRenderer {
private final String nativeFunctionName;
private final boolean reversedArguments;
private final boolean concatPattern;
private final boolean supportsTime;
public FormatFunction(String nativeFunctionName, TypeConfiguration typeConfiguration) {
this( nativeFunctionName, false, true, typeConfiguration );
}
public FormatFunction(
String nativeFunctionName,
boolean reversedArguments,
boolean concatPattern,
TypeConfiguration typeConfiguration) {
this( nativeFunctionName, reversedArguments, concatPattern, true, typeConfiguration );
}
public FormatFunction(
String nativeFunctionName,
boolean reversedArguments,
boolean concatPattern,
boolean supportsTime,
TypeConfiguration typeConfiguration) {
super(
"format",
new ArgumentTypesValidator( exactly( 2 ), TEMPORAL, STRING ),
invariant( typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.STRING ) ),
invariant( typeConfiguration, TEMPORAL, STRING )
);
this.nativeFunctionName = nativeFunctionName;
this.reversedArguments = reversedArguments;
this.concatPattern = concatPattern;
this.supportsTime = supportsTime;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
sqlAppender.appendSql( nativeFunctionName );
sqlAppender.append( '(' );
final SqlAstNode expression = sqlAstArguments.get( 0 );
final SqlAstNode format = sqlAstArguments.get( 1 );
if ( reversedArguments ) {
format.accept( walker );
sqlAppender.append( ',' );
if ( !supportsTime && isTimeTemporal( expression ) ) {
sqlAppender.append( "date'1970-01-01'+" );
}
expression.accept( walker );
}
else {
if ( !supportsTime && isTimeTemporal( expression ) ) {
sqlAppender.append( "date'1970-01-01'+" );
}
expression.accept( walker );
sqlAppender.append( ',' );
format.accept( walker );
}
sqlAppender.append( ')' );
}
private boolean isTimeTemporal(SqlAstNode expression) {
if ( expression instanceof Expression expr ) {
final JdbcMappingContainer expressionType = expr.getExpressionType();
if ( expressionType.getJdbcTypeCount() == 1 ) {
switch ( expressionType.getSingleJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
return true;
default:
break;
}
}
}
return false;
}
@Override
protected <T> SelfRenderingSqmFunction<T> generateSqmFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
return new FormatSqmFunction<>(
this,
this,
arguments,
impliedResultType,
getArgumentsValidator(),
getReturnTypeResolver(),
concatPattern,
queryEngine
);
}
@Override
public String getArgumentListSignature() {
return "(TEMPORAL datetime as STRING pattern)";
}
protected static | FormatFunction |
java | quarkusio__quarkus | integration-tests/hibernate-reactive-panache/src/test/java/io/quarkus/it/panache/reactive/NoPagingPMT.java | {
"start": 779,
"end": 2871
} | class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar
.addClasses(PageItem.class, NoPagingTestEndpoint.class))
.setApplicationName("no-paging-test")
.setApplicationVersion(Version.getVersion())
.setRun(true)
.setLogFileName("no-paging-test.log")
.withConfigurationResource("nopaging.properties");
@LogFile
private Path logfile;
@Test
public void test() {
assertThat(logfile).isRegularFile().hasFileName("no-paging-test.log");
RestAssured.when().get("/no-paging-test").then().body(is("OK"));
// the logs might not be flushed to disk immediately, so wait a few seconds before giving up completely
await().atMost(3, TimeUnit.SECONDS).untilAsserted(this::checkLog);
}
private void checkLog() {
final List<String> lines;
try {
lines = Files.readAllLines(logfile);
} catch (IOException e) {
throw new RuntimeException(e);
}
/*
* Test the SQL was logged, this could fail if Hibernate decides to change how it logs the generated SQL, here in order
* to not silently skip the following test
*/
final boolean sqlFound = lines.stream()
.filter(line -> line.matches(".*select .* from PageItem .*"))
.findAny()
.isPresent();
assertThat(sqlFound)
.as("Hibernate query wasn't logged")
.isTrue();
// Search for the presence of a SQL with a limit or offset
final boolean wrongSqlFound = lines.stream()
.filter(line -> line.matches(".*select .* limit .*") || line.matches(".*select .* offset .*"))
.findAny()
.isPresent();
assertThat(wrongSqlFound)
.as("PanacheQuery is generating SQL with limits and/or offsets when no paging has been requested")
.isFalse();
}
}
| NoPagingPMT |
java | netty__netty | codec-compression/src/main/java/io/netty/handler/codec/compression/Bzip2DivSufSort.java | {
"start": 1033,
"end": 10139
} | class ____ {
private static final int STACK_SIZE = 64;
private static final int BUCKET_A_SIZE = 256;
private static final int BUCKET_B_SIZE = 65536;
private static final int SS_BLOCKSIZE = 1024;
private static final int INSERTIONSORT_THRESHOLD = 8;
private static final int[] LOG_2_TABLE = {
-1, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
};
private final int[] SA;
private final byte[] T;
private final int n;
/**
* @param block The input array
* @param bwtBlock The output array
* @param blockLength The length of the input data
*/
Bzip2DivSufSort(final byte[] block, final int[] bwtBlock, final int blockLength) {
T = block;
SA = bwtBlock;
n = blockLength;
}
private static void swapElements(final int[] array1, final int idx1, final int[] array2, final int idx2) {
final int temp = array1[idx1];
array1[idx1] = array2[idx2];
array2[idx2] = temp;
}
private int ssCompare(final int p1, final int p2, final int depth) {
final int[] SA = this.SA;
final byte[] T = this.T;
// pointers within T
final int U1n = SA[p1 + 1] + 2;
final int U2n = SA[p2 + 1] + 2;
int U1 = depth + SA[p1];
int U2 = depth + SA[p2];
while (U1 < U1n && U2 < U2n && T[U1] == T[U2]) {
++U1;
++U2;
}
return U1 < U1n ?
U2 < U2n ? (T[U1] & 0xff) - (T[U2] & 0xff) : 1
: U2 < U2n ? -1 : 0;
}
private int ssCompareLast(int pa, int p1, int p2, int depth, int size) {
final int[] SA = this.SA;
final byte[] T = this.T;
int U1 = depth + SA[p1];
int U2 = depth + SA[p2];
int U1n = size;
int U2n = SA[p2 + 1] + 2;
while (U1 < U1n && U2 < U2n && T[U1] == T[U2]) {
++U1;
++U2;
}
if (U1 < U1n) {
return U2 < U2n ? (T[U1] & 0xff) - (T[U2] & 0xff) : 1;
}
if (U2 == U2n) {
return 1;
}
U1 %= size;
U1n = SA[pa] + 2;
while (U1 < U1n && U2 < U2n && T[U1] == T[U2]) {
++U1;
++U2;
}
return U1 < U1n ?
U2 < U2n ? (T[U1] & 0xff) - (T[U2] & 0xff) : 1
: U2 < U2n ? -1 : 0;
}
private void ssInsertionSort(int pa, int first, int last, int depth) {
final int[] SA = this.SA;
int i, j; // pointer within SA
int t;
int r;
for (i = last - 2; first <= i; --i) {
for (t = SA[i], j = i + 1; 0 < (r = ssCompare(pa + t, pa + SA[j], depth));) {
do {
SA[j - 1] = SA[j];
} while (++j < last && SA[j] < 0);
if (last <= j) {
break;
}
}
if (r == 0) {
SA[j] = ~SA[j];
}
SA[j - 1] = t;
}
}
private void ssFixdown(int td, int pa, int sa, int i, int size) {
final int[] SA = this.SA;
final byte[] T = this.T;
int j, k;
int v;
int c, d, e;
for (v = SA[sa + i], c = T[td + SA[pa + v]] & 0xff; (j = 2 * i + 1) < size; SA[sa + i] = SA[sa + k], i = k) {
d = T[td + SA[pa + SA[sa + (k = j++)]]] & 0xff;
if (d < (e = T[td + SA[pa + SA[sa + j]]] & 0xff)) {
k = j;
d = e;
}
if (d <= c) {
break;
}
}
SA[sa + i] = v;
}
private void ssHeapSort(int td, int pa, int sa, int size) {
final int[] SA = this.SA;
final byte[] T = this.T;
int i, m;
int t;
m = size;
if (size % 2 == 0) {
m--;
if ((T[td + SA[pa + SA[sa + m / 2]]] & 0xff) < (T[td + SA[pa + SA[sa + m]]] & 0xff)) {
swapElements(SA, sa + m, SA, sa + m / 2);
}
}
for (i = m / 2 - 1; 0 <= i; --i) {
ssFixdown(td, pa, sa, i, m);
}
if (size % 2 == 0) {
swapElements(SA, sa, SA, sa + m);
ssFixdown(td, pa, sa, 0, m);
}
for (i = m - 1; 0 < i; --i) {
t = SA[sa];
SA[sa] = SA[sa + i];
ssFixdown(td, pa, sa, 0, i);
SA[sa + i] = t;
}
}
private int ssMedian3(final int td, final int pa, int v1, int v2, int v3) {
final int[] SA = this.SA;
final byte[] T = this.T;
int T_v1 = T[td + SA[pa + SA[v1]]] & 0xff;
int T_v2 = T[td + SA[pa + SA[v2]]] & 0xff;
int T_v3 = T[td + SA[pa + SA[v3]]] & 0xff;
if (T_v1 > T_v2) {
final int temp = v1;
v1 = v2;
v2 = temp;
final int T_vtemp = T_v1;
T_v1 = T_v2;
T_v2 = T_vtemp;
}
if (T_v2 > T_v3) {
if (T_v1 > T_v3) {
return v1;
}
return v3;
}
return v2;
}
private int ssMedian5(final int td, final int pa, int v1, int v2, int v3, int v4, int v5) {
final int[] SA = this.SA;
final byte[] T = this.T;
int T_v1 = T[td + SA[pa + SA[v1]]] & 0xff;
int T_v2 = T[td + SA[pa + SA[v2]]] & 0xff;
int T_v3 = T[td + SA[pa + SA[v3]]] & 0xff;
int T_v4 = T[td + SA[pa + SA[v4]]] & 0xff;
int T_v5 = T[td + SA[pa + SA[v5]]] & 0xff;
int temp;
int T_vtemp;
if (T_v2 > T_v3) {
temp = v2;
v2 = v3;
v3 = temp;
T_vtemp = T_v2;
T_v2 = T_v3;
T_v3 = T_vtemp;
}
if (T_v4 > T_v5) {
temp = v4;
v4 = v5;
v5 = temp;
T_vtemp = T_v4;
T_v4 = T_v5;
T_v5 = T_vtemp;
}
if (T_v2 > T_v4) {
temp = v2;
v4 = temp;
T_vtemp = T_v2;
T_v4 = T_vtemp;
temp = v3;
v3 = v5;
v5 = temp;
T_vtemp = T_v3;
T_v3 = T_v5;
T_v5 = T_vtemp;
}
if (T_v1 > T_v3) {
temp = v1;
v1 = v3;
v3 = temp;
T_vtemp = T_v1;
T_v1 = T_v3;
T_v3 = T_vtemp;
}
if (T_v1 > T_v4) {
temp = v1;
v4 = temp;
T_vtemp = T_v1;
T_v4 = T_vtemp;
v3 = v5;
T_v3 = T_v5;
}
if (T_v3 > T_v4) {
return v4;
}
return v3;
}
private int ssPivot(final int td, final int pa, final int first, final int last) {
int middle;
int t;
t = last - first;
middle = first + t / 2;
if (t <= 512) {
if (t <= 32) {
return ssMedian3(td, pa, first, middle, last - 1);
}
t >>= 2;
return ssMedian5(td, pa, first, first + t, middle, last - 1 - t, last - 1);
}
t >>= 3;
return ssMedian3(
td, pa,
ssMedian3(td, pa, first, first + t, first + (t << 1)),
ssMedian3(td, pa, middle - t, middle, middle + t),
ssMedian3(td, pa, last - 1 - (t << 1), last - 1 - t, last - 1)
);
}
private static int ssLog(final int n) {
return (n & 0xff00) != 0 ?
8 + LOG_2_TABLE[n >> 8 & 0xff]
: LOG_2_TABLE[n & 0xff];
}
private int ssSubstringPartition(final int pa, final int first, final int last, final int depth) {
final int[] SA = this.SA;
int a, b;
int t;
for (a = first - 1, b = last;;) {
while (++a < b && (SA[pa + SA[a]] + depth >= SA[pa + SA[a] + 1] + 1)) {
SA[a] = ~SA[a];
}
--b;
while (a < b && (SA[pa + SA[b]] + depth < SA[pa + SA[b] + 1] + 1)) {
--b;
}
if (b <= a) {
break;
}
t = ~SA[b];
SA[b] = SA[a];
SA[a] = t;
}
if (first < a) {
SA[first] = ~SA[first];
}
return a;
}
private static | Bzip2DivSufSort |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/support/JettyHeadersAdapter.java | {
"start": 1437,
"end": 6185
} | class ____ implements MultiValueMap<String, String> {
private final HttpFields headers;
private final HttpFields.@Nullable Mutable mutable;
/**
* Creates a new {@code JettyHeadersAdapter} based on the given
* {@code HttpFields} instance.
* @param headers the {@code HttpFields} to base this adapter on
*/
public JettyHeadersAdapter(HttpFields headers) {
Assert.notNull(headers, "Headers must not be null");
this.headers = headers;
this.mutable = headers instanceof HttpFields.Mutable m ? m : null;
}
@Override
public String getFirst(String key) {
return this.headers.get(key);
}
@Override
public void add(String key, @Nullable String value) {
if (value != null) {
HttpFields.Mutable mutableHttpFields = mutableFields();
mutableHttpFields.add(key, value);
}
}
@Override
public void addAll(String key, List<? extends String> values) {
values.forEach(value -> add(key, value));
}
@Override
public void addAll(MultiValueMap<String, String> values) {
values.forEach(this::addAll);
}
@Override
public void set(String key, @Nullable String value) {
HttpFields.Mutable mutableHttpFields = mutableFields();
if (value != null) {
mutableHttpFields.put(key, value);
}
else {
mutableHttpFields.remove(key);
}
}
@Override
public void setAll(Map<String, String> values) {
values.forEach(this::set);
}
@Override
public Map<String, String> toSingleValueMap() {
Map<String, String> singleValueMap = new LinkedCaseInsensitiveMap<>(
this.headers.size(), Locale.ROOT);
Iterator<HttpField> iterator = this.headers.iterator();
iterator.forEachRemaining(field -> {
if (!singleValueMap.containsKey(field.getName())) {
singleValueMap.put(field.getName(), field.getValue());
}
});
return singleValueMap;
}
@Override
public int size() {
return this.headers.getFieldNamesCollection().size();
}
@Override
public boolean isEmpty() {
return (this.headers.size() == 0);
}
@Override
public boolean containsKey(Object key) {
return (key instanceof String name && this.headers.contains(name));
}
@Override
public boolean containsValue(Object value) {
if (value instanceof String searchString) {
for (HttpField field : this.headers) {
if (field.contains(searchString)) {
return true;
}
}
}
return false;
}
@Override
public @Nullable List<String> get(Object key) {
List<String> list = null;
if (key instanceof String name) {
for (HttpField f : this.headers) {
if (f.is(name)) {
if (list == null) {
list = new ArrayList<>();
}
list.add(f.getValue());
}
}
}
return list;
}
@Override
public @Nullable List<String> put(String key, List<String> value) {
HttpFields.Mutable mutableHttpFields = mutableFields();
List<String> oldValues = get(key);
if (oldValues == null) {
switch (value.size()) {
case 0 -> {}
case 1 -> mutableHttpFields.add(key, value.get(0));
default -> mutableHttpFields.add(key, value);
}
}
else {
switch (value.size()) {
case 0 -> mutableHttpFields.remove(key);
case 1 -> mutableHttpFields.put(key, value.get(0));
default -> mutableHttpFields.put(key, value);
}
}
return oldValues;
}
@Override
public @Nullable List<String> remove(Object key) {
HttpFields.Mutable mutableHttpFields = mutableFields();
List<String> list = null;
if (key instanceof String name) {
for (ListIterator<HttpField> i = mutableHttpFields.listIterator(); i.hasNext(); ) {
HttpField f = i.next();
if (f.is(name)) {
if (list == null) {
list = new ArrayList<>();
}
list.add(f.getValue());
i.remove();
}
}
}
return list;
}
@Override
public void putAll(Map<? extends String, ? extends List<String>> map) {
map.forEach(this::put);
}
@Override
public void clear() {
HttpFields.Mutable mutableHttpFields = mutableFields();
mutableHttpFields.clear();
}
@Override
public Set<String> keySet() {
return new HeaderNames();
}
@Override
public Collection<List<String>> values() {
return this.headers.getFieldNamesCollection().stream()
.map(this.headers::getValuesList).toList();
}
@Override
public Set<Entry<String, List<String>>> entrySet() {
return new AbstractSet<>() {
@Override
public Iterator<Entry<String, List<String>>> iterator() {
return new EntryIterator();
}
@Override
public int size() {
return headers.getFieldNamesCollection().size();
}
};
}
private HttpFields.Mutable mutableFields() {
if (this.mutable == null) {
throw new IllegalStateException("Immutable headers");
}
return this.mutable;
}
@Override
public String toString() {
return HttpHeaders.formatHeaders(this);
}
private | JettyHeadersAdapter |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/WatsonLanguageEndpointBuilderFactory.java | {
"start": 14698,
"end": 19744
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final WatsonLanguageHeaderNameBuilder INSTANCE = new WatsonLanguageHeaderNameBuilder();
/**
* The operation to perform.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code IBMWatsonLanguageOperation}.
*/
public String iBMWatsonLanguageOperation() {
return "CamelIBMWatsonLanguageOperation";
}
/**
* The text to analyze.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code IBMWatsonLanguageText}.
*/
public String iBMWatsonLanguageText() {
return "CamelIBMWatsonLanguageText";
}
/**
* The URL to analyze.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code IBMWatsonLanguageUrl}.
*/
public String iBMWatsonLanguageUrl() {
return "CamelIBMWatsonLanguageUrl";
}
/**
* Enable sentiment analysis.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageAnalyzeSentiment}.
*/
public String iBMWatsonLanguageAnalyzeSentiment() {
return "CamelIBMWatsonLanguageAnalyzeSentiment";
}
/**
* Enable emotion analysis.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageAnalyzeEmotion}.
*/
public String iBMWatsonLanguageAnalyzeEmotion() {
return "CamelIBMWatsonLanguageAnalyzeEmotion";
}
/**
* Enable entity extraction.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageAnalyzeEntities}.
*/
public String iBMWatsonLanguageAnalyzeEntities() {
return "CamelIBMWatsonLanguageAnalyzeEntities";
}
/**
* Enable keyword extraction.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageAnalyzeKeywords}.
*/
public String iBMWatsonLanguageAnalyzeKeywords() {
return "CamelIBMWatsonLanguageAnalyzeKeywords";
}
/**
* Enable concept extraction.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageAnalyzeConcepts}.
*/
public String iBMWatsonLanguageAnalyzeConcepts() {
return "CamelIBMWatsonLanguageAnalyzeConcepts";
}
/**
* Enable category classification.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageAnalyzeCategories}.
*/
public String iBMWatsonLanguageAnalyzeCategories() {
return "CamelIBMWatsonLanguageAnalyzeCategories";
}
/**
* The language of the text.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code IBMWatsonLanguageLanguage}.
*/
public String iBMWatsonLanguageLanguage() {
return "CamelIBMWatsonLanguageLanguage";
}
/**
* The sentiment score.
*
* The option is a: {@code Double} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageSentimentScore}.
*/
public String iBMWatsonLanguageSentimentScore() {
return "CamelIBMWatsonLanguageSentimentScore";
}
/**
* The sentiment label (positive, negative, neutral).
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code
* IBMWatsonLanguageSentimentLabel}.
*/
public String iBMWatsonLanguageSentimentLabel() {
return "CamelIBMWatsonLanguageSentimentLabel";
}
}
static WatsonLanguageEndpointBuilder endpointBuilder(String componentName, String path) {
| WatsonLanguageHeaderNameBuilder |
java | quarkusio__quarkus | integration-tests/reactive-messaging-hibernate-orm/src/main/java/io/quarkus/it/kafka/people/PeopleState.java | {
"start": 158,
"end": 360
} | class ____ extends CheckpointEntity {
String names;
public String getNames() {
return names;
}
public void setNames(String names) {
this.names = names;
}
}
| PeopleState |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/tools/OrderSequenceGenerationTest.java | {
"start": 1469,
"end": 3944
} | class ____ {
private SessionFactoryImplementor sf;
private File createSchema;
private File dropSchema;
private Integer entityId;
@BeforeAll
public void initData() throws Exception {
createSchema = File.createTempFile( "create_schema", ".sql" );
createSchema.deleteOnExit();
dropSchema = File.createTempFile( "drop_schema", ".sql" );
dropSchema.deleteOnExit();
final var cfg = new Configuration();
cfg.addAnnotatedClass( StrTestEntity.class );
final var ssrb = cfg.getStandardServiceRegistryBuilder();
ServiceRegistryUtil.applySettings( ssrb );
// Configure settings for DDL script generation
ssrb.applySetting( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_CREATE_TARGET, createSchema.toPath() );
ssrb.applySetting( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_DROP_TARGET, dropSchema.toPath() );
ssrb.applySetting( AvailableSettings.JAKARTA_HBM2DDL_SCRIPTS_ACTION, "drop-and-create" );
ssrb.applySetting( AvailableSettings.JAKARTA_HBM2DDL_DATABASE_ACTION, "create-drop" );
ssrb.applySetting( AvailableSettings.HBM2DDL_AUTO, "create-drop" );
// Standard envers test settings
ssrb.applySetting( EnversSettings.USE_REVISION_ENTITY_WITH_NATIVE_ID, "false" );
ssrb.applySetting( EnversSettings.REVISION_SEQUENCE_NOCACHE, "true" );
sf = cfg.buildSessionFactory().unwrap( SessionFactoryImplementor.class );
sf.inTransaction( em -> {
StrTestEntity e = new StrTestEntity( "Acme" );
em.persist( e );
entityId = e.getId();
} );
}
@AfterAll
public void cleanUp() {
if ( sf != null ) {
sf.close();
}
}
@Test
public void testCreateSequenceExportScripts() throws Exception {
final var dialect = sf.getJdbcServices().getDialect();
final String[] createStrings = dialect
.getSequenceSupport()
.getCreateSequenceStrings( "REVISION_GENERATOR", 1, 1 );
final String content = new String( Files.readAllBytes( createSchema.toPath() ) ).toLowerCase();
for ( final var createString : createStrings ) {
if ( dialect instanceof OracleDialect ) {
assertTrue( content.contains( ( createString + " NOCACHE ORDER" ).toLowerCase() ) );
}
else {
assertTrue( content.contains( createString.toLowerCase() ) );
}
}
}
@Test
public void testBasicPersistAndAuditFetch() {
sf.inSession( session -> {
StrTestEntity rev1 = AuditReaderFactory.get( session ).find( StrTestEntity.class, entityId, 1 );
assertEquals( new StrTestEntity( "Acme", entityId ), rev1 );
} );
}
}
| OrderSequenceGenerationTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.