| language (stringclasses 1) | repo (stringclasses 60) | path (stringlengths 22–294) | class_span (dict) | source (stringlengths 13–1.16M) | target (stringlengths 1–113) |
|---|---|---|---|---|---|
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/oidc/authentication/OidcLogoutAuthenticationTokenTests.java | {
"start": 1156,
"end": 5112
} | class ____ {
private final String idTokenHint = "id-token";
private final OidcIdToken idToken = OidcIdToken.withTokenValue(this.idTokenHint)
.issuer("https://provider.com")
.subject("principal")
.issuedAt(Instant.now().minusSeconds(60))
.expiresAt(Instant.now().plusSeconds(60))
.build();
private final TestingAuthenticationToken principal = new TestingAuthenticationToken("principal", "credentials");
private final String sessionId = "session-1";
private final String clientId = "client-1";
private final String postLogoutRedirectUri = "https://example.com/oidc-post-logout";
private final String state = "state-1";
@Test
public void constructorWhenIdTokenHintEmptyThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OidcLogoutAuthenticationToken("", this.principal, this.sessionId, this.clientId,
this.postLogoutRedirectUri, this.state))
.withMessage("idTokenHint cannot be empty");
assertThatIllegalArgumentException()
.isThrownBy(() -> new OidcLogoutAuthenticationToken((String) null, this.principal, this.sessionId,
this.clientId, this.postLogoutRedirectUri, this.state))
.withMessage("idTokenHint cannot be empty");
}
@Test
public void constructorWhenIdTokenNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OidcLogoutAuthenticationToken((OidcIdToken) null, this.principal, this.sessionId,
this.clientId, this.postLogoutRedirectUri, this.state))
.withMessage("idToken cannot be null");
}
@Test
public void constructorWhenPrincipalNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OidcLogoutAuthenticationToken(this.idTokenHint, null, this.sessionId, this.clientId,
this.postLogoutRedirectUri, this.state))
.withMessage("principal cannot be null");
assertThatIllegalArgumentException()
.isThrownBy(() -> new OidcLogoutAuthenticationToken(this.idToken, null, this.sessionId, this.clientId,
this.postLogoutRedirectUri, this.state))
.withMessage("principal cannot be null");
}
@Test
public void constructorWhenIdTokenHintProvidedThenCreated() {
OidcLogoutAuthenticationToken authentication = new OidcLogoutAuthenticationToken(this.idTokenHint,
this.principal, this.sessionId, this.clientId, this.postLogoutRedirectUri, this.state);
assertThat(authentication.getPrincipal()).isEqualTo(this.principal);
assertThat(authentication.getCredentials().toString()).isEmpty();
assertThat(authentication.getIdTokenHint()).isEqualTo(this.idTokenHint);
assertThat(authentication.getIdToken()).isNull();
assertThat(authentication.getSessionId()).isEqualTo(this.sessionId);
assertThat(authentication.getClientId()).isEqualTo(this.clientId);
assertThat(authentication.getPostLogoutRedirectUri()).isEqualTo(this.postLogoutRedirectUri);
assertThat(authentication.getState()).isEqualTo(this.state);
assertThat(authentication.isAuthenticated()).isFalse();
}
@Test
public void constructorWhenIdTokenProvidedThenCreated() {
OidcLogoutAuthenticationToken authentication = new OidcLogoutAuthenticationToken(this.idToken, this.principal,
this.sessionId, this.clientId, this.postLogoutRedirectUri, this.state);
assertThat(authentication.getPrincipal()).isEqualTo(this.principal);
assertThat(authentication.getCredentials().toString()).isEmpty();
assertThat(authentication.getIdTokenHint()).isEqualTo(this.idToken.getTokenValue());
assertThat(authentication.getIdToken()).isEqualTo(this.idToken);
assertThat(authentication.getSessionId()).isEqualTo(this.sessionId);
assertThat(authentication.getClientId()).isEqualTo(this.clientId);
assertThat(authentication.getPostLogoutRedirectUri()).isEqualTo(this.postLogoutRedirectUri);
assertThat(authentication.getState()).isEqualTo(this.state);
assertThat(authentication.isAuthenticated()).isTrue();
}
}
| OidcLogoutAuthenticationTokenTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java | {
"start": 1141,
"end": 1309
} | class ____ timeline entity and defines parent-child relationships
* with other entities.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public abstract | extends |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/jmx/AppenderDynamicMBean.java | {
"start": 2156,
"end": 12134
} | class ____ extends AbstractDynamicMBean {
// This category instance is for logging.
private static final Logger cat = Logger.getLogger(AppenderDynamicMBean.class);
private final MBeanConstructorInfo[] dConstructors = new MBeanConstructorInfo[1];
private final Vector dAttributes = new Vector();
private final String dClassName = this.getClass().getName();
private final Hashtable dynamicProps = new Hashtable(5);
private final MBeanOperationInfo[] dOperations = new MBeanOperationInfo[2];
private final String dDescription = "This MBean acts as a management facade for log4j appenders.";
// We wrap this appender instance.
private final Appender appender;
public AppenderDynamicMBean(final Appender appender) throws IntrospectionException {
this.appender = appender;
buildDynamicMBeanInfo();
}
private void buildDynamicMBeanInfo() throws IntrospectionException {
final Constructor[] constructors = this.getClass().getConstructors();
dConstructors[0] = new MBeanConstructorInfo(
"AppenderDynamicMBean(): Constructs a AppenderDynamicMBean instance", constructors[0]);
final BeanInfo bi = Introspector.getBeanInfo(appender.getClass());
final PropertyDescriptor[] pd = bi.getPropertyDescriptors();
final int size = pd.length;
for (int i = 0; i < size; i++) {
final String name = pd[i].getName();
final Method readMethod = pd[i].getReadMethod();
final Method writeMethod = pd[i].getWriteMethod();
if (readMethod != null) {
final Class returnClass = readMethod.getReturnType();
if (isSupportedType(returnClass)) {
String returnClassName;
if (returnClass.isAssignableFrom(Priority.class)) {
returnClassName = "java.lang.String";
} else {
returnClassName = returnClass.getName();
}
dAttributes.add(
new MBeanAttributeInfo(name, returnClassName, "Dynamic", true, writeMethod != null, false));
dynamicProps.put(name, new MethodUnion(readMethod, writeMethod));
}
}
}
MBeanParameterInfo[] params = new MBeanParameterInfo[0];
dOperations[0] = new MBeanOperationInfo(
"activateOptions", "activateOptions(): add an appender", params, "void", MBeanOperationInfo.ACTION);
params = new MBeanParameterInfo[1];
params[0] = new MBeanParameterInfo("layout class", "java.lang.String", "layout class");
dOperations[1] = new MBeanOperationInfo(
"setLayout", "setLayout(): add a layout", params, "void", MBeanOperationInfo.ACTION);
}
@Override
public Object getAttribute(final String attributeName)
throws AttributeNotFoundException, MBeanException, ReflectionException {
// Check attributeName is not null to avoid NullPointerException later on
if (attributeName == null) {
throw new RuntimeOperationsException(
new IllegalArgumentException("Attribute name cannot be null"),
"Cannot invoke a getter of " + dClassName + " with null attribute name");
}
cat.debug("getAttribute called with [" + attributeName + "].");
if (attributeName.startsWith("appender=" + appender.getName() + ",layout")) {
try {
return new ObjectName("log4j:" + attributeName);
} catch (final MalformedObjectNameException e) {
cat.error("attributeName", e);
} catch (final RuntimeException e) {
cat.error("attributeName", e);
}
}
final MethodUnion mu = (MethodUnion) dynamicProps.get(attributeName);
// cat.debug("----name="+attributeName+", b="+b);
if (mu != null && mu.readMethod != null) {
try {
return mu.readMethod.invoke(appender, null);
} catch (final IllegalAccessException e) {
return null;
} catch (final InvocationTargetException e) {
if (e.getTargetException() instanceof InterruptedException
|| e.getTargetException() instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
return null;
} catch (final RuntimeException e) {
return null;
}
}
// If attributeName has not been recognized throw an AttributeNotFoundException
throw (new AttributeNotFoundException("Cannot find " + attributeName + " attribute in " + dClassName));
}
@Override
protected Logger getLogger() {
return cat;
}
@Override
public MBeanInfo getMBeanInfo() {
cat.debug("getMBeanInfo called.");
final MBeanAttributeInfo[] attribs = new MBeanAttributeInfo[dAttributes.size()];
dAttributes.toArray(attribs);
return new MBeanInfo(
dClassName, dDescription, attribs, dConstructors, dOperations, new MBeanNotificationInfo[0]);
}
@Override
public Object invoke(final String operationName, final Object params[], final String signature[])
throws MBeanException, ReflectionException {
if (operationName.equals("activateOptions") && appender instanceof OptionHandler) {
final OptionHandler oh = (OptionHandler) appender;
oh.activateOptions();
return "Options activated.";
} else if (operationName.equals("setLayout")) {
final Layout layout =
(Layout) OptionConverter.instantiateByClassName((String) params[0], Layout.class, null);
appender.setLayout(layout);
registerLayoutMBean(layout);
}
return null;
}
private boolean isSupportedType(final Class clazz) {
if (clazz.isPrimitive() || (clazz == String.class) || clazz.isAssignableFrom(Priority.class)) {
return true;
}
return false;
}
@Override
public ObjectName preRegister(final MBeanServer server, final ObjectName name) {
cat.debug("preRegister called. Server=" + server + ", name=" + name);
this.server = server;
registerLayoutMBean(appender.getLayout());
return name;
}
void registerLayoutMBean(final Layout layout) {
if (layout == null) {
return;
}
final String name =
getAppenderName(appender) + ",layout=" + layout.getClass().getName();
cat.debug("Adding LayoutMBean:" + name);
ObjectName objectName = null;
try {
final LayoutDynamicMBean appenderMBean = new LayoutDynamicMBean(layout);
objectName = new ObjectName("log4j:appender=" + name);
if (!server.isRegistered(objectName)) {
registerMBean(appenderMBean, objectName);
dAttributes.add(new MBeanAttributeInfo(
"appender=" + name,
"javax.management.ObjectName",
"The " + name + " layout.",
true,
true,
false));
}
} catch (final JMException e) {
cat.error("Could not add DynamicLayoutMBean for [" + name + "].", e);
} catch (final java.beans.IntrospectionException e) {
cat.error("Could not add DynamicLayoutMBean for [" + name + "].", e);
} catch (final RuntimeException e) {
cat.error("Could not add DynamicLayoutMBean for [" + name + "].", e);
}
}
@Override
public void setAttribute(final Attribute attribute)
throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
// Check attribute is not null to avoid NullPointerException later on
if (attribute == null) {
throw new RuntimeOperationsException(
new IllegalArgumentException("Attribute cannot be null"),
"Cannot invoke a setter of " + dClassName + " with null attribute");
}
final String name = attribute.getName();
Object value = attribute.getValue();
if (name == null) {
throw new RuntimeOperationsException(
new IllegalArgumentException("Attribute name cannot be null"),
"Cannot invoke the setter of " + dClassName + " with null attribute name");
}
final MethodUnion mu = (MethodUnion) dynamicProps.get(name);
if (mu != null && mu.writeMethod != null) {
final Object[] o = new Object[1];
final Class[] params = mu.writeMethod.getParameterTypes();
if (params[0] == org.apache.log4j.Priority.class) {
value = OptionConverter.toLevel((String) value, (Level) getAttribute(name));
}
o[0] = value;
try {
mu.writeMethod.invoke(appender, o);
} catch (final InvocationTargetException e) {
if (e.getTargetException() instanceof InterruptedException
|| e.getTargetException() instanceof InterruptedIOException) {
Thread.currentThread().interrupt();
}
cat.error("FIXME", e);
} catch (final IllegalAccessException e) {
cat.error("FIXME", e);
} catch (final RuntimeException e) {
cat.error("FIXME", e);
}
} else if (name.endsWith(".layout")) {
} else {
throw (new AttributeNotFoundException(
"Attribute " + name + " not found in " + this.getClass().getName()));
}
}
}
| AppenderDynamicMBean |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/scheduler/SchedulerEngine.java | {
"start": 4261,
"end": 4337
} | interface ____ {
void triggered(Event event);
}
public | Listener |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/util/datetime/FastDatePrinter.java | {
"start": 27583,
"end": 27659
} | class ____ output a constant single character.</p>
*/
private static | to |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/specific/InvalidDefaultValueFloatingPointProperties.java | {
"start": 1020,
"end": 1280
} | class ____ {
private final Double ratio;
public InvalidDefaultValueFloatingPointProperties(@TestDefaultValue("55.55.33") Double ratio) {
this.ratio = ratio;
}
public Double getRatio() {
return this.ratio;
}
}
| InvalidDefaultValueFloatingPointProperties |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/build/BuilderBuildpack.java | {
"start": 3368,
"end": 4098
} | class ____ {
private final String id;
private final @Nullable String version;
BuilderReference(String id, @Nullable String version) {
this.id = id;
this.version = version;
}
@Override
public String toString() {
return (this.version != null) ? this.id + "@" + this.version : this.id;
}
boolean matches(BuildpackMetadata candidate) {
return this.id.equals(candidate.getId())
&& (this.version == null || this.version.equals(candidate.getVersion()));
}
static BuilderReference of(String value) {
if (value.contains("@")) {
String[] parts = value.split("@");
return new BuilderReference(parts[0], parts[1]);
}
return new BuilderReference(value, null);
}
}
}
| BuilderReference |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/tracing/Http1xTracingTest.java | {
"start": 495,
"end": 625
} | class ____ extends HttpTracingTestBase {
public Http1xTracingTest() {
super(HttpConfig.Http1x.DEFAULT);
}
}
| Http1xTracingTest |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Issue963.java | {
"start": 1376,
"end": 2249
} | class ____ implements ObjectSerializer, ObjectDeserializer {
public <T> T deserialze(DefaultJSONParser parser, Type type, Object fieldName) {
String uncasedSensitive = StringCodec.instance.deserialze(parser, type, fieldName);
return (T) EnumType.valueOf(uncasedSensitive.toUpperCase());
}
public int getFastMatchToken() {
return JSONToken.LITERAL_STRING;
}
public void write(JSONSerializer serializer, Object object, Object fieldName, Type fieldType, int features) throws IOException {
SerializeWriter out = serializer.out;
if (object == null) {
out.writeNull();
return;
}
StringCodec.instance.write(serializer, ((EnumType) object).name().toLowerCase(), fieldName, fieldType, features);
}
}
}
| EnumTypeCodec |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/json/JobResultDeserializer.java | {
"start": 2133,
"end": 7181
} | class ____ extends StdDeserializer<JobResult> {
private static final long serialVersionUID = 1L;
private final JobIDDeserializer jobIdDeserializer = new JobIDDeserializer();
private final SerializedThrowableDeserializer serializedThrowableDeserializer =
new SerializedThrowableDeserializer();
private final SerializedValueDeserializer serializedValueDeserializer;
public JobResultDeserializer() {
super(JobResult.class);
final JavaType objectSerializedValueType =
TypeFactory.defaultInstance()
.constructType(new TypeReference<SerializedValue<Object>>() {});
serializedValueDeserializer = new SerializedValueDeserializer(objectSerializedValueType);
}
@Override
public JobResult deserialize(final JsonParser p, final DeserializationContext ctxt)
throws IOException {
JobID jobId = null;
ApplicationStatus applicationStatus = ApplicationStatus.UNKNOWN;
long netRuntime = -1;
SerializedThrowable serializedThrowable = null;
Map<String, SerializedValue<OptionalFailure<Object>>> accumulatorResults = null;
while (true) {
final JsonToken jsonToken = p.nextToken();
assertNotEndOfInput(p, jsonToken);
if (jsonToken == JsonToken.END_OBJECT) {
break;
}
final String fieldName = p.getValueAsString();
switch (fieldName) {
case JobResultSerializer.FIELD_NAME_JOB_ID:
assertNextToken(p, JsonToken.VALUE_STRING);
jobId = jobIdDeserializer.deserialize(p, ctxt);
break;
case JobResultSerializer.FIELD_NAME_APPLICATION_STATUS:
assertNextToken(p, JsonToken.VALUE_STRING);
applicationStatus =
ApplicationStatus.valueOf(p.getValueAsString().toUpperCase());
break;
case JobResultSerializer.FIELD_NAME_NET_RUNTIME:
assertNextToken(p, JsonToken.VALUE_NUMBER_INT);
netRuntime = p.getLongValue();
break;
case JobResultSerializer.FIELD_NAME_ACCUMULATOR_RESULTS:
assertNextToken(p, JsonToken.START_OBJECT);
accumulatorResults = parseAccumulatorResults(p, ctxt);
break;
case JobResultSerializer.FIELD_NAME_FAILURE_CAUSE:
assertNextToken(p, JsonToken.START_OBJECT);
serializedThrowable = serializedThrowableDeserializer.deserialize(p, ctxt);
break;
default:
// ignore unknown fields
}
}
try {
return new JobResult.Builder()
.jobId(jobId)
.applicationStatus(applicationStatus)
.netRuntime(netRuntime)
.accumulatorResults(accumulatorResults)
.serializedThrowable(serializedThrowable)
.build();
} catch (final RuntimeException e) {
throw new JsonMappingException(
null, "Could not deserialize " + JobResult.class.getSimpleName(), e);
}
}
@SuppressWarnings("unchecked")
private Map<String, SerializedValue<OptionalFailure<Object>>> parseAccumulatorResults(
final JsonParser p, final DeserializationContext ctxt) throws IOException {
final Map<String, SerializedValue<OptionalFailure<Object>>> accumulatorResults =
new HashMap<>();
while (true) {
final JsonToken jsonToken = p.nextToken();
assertNotEndOfInput(p, jsonToken);
if (jsonToken == JsonToken.END_OBJECT) {
break;
}
final String accumulatorName = p.getValueAsString();
p.nextValue();
accumulatorResults.put(
accumulatorName,
(SerializedValue<OptionalFailure<Object>>)
serializedValueDeserializer.deserialize(p, ctxt));
}
return accumulatorResults;
}
/** Asserts that the provided JsonToken is not null, i.e., not at the end of the input. */
private static void assertNotEndOfInput(
final JsonParser p, @Nullable final JsonToken jsonToken) {
checkState(jsonToken != null, "Unexpected end of input at %s", p.getCurrentLocation());
}
/** Advances the token and asserts that it matches the required {@link JsonToken}. */
private static void assertNextToken(final JsonParser p, final JsonToken requiredJsonToken)
throws IOException {
final JsonToken jsonToken = p.nextToken();
if (jsonToken != requiredJsonToken) {
throw new JsonMappingException(
p, String.format("Expected token %s (was %s)", requiredJsonToken, jsonToken));
}
}
}
| JobResultDeserializer |
java | google__guava | android/guava/src/com/google/common/hash/MacHashFunction.java | {
"start": 1097,
"end": 2785
} | class ____ extends AbstractHashFunction {
@SuppressWarnings("Immutable") // cloned before each use
private final Mac prototype;
@SuppressWarnings("Immutable") // keys are immutable, but not provably so
private final Key key;
private final String toString;
private final int bits;
private final boolean supportsClone;
MacHashFunction(String algorithmName, Key key, String toString) {
this.prototype = getMac(algorithmName, key);
this.key = checkNotNull(key);
this.toString = checkNotNull(toString);
this.bits = prototype.getMacLength() * Byte.SIZE;
this.supportsClone = supportsClone(prototype);
}
@Override
public int bits() {
return bits;
}
private static boolean supportsClone(Mac mac) {
try {
Object unused = mac.clone();
return true;
} catch (CloneNotSupportedException e) {
return false;
}
}
private static Mac getMac(String algorithmName, Key key) {
try {
Mac mac = Mac.getInstance(algorithmName);
mac.init(key);
return mac;
} catch (NoSuchAlgorithmException e) {
throw new IllegalStateException(e);
} catch (InvalidKeyException e) {
throw new IllegalArgumentException(e);
}
}
@Override
public Hasher newHasher() {
if (supportsClone) {
try {
return new MacHasher((Mac) prototype.clone());
} catch (CloneNotSupportedException e) {
// falls through
}
}
return new MacHasher(getMac(prototype.getAlgorithm(), key));
}
@Override
public String toString() {
return toString;
}
/** Hasher that updates a {@link Mac} (message authentication code). */
private static final | MacHashFunction |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 43243,
"end": 43393
} | class ____ extends PublicGrandparent {
@Override
public String foo() {
return "foo";
}
}
@AutoValue
abstract static | PrivateParent |
java | micronaut-projects__micronaut-core | http-server-netty/src/main/java/io/micronaut/http/server/netty/handler/Http2ServerHandler.java | {
"start": 14594,
"end": 17232
} | class ____ extends MultiplexedStream {
final io.netty.handler.codec.http2.Http2Stream stream;
private boolean closeInput = false;
Http2Stream(io.netty.handler.codec.http2.Http2Stream stream) {
super(stream.id());
this.stream = stream;
}
@Override
void notifyDataConsumed(int n) {
if (stream.id() == 1 && upgradedFromHttp1) {
// ignore for upgrade stream
return;
}
try {
connectionHandler.connection().local().flowController().consumeBytes(stream, n);
} catch (Http2Exception e) {
throw new IllegalArgumentException("n > unconsumedBytes", e);
}
}
@Override
boolean reset(Throwable cause) {
if (cause instanceof Http2Exception h2e) {
connectionHandler.encoder().writeRstStream(ctx, stream.id(), h2e.error().code(), ctx.voidPromise());
return true;
} else if (cause instanceof ByteBody.BodyDiscardedException) {
connectionHandler.encoder().writeRstStream(ctx, stream.id(), Http2Error.CANCEL.code(), ctx.voidPromise());
return true;
} else {
connectionHandler.encoder().writeRstStream(ctx, stream.id(), Http2Error.INTERNAL_ERROR.code(), ctx.voidPromise());
return false;
}
}
@Override
void closeInput() {
closeInput = true;
if (stream.state() == io.netty.handler.codec.http2.Http2Stream.State.HALF_CLOSED_LOCAL) {
connectionHandler.encoder().writeRstStream(ctx, stream.id(), Http2Error.CANCEL.code(), ctx.voidPromise());
flush();
}
}
@Override
void writeHeaders(HttpResponse headers, boolean endStream, ChannelPromise promise) {
if (endStream && closeInput) {
promise = promise.unvoid();
promise.addListener(future -> closeInput());
}
connectionHandler.encoder().writeHeaders(ctx, stream.id(), HttpConversionUtil.toHttp2Headers(headers, true), 0, endStream, promise);
}
@Override
void writeData0(ByteBuf data, boolean endStream, ChannelPromise promise) {
if (endStream && closeInput) {
promise = promise.unvoid();
promise.addListener(future -> closeInput());
}
connectionHandler.encoder().writeData(ctx, stream.id(), data, 0, endStream, promise);
}
}
private static final | Http2Stream |
java | apache__camel | components/camel-aws/camel-aws2-ecs/src/main/java/org/apache/camel/component/aws2/ecs/client/impl/ECS2ClientSessionTokenImpl.java | {
"start": 1859,
"end": 5191
} | class ____ implements ECS2InternalClient {
private static final Logger LOG = LoggerFactory.getLogger(ECS2ClientStandardImpl.class);
private ECS2Configuration configuration;
/**
* Constructor that uses the config file.
*/
public ECS2ClientSessionTokenImpl(ECS2Configuration configuration) {
LOG.trace("Creating an AWS ECS manager using static credentials.");
this.configuration = configuration;
}
/**
* Getting the ECS AWS client that is used.
*
* @return Amazon ECS Client.
*/
@Override
public EcsClient getEcsClient() {
EcsClient client = null;
EcsClientBuilder clientBuilder = EcsClient.builder();
ProxyConfiguration.Builder proxyConfig = null;
ApacheHttpClient.Builder httpClientBuilder = null;
boolean isClientConfigFound = false;
if (ObjectHelper.isNotEmpty(configuration.getProxyHost()) && ObjectHelper.isNotEmpty(configuration.getProxyPort())) {
proxyConfig = ProxyConfiguration.builder();
URI proxyEndpoint = URI.create(configuration.getProxyProtocol() + "://" + configuration.getProxyHost() + ":"
+ configuration.getProxyPort());
proxyConfig.endpoint(proxyEndpoint);
httpClientBuilder = ApacheHttpClient.builder().proxyConfiguration(proxyConfig.build());
isClientConfigFound = true;
}
if (configuration.getAccessKey() != null && configuration.getSecretKey() != null
&& configuration.getSessionToken() != null) {
AwsSessionCredentials cred = AwsSessionCredentials.create(configuration.getAccessKey(),
configuration.getSecretKey(), configuration.getSessionToken());
if (isClientConfigFound) {
clientBuilder = clientBuilder.httpClientBuilder(httpClientBuilder)
.credentialsProvider(StaticCredentialsProvider.create(cred));
} else {
clientBuilder = clientBuilder.credentialsProvider(StaticCredentialsProvider.create(cred));
}
} else {
if (!isClientConfigFound) {
clientBuilder = clientBuilder.httpClientBuilder(httpClientBuilder);
}
}
if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
clientBuilder = clientBuilder.region(Region.of(configuration.getRegion()));
}
if (configuration.isOverrideEndpoint()) {
clientBuilder.endpointOverride(URI.create(configuration.getUriEndpointOverride()));
}
if (configuration.isTrustAllCertificates()) {
if (httpClientBuilder == null) {
httpClientBuilder = ApacheHttpClient.builder();
}
SdkHttpClient ahc = httpClientBuilder.buildWithDefaults(AttributeMap
.builder()
.put(
SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES,
Boolean.TRUE)
.build());
// set created http client to use instead of builder
clientBuilder.httpClient(ahc);
clientBuilder.httpClientBuilder(null);
}
client = clientBuilder.build();
return client;
}
}
| ECS2ClientSessionTokenImpl |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/simple/SimplePropertiesTests.java | {
"start": 896,
"end": 1201
} | class ____ {
@Test
void defaultValuesAreConsistent() {
SimpleProperties properties = new SimpleProperties();
SimpleConfig config = SimpleConfig.DEFAULT;
assertThat(properties.getStep()).isEqualTo(config.step());
assertThat(properties.getMode()).isEqualTo(config.mode());
}
}
| SimplePropertiesTests |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/MapOptions.java | {
"start": 1134,
"end": 6833
} | enum ____ {
/**
* In write behind mode all data written in map object
* also written using MapWriter in asynchronous mode.
*/
WRITE_BEHIND,
/**
* In write through mode all write operations for map object
* are synchronized with MapWriter write operations.
* If MapWriter throws an error then it will be re-thrown to Map operation caller.
*/
WRITE_THROUGH
}
private MapLoader<K, V> loader;
private MapWriter<K, V> writer;
private MapWriterAsync<K, V> writerAsync;
private MapLoaderAsync<K, V> loaderAsync;
private WriteMode writeMode = WriteMode.WRITE_THROUGH;
private int writeBehindBatchSize = 50;
private int writeBehindDelay = 1000;
private int writerRetryAttempts = 0;
//ms
private long writerRetryInterval = 100;
protected MapOptions() {
}
protected MapOptions(MapOptions<K, V> copy) {
}
/**
* Creates a new instance of MapOptions with default options.
* <p>
* This is equivalent to:
* <pre>
* new MapOptions()
* .writer(null, null).loader(null);
* </pre>
*
* @param <K> key type
* @param <V> value type
*
* @return MapOptions instance
*
*/
public static <K, V> MapOptions<K, V> defaults() {
return new MapOptions<K, V>();
}
/**
* Defines {@link MapWriter} object which is invoked during write operation.
*
* @param writer object
* @return MapOptions instance
*/
public MapOptions<K, V> writer(MapWriter<K, V> writer) {
if (writer != null) {
this.writer = new RetryableMapWriter<>(this, writer);
}
return this;
}
public MapWriter<K, V> getWriter() {
return writer;
}
/**
* Defines {@link MapWriterAsync} object which is invoked during write operation.
*
* @param writer object
* @return MapOptions instance
*/
public MapOptions<K, V> writerAsync(MapWriterAsync<K, V> writer) {
if (writer != null) {
this.writerAsync = new RetryableMapWriterAsync<>(this, writer);
}
return this;
}
public MapWriterAsync<K, V> getWriterAsync() {
return writerAsync;
}
/**
* Sets write behind tasks batch size.
* All updates accumulated into a batch of specified size and written with {@link MapWriter}.
* <p>
* Default is <code>50</code>
*
* @param writeBehindBatchSize - size of batch
* @return MapOptions instance
*/
public MapOptions<K, V> writeBehindBatchSize(int writeBehindBatchSize) {
this.writeBehindBatchSize = writeBehindBatchSize;
return this;
}
public int getWriteBehindBatchSize() {
return writeBehindBatchSize;
}
/**
* Sets write behind tasks execution delay.
* All updates written with {@link MapWriter} and lag not more than specified delay.
* <p>
* Default is <code>1000</code> milliseconds
*
* @param writeBehindDelay - delay in milliseconds
* @return MapOptions instance
*/
public MapOptions<K, V> writeBehindDelay(int writeBehindDelay) {
this.writeBehindDelay = writeBehindDelay;
return this;
}
public int getWriteBehindDelay() {
return writeBehindDelay;
}
/**
* Sets write mode.
* <p>
* Default is <code>{@link WriteMode#WRITE_THROUGH}</code>
*
* @param writeMode - write mode
* @return MapOptions instance
*/
public MapOptions<K, V> writeMode(WriteMode writeMode) {
this.writeMode = writeMode;
return this;
}
public WriteMode getWriteMode() {
return writeMode;
}
public int getWriterRetryAttempts() {
return writerRetryAttempts;
}
/**
* Sets max retry attempts for {@link RetryableMapWriter} or {@link RetryableMapWriterAsync}
*
* @param writerRetryAttempts object
* @return MapOptions instance
*/
public MapOptions<K, V> writerRetryAttempts(int writerRetryAttempts) {
if (writerRetryAttempts <= 0){
throw new IllegalArgumentException("writerRetryAttempts must be bigger than 0");
}
this.writerRetryAttempts = writerRetryAttempts;
return this;
}
public long getWriterRetryInterval() {
return writerRetryInterval;
}
/**
* Sets retry interval for {@link RetryableMapWriter} or {@link RetryableMapWriterAsync}
*
* @param writerRetryInterval {@link Duration}
* @return MapOptions instance
*/
public MapOptions<K, V> writerRetryInterval(Duration writerRetryInterval) {
if (writerRetryInterval.isNegative()) {
throw new IllegalArgumentException("writerRetryInterval must be positive");
}
this.writerRetryInterval = writerRetryInterval.toMillis();
return this;
}
/**
* Sets {@link MapLoader} object.
*
* @param loader object
* @return MapOptions instance
*/
public MapOptions<K, V> loader(MapLoader<K, V> loader) {
this.loader = loader;
return this;
}
public MapLoader<K, V> getLoader() {
return loader;
}
/**
* Sets {@link MapLoaderAsync} object.
*
* @param loaderAsync object
* @return MapOptions instance
*/
public MapOptions<K, V> loaderAsync(MapLoaderAsync<K, V> loaderAsync) {
this.loaderAsync = loaderAsync;
return this;
}
public MapLoaderAsync<K, V> getLoaderAsync() {
return loaderAsync;
}
}
| WriteMode |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/EchoService.java | {
"start": 122,
"end": 208
} | class ____ {
public String echo(String msg) {
return msg;
}
}
| EchoService |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/issues/MyStatefulBean.java | {
"start": 851,
"end": 1015
} | class ____ {
private volatile String state = "";
public String doSomething(String body) {
state += body;
return state;
}
}
| MyStatefulBean |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/function/FailableTest.java | {
"start": 76709,
"end": 77193
} | interface ____ properly defined to throw any exception using the top level generic types
* Object and Throwable.
*/
@Test
void testThrows_FailableByteSupplier_Throwable() {
assertThrows(IOException.class, () -> new FailableByteSupplier<Throwable>() {
@Override
public byte getAsByte() throws Throwable {
throw new IOException("test");
}
}.getAsByte());
}
/**
* Tests that our failable | is |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/AbstractAssertBaseTest.java | {
"start": 660,
"end": 806
} | class ____ test the concrete methods of {@link AbstractAssert} (using a dummy implementation).
*
* @author Olivier Michallat
*/
public abstract | to |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/internal/AbstractEmbeddableMapping.java | {
"start": 5881,
"end": 27166
} | interface ____ {
void check(String name, Type type) throws IllegalAttributeType;
}
protected boolean inverseInitializeCallback(
TableGroupProducer declaringTableGroupProducer,
SelectableMappings selectableMappings,
EmbeddableMappingType inverseMappingType,
MappingModelCreationProcess creationProcess,
ManagedMappingType declaringType,
MutableAttributeMappingList mappings) {
final int size = inverseMappingType.getNumberOfAttributeMappings();
if ( size == 0 ) {
return false;
}
// Reset the attribute mappings that were added in previous attempts
mappings.clear();
int currentIndex = 0;
// We copy the attributes from the inverse mappings and replace the selection mappings
for ( int j = 0; j < size; j++ ) {
var attributeMapping = inverseMappingType.getAttributeMapping( j );
if ( attributeMapping instanceof BasicAttributeMapping original ) {
final var selectableMapping = selectableMappings.getSelectable( currentIndex );
attributeMapping = BasicAttributeMapping.withSelectableMapping(
declaringType,
original,
original.getPropertyAccess(),
selectableMapping.isInsertable(),
selectableMapping.isUpdateable(),
selectableMapping
);
currentIndex++;
}
else if ( attributeMapping instanceof ToOneAttributeMapping original ) {
final var foreignKeyDescriptor = original.getForeignKeyDescriptor();
if ( foreignKeyDescriptor == null ) {
// This is expected to happen when processing a
// PostInitCallbackEntry because the callbacks
// are not ordered. The exception is caught in
// MappingModelCreationProcess.executePostInitCallbacks()
// and the callback is re-queued.
throw new IllegalStateException( "Not yet ready: " + original );
}
final var toOne = original.copy( declaringType, declaringTableGroupProducer );
final int offset = currentIndex;
toOne.setIdentifyingColumnsTableExpression(
selectableMappings.getSelectable( offset ).getContainingTableExpression()
);
toOne.setForeignKeyDescriptor(
foreignKeyDescriptor.withKeySelectionMapping(
declaringType,
declaringTableGroupProducer,
index -> selectableMappings.getSelectable( offset + index ),
creationProcess
)
);
toOne.setupCircularFetchModelPart( creationProcess );
attributeMapping = toOne;
currentIndex += attributeMapping.getJdbcTypeCount();
}
else if ( attributeMapping instanceof EmbeddableValuedModelPart embeddableValuedModelPart ) {
final var subMappings = new SelectableMapping[attributeMapping.getJdbcTypeCount()];
for ( int i = 0; i < subMappings.length; i++ ) {
subMappings[i] = selectableMappings.getSelectable( currentIndex++ );
}
attributeMapping = MappingModelCreationHelper.createInverseModelPart(
embeddableValuedModelPart,
declaringType,
declaringTableGroupProducer,
new SelectableMappingsImpl( subMappings ),
creationProcess
);
}
else {
throw new UnsupportedMappingException(
"Only basic and to-one attributes are supported in composite fks" );
}
mappings.add( attributeMapping );
}
buildGetterSetterCache();
return true;
}
protected boolean finishInitialization(
NavigableRole navigableRole,
Component bootDescriptor,
CompositeType compositeType,
String rootTableExpression,
String[] rootTableKeyColumnNames,
EmbeddableMappingType declarer,
EmbeddableRepresentationStrategy representationStrategy,
AttributeTypeValidator attributeTypeValidator,
ConcreteTableResolver concreteTableResolver,
Consumer<AttributeMapping> attributeConsumer,
SuccessfulCompletionCallback completionCallback,
MappingModelCreationProcess creationProcess) {
final var creationContext = creationProcess.getCreationContext();
final var typeConfiguration = creationContext.getTypeConfiguration();
final var jdbcServices = creationContext.getJdbcServices();
final var jdbcEnvironment = jdbcServices.getJdbcEnvironment();
final var dialect = jdbcEnvironment.getDialect();
final var subtypes = compositeType.getSubtypes();
int attributeIndex = 0;
int columnPosition = 0;
for ( var bootPropertyDescriptor : bootDescriptor.getProperties() ) {
final var subtype = subtypes[ attributeIndex ];
attributeTypeValidator.check( bootPropertyDescriptor.getName(), subtype );
final var propertyAccess = representationStrategy.resolvePropertyAccess( bootPropertyDescriptor );
final AttributeMapping attributeMapping;
final var value = bootPropertyDescriptor.getValue();
if ( subtype instanceof BasicType ) {
final BasicValue basicValue = (BasicValue) value;
final Selectable selectable = basicValue.getColumn();
final String containingTableExpression;
final String columnExpression;
if ( rootTableKeyColumnNames == null ) {
if ( selectable.isFormula() ) {
columnExpression = selectable.getTemplate(
dialect,
creationContext.getTypeConfiguration()
);
}
else {
columnExpression = selectable.getText( dialect );
}
if ( selectable instanceof Column column ) {
containingTableExpression = concreteTableResolver.resolve( column, jdbcEnvironment );
}
else {
containingTableExpression = rootTableExpression;
}
}
else {
containingTableExpression = rootTableExpression;
columnExpression = rootTableKeyColumnNames[ columnPosition ];
}
final var role = navigableRole.append( bootPropertyDescriptor.getName() );
final SelectablePath selectablePath;
final String columnDefinition;
final Long length;
final Integer arrayLength;
final Integer precision;
final Integer scale;
final Integer temporalPrecision;
final boolean isLob;
final boolean nullable;
if ( selectable instanceof Column column ) {
columnDefinition = column.getSqlType();
length = column.getLength();
arrayLength = column.getArrayLength();
precision = column.getPrecision();
scale = column.getScale();
temporalPrecision = column.getTemporalPrecision();
nullable = column.isNullable();
isLob = column.isSqlTypeLob( creationContext.getMetadata() );
selectablePath = basicValue.createSelectablePath( column.getQuotedName( dialect ) );
MappingModelCreationHelper.resolveAggregateColumnBasicType( creationProcess, role, column );
}
else {
columnDefinition = null;
length = null;
arrayLength = null;
precision = null;
scale = null;
temporalPrecision = null;
nullable = true;
isLob = false;
selectablePath = new SelectablePath( determineEmbeddablePrefix() + bootPropertyDescriptor.getName() );
}
attributeMapping = MappingModelCreationHelper.buildBasicAttributeMapping(
bootPropertyDescriptor.getName(),
role,
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
declarer,
basicValue.getResolution().getLegacyResolvedBasicType(),
containingTableExpression,
columnExpression,
selectablePath,
selectable.isFormula(),
selectable.getCustomReadExpression(),
selectable.getWriteExpr(
basicValue.getResolution().getJdbcMapping(),
dialect,
creationContext.getBootModel()
),
columnDefinition,
length,
arrayLength,
precision,
scale,
temporalPrecision,
isLob,
nullable,
value.isColumnInsertable( 0 ),
value.isColumnUpdateable( 0 ),
propertyAccess,
compositeType.getCascadeStyle( attributeIndex ),
creationProcess
);
columnPosition++;
}
else if ( subtype instanceof AnyType anyType ) {
final var bootValueMapping = (Any) value;
final boolean nullable = bootValueMapping.isNullable();
final boolean insertable = value.isColumnInsertable( 0 );
final boolean updateable = value.isColumnUpdateable( 0 );
final boolean includeInOptimisticLocking = bootPropertyDescriptor.isOptimisticLocked();
final var cascadeStyle = compositeType.getCascadeStyle( attributeIndex );
final var attributeMetadataAccess = new SimpleAttributeMetadata(
propertyAccess,
getMutabilityPlan( updateable ),
nullable,
insertable,
updateable,
includeInOptimisticLocking,
true,
cascadeStyle
);
attributeMapping = new DiscriminatedAssociationAttributeMapping(
navigableRole.append( bootPropertyDescriptor.getName() ),
typeConfiguration.getJavaTypeRegistry().resolveDescriptor( Object.class ),
declarer,
attributeIndex,
attributeIndex,
attributeMetadataAccess,
bootPropertyDescriptor.isLazy() ? FetchTiming.DELAYED : FetchTiming.IMMEDIATE,
propertyAccess,
bootPropertyDescriptor,
anyType,
bootValueMapping,
creationProcess
);
}
else if ( subtype instanceof CompositeType subCompositeType ) {
final int columnSpan =
subCompositeType.getColumnSpan( creationContext.getMetadata() );
final String subTableExpression;
final String[] subRootTableKeyColumnNames;
if ( rootTableKeyColumnNames == null ) {
subTableExpression = rootTableExpression;
subRootTableKeyColumnNames = null;
}
else {
subTableExpression = rootTableExpression;
subRootTableKeyColumnNames = new String[ columnSpan ];
System.arraycopy( rootTableKeyColumnNames, columnPosition, subRootTableKeyColumnNames, 0, columnSpan );
}
attributeMapping = MappingModelCreationHelper.buildEmbeddedAttributeMapping(
bootPropertyDescriptor.getName(),
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
declarer,
subCompositeType,
subTableExpression,
subRootTableKeyColumnNames,
propertyAccess,
compositeType.getCascadeStyle( attributeIndex ),
creationProcess
);
columnPosition += columnSpan;
}
else if ( subtype instanceof CollectionType ) {
attributeMapping = MappingModelCreationHelper.buildPluralAttributeMapping(
bootPropertyDescriptor.getName(),
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
this,
propertyAccess,
compositeType.getCascadeStyle( attributeIndex ),
compositeType.getFetchMode( attributeIndex ),
creationProcess
);
}
else if ( subtype instanceof EntityType ) {
attributeMapping = MappingModelCreationHelper.buildSingularAssociationAttributeMapping(
bootPropertyDescriptor.getName(),
navigableRole.append( bootPropertyDescriptor.getName() ),
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
this,
creationProcess.getEntityPersister( bootDescriptor.getOwner().getEntityName() ),
(EntityType) subtype,
representationStrategy.resolvePropertyAccess( bootPropertyDescriptor ),
compositeType.getCascadeStyle( attributeIndex ),
creationProcess
);
columnPosition += bootPropertyDescriptor.getColumnSpan();
}
else {
throw new MappingException(
String.format(
Locale.ROOT,
"Unable to determine attribute nature : %s#%s",
bootDescriptor.getOwner().getEntityName(),
bootPropertyDescriptor.getName()
)
);
}
attributeConsumer.accept( attributeMapping );
attributeIndex++;
}
completionCallback.success();
return true;
}
protected String determineEmbeddablePrefix() {
var root = getNavigableRole().getParent();
while ( !root.isRoot() ) {
root = root.getParent();
}
return getNavigableRole().getFullPath().substring( root.getFullPath().length() + 1 ) + ".";
}
@Override
public int getNumberOfFetchables() {
return getAttributeMappings().size();
}
@Override
public Fetchable getFetchable(int position) {
return getAttributeMappings().get( position );
}
@Override
public void visitFetchables(Consumer<? super Fetchable> consumer, EntityMappingType treatTargetType) {
forEachAttributeMapping( consumer );
}
@Override
public void visitFetchables(IndexedConsumer<? super Fetchable> indexedConsumer, EntityMappingType treatTargetType) {
this.getAttributeMappings().indexedForEach( indexedConsumer );
}
@Override
public int getNumberOfAttributeMappings() {
return getAttributeMappings().size();
}
@Override
public AttributeMapping getAttributeMapping(int position) {
return getAttributeMappings().get( position );
}
@Override
public AttributeMapping findAttributeMapping(String name) {
final var attributes = getAttributeMappings();
for ( int i = 0; i < attributes.size(); i++ ) {
final var attribute = attributes.get( i );
if ( name.equals( attribute.getAttributeName() ) ) {
return attribute;
}
}
return null;
}
@Override
public AttributeMappingsList getAttributeMappings() {
checkIsReady();
return attributeMappings;
}
private void checkIsReady() {
if ( selectableMappings == null ) {
// This is expected to happen when processing a
// PostInitCallbackEntry because the callbacks
// are not ordered. The exception is caught in
// MappingModelCreationProcess.executePostInitCallbacks()
// and the callback is re-queued.
throw new IllegalStateException( "Not yet ready" );
}
}
@Override
public SelectableMapping getSelectable(int columnIndex) {
return getSelectableMappings().getSelectable( columnIndex );
}
@Override
public int forEachSelectable(SelectableConsumer consumer) {
return getSelectableMappings().forEachSelectable( 0, consumer );
}
@Override
public int forEachSelectable(int offset, SelectableConsumer consumer) {
return getSelectableMappings().forEachSelectable( offset, consumer );
}
@Override
public int getJdbcTypeCount() {
return getSelectableMappings().getJdbcTypeCount();
}
@Override
public int forEachJdbcType(int offset, IndexedConsumer<JdbcMapping> action) {
return getSelectableMappings().forEachSelectable(
offset,
(index, selectable) -> action.accept( index, selectable.getJdbcMapping() )
);
}
@Override
public JdbcMapping getJdbcMapping(int index) {
return getSelectable( index ).getJdbcMapping();
}
@Override
public void forEachAttributeMapping(final IndexedConsumer<? super AttributeMapping> consumer) {
getAttributeMappings().indexedForEach( consumer );
}
@Override
public void forEachAttributeMapping(final Consumer<? super AttributeMapping> action) {
getAttributeMappings().forEach( action );
}
@Override
public ModelPart findSubPart(String name, EntityMappingType treatTargetType) {
return findAttributeMapping( name );
}
@Override
public void forEachSubPart(IndexedConsumer<ModelPart> consumer, EntityMappingType treatTarget) {
final var attributes = getAttributeMappings();
for ( int i = 0; i < attributes.size(); i++ ) {
consumer.accept( i, attributes.get(i) );
}
}
@Override
public void visitSubParts(Consumer<ModelPart> consumer, EntityMappingType treatTargetType) {
forEachAttributeMapping( consumer );
}
@Override
public <X, Y> int breakDownJdbcValues(
Object domainValue,
int offset,
X x,
Y y,
JdbcValueBiConsumer<X, Y> valueConsumer, SharedSessionContractImplementor session) {
int span = 0;
if ( domainValue == null ) {
for ( int i = 0; i < attributeMappings.size(); i++ ) {
final var attribute = attributeMappings.get( i );
span += attribute.breakDownJdbcValues( null, offset + span, x, y, valueConsumer, session );
}
}
else {
for ( int i = 0; i < attributeMappings.size(); i++ ) {
final var attribute = attributeMappings.get( i );
final Object attributeValue = attribute.getValue( domainValue );
span += attribute.breakDownJdbcValues( attributeValue, offset + span, x, y, valueConsumer, session );
}
}
return span;
}
@Override
public Object disassemble(Object value, SharedSessionContractImplementor session) {
if ( value == null ) {
return null;
}
final int size = attributeMappings.size();
final var result = new Object[ size ];
for ( int i = 0; i < size; i++ ) {
final var attributeMapping = attributeMappings.get( i );
final Object object = attributeMapping.getValue( value );
result[i] = attributeMapping.disassemble( object, session );
}
return result;
}
@Override
public void addToCacheKey(MutableCacheKeyBuilder cacheKey, Object value, SharedSessionContractImplementor session) {
final int size = attributeMappings.size();
if ( value == null ) {
for ( int i = 0; i < size; i++ ) {
attributeMappings.get( i ).addToCacheKey( cacheKey, null, session );
}
}
else {
for ( int i = 0; i < size; i++ ) {
final var attributeMapping = attributeMappings.get( i );
attributeMapping.addToCacheKey( cacheKey, attributeMapping.getValue( value ), session );
}
}
if ( isPolymorphic() ) {
final EmbeddableDiscriminatorMapping discriminatorMapping = getDiscriminatorMapping();
final Object discriminatorValue = value != null ?
discriminatorMapping.getDiscriminatorValue( value.getClass().getName() )
: null;
discriminatorMapping.addToCacheKey( cacheKey, discriminatorValue, session );
}
}
@Override
public <X, Y> int forEachDisassembledJdbcValue(
Object value,
int offset,
X x,
Y y,
JdbcValuesBiConsumer<X, Y> valuesConsumer,
SharedSessionContractImplementor session) {
int span = 0;
if ( value == null ) {
for ( int i = 0; i < attributeMappings.size(); i++ ) {
final var mapping = attributeMappings.get( i );
span += mapping.forEachDisassembledJdbcValue( null, span + offset, x, y, valuesConsumer, session );
}
}
else {
final var values = (Object[]) value;
for ( int i = 0; i < attributeMappings.size(); i++ ) {
final var mapping = attributeMappings.get( i );
span += mapping.forEachDisassembledJdbcValue( values[i], span + offset, x, y, valuesConsumer, session );
}
}
return span;
}
@Override
public <X, Y> int forEachJdbcValue(
Object value,
int offset,
X x,
Y y,
JdbcValuesBiConsumer<X, Y> valuesConsumer,
SharedSessionContractImplementor session) {
int span = 0;
if ( value == null ) {
for ( int i = 0; i < attributeMappings.size(); i++ ) {
final var attributeMapping = attributeMappings.get( i );
if ( !(attributeMapping instanceof PluralAttributeMapping) ) {
span += attributeMapping.forEachJdbcValue( null, span + offset, x, y, valuesConsumer, session );
}
}
}
else {
for ( int i = 0; i < attributeMappings.size(); i++ ) {
final var attributeMapping = attributeMappings.get( i );
if ( !(attributeMapping instanceof PluralAttributeMapping) ) {
span += attributeMapping.forEachJdbcValue( getValue( value, i ), span + offset, x, y, valuesConsumer, session );
}
}
}
return span;
}
protected void addAttribute(AttributeMapping attributeMapping) {
// check if we've already seen this attribute...
for ( int i = 0; i < attributeMappings.size(); i++ ) {
final var previous = attributeMappings.get( i );
if ( attributeMapping.getAttributeName().equals( previous.getAttributeName() ) ) {
attributeMappings.setAttributeMapping( i, attributeMapping );
return;
}
}
attributeMappings.add( attributeMapping );
}
protected SelectableMappings getSelectableMappings() {
checkIsReady();
return selectableMappings;
}
protected boolean initColumnMappings() {
final int propertySpan = attributeMappings.size();
final List<SelectableMapping> selectableMappings = CollectionHelper.arrayList( propertySpan );
attributeMappings.indexedForEach(
(index, attributeMapping) -> attributeMapping.forEachSelectable(
(columnIndex, selection) -> selectableMappings.add( selection )
)
);
if ( getDiscriminatorMapping() != null ) {
getDiscriminatorMapping().forEachSelectable( (index, selection) -> selectableMappings.add( selection ) );
}
this.selectableMappings = new SelectableMappingsImpl( selectableMappings.toArray( new SelectableMapping[0] ) );
buildGetterSetterCache();
return true;
}
protected void buildGetterSetterCache() {
final int propertySpan = attributeMappings.size();
final Getter[] getterCache = new Getter[propertySpan];
final Setter[] setterCache = new Setter[propertySpan];
for ( int i = 0; i < propertySpan; i++ ) {
final PropertyAccess propertyAccess = attributeMappings.get( i ).getPropertyAccess();
getterCache[i] = propertyAccess.getGetter();
setterCache[i] = propertyAccess.getSetter();
}
this.getterCache = getterCache;
this.setterCache = setterCache;
}
private static MutabilityPlan<?> getMutabilityPlan(boolean updateable) {
if ( updateable ) {
return new MutabilityPlan<>() {
@Override
public boolean isMutable() {
return true;
}
@Override
public Object deepCopy(Object value) {
return value;
}
@Override
public Serializable disassemble(Object value, SharedSessionContract session) {
throw new UnsupportedOperationException();
}
@Override
public Object assemble(Serializable cached, SharedSessionContract session) {
throw new UnsupportedOperationException();
}
};
}
else {
return ImmutableMutabilityPlan.instance();
}
}
}
| AttributeTypeValidator |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockRecoveryWorker.java | {
"start": 2574,
"end": 2918
} | class ____ {
public static final Logger LOG = DataNode.LOG;
private final DataNode datanode;
private final Configuration conf;
private final DNConf dnConf;
BlockRecoveryWorker(DataNode datanode) {
this.datanode = datanode;
conf = datanode.getConf();
dnConf = datanode.getDnConf();
}
/** A convenient | BlockRecoveryWorker |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/sym/SymbolTableMergingTest.java | {
"start": 948,
"end": 4172
} | class ____ extends JsonFactory
{
public int byteSymbolCount() { return _byteSymbolCanonicalizer.size(); }
public int charSymbolCount() { return _rootCharSymbols.size(); }
}
final static String JSON = "{ \"a\" : 3, \"aaa\" : 4, \"_a\" : 0 }";
@Test
void byteSymbolsWithClose() throws Exception
{
_testWithClose(true);
}
@Test
void byteSymbolsWithEOF() throws Exception
{
MyJsonFactory f = new MyJsonFactory();
JsonParser p = _getParser(f, JSON, true);
while (p.nextToken() != null) {
// shouldn't update before hitting end
assertEquals(0, f.byteSymbolCount());
}
// but now should have it after hitting EOF
assertEquals(3, f.byteSymbolCount());
p.close();
assertEquals(3, f.byteSymbolCount());
}
@Test
void hashCalc() throws Exception
{
CharsToNameCanonicalizer sym = CharsToNameCanonicalizer.createRoot(new JsonFactory());
char[] str1 = "foo".toCharArray();
char[] str2 = " foo ".toCharArray();
assertEquals(sym.calcHash(str1, 0, 3), sym.calcHash(str2, 1, 3));
}
@Test
void charSymbolsWithClose() throws Exception
{
_testWithClose(false);
}
@Test
void charSymbolsWithEOF() throws Exception
{
MyJsonFactory f = new MyJsonFactory();
JsonParser p = _getParser(f, JSON, false);
while (p.nextToken() != null) {
// shouldn't update before hitting end
assertEquals(0, f.charSymbolCount());
}
// but now should have it
assertEquals(3, f.charSymbolCount());
p.close();
assertEquals(3, f.charSymbolCount());
}
/*
/**********************************************************
/* Helper methods
/**********************************************************
*/
private void _testWithClose(boolean useBytes) throws IOException
{
MyJsonFactory f = new MyJsonFactory();
JsonParser p = _getParser(f, JSON, useBytes);
// Let's check 2 names
assertToken(JsonToken.START_OBJECT, p.nextToken());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken());
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
assertToken(JsonToken.PROPERTY_NAME, p.nextToken());
// shouldn't update before close or EOF:
assertEquals(0, useBytes ? f.byteSymbolCount() : f.charSymbolCount());
p.close();
// but should after close
assertEquals(2, useBytes ? f.byteSymbolCount() : f.charSymbolCount());
}
private JsonParser _getParser(MyJsonFactory f, String doc, boolean useBytes) throws IOException
{
JsonParser p;
if (useBytes) {
p = f.createParser(ObjectReadContext.empty(), doc.getBytes("UTF-8"));
assertEquals(UTF8StreamJsonParser.class, p.getClass());
assertEquals(0, f.byteSymbolCount());
} else {
p = f.createParser(ObjectReadContext.empty(), doc);
assertEquals(ReaderBasedJsonParser.class, p.getClass());
assertEquals(0, f.charSymbolCount());
}
return p;
}
}
| MyJsonFactory |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/KafkaShareConsumer.java | {
"start": 24315,
"end": 46853
} | class ____<K, V> implements ShareConsumer<K, V> {
private static final ShareConsumerDelegateCreator CREATOR = new ShareConsumerDelegateCreator();
private final ShareConsumerDelegate<K, V> delegate;
/**
* A consumer is instantiated by providing a set of key-value pairs as configuration. Valid configuration strings
* are documented <a href="http://kafka.apache.org/documentation.html#consumerconfigs" >here</a>. Values can be
* either strings or objects of the appropriate type (for example a numeric configuration would accept either the
* string "42" or the integer 42).
* <p>
* Valid configuration strings are documented at {@link ConsumerConfig}.
* <p>
* Note: after creating a {@code KafkaShareConsumer} you must always {@link #close()} it to avoid resource leaks.
*
* @param configs The consumer configs
*/
public KafkaShareConsumer(Map<String, Object> configs) {
this(configs, null, null);
}
/**
* A consumer is instantiated by providing a {@link java.util.Properties} object as configuration.
* <p>
* Valid configuration strings are documented at {@link ConsumerConfig}.
* <p>
* Note: after creating a {@code KafkaShareConsumer} you must always {@link #close()} it to avoid resource leaks.
*
* @param properties The consumer configuration properties
*/
public KafkaShareConsumer(Properties properties) {
this(properties, null, null);
}
/**
* A consumer is instantiated by providing a {@link java.util.Properties} object as configuration, and a
* key and a value {@link Deserializer}.
* <p>
* Valid configuration strings are documented at {@link ConsumerConfig}.
* <p>
* Note: after creating a {@code KafkaShareConsumer} you must always {@link #close()} it to avoid resource leaks.
*
* @param properties The consumer configuration properties
* @param keyDeserializer The deserializer for key that implements {@link Deserializer}. The configure() method
* won't be called in the consumer when the deserializer is passed in directly.
* @param valueDeserializer The deserializer for value that implements {@link Deserializer}. The configure() method
* won't be called in the consumer when the deserializer is passed in directly.
*/
public KafkaShareConsumer(Properties properties,
Deserializer<K> keyDeserializer,
Deserializer<V> valueDeserializer) {
this(propsToMap(properties), keyDeserializer, valueDeserializer);
}
/**
* A consumer is instantiated by providing a set of key-value pairs as configuration, and a key and a value {@link Deserializer}.
* <p>
* Valid configuration strings are documented at {@link ConsumerConfig}.
* <p>
* Note: after creating a {@code KafkaShareConsumer} you must always {@link #close()} it to avoid resource leaks.
*
* @param configs The consumer configs
* @param keyDeserializer The deserializer for key that implements {@link Deserializer}. The configure() method
* won't be called in the consumer when the deserializer is passed in directly.
* @param valueDeserializer The deserializer for value that implements {@link Deserializer}. The configure() method
* won't be called in the consumer when the deserializer is passed in directly.
*/
public KafkaShareConsumer(Map<String, Object> configs,
Deserializer<K> keyDeserializer,
Deserializer<V> valueDeserializer) {
this(new ShareConsumerConfig(ShareConsumerConfig.appendDeserializerToConfig(configs, keyDeserializer, valueDeserializer)),
keyDeserializer, valueDeserializer);
}
KafkaShareConsumer(ShareConsumerConfig config,
Deserializer<K> keyDeserializer,
Deserializer<V> valueDeserializer) {
delegate = CREATOR.create(config, keyDeserializer, valueDeserializer);
}
KafkaShareConsumer(final LogContext logContext,
final String clientId,
final String groupId,
final ShareConsumerConfig config,
final Deserializer<K> keyDeserializer,
final Deserializer<V> valueDeserializer,
final Time time,
final KafkaClient client,
final SubscriptionState subscriptions,
final ShareConsumerMetadata metadata) {
delegate = CREATOR.create(
logContext, clientId, groupId, config, keyDeserializer, valueDeserializer,
time, client, subscriptions, metadata);
}
/**
* Get the current subscription. Will return the same topics used in the most recent call to
* {@link #subscribe(Collection)}, or an empty set if no such call has been made.
*
* @return The set of topics currently subscribed to
*/
@Override
public Set<String> subscription() {
return delegate.subscription();
}
/**
* Subscribe to the given list of topics to get dynamically assigned partitions.
* <b>Topic subscriptions are not incremental. This list will replace the current
* assignment, if there is one.</b> If the given list of topics is empty, it is treated the same as {@link #unsubscribe()}.
*
* <p>
* As part of group management, the coordinator will keep track of the list of consumers that belong to a particular
* group and will trigger a rebalance operation if any one of the following events are triggered:
* <ul>
* <li>A member joins or leaves the share group
* <li>An existing member of the share group is shut down or fails
* <li>The number of partitions changes for any of the subscribed topics
* <li>A subscribed topic is created or deleted
* </ul>
*
* @param topics The list of topics to subscribe to
*
* @throws IllegalArgumentException if topics is null or contains null or empty elements
* @throws KafkaException for any other unrecoverable errors
*/
@Override
public void subscribe(Collection<String> topics) {
delegate.subscribe(topics);
}
/**
* Unsubscribe from topics currently subscribed with {@link #subscribe(Collection)}.
*
* @throws KafkaException for any other unrecoverable errors
*/
@Override
public void unsubscribe() {
delegate.unsubscribe();
}
/**
* Deliver records for the topics specified using {@link #subscribe(Collection)}. It is an error to not have
* subscribed to any topics before polling for data.
*
* <p>
* This method returns immediately if there are records available. Otherwise, it will await the passed timeout.
* If the timeout expires, an empty record set will be returned.
*
* @param timeout The maximum time to block (must not be greater than {@link Long#MAX_VALUE} milliseconds)
*
* @return map of topic to records
*
* @throws AuthenticationException if authentication fails. See the exception for more details
* @throws AuthorizationException if caller lacks Read access to any of the subscribed
* topics or to the share group. See the exception for more details
* @throws IllegalArgumentException if the timeout value is negative
* @throws IllegalStateException if the consumer is not subscribed to any topics, or it is using
* explicit acknowledgement and has not acknowledged all records previously delivered
* @throws ArithmeticException if the timeout is greater than {@link Long#MAX_VALUE} milliseconds.
* @throws InvalidTopicException if the current subscription contains any invalid
* topic (per {@link org.apache.kafka.common.internals.Topic#validate(String)})
* @throws WakeupException if {@link #wakeup()} is called before or while this method is called
* @throws InterruptException if the calling thread is interrupted before or while this method is called
* @throws KafkaException for any other unrecoverable errors
*/
@Override
public ConsumerRecords<K, V> poll(Duration timeout) {
return delegate.poll(timeout);
}
/**
* Acknowledge successful delivery of a record returned on the last {@link #poll(Duration)} call.
* The acknowledgement is committed on the next {@link #commitSync()}, {@link #commitAsync()} or
* {@link #poll(Duration)} call.
* <p>This method can only be used if the consumer is using <b>explicit acknowledgement</b>.
*
* @param record The record to acknowledge
*
* @throws IllegalStateException if the record is not waiting to be acknowledged, or the consumer is not using
* explicit acknowledgement
*/
@Override
public void acknowledge(ConsumerRecord<K, V> record) {
delegate.acknowledge(record);
}
/**
* Acknowledge delivery of a record returned on the last {@link #poll(Duration)} call indicating whether
* it was processed successfully. The acknowledgement is committed on the next {@link #commitSync()},
* {@link #commitAsync()} or {@link #poll(Duration)} call.
* <p>This method can only be used if the consumer is using <b>explicit acknowledgement</b>.
*
* @param record The record to acknowledge
* @param type The acknowledge type which indicates whether it was processed successfully
*
* @throws IllegalStateException if the record is not waiting to be acknowledged, or the consumer is not using
* explicit acknowledgement
*/
@Override
public void acknowledge(ConsumerRecord<K, V> record, AcknowledgeType type) {
delegate.acknowledge(record, type);
}
/**
* Acknowledge delivery of a record returned on the last {@link #poll(Duration)} call indicating whether
* it was processed successfully. The acknowledgement is committed on the next {@link #commitSync()},
* {@link #commitAsync()} or {@link #poll(Duration)} call.
* <p>This method can only be used if the consumer is using <b>explicit acknowledgement</b>.
* <p>It provides an alternative to {@link #acknowledge(ConsumerRecord, AcknowledgeType)} for
* situations where the {@link ConsumerRecord} is not available, such as when the record could not be deserialized.
*
* @param topic The topic of the record to acknowledge
* @param partition The partition of the record to acknowledge
* @param offset The offset of the record to acknowledge
* @param type The acknowledge type which indicates whether it was processed successfully
*
* @throws IllegalStateException if the record is not waiting to be acknowledged, or the consumer is not using
* explicit acknowledgement
*/
@Override
public void acknowledge(String topic, int partition, long offset, AcknowledgeType type) {
delegate.acknowledge(topic, partition, offset, type);
}
/**
* Commit the acknowledgements for the records returned. If the consumer is using explicit acknowledgement,
* the acknowledgements to commit have been indicated using {@link #acknowledge(ConsumerRecord)} or
* {@link #acknowledge(ConsumerRecord, AcknowledgeType)}. If the consumer is using implicit acknowledgement,
* all the records returned by the latest call to {@link #poll(Duration)} are acknowledged.
*
* <p>
* This is a synchronous commit and will block until either the commit succeeds, an unrecoverable error is
* encountered (in which case it is thrown to the caller), or the timeout specified by {@code default.api.timeout.ms}
* expires.
*
* @return A map of the results for each topic-partition for which delivery was acknowledged.
* If the acknowledgement failed for a topic-partition, an exception is present.
*
* @throws WakeupException if {@link #wakeup()} is called before or while this method is called
* @throws InterruptException if the thread is interrupted while blocked
* @throws KafkaException for any other unrecoverable errors
*/
@Override
public Map<TopicIdPartition, Optional<KafkaException>> commitSync() {
return delegate.commitSync();
}
/**
* Commit the acknowledgements for the records returned. If the consumer is using explicit acknowledgement,
* the acknowledgements to commit have been indicated using {@link #acknowledge(ConsumerRecord)} or
* {@link #acknowledge(ConsumerRecord, AcknowledgeType)}. If the consumer is using implicit acknowledgement,
* all the records returned by the latest call to {@link #poll(Duration)} are acknowledged.
*
* <p>
* This is a synchronous commit and will block until either the commit succeeds, an unrecoverable error is
* encountered (in which case it is thrown to the caller), or the timeout expires.
*
* @param timeout The maximum amount of time to await completion of the acknowledgement
*
* @return A map of the results for each topic-partition for which delivery was acknowledged.
* If the acknowledgement failed for a topic-partition, an exception is present.
*
* @throws IllegalArgumentException if the {@code timeout} is negative
* @throws WakeupException if {@link #wakeup()} is called before or while this method is called
* @throws InterruptException if the thread is interrupted while blocked
* @throws KafkaException for any other unrecoverable errors
*/
@Override
public Map<TopicIdPartition, Optional<KafkaException>> commitSync(Duration timeout) {
return delegate.commitSync(timeout);
}
/**
* Commit the acknowledgements for the records returned. If the consumer is using explicit acknowledgement,
* the acknowledgements to commit have been indicated using {@link #acknowledge(ConsumerRecord)} or
* {@link #acknowledge(ConsumerRecord, AcknowledgeType)}. If the consumer is using implicit acknowledgement,
* all the records returned by the latest call to {@link #poll(Duration)} are acknowledged.
*
* @throws KafkaException for any other unrecoverable errors
*/
@Override
public void commitAsync() {
delegate.commitAsync();
}
/**
* Sets the acknowledgement commit callback which can be used to handle acknowledgement completion.
*
* @param callback The acknowledgement commit callback
*/
@Override
public void setAcknowledgementCommitCallback(AcknowledgementCommitCallback callback) {
delegate.setAcknowledgementCommitCallback(callback);
}
/**
* Determines the client's unique client instance ID used for telemetry. This ID is unique to
* this specific client instance and will not change after it is initially generated.
* The ID is useful for correlating client operations with telemetry sent to the broker and
* to its eventual monitoring destinations.
* <p>
* If telemetry is enabled, this will first require a connection to the cluster to generate
* the unique client instance ID. This method waits up to {@code timeout} for the consumer
* client to complete the request.
* <p>
* Client telemetry is controlled by the {@link ConsumerConfig#ENABLE_METRICS_PUSH_CONFIG}
* configuration property.
*
* @param timeout The maximum time to wait for consumer client to determine its client instance ID.
* The value must be non-negative. Specifying a timeout of zero means do not
* wait for the initial request to complete if it hasn't already.
*
* @return The client's assigned instance id used for metrics collection.
*
* @throws IllegalArgumentException if the {@code timeout} is negative
* @throws IllegalStateException if telemetry is not enabled
* @throws WakeupException if {@link #wakeup()} is called before or while this method is called
* @throws InterruptException if the thread is interrupted while blocked
* @throws KafkaException if an unexpected error occurs while trying to determine the client
* instance ID, though this error does not necessarily imply the
* consumer client is otherwise unusable
*/
@Override
public Uuid clientInstanceId(Duration timeout) {
return delegate.clientInstanceId(timeout);
}
/**
* Returns the acquisition lock timeout for the last set of records fetched from the cluster.
*
* @return The acquisition lock timeout in milliseconds, or {@code Optional.empty()} if the timeout is not known.
*/
@Override
public Optional<Integer> acquisitionLockTimeoutMs() {
return delegate.acquisitionLockTimeoutMs();
}
/**
* Get the metrics kept by the consumer
*/
@Override
public Map<MetricName, ? extends Metric> metrics() {
return delegate.metrics();
}
/**
* Add the provided application metric for subscription. This metric will be added to this client's metrics
* that are available for subscription and sent as telemetry data to the broker.
* The provided metric must map to an OTLP metric data point type in the OpenTelemetry v1 metrics protobuf message types.
* Specifically, the metric should be one of the following:
* <ul>
* <li>
* Sum: Monotonic total count meter (Counter). Suitable for metrics like total number of X, e.g., total bytes sent.
* </li>
* <li>
* Gauge: Non-monotonic current value meter (UpDownCounter). Suitable for metrics like current value of Y, e.g., current queue count.
* </li>
* </ul>
* Metrics not matching these types are silently ignored. Executing this method for a previously registered metric
* is a benign operation and results in updating that metric's entry.
*
* @param metric The application metric to register
*/
@Override
public void registerMetricForSubscription(KafkaMetric metric) {
delegate.registerMetricForSubscription(metric);
}
/**
* Remove the provided application metric for subscription. This metric is removed from this client's metrics
* and will not be available for subscription any longer. Executing this method with a metric that has not been registered is a
* benign operation and does not result in any action taken (no-op).
*
* @param metric The application metric to remove
*/
@Override
public void unregisterMetricFromSubscription(KafkaMetric metric) {
delegate.unregisterMetricFromSubscription(metric);
}
/**
* Close the consumer, waiting for up to the default timeout of 30 seconds for any needed cleanup.
* This will commit acknowledgements if possible within the default timeout.
* See {@link #close(Duration)} for details. Note that {@link #wakeup()} cannot be used to interrupt close.
* <p>
* This close operation will attempt all shutdown steps even if one of them fails.
* It logs all encountered errors, continues to execute the next steps, and finally throws the first error found.
*
* @throws WakeupException if {@link #wakeup()} is called before or while this method is called
* @throws InterruptException if the thread is interrupted before or while this method is called
* @throws KafkaException for any other error during close
*/
@Override
public void close() {
delegate.close();
}
/**
* Tries to close the consumer cleanly within the specified timeout. This method waits up to
* {@code timeout} for the consumer to complete acknowledgements and leave the group.
* If the consumer is unable to complete acknowledgements and gracefully leave the group
* before the timeout expires, the consumer is force closed. Note that {@link #wakeup()} cannot be
* used to interrupt close.
* <p>
* The actual maximum wait time is bounded by the {@link ConsumerConfig#REQUEST_TIMEOUT_MS_CONFIG} setting, which
* only applies to operations performed with the broker (coordinator-related requests).
* Even if a larger timeout is specified, the consumer will not wait longer than
* {@link ConsumerConfig#REQUEST_TIMEOUT_MS_CONFIG} for these requests to complete during the close operation.
* Note that the execution time of callbacks (such as {@link AcknowledgementCommitCallback}) do not consume time from the close timeout.
* <p>
* This close operation will attempt all shutdown steps even if one of them fails.
* It logs all encountered errors, continues to execute the next steps, and finally throws the first error found.
*
* @param timeout The maximum time to wait for consumer to close gracefully. The value must be
* non-negative. Specifying a timeout of zero means do not wait for pending requests to complete.
* @throws IllegalArgumentException if the {@code timeout} is negative
* @throws WakeupException if {@link #wakeup()} is called before or while this method is called
* @throws InterruptException if the thread is interrupted before or while this method is called
* @throws KafkaException for any other error during close
*/
@Override
public void close(Duration timeout) {
delegate.close(timeout);
}
/**
* Wake up the consumer. This method is thread-safe and is useful in particular to abort a long poll.
* The thread which is blocking in an operation will throw {@link WakeupException}.
* If no thread is blocking in a method which can throw {@link WakeupException},
* the next call to such a method will raise it instead.
*/
@Override
public void wakeup() {
delegate.wakeup();
}
// Functions below are for testing only
String clientId() {
return delegate.clientId();
}
Metrics metricsRegistry() {
return delegate.metricsRegistry();
}
KafkaShareConsumerMetrics kafkaShareConsumerMetrics() {
return delegate.kafkaShareConsumerMetrics();
}
}
| KafkaShareConsumer |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/fs/ClosingFSDataInputStream.java | {
"start": 969,
"end": 1233
} | class ____ a {@link org.apache.flink.util.WrappingProxy} for {@link FSDataInputStream} that is
* used to implement a safety net against unclosed streams.
*
* <p>See {@link SafetyNetCloseableRegistry} for more details on how this is utilized.
*/
@Internal
public | is |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/context/optimized/OptimizeContextsAutoTest.java | {
"start": 443,
"end": 1164
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addClasses(SimpleBean.class))
.overrideConfigKey("quarkus.arc.optimize-contexts", "auto");
@Inject
SimpleBean bean;
@Test
public void testContexts() {
assertTrue(bean.ping());
for (ComponentsProvider componentsProvider : ServiceLoader.load(ComponentsProvider.class)) {
// We have less than 1000 beans
assertFalse(componentsProvider.getComponents(Arc.container().getCurrentContextFactory()).getContextInstances()
.isEmpty());
}
}
}
| OptimizeContextsAutoTest |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shorts/Shorts_assertIsOdd_Test.java | {
"start": 1300,
"end": 2584
} | class ____ extends ShortsBaseTest {
@ParameterizedTest
@ValueSource(shorts = { 1, 3, -5, 7 })
void should_pass_since_actual_is_odd(short actual) {
// WHEN/THEN
shorts.assertIsOdd(someInfo(), actual);
}
@ParameterizedTest
@ValueSource(shorts = { 0, 2, -4, 6 })
void should_fail_since_actual_is_not_odd(short actual) {
// WHEN
var assertionError = expectAssertionError(() -> shorts.assertIsOdd(someInfo(), actual));
// THEN
then(assertionError).hasMessage(shouldBeOdd(actual).create());
}
@ParameterizedTest
@ValueSource(shorts = { 1, 3, -5, 7 })
void should_pass_since_actual_is_odd_whatever_custom_comparison_strategy_is(short actual) {
// WHEN/THEN
shortsWithAbsValueComparisonStrategy.assertIsOdd(someInfo(), actual);
}
@ParameterizedTest
@ValueSource(shorts = { 0, 2, -4, 6 })
void should_fail_since_actual_is_not_odd_whatever_custom_comparison_strategy_is(short actual) {
// WHEN
var assertionError = expectAssertionError(() -> shortsWithAbsValueComparisonStrategy.assertIsOdd(someInfo(),
actual));
// THEN
then(assertionError).hasMessage(shouldBeOdd(actual).create());
}
}
| Shorts_assertIsOdd_Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierRecordingService.java | {
"start": 2944,
"end": 5251
} | class ____ {
private String currentAction;
private long startMillis;
private boolean recording;
private SubscribableListener<Void> currentListener;
private final List<Tuple<String, Long>> recordings = new LinkedList<>();
private final ThreadPool threadPool;
private final TimeValue debugLoggingTimeout;
Recorder(ThreadPool threadPool, TimeValue debugLoggingTimeout) {
this.threadPool = threadPool;
this.debugLoggingTimeout = debugLoggingTimeout;
}
Releasable record(String action) {
if (recording) {
throw new IllegalStateException("already recording");
}
this.recording = true;
this.currentAction = action;
this.startMillis = threadPool.rawRelativeTimeInMillis();
if (logger.isDebugEnabled()) {
currentListener = new SubscribableListener<>();
currentListener.addTimeout(debugLoggingTimeout, threadPool, threadPool.generic());
currentListener.addListener(new ActionListener<>() {
@Override
public void onResponse(Void unused) {}
@Override
public void onFailure(Exception e) {
assert e instanceof ElasticsearchTimeoutException : e; // didn't complete in time
HotThreads.logLocalHotThreads(
logger,
Level.DEBUG,
"hot threads while applying cluster state [" + currentAction + ']',
ReferenceDocs.LOGGING
);
}
});
}
return this::stop;
}
void stop() {
recording = false;
long elapsedMillis = threadPool.rawRelativeTimeInMillis() - this.startMillis;
recordings.add(new Tuple<>(currentAction, elapsedMillis));
if (currentListener != null) {
currentListener.onResponse(null);
currentListener = null;
}
}
List<Tuple<String, Long>> getRecordings() {
return recordings;
}
}
public static | Recorder |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefLookup.java | {
"start": 737,
"end": 3789
} | class ____ implements ReleasableIterator<BytesRefBlock> {
private final BytesRef firstScratch = new BytesRef();
private final BytesRef valueScratch = new BytesRef();
private final BytesRefBlock values;
private final IntBlock positions;
private final long targetByteSize;
private int position;
private BytesRef first;
private int valuesInPosition;
BytesRefLookup(BytesRefBlock values, IntBlock positions, ByteSizeValue targetBlockSize) {
values.incRef();
positions.incRef();
this.values = values;
this.positions = positions;
this.targetByteSize = targetBlockSize.getBytes();
}
@Override
public boolean hasNext() {
return position < positions.getPositionCount();
}
@Override
public BytesRefBlock next() {
try (BytesRefBlock.Builder builder = positions.blockFactory().newBytesRefBlockBuilder(positions.getTotalValueCount())) {
int count = 0;
while (position < positions.getPositionCount()) {
int start = positions.getFirstValueIndex(position);
int end = start + positions.getValueCount(position);
valuesInPosition = 0;
for (int i = start; i < end; i++) {
copy(builder, positions.getInt(i));
}
switch (valuesInPosition) {
case 0 -> builder.appendNull();
case 1 -> builder.appendBytesRef(first);
default -> builder.endPositionEntry();
}
position++;
// TOOD what if the estimate is super huge? should we break even with less than MIN_TARGET?
if (++count > Operator.MIN_TARGET_PAGE_SIZE && builder.estimatedBytes() < targetByteSize) {
break;
}
}
return builder.build();
}
}
private void copy(BytesRefBlock.Builder builder, int valuePosition) {
if (valuePosition >= values.getPositionCount()) {
return;
}
int start = values.getFirstValueIndex(valuePosition);
int end = start + values.getValueCount(valuePosition);
for (int i = start; i < end; i++) {
if (valuesInPosition == 0) {
first = values.getBytesRef(i, firstScratch);
valuesInPosition++;
continue;
}
if (valuesInPosition == 1) {
builder.beginPositionEntry();
builder.appendBytesRef(first);
}
if (valuesInPosition > Block.MAX_LOOKUP) {
// TODO replace this with a warning and break
throw new IllegalArgumentException("Found a single entry with " + valuesInPosition + " entries");
}
builder.appendBytesRef(values.getBytesRef(i, valueScratch));
valuesInPosition++;
}
}
@Override
public void close() {
Releasables.close(values, positions);
}
}
| BytesRefLookup |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/main/java/org/apache/dubbo/config/spring/beans/factory/annotation/ServicePackagesHolder.java | {
"start": 982,
"end": 2678
} | class ____ {
public static final String BEAN_NAME = "dubboServicePackagesHolder";
private final Set<String> scannedPackages = new HashSet<>();
private final Set<String> scannedClasses = new HashSet<>();
public void addScannedPackage(String apackage) {
apackage = normalizePackage(apackage);
synchronized (scannedPackages) {
scannedPackages.add(apackage);
}
}
public boolean isPackageScanned(String packageName) {
packageName = normalizePackage(packageName);
synchronized (scannedPackages) {
if (scannedPackages.contains(packageName)) {
return true;
}
for (String scannedPackage : scannedPackages) {
if (isSubPackage(packageName, scannedPackage)) {
return true;
}
}
}
return false;
}
public void addScannedClass(String className) {
synchronized (scannedClasses) {
scannedClasses.add(className);
}
}
public boolean isClassScanned(String className) {
synchronized (scannedClasses) {
return scannedClasses.contains(className);
}
}
/**
* Whether test package is sub package of parent package
* @param testPkg
* @param parent
* @return
*/
private boolean isSubPackage(String testPkg, String parent) {
// child pkg startsWith parent pkg
return testPkg.startsWith(parent);
}
private String normalizePackage(String apackage) {
if (!apackage.endsWith(".")) {
apackage += ".";
}
return apackage;
}
}
| ServicePackagesHolder |
java | spring-projects__spring-framework | spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/HasMap.java | {
"start": 975,
"end": 2593
} | class ____ {
private Map<?, ?> map;
private Set<?> set;
private Properties props;
private Object[] objectArray;
private Integer[] intArray;
private Class<?>[] classArray;
private List<Class<?>> classList;
private IdentityHashMap<?, ?> identityMap;
private CopyOnWriteArraySet<?> concurrentSet;
private HasMap() {
}
public Map<?, ?> getMap() {
return map;
}
public void setMap(Map<?, ?> map) {
this.map = map;
}
public Set<?> getSet() {
return set;
}
public void setSet(Set<?> set) {
this.set = set;
}
public Properties getProps() {
return props;
}
public void setProps(Properties props) {
this.props = props;
}
public Object[] getObjectArray() {
return objectArray;
}
public void setObjectArray(Object[] objectArray) {
this.objectArray = objectArray;
}
public Integer[] getIntegerArray() {
return intArray;
}
public void setIntegerArray(Integer[] is) {
intArray = is;
}
public Class<?>[] getClassArray() {
return classArray;
}
public void setClassArray(Class<?>[] classArray) {
this.classArray = classArray;
}
public List<Class<?>> getClassList() {
return classList;
}
public void setClassList(List<Class<?>> classList) {
this.classList = classList;
}
public IdentityHashMap<?, ?> getIdentityMap() {
return identityMap;
}
public void setIdentityMap(IdentityHashMap<?, ?> identityMap) {
this.identityMap = identityMap;
}
public CopyOnWriteArraySet<?> getConcurrentSet() {
return concurrentSet;
}
public void setConcurrentSet(CopyOnWriteArraySet<?> concurrentSet) {
this.concurrentSet = concurrentSet;
}
}
| HasMap |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/filter/UnknownPropertyDeserTest.java | {
"start": 3200,
"end": 3254
} | class ____ {
public int y, z;
}
static | YZ |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/util/HtmlCharacterEntityReferences.java | {
"start": 1284,
"end": 2285
} | class ____ {
private static final String PROPERTIES_FILE = "HtmlCharacterEntityReferences.properties";
static final char REFERENCE_START = '&';
static final String DECIMAL_REFERENCE_START = "&#";
static final String HEX_REFERENCE_START = "&#x";
static final char REFERENCE_END = ';';
static final char CHAR_NULL = (char) -1;
private final String[] characterToEntityReferenceMap = new String[3000];
private final Map<String, Character> entityReferenceToCharacterMap = new HashMap<>(512);
/**
* Returns a new set of character entity references reflecting the HTML 4.0 character set.
*/
public HtmlCharacterEntityReferences() {
Properties entityReferences = new Properties();
// Load reference definition file
InputStream is = HtmlCharacterEntityReferences.class.getResourceAsStream(PROPERTIES_FILE);
if (is == null) {
throw new IllegalStateException(
"Cannot find reference definition file [HtmlCharacterEntityReferences.properties] as | HtmlCharacterEntityReferences |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/deser/OffsetDateTimeDeserTest.java | {
"start": 1462,
"end": 1737
} | class ____ {
@JsonFormat(
pattern="yyyy_MM_dd'T'HH:mm:ssZ",
shape=JsonFormat.Shape.STRING)
public OffsetDateTime value;
public Wrapper() { }
public Wrapper(OffsetDateTime v) { value = v; }
}
static | Wrapper |
java | google__guava | android/guava-tests/benchmark/com/google/common/util/concurrent/MonitorBasedArrayBlockingQueue.java | {
"start": 1954,
"end": 2304
} | class ____ an optional fairness policy for ordering waiting producer and consumer
* threads. By default, this ordering is not guaranteed. However, a queue constructed with fairness
* set to {@code true} grants threads access in FIFO order. Fairness generally decreases throughput
* but reduces variability and avoids starvation.
*
* <p>This | supports |
java | apache__spark | common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java | {
"start": 17163,
"end": 19643
} | class ____ {
public final String appId;
public final String execId;
@JsonCreator
public AppExecId(@JsonProperty("appId") String appId, @JsonProperty("execId") String execId) {
this.appId = appId;
this.execId = execId;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AppExecId appExecId = (AppExecId) o;
return Objects.equals(appId, appExecId.appId) && Objects.equals(execId, appExecId.execId);
}
@Override
public int hashCode() {
return Objects.hash(appId, execId);
}
@Override
public String toString() {
return "ExternalShuffleBlockResolver[appId=" + appId + ",execId=" + execId + "]";
}
}
private static byte[] dbAppExecKey(AppExecId appExecId) throws IOException {
// we stick a common prefix on all the keys so we can find them in the DB
String appExecJson = mapper.writeValueAsString(appExecId);
String key = (APP_KEY_PREFIX + ";" + appExecJson);
return key.getBytes(StandardCharsets.UTF_8);
}
private static AppExecId parseDbAppExecKey(String s) throws IOException {
if (!s.startsWith(APP_KEY_PREFIX)) {
throw new IllegalArgumentException("expected a string starting with " + APP_KEY_PREFIX);
}
String json = s.substring(APP_KEY_PREFIX.length() + 1);
AppExecId parsed = mapper.readValue(json, AppExecId.class);
return parsed;
}
@VisibleForTesting
static ConcurrentMap<AppExecId, ExecutorShuffleInfo> reloadRegisteredExecutors(DB db)
throws IOException {
ConcurrentMap<AppExecId, ExecutorShuffleInfo> registeredExecutors = new ConcurrentHashMap<>();
if (db != null) {
try (DBIterator itr = db.iterator()) {
itr.seek(APP_KEY_PREFIX.getBytes(StandardCharsets.UTF_8));
while (itr.hasNext()) {
Map.Entry<byte[], byte[]> e = itr.next();
String key = new String(e.getKey(), StandardCharsets.UTF_8);
if (!key.startsWith(APP_KEY_PREFIX)) {
break;
}
AppExecId id = parseDbAppExecKey(key);
logger.info("Reloading registered executors: {}",
MDC.of(LogKeys.APP_EXECUTOR_ID, id));
ExecutorShuffleInfo shuffleInfo =
mapper.readValue(e.getValue(), ExecutorShuffleInfo.class);
registeredExecutors.put(id, shuffleInfo);
}
}
}
return registeredExecutors;
}
}
| AppExecId |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-cos/src/test/java/org/apache/hadoop/fs/cosn/contract/TestCosNContractRootDir.java | {
"start": 1092,
"end": 1309
} | class ____
extends AbstractContractRootDirectoryTest {
@Override
protected AbstractFSContract createContract(Configuration configuration) {
return new CosNContract(configuration);
}
}
| TestCosNContractRootDir |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java | {
"start": 37707,
"end": 39269
} | class ____ implements Transport.Connection {
private final DiscoveryNode node;
MockConnection(DiscoveryNode node) {
this.node = node;
}
@Override
public DiscoveryNode getNode() {
return node;
}
@Override
public TransportVersion getTransportVersion() {
return TransportVersion.current();
}
@Override
public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
throws TransportException {
throw new UnsupportedOperationException();
}
@Override
public void addCloseListener(ActionListener<Void> listener) {}
@Override
public void addRemovedListener(ActionListener<Void> listener) {}
@Override
public boolean isClosed() {
return false;
}
@Override
public void close() {
throw new UnsupportedOperationException();
}
@Override
public void onRemoved() {
throw new UnsupportedOperationException();
}
@Override
public void incRef() {}
@Override
public boolean tryIncRef() {
return true;
}
@Override
public boolean decRef() {
assert false : "shouldn't release a mock connection";
return false;
}
@Override
public boolean hasReferences() {
return true;
}
}
}
| MockConnection |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/observers/ResourceMaybeObserverTest.java | {
"start": 1101,
"end": 6223
} | class ____<T> extends ResourceMaybeObserver<T> {
T value;
final List<Throwable> errors = new ArrayList<>();
int complete;
int start;
@Override
protected void onStart() {
super.onStart();
start++;
}
@Override
public void onSuccess(final T value) {
this.value = value;
dispose();
}
@Override
public void onComplete() {
complete++;
dispose();
}
@Override
public void onError(Throwable e) {
errors.add(e);
dispose();
}
}
@Test(expected = NullPointerException.class)
public void nullResource() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
rmo.add(null);
}
@Test
public void addResources() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
assertFalse(rmo.isDisposed());
Disposable d = Disposable.empty();
rmo.add(d);
assertFalse(d.isDisposed());
rmo.dispose();
assertTrue(rmo.isDisposed());
assertTrue(d.isDisposed());
rmo.dispose();
assertTrue(rmo.isDisposed());
assertTrue(d.isDisposed());
}
@Test
public void onCompleteCleansUp() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
assertFalse(rmo.isDisposed());
Disposable d = Disposable.empty();
rmo.add(d);
assertFalse(d.isDisposed());
rmo.onComplete();
assertTrue(rmo.isDisposed());
assertTrue(d.isDisposed());
}
@Test
public void onSuccessCleansUp() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
assertFalse(rmo.isDisposed());
Disposable d = Disposable.empty();
rmo.add(d);
assertFalse(d.isDisposed());
rmo.onSuccess(1);
assertTrue(rmo.isDisposed());
assertTrue(d.isDisposed());
}
@Test
public void onErrorCleansUp() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
assertFalse(rmo.isDisposed());
Disposable d = Disposable.empty();
rmo.add(d);
assertFalse(d.isDisposed());
rmo.onError(new TestException());
assertTrue(rmo.isDisposed());
assertTrue(d.isDisposed());
}
@Test
public void normal() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
assertFalse(rmo.isDisposed());
assertEquals(0, rmo.start);
assertNull(rmo.value);
assertTrue(rmo.errors.isEmpty());
Maybe.just(1).subscribe(rmo);
assertTrue(rmo.isDisposed());
assertEquals(1, rmo.start);
assertEquals(Integer.valueOf(1), rmo.value);
assertEquals(0, rmo.complete);
assertTrue(rmo.errors.isEmpty());
}
@Test
public void empty() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
assertFalse(rmo.isDisposed());
assertEquals(0, rmo.start);
assertNull(rmo.value);
assertTrue(rmo.errors.isEmpty());
Maybe.<Integer>empty().subscribe(rmo);
assertTrue(rmo.isDisposed());
assertEquals(1, rmo.start);
assertNull(rmo.value);
assertEquals(1, rmo.complete);
assertTrue(rmo.errors.isEmpty());
}
@Test
public void error() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
assertFalse(rmo.isDisposed());
assertEquals(0, rmo.start);
assertNull(rmo.value);
assertTrue(rmo.errors.isEmpty());
final RuntimeException error = new RuntimeException("error");
Maybe.<Integer>error(error).subscribe(rmo);
assertTrue(rmo.isDisposed());
assertEquals(1, rmo.start);
assertNull(rmo.value);
assertEquals(0, rmo.complete);
assertEquals(1, rmo.errors.size());
assertTrue(rmo.errors.contains(error));
}
@Test
public void startOnce() {
List<Throwable> error = TestHelper.trackPluginErrors();
try {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
rmo.onSubscribe(Disposable.empty());
Disposable d = Disposable.empty();
rmo.onSubscribe(d);
assertTrue(d.isDisposed());
assertEquals(1, rmo.start);
TestHelper.assertError(error, 0, IllegalStateException.class, EndConsumerHelper.composeMessage(rmo.getClass().getName()));
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void dispose() {
TestResourceMaybeObserver<Integer> rmo = new TestResourceMaybeObserver<>();
rmo.dispose();
Disposable d = Disposable.empty();
rmo.onSubscribe(d);
assertTrue(d.isDisposed());
assertEquals(0, rmo.start);
}
}
| TestResourceMaybeObserver |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/config/StandardConfigDataLoaderTests.java | {
"start": 2388,
"end": 2613
} | class ____ resource [application.yml]' "
+ "via location 'classpath:application.yml' (document #0)");
assertThat(source1.getProperty("foo")).isEqualTo("bar");
assertThat(source2.getName()).isEqualTo("Config resource ' | path |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ReflectionUtils.java | {
"start": 59568,
"end": 62137
} | interface ____,
* excluding Object.
*/
private static List<Method> findAllMethodsInHierarchy(Class<?> clazz, HierarchyTraversalMode traversalMode) {
Preconditions.notNull(clazz, "Class must not be null");
Preconditions.notNull(traversalMode, "HierarchyTraversalMode must not be null");
// @formatter:off
Method[] localMethods = getDeclaredMethods(clazz, traversalMode).stream()
.filter(method -> !method.isSynthetic())
.toArray(Method[]::new);
Method[] superclassMethods = getSuperclassMethods(clazz, traversalMode).stream()
.filter(method -> isNotOverriddenByLocalMethods(method, localMethods))
.toArray(Method[]::new);
Method[] interfaceMethods = getInterfaceMethods(clazz, traversalMode).stream()
.filter(method -> isNotOverriddenByLocalMethods(method, localMethods))
.toArray(Method[]::new);
// @formatter:on
List<Method> methods = new ArrayList<>(
superclassMethods.length + interfaceMethods.length + localMethods.length);
if (traversalMode == TOP_DOWN) {
Collections.addAll(methods, superclassMethods);
Collections.addAll(methods, interfaceMethods);
}
Collections.addAll(methods, localMethods);
if (traversalMode == BOTTOM_UP) {
Collections.addAll(methods, interfaceMethods);
Collections.addAll(methods, superclassMethods);
}
return methods;
}
/**
* Custom alternative to {@link Class#getDeclaredFields()} that sorts the
* fields and converts them to a mutable list.
*/
private static List<Field> getDeclaredFields(Class<?> clazz) {
return toSortedMutableList(clazz.getDeclaredFields());
}
/**
* Custom alternative to {@link Class#getMethods()} that sorts the methods
* and converts them to a mutable list.
*/
private static List<Method> getMethods(Class<?> clazz) {
return toSortedMutableList(clazz.getMethods());
}
/**
* Custom alternative to {@link Class#getDeclaredMethods()} that sorts the
* methods and converts them to a mutable list.
*
* <p>In addition, the list returned by this method includes interface
* default methods which are either prepended or appended to the list of
* declared methods depending on the supplied traversal mode.
*/
private static List<Method> getDeclaredMethods(Class<?> clazz, HierarchyTraversalMode traversalMode) {
// Note: getDefaultMethods() already sorts the methods,
List<Method> defaultMethods = getDefaultMethods(clazz);
List<Method> declaredMethods = toSortedMutableList(clazz.getDeclaredMethods());
// Take the traversal mode into account in order to retain the inherited
// nature of | hierarchy |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/AbstractNestedConditionTests.java | {
"start": 2603,
"end": 2689
} | class ____ {
@Bean
String myBean() {
return "myBean";
}
}
static | ValidConfig |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/codestarts/quarkus/QuarkusCodestartProjectInputBuilder.java | {
"start": 755,
"end": 5124
} | class ____ extends CodestartProjectInputBuilder {
private static final List<AppContent> FULL_CONTENT = Arrays.asList(AppContent.values());
Collection<ArtifactCoords> extensions = new ArrayList<>();
Collection<ArtifactCoords> platforms = new ArrayList<>();
Set<AppContent> appContent = new HashSet<>(FULL_CONTENT);
String example;
BuildTool buildTool = BuildTool.MAVEN;
String defaultCodestart;
QuarkusCodestartProjectInputBuilder() {
super();
}
public QuarkusCodestartProjectInputBuilder addExtensions(Collection<ArtifactCoords> extensions) {
this.extensions.addAll(extensions);
super.addDependencies(extensions.stream().map(Extensions::toGAV).collect(Collectors.toList()));
return this;
}
public QuarkusCodestartProjectInputBuilder addExtension(ArtifactCoords extension) {
return this.addExtensions(Collections.singletonList(extension));
}
public QuarkusCodestartProjectInputBuilder addExtension(ArtifactKey extension) {
return this.addExtension(Extensions.toCoords(extension, null));
}
public QuarkusCodestartProjectInputBuilder addPlatforms(Collection<ArtifactCoords> boms) {
this.platforms.addAll(boms);
super.addBoms(boms.stream().map(Extensions::toGAV).collect(Collectors.toList()));
return this;
}
public QuarkusCodestartProjectInputBuilder example(String example) {
this.example = example;
return this;
}
@Override
public QuarkusCodestartProjectInputBuilder addCodestarts(Collection<String> codestarts) {
super.addCodestarts(codestarts);
return this;
}
@Override
public QuarkusCodestartProjectInputBuilder addCodestart(String codestart) {
super.addCodestart(codestart);
return this;
}
@Override
public QuarkusCodestartProjectInputBuilder addData(Map<String, Object> data) {
super.addData(data);
return this;
}
@Override
public QuarkusCodestartProjectInputBuilder addBoms(Collection<String> boms) {
super.addBoms(boms);
return this;
}
@Override
public QuarkusCodestartProjectInputBuilder putData(String key, Object value) {
super.putData(key, value);
return this;
}
@Override
public QuarkusCodestartProjectInputBuilder putData(DataKey key, Object value) {
super.putData(key, value);
return this;
}
@Override
public QuarkusCodestartProjectInputBuilder messageWriter(MessageWriter messageWriter) {
super.messageWriter(messageWriter);
return this;
}
public QuarkusCodestartProjectInputBuilder noCode() {
return this.noCode(true);
}
public QuarkusCodestartProjectInputBuilder noCode(boolean noCode) {
if (noCode) {
appContent.remove(AppContent.CODE);
} else {
appContent.add(AppContent.CODE);
}
return this;
}
public QuarkusCodestartProjectInputBuilder noDockerfiles() {
return this.noDockerfiles(true);
}
public QuarkusCodestartProjectInputBuilder noDockerfiles(boolean noDockerfiles) {
if (noDockerfiles) {
appContent.remove(AppContent.DOCKERFILES);
} else {
appContent.add(AppContent.DOCKERFILES);
}
return this;
}
public QuarkusCodestartProjectInputBuilder noBuildToolWrapper() {
return this.noBuildToolWrapper(true);
}
public QuarkusCodestartProjectInputBuilder noBuildToolWrapper(boolean noBuildToolWrapper) {
if (noBuildToolWrapper) {
appContent.remove(AppContent.BUILD_TOOL_WRAPPER);
} else {
appContent.add(AppContent.BUILD_TOOL_WRAPPER);
}
return this;
}
public QuarkusCodestartProjectInputBuilder buildTool(BuildTool buildTool) {
if (buildTool == null) {
return this;
}
this.buildTool = buildTool;
return this;
}
public QuarkusCodestartProjectInputBuilder defaultCodestart(String defaultCodestart) {
if (defaultCodestart != null) {
this.defaultCodestart = defaultCodestart;
}
return this;
}
public QuarkusCodestartProjectInput build() {
return new QuarkusCodestartProjectInput(this);
}
}
| QuarkusCodestartProjectInputBuilder |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationTests.java | {
"start": 532,
"end": 1363
} | class ____ extends AbstractNumericMetricTestCase<MedianAbsoluteDeviationAggregationBuilder> {
@Override
protected MedianAbsoluteDeviationAggregationBuilder doCreateTestAggregatorFactory() {
MedianAbsoluteDeviationAggregationBuilder builder = new MedianAbsoluteDeviationAggregationBuilder(
randomAlphaOfLengthBetween(1, 20)
);
if (randomBoolean()) {
builder.compression(randomDoubleBetween(0, 1000.0, false));
}
if (randomBoolean()) {
builder.parseExecutionHint(randomFrom(TDigestExecutionHint.values()).toString());
}
if (randomBoolean()) {
builder.missing("MISSING");
}
if (randomBoolean()) {
builder.format("###.00");
}
return builder;
}
}
| MedianAbsoluteDeviationTests |
java | grpc__grpc-java | services/src/generated/main/grpc/io/grpc/health/v1/HealthGrpc.java | {
"start": 153,
"end": 6171
} | class ____ {
private HealthGrpc() {}
public static final java.lang.String SERVICE_NAME = "grpc.health.v1.Health";
// Static method descriptors that strictly reflect the proto.
private static volatile io.grpc.MethodDescriptor<io.grpc.health.v1.HealthCheckRequest,
io.grpc.health.v1.HealthCheckResponse> getCheckMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "Check",
requestType = io.grpc.health.v1.HealthCheckRequest.class,
responseType = io.grpc.health.v1.HealthCheckResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<io.grpc.health.v1.HealthCheckRequest,
io.grpc.health.v1.HealthCheckResponse> getCheckMethod() {
io.grpc.MethodDescriptor<io.grpc.health.v1.HealthCheckRequest, io.grpc.health.v1.HealthCheckResponse> getCheckMethod;
if ((getCheckMethod = HealthGrpc.getCheckMethod) == null) {
synchronized (HealthGrpc.class) {
if ((getCheckMethod = HealthGrpc.getCheckMethod) == null) {
HealthGrpc.getCheckMethod = getCheckMethod =
io.grpc.MethodDescriptor.<io.grpc.health.v1.HealthCheckRequest, io.grpc.health.v1.HealthCheckResponse>newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "Check"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
io.grpc.health.v1.HealthCheckRequest.getDefaultInstance()))
.setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
io.grpc.health.v1.HealthCheckResponse.getDefaultInstance()))
.setSchemaDescriptor(new HealthMethodDescriptorSupplier("Check"))
.build();
}
}
}
return getCheckMethod;
}
private static volatile io.grpc.MethodDescriptor<io.grpc.health.v1.HealthCheckRequest,
io.grpc.health.v1.HealthCheckResponse> getWatchMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "Watch",
requestType = io.grpc.health.v1.HealthCheckRequest.class,
responseType = io.grpc.health.v1.HealthCheckResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING)
public static io.grpc.MethodDescriptor<io.grpc.health.v1.HealthCheckRequest,
io.grpc.health.v1.HealthCheckResponse> getWatchMethod() {
io.grpc.MethodDescriptor<io.grpc.health.v1.HealthCheckRequest, io.grpc.health.v1.HealthCheckResponse> getWatchMethod;
if ((getWatchMethod = HealthGrpc.getWatchMethod) == null) {
synchronized (HealthGrpc.class) {
if ((getWatchMethod = HealthGrpc.getWatchMethod) == null) {
HealthGrpc.getWatchMethod = getWatchMethod =
io.grpc.MethodDescriptor.<io.grpc.health.v1.HealthCheckRequest, io.grpc.health.v1.HealthCheckResponse>newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "Watch"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
io.grpc.health.v1.HealthCheckRequest.getDefaultInstance()))
.setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
io.grpc.health.v1.HealthCheckResponse.getDefaultInstance()))
.setSchemaDescriptor(new HealthMethodDescriptorSupplier("Watch"))
.build();
}
}
}
return getWatchMethod;
}
/**
* Creates a new async stub that supports all call types for the service
*/
public static HealthStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<HealthStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<HealthStub>() {
@java.lang.Override
public HealthStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new HealthStub(channel, callOptions);
}
};
return HealthStub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports all types of calls on the service
*/
public static HealthBlockingV2Stub newBlockingV2Stub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<HealthBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<HealthBlockingV2Stub>() {
@java.lang.Override
public HealthBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new HealthBlockingV2Stub(channel, callOptions);
}
};
return HealthBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static HealthBlockingStub newBlockingStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<HealthBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<HealthBlockingStub>() {
@java.lang.Override
public HealthBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new HealthBlockingStub(channel, callOptions);
}
};
return HealthBlockingStub.newStub(factory, channel);
}
/**
* Creates a new ListenableFuture-style stub that supports unary calls on the service
*/
public static HealthFutureStub newFutureStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<HealthFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<HealthFutureStub>() {
@java.lang.Override
public HealthFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new HealthFutureStub(channel, callOptions);
}
};
return HealthFutureStub.newStub(factory, channel);
}
/**
*/
public | HealthGrpc |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java | {
"start": 13750,
"end": 13922
} | class ____ the XML properties file.
*/
@BeforeEach
public void setupTestConfigurationFields() {
initializeMemberVariables();
// Error if subclass hasn't set | and |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/issues/FilterPojoIssueTest.java | {
"start": 985,
"end": 2040
} | class ____ extends ContextTestSupport {
public String doSomething(String body) {
if ("Hello World".equals(body)) {
return "Bye World";
}
return null;
}
@Test
public void testFilterPojo() throws Exception {
getMockEndpoint("mock:filter").expectedBodiesReceived("Bye World");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testFilterPojoNull() throws Exception {
getMockEndpoint("mock:filter").expectedMessageCount(0);
template.sendBody("direct:start", "Foo");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").bean(FilterPojoIssueTest.class, "doSomething").to("log:foo").filter(body().isNotNull())
.to("mock:filter").end();
}
};
}
}
| FilterPojoIssueTest |
java | jhy__jsoup | src/main/java/org/jsoup/nodes/NodeIterator.java | {
"start": 633,
"end": 1199
} | class ____<T extends Node> implements Iterator<T> {
private Node root; // root / starting node
private @Nullable T next; // the next node to return
private Node current; // the current (last emitted) node
private Node previous; // the previously emitted node; used to recover from structural changes
private @Nullable Node currentParent; // the current node's parent; used to detect structural changes
private final Class<T> type; // the desired node | NodeIterator |
java | apache__camel | components/camel-ignite/src/main/java/org/apache/camel/component/ignite/idgen/IgniteIdGenComponent.java | {
"start": 1293,
"end": 2816
} | class ____ extends AbstractIgniteComponent {
public static IgniteIdGenComponent fromIgnite(Ignite ignite) {
IgniteIdGenComponent answer = new IgniteIdGenComponent();
answer.setIgnite(ignite);
return answer;
}
public static IgniteIdGenComponent fromConfiguration(IgniteConfiguration configuration) {
IgniteIdGenComponent answer = new IgniteIdGenComponent();
answer.setIgniteConfiguration(configuration);
return answer;
}
public static IgniteIdGenComponent fromInputStream(InputStream inputStream) {
IgniteIdGenComponent answer = new IgniteIdGenComponent();
answer.setConfigurationResource(inputStream);
return answer;
}
public static IgniteIdGenComponent fromUrl(URL url) {
IgniteIdGenComponent answer = new IgniteIdGenComponent();
answer.setConfigurationResource(url);
return answer;
}
public static IgniteIdGenComponent fromLocation(String location) {
IgniteIdGenComponent answer = new IgniteIdGenComponent();
answer.setConfigurationResource(location);
return answer;
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
ObjectHelper.notNull(getCamelContext(), "Camel Context");
IgniteIdGenEndpoint answer = new IgniteIdGenEndpoint(uri, remaining, parameters, this);
setProperties(answer, parameters);
return answer;
}
}
| IgniteIdGenComponent |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/EnableAspectJAutoProxyTests.java | {
"start": 4669,
"end": 4714
} | interface ____ {
}
@Loggable
static | Loggable |
java | processing__processing4 | java/src/processing/mode/java/ShowUsage.java | {
"start": 1085,
"end": 8940
} | class ____ {
final JDialog window;
final JTree tree;
final JavaEditor editor;
final PreprocService pps;
final Consumer<PreprocSketch> reloadListener;
IBinding binding;
ShowUsage(JavaEditor editor, PreprocService pps) {
this.editor = editor;
this.pps = pps;
// Add show usage option
JMenuItem showUsageItem =
new JMenuItem(Language.text("editor.popup.show_usage"));
showUsageItem.addActionListener(e -> handleShowUsage());
editor.getTextArea().getRightClickPopup().add(showUsageItem);
reloadListener = this::reloadShowUsage;
{ // Show Usage window
window = new JDialog(editor);
window.setDefaultCloseOperation(WindowConstants.HIDE_ON_CLOSE);
window.setAutoRequestFocus(false);
window.addComponentListener(new ComponentAdapter() {
@Override
public void componentHidden(ComponentEvent e) {
binding = null;
tree.setModel(null);
pps.unregisterListener(reloadListener);
}
@Override
public void componentShown(ComponentEvent e) {
pps.registerListener(reloadListener);
}
});
window.setSize(Toolkit.zoom(300, 400));
window.setFocusableWindowState(false);
Toolkit.setIcon(window);
JScrollPane sp2 = new JScrollPane();
tree = new JTree();
ZoomTreeCellRenderer renderer =
new ZoomTreeCellRenderer();
tree.setCellRenderer(renderer);
renderer.setLeafIcon(null);
renderer.setClosedIcon(null);
renderer.setOpenIcon(null);
renderer.setBackgroundSelectionColor(new Color(228, 248, 246));
renderer.setBorderSelectionColor(new Color(0, 0, 0, 0));
renderer.setTextSelectionColor(Color.BLACK);
sp2.setViewportView(tree);
window.add(sp2);
}
tree.addTreeSelectionListener(e -> {
if (tree.getLastSelectedPathComponent() != null) {
DefaultMutableTreeNode tnode =
(DefaultMutableTreeNode) tree.getLastSelectedPathComponent();
if (tnode.getUserObject() instanceof ShowUsageTreeNode) {
ShowUsageTreeNode node = (ShowUsageTreeNode) tnode.getUserObject();
editor.highlight(node.tabIndex, node.startTabOffset, node.stopTabOffset);
}
}
});
}
// Thread: EDT
void handleShowUsage() {
int startOffset = editor.getSelectionStart();
int stopOffset = editor.getSelectionStop();
int tabIndex = editor.getSketch().getCurrentCodeIndex();
pps.whenDoneBlocking(ps -> handleShowUsage(ps, tabIndex, startOffset, stopOffset));
}
// Thread: worker
void handleShowUsage(PreprocSketch ps, int tabIndex,
int startTabOffset, int stopTabOffset) {
// Map offsets
int startJavaOffset = ps.tabOffsetToJavaOffset(tabIndex, startTabOffset);
int stopJavaOffset = ps.tabOffsetToJavaOffset(tabIndex, stopTabOffset);
// Find the node
SimpleName name = ASTUtils.getSimpleNameAt(ps.compilationUnit, startJavaOffset, stopJavaOffset);
if (name == null) {
editor.statusMessage("Cannot find any name under cursor", EditorStatus.NOTICE);
return;
}
// Find binding
IBinding binding = ASTUtils.resolveBinding(name);
if (binding == null) {
editor.statusMessage("Cannot find usages, try to fix errors in your code first",
EditorStatus.NOTICE);
return;
}
findUsageAndUpdateTree(ps, binding);
}
// Thread: worker
void findUsageAndUpdateTree(PreprocSketch ps, IBinding binding) {
this.binding = binding;
// Get label
String bindingType = "";
switch (binding.getKind()) {
case IBinding.METHOD:
IMethodBinding method = (IMethodBinding) binding;
if (method.isConstructor()) bindingType = "Constructor";
else bindingType = "Method";
break;
case IBinding.TYPE:
bindingType = "Type";
break;
case IBinding.VARIABLE:
IVariableBinding variable = (IVariableBinding) binding;
if (variable.isField()) bindingType = "Field";
else if (variable.isParameter()) bindingType = "Parameter";
else if (variable.isEnumConstant()) bindingType = "Enum constant";
else bindingType = "Local variable";
break;
}
// Find usages, map to tree nodes, add to root node
String bindingKey = binding.getKey();
List<SketchInterval> intervals =
findAllOccurrences(ps.compilationUnit, bindingKey).stream()
.map(ps::mapJavaToSketch)
// remove occurrences which fall into generated header
.filter(ps::inRange)
// remove empty intervals (happens when occurence was inserted)
.filter(in -> in.startPdeOffset < in.stopPdeOffset)
.collect(Collectors.toList());
int usageCount = intervals.size();
// Get element name from PDE code if possible, otherwise use one from Java
String elementName = intervals.stream()
.findAny()
.map(si -> ps.pdeCode.substring(si.startPdeOffset, si.stopPdeOffset))
.orElseGet(binding::getName);
// Create root node
DefaultMutableTreeNode rootNode =
new DefaultMutableTreeNode(bindingType + ": " + elementName);
intervals.stream()
// Convert to TreeNodes
.map(in -> ShowUsageTreeNode.fromSketchInterval(ps, in))
// Group by tab index
.collect(Collectors.groupingBy(node -> node.tabIndex))
// Stream Map Entries of (tab index) <-> (List<ShowUsageTreeNode>)
.entrySet().stream()
// Sort by tab index
.sorted(Comparator.comparing(Map.Entry::getKey))
.map(entry -> {
Integer tabIndex = entry.getKey();
List<ShowUsageTreeNode> nodes = entry.getValue();
int count = nodes.size();
String usageLabel = count == 1 ? "usage" : "usages";
// Create new DefaultMutableTreeNode for this tab
String tabLabel = "<html><font color=#222222>" +
ps.sketch.getCode(tabIndex).getPrettyName() +
"</font> <font color=#999999>" + count + " " + usageLabel + "</font></html>";
DefaultMutableTreeNode tabNode = new DefaultMutableTreeNode(tabLabel);
// Stream nodes belonging to this tab
nodes.stream()
// Convert TreeNodes to DefaultMutableTreeNodes
.map(DefaultMutableTreeNode::new)
// Add all as children of tab node
.forEach(tabNode::add);
return tabNode;
})
// Add all tab nodes as children of root node
.forEach(rootNode::add);
TreeModel treeModel = new DefaultTreeModel(rootNode);
// Update tree
EventQueue.invokeLater(() -> {
tree.setModel(treeModel);
// Expand all nodes
for (int i = 0; i < tree.getRowCount(); i++) {
tree.expandRow(i);
}
tree.setRootVisible(true);
if (!window.isVisible()) {
window.setVisible(true);
GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
GraphicsDevice defaultScreen = ge.getDefaultScreenDevice();
Rectangle rect = defaultScreen.getDefaultConfiguration().getBounds();
int maxX = (int) rect.getMaxX() - window.getWidth();
int x = Math.min(editor.getX() + editor.getWidth(), maxX);
int y = (x == maxX) ? 10 : editor.getY();
window.setLocation(x, y);
}
window.toFront();
window.setTitle("Usage of \"" + elementName + "\" : " +
usageCount + " time(s)");
});
}
// Thread: worker
void reloadShowUsage(PreprocSketch ps) {
if (binding != null) {
findUsageAndUpdateTree(ps, binding);
}
}
void hide() {
window.setVisible(false);
}
void dispose() {
if (window != null) {
window.dispose();
}
}
}
| ShowUsage |
java | quarkusio__quarkus | devtools/cli/src/main/java/io/quarkus/cli/CreateExtension.java | {
"start": 4237,
"end": 9996
} | class ____ extends ArrayList<String> {
VersionCandidates() {
super(JavaVersion.JAVA_VERSIONS_LTS.stream().map(String::valueOf).collect(Collectors.toList()));
}
}
@CommandLine.Spec
protected CommandLine.Model.CommandSpec spec;
@CommandLine.Mixin
ExtensionGAVMixin gav = new ExtensionGAVMixin();
@CommandLine.ArgGroup(order = 1, heading = "%nQuarkus version:%n")
TargetQuarkusPlatformGroup targetQuarkusVersion = new TargetQuarkusPlatformGroup();
// Ideally we should use TargetLanguageGroup once we support creating extensions with Kotlin
@CommandLine.Option(names = {
"--java" }, description = "Target Java version.\n Valid values: ${COMPLETION-CANDIDATES}", completionCandidates = VersionCandidates.class, defaultValue = JavaVersion.DEFAULT_JAVA_VERSION_FOR_EXTENSION)
String javaVersion;
@CommandLine.ArgGroup(order = 2, exclusive = false, heading = "%nGenerated artifacts%n")
ExtensionNameGenerationGroup nameGeneration = new ExtensionNameGenerationGroup();
@CommandLine.ArgGroup(order = 3, exclusive = false, heading = "%nCode Generation (Optional):%n")
ExtensionCodeGenerationGroup codeGeneration = new ExtensionCodeGenerationGroup();
@CommandLine.ArgGroup(order = 4, exclusive = false, validate = false)
PropertiesOptions propertiesOptions = new PropertiesOptions();
@Override
public Integer call() throws Exception {
try {
output.debug("Creating a new extension project with initial parameters: %s", this);
output.throwIfUnmatchedArguments(spec.commandLine());
setExtensionId(gav.getExtensionId());
setTestOutputDirectory(output.getTestDirectory());
if (checkProjectRootAlreadyExists(runMode.isDryRun())) {
return CommandLine.ExitCode.USAGE;
}
BuildTool buildTool = BuildTool.MAVEN;
QuarkusProject quarkusProject = getExtensionVersions(buildTool, targetQuarkusVersion);
ExtensionCatalog catalog = quarkusProject.getExtensionsCatalog();
ArtifactCoords quarkusBom = catalog.getBom();
final CreateExtensionCommandHandler createExtension = new io.quarkus.devtools.commands.CreateExtension(
outputDirectory())
.extensionId(gav.getExtensionId())
.groupId(gav.getGroupId())
.version(gav.getVersion())
.extensionName(nameGeneration.getExtensionName())
.extensionDescription(nameGeneration.extensionDescription())
.namespaceId(nameGeneration.getNamespaceId())
.namespaceName(nameGeneration.getNamespaceName())
.packageName(nameGeneration.getPackageName())
.quarkusVersion(catalog.getQuarkusCoreVersion())
.quarkusBomGroupId(quarkusBom.getGroupId())
.quarkusBomArtifactId(quarkusBom.getArtifactId())
.quarkusBomVersion(quarkusBom.getVersion())
.javaVersion(javaVersion)
.withCodestart(codeGeneration.withCodestart())
.withoutUnitTest(codeGeneration.skipUnitTest())
.withoutDevModeTest(codeGeneration.skipDevModeTest())
.withoutIntegrationTests(codeGeneration.skipIntegrationTests())
.prepare();
QuarkusCommandOutcome outcome = QuarkusCommandOutcome.success();
if (runMode.isDryRun()) {
dryRun(buildTool, createExtension, output);
} else { // maven or gradle
outcome = createExtension.execute(output);
}
if (outcome.isSuccess()) {
if (!runMode.isDryRun()) {
output.info(
"Navigate into this directory and get started: " + spec.root().qualifiedName() + " build");
}
return CommandLine.ExitCode.OK;
}
return CommandLine.ExitCode.SOFTWARE;
} catch (Exception e) {
output.error("Extension creation failed, " + e.getMessage());
return output.handleCommandException(e,
"Unable to create extension: " + e.getMessage());
}
}
public void dryRun(BuildTool buildTool, CreateExtensionCommandHandler invocation, OutputOptionMixin output) {
CommandLine.Help help = spec.commandLine().getHelp();
output.printText(new String[] {
"\nA new extension would have been created in",
"\t" + outputDirectory().toString(),
"\nThe extension would have been created using the following settings:\n"
});
Map<String, String> dryRunOutput = new TreeMap<>();
for (Map.Entry<String, Object> entry : invocation.getData().entrySet()) {
dryRunOutput.put(prettyName(entry.getKey()), entry.getValue().toString());
}
dryRunOutput.put("Extension Codestart", "" + codeGeneration.withCodestart());
dryRunOutput.put("Skip Unit Test", "" + codeGeneration.skipUnitTest());
dryRunOutput.put("Skip Dev-mode Test", "" + codeGeneration.skipDevModeTest());
dryRunOutput.put("Skip Integration Test", "" + codeGeneration.skipIntegrationTests());
output.info(help.createTextTable(dryRunOutput).toString());
}
@Override
public String toString() {
return "CreateExtension{" + "gav=" + gav
+ ", quarkusVersion=" + targetQuarkusVersion
+ ", nameGeneration=" + nameGeneration
+ ", testGeneration=" + codeGeneration
+ '}';
}
}
| VersionCandidates |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilderTests.java | {
"start": 767,
"end": 3200
} | class ____ extends AbstractXContentSerializingTestCase<StringStatsAggregationBuilder> {
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(
Arrays.asList(
new NamedXContentRegistry.Entry(
BaseAggregationBuilder.class,
new ParseField(StringStatsAggregationBuilder.NAME),
(p, c) -> StringStatsAggregationBuilder.PARSER.parse(p, (String) c)
)
)
);
}
@Override
protected StringStatsAggregationBuilder doParseInstance(XContentParser parser) throws IOException {
assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
String name = parser.currentName();
assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
assertThat(parser.currentName(), equalTo("string_stats"));
StringStatsAggregationBuilder parsed = StringStatsAggregationBuilder.PARSER.apply(parser, name);
assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
return parsed;
}
@Override
protected Reader<StringStatsAggregationBuilder> instanceReader() {
return StringStatsAggregationBuilder::new;
}
@Override
protected StringStatsAggregationBuilder createTestInstance() {
StringStatsAggregationBuilder builder = new StringStatsAggregationBuilder(randomAlphaOfLength(5));
builder.field("foo");
builder.showDistribution(randomBoolean());
return builder;
}
@Override
protected StringStatsAggregationBuilder mutateInstance(StringStatsAggregationBuilder instance) {
if (randomBoolean()) {
StringStatsAggregationBuilder mutant = new StringStatsAggregationBuilder(instance.getName());
mutant.showDistribution(instance.showDistribution() == false);
return mutant;
}
StringStatsAggregationBuilder mutant = new StringStatsAggregationBuilder(randomAlphaOfLength(4));
mutant.showDistribution(instance.showDistribution());
return mutant;
}
}
| StringStatsAggregationBuilderTests |
java | quarkusio__quarkus | extensions/undertow/runtime/src/main/java/io/quarkus/undertow/runtime/ServletThreadContextProvider.java | {
"start": 263,
"end": 1343
} | class ____ implements ThreadContextProvider {
@Override
public ThreadContextSnapshot currentContext(Map<String, String> props) {
ServletRequestContext captured = ServletRequestContext.current();
return () -> {
ServletRequestContext current = restore(captured);
return () -> restore(current);
};
}
private ServletRequestContext restore(ServletRequestContext context) {
ServletRequestContext currentContext = ServletRequestContext.current();
if (context == null)
ServletRequestContext.clearCurrentServletAttachments();
else
ServletRequestContext.setCurrentRequestContext(context);
return currentContext;
}
@Override
public ThreadContextSnapshot clearedContext(Map<String, String> props) {
return () -> {
ServletRequestContext current = restore(null);
return () -> restore(current);
};
}
@Override
public String getThreadContextType() {
return "Servlet";
}
}
| ServletThreadContextProvider |
java | apache__logging-log4j2 | log4j-jpa/src/test/java/org/apache/logging/log4j/core/appender/db/jpa/JpaHsqldbAppenderTest.java | {
"start": 1174,
"end": 4300
} | class ____ extends AbstractJpaAppenderTest {
private static final String USER_ID = "sa";
private static final String PASSWORD = "123";
public JpaHsqldbAppenderTest() {
super("hsqldb");
}
@Override
protected Connection setUpConnection() throws SQLException {
final Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:Log4j", USER_ID, PASSWORD);
try (final Statement statement = connection.createStatement()) {
statement.executeUpdate("CREATE TABLE jpaBaseLogEntry ( "
+ "id INTEGER IDENTITY, eventDate DATETIME, instant NVARCHAR(64), level VARCHAR(10), "
+ "logger VARCHAR(255), message VARCHAR(1024), exception VARCHAR(1048576) )");
}
try (final Statement statement = connection.createStatement()) {
statement.executeUpdate("CREATE TABLE jpaBasicLogEntry ( "
+ "id INTEGER IDENTITY, timemillis BIGINT, instant NVARCHAR(64), nanoTime BIGINT, "
+ "level VARCHAR(10), loggerName VARCHAR(255), message VARCHAR(1024), thrown VARCHAR(1048576), "
+ "contextMapJson VARCHAR(1048576), loggerFQCN VARCHAR(1024), "
+ "contextStack VARCHAR(1048576), marker VARCHAR(255), source VARCHAR(2048),"
+ "threadId BIGINT, threadName NVARCHAR(255), threadPriority INTEGER )");
}
return connection;
}
@Test
void testNoEntityClassName() {
final JpaAppender appender = JpaAppender.createAppender("name", null, null, null, null, "jpaAppenderTestUnit");
assertNull(appender, "The appender should be null.");
}
@Test
void testNoPersistenceUnitName() {
final JpaAppender appender =
JpaAppender.createAppender("name", null, null, null, TestBaseEntity.class.getName(), null);
assertNull(appender, "The appender should be null.");
}
@Test
void testBadEntityClassName() {
final JpaAppender appender =
JpaAppender.createAppender("name", null, null, null, "com.foo.Bar", "jpaAppenderTestUnit");
assertNull(appender, "The appender should be null.");
}
@Test
void testNonLogEventEntity() {
final JpaAppender appender =
JpaAppender.createAppender("name", null, null, null, Object.class.getName(), "jpaAppenderTestUnit");
assertNull(appender, "The appender should be null.");
}
@Test
void testBadConstructorEntity01() {
final JpaAppender appender = JpaAppender.createAppender(
"name", null, null, null, BadConstructorEntity1.class.getName(), "jpaAppenderTestUnit");
assertNull(appender, "The appender should be null.");
}
@Test
void testBadConstructorEntity02() {
final JpaAppender appender = JpaAppender.createAppender(
"name", null, null, null, BadConstructorEntity2.class.getName(), "jpaAppenderTestUnit");
assertNull(appender, "The appender should be null.");
}
@SuppressWarnings("unused")
public static | JpaHsqldbAppenderTest |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlShowProcedureConverter.java | {
"start": 1204,
"end": 2143
} | class ____ extends AbstractSqlShowConverter<SqlShowProcedures> {
@Override
public Operation getOperationWithoutPrep(
SqlShowProcedures sqlShowCall,
@Nullable String catalogName,
@Nullable String databaseName,
@Nullable ShowLikeOperator likeOp) {
return new ShowProceduresOperation(catalogName, databaseName, likeOp);
}
@Override
public Operation getOperation(
SqlShowProcedures sqlShowCall,
@Nullable String catalogName,
@Nullable String databaseName,
String prep,
@Nullable ShowLikeOperator likeOp) {
return new ShowProceduresOperation(catalogName, databaseName, prep, likeOp);
}
@Override
public Operation convertSqlNode(SqlShowProcedures sqlShowProcedures, ConvertContext context) {
return convertShowOperation(sqlShowProcedures, context);
}
}
| SqlShowProcedureConverter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/namingstrategy/ejb3joincolumn/Tests.java | {
"start": 1363,
"end": 2678
} | class ____ {
@Test
@JiraKey(value = "HHH-9961")
public void testJpaJoinColumnPhysicalNaming() {
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder()
.applySettings( Environment.getProperties() )
.build();
try {
final MetadataSources metadataSources = new MetadataSources( ssr );
metadataSources.addAnnotatedClass( Language.class );
final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder();
metadataBuilder.applyImplicitNamingStrategy( ImplicitNamingStrategyJpaCompliantImpl.INSTANCE );
metadataBuilder.applyPhysicalNamingStrategy( PhysicalNamingStrategyImpl.INSTANCE );
final Metadata metadata = metadataBuilder.build();
( (MetadataImplementor) metadata ).orderColumns( false );
( (MetadataImplementor) metadata ).validate();
final PersistentClass languageBinding = metadata.getEntityBinding( Language.class.getName() );
final Property property = languageBinding.getProperty( "fallBack" );
List<Selectable> selectables = property.getValue().getSelectables();
assertTrue( selectables.size() == 1 );
final Column column = (Column) selectables.get( 0 );
assertEquals( "C_FALLBACK_ID", column.getName().toUpperCase( Locale.ROOT ) );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
}
| Tests |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/util/JvmExitOnFatalErrorTest.java | {
"start": 13227,
"end": 13455
} | class ____ implements InputSplitProvider {
@Override
public InputSplit getNextInputSplit(ClassLoader userCodeClassLoader) {
return null;
}
}
}
}
| NoOpInputSplitProvider |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/ParallelMergeReduceTest.java | {
"start": 1182,
"end": 5416
} | class ____ {
@Test
public void reduceFull() {
for (int i = 1;
i <= Runtime.getRuntime()
.availableProcessors() * 2;
i++) {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 10)
.parallel(i)
.reduce((a, b) -> a + b)
.subscribe(ts);
ts.assertValues(55);
}
}
@Test
public void parallelReduceFull() {
int m = 100_000;
for (int n = 1; n <= m; n *= 10) {
// System.out.println(n);
for (int i = 1;
i <= Runtime.getRuntime()
.availableProcessors();
i++) {
// System.out.println(" " + i);
Scheduler scheduler = Schedulers.newParallel("test", i);
try {
AssertSubscriber<Long> ts = AssertSubscriber.create();
Flux.range(1, n)
.map(v -> (long) v)
.parallel(i)
.runOn(scheduler)
.reduce((a, b) -> a + b)
.subscribe(ts);
ts.await(Duration.ofSeconds(500));
long e = ((long) n) * (1 + n) / 2;
ts.assertValues(e);
}
finally {
scheduler.dispose();
}
}
}
}
@Test
public void scanOperator() {
ParallelFlux<Integer> source = Flux.range(1, 4).parallel();
ParallelMergeReduce<Integer> test = new ParallelMergeReduce<>(source, (a, b) -> a + b);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(source);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanMainSubscriber() {
CoreSubscriber<? super Integer> subscriber = new LambdaSubscriber<>(null, e -> { }, null,
sub -> sub.request(2));
MergeReduceMain<Integer> test = new MergeReduceMain<>(Flux.<Integer>never()
.parallel(2),
subscriber, 2, (a, b) -> a + b);
subscriber.onSubscribe(test);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(subscriber);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(0);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
assertThat(test.scan(Scannable.Attr.ERROR)).isNull();
test.innerComplete(1);
test.innerComplete(2);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
@Test
public void scanMainSubscriberError() {
CoreSubscriber<? super Integer> subscriber = new LambdaSubscriber<>(null, e -> { }, null,
sub -> sub.request(2));
MergeReduceMain<Integer> test = new MergeReduceMain<>(Flux.range(0, 10)
.parallel(2), subscriber, 2, (a, b) -> a + b);
subscriber.onSubscribe(test);
assertThat(test.scan(Scannable.Attr.ERROR)).isNull();
test.innerError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.ERROR)).hasMessage("boom");
}
@Test
public void scanInnerSubscriber() {
CoreSubscriber<? super Integer> subscriber = new LambdaSubscriber<>(null, e -> { }, null, null);
MergeReduceMain<Integer> main = new MergeReduceMain<>(Flux.range(0, 10)
.parallel(2),
subscriber, 2, (a, b) -> a + b);
MergeReduceInner<Integer> test = new MergeReduceInner<>(main, (a, b) -> a + b);
Subscription s = Operators.emptySubscription();
test.onSubscribe(s);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(s);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(main);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(Integer.MAX_VALUE);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.done = true;
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.BUFFERED)).isZero();
test.value = 3;
assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(1);
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
}
| ParallelMergeReduceTest |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/DataTypeExtractorTest.java | {
"start": 42846,
"end": 43614
} | class ____ {
public final Integer intField;
public final boolean primitiveBooleanField;
public final int primitiveIntField;
public final String stringField;
public SimplePojoWithAssigningConstructor(
Integer intField,
boolean primitiveBooleanField,
int primitiveIntField,
String stringField) {
this.intField = intField;
this.primitiveBooleanField = primitiveBooleanField;
this.primitiveIntField = primitiveIntField;
this.stringField = stringField;
}
}
// --------------------------------------------------------------------------------------------
private static | SimplePojoWithAssigningConstructor |
java | apache__dubbo | dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/OpenAPIFilter.java | {
"start": 1625,
"end": 3064
} | interface ____ extends OpenAPIExtension {
default OpenAPI filterOpenAPI(OpenAPI openAPI, Context context) {
return openAPI;
}
default PathItem filterPathItem(String key, PathItem pathItem, Context context) {
return pathItem;
}
default Operation filterOperation(HttpMethods key, Operation operation, PathItem pathItem, Context context) {
return operation;
}
default Parameter filterParameter(Parameter parameter, Operation operation, Context context) {
return parameter;
}
default RequestBody filterRequestBody(RequestBody body, Operation operation, Context context) {
return body;
}
default ApiResponse filterResponse(ApiResponse response, Operation operation, Context context) {
return response;
}
default Header filterHeader(Header header, ApiResponse response, Operation operation, Context context) {
return header;
}
default Schema filterSchema(Schema schema, Node<?> node, Context context) {
return schema;
}
default Schema filterSchemaProperty(String name, Schema schema, Schema owner, Context context) {
return schema;
}
default SecurityScheme filterSecurityScheme(SecurityScheme securityScheme, Context context) {
return securityScheme;
}
default OpenAPI filterOpenAPICompletion(OpenAPI openAPI, Context context) {
return openAPI;
}
}
| OpenAPIFilter |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/core/parameters/AnnotationParameterNameDiscoverer.java | {
"start": 6534,
"end": 6737
} | interface ____ looking up the parameter names.
*
* @param <T> the type to inspect (i.e. {@link Method} or {@link Constructor})
* @author Rob Winch
* @since 3.2
*/
@FunctionalInterface
private | for |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ahs/RMApplicationHistoryWriter.java | {
"start": 12882,
"end": 13834
} | class ____ extends CompositeService
implements Dispatcher {
private List<AsyncDispatcher> dispatchers =
new ArrayList<AsyncDispatcher>();
public MultiThreadedDispatcher(int num) {
super(MultiThreadedDispatcher.class.getName());
for (int i = 0; i < num; ++i) {
AsyncDispatcher dispatcher = createDispatcher();
dispatchers.add(dispatcher);
addIfService(dispatcher);
}
}
@Override
public EventHandler<Event> getEventHandler() {
return new CompositEventHandler();
}
@Override
public void register(Class<? extends Enum> eventType, EventHandler handler) {
for (AsyncDispatcher dispatcher : dispatchers) {
dispatcher.register(eventType, handler);
}
}
public void setDrainEventsOnStop() {
for (AsyncDispatcher dispatcher : dispatchers) {
dispatcher.setDrainEventsOnStop();
}
}
private | MultiThreadedDispatcher |
java | apache__maven | impl/maven-xml/src/test/java/org/apache/maven/internal/xml/XmlPlexusConfigurationConcurrencyBenchmark.java | {
"start": 2179,
"end": 7620
} | class ____ {
private XmlNode testNode;
private PlexusConfiguration configOld;
private PlexusConfiguration configNew;
@Setup
public void setup() {
testNode = createTestNode();
configOld = new XmlPlexusConfigurationOld(testNode);
configNew = new XmlPlexusConfiguration(testNode);
}
/**
* Test concurrent child access with old implementation
* This may expose race conditions and inconsistent behavior
*/
@Benchmark
@Group("concurrentAccessOld")
public void concurrentChildAccessOld(Blackhole bh) {
try {
for (int i = 0; i < configOld.getChildCount(); i++) {
PlexusConfiguration child = configOld.getChild(i);
bh.consume(child.getName());
bh.consume(child.getValue());
// Access nested children to stress the implementation
for (int j = 0; j < child.getChildCount(); j++) {
PlexusConfiguration nested = child.getChild(j);
bh.consume(nested.getName());
bh.consume(nested.getValue());
}
}
} catch (Exception e) {
// Old implementation may throw exceptions under concurrent access
bh.consume(e);
}
}
/**
* Test concurrent child access with new implementation
* This should be thread-safe and perform consistently
*/
@Benchmark
@Group("concurrentAccessNew")
public void concurrentChildAccessNew(Blackhole bh) {
for (int i = 0; i < configNew.getChildCount(); i++) {
PlexusConfiguration child = configNew.getChild(i);
bh.consume(child.getName());
bh.consume(child.getValue());
// Access nested children to stress the implementation
for (int j = 0; j < child.getChildCount(); j++) {
PlexusConfiguration nested = child.getChild(j);
bh.consume(nested.getName());
bh.consume(nested.getValue());
}
}
}
/**
* Test concurrent construction and access with old implementation
*/
@Benchmark
public void concurrentConstructionOld(Blackhole bh) {
try {
PlexusConfiguration config = new XmlPlexusConfigurationOld(testNode);
// Immediately access children to trigger potential race conditions
for (int i = 0; i < config.getChildCount(); i++) {
bh.consume(config.getChild(i).getName());
}
} catch (Exception e) {
bh.consume(e);
}
}
/**
* Test concurrent construction and access with new implementation
*/
@Benchmark
public void concurrentConstructionNew(Blackhole bh) {
PlexusConfiguration config = new XmlPlexusConfiguration(testNode);
// Immediately access children to test thread safety
for (int i = 0; i < config.getChildCount(); i++) {
bh.consume(config.getChild(i).getName());
}
}
/**
* Test concurrent attribute access
*/
@Benchmark
public void concurrentAttributeAccessOld(Blackhole bh) {
try {
String[] attrNames = configOld.getAttributeNames();
for (String attrName : attrNames) {
bh.consume(configOld.getAttribute(attrName));
}
} catch (Exception e) {
bh.consume(e);
}
}
@Benchmark
public void concurrentAttributeAccessNew(Blackhole bh) {
String[] attrNames = configNew.getAttributeNames();
for (String attrName : attrNames) {
bh.consume(configNew.getAttribute(attrName));
}
}
private XmlNode createTestNode() {
Map<String, String> rootAttrs = Map.of("id", "test-root", "version", "1.0", "type", "benchmark");
List<XmlNode> children = List.of(
XmlNode.newBuilder()
.name("section1")
.attributes(Map.of("name", "section1"))
.children(List.of(
XmlNode.newInstance("item1", "value1"),
XmlNode.newInstance("item2", "value2"),
XmlNode.newInstance("item3", "value3")))
.build(),
XmlNode.newBuilder()
.name("section2")
.attributes(Map.of("name", "section2"))
.children(
List.of(XmlNode.newInstance("item4", "value4"), XmlNode.newInstance("item5", "value5")))
.build(),
XmlNode.newBuilder()
.name("section3")
.attributes(Map.of("name", "section3"))
.children(List.of(XmlNode.newBuilder()
.name("nested")
.children(List.of(
XmlNode.newInstance("deep1", "deep-value1"),
XmlNode.newInstance("deep2", "deep-value2")))
.build()))
.build());
return XmlNode.newBuilder()
.name("root")
.attributes(rootAttrs)
.children(children)
.build();
}
}
| XmlPlexusConfigurationConcurrencyBenchmark |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/EnhancerCacheProvider.java | {
"start": 365,
"end": 1075
} | class ____ extends TypePool.CacheProvider.Simple {
private final ThreadLocal<EnhancementState> enhancementState = new ThreadLocal<>();
@Override
public TypePool.Resolution find(final String name) {
final EnhancementState enhancementState = getEnhancementState();
if ( enhancementState != null && enhancementState.getClassName().equals( name ) ) {
return enhancementState.getTypePoolResolution();
}
return super.find( name );
}
EnhancementState getEnhancementState() {
return enhancementState.get();
}
void setEnhancementState(EnhancementState state) {
enhancementState.set( state );
}
void removeEnhancementState() {
enhancementState.remove();
}
static final | EnhancerCacheProvider |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/header/writers/frameoptions/AbstractRequestParameterAllowFromStrategyTests.java | {
"start": 933,
"end": 2681
} | class ____ {
private MockHttpServletRequest request;
@BeforeEach
public void setup() {
this.request = new MockHttpServletRequest();
}
@Test
public void nullAllowFromParameterValue() {
RequestParameterAllowFromStrategyStub strategy = new RequestParameterAllowFromStrategyStub(true);
assertThat(strategy.getAllowFromValue(this.request)).isEqualTo("DENY");
}
@Test
public void emptyAllowFromParameterValue() {
this.request.setParameter("x-frames-allow-from", "");
RequestParameterAllowFromStrategyStub strategy = new RequestParameterAllowFromStrategyStub(true);
assertThat(strategy.getAllowFromValue(this.request)).isEqualTo("DENY");
}
@Test
public void emptyAllowFromCustomParameterValue() {
String customParam = "custom";
this.request.setParameter(customParam, "");
RequestParameterAllowFromStrategyStub strategy = new RequestParameterAllowFromStrategyStub(true);
strategy.setAllowFromParameterName(customParam);
assertThat(strategy.getAllowFromValue(this.request)).isEqualTo("DENY");
}
@Test
public void allowFromParameterValueAllowed() {
String value = "https://example.com";
this.request.setParameter("x-frames-allow-from", value);
RequestParameterAllowFromStrategyStub strategy = new RequestParameterAllowFromStrategyStub(true);
assertThat(strategy.getAllowFromValue(this.request)).isEqualTo(value);
}
@Test
public void allowFromParameterValueDenied() {
String value = "https://example.com";
this.request.setParameter("x-frames-allow-from", value);
RequestParameterAllowFromStrategyStub strategy = new RequestParameterAllowFromStrategyStub(false);
assertThat(strategy.getAllowFromValue(this.request)).isEqualTo("DENY");
}
private static | AbstractRequestParameterAllowFromStrategyTests |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/metrics/stats/MeterTest.java | {
"start": 1194,
"end": 3135
} | class ____ {
private static final double EPS = 0.0000001d;
@Test
public void testMeter() {
Map<String, String> emptyTags = Collections.emptyMap();
MetricName rateMetricName = new MetricName("rate", "test", "", emptyTags);
MetricName totalMetricName = new MetricName("total", "test", "", emptyTags);
Meter meter = new Meter(rateMetricName, totalMetricName);
List<NamedMeasurable> stats = meter.stats();
assertEquals(2, stats.size());
NamedMeasurable total = stats.get(0);
NamedMeasurable rate = stats.get(1);
assertEquals(rateMetricName, rate.name());
assertEquals(totalMetricName, total.name());
Rate rateStat = (Rate) rate.stat();
CumulativeSum totalStat = (CumulativeSum) total.stat();
MetricConfig config = new MetricConfig();
double nextValue = 0.0;
double expectedTotal = 0.0;
long now = 0;
int intervalMs = 100;
double delta = 5.0;
// Record values in multiple windows and verify that rates are reported
// for time windows and that the total is cumulative.
for (int i = 1; i <= 100; i++) {
for (; now < i * 1000; now += intervalMs, nextValue += delta) {
expectedTotal += nextValue;
meter.record(config, nextValue, now);
}
assertEquals(expectedTotal, totalStat.measure(config, now), EPS);
long windowSizeMs = rateStat.windowSize(config, now);
long windowStartMs = Math.max(now - windowSizeMs, 0);
double sampledTotal = 0.0;
double prevValue = nextValue - delta;
for (long timeMs = now - 100; timeMs >= windowStartMs; timeMs -= intervalMs, prevValue -= delta)
sampledTotal += prevValue;
assertEquals(sampledTotal * 1000 / windowSizeMs, rateStat.measure(config, now), EPS);
}
}
}
| MeterTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java | {
"start": 1713,
"end": 5684
} | class ____ { //extends ViewFileSystemTestSetup {
static Configuration conf;
static FileSystem viewFs;
static FakeFileSystem fs1;
static FakeFileSystem fs2;
@BeforeAll
public static void setup() throws Exception {
conf = ViewFileSystemTestSetup.createConfig();
setupFileSystem(new URI("fs1:/"), FakeFileSystem.class);
setupFileSystem(new URI("fs2:/"), FakeFileSystem.class);
viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
fs1 = (FakeFileSystem) getChildFileSystem((ViewFileSystem) viewFs,
new URI("fs1:/"));
fs2 = (FakeFileSystem) getChildFileSystem((ViewFileSystem) viewFs,
new URI("fs2:/"));
}
static void setupFileSystem(URI uri, Class clazz)
throws Exception {
String scheme = uri.getScheme();
conf.set("fs."+scheme+".impl", clazz.getName());
FakeFileSystem fs = (FakeFileSystem)FileSystem.get(uri, conf);
assertEquals(uri, fs.getUri());
Path targetPath = new FileSystemTestHelper().getAbsoluteTestRootPath(fs);
ConfigUtil.addLink(conf, "/mounts/"+scheme, targetPath.toUri());
}
private static void setupMockFileSystem(Configuration config, URI uri)
throws Exception {
String scheme = uri.getScheme();
config.set("fs." + scheme + ".impl", MockFileSystem.class.getName());
ConfigUtil.addLink(config, "/mounts/" + scheme, uri);
}
@Test
public void testSanity() throws URISyntaxException {
assertEquals(new URI("fs1:/").getScheme(), fs1.getUri().getScheme());
assertEquals(new URI("fs1:/").getAuthority(), fs1.getUri().getAuthority());
assertEquals(new URI("fs2:/").getScheme(), fs2.getUri().getScheme());
assertEquals(new URI("fs2:/").getAuthority(), fs2.getUri().getAuthority());
}
/**
* Tests that ViewFileSystem dispatches calls for every ACL method through the
* mount table to the correct underlying FileSystem with all Path arguments
* translated as required.
*/
@Test
public void testAclMethods() throws Exception {
Configuration conf = ViewFileSystemTestSetup.createConfig();
setupMockFileSystem(conf, new URI("mockfs1:/"));
setupMockFileSystem(conf, new URI("mockfs2:/"));
FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
FileSystem mockFs1 =
((MockFileSystem) getChildFileSystem((ViewFileSystem) viewFs,
new URI("mockfs1:/"))).getRawFileSystem();
FileSystem mockFs2 =
((MockFileSystem) getChildFileSystem((ViewFileSystem) viewFs,
new URI("mockfs2:/"))).getRawFileSystem();
Path viewFsPath1 = new Path("/mounts/mockfs1/a/b/c");
Path mockFsPath1 = new Path("/a/b/c");
Path viewFsPath2 = new Path("/mounts/mockfs2/d/e/f");
Path mockFsPath2 = new Path("/d/e/f");
List<AclEntry> entries = Collections.emptyList();
viewFs.modifyAclEntries(viewFsPath1, entries);
verify(mockFs1).modifyAclEntries(mockFsPath1, entries);
viewFs.modifyAclEntries(viewFsPath2, entries);
verify(mockFs2).modifyAclEntries(mockFsPath2, entries);
viewFs.removeAclEntries(viewFsPath1, entries);
verify(mockFs1).removeAclEntries(mockFsPath1, entries);
viewFs.removeAclEntries(viewFsPath2, entries);
verify(mockFs2).removeAclEntries(mockFsPath2, entries);
viewFs.removeDefaultAcl(viewFsPath1);
verify(mockFs1).removeDefaultAcl(mockFsPath1);
viewFs.removeDefaultAcl(viewFsPath2);
verify(mockFs2).removeDefaultAcl(mockFsPath2);
viewFs.removeAcl(viewFsPath1);
verify(mockFs1).removeAcl(mockFsPath1);
viewFs.removeAcl(viewFsPath2);
verify(mockFs2).removeAcl(mockFsPath2);
viewFs.setAcl(viewFsPath1, entries);
verify(mockFs1).setAcl(mockFsPath1, entries);
viewFs.setAcl(viewFsPath2, entries);
verify(mockFs2).setAcl(mockFsPath2, entries);
viewFs.getAclStatus(viewFsPath1);
verify(mockFs1).getAclStatus(mockFsPath1);
viewFs.getAclStatus(viewFsPath2);
verify(mockFs2).getAclStatus(mockFsPath2);
}
static | TestViewFileSystemDelegation |
java | apache__camel | components/camel-test/camel-test-junit5/src/test/java/org/apache/camel/test/junit5/CamelTestSupportTest.java | {
"start": 1157,
"end": 2729
} | class ____ extends CamelTestSupport {
@Override
public void doPreSetup() throws Exception {
replaceRouteFromWith("routeId", "direct:start");
}
@Test
public void replacesFromEndpoint() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
template.sendBody("direct:start", "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void exceptionThrownWhenEndpointNotFoundAndNoCreate() {
assertThrows(NoSuchEndpointException.class, () -> getMockEndpoint("mock:bogus", false));
}
@Test
public void exceptionThrownWhenEndpointNotAMockEndpoint() {
assertThrows(NoSuchEndpointException.class, () -> getMockEndpoint("direct:something", false));
}
@Test
public void autoCreateNonExisting() {
MockEndpoint mock = getMockEndpoint("mock:bogus2", true);
assertNotNull(mock);
}
@Test
public void testExpression() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.message(0).body().matches(expression().simple().expression("${body} contains ' foo '").trim(false).end());
template.sendBody("direct:start", " foo ");
mock.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:something").id("routeId").to("mock:result");
}
};
}
}
| CamelTestSupportTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1273/Issue1273Test.java | {
"start": 560,
"end": 1474
} | class ____ {
@ProcessorTest
public void shouldCorrectlyMapCollectionWithNullValueMappingStrategyReturnDefault() {
EntityMapperReturnDefault entityMapper = Mappers.getMapper( EntityMapperReturnDefault.class );
Entity entity = createEntityWithEmptyList();
Dto dto = entityMapper.asTarget( entity );
assertThat( dto.getLongs() ).isNotNull();
}
@ProcessorTest
public void shouldCorrectlyMapCollectionWithNullValueMappingStrategyReturnNull() {
EntityMapperReturnNull entityMapper = Mappers.getMapper( EntityMapperReturnNull.class );
Entity entity = createEntityWithEmptyList();
Dto dto = entityMapper.asTarget( entity );
assertThat( dto.getLongs() ).isNull();
}
private Entity createEntityWithEmptyList() {
Entity entity = new Entity();
entity.setLongs( null );
return entity;
}
}
| Issue1273Test |
java | google__guice | extensions/persist/test/com/google/inject/persist/jpa/JpaWorkManagerTest.java | {
"start": 3715,
"end": 4205
} | class ____ {
@Inject EntityManager em;
@Transactional
public void runOperationInTxn() {
JpaTestEntity testEntity = new JpaTestEntity();
testEntity.setText(UNIQUE_TEXT_3);
em.persist(testEntity);
}
@Transactional
public void runOperationInTxnError() {
JpaTestEntity testEntity = new JpaTestEntity();
testEntity.setText(UNIQUE_TEXT_3 + "transient never in db!" + hashCode());
em.persist(testEntity);
}
}
}
| TransactionalObject |
java | google__dagger | javatests/dagger/hilt/android/PackagePrivateConstructorTest.java | {
"start": 2067,
"end": 2240
} | class ____ {
@Rule public final HiltAndroidRule rule = new HiltAndroidRule(this);
@AndroidEntryPoint(BaseActivity.class)
public static final | PackagePrivateConstructorTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableSwitchMap.java | {
"start": 2049,
"end": 10899
} | class ____<T, R> extends AtomicInteger implements FlowableSubscriber<T>, Subscription {
private static final long serialVersionUID = -3491074160481096299L;
final Subscriber<? super R> downstream;
final Function<? super T, ? extends Publisher<? extends R>> mapper;
final int bufferSize;
final boolean delayErrors;
volatile boolean done;
final AtomicThrowable errors;
volatile boolean cancelled;
Subscription upstream;
final AtomicReference<SwitchMapInnerSubscriber<T, R>> active = new AtomicReference<>();
final AtomicLong requested = new AtomicLong();
static final SwitchMapInnerSubscriber<Object, Object> CANCELLED;
static {
CANCELLED = new SwitchMapInnerSubscriber<>(null, -1L, 1);
CANCELLED.cancel();
}
volatile long unique;
SwitchMapSubscriber(Subscriber<? super R> actual,
Function<? super T, ? extends Publisher<? extends R>> mapper, int bufferSize,
boolean delayErrors) {
this.downstream = actual;
this.mapper = mapper;
this.bufferSize = bufferSize;
this.delayErrors = delayErrors;
this.errors = new AtomicThrowable();
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
if (done) {
return;
}
long c = unique + 1;
unique = c;
SwitchMapInnerSubscriber<T, R> inner = active.get();
if (inner != null) {
inner.cancel();
}
Publisher<? extends R> p;
try {
p = Objects.requireNonNull(mapper.apply(t), "The publisher returned is null");
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
upstream.cancel();
onError(e);
return;
}
SwitchMapInnerSubscriber<T, R> nextInner = new SwitchMapInnerSubscriber<>(this, c, bufferSize);
for (;;) {
inner = active.get();
if (inner == CANCELLED) {
break;
}
if (active.compareAndSet(inner, nextInner)) {
p.subscribe(nextInner);
break;
}
}
}
@Override
public void onError(Throwable t) {
if (!done && errors.tryAddThrowable(t)) {
if (!delayErrors) {
disposeInner();
}
done = true;
drain();
} else {
RxJavaPlugins.onError(t);
}
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
drain();
}
@Override
public void request(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(requested, n);
if (unique == 0L) {
upstream.request(Long.MAX_VALUE);
} else {
drain();
}
}
}
@Override
public void cancel() {
if (!cancelled) {
cancelled = true;
upstream.cancel();
disposeInner();
errors.tryTerminateAndReport();
}
}
@SuppressWarnings("unchecked")
void disposeInner() {
SwitchMapInnerSubscriber<T, R> a = active.getAndSet((SwitchMapInnerSubscriber<T, R>)CANCELLED);
if (a != CANCELLED && a != null) {
a.cancel();
}
}
void drain() {
if (getAndIncrement() != 0) {
return;
}
final Subscriber<? super R> a = downstream;
int missing = 1;
for (;;) {
if (cancelled) {
return;
}
if (done) {
if (delayErrors) {
if (active.get() == null) {
errors.tryTerminateConsumer(a);
return;
}
} else {
Throwable err = errors.get();
if (err != null) {
disposeInner();
errors.tryTerminateConsumer(a);
return;
} else
if (active.get() == null) {
a.onComplete();
return;
}
}
}
SwitchMapInnerSubscriber<T, R> inner = active.get();
SimpleQueue<R> q = inner != null ? inner.queue : null;
if (q != null) {
long r = requested.get();
long e = 0L;
boolean retry = false;
while (e != r) {
if (cancelled) {
return;
}
boolean d = inner.done;
R v;
try {
v = q.poll();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
inner.cancel();
errors.tryAddThrowableOrReport(ex);
d = true;
v = null;
}
boolean empty = v == null;
if (inner != active.get()) {
retry = true;
break;
}
if (d) {
if (!delayErrors) {
Throwable err = errors.get();
if (err != null) {
errors.tryTerminateConsumer(a);
return;
} else
if (empty) {
active.compareAndSet(inner, null);
retry = true;
break;
}
} else {
if (empty) {
active.compareAndSet(inner, null);
retry = true;
break;
}
}
}
if (empty) {
break;
}
a.onNext(v);
e++;
}
if (e == r) {
if (inner.done) {
if (!delayErrors) {
Throwable err = errors.get();
if (err != null) {
disposeInner();
errors.tryTerminateConsumer(a);
return;
} else
if (q.isEmpty()) {
active.compareAndSet(inner, null);
continue;
}
} else {
if (q.isEmpty()) {
active.compareAndSet(inner, null);
continue;
}
}
}
}
if (e != 0L) {
if (!cancelled) {
if (r != Long.MAX_VALUE) {
requested.addAndGet(-e);
}
inner.request(e);
}
}
if (retry) {
continue;
}
}
missing = addAndGet(-missing);
if (missing == 0) {
break;
}
}
}
}
static final | SwitchMapSubscriber |
java | apache__flink | flink-kubernetes/src/main/java/org/apache/flink/kubernetes/KubernetesClusterClientFactory.java | {
"start": 1752,
"end": 3379
} | class ____
extends AbstractContainerizedClusterClientFactory<String> {
private static final String CLUSTER_ID_PREFIX = "flink-cluster-";
@Override
public boolean isCompatibleWith(Configuration configuration) {
checkNotNull(configuration);
final String deploymentTarget = configuration.get(DeploymentOptions.TARGET);
return KubernetesDeploymentTarget.isValidKubernetesTarget(deploymentTarget);
}
@Override
public KubernetesClusterDescriptor createClusterDescriptor(Configuration configuration) {
checkNotNull(configuration);
if (!configuration.contains(KubernetesConfigOptions.CLUSTER_ID)) {
final String clusterId = generateClusterId();
configuration.set(KubernetesConfigOptions.CLUSTER_ID, clusterId);
}
return new KubernetesClusterDescriptor(
configuration,
FlinkKubeClientFactory.getInstance(),
new DefaultKubernetesArtifactUploader());
}
@Nullable
@Override
public String getClusterId(Configuration configuration) {
checkNotNull(configuration);
return configuration.get(KubernetesConfigOptions.CLUSTER_ID);
}
@Override
public Optional<String> getApplicationTargetName() {
return Optional.of(KubernetesDeploymentTarget.APPLICATION.getName());
}
private String generateClusterId() {
final String randomID = new AbstractID().toString();
return (CLUSTER_ID_PREFIX + randomID)
.substring(0, Constants.MAXIMUM_CHARACTERS_OF_CLUSTER_ID);
}
}
| KubernetesClusterClientFactory |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/aot/generate/ValueCodeGeneratorTests.java | {
"start": 6226,
"end": 6615
} | class ____ {
@Test
void generateWhenEnum() {
assertThat(resolve(generateCode(ChronoUnit.DAYS)))
.hasImport(ChronoUnit.class).hasValueCode("ChronoUnit.DAYS");
}
@Test
void generateWhenEnumWithClassBody() {
assertThat(resolve(generateCode(EnumWithClassBody.TWO)))
.hasImport(EnumWithClassBody.class).hasValueCode("EnumWithClassBody.TWO");
}
}
@Nested
| EnumTests |
java | google__guava | android/guava/src/com/google/common/util/concurrent/Service.java | {
"start": 2472,
"end": 7443
} | interface ____ {
/**
* If the service state is {@link State#NEW}, this initiates service startup and returns
* immediately. A stopped service may not be restarted.
*
* @return this
* @throws IllegalStateException if the service is not {@link State#NEW}
* @since 15.0
*/
@CanIgnoreReturnValue
Service startAsync();
/** Returns {@code true} if this service is {@linkplain State#RUNNING running}. */
boolean isRunning();
/** Returns the lifecycle state of the service. */
State state();
/**
* If the service is {@linkplain State#STARTING starting} or {@linkplain State#RUNNING running},
* this initiates service shutdown and returns immediately. If the service is {@linkplain
* State#NEW new}, it is {@linkplain State#TERMINATED terminated} without having been started nor
* stopped. If the service has already been stopped, this method returns immediately without
* taking action.
*
* @return this
* @since 15.0
*/
@CanIgnoreReturnValue
Service stopAsync();
/**
* Waits for the {@link Service} to reach the {@linkplain State#RUNNING running state}.
*
* @throws IllegalStateException if the service reaches a state from which it is not possible to
* enter the {@link State#RUNNING} state. e.g. if the {@code state} is {@code
* State#TERMINATED} when this method is called then this will throw an IllegalStateException.
* @since 15.0
*/
void awaitRunning();
/**
* Waits for the {@link Service} to reach the {@linkplain State#RUNNING running state} for no more
* than the given time.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @throws TimeoutException if the service has not reached the given state within the deadline
* @throws IllegalStateException if the service reaches a state from which it is not possible to
* enter the {@link State#RUNNING RUNNING} state. e.g. if the {@code state} is {@code
* State#TERMINATED} when this method is called then this will throw an IllegalStateException.
* @since 15.0
*/
@SuppressWarnings("GoodTime") // should accept a java.time.Duration
void awaitRunning(long timeout, TimeUnit unit) throws TimeoutException;
/**
* Waits for the {@link Service} to reach the {@linkplain State#TERMINATED terminated state}.
*
* @throws IllegalStateException if the service {@linkplain State#FAILED fails}.
* @since 15.0
*/
void awaitTerminated();
/**
* Waits for the {@link Service} to reach a terminal state (either {@link Service.State#TERMINATED
* terminated} or {@link Service.State#FAILED failed}) for no more than the given time.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @throws TimeoutException if the service has not reached the given state within the deadline
* @throws IllegalStateException if the service {@linkplain State#FAILED fails}.
* @since 15.0
*/
@SuppressWarnings("GoodTime") // should accept a java.time.Duration
void awaitTerminated(long timeout, TimeUnit unit) throws TimeoutException;
/**
* Returns the {@link Throwable} that caused this service to fail.
*
* @throws IllegalStateException if this service's state isn't {@linkplain State#FAILED FAILED}.
* @since 14.0
*/
Throwable failureCause();
/**
* Registers a {@link Listener} to be {@linkplain Executor#execute executed} on the given
* executor. The listener will have the corresponding transition method called whenever the
* service changes state. The listener will not have previous state changes replayed, so it is
* suggested that listeners are added before the service starts.
*
* <p>{@code addListener} guarantees execution ordering across calls to a given listener but not
* across calls to multiple listeners. Specifically, a given listener will have its callbacks
* invoked in the same order as the underlying service enters those states. Additionally, at most
* one of the listener's callbacks will execute at once. However, multiple listeners' callbacks
* may execute concurrently, and listeners may execute in an order different from the one in which
* they were registered.
*
* <p>RuntimeExceptions thrown by a listener will be caught and logged. Any exception thrown
* during {@code Executor.execute} (e.g., a {@code RejectedExecutionException}) will be caught and
* logged.
*
* @param listener the listener to run when the service changes state is complete
* @param executor the executor in which the listeners callback methods will be run. For fast,
* lightweight listeners that would be safe to execute in any thread, consider {@link
* MoreExecutors#directExecutor}.
* @since 13.0
*/
void addListener(Listener listener, Executor executor);
/**
* The lifecycle states of a service.
*
* <p>The ordering of the {@link State} | Service |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/CanBeStaticAnalyzer.java | {
"start": 4132,
"end": 4198
} | class ____ unqualified references to sibling types, e.g.:
//
// | in |
java | spring-projects__spring-boot | module/spring-boot-servlet/src/main/java/org/springframework/boot/servlet/autoconfigure/actuate/web/ServletEndpointManagementContextConfiguration.java | {
"start": 1520,
"end": 2058
} | class ____ {
@Bean
@SuppressWarnings("removal")
public IncludeExcludeEndpointFilter<org.springframework.boot.actuate.endpoint.web.ExposableServletEndpoint> servletExposeExcludePropertyEndpointFilter(
WebEndpointProperties properties) {
WebEndpointProperties.Exposure exposure = properties.getExposure();
return new IncludeExcludeEndpointFilter<>(
org.springframework.boot.actuate.endpoint.web.ExposableServletEndpoint.class, exposure.getInclude(),
exposure.getExclude());
}
}
| ServletEndpointManagementContextConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/OneToManyTest.java | {
"start": 18673,
"end": 18723
} | class ____ {
@Id
Long id;
}
}
| ParentUnawareChild |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/generic/ErasedGenericTest.java | {
"start": 689,
"end": 1100
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(ErasedTypeProducer.class, Claim.class, Target.class);
@Test
public void testPrimitiveProducers() {
ArcContainer arc = Arc.container();
Target target = arc.instance(Target.class).get();
assertEquals("something", target.getSomething());
}
@Singleton
static | ErasedGenericTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java | {
"start": 5858,
"end": 9890
} | class ____ extends AbstractService {
private static final Logger LOG =
LoggerFactory.getLogger(HistoryClientService.class);
private HSClientProtocol protocolHandler;
private Server server;
private WebApp webApp;
private InetSocketAddress bindAddress;
private HistoryContext history;
private JHSDelegationTokenSecretManager jhsDTSecretManager;
public HistoryClientService(HistoryContext history,
JHSDelegationTokenSecretManager jhsDTSecretManager) {
super("HistoryClientService");
this.history = history;
this.protocolHandler = new HSClientProtocolHandler();
this.jhsDTSecretManager = jhsDTSecretManager;
}
protected void serviceStart() throws Exception {
Configuration conf = getConfig();
YarnRPC rpc = YarnRPC.create(conf);
initializeWebApp(conf);
InetSocketAddress address = conf.getSocketAddr(
JHAdminConfig.MR_HISTORY_BIND_HOST,
JHAdminConfig.MR_HISTORY_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_PORT);
server =
rpc.getServer(HSClientProtocol.class, protocolHandler, address,
conf, jhsDTSecretManager,
conf.getInt(JHAdminConfig.MR_HISTORY_CLIENT_THREAD_COUNT,
JHAdminConfig.DEFAULT_MR_HISTORY_CLIENT_THREAD_COUNT));
// Enable service authorization?
if (conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
false)) {
server.refreshServiceAcl(conf, new ClientHSPolicyProvider());
}
server.start();
this.bindAddress = conf.updateConnectAddr(JHAdminConfig.MR_HISTORY_BIND_HOST,
JHAdminConfig.MR_HISTORY_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS,
server.getListenerAddress());
LOG.info("Instantiated HistoryClientService at " + this.bindAddress);
super.serviceStart();
}
@VisibleForTesting
protected void initializeWebApp(Configuration conf) throws IOException {
webApp = new HsWebApp(history);
setupFilters(conf);
InetSocketAddress bindAddress = MRWebAppUtil.getJHSWebBindAddress(conf);
ApplicationClientProtocol appClientProtocol =
ClientRMProxy.createRMProxy(conf, ApplicationClientProtocol.class);
// NOTE: there should be a .at(InetSocketAddress)
WebApps
.$for("jobhistory", HistoryClientService.class, this, "hs-ws")
.with(conf)
.withHttpSpnegoKeytabKey(
JHAdminConfig.MR_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
.withHttpSpnegoPrincipalKey(
JHAdminConfig.MR_WEBAPP_SPNEGO_USER_NAME_KEY)
.withCSRFProtection(JHAdminConfig.MR_HISTORY_CSRF_PREFIX)
.withXFSProtection(JHAdminConfig.MR_HISTORY_XFS_PREFIX)
.withAppClientProtocol(appClientProtocol)
.withResourceConfig(configure(conf, appClientProtocol))
.at(NetUtils.getHostPortString(bindAddress)).start(webApp);
String connectHost = MRWebAppUtil.getJHSWebappURLWithoutScheme(conf).split(":")[0];
MRWebAppUtil.setJHSWebappURLWithoutScheme(conf,
connectHost + ":" + webApp.getListenerAddress().getPort());
}
@Override
protected void serviceStop() throws Exception {
if (server != null) {
server.stop();
}
if (webApp != null) {
webApp.stop();
}
super.serviceStop();
}
@Private
public MRClientProtocol getClientHandler() {
return this.protocolHandler;
}
@Private
public InetSocketAddress getBindAddress() {
return this.bindAddress;
}
private void setupFilters(Configuration conf) {
boolean enableCorsFilter =
conf.getBoolean(JHAdminConfig.MR_HISTORY_ENABLE_CORS_FILTER,
JHAdminConfig.DEFAULT_MR_HISTORY_ENABLE_CORS_FILTER);
if (enableCorsFilter) {
conf.setBoolean(HttpCrossOriginFilterInitializer.PREFIX
+ HttpCrossOriginFilterInitializer.ENABLED_SUFFIX, true);
}
}
private | HistoryClientService |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/server/upload/UploadController.java | {
"start": 1294,
"end": 3032
} | class ____ {
// end::class[]
// tag::file[]
@Post(value = "/", consumes = MULTIPART_FORM_DATA, produces = TEXT_PLAIN) // <1>
@SingleResult
public Publisher<HttpResponse<String>> upload(StreamingFileUpload file) { // <2>
File tempFile;
try {
tempFile = File.createTempFile(file.getFilename(), "temp");
} catch (IOException e) {
return Mono.error(e);
}
Publisher<Boolean> uploadPublisher = file.transferTo(tempFile); // <3>
return Mono.from(uploadPublisher) // <4>
.map(success -> {
if (success) {
return HttpResponse.ok("Uploaded");
} else {
return HttpResponse.<String>status(CONFLICT)
.body("Upload Failed");
}
});
}
// end::file[]
// tag::outputStream[]
@Post(value = "/outputStream", consumes = MULTIPART_FORM_DATA, produces = TEXT_PLAIN) // <1>
@SingleResult
public Mono<HttpResponse<String>> uploadOutputStream(StreamingFileUpload file) { // <2>
OutputStream outputStream = new ByteArrayOutputStream(); // <3>
Publisher<Boolean> uploadPublisher = file.transferTo(outputStream); // <4>
return Mono.from(uploadPublisher) // <5>
.map(success -> {
if (success) {
return HttpResponse.ok("Uploaded");
} else {
return HttpResponse.<String>status(CONFLICT)
.body("Upload Failed");
}
});
}
// end::outputStream[]
// tag::endclass[]
}
// end::endclass[]
| UploadController |
java | quarkusio__quarkus | integration-tests/jpa/src/main/java/io/quarkus/it/jpa/attributeconverter/MyDataNotRequiringCDINoInjectionConverter.java | {
"start": 386,
"end": 1142
} | class ____ implements AttributeConverter<MyDataNotRequiringCDI, String> {
private final BeanInstantiator beanInstantiator;
public MyDataNotRequiringCDINoInjectionConverter() {
this.beanInstantiator = BeanInstantiator.fromCaller();
}
@Override
public String convertToDatabaseColumn(MyDataNotRequiringCDI attribute) {
MyCdiContext.checkNotAvailable(null, beanInstantiator);
return attribute == null ? null : attribute.getContent();
}
@Override
public MyDataNotRequiringCDI convertToEntityAttribute(String dbData) {
MyCdiContext.checkNotAvailable(null, beanInstantiator);
return dbData == null ? null : new MyDataNotRequiringCDI(dbData);
}
}
| MyDataNotRequiringCDINoInjectionConverter |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/IntegerSyntheticSourceNativeArrayIntegrationTests.java | {
"start": 586,
"end": 991
} | class ____ extends NativeArrayIntegrationTestCase {
@Override
protected String getFieldTypeName() {
return "integer";
}
@Override
protected Integer getRandomValue() {
return randomInt();
}
@Override
protected String getMalformedValue() {
return RandomStrings.randomAsciiOfLength(random(), 8);
}
}
| IntegerSyntheticSourceNativeArrayIntegrationTests |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ConditionEvaluator.java | {
"start": 1893,
"end": 2470
} | class ____ {
private final ConditionContextImpl context;
/**
* Create a new {@link ConditionEvaluator} instance.
*/
public ConditionEvaluator(@Nullable BeanDefinitionRegistry registry,
@Nullable Environment environment, @Nullable ResourceLoader resourceLoader) {
this.context = new ConditionContextImpl(registry, environment, resourceLoader);
}
/**
* Determine if an item should be skipped based on {@code @Conditional} annotations.
* The {@link ConfigurationPhase} will be deduced from the type of item (i.e. a
* {@code @Configuration} | ConditionEvaluator |
java | micronaut-projects__micronaut-core | http-server-netty/src/test/groovy/io/micronaut/http/server/netty/interceptor/TestReactiveFilter.java | {
"start": 1167,
"end": 1759
} | class ____ implements HttpServerFilter{
@Override
public int getOrder() {
return TestSecurityFilter.POSITION - 10;
}
@Override
public Publisher<MutableHttpResponse<?>> doFilter(HttpRequest<?> request, ServerFilterChain chain) {
SomeService someService = new SomeService();
return someService
.getSomething()
.switchMap(s -> {
request.getAttributes().put("SomeServiceValue", s);
return chain.proceed(request);
});
}
| TestReactiveFilter |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerStringConcatenationTest.java | {
"start": 4136,
"end": 4553
} | class ____ {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
public void method(int x, int y) {
logger.atInfo().log(x + y + " sum; mean " + (x + y) / 2);
}
}
""")
.addOutputLines(
"out/Test.java",
"""
import com.google.common.flogger.FluentLogger;
| Test |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/SplitterOnPrepareExceptionTest.java | {
"start": 1924,
"end": 2243
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) {
String name = exchange.getIn().getBody(String.class);
if ("Kaboom".equals(name)) {
throw new IllegalArgumentException("Forced error");
}
}
}
}
| FixNamePrepare |
java | quarkusio__quarkus | test-framework/maven/src/main/java/io/quarkus/maven/it/verifier/MavenProcessInvocationResult.java | {
"start": 413,
"end": 1770
} | class ____ implements InvocationResult {
private Process process;
private CommandLineException exception;
void destroy() {
if (process != null) {
process.destroy();
try {
process.waitFor();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
e.printStackTrace();
}
}
}
MavenProcessInvocationResult setProcess(Process process) {
this.process = process;
return this;
}
public MavenProcessInvocationResult setException(CommandLineException exception) {
// Print the stack trace immediately to give some feedback early
// In intellij, the used `mvn` executable is not "executable" by default on Mac and probably linux.
// You need to chmod +x the file.
exception.printStackTrace();
this.exception = exception;
return this;
}
@Override
public CommandLineException getExecutionException() {
return exception;
}
@Override
public int getExitCode() {
if (process == null) {
throw new IllegalStateException("No process");
} else {
return process.exitValue();
}
}
public Process getProcess() {
return process;
}
}
| MavenProcessInvocationResult |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/IdClassRepeatedQueryTest.java | {
"start": 4248,
"end": 4786
} | class ____ {
@Id
@ManyToOne
@JoinColumn(name = "id_person", updatable = false)
private Person person;
@Id
@ManyToOne
@JoinColumn(name = "id_corporation", updatable = false)
private Corporation corporation;
public CorporationUser() {
}
public CorporationUser(Person person, Corporation corporation) {
this.person = person;
this.corporation = corporation;
}
public Person getPerson() {
return person;
}
public Corporation getCorporation() {
return corporation;
}
public static | CorporationUser |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/web/server/OneTimeTokenLoginSpecTests.java | {
"start": 11930,
"end": 12679
} | class ____ {
@Bean
SecurityWebFilterChain securityWebFilterChain(ServerHttpSecurity http,
ServerOneTimeTokenGenerationSuccessHandler ottSuccessHandler) {
// @formatter:off
http
.authorizeExchange((authorize) -> authorize
.anyExchange()
.authenticated()
)
.oneTimeTokenLogin((ott) -> ott
.tokenGenerationSuccessHandler(ottSuccessHandler)
);
// @formatter:on
return http.build();
}
@Bean
TestServerOneTimeTokenGenerationSuccessHandler ottSuccessHandler() {
return new TestServerOneTimeTokenGenerationSuccessHandler();
}
}
@Configuration(proxyBeanMethods = false)
@EnableWebFlux
@EnableWebFluxSecurity
@Import(UserDetailsServiceConfig.class)
static | OneTimeTokenDefaultConfig |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/transport/HttpClientTransport.java | {
"start": 2255,
"end": 6764
} | class ____ implements HttpTransport {
static final String REGISTRY_AUTH_HEADER = "X-Registry-Auth";
private final HttpClient client;
private final HttpHost host;
protected HttpClientTransport(HttpClient client, HttpHost host) {
Assert.notNull(client, "'client' must not be null");
Assert.notNull(host, "'host' must not be null");
this.client = client;
this.host = host;
}
/**
* Perform an HTTP GET operation.
* @param uri the destination URI
* @return the operation response
*/
@Override
public Response get(URI uri) {
return execute(new HttpGet(uri));
}
/**
* Perform an HTTP POST operation.
* @param uri the destination URI
* @return the operation response
*/
@Override
public Response post(URI uri) {
return execute(new HttpPost(uri));
}
/**
* Perform an HTTP POST operation.
* @param uri the destination URI
* @param registryAuth registry authentication credentials
* @return the operation response
*/
@Override
public Response post(URI uri, @Nullable String registryAuth) {
return execute(new HttpPost(uri), registryAuth);
}
/**
* Perform an HTTP POST operation.
* @param uri the destination URI
* @param contentType the content type to write
* @param writer a content writer
* @return the operation response
*/
@Override
public Response post(URI uri, String contentType, IOConsumer<OutputStream> writer) {
return execute(new HttpPost(uri), contentType, writer);
}
/**
* Perform an HTTP PUT operation.
* @param uri the destination URI
* @param contentType the content type to write
* @param writer a content writer
* @return the operation response
*/
@Override
public Response put(URI uri, String contentType, IOConsumer<OutputStream> writer) {
return execute(new HttpPut(uri), contentType, writer);
}
/**
* Perform an HTTP DELETE operation.
* @param uri the destination URI
* @return the operation response
*/
@Override
public Response delete(URI uri) {
return execute(new HttpDelete(uri));
}
/**
* Perform an HTTP HEAD operation.
* @param uri the destination URI
* @return the operation response
*/
@Override
public Response head(URI uri) {
return execute(new HttpHead(uri));
}
private Response execute(HttpUriRequestBase request, String contentType, IOConsumer<OutputStream> writer) {
request.setEntity(new WritableHttpEntity(contentType, writer));
return execute(request);
}
private Response execute(HttpUriRequestBase request, @Nullable String registryAuth) {
if (StringUtils.hasText(registryAuth)) {
request.setHeader(REGISTRY_AUTH_HEADER, registryAuth);
}
return execute(request);
}
private Response execute(HttpUriRequest request) {
try {
beforeExecute(request);
ClassicHttpResponse response = this.client.executeOpen(this.host, request, null);
int statusCode = response.getCode();
if (statusCode >= 400 && statusCode <= 500) {
byte[] content = readContent(response);
response.close();
Errors errors = (statusCode != 500) ? deserializeErrors(content) : null;
Message message = deserializeMessage(content);
throw new DockerEngineException(this.host.toHostString(), request.getUri(), statusCode,
response.getReasonPhrase(), errors, message, content);
}
return new HttpClientResponse(response);
}
catch (IOException | URISyntaxException ex) {
throw new DockerConnectionException(this.host.toHostString(), ex);
}
}
protected void beforeExecute(HttpRequest request) {
}
private byte @Nullable [] readContent(ClassicHttpResponse response) throws IOException {
HttpEntity entity = response.getEntity();
if (entity == null) {
return null;
}
try (InputStream stream = entity.getContent()) {
return (stream != null) ? stream.readAllBytes() : null;
}
}
private @Nullable Errors deserializeErrors(byte @Nullable [] content) {
if (content == null) {
return null;
}
try {
return SharedJsonMapper.get().readValue(content, Errors.class);
}
catch (JacksonException ex) {
return null;
}
}
private @Nullable Message deserializeMessage(byte @Nullable [] content) {
if (content == null) {
return null;
}
try {
Message message = SharedJsonMapper.get().readValue(content, Message.class);
return (message.getMessage() != null) ? message : null;
}
catch (JacksonException ex) {
return null;
}
}
HttpHost getHost() {
return this.host;
}
/**
* {@link HttpEntity} to send {@link Content} content.
*/
private static | HttpClientTransport |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/bugs/GenericsMockitoAnnotationsTest.java | {
"start": 820,
"end": 1062
} | class ____ {
<T extends Collection<E>, E> T getCollection(T collection) {
return collection;
}
}
@Before
public void setUp() throws Exception {
openMocks(this);
}
}
| TestCollectionSourceProvider |
java | elastic__elasticsearch | x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/analyze/ContendedRegisterAnalyzeAction.java | {
"start": 4815,
"end": 9037
} | class ____ extends ActionRunnable<Void> {
private long currentValue;
private final ActionListener<OptionalBytesReference> witnessListener;
Execution(long currentValue) {
super(l);
this.currentValue = currentValue;
this.witnessListener = listener.delegateFailure(this::handleWitness);
}
@Override
protected void doRun() {
if (((CancellableTask) task).notifyIfCancelled(listener) == false) {
blobContainer.compareAndExchangeRegister(
OperationPurpose.REPOSITORY_ANALYSIS,
registerName,
bytesFromLong(currentValue),
bytesFromLong(currentValue + 1L),
witnessListener
);
}
}
private void handleWitness(ActionListener<Void> delegate, OptionalBytesReference witnessOrEmpty) {
if (witnessOrEmpty.isPresent() == false) {
// Concurrent activity prevented us from updating the value, or even reading the concurrently-updated
// result, so we must just try again.
executor.execute(Execution.this);
return;
}
final long witness = longFromBytes(witnessOrEmpty.bytesReference());
if (witness == currentValue) {
delegate.onResponse(null);
} else if (witness < currentValue || witness >= request.getRequestCount()) {
delegate.onFailure(new IllegalStateException("register holds unexpected value [" + witness + "]"));
} else {
currentValue = witness;
executor.execute(Execution.this);
}
}
}
new Execution(initialValue).run();
});
}
@Override
public void onFailure(Exception e) {
if (e instanceof UnsupportedOperationException) {
// Registers are not supported on all repository types, and that's ok. If it's not supported here then the final
// check will also be unsupported, so it doesn't matter that we didn't do anything before this successful response.
outerListener.onResponse(ActionResponse.Empty.INSTANCE);
} else {
outerListener.onFailure(e);
}
}
};
if (request.getInitialRead() > request.getRequestCount()) {
// This is just the initial read, so we can use getRegister() despite its weaker read-after-write semantics: all subsequent
// operations of this action use compareAndExchangeRegister() and do not rely on this value being accurate.
blobContainer.getRegister(OperationPurpose.REPOSITORY_ANALYSIS, registerName, initialValueListener.delegateFailure((l, r) -> {
if (r.isPresent()) {
l.onResponse(r);
} else {
l.onFailure(new IllegalStateException("register read failed due to contention"));
}
}));
} else {
blobContainer.compareAndExchangeRegister(
OperationPurpose.REPOSITORY_ANALYSIS,
registerName,
bytesFromLong(request.getInitialRead()),
bytesFromLong(
request.getInitialRead() == request.getRequestCount() ? request.getRequestCount() + 1 : request.getInitialRead()
),
initialValueListener
);
}
}
static | Execution |
java | spring-projects__spring-boot | module/spring-boot-kafka/src/test/java/org/springframework/boot/kafka/autoconfigure/metrics/KafkaMetricsAutoConfigurationTests.java | {
"start": 2026,
"end": 4620
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(KafkaMetricsAutoConfiguration.class));
@Test
void whenThereIsAMeterRegistryThenMetricsListenersAreAdded() {
this.contextRunner.withBean(SimpleMeterRegistry.class, SimpleMeterRegistry::new)
.withConfiguration(AutoConfigurations.of(KafkaAutoConfiguration.class))
.run((context) -> {
assertThat(((DefaultKafkaProducerFactory<?, ?>) context.getBean(DefaultKafkaProducerFactory.class))
.getListeners()).hasSize(1).hasOnlyElementsOfTypes(MicrometerProducerListener.class);
assertThat(((DefaultKafkaConsumerFactory<?, ?>) context.getBean(DefaultKafkaConsumerFactory.class))
.getListeners()).hasSize(1).hasOnlyElementsOfTypes(MicrometerConsumerListener.class);
});
}
@Test
void whenThereIsNoMeterRegistryThenListenerCustomizationBacksOff() {
this.contextRunner.withConfiguration(AutoConfigurations.of(KafkaAutoConfiguration.class)).run((context) -> {
assertThat(((DefaultKafkaProducerFactory<?, ?>) context.getBean(DefaultKafkaProducerFactory.class))
.getListeners()).isEmpty();
assertThat(((DefaultKafkaConsumerFactory<?, ?>) context.getBean(DefaultKafkaConsumerFactory.class))
.getListeners()).isEmpty();
});
}
@Test
void whenKafkaStreamsIsEnabledAndThereIsAMeterRegistryThenMetricsListenersAreAdded() {
this.contextRunner.withConfiguration(AutoConfigurations.of(KafkaAutoConfiguration.class))
.withUserConfiguration(EnableKafkaStreamsConfiguration.class)
.withPropertyValues("spring.application.name=my-test-app")
.withBean(SimpleMeterRegistry.class, SimpleMeterRegistry::new)
.run((context) -> {
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context.getBean(StreamsBuilderFactoryBean.class);
assertThat(streamsBuilderFactoryBean.getListeners()).hasSize(1)
.hasOnlyElementsOfTypes(KafkaStreamsMicrometerListener.class);
});
}
@Test
void whenKafkaStreamsIsEnabledAndThereIsNoMeterRegistryThenListenerCustomizationBacksOff() {
this.contextRunner.withConfiguration(AutoConfigurations.of(KafkaAutoConfiguration.class))
.withUserConfiguration(EnableKafkaStreamsConfiguration.class)
.withPropertyValues("spring.application.name=my-test-app")
.run((context) -> {
StreamsBuilderFactoryBean streamsBuilderFactoryBean = context.getBean(StreamsBuilderFactoryBean.class);
assertThat(streamsBuilderFactoryBean.getListeners()).isEmpty();
});
}
@Configuration(proxyBeanMethods = false)
@EnableKafkaStreams
static | KafkaMetricsAutoConfigurationTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java | {
"start": 1166,
"end": 3125
} | class ____ extends LegacyActionRequest {
private final Job job;
public static Request parseRequest(XContentParser parser) {
Job.Builder jobBuilder = Job.REST_REQUEST_PARSER.apply(parser, null);
// When jobs are PUT their ID must be supplied in the URL - assume this will
// be valid unless an invalid job ID is specified in the JSON to be validated
jobBuilder.setId(jobBuilder.getId() != null ? jobBuilder.getId() : "ok");
// Validate that detector configs are unique.
// This validation logically belongs to validateInputFields call but we perform it only for PUT action to avoid BWC issues which
// would occur when parsing an old job config that already had duplicate detectors.
jobBuilder.validateDetectorsAreUnique();
return new Request(jobBuilder.build(new Date()));
}
public Request() {
this.job = null;
}
public Request(Job job) {
this.job = job;
}
public Request(StreamInput in) throws IOException {
super(in);
job = new Job(in);
}
public Job getJob() {
return job;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
job.writeTo(out);
}
@Override
public int hashCode() {
return Objects.hash(job);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Request other = (Request) obj;
return Objects.equals(job, other.job);
}
}
}
| Request |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/intTest/java/org/springframework/boot/maven/TestRunIntegrationTests.java | {
"start": 1384,
"end": 1906
} | class ____ = org.test.TestSampleApplication")
.contains("1. " + canonicalPathOf(project, "target/test-classes"))
.contains("2. " + canonicalPathOf(project, "target/classes"))
.containsPattern("3\\. .*spring-core")
.containsPattern("4\\. .*commons-logging"));
}
private String canonicalPathOf(File project, String path) throws IOException {
return new File(project, path).getCanonicalPath();
}
private String buildLog(File project) {
return contentOf(new File(project, "target/build.log"));
}
}
| name |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3104/Issue3104Test.java | {
"start": 507,
"end": 1185
} | class ____ {
@ProcessorTest
void shouldCorrectlyMapUpdateMappingWithTargetImmutableCollectionStrategy() {
Issue3104Mapper.Target target = new Issue3104Mapper.Target();
Issue3104Mapper.INSTANCE.update( target, new Issue3104Mapper.Source( null ) );
assertThat( target.getChildren() ).isEmpty();
Issue3104Mapper.INSTANCE.update(
target,
new Issue3104Mapper.Source( Collections.singletonList( new Issue3104Mapper.ChildSource( "tester" ) ) )
);
assertThat( target.getChildren() )
.extracting( Issue3104Mapper.Child::getMyField )
.containsExactly( "tester" );
}
}
| Issue3104Test |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/tracing/EventBusTracerTestBase.java | {
"start": 2888,
"end": 12698
} | class ____ implements VertxTracer<Object, Object> {
final Object receiveVal = new Object();
final Object receiveTrace = new Object();
final Object sendVal = new Object();
final Object sendTrace = new Object();
final List<String> sendEvents = new CopyOnWriteArrayList<>();
final List<String> receiveEvents = new CopyOnWriteArrayList<>();
private <T> String addressOf(T obj, TagExtractor<T> extractor) {
int len = extractor.len(obj);
for (int idx = 0;idx < len;idx++) {
if (extractor.name(obj, idx).equals("messaging.destination.name")) {
String value = extractor.value(obj, idx);
if (value.startsWith("__vertx")) {
value = "generated";
}
return value;
}
}
return null;
}
@Override
public <R> Object receiveRequest(Context context, SpanKind kind, TracingPolicy policy, R request, String operation, Iterable<Map.Entry<String, String>> headers, TagExtractor<R> tagExtractor) {
receiveKey.put(context, receiveVal);
Object body = ((Message)request).body();
receiveEvents.add("receiveRequest[" + addressOf(request, tagExtractor) + "]");
return receiveTrace;
}
@Override
public <R> void sendResponse(Context context, R response, Object payload, Throwable failure, TagExtractor<R> tagExtractor) {
assertSame(receiveTrace, payload);
assertSame(receiveVal, receiveKey.get(context));
receiveEvents.add("sendResponse[]");
}
@Override
public <R> Object sendRequest(Context context, SpanKind kind, TracingPolicy policy, R request, String operation, BiConsumer<String, String> headers, TagExtractor<R> tagExtractor) {
assertSame(sendVal, sendKey.get(context));
sendEvents.add("sendRequest[" + addressOf(request, tagExtractor) + "]");
assertTrue(request instanceof Message<?>);
return sendTrace;
}
@Override
public <R> void receiveResponse(Context context, R response, Object payload, Throwable failure, TagExtractor<R> tagExtractor) {
assertSame(sendTrace, payload);
assertSame(sendVal, sendKey.get(context));
if (failure != null) {
assertTrue(failure instanceof ReplyException);
ReplyException replyException = (ReplyException) failure;
sendEvents.add("receiveResponse[" + replyException.failureType() + "]");
} else {
Object body = response != null ? ((Message)response).body() : null;
sendEvents.add("receiveResponse[]");
}
}
}
@Test
public void testEventBusSend() throws Exception {
EventBusTracer ebTracer = new EventBusTracer();
tracer = ebTracer;
CountDownLatch latch = new CountDownLatch(1);
vertx2.runOnContext(v1 -> {
Context ctx = vertx2.getOrCreateContext();
vertx2.eventBus().consumer("the_address", msg -> {
assertNotSame(Vertx.currentContext(), ctx);
assertSameEventLoop(ctx, Vertx.currentContext());
assertEquals("msg", msg.body());
}).completion().onComplete(onSuccess(v2 -> {
latch.countDown();
}));
});
awaitLatch(latch);
vertx1.runOnContext(v -> {
Context ctx = vertx1.getOrCreateContext();
sendKey.put(ctx, ebTracer.sendVal);
vertx1.eventBus().send("the_address", "msg");
});
waitUntil(() -> ebTracer.sendEvents.size() + ebTracer.receiveEvents.size() == 4);
assertEquals(Arrays.asList("sendRequest[the_address]", "receiveResponse[]"), ebTracer.sendEvents);
assertEquals(Arrays.asList("receiveRequest[the_address]", "sendResponse[]"), ebTracer.receiveEvents);
}
@Test
public void testEventBusSendNoConsumer() {
EventBusTracer ebTracer = new EventBusTracer();
tracer = ebTracer;
Context ctx = vertx1.getOrCreateContext();
ctx.runOnContext(v -> {
sendKey.put(ctx, ebTracer.sendVal);
vertx1.eventBus().send("the_address", "msg");
});
waitUntil(() -> ebTracer.sendEvents.size() + ebTracer.receiveEvents.size() == 2);
assertEquals(Arrays.asList("sendRequest[the_address]", "receiveResponse[NO_HANDLERS]"), ebTracer.sendEvents);
assertEquals(Collections.emptyList(), ebTracer.receiveEvents);
}
@Test
public void testEventBusRequestReply() throws Exception {
EventBusTracer ebTracer = new EventBusTracer();
tracer = ebTracer;
CountDownLatch latch = new CountDownLatch(1);
vertx2.runOnContext(v1 -> {
Context ctx = vertx2.getOrCreateContext();
vertx2.eventBus().consumer("the_address", msg -> {
assertNotSame(ctx, vertx2.getOrCreateContext());
assertSameEventLoop(ctx, vertx2.getOrCreateContext());
assertEquals("msg_1", msg.body());
sendKey.put(vertx.getOrCreateContext(), ebTracer.sendVal);
msg.reply("msg_2");
}).completion().onComplete(onSuccess(v2 -> {
latch.countDown();
}));
});
awaitLatch(latch);
vertx1.runOnContext(v -> {
Context ctx = vertx1.getOrCreateContext();
sendKey.put(ctx, ebTracer.sendVal);
vertx1.eventBus().request("the_address", "msg_1").onComplete(onSuccess(reply -> {
assertSame(ctx, vertx1.getOrCreateContext());
assertSameEventLoop(ctx, vertx1.getOrCreateContext());
}));
});
waitUntil(() -> ebTracer.sendEvents.size() + ebTracer.receiveEvents.size() == 4);
assertEquals(Arrays.asList("sendRequest[the_address]", "receiveResponse[]"), ebTracer.sendEvents);
assertEquals(Arrays.asList("receiveRequest[the_address]", "sendResponse[]"), ebTracer.receiveEvents);
}
@Test
public void testEventBusRequestReplyFailure() throws Exception {
EventBusTracer ebTracer = new EventBusTracer();
tracer = ebTracer;
CountDownLatch latch = new CountDownLatch(1);
vertx1.eventBus().consumer("the_address", msg -> {
assertEquals("msg", msg.body());
sendKey.put(vertx.getOrCreateContext(), ebTracer.sendVal);
msg.fail(10, "it failed");
}).completion().onComplete(onSuccess(v -> {
latch.countDown();
}));
awaitLatch(latch);
Context ctx = vertx2.getOrCreateContext();
ctx.runOnContext(v1 -> {
sendKey.put(ctx, ebTracer.sendVal);
vertx2.eventBus().request("the_address", "msg").onComplete(onFailure(failure -> {
}));
});
waitUntil(() -> ebTracer.sendEvents.size() + ebTracer.receiveEvents.size() == 4);
assertEquals(Arrays.asList("sendRequest[the_address]", "receiveResponse[RECIPIENT_FAILURE]"), ebTracer.sendEvents);
assertEquals(Arrays.asList("receiveRequest[the_address]", "sendResponse[]"), ebTracer.receiveEvents);
}
@Test
public void testEventBusRequestNoConsumer() {
EventBusTracer ebTracer = new EventBusTracer();
tracer = ebTracer;
Context ctx = vertx2.getOrCreateContext();
ctx.runOnContext(v -> {
sendKey.put(ctx, ebTracer.sendVal);
vertx2.eventBus().request("the_address", "msg").onComplete(onFailure(failure -> { }));
});
waitUntil(() -> ebTracer.sendEvents.size() + ebTracer.receiveEvents.size() == 2);
assertEquals(Arrays.asList("sendRequest[the_address]", "receiveResponse[NO_HANDLERS]"), ebTracer.sendEvents);
assertEquals(Collections.emptyList(), ebTracer.receiveEvents);
}
@Test
public void testEventBusRequestTimeout() throws Exception {
EventBusTracer ebTracer = new EventBusTracer();
tracer = ebTracer;
CountDownLatch latch = new CountDownLatch(1);
vertx1.eventBus().consumer("the_address", msg -> {
// Let timeout
}).completion().onComplete(onSuccess(v -> {
latch.countDown();
}));
awaitLatch(latch);
Context ctx = vertx2.getOrCreateContext();
ctx.runOnContext(v1 -> {
sendKey.put(ctx, ebTracer.sendVal);
vertx2.eventBus().request("the_address", "msg", new DeliveryOptions().setSendTimeout(100)).onComplete(onFailure(failure -> {
}));
});
waitUntil(() -> ebTracer.sendEvents.size() + ebTracer.receiveEvents.size() == 3);
assertEquals(Arrays.asList("sendRequest[the_address]", "receiveResponse[TIMEOUT]"), ebTracer.sendEvents);
assertEquals(Arrays.asList("receiveRequest[the_address]"), ebTracer.receiveEvents);
}
@Test
public void testEventBusRequestReplyReply() throws Exception {
EventBusTracer ebTracer = new EventBusTracer();
tracer = ebTracer;
CountDownLatch latch = new CountDownLatch(1);
vertx2.runOnContext(v1 -> {
Context ctx = vertx2.getOrCreateContext();
vertx2.eventBus().consumer("the_address", msg -> {
Context consumerCtx = vertx2.getOrCreateContext();
assertNotSame(ctx, consumerCtx);
assertSameEventLoop(ctx, consumerCtx);
assertEquals("msg_1", msg.body());
sendKey.put(vertx.getOrCreateContext(), ebTracer.sendVal);
msg.replyAndRequest("msg_2").onComplete(reply -> {
assertSame(consumerCtx, vertx2.getOrCreateContext());
assertSameEventLoop(consumerCtx, vertx2.getOrCreateContext());
});
}).completion().onComplete(onSuccess(v2 -> {
latch.countDown();
}));
});
awaitLatch(latch);
vertx1.runOnContext(v -> {
Context ctx = vertx1.getOrCreateContext();
sendKey.put(ctx, ebTracer.sendVal);
vertx1.eventBus().request("the_address", "msg_1").onComplete(onSuccess(reply -> {
assertSame(Vertx.currentContext(), ctx);
sendKey.put(ctx, ebTracer.sendVal);
reply.reply("msg_3");
}));
});
waitUntil(() -> ebTracer.sendEvents.size() + ebTracer.receiveEvents.size() == 8);
assertEquals(Arrays.asList("sendRequest[the_address]", "receiveResponse[]", "sendRequest[generated]", "receiveResponse[]"), ebTracer.sendEvents);
assertEquals(Arrays.asList("receiveRequest[the_address]", "sendResponse[]", "receiveRequest[generated]", "sendResponse[]"), ebTracer.receiveEvents);
}
}
| EventBusTracer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/comment/CommentElementCollectionTest.java | {
"start": 2511,
"end": 2727
} | class ____ implements Integrator {
@Override
public void integrate(Metadata metadata, BootstrapContext bootstrapContext, SessionFactoryImplementor sessionFactory) {
METADATA = metadata;
}
}
}
| MetadataIntegrator |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.