language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/concurrent/DelegatingSecurityContextRunnable.java | {
"start": 1417,
"end": 6076
} | class ____ implements Runnable {
private final Runnable delegate;
private final boolean explicitSecurityContextProvided;
private SecurityContextHolderStrategy securityContextHolderStrategy = SecurityContextHolder
.getContextHolderStrategy();
/**
* The {@link SecurityContext} that the delegate {@link Runnable} will be ran as.
*/
private SecurityContext delegateSecurityContext;
/**
* The {@link SecurityContext} that was on the {@link SecurityContextHolder} prior to
* being set to the delegateSecurityContext.
*/
private @Nullable SecurityContext originalSecurityContext;
/**
* Creates a new {@link DelegatingSecurityContextRunnable} with a specific
* {@link SecurityContext}.
* @param delegate the delegate {@link Runnable} to run with the specified
* {@link SecurityContext}. Cannot be null.
* @param securityContext the {@link SecurityContext} to establish for the delegate
* {@link Runnable}. Cannot be null.
*/
public DelegatingSecurityContextRunnable(Runnable delegate, SecurityContext securityContext) {
this(delegate, securityContext, true);
}
/**
* Creates a new {@link DelegatingSecurityContextRunnable} with the
* {@link SecurityContext} from the {@link SecurityContextHolder}.
* @param delegate the delegate {@link Runnable} to run under the current
* {@link SecurityContext}. Cannot be null.
*/
public DelegatingSecurityContextRunnable(Runnable delegate) {
this(delegate, SecurityContextHolder.getContext(), false);
}
private DelegatingSecurityContextRunnable(Runnable delegate, SecurityContext securityContext,
boolean explicitSecurityContextProvided) {
Assert.notNull(delegate, "delegate cannot be null");
Assert.notNull(securityContext, "securityContext cannot be null");
this.delegate = delegate;
this.delegateSecurityContext = securityContext;
this.explicitSecurityContextProvided = explicitSecurityContextProvided;
}
@Override
public void run() {
this.originalSecurityContext = this.securityContextHolderStrategy.getContext();
try {
this.securityContextHolderStrategy.setContext(this.delegateSecurityContext);
this.delegate.run();
}
finally {
SecurityContext emptyContext = this.securityContextHolderStrategy.createEmptyContext();
if (emptyContext.equals(this.originalSecurityContext)) {
this.securityContextHolderStrategy.clearContext();
}
else {
this.securityContextHolderStrategy.setContext(this.originalSecurityContext);
}
this.originalSecurityContext = null;
}
}
/**
* Sets the {@link SecurityContextHolderStrategy} to use. The default action is to use
* the {@link SecurityContextHolderStrategy} stored in {@link SecurityContextHolder}.
*
* @since 5.8
*/
public void setSecurityContextHolderStrategy(SecurityContextHolderStrategy securityContextHolderStrategy) {
Assert.notNull(securityContextHolderStrategy, "securityContextHolderStrategy cannot be null");
this.securityContextHolderStrategy = securityContextHolderStrategy;
if (!this.explicitSecurityContextProvided) {
this.delegateSecurityContext = this.securityContextHolderStrategy.getContext();
}
}
@Override
public String toString() {
return this.delegate.toString();
}
/**
* Factory method for creating a {@link DelegatingSecurityContextRunnable}.
* @param delegate the original {@link Runnable} that will be delegated to after
* establishing a {@link SecurityContext} on the {@link SecurityContextHolder}. Cannot
* have null.
* @param securityContext the {@link SecurityContext} to establish before invoking the
* delegate {@link Runnable}. If null, the current {@link SecurityContext} from the
* {@link SecurityContextHolder} will be used.
* @return
*/
public static Runnable create(Runnable delegate, @Nullable SecurityContext securityContext) {
Assert.notNull(delegate, "delegate cannot be null");
return (securityContext != null) ? new DelegatingSecurityContextRunnable(delegate, securityContext)
: new DelegatingSecurityContextRunnable(delegate);
}
static Runnable create(Runnable delegate, @Nullable SecurityContext securityContext,
SecurityContextHolderStrategy securityContextHolderStrategy) {
Assert.notNull(delegate, "delegate cannot be null");
Assert.notNull(securityContextHolderStrategy, "securityContextHolderStrategy cannot be null");
DelegatingSecurityContextRunnable runnable = (securityContext != null)
? new DelegatingSecurityContextRunnable(delegate, securityContext)
: new DelegatingSecurityContextRunnable(delegate);
runnable.setSecurityContextHolderStrategy(securityContextHolderStrategy);
return runnable;
}
}
| DelegatingSecurityContextRunnable |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/a/User.java | {
"start": 42,
"end": 503
} | class ____{
public User() {
}
public User(int age, String name) {
super();
this.age = age;
this.name = name;
}
private int age;
private String name;
@Override
public String toString() {
return "User [age=" + age + ", name=" + name + "]";
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| User |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/TestEmptyClass.java | {
"start": 936,
"end": 2194
} | class ____ extends ValueSerializer<NonZero>
{
@Override
public void serialize(NonZero value, JsonGenerator jgen, SerializationContext provider)
{
jgen.writeNumber(value.nr);
}
@Override
public boolean isEmpty(SerializationContext provider, NonZero value) {
if (value == null) return true;
return (value.nr == 0);
}
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
protected final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testEmptyWithAnnotations() throws Exception
{
// First: without annotations, should complain
try {
MAPPER.writer()
.with(SerializationFeature.FAIL_ON_EMPTY_BEANS)
.writeValueAsString(new Empty());
fail("Should fail");
} catch (InvalidDefinitionException e) {
verifyException(e, "No serializer found for class");
}
// But not if there is a recognized annotation
assertEquals("{}", MAPPER.writeValueAsString(new EmptyWithAnno()));
// Including | NonZeroSerializer |
java | netty__netty | codec-http3/src/main/java/io/netty/handler/codec/http3/Http3UnidirectionalStreamInboundClientHandler.java | {
"start": 966,
"end": 2968
} | class ____ extends Http3UnidirectionalStreamInboundHandler {
private final LongFunction<ChannelHandler> pushStreamHandlerFactory;
Http3UnidirectionalStreamInboundClientHandler(
Http3FrameCodecFactory codecFactory,
Http3ControlStreamInboundHandler localControlStreamHandler,
Http3ControlStreamOutboundHandler remoteControlStreamHandler,
@Nullable LongFunction<ChannelHandler> unknownStreamHandlerFactory,
@Nullable LongFunction<ChannelHandler> pushStreamHandlerFactory,
Supplier<ChannelHandler> qpackEncoderHandlerFactory, Supplier<ChannelHandler> qpackDecoderHandlerFactory) {
super(codecFactory, localControlStreamHandler, remoteControlStreamHandler, unknownStreamHandlerFactory,
qpackEncoderHandlerFactory, qpackDecoderHandlerFactory);
this.pushStreamHandlerFactory = pushStreamHandlerFactory == null ? __ -> ReleaseHandler.INSTANCE :
pushStreamHandlerFactory;
}
@Override
void initPushStream(ChannelHandlerContext ctx, long pushId) {
// See https://tools.ietf.org/html/draft-ietf-quic-http-32#section-4.4
Long maxPushId = remoteControlStreamHandler.sentMaxPushId();
if (maxPushId == null) {
Http3CodecUtils.connectionError(ctx, Http3ErrorCode.H3_ID_ERROR,
"Received push stream before sending MAX_PUSH_ID frame.", false);
} else if (maxPushId < pushId) {
Http3CodecUtils.connectionError(ctx, Http3ErrorCode.H3_ID_ERROR,
"Received push stream with ID " + pushId + " greater than the max push ID " + maxPushId
+ '.', false);
} else {
// Replace this handler with the actual push stream handlers.
final ChannelHandler pushStreamHandler = pushStreamHandlerFactory.apply(pushId);
ctx.pipeline().replace(this, null, pushStreamHandler);
}
}
}
| Http3UnidirectionalStreamInboundClientHandler |
java | quarkusio__quarkus | independent-projects/bootstrap/maven-resolver/src/test/java/io/quarkus/bootstrap/resolver/maven/test/ChainedLocalRepositoryManagerTest.java | {
"start": 649,
"end": 10541
} | class ____ extends BootstrapMavenContextTestBase {
private static final String M2_LOCAL_1;
private static final String M2_LOCAL_2;
private static final String M2_FROM_REMOTE;
static {
final String projectLocation;
try {
projectLocation = getProjectLocation("workspace-with-local-repo-tail").toString();
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
M2_LOCAL_1 = Paths.get(projectLocation, ".m2-local-1", "repository").toAbsolutePath().toString();
M2_LOCAL_2 = Paths.get(projectLocation, ".m2-local-2", "repository").toAbsolutePath().toString();
M2_FROM_REMOTE = Paths.get(projectLocation, ".m2-from-remote", "repository").toAbsolutePath().toString();
}
// Tail configuration tests
@Test
public void testNoTail() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config().setOffline(true));
assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testTailConfiguredButEmptyString() throws Exception {
setSystemProp("maven.repo.local.tail", "");
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config().setOffline(true));
assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testTailConfiguredButBlank() throws Exception {
setSystemProp("maven.repo.local.tail", " ");
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config().setOffline(true));
assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testTailConfiguredButNonExistent() throws Exception {
setSystemProp("maven.repo.local.tail", "/tmp/this-dir-does-not-exist");
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config().setOffline(true));
assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailViaSystemProp() throws Exception {
setSystemProp("maven.repo.local.tail", M2_LOCAL_1);
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config().setOffline(true));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailViaConfig() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_LOCAL_1));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailResolutionOrder() throws Exception {
final BootstrapMavenContext mvnLocal1first = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_LOCAL_1, M2_LOCAL_2));
final BootstrapMavenContext mvnLocal2first = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_LOCAL_2, M2_LOCAL_1));
assertEquals(resolveOrgAcmeFooJar001(mvnLocal1first).getFile().getAbsolutePath(),
Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString());
assertEquals(resolveOrgAcmeFooJar001(mvnLocal2first).getFile().getAbsolutePath(),
Paths.get(M2_LOCAL_2, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString());
}
@Test
public void testValidTailMultiplicity() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_LOCAL_1, M2_LOCAL_2));
final Artifact foo = resolveOrgAcmeFooJar001(mvn);
assertNotNull(foo);
assertEquals(foo.getFile().getAbsolutePath(),
Paths.get(M2_LOCAL_1, "org", "acme", "foo", "0.0.1", "foo-0.0.1.jar").toAbsolutePath().toString());
final Artifact bar = resolveOrgAcmeBarJar002(mvn);
assertNotNull(bar);
assertEquals(bar.getFile().getAbsolutePath(),
Paths.get(M2_LOCAL_2, "org", "acme", "bar", "0.0.2", "bar-0.0.2.jar").toAbsolutePath().toString());
}
// ignoreAvailability tests
@Test
public void testValidTailLocalCheckingForAvailabilityViaConfig() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTailIgnoreAvailability(false)
.setLocalRepositoryTail(M2_LOCAL_1));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailFromRemoteCheckingForAvailabilityViaConfig() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTailIgnoreAvailability(false)
.setLocalRepositoryTail(M2_FROM_REMOTE));
assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailFromRemoteCheckingForAvailabilityViaSystemProp() throws Exception {
setSystemProp("maven.repo.local.tail.ignoreAvailability", "false");
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_FROM_REMOTE));
assertThrowsExactly(BootstrapMavenException.class, () -> resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropEmpty() throws Exception {
setSystemProp("maven.repo.local.tail.ignoreAvailability", ""); // will become `true`
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_FROM_REMOTE));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropBlank() throws Exception {
setSystemProp("maven.repo.local.tail.ignoreAvailability", " "); // will become `true`
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_FROM_REMOTE));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailFromRemoteIgnoringAvailabilityViaSystemPropTruthy() throws Exception {
setSystemProp("maven.repo.local.tail.ignoreAvailability", "fals"); // will become `true`
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTail(M2_FROM_REMOTE));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailLocalIgnoringAvailabilityViaConfig() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTailIgnoreAvailability(true)
.setLocalRepositoryTail(M2_LOCAL_1));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
@Test
public void testValidTailFromRemoteIgnoringAvailabilityViaConfig() throws Exception {
final BootstrapMavenContext mvn = bootstrapMavenContextForProject("workspace-with-local-repo-tail",
BootstrapMavenContext.config()
.setOffline(true)
.setLocalRepositoryTailIgnoreAvailability(true)
.setLocalRepositoryTail(M2_FROM_REMOTE));
assertNotNull(resolveOrgAcmeFooJar001(mvn));
}
private Artifact resolveOrgAcmeFooJar001(BootstrapMavenContext ctx) throws BootstrapMavenException {
final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx);
return resolver.resolve(new DefaultArtifact("org.acme", "foo", "", "jar", "0.0.1")).getArtifact();
}
private Artifact resolveOrgAcmeBarJar002(BootstrapMavenContext ctx) throws BootstrapMavenException {
final MavenArtifactResolver resolver = new MavenArtifactResolver(ctx);
return resolver.resolve(new DefaultArtifact("org.acme", "bar", "", "jar", "0.0.2")).getArtifact();
}
}
| ChainedLocalRepositoryManagerTest |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/PropertyMetadata.java | {
"start": 1144,
"end": 7977
} | class ____
// NOTE: need not be Serializable, not persisted
{
public final AnnotatedMember getter;
/**
* Flag that is set if the information came from global defaults,
* and not from explicit per-property annotations or per-type
* config overrides.
*/
public final boolean fromDefaults;
protected MergeInfo(AnnotatedMember getter, boolean fromDefaults) {
this.getter = getter;
this.fromDefaults = fromDefaults;
}
public static MergeInfo createForDefaults(AnnotatedMember getter) {
return new MergeInfo(getter, true);
}
public static MergeInfo createForTypeOverride(AnnotatedMember getter) {
return new MergeInfo(getter, false);
}
public static MergeInfo createForPropertyOverride(AnnotatedMember getter) {
return new MergeInfo(getter, false);
}
}
/**
* Three states: required, not required and unknown; unknown represented
* as null.
*/
protected final Boolean _required;
/**
* Optional human-readable description associated with the property.
*/
protected final String _description;
/**
* Optional index of the property within containing Object.
*
* @since 2.4
*/
protected final Integer _index;
/**
* Optional default value, as String, for property; not used for
* any functionality by core databind, offered as metadata for
* extensions.
*/
protected final String _defaultValue;
/**
* Settings regarding merging, if property is determined to possibly
* be mergeable (possibly since global settings may be omitted for
* non-mergeable types).
*<p>
* NOTE: transient since it is assumed that this information is only
* relevant during initial setup and not needed after full initialization.
* May be changed if this proves necessary.
*
* @since 2.9
*/
protected final transient MergeInfo _mergeInfo;
/**
* Settings regarding handling of incoming `null`s, both for value itself
* and, for structured types, content values (array/Collection elements,
* Map values).
*
* @since 2.9
*/
protected Nulls _valueNulls, _contentNulls;
/*
/**********************************************************
/* Construction, configuration
/**********************************************************
*/
/**
* @since 2.9
*/
protected PropertyMetadata(Boolean req, String desc, Integer index, String def,
MergeInfo mergeInfo, Nulls valueNulls, Nulls contentNulls)
{
_required = req;
_description = desc;
_index = index;
_defaultValue = (def == null || def.isEmpty()) ? null : def;
_mergeInfo = mergeInfo;
_valueNulls = valueNulls;
_contentNulls = contentNulls;
}
public static PropertyMetadata construct(Boolean req, String desc, Integer index,
String defaultValue) {
if ((desc != null) || (index != null) || (defaultValue != null)) {
return new PropertyMetadata(req, desc, index, defaultValue,
null, null, null);
}
if (req == null) {
return STD_REQUIRED_OR_OPTIONAL;
}
return req ? STD_REQUIRED : STD_OPTIONAL;
}
/**
* Minor optimization: let's canonicalize back to placeholders in cases
* where there is no real data to consider
*/
protected Object readResolve()
{
if ((_description == null) && (_index == null) && (_defaultValue == null)
&& (_mergeInfo == null)
&& (_valueNulls == null) && (_contentNulls == null)) {
if (_required == null) {
return STD_REQUIRED_OR_OPTIONAL;
}
return _required.booleanValue() ? STD_REQUIRED : STD_OPTIONAL;
}
return this;
}
public PropertyMetadata withDescription(String desc) {
return new PropertyMetadata(_required, desc, _index, _defaultValue,
_mergeInfo, _valueNulls, _contentNulls);
}
public PropertyMetadata withMergeInfo(MergeInfo mergeInfo) {
return new PropertyMetadata(_required, _description, _index, _defaultValue,
mergeInfo, _valueNulls, _contentNulls);
}
public PropertyMetadata withNulls(Nulls valueNulls,
Nulls contentNulls) {
return new PropertyMetadata(_required, _description, _index, _defaultValue,
_mergeInfo, valueNulls, contentNulls);
}
public PropertyMetadata withDefaultValue(String def) {
if ((def == null) || def.isEmpty()) {
if (_defaultValue == null) {
return this;
}
def = null;
} else if (def.equals(_defaultValue)) {
return this;
}
return new PropertyMetadata(_required, _description, _index, def,
_mergeInfo, _valueNulls, _contentNulls);
}
public PropertyMetadata withIndex(Integer index) {
return new PropertyMetadata(_required, _description, index, _defaultValue,
_mergeInfo, _valueNulls, _contentNulls);
}
public PropertyMetadata withRequired(Boolean b) {
if (b == null) {
if (_required == null) {
return this;
}
} else if (b.equals(_required)) {
return this;
}
return new PropertyMetadata(b, _description, _index, _defaultValue,
_mergeInfo, _valueNulls, _contentNulls);
}
/*
/**********************************************************
/* Accessors
/**********************************************************
*/
public String getDescription() { return _description; }
/**
* @since 2.5
*/
public String getDefaultValue() { return _defaultValue; }
/**
* Accessor for determining whether property has declared "default value",
* which may be used by extension modules.
*
* @since 2.6
*/
public boolean hasDefaultValue() { return (_defaultValue != null); }
public boolean isRequired() { return (_required != null) && _required.booleanValue(); }
public Boolean getRequired() { return _required; }
/**
* @since 2.4
*/
public Integer getIndex() { return _index; }
/**
* @since 2.4
*/
public boolean hasIndex() { return _index != null; }
/**
* @since 2.9
*/
public MergeInfo getMergeInfo() { return _mergeInfo; }
/**
* @since 2.9
*/
public Nulls getValueNulls() { return _valueNulls; }
/**
* @since 2.9
*/
public Nulls getContentNulls() { return _contentNulls; }
}
| MergeInfo |
java | quarkusio__quarkus | extensions/hibernate-search-standalone-elasticsearch/runtime/src/main/java/io/quarkus/hibernate/search/standalone/elasticsearch/SearchExtension.java | {
"start": 995,
"end": 1330
} | interface ____ {
/**
* @return The name of the Hibernate Search backend that the qualified bean should be assigned to.
*/
String backend() default "";
/**
* @return The name of the Hibernate Search index that the qualified bean should be assigned to.
*/
String index() default "";
| SearchExtension |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/rtsp/RtspMethods.java | {
"start": 899,
"end": 4891
} | class ____ {
/**
* The OPTIONS getMethod represents a request for information about the communication options
* available on the request/response chain identified by the Request-URI. This getMethod allows
* the client to determine the options and/or requirements associated with a resource, or the
* capabilities of a server, without implying a resource action or initiating a resource
* retrieval.
*/
public static final HttpMethod OPTIONS = HttpMethod.OPTIONS;
/**
* The DESCRIBE getMethod retrieves the description of a presentation or
* media object identified by the request URL from a server.
*/
public static final HttpMethod DESCRIBE = HttpMethod.valueOf("DESCRIBE");
/**
* The ANNOUNCE posts the description of a presentation or media object
* identified by the request URL to a server, or updates the client-side
* session description in real-time.
*/
public static final HttpMethod ANNOUNCE = HttpMethod.valueOf("ANNOUNCE");
/**
* The SETUP request for a URI specifies the transport mechanism to be
* used for the streamed media.
*/
public static final HttpMethod SETUP = HttpMethod.valueOf("SETUP");
/**
* The PLAY getMethod tells the server to start sending data via the
* mechanism specified in SETUP.
*/
public static final HttpMethod PLAY = HttpMethod.valueOf("PLAY");
/**
* The PAUSE request causes the stream delivery to be interrupted
* (halted) temporarily.
*/
public static final HttpMethod PAUSE = HttpMethod.valueOf("PAUSE");
/**
* The TEARDOWN request stops the stream delivery for the given URI,
* freeing the resources associated with it.
*/
public static final HttpMethod TEARDOWN = HttpMethod.valueOf("TEARDOWN");
/**
* The GET_PARAMETER request retrieves the value of a parameter of a
* presentation or stream specified in the URI.
*/
public static final HttpMethod GET_PARAMETER = HttpMethod.valueOf("GET_PARAMETER");
/**
* The SET_PARAMETER requests to set the value of a parameter for a
* presentation or stream specified by the URI.
*/
public static final HttpMethod SET_PARAMETER = HttpMethod.valueOf("SET_PARAMETER");
/**
* The REDIRECT request informs the client that it must connect to another
* server location.
*/
public static final HttpMethod REDIRECT = HttpMethod.valueOf("REDIRECT");
/**
* The RECORD getMethod initiates recording a range of media data according to
* the presentation description.
*/
public static final HttpMethod RECORD = HttpMethod.valueOf("RECORD");
private static final Map<String, HttpMethod> methodMap = new HashMap<String, HttpMethod>();
static {
methodMap.put(DESCRIBE.toString(), DESCRIBE);
methodMap.put(ANNOUNCE.toString(), ANNOUNCE);
methodMap.put(GET_PARAMETER.toString(), GET_PARAMETER);
methodMap.put(OPTIONS.toString(), OPTIONS);
methodMap.put(PAUSE.toString(), PAUSE);
methodMap.put(PLAY.toString(), PLAY);
methodMap.put(RECORD.toString(), RECORD);
methodMap.put(REDIRECT.toString(), REDIRECT);
methodMap.put(SETUP.toString(), SETUP);
methodMap.put(SET_PARAMETER.toString(), SET_PARAMETER);
methodMap.put(TEARDOWN.toString(), TEARDOWN);
}
/**
* Returns the {@link HttpMethod} represented by the specified name.
* If the specified name is a standard RTSP getMethod name, a cached instance
* will be returned. Otherwise, a new instance will be returned.
*/
public static HttpMethod valueOf(String name) {
name = checkNonEmptyAfterTrim(name, "name").toUpperCase();
HttpMethod result = methodMap.get(name);
if (result != null) {
return result;
} else {
return HttpMethod.valueOf(name);
}
}
private RtspMethods() {
}
}
| RtspMethods |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/JodaConstructorsTest.java | {
"start": 3642,
"end": 4126
} | class ____ {
// BUG: Diagnostic contains: Duration ONE_MILLI = Duration.millis(1);
private static final Duration ONE_MILLI = new Duration(1);
}
""")
.doTest();
}
@Test
public void durationConstructorInteger() {
// TODO(kak): This really should be an error too :(
helper
.addSourceLines(
"TestClass.java",
"""
import org.joda.time.Duration;
public | TestClass |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParserForCompareExpr.java | {
"start": 1520,
"end": 2573
} | class ____ parsing compare expressions.
* Compare expressions are of the form :
* (<key> <compareop> <value>) <op> (<key
* > <compareop> <value>)
* compareop is used to compare value of a the specified key in the backend
* storage. compareop can be :
* 1. eq - Equals
* 2. ne - Not equals (matches if key does not exist)
* 3. ene - Exists and not equals (key must exist for match to occur)
* 4. lt - Less than
* 5. gt - Greater than
* 6. le - Less than or equals
* 7. ge - Greater than or equals
* compareop's supported would depend on implementation. For instance, all
* the above compareops' will be supported for metric filters but only eq,ne and
* ene would be supported for KV filters like config/info filters.
*
* op is a logical operator and can be either AND or OR.
*
* The way values will be interpreted would also depend on implementation
*
* A typical compare expression would look as under:
* ((key1 eq val1 OR key2 ne val2) AND (key5 gt val45))
*/
@Private
@Unstable
abstract | for |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/ContainerContext.java | {
"start": 1292,
"end": 1398
} | class ____ {@link AuxiliaryService} initializing and stopping a
* container.
*/
@Public
@Evolving
public | for |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/serialization/Serdes.java | {
"start": 4074,
"end": 4269
} | class ____ extends WrapperSerde<Boolean> {
public BooleanSerde() {
super(new BooleanSerializer(), new BooleanDeserializer());
}
}
public static final | BooleanSerde |
java | netty__netty | resolver/src/main/java/io/netty/resolver/NoopAddressResolver.java | {
"start": 1076,
"end": 1776
} | class ____ extends AbstractAddressResolver<SocketAddress> {
public NoopAddressResolver(EventExecutor executor) {
super(executor, SocketAddress.class);
}
@Override
protected boolean doIsResolved(SocketAddress address) {
return true;
}
@Override
protected void doResolve(SocketAddress unresolvedAddress, Promise<SocketAddress> promise) throws Exception {
promise.setSuccess(unresolvedAddress);
}
@Override
protected void doResolveAll(
SocketAddress unresolvedAddress, Promise<List<SocketAddress>> promise) throws Exception {
promise.setSuccess(Collections.singletonList(unresolvedAddress));
}
}
| NoopAddressResolver |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/module/SimpleDeserializers.java | {
"start": 727,
"end": 994
} | class ____
extends Deserializers.Base
implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
protected HashMap<ClassKey,ValueDeserializer<?>> _classMappings = null;
/**
* Flag to help find "generic" | SimpleDeserializers |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractGetResourcesResponse.java | {
"start": 832,
"end": 2408
} | class ____<T extends ToXContent & Writeable> extends ActionResponse implements ToXContentObject {
private QueryPage<T> resources;
protected AbstractGetResourcesResponse() {}
@SuppressWarnings("this-escape")
protected AbstractGetResourcesResponse(StreamInput in) throws IOException {
resources = new QueryPage<>(in, getReader());
}
protected AbstractGetResourcesResponse(QueryPage<T> resources) {
this.resources = Objects.requireNonNull(resources);
}
public QueryPage<T> getResources() {
return resources;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
resources.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
resources.doXContentBody(builder, params);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(resources);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj instanceof AbstractGetResourcesResponse == false) {
return false;
}
AbstractGetResourcesResponse<?> other = (AbstractGetResourcesResponse<?>) obj;
return Objects.equals(resources, other.resources);
}
@Override
public final String toString() {
return Strings.toString(this);
}
protected abstract Reader<T> getReader();
}
| AbstractGetResourcesResponse |
java | apache__logging-log4j2 | log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/appender/EncodingListAppender.java | {
"start": 1913,
"end": 3622
} | class ____ implements ByteBufferDestination {
// JUnit 5 stack traces can start to get looooong
ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[16384]);
@Override
public ByteBuffer getByteBuffer() {
return byteBuffer;
}
@Override
public ByteBuffer drain(final ByteBuffer buf) {
throw new IllegalStateException("Unexpected message larger than 16384 bytes");
}
@Override
public void writeBytes(final ByteBuffer data) {
byteBuffer.put(data);
}
@Override
public void writeBytes(final byte[] data, final int offset, final int length) {
byteBuffer.put(data, offset, length);
}
}
@Override
public synchronized void append(final LogEvent event) {
final Layout<? extends Serializable> layout = getLayout();
if (layout == null) {
events.add(event);
} else if (layout instanceof SerializedLayout) {
final Destination content = new Destination();
content.byteBuffer.put(layout.getHeader());
layout.encode(event, content);
content.getByteBuffer().flip();
final byte[] record = new byte[content.getByteBuffer().remaining()];
content.getByteBuffer().get(record);
data.add(record);
} else {
final Destination content = new Destination();
layout.encode(event, content);
content.getByteBuffer().flip();
final byte[] record = new byte[content.getByteBuffer().remaining()];
content.getByteBuffer().get(record);
write(record);
}
}
}
| Destination |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/util/json/DecodeJson.java | {
"start": 3001,
"end": 4586
} | class ____ implements DecodeJson<String> {
@Override
public String decode(JsonNode node) throws JsonMappingException {
if (node.isTextual()) {
return node.textValue();
}
throw throwJsonMappingException(String.class.getSimpleName(), node);
}
}
static <E> DecodeJson<Optional<E>> decodeOptional(DecodeJson<E> decodeJson) {
return node -> {
if (node.isNull()) return Optional.empty();
return Optional.of(decodeJson.decode(node));
};
}
static <E> DecodeJson<List<E>> decodeList(DecodeJson<E> decodeJson) {
return node -> {
if (node.isArray()) {
List<E> result = new ArrayList<>();
Iterator<JsonNode> elements = node.elements();
while (elements.hasNext()) {
result.add(decodeJson.decode(elements.next()));
}
return result;
}
throw throwJsonMappingException("JSON array", node);
};
}
static <V> DecodeJson<Map<String, V>> decodeMap(DecodeJson<V> decodeJson) {
return node -> {
if (node.isObject()) {
Map<String, V> result = new HashMap<>();
for (Map.Entry<String, JsonNode> entry : node.properties()) {
result.put(entry.getKey(), decodeJson.decode(entry.getValue()));
}
return result;
}
throw throwJsonMappingException("JSON object", node);
};
}
}
| DecodeString |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-client-jackson/deployment/src/test/java/io/quarkus/restclient/jackson/deployment/ZonedDateTimeObjectMapperCustomizer.java | {
"start": 1505,
"end": 1905
} | class ____ extends JsonDeserializer<ZonedDateTime> {
@Override
public ZonedDateTime deserialize(JsonParser p, DeserializationContext ctxt)
throws IOException, JsonProcessingException {
return ZonedDateTime.parse(p.getValueAsString())
.withZoneSameInstant(ZoneId.of("Europe/London"));
}
}
}
| ZonedDateTimeEuropeLondonDeserializer |
java | elastic__elasticsearch | x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyBinaryDocValuesWrapper.java | {
"start": 1251,
"end": 2726
} | class ____ extends BinaryDocValues {
private final Bits docsWithField;
private final LegacyBinaryDocValues values;
private final int maxDoc;
private int docID = -1;
public LegacyBinaryDocValuesWrapper(Bits docsWithField, LegacyBinaryDocValues values) {
this.docsWithField = docsWithField;
this.values = values;
this.maxDoc = docsWithField.length();
}
@Override
public int docID() {
return docID;
}
@Override
public int nextDoc() {
docID++;
while (docID < maxDoc) {
if (docsWithField.get(docID)) {
return docID;
}
docID++;
}
docID = NO_MORE_DOCS;
return NO_MORE_DOCS;
}
@Override
public int advance(int target) {
if (target < docID) {
throw new IllegalArgumentException("cannot advance backwards: docID=" + docID + " target=" + target);
}
if (target == NO_MORE_DOCS) {
this.docID = NO_MORE_DOCS;
} else {
this.docID = target - 1;
nextDoc();
}
return docID;
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return docsWithField.get(target);
}
@Override
public long cost() {
return 0;
}
@Override
public BytesRef binaryValue() {
return values.get(docID);
}
}
| LegacyBinaryDocValuesWrapper |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregatorFactory.java | {
"start": 1478,
"end": 5429
} | class ____ extends ValuesSourceAggregatorFactory {
public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
builder.register(
DiversifiedAggregationBuilder.REGISTRY_KEY,
List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN),
(
String name,
int shardSize,
AggregatorFactories factories,
AggregationContext context,
Aggregator parent,
Map<String, Object> metadata,
ValuesSourceConfig valuesSourceConfig,
int maxDocsPerValue,
String executionHint) -> new DiversifiedNumericSamplerAggregator(
name,
shardSize,
factories,
context,
parent,
metadata,
valuesSourceConfig,
maxDocsPerValue
),
true
);
builder.register(
DiversifiedAggregationBuilder.REGISTRY_KEY,
CoreValuesSourceType.KEYWORD,
(
String name,
int shardSize,
AggregatorFactories factories,
AggregationContext context,
Aggregator parent,
Map<String, Object> metadata,
ValuesSourceConfig valuesSourceConfig,
int maxDocsPerValue,
String executionHint) -> {
ExecutionMode execution = null;
if (executionHint != null) {
execution = ExecutionMode.fromString(executionHint);
}
// In some cases using ordinals is just not supported: override it
if (execution == null) {
execution = ExecutionMode.GLOBAL_ORDINALS;
}
if ((execution.needsGlobalOrdinals()) && (valuesSourceConfig.hasOrdinals() == false)) {
execution = ExecutionMode.MAP;
}
return execution.create(name, factories, shardSize, maxDocsPerValue, valuesSourceConfig, context, parent, metadata);
},
true
);
}
private final DiversifiedAggregatorSupplier aggregatorSupplier;
private final int shardSize;
private final int maxDocsPerValue;
private final String executionHint;
DiversifiedAggregatorFactory(
String name,
ValuesSourceConfig config,
int shardSize,
int maxDocsPerValue,
String executionHint,
AggregationContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metadata,
DiversifiedAggregatorSupplier aggregatorSupplier
) throws IOException {
super(name, config, context, parent, subFactoriesBuilder, metadata);
this.shardSize = shardSize;
this.maxDocsPerValue = maxDocsPerValue;
this.executionHint = executionHint;
this.aggregatorSupplier = aggregatorSupplier;
}
@Override
protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException {
return aggregatorSupplier.build(name, shardSize, factories, context, parent, metadata, config, maxDocsPerValue, executionHint);
}
@Override
protected Aggregator createUnmapped(Aggregator parent, Map<String, Object> metadata) throws IOException {
final UnmappedSampler aggregation = new UnmappedSampler(name, metadata);
return new NonCollectingAggregator(name, context, parent, factories, metadata) {
@Override
public InternalAggregation buildEmptyAggregation() {
return aggregation;
}
};
}
}
| DiversifiedAggregatorFactory |
java | google__guava | android/guava/src/com/google/common/collect/ImmutableRangeSet.java | {
"start": 23235,
"end": 24288
} | class ____<C extends Comparable> implements Serializable {
private final ImmutableList<Range<C>> ranges;
private final DiscreteDomain<C> domain;
AsSetSerializedForm(ImmutableList<Range<C>> ranges, DiscreteDomain<C> domain) {
this.ranges = ranges;
this.domain = domain;
}
Object readResolve() {
return new ImmutableRangeSet<C>(ranges).asSet(domain);
}
}
/**
* Returns {@code true} if this immutable range set's implementation contains references to
* user-created objects that aren't accessible via this range set's methods. This is generally
* used to determine whether {@code copyOf} implementations should make an explicit copy to avoid
* memory leaks.
*/
boolean isPartialView() {
return ranges.isPartialView();
}
/** Returns a new builder for an immutable range set. */
public static <C extends Comparable<?>> Builder<C> builder() {
return new Builder<>();
}
/**
* A builder for immutable range sets.
*
* @since 14.0
*/
public static | AsSetSerializedForm |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/recording/RecorderContext.java | {
"start": 1797,
"end": 2721
} | class ____
* @return A Class instance that can be passed to a recording proxy
* @deprecated This construct should not be needed in most use cases since directly loading deployment/application classes
* at processing time in build steps is safe. However, there are use cases where this method comes in handy,
* such as referring to classes that were generated in previous build steps using
* {@link io.quarkus.deployment.builditem.GeneratedClassBuildItem}.
*/
@Deprecated(forRemoval = false)
Class<?> classProxy(String name);
/**
* Creates a RuntimeValue object that represents an object created via the default constructor.
* <p>
* This object can be passed into recorders, but must not be used directly at deployment time
*
* @param name The name of the class
* @param <T> The type of the class
* @return The | name |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/cfg/BytecodeSettings.java | {
"start": 310,
"end": 2326
} | interface ____ {
/**
* Selects a bytecode enhancement library.
* <p>
* At present only bytebuddy is supported, bytebuddy being the default since version 5.3.
*
* @settingDefault {@code "bytebuddy"}
* @deprecated Will be removed, Hibernate ORM will use the BytecodeProvider implementation it finds on the
* classpath loading it via the standard ServiceLoader mechanism. Currently, there is only a single
* implementation which is included in Hibernate ORM, so it's not possible to override this.
* See HHH-17643
*/
@Deprecated( forRemoval = true )
@SuppressWarnings("DeprecatedIsStillUsed")
String BYTECODE_PROVIDER = "hibernate.bytecode.provider";
/**
* This is similar to the now deprecated legacy property {@code hibernate.bytecode.provider} except
* it's used specifically to pass an existing instance of a {@link org.hibernate.bytecode.spi.BytecodeProvider};
* this happens to also allow to override the implementation, but is primarily intended to allow reusing a
* specific instance; this could be useful when the implementation benefits from internal caches.
* When not set, Hibernate will create its default implementation.
*
* @settingDefault {@code null}
*/
String BYTECODE_PROVIDER_INSTANCE = "hibernate.enhancer.bytecodeprovider.instance";
/**
* Enable association management feature in runtime bytecode enhancement
*
* @settingDefault {@code false}
*/
String ENHANCER_ENABLE_ASSOCIATION_MANAGEMENT = "hibernate.enhancer.enableAssociationManagement";
/**
* @deprecated Will be removed without replacement. See HHH-15641
*/
@Deprecated(forRemoval = true)
@SuppressWarnings("DeprecatedIsStillUsed")
String ENHANCER_ENABLE_DIRTY_TRACKING = "hibernate.enhancer.enableDirtyTracking";
/**
* @deprecated Will be removed without replacement. See HHH-15641
*/
@SuppressWarnings("DeprecatedIsStillUsed")
@Deprecated(forRemoval = true)
String ENHANCER_ENABLE_LAZY_INITIALIZATION = "hibernate.enhancer.enableLazyInitialization";
}
| BytecodeSettings |
java | square__javapoet | src/test/java/com/squareup/javapoet/JavaFileTest.java | {
"start": 5977,
"end": 6897
} | class ____ {\n"
+ " static {\n"
+ " assert valueOf(\"BLOCKED\") == BLOCKED;\n"
+ " gc();\n"
+ " out.println(nanoTime());\n"
+ " }\n"
+ "\n"
+ " Taco(Thread.State... states) {\n"
+ " }\n"
+ "}\n");
}
@Ignore("addStaticImport doesn't support members with $L")
@Test public void importStaticDynamic() {
JavaFile source = JavaFile.builder("com.squareup.tacos",
TypeSpec.classBuilder("Taco")
.addMethod(MethodSpec.methodBuilder("main")
.addStatement("$T.$L.println($S)", System.class, "out", "hello")
.build())
.build())
.addStaticImport(System.class, "out")
.build();
assertThat(source.toString()).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import static java.lang.System.out;\n"
+ "\n"
+ " | Taco |
java | netty__netty | codec-http3/src/test/java/io/netty/handler/codec/http3/QpackStaticTableTest.java | {
"start": 845,
"end": 2671
} | class ____ {
@Test
public void testFieldNotFound() {
assertEquals(QpackStaticTable.NOT_FOUND, QpackStaticTable.findFieldIndex("x-netty-quic", "incubating"));
}
@Test
public void testFieldNameAndValueMatch() {
// first in range
assertEquals(15, QpackStaticTable.findFieldIndex(":method", "CONNECT"));
// last in range
assertEquals(21, QpackStaticTable.findFieldIndex(":method", "PUT"));
// non-consequent range
assertEquals(24, QpackStaticTable.findFieldIndex(":status", "103"));
assertEquals(69, QpackStaticTable.findFieldIndex(":status", "421"));
}
@Test
public void testFieldNameRefForEmptyField() {
int nameIndex1 = QpackStaticTable.findFieldIndex("cookie", "netty.io");
int nameIndex2 = QpackStaticTable.findFieldIndex("cookie", "quic.io");
// should give the same name ref for any values
assertNotEquals(QpackStaticTable.NOT_FOUND, nameIndex1);
assertNotEquals(QpackStaticTable.NOT_FOUND, nameIndex2);
assertEquals(nameIndex1, nameIndex2);
// index should be masked
assertEquals(nameIndex1 & QpackStaticTable.MASK_NAME_REF, QpackStaticTable.MASK_NAME_REF);
assertEquals(5, nameIndex1 ^ QpackStaticTable.MASK_NAME_REF);
}
@Test
public void testFieldNameRefForSingleMatch() {
// note the value differs from static table ("1" rather than "0")
int nameIndex = QpackStaticTable.findFieldIndex("age", "1");
assertEquals(2, nameIndex ^ QpackStaticTable.MASK_NAME_REF);
}
@Test
public void testFieldNameRefForMultipleMatches() {
int nameIndex = QpackStaticTable.findFieldIndex(":method", "ALLTHETHINGS");
assertEquals(15, nameIndex ^ QpackStaticTable.MASK_NAME_REF);
}
}
| QpackStaticTableTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingElementComparator_Test.java | {
"start": 1081,
"end": 1524
} | class ____ extends ObjectArrayAssertBaseTest {
private Comparator<Object> elementComparator = alwaysEqual();
@Override
protected ObjectArrayAssert<Object> invoke_api_method() {
return assertions.usingElementComparator(elementComparator);
}
@Override
protected void verify_internal_effects() {
assertThat(getArrays(assertions).getComparator()).isSameAs(elementComparator);
}
}
| ObjectArrayAssert_usingElementComparator_Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/formatstring/LenientFormatStringValidationTest.java | {
"start": 906,
"end": 1438
} | class ____ {
private final CompilationTestHelper helper =
CompilationTestHelper.newInstance(LenientFormatStringValidation.class, getClass());
private final BugCheckerRefactoringTestHelper refactoring =
BugCheckerRefactoringTestHelper.newInstance(LenientFormatStringValidation.class, getClass());
@Test
public void tooFewArguments() {
helper
.addSourceLines(
"Test.java",
"""
import com.google.common.base.Preconditions;
| LenientFormatStringValidationTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HttpPutFailedException.java | {
"start": 977,
"end": 1320
} | class ____ extends IOException {
private static final long serialVersionUID = 1L;
private final int responseCode;
public HttpPutFailedException(String msg, int responseCode) throws IOException {
super(msg);
this.responseCode = responseCode;
}
public int getResponseCode() {
return responseCode;
}
}
| HttpPutFailedException |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldNotBe.java | {
"start": 905,
"end": 1595
} | class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldNotBe}</code>.
* @param <T> guarantees that the type of the actual value and the generic type of the {@code Condition} are the same.
* @param actual the actual value in the failed assertion.
* @param condition the {@code Condition}.
* @return the created {@code ErrorMessageFactory}.
*/
public static <T> ErrorMessageFactory shouldNotBe(T actual, Condition<? super T> condition) {
return new ShouldNotBe(actual, condition);
}
private ShouldNotBe(Object actual, Condition<?> condition) {
super("%nExpecting actual:%n %s%nnot to be %s", actual, condition);
}
}
| ShouldNotBe |
java | grpc__grpc-java | api/src/main/java/io/grpc/Status.java | {
"start": 2616,
"end": 8931
} | enum ____ {
/**
* The operation completed successfully.
*/
OK(0),
/**
* The operation was cancelled (typically by the caller).
*/
CANCELLED(1),
/**
* Unknown error. An example of where this error may be returned is
* if a Status value received from another address space belongs to
* an error-space that is not known in this address space. Also
* errors raised by APIs that do not return enough error information
* may be converted to this error.
*/
UNKNOWN(2),
/**
* Client specified an invalid argument. Note that this differs
* from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
* that are problematic regardless of the state of the system
* (e.g., a malformed file name).
*/
INVALID_ARGUMENT(3),
/**
* Deadline expired before operation could complete. For operations
* that change the state of the system, this error may be returned
* even if the operation has completed successfully. For example, a
* successful response from a server could have been delayed long
* enough for the deadline to expire.
*/
DEADLINE_EXCEEDED(4),
/**
* Some requested entity (e.g., file or directory) was not found.
*/
NOT_FOUND(5),
/**
* Some entity that we attempted to create (e.g., file or directory) already exists.
*/
ALREADY_EXISTS(6),
/**
* The caller does not have permission to execute the specified
* operation. PERMISSION_DENIED must not be used for rejections
* caused by exhausting some resource (use RESOURCE_EXHAUSTED
* instead for those errors). PERMISSION_DENIED must not be
* used if the caller cannot be identified (use UNAUTHENTICATED
* instead for those errors).
*/
PERMISSION_DENIED(7),
/**
* Some resource has been exhausted, perhaps a per-user quota, or
* perhaps the entire file system is out of space.
*/
RESOURCE_EXHAUSTED(8),
/**
* Operation was rejected because the system is not in a state
* required for the operation's execution. For example, directory
* to be deleted may be non-empty, an rmdir operation is applied to
* a non-directory, etc.
*
* <p>A litmus test that may help a service implementor in deciding
* between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
* (a) Use UNAVAILABLE if the client can retry just the failing call.
* (b) Use ABORTED if the client should retry at a higher-level
* (e.g., restarting a read-modify-write sequence).
* (c) Use FAILED_PRECONDITION if the client should not retry until
* the system state has been explicitly fixed. E.g., if an "rmdir"
* fails because the directory is non-empty, FAILED_PRECONDITION
* should be returned since the client should not retry unless
* they have first fixed up the directory by deleting files from it.
*/
FAILED_PRECONDITION(9),
/**
* The operation was aborted, typically due to a concurrency issue
* like sequencer check failures, transaction aborts, etc.
*
* <p>See litmus test above for deciding between FAILED_PRECONDITION,
* ABORTED, and UNAVAILABLE.
*/
ABORTED(10),
/**
* Operation was attempted past the valid range. E.g., seeking or
* reading past end of file.
*
* <p>Unlike INVALID_ARGUMENT, this error indicates a problem that may
* be fixed if the system state changes. For example, a 32-bit file
* system will generate INVALID_ARGUMENT if asked to read at an
* offset that is not in the range [0,2^32-1], but it will generate
* OUT_OF_RANGE if asked to read from an offset past the current
* file size.
*
* <p>There is a fair bit of overlap between FAILED_PRECONDITION and OUT_OF_RANGE.
* We recommend using OUT_OF_RANGE (the more specific error) when it applies
* so that callers who are iterating through
* a space can easily look for an OUT_OF_RANGE error to detect when they are done.
*/
OUT_OF_RANGE(11),
/**
* Operation is not implemented or not supported/enabled in this service.
*/
UNIMPLEMENTED(12),
/**
* Internal errors. Means some invariants expected by underlying
* system has been broken. If you see one of these errors,
* something is very broken.
*/
INTERNAL(13),
/**
* The service is currently unavailable. This is a most likely a
* transient condition and may be corrected by retrying with
* a backoff. Note that it is not always safe to retry
* non-idempotent operations.
*
* <p>See litmus test above for deciding between FAILED_PRECONDITION,
* ABORTED, and UNAVAILABLE.
*/
UNAVAILABLE(14),
/**
* Unrecoverable data loss or corruption.
*/
DATA_LOSS(15),
/**
* The request does not have valid authentication credentials for the
* operation.
*/
UNAUTHENTICATED(16);
private final int value;
@SuppressWarnings("ImmutableEnumChecker") // we make sure the byte[] can't be modified
private final byte[] valueAscii;
private Code(int value) {
this.value = value;
this.valueAscii = Integer.toString(value).getBytes(US_ASCII);
}
/**
* The numerical value of the code.
*/
public int value() {
return value;
}
/**
* Returns a {@link Status} object corresponding to this status code.
*/
public Status toStatus() {
return STATUS_LIST.get(value);
}
private byte[] valueAscii() {
return valueAscii;
}
}
// Create the canonical list of Status instances indexed by their code values.
private static final List<Status> STATUS_LIST = buildStatusList();
private static List<Status> buildStatusList() {
TreeMap<Integer, Status> canonicalizer = new TreeMap<>();
for (Code code : Code.values()) {
Status replaced = canonicalizer.put(code.value(), new Status(code));
if (replaced != null) {
throw new IllegalStateException("Code value duplication between "
+ replaced.getCode().name() + " & " + code.name());
}
}
return Collections.unmodifiableList(new ArrayList<>(canonicalizer.values()));
}
// A pseudo- | Code |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/remote/FuzzyWatchChangeNotifyTask.java | {
"start": 1344,
"end": 5415
} | class ____ implements Runnable {
private static final String POINT_FUZZY_WATCH_CONFIG_PUSH = "POINT_FUZZY_WATCH_CONFIG_PUSH";
private static final String POINT_FUZZY_WATCH_CONFIG_PUSH_SUCCESS = "POINT_FUZZY_WATCH_CONFIG_PUSH_SUCCESS";
private static final String POINT_FUZZY_WATCH_CONFIG_PUSH_FAIL = "POINT_FUZZY_WATCH_CONFIG_PUSH_FAIL";
ConfigFuzzyWatchChangeNotifyRequest notifyRequest;
final ConnectionManager connectionManager;
final RpcPushService rpcPushService;
int maxRetryTimes;
int tryTimes = 0;
String connectionId;
/**
* Constructs a RpcPushTask with the specified parameters.
*
* @param notifyRequest The notification request to be sent.
* @param maxRetryTimes The maximum number of retry times.
* @param connectionId The ID of the connection.
*/
public FuzzyWatchChangeNotifyTask(ConnectionManager connectionManager, RpcPushService rpcPushService,
ConfigFuzzyWatchChangeNotifyRequest notifyRequest, int maxRetryTimes, String connectionId) {
this.connectionManager = connectionManager;
this.rpcPushService = rpcPushService;
this.notifyRequest = notifyRequest;
this.maxRetryTimes = maxRetryTimes;
this.connectionId = connectionId;
}
/**
* Checks if the number of retry times exceeds the maximum limit.
*
* @return {@code true} if the number of retry times exceeds the maximum limit; otherwise, {@code false}.
*/
public boolean isOverTimes() {
return maxRetryTimes > 0 && this.tryTimes >= maxRetryTimes;
}
@Override
public void run() {
if (isOverTimes()) {
Loggers.REMOTE_PUSH.warn(
"push callback retry fail over times.groupKey={},,clientId={}, will unregister client.",
notifyRequest.getGroupKey(), connectionId);
connectionManager.unregister(connectionId);
} else if (connectionManager.getConnection(connectionId) == null) {
// Client is already offline, ignore the task.
Loggers.REMOTE_PUSH.warn(
"Client is already offline, ignore the task. dataId={},groupKey={},tenant={},clientId={}",
notifyRequest.getGroupKey(), connectionId);
return;
}
TpsCheckRequest tpsCheckRequest = new TpsCheckRequest();
tpsCheckRequest.setPointName(POINT_FUZZY_WATCH_CONFIG_PUSH);
if (!ControlManagerCenter.getInstance().getTpsControlManager().check(tpsCheckRequest).isSuccess()) {
scheduleSelf();
} else {
long timeout = ConfigCommonConfig.getInstance().getPushTimeout();
rpcPushService.pushWithCallback(connectionId, notifyRequest, new AbstractPushCallBack(timeout) {
@Override
public void onSuccess() {
TpsCheckRequest tpsCheckRequest = new TpsCheckRequest();
tpsCheckRequest.setPointName(POINT_FUZZY_WATCH_CONFIG_PUSH_SUCCESS);
ControlManagerCenter.getInstance().getTpsControlManager().check(tpsCheckRequest);
}
@Override
public void onFail(Throwable e) {
TpsCheckRequest tpsCheckRequest = new TpsCheckRequest();
tpsCheckRequest.setPointName(POINT_FUZZY_WATCH_CONFIG_PUSH_FAIL);
ControlManagerCenter.getInstance().getTpsControlManager().check(tpsCheckRequest);
Loggers.REMOTE_PUSH.warn("Push fail, groupKey={}, clientId={}", notifyRequest.getGroupKey(),
connectionId, e);
FuzzyWatchChangeNotifyTask.this.scheduleSelf();
}
}, ConfigExecutor.getClientConfigNotifierServiceExecutor());
}
}
void scheduleSelf() {
ConfigExecutor.scheduleClientConfigNotifier(this, tryTimes * 2L, TimeUnit.SECONDS);
}
}
| FuzzyWatchChangeNotifyTask |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/HttpMethod.java | {
"start": 793,
"end": 5441
} | enum ____ implements CharSequence {
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.2.
*/
OPTIONS(false, true),
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.3.
*/
GET(false, false),
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.4.
*/
HEAD(false, false),
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.5.
*/
POST(true, true),
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6.
*/
PUT(true, true),
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.7.
*/
DELETE(false, true),
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.8.
*/
TRACE(false, false),
/**
* See https://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.9.
*/
CONNECT(false, false),
/**
* See https://tools.ietf.org/html/rfc5789.
*/
PATCH(true, true),
/**
* A custom non-standard HTTP method.
*/
CUSTOM(false, true);
private final boolean requiresRequestBody;
private final boolean permitsRequestBody;
HttpMethod(boolean requiresRequestBody, boolean permitsRequestBody) {
this.requiresRequestBody = requiresRequestBody;
this.permitsRequestBody = permitsRequestBody;
}
@Override
public int length() {
return name().length();
}
@Override
public char charAt(int index) {
return name().charAt(index);
}
@Override
public CharSequence subSequence(int start, int end) {
return name().subSequence(start, end);
}
/**
* Whether the given method requires a request body.
*
* @return Does the method require a request body.
* @since 4.0.0
*/
public boolean requiresRequestBody() {
return requiresRequestBody;
}
/**
* Whether the given method allows a request body.
*
* @return Does the method allows a request body.
* @since 4.0.0
*/
public boolean permitsRequestBody() {
return permitsRequestBody;
}
/**
* Whether the given method allows a request body.
*
* @return Does the method allows a request body.
* @since 4.0.0
*/
public boolean permitsResponseBody() {
return permitsRequestBody;
}
/**
* Whether the given method requires a request body.
*
* @param method The {@link HttpMethod}
* @return True if it does
*/
public static boolean requiresRequestBody(HttpMethod method) {
return method != null && (method.equals(POST) || method.equals(PUT) || method.equals(PATCH));
}
/**
* Whether the given method allows a request body.
*
* @param method The {@link HttpMethod}
* @return True if it does
*/
public static boolean permitsRequestBody(HttpMethod method) {
return method != null && (requiresRequestBody(method)
|| method.equals(OPTIONS)
|| method.equals(DELETE)
|| method.equals(CUSTOM)
);
}
/**
*
* @param httpMethodName Name of the http method (maybe nonstandard)
* @return the value of enum (CUSTOM by default).
*/
public static HttpMethod parse(String httpMethodName) {
HttpMethod httpMethod = parseString(httpMethodName);
if (httpMethod != null) {
return httpMethod;
}
httpMethodName = httpMethodName.toUpperCase();
httpMethod = parseString(httpMethodName);
if (httpMethod != null) {
return httpMethod;
}
return CUSTOM;
}
private static HttpMethod parseString(String httpMethodName) {
switch (httpMethodName) {
case "OPTIONS":
case "options":
return OPTIONS;
case "GET":
case "get":
return GET;
case "HEAD":
case "head":
return HEAD;
case "POST":
case "post":
return POST;
case "PUT":
case "put":
return PUT;
case "DELETE":
case "delete":
return DELETE;
case "TRACE":
case "trace":
return TRACE;
case "CONNECT":
case "connect":
return CONNECT;
case "PATCH":
case "patch":
return PATCH;
default:
return null;
}
}
}
| HttpMethod |
java | apache__flink | flink-filesystems/flink-s3-fs-base/src/test/java/org/apache/flink/fs/s3/common/token/DynamicTemporaryAWSCredentialsProviderTest.java | {
"start": 1387,
"end": 3319
} | class ____ {
private static final String ACCESS_KEY_ID = "testAccessKeyId";
private static final String SECRET_ACCESS_KEY = "testSecretAccessKey";
private static final String SESSION_TOKEN = "testSessionToken";
@BeforeEach
void beforeEach() {
AbstractS3DelegationTokenReceiver.credentials = null;
}
@AfterEach
void afterEach() {
AbstractS3DelegationTokenReceiver.credentials = null;
}
@Test
void getCredentialsShouldThrowExceptionWhenNoCredentials() {
DynamicTemporaryAWSCredentialsProvider provider =
new DynamicTemporaryAWSCredentialsProvider();
assertThatThrownBy(provider::getCredentials).isInstanceOf(NoAwsCredentialsException.class);
}
@Test
void getCredentialsShouldStoreCredentialsWhenCredentialsProvided() throws Exception {
DynamicTemporaryAWSCredentialsProvider provider =
new DynamicTemporaryAWSCredentialsProvider();
Credentials credentials =
new Credentials(ACCESS_KEY_ID, SECRET_ACCESS_KEY, SESSION_TOKEN, null);
AbstractS3DelegationTokenReceiver receiver =
new AbstractS3DelegationTokenReceiver() {
@Override
public String serviceName() {
return "s3";
}
};
receiver.onNewTokensObtained(InstantiationUtil.serializeObject(credentials));
BasicSessionCredentials returnedCredentials =
(BasicSessionCredentials) provider.getCredentials();
assertThat(returnedCredentials.getAWSAccessKeyId()).isEqualTo(credentials.getAccessKeyId());
assertThat(returnedCredentials.getAWSSecretKey())
.isEqualTo(credentials.getSecretAccessKey());
assertThat(returnedCredentials.getSessionToken()).isEqualTo(credentials.getSessionToken());
}
}
| DynamicTemporaryAWSCredentialsProviderTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoProtocolVersion.java | {
"start": 1012,
"end": 2537
} | enum ____ {
UNKNOWN("Unknown", 1),
ENCRYPTION_ZONES("Encryption zones", 2);
private final String description;
private final int version;
private Integer unknownValue = null;
private static CryptoProtocolVersion[] supported = {ENCRYPTION_ZONES};
/**
* @return Array of supported protocol versions.
*/
public static CryptoProtocolVersion[] supported() {
return supported;
}
CryptoProtocolVersion(String description, int version) {
this.description = description;
this.version = version;
}
/**
* Returns if a given protocol version is supported.
*
* @param version version number
* @return true if the version is supported, else false
*/
public static boolean supports(CryptoProtocolVersion version) {
if (version.getVersion() == UNKNOWN.getVersion()) {
return false;
}
for (CryptoProtocolVersion v : CryptoProtocolVersion.values()) {
if (v.getVersion() == version.getVersion()) {
return true;
}
}
return false;
}
public void setUnknownValue(int unknown) {
this.unknownValue = unknown;
}
public int getUnknownValue() {
return unknownValue;
}
public String getDescription() {
return description;
}
public int getVersion() {
return version;
}
@Override
public String toString() {
return "CryptoProtocolVersion{" +
"description='" + description + '\'' +
", version=" + version +
", unknownValue=" + unknownValue +
'}';
}
}
| CryptoProtocolVersion |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/indices/recovery/AsyncRecoveryTarget.java | {
"start": 1218,
"end": 4922
} | class ____ implements RecoveryTargetHandler {
private final RecoveryTargetHandler target;
private final Executor executor;
public AsyncRecoveryTarget(RecoveryTargetHandler target, Executor executor) {
this.executor = executor;
this.target = target;
}
@Override
public void prepareForTranslogOperations(int totalTranslogOps, ActionListener<Void> listener) {
executor.execute(() -> target.prepareForTranslogOperations(totalTranslogOps, listener));
}
@Override
public void finalizeRecovery(long globalCheckpoint, long trimAboveSeqNo, ActionListener<Void> listener) {
executor.execute(() -> target.finalizeRecovery(globalCheckpoint, trimAboveSeqNo, listener));
}
@Override
public void handoffPrimaryContext(ReplicationTracker.PrimaryContext primaryContext, ActionListener<Void> listener) {
executor.execute(() -> target.handoffPrimaryContext(primaryContext, listener));
}
@Override
public void indexTranslogOperations(
List<Translog.Operation> operations,
int totalTranslogOps,
long maxSeenAutoIdTimestampOnPrimary,
long maxSeqNoOfDeletesOrUpdatesOnPrimary,
RetentionLeases retentionLeases,
long mappingVersionOnPrimary,
ActionListener<Long> listener
) {
executor.execute(
() -> target.indexTranslogOperations(
operations,
totalTranslogOps,
maxSeenAutoIdTimestampOnPrimary,
maxSeqNoOfDeletesOrUpdatesOnPrimary,
retentionLeases,
mappingVersionOnPrimary,
listener
)
);
}
@Override
public void receiveFileInfo(
List<String> phase1FileNames,
List<Long> phase1FileSizes,
List<String> phase1ExistingFileNames,
List<Long> phase1ExistingFileSizes,
int totalTranslogOps,
ActionListener<Void> listener
) {
executor.execute(
() -> target.receiveFileInfo(
phase1FileNames,
phase1FileSizes,
phase1ExistingFileNames,
phase1ExistingFileSizes,
totalTranslogOps,
listener
)
);
}
@Override
public void cleanFiles(
int totalTranslogOps,
long globalCheckpoint,
Store.MetadataSnapshot sourceMetadata,
ActionListener<Void> listener
) {
executor.execute(() -> target.cleanFiles(totalTranslogOps, globalCheckpoint, sourceMetadata, listener));
}
@Override
public void writeFileChunk(
StoreFileMetadata fileMetadata,
long position,
ReleasableBytesReference content,
boolean lastChunk,
int totalTranslogOps,
ActionListener<Void> listener
) {
final ReleasableBytesReference retained = content.retain();
final ActionListener<Void> wrappedListener = ActionListener.runBefore(listener, retained::close);
boolean success = false;
try {
executor.execute(() -> target.writeFileChunk(fileMetadata, position, retained, lastChunk, totalTranslogOps, wrappedListener));
success = true;
} finally {
if (success == false) {
content.decRef();
}
}
}
@Override
public void restoreFileFromSnapshot(
String repository,
IndexId indexId,
BlobStoreIndexShardSnapshot.FileInfo snapshotFile,
ActionListener<Void> listener
) {
executor.execute(() -> target.restoreFileFromSnapshot(repository, indexId, snapshotFile, listener));
}
}
| AsyncRecoveryTarget |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/BootstrapUtilsTests.java | {
"start": 9152,
"end": 9252
} | class ____ {
}
@NestedTestConfiguration(OVERRIDE)
| DoubleNestedWithImplicitlyInheritedWebConfig |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/IdempotentRepository.java | {
"start": 3714,
"end": 4158
} | class ____ about eager vs non-eager mode.
*
* @param key the key of the message
* @return <tt>true</tt> if this repository contains the specified element
*/
default boolean contains(Exchange exchange, String key) {
return contains(key);
}
/**
* Removes the key from the repository.
* <p/>
* Is usually invoked if the exchange failed.
* <p/>
* <b>Important:</b> Read the | javadoc |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/wall/WallProviderCreator.java | {
"start": 172,
"end": 511
} | interface ____ {
/**
* @param dataSource mabye exists wall config
* @param config maybe null
* @param dbType maybe null
* @return
*/
WallProvider createWallConfig(DataSourceProxy dataSource, WallConfig config, DbType dbType);
default int getOrder() {
return 0;
}
}
| WallProviderCreator |
java | alibaba__nacos | console/src/test/java/com/alibaba/nacos/console/handler/impl/remote/naming/InstanceRemoteHandlerTest.java | {
"start": 1263,
"end": 2550
} | class ____ extends AbstractRemoteHandlerTest {
InstanceRemoteHandler instanceRemoteHandler;
@BeforeEach
void setUp() {
super.setUpWithNaming();
instanceRemoteHandler = new InstanceRemoteHandler(clientHolder);
}
@Test
void listInstances() throws NacosException {
when(namingMaintainerService.listInstances("namespaceId", "groupName", "serviceName", "clusterName",
false)).thenReturn(Collections.singletonList(new Instance()));
Page<? extends Instance> page = instanceRemoteHandler.listInstances("namespaceId", "serviceName", "groupName",
"clusterName", 1, 10);
assertEquals(1, page.getPageItems().size());
}
@Test
void updateInstance() throws NacosException {
InstanceForm instanceForm = new InstanceForm();
instanceForm.setServiceName("test");
instanceForm.setIp("127.0.0.1");
instanceForm.setPort(3306);
instanceForm.validate();
Instance instance = new Instance();
instanceRemoteHandler.updateInstance(instanceForm, instance);
verify(namingMaintainerService).updateInstance(Constants.DEFAULT_NAMESPACE_ID, Constants.DEFAULT_GROUP, "test",
instance);
}
} | InstanceRemoteHandlerTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/unused/RemoveUnusedBeansTest.java | {
"start": 4054,
"end": 4266
} | class ____ implements Comparable<FooAlternative> {
@Override
public int compareTo(FooAlternative o) {
return 0;
}
}
@Singleton
static | InjectedViaInstanceWithWildcard |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/TransactionalListCommandsTest.java | {
"start": 627,
"end": 2829
} | class ____ extends DatasourceTestBase {
private RedisDataSource blocking;
private ReactiveRedisDataSource reactive;
@BeforeEach
void initialize() {
blocking = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(60));
reactive = new ReactiveRedisDataSourceImpl(vertx, redis, api);
}
@AfterEach
public void clear() {
blocking.flushall();
}
@Test
public void listBlocking() {
TransactionResult result = blocking.withTransaction(tx -> {
TransactionalListCommands<String, String> list = tx.list(String.class);
assertThat(list.getDataSource()).isEqualTo(tx);
list.lpush(key, "a", "b", "c", "d");
list.linsertBeforePivot(key, "c", "1");
list.lpos(key, "c");
list.llen(key);
list.lpop(key);
});
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((long) result.get(0)).isEqualTo(4);
assertThat((long) result.get(1)).isEqualTo(5);
assertThat((long) result.get(2)).isEqualTo(2);
assertThat((long) result.get(3)).isEqualTo(5);
assertThat((String) result.get(4)).isEqualTo("d");
}
@Test
public void listReactive() {
TransactionResult result = reactive.withTransaction(tx -> {
ReactiveTransactionalListCommands<String, String> list = tx.list(String.class);
return list.lpush(key, "a", "b", "c", "d")
.chain(() -> list.linsertBeforePivot(key, "c", "1"))
.chain(() -> list.lpos(key, "c"))
.chain(() -> list.llen(key))
.chain(() -> list.lpop(key));
}).await().atMost(Duration.ofSeconds(5));
assertThat(result.size()).isEqualTo(5);
assertThat(result.discarded()).isFalse();
assertThat((long) result.get(0)).isEqualTo(4);
assertThat((long) result.get(1)).isEqualTo(5);
assertThat((long) result.get(2)).isEqualTo(2);
assertThat((long) result.get(3)).isEqualTo(5);
assertThat((String) result.get(4)).isEqualTo("d");
}
}
| TransactionalListCommandsTest |
java | quarkusio__quarkus | integration-tests/main/src/main/java/io/quarkus/it/websocket/AddWebSocketHandler.java | {
"start": 1171,
"end": 1379
} | class ____ extends Endpoint {
@Override
public void onOpen(Session session, EndpointConfig config) {
session.getAsyncRemote().sendText("DYNAMIC");
}
}
}
| WebSocketEndpoint |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/AnnotationLiteralGenerator.java | {
"start": 4477,
"end": 4566
} | class ____ be generated already exists.
* <p>
* The generated annotation literal | to |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/convert/multiple/StringToQueueConverterTest.java | {
"start": 1885,
"end": 4256
} | class ____ {
private StringToQueueConverter converter;
@BeforeEach
public void init() {
converter = new StringToQueueConverter(FrameworkModel.defaultModel());
}
@Test
void testAccept() {
assertFalse(converter.accept(String.class, Collection.class));
assertFalse(converter.accept(String.class, List.class));
assertFalse(converter.accept(String.class, AbstractList.class));
assertTrue(converter.accept(String.class, LinkedList.class));
assertFalse(converter.accept(String.class, ArrayList.class));
assertTrue(converter.accept(String.class, Queue.class));
assertTrue(converter.accept(String.class, BlockingQueue.class));
assertTrue(converter.accept(String.class, TransferQueue.class));
assertTrue(converter.accept(String.class, Deque.class));
assertTrue(converter.accept(String.class, BlockingDeque.class));
assertFalse(converter.accept(String.class, Set.class));
assertFalse(converter.accept(String.class, SortedSet.class));
assertFalse(converter.accept(String.class, NavigableSet.class));
assertFalse(converter.accept(String.class, TreeSet.class));
assertFalse(converter.accept(String.class, ConcurrentSkipListSet.class));
assertFalse(converter.accept(null, char[].class));
assertFalse(converter.accept(null, String.class));
assertFalse(converter.accept(null, String.class));
assertFalse(converter.accept(null, null));
}
@Test
void testConvert() {
Queue values = new ArrayDeque(asList(1.0, 2.0, 3.0));
Queue result = (Queue<Double>) converter.convert("1.0,2.0,3.0", Queue.class, Double.class);
assertTrue(CollectionUtils.equals(values, result));
values.clear();
values.add(123);
result = (Queue) converter.convert("123", Queue.class, Integer.class);
assertTrue(CollectionUtils.equals(values, result));
assertNull(converter.convert(null, Collection.class, Integer.class));
assertNull(converter.convert("", Collection.class, null));
}
@Test
void testGetSourceType() {
assertEquals(String.class, converter.getSourceType());
}
@Test
void testGetPriority() {
assertEquals(Integer.MAX_VALUE - 2, converter.getPriority());
}
}
| StringToQueueConverterTest |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/ProxifyMethodChangingTheObjectUnderTest.java | {
"start": 2487,
"end": 10556
} | class ____ {
public static final String FIELD_NAME = "dispatcher";
private final SoftProxies proxies;
ProxifyMethodChangingTheObjectUnderTest(SoftProxies proxies) {
this.proxies = proxies;
}
@RuntimeType
public static AbstractAssert<?, ?> intercept(@FieldValue(FIELD_NAME) ProxifyMethodChangingTheObjectUnderTest dispatcher,
@SuperCall Callable<AbstractAssert<?, ?>> assertionMethod,
@This AbstractAssert<?, ?> currentAssertInstance) throws Exception {
AbstractAssert<?, ?> result = assertionMethod.call();
return dispatcher.createAssertProxy(result).withAssertionState(currentAssertInstance);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private AbstractAssert<?, ?> createAssertProxy(AbstractAssert<?, ?> currentAssert) {
if (currentAssert instanceof IterableSizeAssert) return createIterableSizeAssertProxy(currentAssert);
if (currentAssert instanceof FileSizeAssert) return createFileSizeAssertProxy(currentAssert);
if (currentAssert instanceof BigDecimalScaleAssert) return createBigDecimalScaleAssertProxy(currentAssert);
if (currentAssert instanceof MapSizeAssert) return createMapSizeAssertProxy(currentAssert);
if (currentAssert instanceof RecursiveComparisonAssert assert1)
return createRecursiveComparisonAssertProxy(assert1);
return (AbstractAssert) proxies.createSoftAssertionProxy(currentAssert.getClass(), actualClass(currentAssert),
actual(currentAssert));
}
private RecursiveComparisonAssert<?> createRecursiveComparisonAssertProxy(RecursiveComparisonAssert<?> currentAssert) {
return proxies.createRecursiveComparisonAssertProxy(currentAssert);
}
private MapSizeAssert<?, ?> createMapSizeAssertProxy(Object currentAssert) {
MapSizeAssert<?, ?> mapSizeAssert = (MapSizeAssert<?, ?>) currentAssert;
// can't use the usual way of building soft proxy since MapSizeAssert takes 2 parameters
return proxies.createMapSizeAssertProxy(mapSizeAssert);
}
private IterableSizeAssert<?> createIterableSizeAssertProxy(Object currentAssert) {
IterableSizeAssert<?> iterableSizeAssert = (IterableSizeAssert<?>) currentAssert;
// can't use the usual way of building soft proxy since IterableSizeAssert takes 2 parameters
return proxies.createIterableSizeAssertProxy(iterableSizeAssert);
}
private FileSizeAssert<?> createFileSizeAssertProxy(Object currentAssert) {
FileSizeAssert<?> fileSizeAssert = (FileSizeAssert<?>) currentAssert;
return proxies.createFileSizeAssertProxy(fileSizeAssert);
}
private BigDecimalScaleAssert<?> createBigDecimalScaleAssertProxy(Object currentAssert) {
BigDecimalScaleAssert<?> bigDecimalScaleAssert = (BigDecimalScaleAssert<?>) currentAssert;
return proxies.createBigDecimalScaleAssertProxy(bigDecimalScaleAssert);
}
@SuppressWarnings("rawtypes")
private static Class actualClass(Object currentAssert) {
if (currentAssert instanceof AbstractObjectArrayAssert) return Array.newInstance(Object.class, 0).getClass();
if (currentAssert instanceof StringAssert) return String.class;
if (currentAssert instanceof RecursiveComparisonAssert) return Object.class;
if (currentAssert instanceof AtomicIntegerFieldUpdaterAssert) return AtomicIntegerFieldUpdater.class;
if (currentAssert instanceof AtomicLongFieldUpdaterAssert) return AtomicLongFieldUpdater.class;
if (currentAssert instanceof AtomicMarkableReferenceAssert) return AtomicMarkableReference.class;
if (currentAssert instanceof AtomicReferenceAssert) return AtomicReference.class;
if (currentAssert instanceof AtomicReferenceArrayAssert) return AtomicReferenceArray.class;
if (currentAssert instanceof AtomicReferenceFieldUpdaterAssert) return AtomicReferenceFieldUpdater.class;
if (currentAssert instanceof AtomicStampedReferenceAssert) return AtomicStampedReference.class;
if (currentAssert instanceof BigDecimalAssert) return BigDecimal.class;
if (currentAssert instanceof BigIntegerAssert) return BigInteger.class;
if (currentAssert instanceof BooleanAssert) return Boolean.class;
if (currentAssert instanceof BooleanArrayAssert) return boolean[].class;
if (currentAssert instanceof ByteAssert) return Byte.class;
if (currentAssert instanceof ByteArrayAssert) return byte[].class;
if (currentAssert instanceof CharArrayAssert) return char[].class;
if (currentAssert instanceof CharSequenceAssert) return CharSequence.class;
if (currentAssert instanceof CharacterAssert) return Character.class;
if (currentAssert instanceof ClassAssert) return Class.class;
if (currentAssert instanceof CompletableFutureAssert) return CompletionStage.class;
if (currentAssert instanceof DateAssert) return Date.class;
if (currentAssert instanceof DoubleAssert) return Double.class;
if (currentAssert instanceof DoubleArrayAssert) return double[].class;
if (currentAssert instanceof DoublePredicateAssert) return DoublePredicate.class;
if (currentAssert instanceof DurationAssert) return Duration.class;
if (currentAssert instanceof PeriodAssert) return Period.class;
if (currentAssert instanceof FileAssert) return File.class;
if (currentAssert instanceof FloatAssert) return Float.class;
if (currentAssert instanceof FloatArrayAssert) return float[].class;
if (currentAssert instanceof FutureAssert) return Future.class; // must be after CompletionStage / CompletableFuture
if (currentAssert instanceof InputStreamAssert) return InputStream.class;
if (currentAssert instanceof InstantAssert) return Instant.class;
if (currentAssert instanceof IntArrayAssert) return int[].class;
if (currentAssert instanceof IntPredicateAssert) return IntPredicate.class;
if (currentAssert instanceof IntegerAssert) return Integer.class;
if (currentAssert instanceof IteratorAssert) return Iterator.class;
if (currentAssert instanceof LocalDateAssert) return LocalDate.class;
if (currentAssert instanceof LocalDateTimeAssert) return LocalDateTime.class;
if (currentAssert instanceof LongAdderAssert) return LongAdder.class;
if (currentAssert instanceof LongArrayAssert) return long[].class;
if (currentAssert instanceof LongAssert) return Long.class;
if (currentAssert instanceof LongPredicateAssert) return LongPredicate.class;
if (currentAssert instanceof MapAssert) return Map.class;
if (currentAssert instanceof MatcherAssert) return Matcher.class;
if (currentAssert instanceof ObjectAssert) return Object.class;
if (currentAssert instanceof OffsetDateTimeAssert) return OffsetDateTime.class;
if (currentAssert instanceof OffsetTimeAssert) return OffsetTime.class;
if (currentAssert instanceof OptionalAssert) return Optional.class;
if (currentAssert instanceof OptionalDoubleAssert) return OptionalDouble.class;
if (currentAssert instanceof OptionalIntAssert) return OptionalInt.class;
if (currentAssert instanceof OptionalLongAssert) return OptionalLong.class;
if (currentAssert instanceof PathAssert) return Path.class;
if (currentAssert instanceof PredicateAssert) return Predicate.class;
if (currentAssert instanceof ShortAssert) return Short.class;
if (currentAssert instanceof ShortArrayAssert) return short[].class;
if (currentAssert instanceof ThrowableAssert) return Throwable.class;
if (currentAssert instanceof ThrowableAssertAlternative) return Throwable.class;
if (currentAssert instanceof UriAssert) return URI.class;
if (currentAssert instanceof UrlAssert) return URL.class;
if (currentAssert instanceof ZonedDateTimeAssert) return ZonedDateTime.class;
// Trying to create a proxy will only match exact constructor argument types.
// To initialize one for ListAssert for example we can't use an ArrayList, we have to use a List.
// So we can't just return actual.getClass() as we could read a concrete | ProxifyMethodChangingTheObjectUnderTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/uniqueconstraint/UniqueConstraintColumnOrderTest.java | {
"start": 2575,
"end": 2666
} | class ____ {
@Id
private Long id;
private String a;
private String b;
}
}
| TestEntity |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/AbstractGenerator.java | {
"start": 1335,
"end": 1582
} | class ____ from a target package, base name and suffix. When the class
* is located in a default package, the target package name is an empty string.
*
* @param targetPackage name of the target package
* @param baseName simple | name |
java | playframework__playframework | documentation/manual/working/commonGuide/pekko/code/javaguide/pekko/typed/fp/ConfiguredActor.java | {
"start": 458,
"end": 928
} | class ____ {
public final ActorRef<String> replyTo;
public GetConfig(ActorRef<String> replyTo) {
this.replyTo = replyTo;
}
}
public static Behavior<ConfiguredActor.GetConfig> create(Config config) {
String myConfig = config.getString("my.config");
return Behaviors.receiveMessage(
(GetConfig message) -> {
message.replyTo.tell(myConfig);
return Behaviors.same();
});
}
}
// #fp-configured-actor
| GetConfig |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/NamedQueryBinder.java | {
"start": 1354,
"end": 9277
} | class ____ {
public static void processNamedQuery(
HbmLocalMetadataBuildingContext context,
JaxbHbmNamedQueryType namedQueryBinding) {
processNamedQuery( context, namedQueryBinding, "" );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Named HQL query
public static void processNamedQuery(
HbmLocalMetadataBuildingContext context,
JaxbHbmNamedQueryType namedQueryBinding,
String prefix) {
final String registrationName = prefix + namedQueryBinding.getName();
final NamedHqlQueryDefinition.Builder<?> queryBuilder =
new NamedHqlQueryDefinition.Builder<>( registrationName )
.setComment( namedQueryBinding.getComment() )
.setCacheable( namedQueryBinding.isCacheable() )
.setCacheMode( namedQueryBinding.getCacheMode() )
.setCacheRegion( namedQueryBinding.getCacheRegion() )
.setTimeout( namedQueryBinding.getTimeout() )
.setReadOnly( namedQueryBinding.isReadOnly() )
.setFlushMode( namedQueryBinding.getFlushMode() )
.setFetchSize( namedQueryBinding.getFetchSize() );
boolean foundQuery = false;
for ( Object content : namedQueryBinding.getContent() ) {
if ( content instanceof String string ) {
final String hqlString = nullIfEmpty( string.trim() );
if ( isNotEmpty( hqlString ) ) {
queryBuilder.setHqlString( hqlString );
foundQuery = true;
}
}
else {
final JaxbHbmQueryParamType paramTypeBinding = (JaxbHbmQueryParamType)
( (JAXBElement<?>) content ).getValue();
queryBuilder.addParameterTypeHint( paramTypeBinding.getName(), paramTypeBinding.getType() );
}
}
if ( ! foundQuery ) {
throw new org.hibernate.boot.MappingException(
String.format(
"Named query [%s] did not specify query string",
namedQueryBinding.getName()
),
context.getOrigin()
);
}
context.getMetadataCollector().addNamedQuery( queryBuilder.build() );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Named native query
public static void processNamedNativeQuery(
HbmLocalMetadataBuildingContext context,
JaxbHbmNamedNativeQueryType namedQueryBinding) {
processNamedNativeQuery( context, namedQueryBinding, "" );
}
public static void processNamedNativeQuery(
final HbmLocalMetadataBuildingContext context,
JaxbHbmNamedNativeQueryType namedQueryBinding,
String prefix) {
if ( namedQueryBinding.isCallable() ) {
DeprecationLogger.DEPRECATION_LOGGER.warn(
"Marking named native queries as callable is no longer supported; use `@jakarta.persistence.NamedStoredProcedureQuery` instead. Ignoring."
);
}
final String registrationName = prefix + namedQueryBinding.getName();
final NamedNativeQueryDefinition.Builder<?> builder =
new NamedNativeQueryDefinition.Builder<>( registrationName )
.setComment( namedQueryBinding.getComment() )
.setCacheable( namedQueryBinding.isCacheable() )
.setCacheMode( namedQueryBinding.getCacheMode() )
.setCacheRegion( namedQueryBinding.getCacheRegion() )
.setTimeout( namedQueryBinding.getTimeout() )
.setReadOnly( namedQueryBinding.isReadOnly() )
.setFlushMode( namedQueryBinding.getFlushMode() )
.setFetchSize( namedQueryBinding.getFetchSize() )
.setResultSetMappingName( namedQueryBinding.getResultsetRef() );
final ImplicitHbmResultSetMappingDescriptorBuilder implicitResultSetMappingBuilder =
new ImplicitHbmResultSetMappingDescriptorBuilder( registrationName, context );
boolean foundQuery = false;
for ( Object content : namedQueryBinding.getContent() ) {
final boolean wasQuery = processNamedQueryContentItem(
content,
builder,
implicitResultSetMappingBuilder,
namedQueryBinding,
context
);
if ( wasQuery ) {
foundQuery = true;
}
}
if ( !foundQuery ) {
throw new org.hibernate.boot.MappingException(
String.format(
"Named native query [%s] did not specify query string",
namedQueryBinding.getName()
),
context.getOrigin()
);
}
if ( implicitResultSetMappingBuilder.hasAnyReturns() ) {
if ( isNotEmpty( namedQueryBinding.getResultsetRef() ) ) {
throw new org.hibernate.boot.MappingException(
String.format(
"Named native query [%s] specified both a resultset-ref and an inline mapping of results",
namedQueryBinding.getName()
),
context.getOrigin()
);
}
context.getMetadataCollector().addResultSetMapping( implicitResultSetMappingBuilder.build( context ) );
builder.setResultSetMappingName( implicitResultSetMappingBuilder.getRegistrationName() );
}
if ( namedQueryBinding.isCallable() ) {
final NamedProcedureCallDefinition definition = QueryBinder.createStoredProcedure(
builder, context,
() -> illegalCallSyntax( context, namedQueryBinding, builder.getSqlString() )
);
context.getMetadataCollector().addNamedProcedureCallDefinition( definition );
DeprecationLogger.DEPRECATION_LOGGER.warn(
"Marking named native queries as callable is deprecated; use `<named-stored-procedure-query/>` instead."
);
}
else {
context.getMetadataCollector().addNamedNativeQuery( builder.build() );
}
}
private static MappingException illegalCallSyntax(
HbmLocalMetadataBuildingContext context,
JaxbHbmNamedNativeQueryType namedQueryBinding,
String sqlString) {
return new MappingException(
String.format(
"Callable named native query [%s] doesn't use the JDBC call syntax: %s",
namedQueryBinding.getName(),
sqlString
),
context.getOrigin()
);
}
private static boolean processNamedQueryContentItem(
Object content,
NamedNativeQueryDefinition.Builder<?> queryBuilder,
ImplicitHbmResultSetMappingDescriptorBuilder implicitResultSetMappingBuilder,
JaxbHbmNamedNativeQueryType namedQueryBinding,
HbmLocalMetadataBuildingContext context) {
if ( content instanceof String string ) {
// Especially when the query string is wrapped in CDATA we will get
// "extra" Strings here containing just spaces and/or newlines. This
// bit tries to account for them.
final String contentString = nullIfEmpty( string.trim() );
if ( contentString != null ) {
queryBuilder.setSqlString( string );
return true;
}
else {
return false;
}
}
else if ( content instanceof JAXBElement<?> element ) {
return processNamedQueryContentItem(
element.getValue(),
queryBuilder,
implicitResultSetMappingBuilder,
namedQueryBinding,
context
);
}
if ( content instanceof JaxbHbmQueryParamType paramTypeBinding ) {
queryBuilder.addParameterTypeHint( paramTypeBinding.getName(), paramTypeBinding.getType() );
}
else if ( content instanceof JaxbHbmSynchronizeType synchronizedSpace ) {
queryBuilder.addSynchronizedQuerySpace( synchronizedSpace.getTable() );
}
else if ( content instanceof JaxbHbmNativeQueryScalarReturnType scalarReturnType ) {
implicitResultSetMappingBuilder.addReturn( scalarReturnType );
}
else if ( content instanceof JaxbHbmNativeQueryReturnType returnType ) {
implicitResultSetMappingBuilder.addReturn( returnType );
}
else if ( content instanceof JaxbHbmNativeQueryJoinReturnType jaxbHbmNativeQueryJoinReturnType ) {
implicitResultSetMappingBuilder.addReturn( jaxbHbmNativeQueryJoinReturnType );
}
else if ( content instanceof JaxbHbmNativeQueryCollectionLoadReturnType collectionLoadReturnType ) {
implicitResultSetMappingBuilder.addReturn( collectionLoadReturnType );
}
else {
throw new org.hibernate.boot.MappingException(
String.format(
Locale.ENGLISH,
"Encountered unexpected content type [%s] for named native query [%s] : [%s]",
content.getClass().getName(),
namedQueryBinding.getName(),
content
),
context.getOrigin()
);
}
return false;
}
}
| NamedQueryBinder |
java | alibaba__nacos | client/src/test/java/com/alibaba/nacos/client/config/impl/YmlChangeParserTest.java | {
"start": 1116,
"end": 3169
} | class ____ {
private final YmlChangeParser parser = new YmlChangeParser();
private final String type = "yaml";
@Test
void testType() {
assertTrue(parser.isResponsibleFor(type));
}
@Test
void testAddKey() throws IOException {
Map<String, ConfigChangeItem> map = parser.doParse("", "app:\n name: nacos", type);
assertNull(map.get("app.name").getOldValue());
assertEquals("nacos", map.get("app.name").getNewValue());
}
@Test
void testRemoveKey() throws IOException {
Map<String, ConfigChangeItem> map = parser.doParse("app:\n name: nacos", "", type);
assertEquals("nacos", map.get("app.name").getOldValue());
assertNull(map.get("app.name").getNewValue());
}
@Test
void testModifyKey() throws IOException {
Map<String, ConfigChangeItem> map = parser.doParse("app:\n name: rocketMQ", "app:\n name: nacos", type);
assertEquals("rocketMQ", map.get("app.name").getOldValue());
assertEquals("nacos", map.get("app.name").getNewValue());
}
@Test
void testComplexYaml() throws IOException {
/*
* map:
* key1: "string"
* key2:
* - item1
* - item2
* - item3
* key3: 123
*/
String s = "map:\n" + " key1: \"string\"\n" + " key2:\n" + " - item1\n" + " - item2\n" + " - item3\n"
+ " key3: 123 \n";
Map<String, ConfigChangeItem> map = parser.doParse(s, s, type);
assertEquals(0, map.size());
}
@Test
void testChangeInvalidKey() {
assertThrows(NacosRuntimeException.class, () -> {
parser.doParse("anykey:\n a",
"anykey: !!javax.script.ScriptEngineManager [\n" + " !!java.net.URLClassLoader [[\n"
+ " !!java.net.URL [\"http://[yourhost]:[port]/yaml-payload.jar\"]\n" + " ]]\n" + "]",
type);
});
}
}
| YmlChangeParserTest |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableWithLatestFromTest.java | {
"start": 1436,
"end": 26670
} | class ____ extends RxJavaTest {
static final BiFunction<Integer, Integer, Integer> COMBINER = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
return (t1 << 8) + t2;
}
};
static final BiFunction<Integer, Integer, Integer> COMBINER_ERROR = new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer t1, Integer t2) {
throw new TestException("Forced failure");
}
};
@Test
public void simple() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Subscriber<Integer> subscriber = TestHelper.mockSubscriber();
InOrder inOrder = inOrder(subscriber);
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
result.subscribe(subscriber);
source.onNext(1);
inOrder.verify(subscriber, never()).onNext(anyInt());
other.onNext(1);
inOrder.verify(subscriber, never()).onNext(anyInt());
source.onNext(2);
inOrder.verify(subscriber).onNext((2 << 8) + 1);
other.onNext(2);
inOrder.verify(subscriber, never()).onNext(anyInt());
other.onComplete();
inOrder.verify(subscriber, never()).onComplete();
source.onNext(3);
inOrder.verify(subscriber).onNext((3 << 8) + 2);
source.onComplete();
inOrder.verify(subscriber).onComplete();
verify(subscriber, never()).onError(any(Throwable.class));
}
@Test
public void emptySource() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onComplete();
ts.assertNoErrors();
ts.assertTerminated();
ts.assertNoValues();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void emptyOther() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
source.onNext(1);
source.onComplete();
ts.assertNoErrors();
ts.assertTerminated();
ts.assertNoValues();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
@Test
public void unsubscription() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
ts.cancel();
ts.assertValue((1 << 8) + 1);
ts.assertNoErrors();
ts.assertNotComplete();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
/**
 * An onError from the main source terminates the sequence with that error,
 * after delivering the value combined beforehand, and unsubscribes both sources.
 */
@Test
public void sourceThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
source.onError(new TestException());
ts.assertTerminated();
ts.assertValue((1 << 8) + 1);
ts.assertError(TestException.class);
ts.assertNotComplete();
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
/**
 * An onError from the "other" source also terminates the combined sequence
 * with that error (not just silently dropping the latest value).
 */
@Test
public void otherThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
other.onError(new TestException());
ts.assertTerminated();
ts.assertValue((1 << 8) + 1);
ts.assertNotComplete();
ts.assertError(TestException.class);
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
/**
 * A combiner function that throws (COMBINER_ERROR) turns the in-flight onNext into
 * an onError: no value is emitted and both sources are unsubscribed.
 */
@Test
public void functionThrows() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER_ERROR);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
result.subscribe(ts);
assertTrue(source.hasSubscribers());
assertTrue(other.hasSubscribers());
other.onNext(1);
source.onNext(1);
ts.assertTerminated();
ts.assertNotComplete();
ts.assertNoValues();
ts.assertError(TestException.class);
assertFalse(source.hasSubscribers());
assertFalse(other.hasSubscribers());
}
/**
 * Upstream completion must not be reported as a cancellation of the
 * downstream subscriber (regression guard for terminal-event handling).
 */
@Test
public void noDownstreamUnsubscribe() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>();
result.subscribe(ts);
source.onComplete();
assertFalse(ts.isCancelled());
}
/**
 * Backpressure: with request(1) outstanding, a main value without a latest "other"
 * value is dropped (and, presumably, the request is replenished — see
 * singleRequestNotForgottenWhenNoData); once "other" signaled, each main onNext
 * produces one combined value per requested amount. Cancel detaches "other".
 */
@Test
public void backpressure() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>(0L);
result.subscribe(ts);
assertTrue("Other has no observers!", other.hasSubscribers());
ts.request(1);
source.onNext(1);
// dropping the un-combinable value must not unsubscribe "other"
assertTrue("Other has no observers!", other.hasSubscribers());
ts.assertNoValues();
other.onNext(1);
source.onNext(2);
ts.assertValue((2 << 8) + 1);
ts.request(5);
source.onNext(3);
source.onNext(4);
source.onNext(5);
source.onNext(6);
source.onNext(7);
ts.assertValues(
(2 << 8) + 1, (3 << 8) + 1, (4 << 8) + 1, (5 << 8) + 1,
(6 << 8) + 1, (7 << 8) + 1
);
ts.cancel();
assertFalse("Other has observers!", other.hasSubscribers());
ts.assertNoErrors();
}
/** Shared combiner for the many-sources tests: renders the latest values array as a string. */
static final Function<Object[], String> toArray = args -> Arrays.toString(args);
/**
 * Array overload with three "other" sources: a combined value is only emitted once
 * every other source has a latest value; a completed (but non-empty) other keeps its
 * last value available; main completion unsubscribes everything.
 */
@Test
public void manySources() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
PublishProcessor<String> pp3 = PublishProcessor.create();
PublishProcessor<String> main = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>();
main.withLatestFrom(new Flowable[] { pp1, pp2, pp3 }, toArray)
.subscribe(ts);
// until all three others have emitted, main values are dropped
main.onNext("1");
ts.assertNoValues();
pp1.onNext("a");
ts.assertNoValues();
pp2.onNext("A");
ts.assertNoValues();
pp3.onNext("=");
ts.assertNoValues();
main.onNext("2");
ts.assertValues("[2, a, A, =]");
// others emitting on their own never triggers output
pp2.onNext("B");
ts.assertValues("[2, a, A, =]");
// pp3 completing retains its last value "=" for later combinations
pp3.onComplete();
ts.assertValues("[2, a, A, =]");
pp1.onNext("b");
main.onNext("3");
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
main.onComplete();
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
assertFalse("ps3 has subscribers?", pp3.hasSubscribers());
}
/**
 * Same scenario as {@link #manySources()} but exercising the Iterable overload
 * of withLatestFrom instead of the array overload.
 */
@Test
public void manySourcesIterable() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
PublishProcessor<String> pp3 = PublishProcessor.create();
PublishProcessor<String> main = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>();
main.withLatestFrom(Arrays.<Flowable<?>>asList(pp1, pp2, pp3), toArray)
.subscribe(ts);
main.onNext("1");
ts.assertNoValues();
pp1.onNext("a");
ts.assertNoValues();
pp2.onNext("A");
ts.assertNoValues();
pp3.onNext("=");
ts.assertNoValues();
main.onNext("2");
ts.assertValues("[2, a, A, =]");
pp2.onNext("B");
ts.assertValues("[2, a, A, =]");
pp3.onComplete();
ts.assertValues("[2, a, A, =]");
pp1.onNext("b");
main.onNext("3");
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
main.onComplete();
ts.assertValues("[2, a, A, =]", "[3, b, B, =]");
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
assertFalse("ps3 has subscribers?", pp3.hasSubscribers());
}
/**
 * Sweeps the Iterable overload over 0..34 other sources (covering the internal
 * array-growth boundaries) and checks the single combined value each time.
 * The commented-out null entry hints this sweep once also covered null values.
 */
@Test
public void manySourcesIterableSweep() {
for (String val : new String[] { "1" /*, null*/ }) {
int n = 35;
for (int i = 0; i < n; i++) {
List<Flowable<?>> sources = new ArrayList<>();
List<String> expected = new ArrayList<>();
expected.add(val);
for (int j = 0; j < i; j++) {
sources.add(Flowable.just(val));
expected.add(String.valueOf(val));
}
TestSubscriber<String> ts = new TestSubscriber<>();
PublishProcessor<String> main = PublishProcessor.create();
main.withLatestFrom(sources, toArray).subscribe(ts);
ts.assertNoValues();
main.onNext(val);
main.onComplete();
// toArray renders the combined array like List.toString(), hence this comparison
ts.assertValue(expected.toString());
ts.assertNoErrors();
ts.assertComplete();
}
}
}
/**
 * Many-sources variant without any "other" signals: all 10 main values are dropped,
 * the sequence still completes, and the requested amount is honored without hanging.
 */
@Test
public void backpressureNoSignal() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 10).withLatestFrom(new Flowable<?>[] { pp1, pp2 }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.request(1);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
}
/**
 * Many-sources variant with both others signaled up front: each request(1)
 * releases exactly one combined value from the cold range source.
 */
@Test
public void backpressureWithSignal() {
PublishProcessor<String> pp1 = PublishProcessor.create();
PublishProcessor<String> pp2 = PublishProcessor.create();
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 3).withLatestFrom(new Flowable<?>[] { pp1, pp2 }, toArray)
.subscribe(ts);
ts.assertNoValues();
pp1.onNext("1");
pp2.onNext("1");
ts.request(1);
ts.assertValue("[1, 1, 1]");
ts.request(1);
ts.assertValues("[1, 1, 1]", "[2, 1, 1]");
ts.request(1);
ts.assertValues("[1, 1, 1]", "[2, 1, 1]", "[3, 1, 1]");
ts.assertNoErrors();
ts.assertComplete();
assertFalse("ps1 has subscribers?", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?", pp2.hasSubscribers());
}
/**
 * An empty "other" source means no combination can ever happen:
 * the result completes immediately with no values.
 */
@Test
public void withEmpty() {
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 3).withLatestFrom(
new Flowable<?>[] { Flowable.just(1), Flowable.empty() }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertComplete();
}
/**
 * An erroring "other" source propagates its error to the downstream
 * subscriber before any combination takes place.
 */
@Test
public void withError() {
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.range(1, 3).withLatestFrom(
new Flowable<?>[] { Flowable.just(1), Flowable.error(new TestException()) }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.assertError(TestException.class);
ts.assertNotComplete();
}
/**
 * An erroring main source propagates its error even when all
 * "other" sources are healthy.
 */
@Test
public void withMainError() {
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flowable.error(new TestException()).withLatestFrom(
new Flowable<?>[] { Flowable.just(1), Flowable.just(1) }, toArray)
.subscribe(ts);
ts.assertNoValues();
ts.assertError(TestException.class);
ts.assertNotComplete();
}
/** Smoke test for the 2-other-source overload taking a Function3 combiner. */
@Test
public void with2Others() {
Flowable<Integer> just = Flowable.just(1);
TestSubscriber<List<Integer>> ts = new TestSubscriber<>();
just.withLatestFrom(just, just, new Function3<Integer, Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b, Integer c) {
return Arrays.asList(a, b, c);
}
})
.subscribe(ts);
ts.assertValue(Arrays.asList(1, 1, 1));
ts.assertNoErrors();
ts.assertComplete();
}
/** Smoke test for the 3-other-source overload taking a Function4 combiner. */
@Test
public void with3Others() {
Flowable<Integer> just = Flowable.just(1);
TestSubscriber<List<Integer>> ts = new TestSubscriber<>();
just.withLatestFrom(just, just, just, new Function4<Integer, Integer, Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b, Integer c, Integer d) {
return Arrays.asList(a, b, c, d);
}
})
.subscribe(ts);
ts.assertValue(Arrays.asList(1, 1, 1, 1));
ts.assertNoErrors();
ts.assertComplete();
}
/** Smoke test for the 4-other-source overload taking a Function5 combiner. */
@Test
public void with4Others() {
Flowable<Integer> just = Flowable.just(1);
TestSubscriber<List<Integer>> ts = new TestSubscriber<>();
just.withLatestFrom(just, just, just, just, new Function5<Integer, Integer, Integer, Integer, Integer, List<Integer>>() {
@Override
public List<Integer> apply(Integer a, Integer b, Integer c, Integer d, Integer e) {
return Arrays.asList(a, b, c, d, e);
}
})
.subscribe(ts);
ts.assertValue(Arrays.asList(1, 1, 1, 1, 1));
ts.assertNoErrors();
ts.assertComplete();
}
/** Standard TestHelper dispose check for the single- and two-other overloads. */
@Test
public void dispose() {
TestHelper.checkDisposed(Flowable.just(1).withLatestFrom(Flowable.just(2), new BiFunction<Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b) throws Exception {
return a;
}
}));
TestHelper.checkDisposed(Flowable.just(1).withLatestFrom(Flowable.just(2), Flowable.just(3), new Function3<Integer, Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b, Integer c) throws Exception {
return a;
}
}));
}
/**
 * A crashing Iterable (its iterator() call throws) surfaces as an onError
 * carrying the "iterator()" message rather than blowing up subscription-time.
 */
@Test
public void manyIteratorThrows() {
Flowable.just(1)
.withLatestFrom(new CrashingMappedIterable<>(1, 100, 100, new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) throws Exception {
return Flowable.just(2);
}
}), new Function<Object[], Object>() {
@Override
public Object apply(Object[] a) throws Exception {
return a;
}
})
.to(TestHelper.testConsumer())
.assertFailureAndMessage(TestException.class, "iterator()");
}
/** A throwing Function3 combiner in the multi-other overload fails the sequence with that exception. */
@Test
public void manyCombinerThrows() {
Flowable.just(1).withLatestFrom(Flowable.just(2), Flowable.just(3), new Function3<Integer, Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b, Integer c) throws Exception {
throw new TestException();
}
})
.test()
.assertFailure(TestException.class);
}
/**
 * A misbehaving main source that signals after a terminal event: the first error is
 * delivered downstream, while the protocol-violating second error must be routed to
 * the RxJavaPlugins undeliverable-error handler instead of being swallowed.
 */
@Test
public void manyErrors() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
subscriber.onError(new TestException("First"));
// everything below violates the Reactive Streams protocol on purpose
subscriber.onNext(1);
subscriber.onError(new TestException("Second"));
subscriber.onComplete();
}
}.withLatestFrom(Flowable.just(2), Flowable.just(3), new Function3<Integer, Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b, Integer c) throws Exception {
return a;
}
})
.to(TestHelper.testConsumer())
.assertFailureAndMessage(TestException.class, "First");
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
} finally {
// restore the global error handler changed by trackPluginErrors()
RxJavaPlugins.reset();
}
}
/**
 * Same protocol-violation scenario as {@link #manyErrors()} but the double onError
 * comes from the "other" source of the single-other overload.
 */
@Test
public void otherErrors() {
List<Throwable> errors = TestHelper.trackPluginErrors();
try {
Flowable.just(1)
.withLatestFrom(new Flowable<Integer>() {
@Override
protected void subscribeActual(Subscriber<? super Integer> s) {
s.onSubscribe(new BooleanSubscription());
s.onError(new TestException("First"));
// deliberate protocol violation: second terminal event
s.onError(new TestException("Second"));
}
}, new BiFunction<Integer, Integer, Integer>() {
@Override
public Integer apply(Integer a, Integer b) throws Exception {
return a + b;
}
})
.to(TestHelper.<Integer>testConsumer())
.assertFailureAndMessage(TestException.class, "First");
TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
} finally {
RxJavaPlugins.reset();
}
}
/** A combiner returning null (single-other overload) must fail with NullPointerException. */
@Test
public void combineToNull1() {
Flowable.just(1)
.withLatestFrom(Flowable.just(2), new BiFunction<Integer, Integer, Object>() {
@Override
public Object apply(Integer a, Integer b) throws Exception {
return null;
}
})
.test()
.assertFailure(NullPointerException.class);
}
/** A combiner returning null (Iterable overload) must fail with NullPointerException. */
@Test
public void combineToNull2() {
Flowable.just(1)
.withLatestFrom(Arrays.asList(Flowable.just(2), Flowable.just(3)), new Function<Object[], Object>() {
@Override
public Object apply(Object[] o) throws Exception {
return null;
}
})
.test()
.assertFailure(NullPointerException.class);
}
/**
 * Degenerate case of an empty other-source array: the combiner still runs and
 * its null result must produce the documented NPE message.
 */
@Test
public void zeroOtherCombinerReturnsNull() {
Flowable.just(1)
.withLatestFrom(new Flowable[0], Functions.justFunction(null))
.to(TestHelper.testConsumer())
.assertFailureAndMessage(NullPointerException.class, "The combiner returned a null value");
}
/**
 * When a main value is dropped for lack of a latest "other" value, the outstanding
 * request(1) must be remembered so the next combinable main value is still delivered.
 */
@Test
public void singleRequestNotForgottenWhenNoData() {
PublishProcessor<Integer> source = PublishProcessor.create();
PublishProcessor<Integer> other = PublishProcessor.create();
Flowable<Integer> result = source.withLatestFrom(other, COMBINER);
TestSubscriber<Integer> ts = new TestSubscriber<>(0L);
result.subscribe(ts);
ts.request(1);
// dropped: "other" has no value yet, but the request must survive
source.onNext(1);
ts.assertNoValues();
other.onNext(1);
ts.assertNoValues();
source.onNext(2);
ts.assertValue((2 << 8) + 1);
}
/**
 * A cold main source combined with a never-emitting other: all values are dropped
 * and the sequence completes with an empty result even under request(1).
 */
@Test
public void coldSourceConsumedWithoutOther() {
Flowable.range(1, 10).withLatestFrom(Flowable.never(),
new BiFunction<Integer, Object, Object>() {
@Override
public Object apply(Integer a, Object b) throws Exception {
return a;
}
})
.test(1)
.assertResult();
}
/** Same as {@link #coldSourceConsumedWithoutOther()} for the three-other overload. */
@Test
public void coldSourceConsumedWithoutManyOthers() {
Flowable.range(1, 10).withLatestFrom(Flowable.never(), Flowable.never(), Flowable.never(),
new Function4<Integer, Object, Object, Object, Object>() {
@Override
public Object apply(Integer a, Object b, Object c, Object d) throws Exception {
return a;
}
})
.test(1)
.assertResult();
}
/**
 * Races subscribe against cancel: regardless of interleaving, no signals may reach
 * the subscriber and every participating processor must end up unsubscribed.
 */
@Test
public void otherOnSubscribeRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp0 = PublishProcessor.create();
final PublishProcessor<Integer> pp1 = PublishProcessor.create();
final PublishProcessor<Integer> pp2 = PublishProcessor.create();
final PublishProcessor<Integer> pp3 = PublishProcessor.create();
final Flowable<Object> source = pp0.withLatestFrom(pp1, pp2, pp3, new Function4<Object, Integer, Integer, Integer, Object>() {
@Override
public Object apply(Object a, Integer b, Integer c, Integer d)
throws Exception {
return a;
}
});
final TestSubscriber<Object> ts = new TestSubscriber<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
source.subscribe(ts);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
ts.cancel();
}
};
TestHelper.race(r1, r2);
ts.assertEmpty();
assertFalse(pp0.hasSubscribers());
assertFalse(pp1.hasSubscribers());
assertFalse(pp2.hasSubscribers());
assertFalse(pp3.hasSubscribers());
}
}
/**
 * Races subscribe against an empty "other" completing: either way the result must
 * terminate as an empty, completed sequence with all processors unsubscribed.
 */
@Test
public void otherCompleteRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final PublishProcessor<Integer> pp0 = PublishProcessor.create();
final PublishProcessor<Integer> pp1 = PublishProcessor.create();
final PublishProcessor<Integer> pp2 = PublishProcessor.create();
final PublishProcessor<Integer> pp3 = PublishProcessor.create();
final Flowable<Object> source = pp0.withLatestFrom(pp1, pp2, pp3, new Function4<Object, Integer, Integer, Integer, Object>() {
@Override
public Object apply(Object a, Integer b, Integer c, Integer d)
throws Exception {
return a;
}
});
final TestSubscriber<Object> ts = new TestSubscriber<>();
Runnable r1 = new Runnable() {
@Override
public void run() {
source.subscribe(ts);
}
};
Runnable r2 = new Runnable() {
@Override
public void run() {
// an empty other completing makes combination impossible -> empty completion
pp1.onComplete();
}
};
TestHelper.race(r1, r2);
ts.assertResult();
assertFalse(pp0.hasSubscribers());
assertFalse(pp1.hasSubscribers());
assertFalse(pp2.hasSubscribers());
assertFalse(pp3.hasSubscribers());
}
}
}
| FlowableWithLatestFromTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncRouteWithErrorTest.java | {
"start": 1253,
"end": 4437
} | class ____ extends ContextTestSupport {
private static String route = "";
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
route = "";
}
@Test
public void testAsyncRouteWithError() throws Exception {
getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:result").expectedMessageCount(0);
// send a request reply to the direct start endpoint
try {
template.requestBody("direct:start", "Hello");
fail("Should have thrown exception");
} catch (CamelExecutionException e) {
// expected an execution exception
assertEquals("Damn forced by unit test", e.getCause().getMessage());
}
// we should run before the async processor that sets B
route += "A";
assertMockEndpointsSatisfied();
assertEquals("BA", route);
}
@Test
public void testAsyncRouteWithTypeConverted() throws Exception {
getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:result").expectedMessageCount(0);
// send a request reply to the direct start endpoint, but will use
// future type converter that will wait for the response
try {
template.requestBody("direct:start", "Hello", String.class);
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
// expected an execution exception
assertEquals("Damn forced by unit test", e.getCause().getMessage());
}
// we should wait for the async response as we ask for the result as a
// String body
route += "A";
assertMockEndpointsSatisfied();
assertEquals("BA", route);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// we start this route async
from("direct:start")
// we play a bit with the message
.transform(body().append(" World"))
// now turn the route into async from this point forward
// the caller will have a Future<Exchange> returned as
// response in OUT
// to be used to grap the async response when he fell like
// it
.threads()
// from this point forward this is the async route doing its
// work
// so we do a bit of delay to simulate heavy work that takes
// time
.to("mock:foo").delay(100)
// and we also work with the message so we can prepare a
// response
.process(new MyProcessor())
// and we use mocks for unit testing
.to("mock:result");
}
};
}
public static | AsyncRouteWithErrorTest |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/support/BeanFactoryGenericsTests.java | {
"start": 38518,
"end": 38786
} | class ____ {
public CollectionDependentBean(NamedUrlList list, NamedUrlSet set, NamedUrlMap map) {
assertThat(list).hasSize(1);
assertThat(set).hasSize(1);
assertThat(map).hasSize(1);
}
}
@SuppressWarnings("serial")
public static | CollectionDependentBean |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledForJreRange.java | {
"start": 4729,
"end": 5264
} | enum ____ does not exist for a particular JRE
* version, you can specify the maximum version via
* {@link #maxVersion() maxVersion} instead.
*
* <p>Defaults to {@link JRE#UNDEFINED UNDEFINED}, which will be interpreted
* as {@link JRE#OTHER OTHER} if the {@link #maxVersion() maxVersion} is not
* set.
*
* @see JRE
* @see #maxVersion()
*/
JRE max() default JRE.UNDEFINED;
/**
* Java Runtime Environment version which is used as the lower boundary for
* the version range that determines if the annotated | constant |
java | quarkusio__quarkus | extensions/hibernate-search-standalone-elasticsearch/deployment/src/main/java/io/quarkus/hibernate/search/standalone/elasticsearch/deployment/HibernateSearchStandaloneProcessor.java | {
"start": 3581,
"end": 15145
} | class ____ {
private static final Logger LOG = Logger.getLogger(HibernateSearchStandaloneProcessor.class);
@BuildStep
void registerAnnotations(BuildProducer<AdditionalBeanBuildItem> additionalBeans,
BuildProducer<BeanDefiningAnnotationBuildItem> beanDefiningAnnotations) {
// add the @SearchExtension class
// otherwise it won't be registered as qualifier
additionalBeans.produce(AdditionalBeanBuildItem.builder()
.addBeanClasses(HibernateSearchTypes.SEARCH_EXTENSION.toString())
.build());
// Register the default scope for @SearchExtension and make such beans unremovable by default
beanDefiningAnnotations
.produce(new BeanDefiningAnnotationBuildItem(HibernateSearchTypes.SEARCH_EXTENSION, DotNames.APPLICATION_SCOPED,
false));
}
@BuildStep
public void configure(CombinedIndexBuildItem combinedIndexBuildItem,
BuildProducer<HibernateSearchStandaloneEnabledBuildItem> enabled) {
IndexView index = combinedIndexBuildItem.getIndex();
Collection<AnnotationInstance> indexedAnnotations = index.getAnnotations(INDEXED);
if (indexedAnnotations.isEmpty()) {
// we don't have any indexed entity, we can disable Hibernate Search
return;
}
Set<String> backendNamesForIndexedEntities = new LinkedHashSet<>();
for (AnnotationInstance indexedAnnotation : indexedAnnotations) {
AnnotationValue backendNameValue = indexedAnnotation.value("backend");
String backendName = backendNameValue == null ? null : backendNameValue.asString();
backendNamesForIndexedEntities.add(backendName);
}
Map<String, Set<String>> backendAndIndexNamesForSearchExtensions = collectBackendAndIndexNamesForSearchExtensions(
index);
Set<String> rootAnnotationMappedClassNames = collectRootAnnotationMappedClassNames(index);
var mapperContext = new HibernateSearchStandaloneElasticsearchMapperContext(backendNamesForIndexedEntities,
backendAndIndexNamesForSearchExtensions);
enabled.produce(new HibernateSearchStandaloneEnabledBuildItem(mapperContext, rootAnnotationMappedClassNames));
}
@BuildStep
void enableBackend(Optional<HibernateSearchStandaloneEnabledBuildItem> enabled,
HibernateSearchStandaloneBuildTimeConfig buildTimeConfig,
BuildProducer<HibernateSearchBackendElasticsearchEnabledBuildItem> elasticsearchEnabled) {
if (!enabled.isPresent()) {
// No boot
return;
}
elasticsearchEnabled.produce(new HibernateSearchBackendElasticsearchEnabledBuildItem(enabled.get().mapperContext,
buildTimeConfig.backends()));
}
private static Map<String, Set<String>> collectBackendAndIndexNamesForSearchExtensions(
IndexView index) {
Map<String, Set<String>> result = new LinkedHashMap<>();
for (AnnotationInstance annotation : index.getAnnotations(HibernateSearchTypes.SEARCH_EXTENSION)) {
var backendName = annotation.value("backend");
var indexName = annotation.value("index");
Set<String> indexNames = result
.computeIfAbsent(backendName == null ? null : backendName.asString(), ignored -> new LinkedHashSet<>());
if (indexName != null) {
indexNames.add(indexName.asString());
}
}
return result;
}
private static Set<String> collectRootAnnotationMappedClassNames(IndexView index) {
// Look for classes annotated with annotations meta-annotated with @RootMapping:
// those classes will have their annotations processed.
// Built-in annotations from Hibernate Search must be added explicitly,
// because Hibernate Search may not be part of the index.
Set<DotName> rootMappingAnnotationNames = new LinkedHashSet<>(BUILT_IN_ROOT_MAPPING_ANNOTATIONS);
// We'll also consider @Indexed as a "root mapping" annotation,
// even if that's not true in Hibernate Search,
// because it's more convenient with the Standalone mapper.
rootMappingAnnotationNames.add(INDEXED);
// Users can theoretically declare their own root mapping annotations
// (replacements for @ProjectionConstructor, for example),
// so we need to consider those as well.
for (AnnotationInstance rootMappingAnnotationInstance : index.getAnnotations(ROOT_MAPPING)) {
rootMappingAnnotationNames.add(rootMappingAnnotationInstance.target().asClass().name());
}
// We'll collect all classes annotated with "root mapping" annotations
// anywhere (type level, constructor, ...)
Set<String> rootAnnotationMappedClassNames = new LinkedHashSet<>();
for (DotName rootMappingAnnotationName : rootMappingAnnotationNames) {
for (AnnotationInstance annotation : index.getAnnotations(rootMappingAnnotationName)) {
rootAnnotationMappedClassNames.add(JandexUtil.getEnclosingClass(annotation).name().toString());
}
}
return rootAnnotationMappedClassNames;
}
@Record(ExecutionTime.RUNTIME_INIT)
@BuildStep
void defineSearchMappingBean(Optional<HibernateSearchStandaloneEnabledBuildItem> enabled,
HibernateSearchStandaloneRecorder recorder,
BuildProducer<SyntheticBeanBuildItem> syntheticBeanBuildItemBuildProducer) {
if (!enabled.isPresent()) {
// No boot
return;
}
syntheticBeanBuildItemBuildProducer.produce(SyntheticBeanBuildItem
.configure(SearchMapping.class)
// NOTE: this is using ApplicationScoped and not Singleton, by design, in order to be mockable
// See https://github.com/quarkusio/quarkus/issues/16437
.scope(ApplicationScoped.class)
.unremovable()
.addQualifier(Default.class)
.setRuntimeInit()
.createWith(recorder.createSearchMappingFunction(enabled.get().mapperContext))
.destroyer(BeanDestroyer.AutoCloseableDestroyer.class)
// This startup() call is necessary in order to trigger Arc's usage checks (fail startup if bean injected when inactive).
.startup()
.checkActive(recorder.checkActiveSupplier())
.done());
}
@BuildStep
@Record(ExecutionTime.STATIC_INIT)
@Consume(BeanContainerBuildItem.class) // Pre-boot needs access to the CDI container
public void preBoot(Optional<HibernateSearchStandaloneEnabledBuildItem> enabled,
RecorderContext recorderContext,
HibernateSearchStandaloneRecorder recorder) {
if (enabled.isEmpty()) {
// No pre-boot
return;
}
// Make it possible to record the settings as bytecode:
recorderContext.registerSubstitution(ElasticsearchVersion.class,
String.class, ElasticsearchVersionSubstitution.class);
recorder.preBoot(enabled.get().mapperContext, enabled.get().getRootAnnotationMappedClassNames());
}
@BuildStep
@Record(ExecutionTime.RUNTIME_INIT)
@Consume(BeanContainerBuildItem.class)
void boot(Optional<HibernateSearchStandaloneEnabledBuildItem> enabled,
HibernateSearchStandaloneRecorder recorder,
BuildProducer<ServiceStartBuildItem> serviceStart) {
if (enabled.isEmpty()) {
// No boot
return;
}
serviceStart.produce(new ServiceStartBuildItem("Hibernate Search Standalone"));
}
@BuildStep(onlyIf = IsDevServicesSupportedByLaunchMode.class)
void devServices(Optional<HibernateSearchStandaloneEnabledBuildItem> enabled,
HibernateSearchStandaloneBuildTimeConfig buildTimeConfig,
BuildProducer<DevservicesElasticsearchBuildItem> buildItemBuildProducer,
BuildProducer<DevServicesAdditionalConfigBuildItem> devServicesAdditionalConfigProducer) {
if (enabled.isEmpty()) {
// No dev services necessary
return;
}
// Currently we only start dev-services for the default backend
// See https://github.com/quarkusio/quarkus/issues/24011
var defaultBackendConfig = buildTimeConfig.backends().get(null);
if (defaultBackendConfig == null || !defaultBackendConfig.version().isPresent()) {
// If the version is not set, the default backend is not in use.
return;
}
Optional<Boolean> active = ConfigUtils.getFirstOptionalValue(
mapperPropertyKeys("active"), Boolean.class);
if (active.isPresent() && !active.get()) {
// If Hibernate Search is deactivated, we don't want to trigger dev services.
return;
}
ElasticsearchVersion version = defaultBackendConfig.version().get();
String hostsPropertyKey = backendPropertyKey(null, null,
"hosts");
buildItemBuildProducer.produce(new DevservicesElasticsearchBuildItem(hostsPropertyKey,
version.versionString(),
Distribution.valueOf(version.distribution().toString().toUpperCase())));
// Force schema generation when using dev services
List<String> propertyKeysIndicatingHostsConfigured = defaultBackendPropertyKeys("hosts");
if (!ConfigUtils.isAnyPropertyPresent(propertyKeysIndicatingHostsConfigured)) {
String schemaManagementStrategyPropertyKey = mapperPropertyKey("schema-management.strategy");
if (!ConfigUtils.isPropertyPresent(schemaManagementStrategyPropertyKey)) {
devServicesAdditionalConfigProducer
.produce(new DevServicesAdditionalConfigBuildItem(devServicesConfig -> {
if (propertyKeysIndicatingHostsConfigured.stream()
.anyMatch(devServicesConfig::containsKey)) {
String forcedValue = "drop-and-create-and-drop";
LOG.infof("Setting %s=%s to initialize Dev Services managed Elasticsearch server",
schemaManagementStrategyPropertyKey, forcedValue);
return Map.of(schemaManagementStrategyPropertyKey, forcedValue);
} else {
return Map.of();
}
}));
}
}
}
@Record(ExecutionTime.RUNTIME_INIT)
@BuildStep(onlyIf = HibernateSearchStandaloneManagementEnabled.class)
void createManagementRoutes(BuildProducer<RouteBuildItem> routes,
HibernateSearchStandaloneRecorder recorder,
HibernateSearchStandaloneBuildTimeConfig hibernateSearchStandaloneBuildTimeConfig) {
String managementRootPath = hibernateSearchStandaloneBuildTimeConfig.management().rootPath();
routes.produce(RouteBuildItem.newManagementRoute(
managementRootPath + (managementRootPath.endsWith("/") ? "" : "/") + "reindex")
.withRoutePathConfigKey("quarkus.hibernate-search-standalone.management.root-path")
.withRequestHandler(recorder.managementHandler())
.displayOnNotFoundPage()
.build());
}
}
| HibernateSearchStandaloneProcessor |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/JsonPointer.java | {
"start": 31235,
"end": 32614
} | class ____ {
public final PointerSegment next;
public final String property;
public final int index;
// Offset within external buffer, updated when constructing
public int pathOffset;
// And we actually need 2-way traversal, it turns out so:
public PointerSegment prev;
public PointerSegment(PointerSegment next, String pn, int ix) {
this.next = next;
property = pn;
index = ix;
// Ok not the cleanest thing but...
if (next != null) {
next.prev = this;
}
}
}
/*
/**********************************************************
/* Support for JDK serialization (2.14+)
/**********************************************************
*/
// Since 2.14: needed for efficient JDK serializability
private Object writeReplace() {
// 11-Oct-2022, tatu: very important, must serialize just contents!
return new Serialization(toString());
}
/**
* This must only exist to allow both final properties and implementation of
* Externalizable/Serializable for JsonPointer.
* Note that here we do not store offset but simply use (and expect use)
* full path, from which we need to decode actual structure.
*
* @since 2.14
*/
static | PointerSegment |
java | google__error-prone | test_helpers/src/test/java/com/google/errorprone/CompilationTestHelperTest.java | {
"start": 15042,
"end": 15274
} | class ____ {}
@Test
public void withClassPath_success() {
compilationHelper
.addSourceLines(
"Test.java",
"import " + WithClassPath.class.getCanonicalName() + ";",
" | WithClassPathSuper |
java | grpc__grpc-java | binder/src/testFixtures/java/io/grpc/binder/PeerUidTestHelper.java | {
"start": 278,
"end": 1585
} | class ____ {
/** The UID of the calling package is set with the value of this key. */
public static final Metadata.Key<Integer> UID_KEY =
Metadata.Key.of("binder-remote-uid-for-unit-testing", PeerUidTestMarshaller.INSTANCE);
/**
* Creates an interceptor that associates the {@link PeerUids#REMOTE_PEER} key in the request
* {@link Context} with a UID provided by the client in the {@link #UID_KEY} request header, if
* present.
*
* <p>The returned interceptor works with any gRPC transport but is meant for in-process unit
* testing of gRPC/binder services that depend on {@link PeerUids}.
*/
public static ServerInterceptor newTestPeerIdentifyingServerInterceptor() {
return new ServerInterceptor() {
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(
ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) {
if (headers.containsKey(UID_KEY)) {
Context context =
Context.current().withValue(PeerUids.REMOTE_PEER, new PeerUid(headers.get(UID_KEY)));
return Contexts.interceptCall(context, call, headers, next);
}
return next.startCall(call, headers);
}
};
}
private PeerUidTestHelper() {}
private static | PeerUidTestHelper |
java | netty__netty | buffer/src/main/java/io/netty/buffer/AdaptivePoolingAllocator.java | {
"start": 22126,
"end": 22233
} | interface ____ {
ChunkController create(MagazineGroup group);
}
private | ChunkControllerFactory |
java | google__gson | gson/src/main/java/com/google/gson/stream/JsonReader.java | {
"start": 7730,
"end": 61429
} | class ____ implements Closeable {
private static final long MIN_INCOMPLETE_INTEGER = Long.MIN_VALUE / 10;
private static final int PEEKED_NONE = 0;
private static final int PEEKED_BEGIN_OBJECT = 1;
private static final int PEEKED_END_OBJECT = 2;
private static final int PEEKED_BEGIN_ARRAY = 3;
private static final int PEEKED_END_ARRAY = 4;
private static final int PEEKED_TRUE = 5;
private static final int PEEKED_FALSE = 6;
private static final int PEEKED_NULL = 7;
private static final int PEEKED_SINGLE_QUOTED = 8;
private static final int PEEKED_DOUBLE_QUOTED = 9;
private static final int PEEKED_UNQUOTED = 10;
/** When this is returned, the string value is stored in peekedString. */
private static final int PEEKED_BUFFERED = 11;
private static final int PEEKED_SINGLE_QUOTED_NAME = 12;
private static final int PEEKED_DOUBLE_QUOTED_NAME = 13;
private static final int PEEKED_UNQUOTED_NAME = 14;
/** When this is returned, the integer value is stored in peekedLong. */
private static final int PEEKED_LONG = 15;
private static final int PEEKED_NUMBER = 16;
private static final int PEEKED_EOF = 17;
/* State machine when parsing numbers */
private static final int NUMBER_CHAR_NONE = 0;
private static final int NUMBER_CHAR_SIGN = 1;
private static final int NUMBER_CHAR_DIGIT = 2;
private static final int NUMBER_CHAR_DECIMAL = 3;
private static final int NUMBER_CHAR_FRACTION_DIGIT = 4;
private static final int NUMBER_CHAR_EXP_E = 5;
private static final int NUMBER_CHAR_EXP_SIGN = 6;
private static final int NUMBER_CHAR_EXP_DIGIT = 7;
/** The input JSON. */
private final Reader in;
private Strictness strictness = Strictness.LEGACY_STRICT;
// Default nesting limit is based on
// https://github.com/square/moshi/blob/parent-1.15.0/moshi/src/main/java/com/squareup/moshi/JsonReader.java#L228-L230
static final int DEFAULT_NESTING_LIMIT = 255;
private int nestingLimit = DEFAULT_NESTING_LIMIT;
static final int BUFFER_SIZE = 1024;
/**
* Use a manual buffer to easily read and unread upcoming characters, and also so we can create
* strings without an intermediate StringBuilder. We decode literals directly out of this buffer,
* so it must be at least as long as the longest token that can be reported as a number.
*/
private final char[] buffer = new char[BUFFER_SIZE];
private int pos = 0;
private int limit = 0;
private int lineNumber = 0;
private int lineStart = 0;
int peeked = PEEKED_NONE;
/**
* A peeked value that was composed entirely of digits with an optional leading dash. Positive
* values may not have a leading 0.
*/
private long peekedLong;
/**
* The number of characters in a peeked number literal. Increment 'pos' by this after reading a
* number.
*/
private int peekedNumberLength;
/**
* A peeked string that should be parsed on the next double, long or string. This is populated
* before a numeric value is parsed and used if that parsing fails.
*/
private String peekedString;
/** The nesting stack. Using a manual array rather than an ArrayList saves 20%. */
private int[] stack = new int[32];
private int stackSize = 0;
{
stack[stackSize++] = JsonScope.EMPTY_DOCUMENT;
}
/*
* The path members. It corresponds directly to stack: At indices where the
* stack contains an object (EMPTY_OBJECT, DANGLING_NAME or NONEMPTY_OBJECT),
* pathNames contains the name at this scope. Where it contains an array
* (EMPTY_ARRAY, NONEMPTY_ARRAY) pathIndices contains the current index in
* that array. Otherwise the value is undefined, and we take advantage of that
* by incrementing pathIndices when doing so isn't useful.
*/
private String[] pathNames = new String[32];
private int[] pathIndices = new int[32];
/**
 * Creates a new instance that reads a JSON-encoded stream from {@code in}.
 *
 * @param in the character stream to read JSON from; must not be null.
 */
public JsonReader(Reader in) {
  if (in == null) {
    throw new NullPointerException("in == null");
  }
  this.in = in;
}
/**
* Sets the strictness of this reader.
*
* @deprecated Please use {@link #setStrictness(Strictness)} instead. {@code
* JsonReader.setLenient(true)} should be replaced by {@code
* JsonReader.setStrictness(Strictness.LENIENT)} and {@code JsonReader.setLenient(false)}
* should be replaced by {@code JsonReader.setStrictness(Strictness.LEGACY_STRICT)}.<br>
* However, if you used {@code setLenient(false)} before, you might prefer {@link
* Strictness#STRICT} now instead.
* @param lenient whether this reader should be lenient. If true, the strictness is set to {@link
* Strictness#LENIENT}. If false, the strictness is set to {@link Strictness#LEGACY_STRICT}.
* @see #setStrictness(Strictness)
*/
@Deprecated
// Don't specify @InlineMe, so caller with `setLenient(false)` becomes aware of new
// Strictness.STRICT
@SuppressWarnings("InlineMeSuggester")
public final void setLenient(boolean lenient) {
  // Map the legacy boolean onto the modern Strictness enum.
  if (lenient) {
    setStrictness(Strictness.LENIENT);
  } else {
    setStrictness(Strictness.LEGACY_STRICT);
  }
}
/**
* Returns true if the {@link Strictness} of this reader is equal to {@link Strictness#LENIENT}.
*
* @see #getStrictness()
*/
public final boolean isLenient() {
  // Lenient is the only strictness this legacy accessor reports as true.
  return Strictness.LENIENT == strictness;
}
/**
* Configures how liberal this parser is in what it accepts.
*
* <p>In {@linkplain Strictness#STRICT strict} mode, the parser only accepts JSON in accordance
* with <a href="https://www.ietf.org/rfc/rfc8259.txt">RFC 8259</a>. In {@linkplain
* Strictness#LEGACY_STRICT legacy strict} mode (the default), only JSON in accordance with the
* RFC 8259 is accepted, with a few exceptions denoted below for backwards compatibility reasons.
* In {@linkplain Strictness#LENIENT lenient} mode, all sort of non-spec compliant JSON is
* accepted (see below).
*
* <dl>
* <dt>{@link Strictness#STRICT}
* <dd>In strict mode, only input compliant with RFC 8259 is accepted.
* <dt>{@link Strictness#LEGACY_STRICT}
* <dd>In legacy strict mode, the following departures from RFC 8259 are accepted:
* <ul>
* <li>JsonReader allows the literals {@code true}, {@code false} and {@code null} to have
* any capitalization, for example {@code fAlSe} or {@code NULL}
* <li>JsonReader supports the escape sequence {@code \'}, representing a {@code '}
* (single-quote)
* <li>JsonReader supports the escape sequence <code>\<i>LF</i></code> (with {@code LF}
* being the Unicode character {@code U+000A}), resulting in a {@code LF} within the
* read JSON string
* <li>JsonReader allows unescaped control characters ({@code U+0000} through {@code
* U+001F})
* </ul>
* <dt>{@link Strictness#LENIENT}
* <dd>In lenient mode, all input that is accepted in legacy strict mode is accepted in addition
* to the following departures from RFC 8259:
* <ul>
* <li>Streams that start with the <a href="#nonexecuteprefix">non-execute prefix</a>,
* {@code ")]}'\n"}
* <li>Streams that include multiple top-level values. With legacy strict or strict
* parsing, each stream must contain exactly one top-level value.
* <li>Numbers may be {@link Double#isNaN() NaNs} or {@link Double#isInfinite()
* infinities} represented by {@code NaN} and {@code (-)Infinity} respectively.
* <li>End of line comments starting with {@code //} or {@code #} and ending with a
* newline character.
* <li>C-style comments starting with {@code /*} and ending with {@code *}{@code /}. Such
* comments may not be nested.
* <li>Names that are unquoted or {@code 'single quoted'}.
* <li>Strings that are unquoted or {@code 'single quoted'}.
* <li>Array elements separated by {@code ;} instead of {@code ,}.
* <li>Unnecessary array separators. These are interpreted as if null was the omitted
* value.
* <li>Names and values separated by {@code =} or {@code =>} instead of {@code :}.
* <li>Name/value pairs separated by {@code ;} instead of {@code ,}.
* </ul>
* </dl>
*
* @param strictness the new strictness value of this reader. May not be {@code null}.
* @see #getStrictness()
* @since 2.11.0
*/
public final void setStrictness(Strictness strictness) {
  // requireNonNull both validates and returns the value, so assign in one step.
  this.strictness = Objects.requireNonNull(strictness);
}
/**
* Returns the {@linkplain Strictness strictness} of this reader.
*
* @see #setStrictness(Strictness)
* @since 2.11.0
*/
public final Strictness getStrictness() {
  // Plain accessor for the configured strictness.
  return this.strictness;
}
/**
* Sets the nesting limit of this reader.
*
* <p>The nesting limit defines how many JSON arrays or objects may be open at the same time. For
* example a nesting limit of 0 means no arrays or objects may be opened at all, a nesting limit
* of 1 means one array or object may be open at the same time, and so on. So a nesting limit of 3
* allows reading the JSON data <code>[{"a":[true]}]</code>, but for a nesting limit of 2 it would
* fail at the inner {@code [true]}.
*
* <p>The nesting limit can help to protect against a {@link StackOverflowError} when recursive
* {@link com.google.gson.TypeAdapter} implementations process deeply nested JSON data.
*
* <p>The default nesting limit is {@value #DEFAULT_NESTING_LIMIT}.
*
* @throws IllegalArgumentException if the nesting limit is negative.
* @since 2.12.0
* @see #getNestingLimit()
*/
public final void setNestingLimit(int limit) {
  // Accept only non-negative limits; everything else is a caller error.
  if (limit >= 0) {
    this.nestingLimit = limit;
  } else {
    throw new IllegalArgumentException("Invalid nesting limit: " + limit);
  }
}
/**
* Returns the nesting limit of this reader.
*
* @since 2.12.0
* @see #setNestingLimit(int)
*/
public final int getNestingLimit() {
  // Plain accessor for the configured nesting limit.
  return this.nestingLimit;
}
/**
* Consumes the next token from the JSON stream and asserts that it is the beginning of a new
* array.
*
* @throws IllegalStateException if the next token is not the beginning of an array.
*/
public void beginArray() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Guard clause: anything other than '[' here is a caller error.
  if (p != PEEKED_BEGIN_ARRAY) {
    throw unexpectedTokenError("BEGIN_ARRAY");
  }
  push(JsonScope.EMPTY_ARRAY);
  pathIndices[stackSize - 1] = 0; // start counting elements of the new array
  peeked = PEEKED_NONE;
}
/**
* Consumes the next token from the JSON stream and asserts that it is the end of the current
* array.
*
* @throws IllegalStateException if the next token is not the end of an array.
*/
public void endArray() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Guard clause: anything other than ']' here is a caller error.
  if (p != PEEKED_END_ARRAY) {
    throw unexpectedTokenError("END_ARRAY");
  }
  stackSize--;
  pathIndices[stackSize - 1]++; // the enclosing scope gained one finished value
  peeked = PEEKED_NONE;
}
/**
* Consumes the next token from the JSON stream and asserts that it is the beginning of a new
* object.
*
* @throws IllegalStateException if the next token is not the beginning of an object.
*/
public void beginObject() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Guard clause: anything other than '{' here is a caller error.
  if (p != PEEKED_BEGIN_OBJECT) {
    throw unexpectedTokenError("BEGIN_OBJECT");
  }
  push(JsonScope.EMPTY_OBJECT);
  peeked = PEEKED_NONE;
}
/**
* Consumes the next token from the JSON stream and asserts that it is the end of the current
* object.
*
* @throws IllegalStateException if the next token is not the end of an object.
*/
public void endObject() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Guard clause: anything other than '}' here is a caller error.
  if (p != PEEKED_END_OBJECT) {
    throw unexpectedTokenError("END_OBJECT");
  }
  stackSize--;
  pathNames[stackSize] = null; // Free the last path name so that it can be garbage collected!
  pathIndices[stackSize - 1]++; // the enclosing scope gained one finished value
  peeked = PEEKED_NONE;
}
/** Returns true if the current array or object has another element. */
public boolean hasNext() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Only scope-closing tokens and end-of-input mean "nothing more here".
  switch (p) {
    case PEEKED_END_OBJECT:
    case PEEKED_END_ARRAY:
    case PEEKED_EOF:
      return false;
    default:
      return true;
  }
}
/**
 * Returns the type of the next token without consuming it.
 *
 * @return the {@link JsonToken} corresponding to the next token in the stream.
 */
public JsonToken peek() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Map the internal PEEKED_* code onto the public token type.
  switch (p) {
    case PEEKED_BEGIN_OBJECT:
      return JsonToken.BEGIN_OBJECT;
    case PEEKED_END_OBJECT:
      return JsonToken.END_OBJECT;
    case PEEKED_BEGIN_ARRAY:
      return JsonToken.BEGIN_ARRAY;
    case PEEKED_END_ARRAY:
      return JsonToken.END_ARRAY;
    case PEEKED_SINGLE_QUOTED_NAME:
    case PEEKED_DOUBLE_QUOTED_NAME:
    case PEEKED_UNQUOTED_NAME:
      return JsonToken.NAME;
    case PEEKED_TRUE:
    case PEEKED_FALSE:
      return JsonToken.BOOLEAN;
    case PEEKED_NULL:
      return JsonToken.NULL;
    case PEEKED_SINGLE_QUOTED:
    case PEEKED_DOUBLE_QUOTED:
    case PEEKED_UNQUOTED:
    case PEEKED_BUFFERED:
      return JsonToken.STRING;
    case PEEKED_LONG:
    case PEEKED_NUMBER:
      return JsonToken.NUMBER;
    case PEEKED_EOF:
      return JsonToken.END_DOCUMENT;
    default:
      // All PEEKED_* constants are handled above; reaching here indicates a
      // JsonReader bug. Include the value so the failure is diagnosable.
      throw new AssertionError("Unknown peeked value: " + p);
  }
}
/**
 * Consumes whatever separators and scope bookkeeping the current position
 * requires, classifies the next token, stores its PEEKED_* code in
 * {@code peeked} and returns that code. The token's value itself is not
 * consumed here.
 */
@SuppressWarnings("fallthrough")
int doPeek() throws IOException {
  int peekStack = stack[stackSize - 1];
  if (peekStack == JsonScope.EMPTY_ARRAY) {
    stack[stackSize - 1] = JsonScope.NONEMPTY_ARRAY;
  } else if (peekStack == JsonScope.NONEMPTY_ARRAY) {
    // Look for a comma before the next element.
    int c = nextNonWhitespace(true);
    switch (c) {
      case ']':
        peeked = PEEKED_END_ARRAY;
        return peeked;
      case ';':
        checkLenient(); // fall-through
      case ',':
        break;
      default:
        throw syntaxError("Unterminated array");
    }
  } else if (peekStack == JsonScope.EMPTY_OBJECT || peekStack == JsonScope.NONEMPTY_OBJECT) {
    stack[stackSize - 1] = JsonScope.DANGLING_NAME;
    // Look for a comma before the next element.
    if (peekStack == JsonScope.NONEMPTY_OBJECT) {
      int c = nextNonWhitespace(true);
      switch (c) {
        case '}':
          peeked = PEEKED_END_OBJECT;
          return peeked;
        case ';':
          checkLenient(); // fall-through
        case ',':
          break;
        default:
          throw syntaxError("Unterminated object");
      }
    }
    // Expecting a property name: quoted, or (leniently) unquoted.
    int c = nextNonWhitespace(true);
    switch (c) {
      case '"':
        peeked = PEEKED_DOUBLE_QUOTED_NAME;
        return peeked;
      case '\'':
        checkLenient();
        peeked = PEEKED_SINGLE_QUOTED_NAME;
        return peeked;
      case '}':
        // '}' right after '{' closes an empty object; after a ',' it is an error.
        if (peekStack != JsonScope.NONEMPTY_OBJECT) {
          peeked = PEEKED_END_OBJECT;
          return peeked;
        } else {
          throw syntaxError("Expected name");
        }
      default:
        checkLenient();
        pos--; // Don't consume the first character in an unquoted string.
        if (isLiteral((char) c)) {
          peeked = PEEKED_UNQUOTED_NAME;
          return peeked;
        } else {
          throw syntaxError("Expected name");
        }
    }
  } else if (peekStack == JsonScope.DANGLING_NAME) {
    stack[stackSize - 1] = JsonScope.NONEMPTY_OBJECT;
    // Look for a colon before the value.
    int c = nextNonWhitespace(true);
    switch (c) {
      case ':':
        break;
      case '=':
        checkLenient();
        // Lenient mode also accepts "=>" as the name/value separator.
        if ((pos < limit || fillBuffer(1)) && buffer[pos] == '>') {
          pos++;
        }
        break;
      default:
        throw syntaxError("Expected ':'");
    }
  } else if (peekStack == JsonScope.EMPTY_DOCUMENT) {
    if (strictness == Strictness.LENIENT) {
      consumeNonExecutePrefix();
    }
    stack[stackSize - 1] = JsonScope.NONEMPTY_DOCUMENT;
  } else if (peekStack == JsonScope.NONEMPTY_DOCUMENT) {
    // A second top-level value is only permitted leniently.
    int c = nextNonWhitespace(false);
    if (c == -1) {
      peeked = PEEKED_EOF;
      return peeked;
    } else {
      checkLenient();
      pos--;
    }
  } else if (peekStack == JsonScope.CLOSED) {
    throw new IllegalStateException("JsonReader is closed");
  }
  int c = nextNonWhitespace(true);
  switch (c) {
    case ']':
      if (peekStack == JsonScope.EMPTY_ARRAY) {
        peeked = PEEKED_END_ARRAY;
        return peeked;
      }
      // fall-through to handle ",]"
    case ';':
    case ',':
      // In lenient mode, a 0-length literal in an array means 'null'.
      if (peekStack == JsonScope.EMPTY_ARRAY || peekStack == JsonScope.NONEMPTY_ARRAY) {
        checkLenient();
        pos--;
        peeked = PEEKED_NULL;
        return peeked;
      } else {
        throw syntaxError("Unexpected value");
      }
    case '\'':
      checkLenient();
      peeked = PEEKED_SINGLE_QUOTED;
      return peeked;
    case '"':
      peeked = PEEKED_DOUBLE_QUOTED;
      return peeked;
    case '[':
      peeked = PEEKED_BEGIN_ARRAY;
      return peeked;
    case '{':
      peeked = PEEKED_BEGIN_OBJECT;
      return peeked;
    default:
      pos--; // Don't consume the first character in a literal value.
  }
  // Not a structural character: try keywords, then numbers, then (leniently)
  // fall back to an unquoted string.
  int result = peekKeyword();
  if (result != PEEKED_NONE) {
    return result;
  }
  result = peekNumber();
  if (result != PEEKED_NONE) {
    return result;
  }
  if (!isLiteral(buffer[pos])) {
    throw syntaxError("Expected value");
  }
  checkLenient();
  peeked = PEEKED_UNQUOTED;
  return peeked;
}
/**
 * Attempts to match {@code true}, {@code false} or {@code null} at the current
 * position. On a match, consumes the keyword, stores the corresponding
 * PEEKED_* code in {@code peeked} and returns it; otherwise returns
 * PEEKED_NONE without consuming anything.
 */
private int peekKeyword() throws IOException {
  // Figure out which keyword we're matching against by its first character.
  char c = buffer[pos];
  String keyword;
  String keywordUpper;
  int peeking;
  // Look at the first letter to determine what keyword we are trying to match.
  if (c == 't' || c == 'T') {
    keyword = "true";
    keywordUpper = "TRUE";
    peeking = PEEKED_TRUE;
  } else if (c == 'f' || c == 'F') {
    keyword = "false";
    keywordUpper = "FALSE";
    peeking = PEEKED_FALSE;
  } else if (c == 'n' || c == 'N') {
    keyword = "null";
    keywordUpper = "NULL";
    peeking = PEEKED_NULL;
  } else {
    return PEEKED_NONE;
  }
  // Uppercased keywords are not allowed in STRICT mode
  boolean allowsUpperCased = strictness != Strictness.STRICT;
  // Confirm that chars [0..length) match the keyword.
  int length = keyword.length();
  for (int i = 0; i < length; i++) {
    if (pos + i >= limit && !fillBuffer(i + 1)) {
      return PEEKED_NONE;
    }
    c = buffer[pos + i];
    boolean matched = c == keyword.charAt(i) || (allowsUpperCased && c == keywordUpper.charAt(i));
    if (!matched) {
      return PEEKED_NONE;
    }
  }
  // Reject keywords that continue into a longer literal.
  if ((pos + length < limit || fillBuffer(length + 1)) && isLiteral(buffer[pos + length])) {
    return PEEKED_NONE; // Don't match trues, falsey or nullsoft!
  }
  // We've found the keyword followed either by EOF or by a non-literal character.
  pos += length;
  peeked = peeking;
  return peeked;
}
/**
 * Attempts to scan a well-formed JSON number at the current position using a
 * small character-class state machine ({@code NUMBER_CHAR_*}). On success
 * stores PEEKED_LONG (exact value in {@code peekedLong}) or PEEKED_NUMBER
 * (literal length in {@code peekedNumberLength}); returns PEEKED_NONE if the
 * text is not a number, leaving {@code pos} unchanged in that case.
 */
private int peekNumber() throws IOException {
  // Like nextNonWhitespace, this uses locals 'p' and 'l' to save inner-loop field access.
  char[] buffer = this.buffer;
  int p = pos;
  int l = limit;
  long value = 0; // Negative to accommodate Long.MIN_VALUE more easily.
  boolean negative = false;
  boolean fitsInLong = true;
  int last = NUMBER_CHAR_NONE;
  int i = 0;
  charactersOfNumber:
  for (; true; i++) {
    if (p + i == l) {
      if (i == buffer.length) {
        // Though this looks like a well-formed number, it's too long to continue reading. Give up
        // and let the application handle this as an unquoted literal.
        return PEEKED_NONE;
      }
      if (!fillBuffer(i + 1)) {
        break;
      }
      p = pos;
      l = limit;
    }
    char c = buffer[p + i];
    switch (c) {
      case '-':
        // Valid only as the leading sign or directly after an exponent 'e'/'E'.
        if (last == NUMBER_CHAR_NONE) {
          negative = true;
          last = NUMBER_CHAR_SIGN;
          continue;
        } else if (last == NUMBER_CHAR_EXP_E) {
          last = NUMBER_CHAR_EXP_SIGN;
          continue;
        }
        return PEEKED_NONE;
      case '+':
        // Valid only directly after an exponent 'e'/'E'.
        if (last == NUMBER_CHAR_EXP_E) {
          last = NUMBER_CHAR_EXP_SIGN;
          continue;
        }
        return PEEKED_NONE;
      case 'e':
      case 'E':
        if (last == NUMBER_CHAR_DIGIT || last == NUMBER_CHAR_FRACTION_DIGIT) {
          last = NUMBER_CHAR_EXP_E;
          continue;
        }
        return PEEKED_NONE;
      case '.':
        if (last == NUMBER_CHAR_DIGIT) {
          last = NUMBER_CHAR_DECIMAL;
          continue;
        }
        return PEEKED_NONE;
      default:
        if (c < '0' || c > '9') {
          if (!isLiteral(c)) {
            break charactersOfNumber;
          }
          return PEEKED_NONE;
        }
        if (last == NUMBER_CHAR_SIGN || last == NUMBER_CHAR_NONE) {
          value = -(c - '0');
          last = NUMBER_CHAR_DIGIT;
        } else if (last == NUMBER_CHAR_DIGIT) {
          if (value == 0) {
            return PEEKED_NONE; // Leading '0' prefix is not allowed (since it could be octal).
          }
          // Accumulate negatively and track overflow against Long.MIN_VALUE.
          long newValue = value * 10 - (c - '0');
          fitsInLong &=
              value > MIN_INCOMPLETE_INTEGER
                  || (value == MIN_INCOMPLETE_INTEGER && newValue < value);
          value = newValue;
        } else if (last == NUMBER_CHAR_DECIMAL) {
          last = NUMBER_CHAR_FRACTION_DIGIT;
        } else if (last == NUMBER_CHAR_EXP_E || last == NUMBER_CHAR_EXP_SIGN) {
          last = NUMBER_CHAR_EXP_DIGIT;
        }
    }
  }
  // We've read a complete number. Decide if it's a PEEKED_LONG or a PEEKED_NUMBER.
  // Don't store -0 as long; user might want to read it as double -0.0
  // Don't try to convert Long.MIN_VALUE to positive long; it would overflow MAX_VALUE
  if (last == NUMBER_CHAR_DIGIT
      && fitsInLong
      && (value != Long.MIN_VALUE || negative)
      && (value != 0 || !negative)) {
    peekedLong = negative ? value : -value;
    pos += i;
    peeked = PEEKED_LONG;
    return peeked;
  } else if (last == NUMBER_CHAR_DIGIT
      || last == NUMBER_CHAR_FRACTION_DIGIT
      || last == NUMBER_CHAR_EXP_DIGIT) {
    peekedNumberLength = i;
    peeked = PEEKED_NUMBER;
    return peeked;
  } else {
    // Scan ended in a non-accepting state (e.g. trailing '.', 'e'): not a number.
    return PEEKED_NONE;
  }
}
/**
 * Returns false for characters that terminate an unquoted literal: JSON
 * structure, separators and whitespace. The first group of cases only
 * terminates a literal in lenient mode, hence the checkLenient() fall-through
 * into the unconditional group.
 */
@SuppressWarnings("fallthrough")
private boolean isLiteral(char c) throws IOException {
  switch (c) {
    case '/':
    case '\\':
    case ';':
    case '#':
    case '=':
      checkLenient(); // fall-through
    case '{':
    case '}':
    case '[':
    case ']':
    case ':':
    case ',':
    case ' ':
    case '\t':
    case '\f':
    case '\r':
    case '\n':
      return false;
    default:
      return true;
  }
}
/**
 * Consumes and returns the next token, which must be a {@link JsonToken#NAME
 * property name}.
 *
 * @throws IllegalStateException if the next token is not a property name.
 */
public String nextName() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  String result;
  switch (p) {
    case PEEKED_UNQUOTED_NAME:
      result = nextUnquotedValue();
      break;
    case PEEKED_SINGLE_QUOTED_NAME:
      result = nextQuotedValue('\'');
      break;
    case PEEKED_DOUBLE_QUOTED_NAME:
      result = nextQuotedValue('"');
      break;
    default:
      throw unexpectedTokenError("a name");
  }
  peeked = PEEKED_NONE;
  pathNames[stackSize - 1] = result; // record the name for JSON-path reporting
  return result;
}
/**
 * Consumes and returns the {@link JsonToken#STRING string} value of the next
 * token. A numeric token is returned in its string form.
 *
 * @throws IllegalStateException if the next token is not a string.
 */
public String nextString() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  String result;
  switch (p) {
    case PEEKED_UNQUOTED:
      result = nextUnquotedValue();
      break;
    case PEEKED_SINGLE_QUOTED:
      result = nextQuotedValue('\'');
      break;
    case PEEKED_DOUBLE_QUOTED:
      result = nextQuotedValue('"');
      break;
    case PEEKED_BUFFERED:
      // A previously buffered value (e.g. from a failed number parse).
      result = peekedString;
      peekedString = null;
      break;
    case PEEKED_LONG:
      result = Long.toString(peekedLong);
      break;
    case PEEKED_NUMBER:
      // The literal is still sitting in the buffer; copy and consume it.
      result = new String(buffer, pos, peekedNumberLength);
      pos += peekedNumberLength;
      break;
    default:
      throw unexpectedTokenError("a string");
  }
  peeked = PEEKED_NONE;
  pathIndices[stackSize - 1]++;
  return result;
}
/**
 * Consumes and returns the {@link JsonToken#BOOLEAN boolean} value of the next
 * token.
 *
 * @throws IllegalStateException if the next token is not a boolean.
 */
public boolean nextBoolean() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Guard clause: only the two boolean token codes are acceptable here.
  if (p != PEEKED_TRUE && p != PEEKED_FALSE) {
    throw unexpectedTokenError("a boolean");
  }
  peeked = PEEKED_NONE;
  pathIndices[stackSize - 1]++;
  return p == PEEKED_TRUE;
}
/**
 * Consumes the next token, which must be a literal JSON null.
 *
 * @throws IllegalStateException if the next token is not a JSON null.
 */
public void nextNull() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  // Guard clause: anything but a null literal is a caller error.
  if (p != PEEKED_NULL) {
    throw unexpectedTokenError("null");
  }
  peeked = PEEKED_NONE;
  pathIndices[stackSize - 1]++;
}
/**
 * Returns the {@link JsonToken#NUMBER double} value of the next token, consuming it. If the next
 * token is a string, this method will attempt to parse it as a double using {@link
 * Double#parseDouble(String)}.
 *
 * @throws IllegalStateException if the next token is neither a number nor a string.
 * @throws NumberFormatException if the next literal value cannot be parsed as a double.
 * @throws MalformedJsonException if the next literal value is NaN or Infinity and this reader is
 *     not {@link #setStrictness(Strictness) lenient}.
 */
public double nextDouble() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  if (p == PEEKED_LONG) {
    // Exact integer fast path: no string parsing needed.
    peeked = PEEKED_NONE;
    pathIndices[stackSize - 1]++;
    return (double) peekedLong;
  }
  if (p == PEEKED_NUMBER) {
    peekedString = new String(buffer, pos, peekedNumberLength);
    pos += peekedNumberLength;
  } else if (p == PEEKED_SINGLE_QUOTED || p == PEEKED_DOUBLE_QUOTED) {
    peekedString = nextQuotedValue(p == PEEKED_SINGLE_QUOTED ? '\'' : '"');
  } else if (p == PEEKED_UNQUOTED) {
    peekedString = nextUnquotedValue();
  } else if (p != PEEKED_BUFFERED) {
    throw unexpectedTokenError("a double");
  }
  // Mark the value as buffered BEFORE parsing: if parseDouble throws, the
  // token can still be re-read (e.g. as a string).
  peeked = PEEKED_BUFFERED;
  double result = Double.parseDouble(peekedString); // don't catch this NumberFormatException.
  if (strictness != Strictness.LENIENT && (Double.isNaN(result) || Double.isInfinite(result))) {
    throw syntaxError("JSON forbids NaN and infinities: " + result);
  }
  peekedString = null;
  peeked = PEEKED_NONE;
  pathIndices[stackSize - 1]++;
  return result;
}
/**
 * Returns the {@link JsonToken#NUMBER long} value of the next token, consuming it. If the next
 * token is a string, this method will attempt to parse it as a long. If the next token's numeric
 * value cannot be exactly represented by a Java {@code long}, this method throws.
 *
 * @throws IllegalStateException if the next token is neither a number nor a string.
 * @throws NumberFormatException if the next literal value cannot be parsed as a number, or
 *     exactly represented as a long.
 */
public long nextLong() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  if (p == PEEKED_LONG) {
    // Exact integer fast path: value was already decoded by peekNumber().
    peeked = PEEKED_NONE;
    pathIndices[stackSize - 1]++;
    return peekedLong;
  }
  if (p == PEEKED_NUMBER) {
    peekedString = new String(buffer, pos, peekedNumberLength);
    pos += peekedNumberLength;
  } else if (p == PEEKED_SINGLE_QUOTED || p == PEEKED_DOUBLE_QUOTED || p == PEEKED_UNQUOTED) {
    if (p == PEEKED_UNQUOTED) {
      peekedString = nextUnquotedValue();
    } else {
      peekedString = nextQuotedValue(p == PEEKED_SINGLE_QUOTED ? '\'' : '"');
    }
    try {
      long result = Long.parseLong(peekedString);
      peeked = PEEKED_NONE;
      pathIndices[stackSize - 1]++;
      return result;
    } catch (NumberFormatException ignored) {
      // Fall back to parse as a double below.
    }
  } else {
    throw unexpectedTokenError("a long");
  }
  // Mark the value as buffered BEFORE parsing so a failed parse leaves the
  // token re-readable.
  peeked = PEEKED_BUFFERED;
  double asDouble = Double.parseDouble(peekedString); // don't catch this NumberFormatException.
  long result = (long) asDouble;
  if (result != asDouble) { // Make sure no precision was lost casting to 'long'.
    throw new NumberFormatException("Expected a long but was " + peekedString + locationString());
  }
  peekedString = null;
  peeked = PEEKED_NONE;
  pathIndices[stackSize - 1]++;
  return result;
}
/**
 * Returns the string up to but not including {@code quote}, unescaping any character escape
 * sequences encountered along the way. The opening quote should have already been read. This
 * consumes the closing quote, but does not include it in the returned string.
 *
 * <p>Uses the char buffer directly when the string fits without escapes; a StringBuilder is only
 * allocated when an escape is met or the string spans a buffer refill.
 *
 * @param quote either ' or ".
 */
private String nextQuotedValue(char quote) throws IOException {
  // Like nextNonWhitespace, this uses locals 'p' and 'l' to save inner-loop field access.
  char[] buffer = this.buffer;
  StringBuilder builder = null;
  while (true) {
    int p = pos;
    int l = limit;
    /* the index of the first character not yet appended to the builder. */
    int start = p;
    while (p < l) {
      int c = buffer[p++];
      // In strict mode, throw an exception when meeting unescaped control characters (U+0000
      // through U+001F)
      if (strictness == Strictness.STRICT && c < 0x20) {
        throw syntaxError(
            "Unescaped control characters (\\u0000-\\u001F) are not allowed in strict mode");
      } else if (c == quote) {
        pos = p;
        int len = p - start - 1;
        if (builder == null) {
          // Fast path: whole string was escape-free and within one buffer fill.
          return new String(buffer, start, len);
        } else {
          builder.append(buffer, start, len);
          return builder.toString();
        }
      } else if (c == '\\') {
        pos = p;
        int len = p - start - 1;
        if (builder == null) {
          int estimatedLength = (len + 1) * 2;
          builder = new StringBuilder(Math.max(estimatedLength, 16));
        }
        builder.append(buffer, start, len);
        builder.append(readEscapeCharacter());
        // readEscapeCharacter() may have refilled the buffer; resync locals.
        p = pos;
        l = limit;
        start = p;
      } else if (c == '\n') {
        lineNumber++;
        lineStart = p;
      }
    }
    // Buffer exhausted before the closing quote: stash what we have and refill.
    if (builder == null) {
      int estimatedLength = (p - start) * 2;
      builder = new StringBuilder(Math.max(estimatedLength, 16));
    }
    builder.append(buffer, start, p - start);
    pos = p;
    if (!fillBuffer(1)) {
      throw syntaxError("Unterminated string");
    }
  }
}
/**
 * Returns an unquoted value as a string. Scans until a character that
 * terminates a literal (see {@link #isLiteral(char)}); a StringBuilder is
 * only allocated when the literal exceeds the buffer size.
 */
@SuppressWarnings("fallthrough")
private String nextUnquotedValue() throws IOException {
  StringBuilder builder = null;
  int i = 0;
  findNonLiteralCharacter:
  while (true) {
    for (; pos + i < limit; i++) {
      switch (buffer[pos + i]) {
        case '/':
        case '\\':
        case ';':
        case '#':
        case '=':
          checkLenient(); // fall-through
        case '{':
        case '}':
        case '[':
        case ']':
        case ':':
        case ',':
        case ' ':
        case '\t':
        case '\f':
        case '\r':
        case '\n':
          break findNonLiteralCharacter;
        default:
          // skip character to be included in string value
      }
    }
    // Attempt to load the entire literal into the buffer at once.
    if (i < buffer.length) {
      if (fillBuffer(i + 1)) {
        continue;
      } else {
        break; // EOF ends the literal
      }
    }
    // use a StringBuilder when the value is too long. This is too long to be a number!
    if (builder == null) {
      builder = new StringBuilder(Math.max(i, 16));
    }
    builder.append(buffer, pos, i);
    pos += i;
    i = 0;
    if (!fillBuffer(1)) {
      break;
    }
  }
  String result =
      (builder == null) ? new String(buffer, pos, i) : builder.append(buffer, pos, i).toString();
  pos += i;
  return result;
}
/**
 * Skips a quoted string whose opening {@code quote} has already been consumed,
 * processing escape sequences so an escaped quote does not end the string
 * early. Line numbers are still tracked while skipping.
 */
private void skipQuotedValue(char quote) throws IOException {
  // Like nextNonWhitespace, this uses locals 'p' and 'l' to save inner-loop field access.
  char[] buffer = this.buffer;
  do {
    int p = pos;
    int l = limit;
    /* the index of the first character not yet appended to the builder. */
    while (p < l) {
      int c = buffer[p++];
      if (c == quote) {
        pos = p;
        return;
      } else if (c == '\\') {
        pos = p;
        // Must still decode the escape (it may be \" or \uXXXX) even though
        // the resulting character is discarded.
        char unused = readEscapeCharacter();
        p = pos;
        l = limit;
      } else if (c == '\n') {
        lineNumber++;
        lineStart = p;
      }
    }
    pos = p;
  } while (fillBuffer(1));
  throw syntaxError("Unterminated string");
}
/**
 * Skips an unquoted literal: advances {@code pos} past every character until a
 * literal-terminating character (see {@link #isLiteral(char)}) or end of
 * input. The lenient-only terminators trigger checkLenient() via fall-through.
 */
@SuppressWarnings("fallthrough")
private void skipUnquotedValue() throws IOException {
  do {
    int i = 0;
    for (; pos + i < limit; i++) {
      switch (buffer[pos + i]) {
        case '/':
        case '\\':
        case ';':
        case '#':
        case '=':
          checkLenient(); // fall-through
        case '{':
        case '}':
        case '[':
        case ']':
        case ':':
        case ',':
        case ' ':
        case '\t':
        case '\f':
        case '\r':
        case '\n':
          pos += i;
          return;
        default:
          // skip the character
      }
    }
    pos += i;
  } while (fillBuffer(1));
}
/**
 * Returns the {@link JsonToken#NUMBER int} value of the next token, consuming it. If the next
 * token is a string, this method will attempt to parse it as an int. If the next token's numeric
 * value cannot be exactly represented by a Java {@code int}, this method throws.
 *
 * @throws IllegalStateException if the next token is neither a number nor a string.
 * @throws NumberFormatException if the next literal value cannot be parsed as a number, or
 *     exactly represented as an int.
 */
public int nextInt() throws IOException {
  int p = peeked;
  if (p == PEEKED_NONE) {
    p = doPeek();
  }
  int result;
  if (p == PEEKED_LONG) {
    // Integer fast path, with an exactness check on the long -> int narrowing.
    result = (int) peekedLong;
    if (peekedLong != result) { // Make sure no precision was lost casting to 'int'.
      throw new NumberFormatException("Expected an int but was " + peekedLong + locationString());
    }
    peeked = PEEKED_NONE;
    pathIndices[stackSize - 1]++;
    return result;
  }
  if (p == PEEKED_NUMBER) {
    peekedString = new String(buffer, pos, peekedNumberLength);
    pos += peekedNumberLength;
  } else if (p == PEEKED_SINGLE_QUOTED || p == PEEKED_DOUBLE_QUOTED || p == PEEKED_UNQUOTED) {
    if (p == PEEKED_UNQUOTED) {
      peekedString = nextUnquotedValue();
    } else {
      peekedString = nextQuotedValue(p == PEEKED_SINGLE_QUOTED ? '\'' : '"');
    }
    try {
      result = Integer.parseInt(peekedString);
      peeked = PEEKED_NONE;
      pathIndices[stackSize - 1]++;
      return result;
    } catch (NumberFormatException ignored) {
      // Fall back to parse as a double below.
    }
  } else {
    throw unexpectedTokenError("an int");
  }
  // Mark the value as buffered BEFORE parsing so a failed parse leaves the
  // token re-readable.
  peeked = PEEKED_BUFFERED;
  double asDouble = Double.parseDouble(peekedString); // don't catch this NumberFormatException.
  result = (int) asDouble;
  if (result != asDouble) { // Make sure no precision was lost casting to 'int'.
    throw new NumberFormatException("Expected an int but was " + peekedString + locationString());
  }
  peekedString = null;
  peeked = PEEKED_NONE;
  pathIndices[stackSize - 1]++;
  return result;
}
/**
 * Closes this JSON reader and the underlying {@link Reader}.
 *
 * <p>Using the JSON reader after it has been closed will throw an {@link IllegalStateException}
 * in most cases.
 */
@Override
public void close() throws IOException {
  // Reset the reader state (single CLOSED scope, no peeked token) before
  // releasing the underlying stream.
  stackSize = 1;
  stack[0] = JsonScope.CLOSED;
  peeked = PEEKED_NONE;
  in.close();
}
/**
 * Skips the next value recursively. This method is intended for use when the JSON token stream
 * contains unrecognized or unhandled values.
 *
 * <p>The behavior depends on the type of the next JSON token:
 *
 * <ul>
 *   <li>Start of a JSON array or object: It and all of its nested values are skipped.
 *   <li>Primitive value (for example a JSON number): The primitive value is skipped.
 *   <li>Property name: Only the name but not the value of the property is skipped. {@code
 *       skipValue()} has to be called again to skip the property value as well.
 *   <li>End of a JSON array or object: Only this end token is skipped.
 *   <li>End of JSON document: Skipping has no effect, the next token continues to be the end of
 *       the document.
 * </ul>
 */
public void skipValue() throws IOException {
  // Tracks how many arrays/objects have been entered during this skip; the loop
  // runs until the depth returns to 0 (i.e. one complete value was consumed).
  int count = 0;
  do {
    int p = peeked;
    if (p == PEEKED_NONE) {
      p = doPeek();
    }
    switch (p) {
      case PEEKED_BEGIN_ARRAY:
        push(JsonScope.EMPTY_ARRAY);
        count++;
        break;
      case PEEKED_BEGIN_OBJECT:
        push(JsonScope.EMPTY_OBJECT);
        count++;
        break;
      case PEEKED_END_ARRAY:
        stackSize--;
        count--;
        break;
      case PEEKED_END_OBJECT:
        // Only update when object end is explicitly skipped, otherwise stack is not updated
        // anyways
        if (count == 0) {
          // Free the last path name so that it can be garbage collected
          pathNames[stackSize - 1] = null;
        }
        stackSize--;
        count--;
        break;
      case PEEKED_UNQUOTED:
        skipUnquotedValue();
        break;
      case PEEKED_SINGLE_QUOTED:
        skipQuotedValue('\'');
        break;
      case PEEKED_DOUBLE_QUOTED:
        skipQuotedValue('"');
        break;
      case PEEKED_UNQUOTED_NAME:
        skipUnquotedValue();
        // Only update when name is explicitly skipped, otherwise stack is not updated anyways
        if (count == 0) {
          pathNames[stackSize - 1] = "<skipped>";
        }
        break;
      case PEEKED_SINGLE_QUOTED_NAME:
        skipQuotedValue('\'');
        // Only update when name is explicitly skipped, otherwise stack is not updated anyways
        if (count == 0) {
          pathNames[stackSize - 1] = "<skipped>";
        }
        break;
      case PEEKED_DOUBLE_QUOTED_NAME:
        skipQuotedValue('"');
        // Only update when name is explicitly skipped, otherwise stack is not updated anyways
        if (count == 0) {
          pathNames[stackSize - 1] = "<skipped>";
        }
        break;
      case PEEKED_NUMBER:
        // Number literals are fully buffered; just advance past them.
        pos += peekedNumberLength;
        break;
      case PEEKED_EOF:
        // Do nothing
        return;
      default:
        // For all other tokens there is nothing to do; token has already been consumed from
        // underlying reader
    }
    peeked = PEEKED_NONE;
  } while (count > 0);
  // A complete value was consumed; advance the path index of the enclosing scope.
  pathIndices[stackSize - 1]++;
}
/**
 * Pushes a new scope onto the stack, growing the stack (and the parallel path
 * arrays) when it is full.
 *
 * @param newTop the {@code JsonScope} constant to push
 * @throws MalformedJsonException if pushing would exceed the configured nesting limit
 */
private void push(int newTop) throws MalformedJsonException {
  // - 1 because stack contains as first element either EMPTY_DOCUMENT or NONEMPTY_DOCUMENT
  if (stackSize - 1 >= nestingLimit) {
    throw new MalformedJsonException(
        "Nesting limit " + nestingLimit + " reached" + locationString());
  }
  if (stackSize == stack.length) {
    // Double the capacity of the stack and the parallel path-tracking arrays together.
    int grownSize = stackSize * 2;
    stack = Arrays.copyOf(stack, grownSize);
    pathIndices = Arrays.copyOf(pathIndices, grownSize);
    pathNames = Arrays.copyOf(pathNames, grownSize);
  }
  stack[stackSize] = newTop;
  stackSize++;
}
/**
 * Returns true once {@code limit - pos >= minimum}. If the data is exhausted before that many
 * characters are available, this returns false.
 */
private boolean fillBuffer(int minimum) throws IOException {
  char[] buffer = this.buffer;
  // Keep column reporting correct: lineStart is an index into the buffer and the
  // compaction below shifts everything left by 'pos'.
  lineStart -= pos;
  // Compact the buffer: move the unread tail to the front.
  if (limit != pos) {
    limit -= pos;
    System.arraycopy(buffer, pos, buffer, 0, limit);
  } else {
    limit = 0;
  }
  pos = 0;
  int total;
  while ((total = in.read(buffer, limit, buffer.length - limit)) != -1) {
    limit += total;
    // if this is the first read, consume an optional byte order mark (BOM) if it exists
    if (lineNumber == 0 && lineStart == 0 && limit > 0 && buffer[0] == '\ufeff') {
      pos++;
      lineStart++;
      // The BOM does not count toward the requested characters.
      minimum++;
    }
    if (limit >= minimum) {
      return true;
    }
  }
  return false;
}
/**
 * Returns the next character in the stream that is neither whitespace nor a part of a comment.
 * When this returns, the returned character is always at {@code buffer[pos-1]}; this means the
 * caller can always push back the returned character by decrementing {@code pos}.
 *
 * @param throwOnEof whether reaching the end of input raises an {@link EOFException}
 *     instead of returning -1
 */
private int nextNonWhitespace(boolean throwOnEof) throws IOException {
  /*
   * This code uses ugly local variables 'p' and 'l' representing the 'pos'
   * and 'limit' fields respectively. Using locals rather than fields saves
   * a few field reads for each whitespace character in a pretty-printed
   * document, resulting in a 5% speedup. We need to flush 'p' to its field
   * before any (potentially indirect) call to fillBuffer() and reread both
   * 'p' and 'l' after any (potentially indirect) call to the same method.
   */
  char[] buffer = this.buffer;
  int p = pos;
  int l = limit;
  while (true) {
    if (p == l) {
      // Buffer exhausted; flush 'p', refill, and reread the locals.
      pos = p;
      if (!fillBuffer(1)) {
        break;
      }
      p = pos;
      l = limit;
    }
    int c = buffer[p++];
    if (c == '\n') {
      lineNumber++;
      lineStart = p;
      continue;
    } else if (c == ' ' || c == '\r' || c == '\t') {
      continue;
    }
    if (c == '/') {
      // Possibly the start of a comment (lenient extension); need one more char to decide.
      pos = p;
      if (p == l) {
        pos--; // push back '/' so it's still in the buffer when this method returns
        boolean charsLoaded = fillBuffer(2);
        pos++; // consume the '/' again
        if (!charsLoaded) {
          return c;
        }
      }
      checkLenient();
      char peek = buffer[pos];
      switch (peek) {
        case '*':
          // skip a /* c-style comment */
          pos++;
          if (!skipTo("*/")) {
            throw syntaxError("Unterminated comment");
          }
          // Resume after the closing "*/" (skipTo leaves pos at its start).
          p = pos + 2;
          l = limit;
          continue;
        case '/':
          // skip a // end-of-line comment
          pos++;
          skipToEndOfLine();
          p = pos;
          l = limit;
          continue;
        default:
          // A lone '/'; return it as a regular character.
          return c;
      }
    } else if (c == '#') {
      pos = p;
      /*
       * Skip a # hash end-of-line comment. The JSON RFC doesn't
       * specify this behaviour, but it's required to parse
       * existing documents. See http://b/2571423.
       */
      checkLenient();
      skipToEndOfLine();
      p = pos;
      l = limit;
    } else {
      pos = p;
      return c;
    }
  }
  if (throwOnEof) {
    throw new EOFException("End of input" + locationString());
  } else {
    return -1;
  }
}
/**
 * Verifies that the reader is in lenient mode; throws otherwise. Called before
 * consuming syntax that is not permitted by the JSON specification.
 *
 * @throws MalformedJsonException if the reader is not lenient
 */
private void checkLenient() throws MalformedJsonException {
  if (strictness == Strictness.LENIENT) {
    return;
  }
  throw syntaxError("Use JsonReader.setStrictness(Strictness.LENIENT) to accept malformed JSON");
}
/**
 * Advances the position until after the next newline character. If the line is terminated by
 * "\r\n", the '\n' must be consumed as whitespace by the caller.
 */
private void skipToEndOfLine() throws IOException {
  while (pos < limit || fillBuffer(1)) {
    char current = buffer[pos++];
    if (current == '\r') {
      // Stop before a possible '\n'; the caller treats it as ordinary whitespace.
      return;
    }
    if (current == '\n') {
      lineNumber++;
      lineStart = pos;
      return;
    }
  }
}
/**
 * Advances {@code pos} until the buffered input starts with {@code toFind},
 * refilling the buffer as needed.
 *
 * @param toFind a string to search for. Must not contain a newline.
 * @return true if found (with {@code pos} left at the match); false if the input
 *     was exhausted first
 */
private boolean skipTo(String toFind) throws IOException {
  int length = toFind.length();
  outer:
  for (; pos + length <= limit || fillBuffer(length); pos++) {
    // Keep line/column tracking accurate while scanning past newlines.
    if (buffer[pos] == '\n') {
      lineNumber++;
      lineStart = pos + 1;
      continue;
    }
    // Compare the candidate window against the target string.
    for (int c = 0; c < length; c++) {
      if (buffer[pos + c] != toFind.charAt(c)) {
        continue outer;
      }
    }
    return true;
  }
  return false;
}
/** Returns the simple class name followed by the current location, e.g. for diagnostics. */
@Override
public String toString() {
  String simpleName = getClass().getSimpleName();
  return simpleName + locationString();
}
/**
 * Describes the current location for use in error messages. Line and column are
 * tracked zero-based internally and reported one-based here.
 */
String locationString() {
  return " at line " + (lineNumber + 1) + " column " + (pos - lineStart + 1) + " path " + getPath();
}
/**
 * Builds a JSONPath string from the scope stack.
 *
 * @param usePreviousPath when true and the last frame is an array, the reported index
 *     is stepped back by one so the path points at the previously consumed element
 */
private String getPath(boolean usePreviousPath) {
  StringBuilder path = new StringBuilder().append('$');
  for (int frame = 0; frame < stackSize; frame++) {
    int scope = stack[frame];
    if (scope == JsonScope.EMPTY_ARRAY || scope == JsonScope.NONEMPTY_ARRAY) {
      int pathIndex = pathIndices[frame];
      // If index is last path element it points to next array element; have to decrement
      if (usePreviousPath && pathIndex > 0 && frame == stackSize - 1) {
        pathIndex--;
      }
      path.append('[').append(pathIndex).append(']');
    } else if (scope == JsonScope.EMPTY_OBJECT
        || scope == JsonScope.DANGLING_NAME
        || scope == JsonScope.NONEMPTY_OBJECT) {
      path.append('.');
      if (pathNames[frame] != null) {
        path.append(pathNames[frame]);
      }
    } else if (scope == JsonScope.NONEMPTY_DOCUMENT
        || scope == JsonScope.EMPTY_DOCUMENT
        || scope == JsonScope.CLOSED) {
      // Document-level scopes contribute nothing to the path.
    } else {
      throw new AssertionError("Unknown scope value: " + scope);
    }
  }
  return path.toString();
}
/**
 * Returns a <a href="https://goessner.net/articles/JsonPath/">JSONPath</a> in <i>dot-notation</i>
 * to the next (or current) location in the JSON document. That means:
 *
 * <ul>
 *   <li>For JSON arrays the path points to the index of the next element (even if there are no
 *       further elements).
 *   <li>For JSON objects the path points to the last property, or to the current property if its
 *       name has already been consumed.
 * </ul>
 *
 * <p>This method can be useful to add additional context to exception messages <i>before</i> a
 * value is consumed, for example when the {@linkplain #peek() peeked} token is unexpected.
 *
 * @see #getPreviousPath()
 */
public String getPath() {
  return getPath(false); // false: point at the upcoming (next) location
}
/**
 * Returns a <a href="https://goessner.net/articles/JsonPath/">JSONPath</a> in <i>dot-notation</i>
 * to the previous (or current) location in the JSON document. That means:
 *
 * <ul>
 *   <li>For JSON arrays the path points to the index of the previous element.<br>
 *       If no element has been consumed yet it uses the index 0 (even if there are no elements).
 *   <li>For JSON objects the path points to the last property, or to the current property if its
 *       name has already been consumed.
 * </ul>
 *
 * <p>This method can be useful to add additional context to exception messages <i>after</i> a
 * value has been consumed.
 *
 * @see #getPath()
 */
public String getPreviousPath() {
  return getPath(true); // true: point at the previously consumed location
}
/**
 * Unescapes the character identified by the character or characters that immediately follow a
 * backslash. The backslash '\' should have already been read. This supports both Unicode escapes
 * "u000A" and two-character escapes "\n".
 *
 * @return the unescaped character
 * @throws MalformedJsonException if the escape sequence is malformed
 */
@SuppressWarnings("fallthrough")
private char readEscapeCharacter() throws IOException {
  if (pos == limit && !fillBuffer(1)) {
    throw syntaxError("Unterminated escape sequence");
  }
  char escaped = buffer[pos++];
  switch (escaped) {
    case 'u':
      // A \ u escape needs exactly four hex digits.
      if (pos + 4 > limit && !fillBuffer(4)) {
        throw syntaxError("Unterminated escape sequence");
      }
      // Equivalent to Integer.parseInt(stringPool.get(buffer, pos, 4), 16);
      int result = 0;
      for (int i = pos, end = i + 4; i < end; i++) {
        char c = buffer[i];
        result <<= 4;
        if (c >= '0' && c <= '9') {
          result += (c - '0');
        } else if (c >= 'a' && c <= 'f') {
          result += (c - 'a' + 10);
        } else if (c >= 'A' && c <= 'F') {
          result += (c - 'A' + 10);
        } else {
          throw syntaxError("Malformed Unicode escape \\u" + new String(buffer, pos, 4));
        }
      }
      pos += 4;
      return (char) result;
    case 't':
      return '\t';
    case 'b':
      return '\b';
    case 'n':
      return '\n';
    case 'r':
      return '\r';
    case 'f':
      return '\f';
    case '\n':
      // An escaped literal newline is rejected in strict mode; otherwise the line
      // counter is updated and the newline is returned via the fall-through below.
      if (strictness == Strictness.STRICT) {
        throw syntaxError("Cannot escape a newline character in strict mode");
      }
      lineNumber++;
      lineStart = pos;
      // fall-through
    case '\'':
      // \' is a lenient extension; in strict mode it is rejected. Otherwise it
      // falls through to return the character itself.
      if (strictness == Strictness.STRICT) {
        throw syntaxError("Invalid escaped character \"'\" in strict mode");
      }
    case '"':
    case '\\':
    case '/':
      return escaped;
    default:
      // throw error when none of the above cases are matched
      throw syntaxError("Invalid escape sequence");
  }
}
/**
 * Throws a new {@link MalformedJsonException} with the given message and information about the
 * current location. Declared to return the exception so callers can write
 * {@code throw syntaxError(...)}, although this method never returns normally.
 */
private MalformedJsonException syntaxError(String message) throws MalformedJsonException {
  String details =
      message + locationString() + "\nSee " + TroubleshootingGuide.createUrl("malformed-json");
  throw new MalformedJsonException(details);
}
/**
 * Creates an {@link IllegalStateException} reporting that the next token did not have the
 * expected type, including the actual token, the current location, and a link to the matching
 * troubleshooting-guide entry.
 *
 * @param expected a description of the expected token, e.g. "an int"
 * @throws IOException if peeking at the next token fails
 */
private IllegalStateException unexpectedTokenError(String expected) throws IOException {
  JsonToken peeked = peek();
  // A NULL token most likely indicates a type adapter that is not null-safe.
  String troubleshootingId =
      peeked == JsonToken.NULL ? "adapter-not-null-safe" : "unexpected-json-structure";
  // Reuse the already-peeked token instead of calling peek() a second time.
  return new IllegalStateException(
      "Expected "
          + expected
          + " but was "
          + peeked
          + locationString()
          + "\nSee "
          + TroubleshootingGuide.createUrl(troubleshootingId));
}
/** Consumes the non-execute prefix {@code )]}'\n} if it exists. */
private void consumeNonExecutePrefix() throws IOException {
  // fast-forward through the leading whitespace, then push the first
  // non-whitespace character back into the buffer
  int unused = nextNonWhitespace(true);
  pos--;
  // The prefix is exactly five characters; give up if that many are unavailable.
  if (pos + 5 > limit && !fillBuffer(5)) {
    return;
  }
  String prefix = ")]}'\n";
  for (int i = 0; i < 5; i++) {
    if (buffer[pos + i] != prefix.charAt(i)) {
      return; // not a security token!
    }
  }
  // we consumed a security token!
  pos += 5;
}
static {
  // Installs the shared JsonReaderInternalAccess hook so that other parts of the
  // library can re-tag a peeked property-name token as the equivalent string-value
  // token without exposing the reader's internal state publicly.
  JsonReaderInternalAccess.INSTANCE =
      new JsonReaderInternalAccess() {
        @Override
        public void promoteNameToValue(JsonReader reader) throws IOException {
          // JsonTreeReader has its own promotion logic; delegate to it.
          if (reader instanceof JsonTreeReader) {
            ((JsonTreeReader) reader).promoteNameToValue();
            return;
          }
          int p = reader.peeked;
          if (p == PEEKED_NONE) {
            p = reader.doPeek();
          }
          // Map each peeked *_NAME token to the corresponding value token.
          if (p == PEEKED_DOUBLE_QUOTED_NAME) {
            reader.peeked = PEEKED_DOUBLE_QUOTED;
          } else if (p == PEEKED_SINGLE_QUOTED_NAME) {
            reader.peeked = PEEKED_SINGLE_QUOTED;
          } else if (p == PEEKED_UNQUOTED_NAME) {
            reader.peeked = PEEKED_UNQUOTED;
          } else {
            throw reader.unexpectedTokenError("a name");
          }
        }
      };
}
}
| JsonReader |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/authorization/method/PostFilterAuthorizationMethodInterceptorTests.java | {
"start": 8547,
"end": 8723
} | interface ____ {
@MyPostFilter
void inheritedAnnotations();
}
@Retention(RetentionPolicy.RUNTIME)
@PostFilter("filterObject == 'john'")
public @ | InterfaceAnnotationsThree |
java | quarkusio__quarkus | extensions/funqy/funqy-http/runtime/src/main/java/io/quarkus/funqy/runtime/bindings/http/FunqyHttpBindingRecorder.java | {
"start": 1087,
"end": 3253
} | class ____ {
private static ObjectMapper objectMapper;
private static QueryObjectMapper queryMapper;
public void init() {
objectMapper = getObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
queryMapper = new QueryObjectMapper();
for (FunctionInvoker invoker : FunctionRecorder.registry.invokers()) {
if (invoker.hasInput()) {
JavaType javaInputType = objectMapper.constructType(invoker.getInputType());
ObjectReader reader = objectMapper.readerFor(javaInputType);
QueryReader queryReader = queryMapper.readerFor(invoker.getInputType());
invoker.getBindingContext().put(ObjectReader.class.getName(), reader);
invoker.getBindingContext().put(QueryReader.class.getName(), queryReader);
}
if (invoker.hasOutput()) {
JavaType javaOutputType = objectMapper.constructType(invoker.getOutputType());
ObjectWriter writer = objectMapper.writerFor(javaOutputType);
invoker.getBindingContext().put(ObjectWriter.class.getName(), writer);
}
}
}
private ObjectMapper getObjectMapper() {
InstanceHandle<ObjectMapper> instance = Arc.container().instance(ObjectMapper.class);
if (instance.isAvailable()) {
return instance.get().copy();
}
return new ObjectMapper();
}
public Handler<RoutingContext> start(String contextPath,
Supplier<Vertx> vertx,
ShutdownContext shutdown,
BeanContainer beanContainer,
Executor executor) {
shutdown.addShutdownTask(new Runnable() {
@Override
public void run() {
FunctionConstructor.CONTAINER = null;
objectMapper = null;
}
});
FunctionConstructor.CONTAINER = beanContainer;
return new VertxRequestHandler(vertx.get(), beanContainer, contextPath, executor);
}
}
| FunqyHttpBindingRecorder |
java | apache__camel | components/camel-spring-parent/camel-spring-ws/src/test/java/org/apache/camel/component/spring/ws/addressing/StaticIdStrategy.java | {
"start": 1067,
"end": 1239
} | class ____ extends UuidMessageIdStrategy {
@Override
public URI newMessageId(SoapMessage message) {
return URI.create("staticTestId");
}
}
| StaticIdStrategy |
java | redisson__redisson | redisson/src/main/java/org/redisson/misc/FastRemovalQueue.java | {
"start": 2673,
"end": 5902
} | class ____<E> implements Iterable<E> {
private final WrappedLock lock = new WrappedLock();
private Node<E> head;
private Node<E> tail;
DoublyLinkedList() {
}
public void clear() {
lock.execute(() -> {
head = null;
tail = null;
});
}
public void add(Node<E> newNode) {
lock.execute(() -> {
addNode(newNode);
});
}
private void addNode(Node<E> newNode) {
Node<E> currentTail = tail;
tail = newNode;
if (currentTail == null) {
head = newNode;
} else {
newNode.prev = currentTail;
currentTail.next = newNode;
}
}
public boolean remove(Node<E> node) {
Boolean r = lock.execute(() -> {
if (node.isDeleted()) {
return false;
}
removeNode(node);
node.setDeleted();
return true;
});
return Boolean.TRUE.equals(r);
}
private void removeNode(Node<E> node) {
Node<E> prevNode = node.prev;
Node<E> nextNode = node.next;
if (prevNode != null) {
prevNode.next = nextNode;
} else {
head = nextNode;
}
if (nextNode != null) {
nextNode.prev = prevNode;
} else {
tail = prevNode;
}
}
public void moveToTail(Node<E> node) {
lock.execute(() -> {
if (node.isDeleted()) {
return;
}
removeNode(node);
node.prev = null;
node.next = null;
addNode(node);
});
}
public Node<E> removeFirst() {
return lock.execute(() -> {
Node<E> currentHead = head;
if (head == tail) {
head = null;
tail = null;
} else {
head = head.next;
head.prev = null;
}
if (currentHead != null) {
currentHead.setDeleted();
}
return currentHead;
});
}
@Override
public Iterator<E> iterator() {
return new Iterator<E>() {
private Node<E> current = head;
@Override
public boolean hasNext() {
while (current != null && current.isDeleted()) {
current = current.next;
}
return current != null;
}
@Override
public E next() {
if (current == null) {
throw new NoSuchElementException();
}
E value = current.getValue();
current = current.next;
return value;
}
};
}
}
} | DoublyLinkedList |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/NoTypeInfoTest.java | {
"start": 477,
"end": 618
} | class ____ extends DatabindTestUtil
{
@JsonTypeInfo(use=JsonTypeInfo.Id.NONE)
@JsonDeserialize(as=NoType.class)
static | NoTypeInfoTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_isNotSameAs_Test.java | {
"start": 1123,
"end": 1872
} | class ____ extends AbstractAssertBaseTest {
@Override
protected ConcreteAssert invoke_api_method() {
return assertions.isNotSameAs(8L);
}
@Override
protected void verify_internal_effects() {
verify(objects).assertNotSame(getInfo(assertions), getActual(assertions), 8L);
}
@Test
void should_be_loosely_typed1() {
List<String> expected = new ArrayList<>();
List<? extends String> actual = new ArrayList<>();
Assertions.assertThat(actual).isNotSameAs(expected);
}
@Test
void should_be_loosely_typed2() {
List<? extends String> expected = new ArrayList<>();
List<? extends String> actual = new ArrayList<>();
Assertions.assertThat(actual).isNotSameAs(expected);
}
}
| AbstractAssert_isNotSameAs_Test |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestInitializeSharedEdits.java | {
"start": 2144,
"end": 7046
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestInitializeSharedEdits.class);
private static final Path TEST_PATH = new Path("/test");
private Configuration conf;
private MiniDFSCluster cluster;
@BeforeEach
public void setupCluster() throws IOException {
conf = new Configuration();
conf.setInt(DFSConfigKeys.DFS_HA_LOGROLL_PERIOD_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
HAUtil.setAllowStandbyReads(conf, true);
MiniDFSNNTopology topology = MiniDFSNNTopology.simpleHATopology();
cluster = new MiniDFSCluster.Builder(conf)
.nnTopology(topology)
.numDataNodes(0)
.build();
cluster.waitActive();
shutdownClusterAndRemoveSharedEditsDir();
}
@AfterEach
public void shutdownCluster() throws IOException {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
private void shutdownClusterAndRemoveSharedEditsDir() throws IOException {
cluster.shutdownNameNode(0);
cluster.shutdownNameNode(1);
File sharedEditsDir = new File(cluster.getSharedEditsDir(0, 1));
assertTrue(FileUtil.fullyDelete(sharedEditsDir));
}
private void assertCannotStartNameNodes() {
// Make sure we can't currently start either NN.
try {
cluster.restartNameNode(0, false);
fail("Should not have been able to start NN1 without shared dir");
} catch (IOException ioe) {
LOG.info("Got expected exception", ioe);
GenericTestUtils.assertExceptionContains(
"storage directory does not exist or is not accessible", ioe);
}
try {
cluster.restartNameNode(1, false);
fail("Should not have been able to start NN2 without shared dir");
} catch (IOException ioe) {
LOG.info("Got expected exception", ioe);
GenericTestUtils.assertExceptionContains(
"storage directory does not exist or is not accessible", ioe);
}
}
private void assertCanStartHaNameNodes(String pathSuffix)
throws ServiceFailedException, IOException, URISyntaxException,
InterruptedException {
// Now should be able to start both NNs. Pass "false" here so that we don't
// try to waitActive on all NNs, since the second NN doesn't exist yet.
cluster.restartNameNode(0, false);
cluster.restartNameNode(1, true);
// Make sure HA is working.
cluster.getNameNode(0).getRpcServer().transitionToActive(
new StateChangeRequestInfo(RequestSource.REQUEST_BY_USER));
FileSystem fs = null;
try {
Path newPath = new Path(TEST_PATH, pathSuffix);
fs = HATestUtil.configureFailoverFs(cluster, conf);
assertTrue(fs.mkdirs(newPath));
HATestUtil.waitForStandbyToCatchUp(cluster.getNameNode(0),
cluster.getNameNode(1));
assertTrue(NameNodeAdapter.getFileInfo(cluster.getNameNode(1),
newPath.toString(), false, false, false).isDirectory());
} finally {
if (fs != null) {
fs.close();
}
}
}
@Test
public void testInitializeSharedEdits() throws Exception {
assertCannotStartNameNodes();
// Initialize the shared edits dir.
assertFalse(NameNode.initializeSharedEdits(cluster.getConfiguration(0)));
assertCanStartHaNameNodes("1");
// Now that we've done a metadata operation, make sure that deleting and
// re-initializing the shared edits dir will let the standby still start.
shutdownClusterAndRemoveSharedEditsDir();
assertCannotStartNameNodes();
// Re-initialize the shared edits dir.
assertFalse(NameNode.initializeSharedEdits(cluster.getConfiguration(0)));
// Should *still* be able to start both NNs
assertCanStartHaNameNodes("2");
}
@Test
public void testFailWhenNoSharedEditsSpecified() throws Exception {
Configuration confNoShared = new Configuration(conf);
confNoShared.unset(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY);
assertFalse(NameNode.initializeSharedEdits(confNoShared, true));
}
@Test
public void testDontOverWriteExistingDir() throws IOException {
assertFalse(NameNode.initializeSharedEdits(conf, false));
assertTrue(NameNode.initializeSharedEdits(conf, false));
}
@Test
public void testInitializeSharedEditsConfiguresGenericConfKeys() throws IOException {
Configuration conf = new Configuration();
conf.set(DFSConfigKeys.DFS_NAMESERVICES, "ns1");
conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX,
"ns1"), "nn1,nn2");
conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
"ns1", "nn1"), "localhost:1234");
assertNull(conf.get(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY));
NameNode.initializeSharedEdits(conf);
assertNotNull(conf.get(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY));
}
}
| TestInitializeSharedEdits |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/text/translate/AggregateTranslator.java | {
"start": 1444,
"end": 2436
} | class ____ extends CharSequenceTranslator {
private final CharSequenceTranslator[] translators;
/**
* Specify the translators to be used at creation time.
*
* @param translators CharSequenceTranslator array to aggregate
*/
public AggregateTranslator(final CharSequenceTranslator... translators) {
this.translators = ArrayUtils.clone(translators);
}
/**
* The first translator to consume code points from the input is the 'winner'.
* Execution stops with the number of consumed code points being returned.
* {@inheritDoc}
*/
@Override
public int translate(final CharSequence input, final int index, final Writer out) throws IOException {
for (final CharSequenceTranslator translator : translators) {
final int consumed = translator.translate(input, index, out);
if (consumed != 0) {
return consumed;
}
}
return 0;
}
}
| AggregateTranslator |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java | {
"start": 4549,
"end": 4610
} | class ____ extends the StandardSocketFactory.
*/
static | that |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/vectors/AbstractIVFKnnVectorQuery.java | {
"start": 1931,
"end": 9132
} | class ____ extends Query implements QueryProfilerProvider {
static final TopDocs NO_RESULTS = TopDocsCollector.EMPTY_TOPDOCS;
protected final String field;
protected final float providedVisitRatio;
protected final int k;
protected final int numCands;
protected final Query filter;
protected int vectorOpsCount;
protected AbstractIVFKnnVectorQuery(String field, float visitRatio, int k, int numCands, Query filter) {
if (k < 1) {
throw new IllegalArgumentException("k must be at least 1, got: " + k);
}
if (visitRatio < 0.0f || visitRatio > 1.0f) {
throw new IllegalArgumentException("visitRatio must be between 0.0 and 1.0 (both inclusive), got: " + visitRatio);
}
if (numCands < k) {
throw new IllegalArgumentException("numCands must be at least k, got: " + numCands);
}
this.field = field;
this.providedVisitRatio = visitRatio;
this.k = k;
this.filter = filter;
this.numCands = numCands;
}
@Override
public void visit(QueryVisitor visitor) {
if (visitor.acceptField(field)) {
visitor.visitLeaf(this);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AbstractIVFKnnVectorQuery that = (AbstractIVFKnnVectorQuery) o;
return k == that.k
&& Objects.equals(field, that.field)
&& Objects.equals(filter, that.filter)
&& Objects.equals(providedVisitRatio, that.providedVisitRatio);
}
@Override
public int hashCode() {
return Objects.hash(field, k, filter, providedVisitRatio);
}
@Override
public Query rewrite(IndexSearcher indexSearcher) throws IOException {
vectorOpsCount = 0;
IndexReader reader = indexSearcher.getIndexReader();
final Weight filterWeight;
if (filter != null) {
BooleanQuery booleanQuery = new BooleanQuery.Builder().add(filter, BooleanClause.Occur.FILTER)
.add(new FieldExistsQuery(field), BooleanClause.Occur.FILTER)
.build();
Query rewritten = indexSearcher.rewrite(booleanQuery);
if (rewritten.getClass() == MatchNoDocsQuery.class) {
return rewritten;
}
filterWeight = indexSearcher.createWeight(rewritten, ScoreMode.COMPLETE_NO_SCORES, 1f);
} else {
filterWeight = null;
}
// we request numCands as we are using it as an approximation measure
// we need to ensure we are getting at least 2*k results to ensure we cover overspill duplicates
// TODO move the logic for automatically adjusting percentages to the query, so we can only pass
// 2k to the collector.
IVFCollectorManager knnCollectorManager = getKnnCollectorManager(Math.round(2f * k), indexSearcher);
TaskExecutor taskExecutor = indexSearcher.getTaskExecutor();
List<LeafReaderContext> leafReaderContexts = reader.leaves();
assert this instanceof IVFKnnFloatVectorQuery;
int totalVectors = 0;
for (LeafReaderContext leafReaderContext : leafReaderContexts) {
LeafReader leafReader = leafReaderContext.reader();
FloatVectorValues floatVectorValues = leafReader.getFloatVectorValues(field);
if (floatVectorValues != null) {
totalVectors += floatVectorValues.size();
}
}
final float visitRatio;
if (providedVisitRatio == 0.0f) {
// dynamically set the percentage
float expected = (float) Math.round(
Math.log10(totalVectors) * Math.log10(totalVectors) * (Math.min(10_000, Math.max(numCands, 5 * k)))
);
visitRatio = expected / totalVectors;
} else {
visitRatio = providedVisitRatio;
}
List<Callable<TopDocs>> tasks = new ArrayList<>(leafReaderContexts.size());
for (LeafReaderContext context : leafReaderContexts) {
tasks.add(() -> searchLeaf(context, filterWeight, knnCollectorManager, visitRatio));
}
TopDocs[] perLeafResults = taskExecutor.invokeAll(tasks).toArray(TopDocs[]::new);
// Merge sort the results
TopDocs topK = TopDocs.merge(k, perLeafResults);
vectorOpsCount = (int) topK.totalHits.value();
if (topK.scoreDocs.length == 0) {
return new MatchNoDocsQuery();
}
return new KnnScoreDocQuery(topK.scoreDocs, reader);
}
private TopDocs searchLeaf(LeafReaderContext ctx, Weight filterWeight, IVFCollectorManager knnCollectorManager, float visitRatio)
throws IOException {
TopDocs results = getLeafResults(ctx, filterWeight, knnCollectorManager, visitRatio);
IntHashSet dedup = new IntHashSet(results.scoreDocs.length * 4 / 3);
int deduplicateCount = 0;
for (ScoreDoc scoreDoc : results.scoreDocs) {
if (dedup.add(scoreDoc.doc)) {
deduplicateCount++;
}
}
ScoreDoc[] deduplicatedScoreDocs = new ScoreDoc[deduplicateCount];
dedup.clear();
int index = 0;
for (ScoreDoc scoreDoc : results.scoreDocs) {
if (dedup.add(scoreDoc.doc)) {
scoreDoc.doc += ctx.docBase;
deduplicatedScoreDocs[index++] = scoreDoc;
}
}
return new TopDocs(results.totalHits, deduplicatedScoreDocs);
}
TopDocs getLeafResults(LeafReaderContext ctx, Weight filterWeight, IVFCollectorManager knnCollectorManager, float visitRatio)
throws IOException {
final LeafReader reader = ctx.reader();
final Bits liveDocs = reader.getLiveDocs();
final int maxDoc = reader.maxDoc();
if (filterWeight == null) {
return approximateSearch(
ctx,
liveDocs == null ? ESAcceptDocs.ESAcceptDocsAll.INSTANCE : new ESAcceptDocs.BitsAcceptDocs(liveDocs, maxDoc),
Integer.MAX_VALUE,
knnCollectorManager,
visitRatio
);
}
ScorerSupplier supplier = filterWeight.scorerSupplier(ctx);
if (supplier == null) {
return TopDocsCollector.EMPTY_TOPDOCS;
}
return approximateSearch(
ctx,
new ESAcceptDocs.ScorerSupplierAcceptDocs(supplier, liveDocs, maxDoc),
Integer.MAX_VALUE,
knnCollectorManager,
visitRatio
);
}
abstract TopDocs approximateSearch(
LeafReaderContext context,
AcceptDocs acceptDocs,
int visitedLimit,
IVFCollectorManager knnCollectorManager,
float visitRatio
) throws IOException;
protected IVFCollectorManager getKnnCollectorManager(int k, IndexSearcher searcher) {
return new IVFCollectorManager(k, searcher);
}
@Override
public final void profile(QueryProfiler queryProfiler) {
queryProfiler.addVectorOpsCount(vectorOpsCount);
}
static | AbstractIVFKnnVectorQuery |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/CoLocationConstraint.java | {
"start": 1486,
"end": 2385
} | class ____ {
private final AbstractID coLocationGroupId;
private final int constraintIndex;
CoLocationConstraint(final AbstractID coLocationGroupId, final int constraintIndex) {
this.coLocationGroupId = checkNotNull(coLocationGroupId);
this.constraintIndex = constraintIndex;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
} else if (obj != null && obj.getClass() == getClass()) {
CoLocationConstraint that = (CoLocationConstraint) obj;
return Objects.equals(that.coLocationGroupId, this.coLocationGroupId)
&& that.constraintIndex == this.constraintIndex;
} else {
return false;
}
}
@Override
public int hashCode() {
return 31 * coLocationGroupId.hashCode() + constraintIndex;
}
}
| CoLocationConstraint |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/array/BeanToArrayTest2.java | {
"start": 233,
"end": 1323
} | class ____ extends TestCase {
public void test_bool() throws Exception {
Model model = JSON.parseObject("[true,false]", Model.class, Feature.SupportArrayToBean);
Assert.assertEquals(true, model.v1);
Assert.assertEquals(false, model.v2);
}
public void test_bool_space() throws Exception {
Model model = JSON.parseObject("[true ,false ]", Model.class, Feature.SupportArrayToBean);
Assert.assertEquals(true, model.v1);
Assert.assertEquals(false, model.v2);
}
public void test_bool_num() throws Exception {
Model model = JSON.parseObject("[1,0]", Model.class, Feature.SupportArrayToBean);
Assert.assertEquals(true, model.v1);
Assert.assertEquals(false, model.v2);
}
public void test_bool_error() throws Exception {
Exception error = null;
try {
JSON.parseObject("[t,0]", Model.class, Feature.SupportArrayToBean);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public static | BeanToArrayTest2 |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/heuristic/GND.java | {
"start": 890,
"end": 3414
} | class ____ extends NXYSignificanceHeuristic {
public static final String NAME = "gnd";
public static final ConstructingObjectParser<GND, Void> PARSER = new ConstructingObjectParser<>(NAME, args -> {
boolean backgroundIsSuperset = args[0] == null ? true : (boolean) args[0];
return new GND(backgroundIsSuperset);
});
static {
PARSER.declareBoolean(optionalConstructorArg(), BACKGROUND_IS_SUPERSET);
}
public GND(boolean backgroundIsSuperset) {
super(true, backgroundIsSuperset);
}
/**
* Read from a stream.
*/
public GND(StreamInput in) throws IOException {
super(true, in.readBoolean());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(backgroundIsSuperset);
}
@Override
public boolean equals(Object other) {
return other instanceof GND && super.equals(other);
}
@Override
public int hashCode() {
int result = NAME.hashCode();
result = 31 * result + super.hashCode();
return result;
}
/**
* Calculates Google Normalized Distance, as described in "The Google Similarity Distance", Cilibrasi and Vitanyi, 2007
* link: http://arxiv.org/pdf/cs/0412098v3.pdf
*/
@Override
public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
Frequencies frequencies = computeNxys(subsetFreq, subsetSize, supersetFreq, supersetSize, "GND");
double fx = frequencies.N1_;
double fy = frequencies.N_1;
double fxy = frequencies.N11;
double N = frequencies.N;
if (fxy == 0) {
// no co-occurrence
return 0.0;
}
if ((fx == fy) && (fx == fxy)) {
// perfect co-occurrence
return 1.0;
}
double score = (Math.max(Math.log(fx), Math.log(fy)) - Math.log(fxy)) / (Math.log(N) - Math.min(Math.log(fx), Math.log(fy)));
// we must invert the order of terms because GND scores relevant terms low
score = Math.exp(-1.0d * score);
return score;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(BACKGROUND_IS_SUPERSET.getPreferredName(), backgroundIsSuperset);
builder.endObject();
return builder;
}
}
| GND |
java | apache__spark | mllib/src/main/scala/org/apache/spark/mllib/JavaPackage.java | {
"start": 1153,
"end": 1202
} | class ____ {
private JavaPackage() {}
}
| JavaPackage |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/collection/DetachedMultipleCollectionChangeTest.java | {
"start": 11503,
"end": 13739
} | class ____ {
private final String name;
private final Integer revId;
private final RevisionType revType;
private final String joinColumnName;
private final Long joinColumnId;
private final String inverseJoinColumnName;
private final Long inverseJoinColumnId;
private AuditJoinTableInfo(
String name, SequenceIdRevisionEntity rev,
RevisionType revType, String joinColumnName, Long joinColumnId,
String inverseJoinColumnName, Long inverseJoinColumnId) {
this.name = name;
this.revId = rev.getId();
this.revType = revType;
this.joinColumnName = joinColumnName;
this.joinColumnId = joinColumnId;
this.inverseJoinColumnName = inverseJoinColumnName;
this.inverseJoinColumnId = inverseJoinColumnId;
}
@Override
public String toString() {
return "AuditJoinTableInfo [name=" + name + ", revId=" + revId
+ ", revType=" + revType + ", " + joinColumnName + "="
+ joinColumnId + ", " + inverseJoinColumnName + "="
+ inverseJoinColumnId + "]";
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof AuditJoinTableInfo) ) {
return false;
}
AuditJoinTableInfo that = (AuditJoinTableInfo) o;
if ( inverseJoinColumnId != null ?
!inverseJoinColumnId.equals( that.inverseJoinColumnId ) :
that.inverseJoinColumnId != null ) {
return false;
}
if ( joinColumnId != null ? !joinColumnId.equals( that.joinColumnId ) : that.joinColumnId != null ) {
return false;
}
if ( name != null ? !name.equals( that.name ) : that.name != null ) {
return false;
}
if ( revId != null ? !revId.equals( that.revId ) : that.revId != null ) {
return false;
}
if ( revType != that.revType ) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = name != null ? name.hashCode() : 0;
result = 31 * result + (revId != null ? revId.hashCode() : 0);
result = 31 * result + (revType != null ? revType.hashCode() : 0);
result = 31 * result + (joinColumnId != null ? joinColumnId.hashCode() : 0);
result = 31 * result + (inverseJoinColumnId != null ? inverseJoinColumnId.hashCode() : 0);
return result;
}
}
}
| AuditJoinTableInfo |
java | elastic__elasticsearch | modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java | {
"start": 31031,
"end": 31127
} | interface ____ {
String getProperty(String key, String defaultValue);
}
}
| JvmEnvironment |
java | apache__avro | lang/java/integration-test/test-custom-conversions/src/main/java/org/apache/avro/codegentest/FixedSizeStringLogicalType.java | {
"start": 961,
"end": 2155
} | class ____ extends LogicalType {
private static final String MIN_LENGTH = "minLength";
private static final String MAX_LENGTH = "maxLength";
private final Integer minLength;
private final Integer maxLength;
public FixedSizeStringLogicalType() {
super(FixedSizeStringFactory.NAME);
this.minLength = Integer.MIN_VALUE;
this.maxLength = Integer.MAX_VALUE;
}
public FixedSizeStringLogicalType(Schema schema) {
super(FixedSizeStringFactory.NAME);
this.minLength = getInteger(schema, MIN_LENGTH);
this.maxLength = getInteger(schema, MAX_LENGTH);
}
public Integer getMinLength() {
return minLength;
}
public Integer getMaxLength() {
return maxLength;
}
private int getInteger(Schema schema, String name) {
Object value = schema.getObjectProp(name);
if (isNull(value)) {
throw new IllegalArgumentException(String.format("Invalid %s: missing %s", FixedSizeStringFactory.NAME, name));
}
if (value instanceof Integer) {
return (int) value;
}
throw new IllegalArgumentException(
String.format("Expected integer %s but get %s", name, value.getClass().getSimpleName()));
}
}
| FixedSizeStringLogicalType |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsExactlyInAnyOrder_Test.java | {
"start": 1894,
"end": 8615
} | class ____ extends IterablesBaseTest {
@Test
void should_pass_if_actual_contains_exactly_given_values() {
iterables.assertContainsExactlyInAnyOrder(someInfo(), actual, array("Luke", "Yoda", "Leia"));
}
@Test
void should_pass_if_actual_contains_given_values_exactly_with_null_elements() {
iterables.assertContainsExactlyInAnyOrder(someInfo(), actual, array("Leia", "Yoda", "Luke"));
actual.add(null);
iterables.assertContainsExactlyInAnyOrder(someInfo(), actual, array("Leia", null, "Yoda", "Luke"));
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual.clear();
iterables.assertContainsExactlyInAnyOrder(someInfo(), actual, array());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> iterables.assertContainsExactlyInAnyOrder(someInfo(), actual,
emptyArray()));
}
@Test
void should_fail_if_expected_is_null() {
assertThatNullPointerException().isThrownBy(() -> iterables.assertContainsExactlyInAnyOrder(someInfo(), emptyList(),
null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> iterables.assertContainsExactlyInAnyOrder(someInfo(), null,
emptyArray()))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_contain_given_values_exactly() {
AssertionInfo info = someInfo();
Object[] expected = { "Luke", "Yoda", "Han" };
Throwable error = catchThrowable(() -> iterables.assertContainsExactlyInAnyOrder(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info,
shouldContainExactlyInAnyOrder(actual, expected, newArrayList("Han"), newArrayList("Leia"),
StandardComparisonStrategy.instance()));
}
@Test
void should_pass_if_actual_contains_all_given_values_in_different_order() {
AssertionInfo info = someInfo();
Object[] expected = { "Luke", "Leia", "Yoda" };
iterables.assertContainsExactlyInAnyOrder(info, actual, expected);
}
@Test
void should_fail_if_actual_contains_duplicates_and_expected_does_not() {
AssertionInfo info = someInfo();
actual = newArrayList("Luke", "Leia", "Luke");
Object[] expected = { "Luke", "Leia" };
Throwable error = catchThrowable(() -> iterables.assertContainsExactlyInAnyOrder(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info,
shouldContainExactlyInAnyOrder(actual, expected, emptyList(), newArrayList("Luke"),
StandardComparisonStrategy.instance()));
}
@Test
void should_fail_if_expected_contains_duplicates_and_actual_does_not() {
AssertionInfo info = someInfo();
actual = newArrayList("Luke", "Leia");
Object[] expected = { "Luke", "Leia", "Luke" };
Throwable error = catchThrowable(() -> iterables.assertContainsExactlyInAnyOrder(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info,
shouldContainExactlyInAnyOrder(actual, expected, newArrayList("Luke"), emptyList(),
StandardComparisonStrategy.instance()));
}
// ------------------------------------------------------------------------------------------------------------------
// tests using a custom comparison strategy
// ------------------------------------------------------------------------------------------------------------------
@Test
void should_pass_if_actual_contains_given_values_exactly_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsExactlyInAnyOrder(someInfo(), actual,
array("LUKE", "YODA", "Leia"));
}
@Test
void should_fail_if_actual_does_not_contain_given_values_exactly_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Object[] expected = { "Luke", "Yoda", "Han" };
Throwable error = catchThrowable(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsExactlyInAnyOrder(info,
actual,
expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainExactlyInAnyOrder(actual, expected, newArrayList("Han"),
newArrayList("Leia"), comparisonStrategy));
}
@Test
void should_pass_if_actual_contains_all_given_values_in_different_order_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Object[] expected = { "Luke", "Leia", "Yoda" };
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsExactlyInAnyOrder(info, actual, expected);
}
@Test
void should_fail_if_actual_contains_all_given_values_but_size_differ_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
actual = newArrayList("Luke", "Leia", "Luke");
Object[] expected = { "LUKE", "Leia" };
Throwable error = catchThrowable(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsExactlyInAnyOrder(info,
actual,
expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainExactlyInAnyOrder(actual, expected, emptyList(), newArrayList("Luke"),
comparisonStrategy));
}
}
| Iterables_assertContainsExactlyInAnyOrder_Test |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/OAuthBearerRefreshingLogin.java | {
"start": 3369,
"end": 3819
} | class ____ implements Login {
private static final Logger log = LoggerFactory.getLogger(OAuthBearerRefreshingLogin.class);
private ExpiringCredentialRefreshingLogin expiringCredentialRefreshingLogin = null;
@Override
public void configure(Map<String, ?> configs, String contextName, Configuration configuration,
AuthenticateCallbackHandler loginCallbackHandler) {
/*
* Specify this | OAuthBearerRefreshingLogin |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/TestFooRequestFilter.java | {
"start": 437,
"end": 853
} | class ____ implements ContainerRequestFilter {
@Context
HttpServerRequest request;
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
String previousFilterHeaderValue = requestContext.getHeaders().getFirst("filter-request");
requestContext.getHeaders().putSingle("filter-request", previousFilterHeaderValue + "-foo");
}
}
| TestFooRequestFilter |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java | {
"start": 674,
"end": 998
} | class ____ extends NumberFieldBlockLoaderTestCase<Double> {
public DoubleFieldBlockLoaderTests(Params params) {
super(FieldType.DOUBLE, params);
}
@Override
protected Double convert(Number value, Map<String, Object> fieldMapping) {
return value.doubleValue();
}
}
| DoubleFieldBlockLoaderTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java | {
"start": 25769,
"end": 36166
} | class ____ {
private final Map<String, FieldInfo> dvSuffixes = new HashMap<>();
private final Map<String, FieldInfo> postingsSuffixes = new HashMap<>();
private final Map<String, FieldInfo> vectorSuffixes = new HashMap<>();
FieldLookup(FieldInfos fieldInfos) {
for (FieldInfo field : fieldInfos) {
Map<String, String> attributes = field.attributes();
if (attributes != null) {
String postingsSuffix = attributes.get(PerFieldPostingsFormat.PER_FIELD_SUFFIX_KEY);
if (postingsSuffix != null) {
postingsSuffixes.put(postingsSuffix, field);
}
String dvSuffix = attributes.get(PerFieldDocValuesFormat.PER_FIELD_SUFFIX_KEY);
if (dvSuffix != null) {
dvSuffixes.put(dvSuffix, field);
}
String vectorSuffix = attributes.get(PerFieldKnnVectorsFormat.PER_FIELD_SUFFIX_KEY);
if (vectorSuffix != null) {
vectorSuffixes.put(vectorSuffix, field);
}
}
}
}
/**
* Returns the codec suffix from this file name, or null if there is no suffix.
*/
private static String parseSuffix(String filename) {
if (filename.startsWith("_") == false) {
return null;
}
String[] parts = IndexFileNames.stripExtension(filename).substring(1).split("_");
// 4 cases:
// segment.ext
// segment_gen.ext
// segment_codec_suffix.ext
// segment_gen_codec_suffix.ext
if (parts.length == 3) {
return parts[2];
} else if (parts.length == 4) {
return parts[3];
} else {
return null;
}
}
String getDocValuesField(String fileName) {
final String suffix = parseSuffix(fileName);
final FieldInfo field = dvSuffixes.get(suffix);
assertThat("dvSuffix[" + dvSuffixes + "] fileName[" + fileName + "]", field, notNullValue());
return field.name;
}
String getPostingsField(String fileName) {
final String suffix = parseSuffix(fileName);
final FieldInfo field = postingsSuffixes.get(suffix);
assertThat("postingsSuffixes[" + postingsSuffixes + "] fileName[" + fileName + "]", field, notNullValue());
return field.name;
}
String getVectorsField(String fileName) {
final String suffix = parseSuffix(fileName);
final FieldInfo field = vectorSuffixes.get(suffix);
assertThat("vectorSuffixes[" + vectorSuffixes + "] fileName[" + fileName + "]", field, notNullValue());
return field.name;
}
}
static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Directory dst) throws IOException {
try (DirectoryReader reader = DirectoryReader.open(source)) {
IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
.setUseCompoundFile(randomBoolean())
.setCodec(new Lucene103Codec(mode.mode()) {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
return new ES812PostingsFormat();
}
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
return new Lucene90DocValuesFormat();
}
@Override
public KnnVectorsFormat getKnnVectorsFormatForField(String field) {
return new Lucene99HnswVectorsFormat();
}
@Override
public String toString() {
return super.toString();
}
})
.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
try (IndexWriter writer = new IndexWriter(dst, config)) {
for (LeafReaderContext leaf : reader.leaves()) {
final SegmentReader segmentReader = Lucene.segmentReader(leaf.reader());
writer.addIndexes(segmentReader);
}
writer.commit();
}
}
}
static IndexDiskUsageStats collectPerFieldStats(Directory directory) throws IOException {
try (DirectoryReader reader = DirectoryReader.open(directory)) {
final IndexDiskUsageStats stats = new IndexDiskUsageStats(IndexDiskUsageAnalyzer.getIndexSize(lastCommit(directory)));
for (LeafReaderContext leaf : reader.leaves()) {
collectPerFieldStats(Lucene.segmentReader(leaf.reader()), stats);
}
return stats;
}
}
static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats) throws IOException {
final SegmentInfo sis = reader.getSegmentInfo().info;
final String[] files;
final Directory directory;
if (sis.getUseCompoundFile()) {
directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis);
files = directory.listAll();
} else {
directory = reader.directory();
files = sis.files().toArray(new String[0]);
}
final FieldLookup fieldLookup = new FieldLookup(reader.getFieldInfos());
try {
for (String file : files) {
final LuceneFilesExtensions ext = LuceneFilesExtensions.fromFile(file);
if (ext == null) {
continue;
}
final long bytes = directory.fileLength(file);
switch (ext) {
case DVD, DVM -> stats.addDocValues(fieldLookup.getDocValuesField(file), bytes);
case TIM, TIP, TMD, DOC, POS, PAY -> stats.addInvertedIndex(fieldLookup.getPostingsField(file), bytes);
case KDI, KDD, KDM, DIM -> stats.addPoints("_all_points_fields", bytes);
case FDT, FDX, FDM ->
// We don't have per field Codec for stored, vector, and norms field
stats.addStoredField("_all_stored_fields", bytes);
case TVX, TVD -> stats.addTermVectors("_all_vectors_fields", bytes);
case NVD, NVM -> stats.addNorms("_all_norms_fields", bytes);
case VEM, VEMF, VEC, VEX, VEQ, VEMQ -> stats.addKnnVectors(fieldLookup.getVectorsField(file), bytes);
}
}
} finally {
if (directory != reader.directory()) {
IOUtils.close(directory);
}
}
}
private static void assertStats(IndexDiskUsageStats actualStats, IndexDiskUsageStats perFieldStats) {
final List<String> fields = actualStats.getFields().keySet().stream().sorted().toList();
for (String field : fields) {
IndexDiskUsageStats.PerFieldDiskUsage actualField = actualStats.getFields().get(field);
IndexDiskUsageStats.PerFieldDiskUsage expectedField = perFieldStats.getFields().get(field);
if (expectedField == null) {
assertThat(actualField.getDocValuesBytes(), equalTo(0L));
assertThat(actualField.getInvertedIndexBytes(), equalTo(0L));
continue;
}
// Allow difference up to 2.5KB as we can load up to 256 long values in the table for numeric docValues
assertFieldStats(field, "doc values", actualField.getDocValuesBytes(), expectedField.getDocValuesBytes(), 0.01, 2560);
assertFieldStats(
field,
"inverted index",
actualField.getInvertedIndexBytes(),
expectedField.getInvertedIndexBytes(),
0.01,
2048
);
// Allow difference of a file block size for knn vectors
// we get knn data usage from getOffHeapByteSize but when written on disk it can be rounded to the next block size
assertFieldStats(field, "knn vectors", actualField.getKnnVectorsBytes(), expectedField.getKnnVectorsBytes(), 0.01, 4096);
}
// We are not able to collect per field stats for stored, vector, points, and norms
IndexDiskUsageStats.PerFieldDiskUsage actualTotal = actualStats.total();
IndexDiskUsageStats.PerFieldDiskUsage expectedTotal = perFieldStats.total();
assertFieldStats("total", "stored fields", actualTotal.getStoredFieldBytes(), expectedTotal.getStoredFieldBytes(), 0.01, 2048);
assertFieldStats("total", "points", actualTotal.getPointsBytes(), expectedTotal.getPointsBytes(), 0.01, 2048);
assertFieldStats("total", "term vectors", actualTotal.getTermVectorsBytes(), expectedTotal.getTermVectorsBytes(), 0.01, 2048);
assertFieldStats("total", "norms", actualTotal.getNormsBytes(), expectedTotal.getNormsBytes(), 0.01, 2048);
}
private static void assertFieldStats(
String fieldName,
String fieldType,
long actualBytes,
long expectedBytes,
double allowErrorPercentage,
long allowErrorBytes
) {
long margin = allowErrorBytes;
if (allowErrorPercentage * actualBytes > allowErrorBytes) {
margin = (long) (allowErrorPercentage * actualBytes);
}
final boolean inRange = expectedBytes - margin <= actualBytes && actualBytes <= expectedBytes + margin;
if (inRange == false) {
throw new AssertionError("field=" + fieldName + " type=" + fieldType + " actual=" + actualBytes + " expected=" + expectedBytes);
}
}
private static IndexCommit lastCommit(Directory directory) throws IOException {
final List<IndexCommit> commits = DirectoryReader.listCommits(directory);
assertThat(commits, not(empty()));
return commits.get(commits.size() - 1);
}
private static ShardId testShardId() {
return new ShardId("test_index", "_na_", randomIntBetween(0, 3));
}
private static | FieldLookup |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/mappingcontrol/ComplexMapper.java | {
"start": 308,
"end": 595
} | interface ____ {
ComplexMapper INSTANCE = Mappers.getMapper( ComplexMapper.class );
@Mapping(target = "beerCount", source = "shelve")
Fridge map(FridgeDTO in);
default String toBeerCount(ShelveDTO in) {
return in.getCoolBeer().getBeerCount();
}
}
| ComplexMapper |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/Type.java | {
"start": 1952,
"end": 16734
} | class ____ {
/** The sort of the {@code void} type. See {@link #getSort}. */
public static final int VOID = 0;
/** The sort of the {@code boolean} type. See {@link #getSort}. */
public static final int BOOLEAN = 1;
/** The sort of the {@code char} type. See {@link #getSort}. */
public static final int CHAR = 2;
/** The sort of the {@code byte} type. See {@link #getSort}. */
public static final int BYTE = 3;
/** The sort of the {@code short} type. See {@link #getSort}. */
public static final int SHORT = 4;
/** The sort of the {@code int} type. See {@link #getSort}. */
public static final int INT = 5;
/** The sort of the {@code float} type. See {@link #getSort}. */
public static final int FLOAT = 6;
/** The sort of the {@code long} type. See {@link #getSort}. */
public static final int LONG = 7;
/** The sort of the {@code double} type. See {@link #getSort}. */
public static final int DOUBLE = 8;
/** The sort of array reference types. See {@link #getSort}. */
public static final int ARRAY = 9;
/** The sort of object reference types. See {@link #getSort}. */
public static final int OBJECT = 10;
/** The sort of method types. See {@link #getSort}. */
public static final int METHOD = 11;
/** The (private) sort of object reference types represented with an internal name. */
private static final int INTERNAL = 12;
/** The descriptors of the primitive types. */
private static final String PRIMITIVE_DESCRIPTORS = "VZCBSIFJD";
/** The {@code void} type. */
public static final Type VOID_TYPE = new Type(VOID, PRIMITIVE_DESCRIPTORS, VOID, VOID + 1);
/** The {@code boolean} type. */
public static final Type BOOLEAN_TYPE =
new Type(BOOLEAN, PRIMITIVE_DESCRIPTORS, BOOLEAN, BOOLEAN + 1);
/** The {@code char} type. */
public static final Type CHAR_TYPE = new Type(CHAR, PRIMITIVE_DESCRIPTORS, CHAR, CHAR + 1);
/** The {@code byte} type. */
public static final Type BYTE_TYPE = new Type(BYTE, PRIMITIVE_DESCRIPTORS, BYTE, BYTE + 1);
/** The {@code short} type. */
public static final Type SHORT_TYPE = new Type(SHORT, PRIMITIVE_DESCRIPTORS, SHORT, SHORT + 1);
/** The {@code int} type. */
public static final Type INT_TYPE = new Type(INT, PRIMITIVE_DESCRIPTORS, INT, INT + 1);
/** The {@code float} type. */
public static final Type FLOAT_TYPE = new Type(FLOAT, PRIMITIVE_DESCRIPTORS, FLOAT, FLOAT + 1);
/** The {@code long} type. */
public static final Type LONG_TYPE = new Type(LONG, PRIMITIVE_DESCRIPTORS, LONG, LONG + 1);
/** The {@code double} type. */
public static final Type DOUBLE_TYPE =
new Type(DOUBLE, PRIMITIVE_DESCRIPTORS, DOUBLE, DOUBLE + 1);
// -----------------------------------------------------------------------------------------------
// Fields
// -----------------------------------------------------------------------------------------------
/**
* The sort of this type. Either {@link #VOID}, {@link #BOOLEAN}, {@link #CHAR}, {@link #BYTE},
* {@link #SHORT}, {@link #INT}, {@link #FLOAT}, {@link #LONG}, {@link #DOUBLE}, {@link #ARRAY},
* {@link #OBJECT}, {@link #METHOD} or {@link #INTERNAL}.
*/
private final int sort;
/**
* A buffer containing the value of this field or method type. This value is an internal name for
* {@link #OBJECT} and {@link #INTERNAL} types, and a field or method descriptor in the other
* cases.
*
* <p>For {@link #OBJECT} types, this field also contains the descriptor: the characters in
* [{@link #valueBegin},{@link #valueEnd}) contain the internal name, and those in [{@link
* #valueBegin} - 1, {@link #valueEnd} + 1) contain the descriptor.
*/
private final String valueBuffer;
/**
* The beginning index, inclusive, of the value of this Java field or method type in {@link
* #valueBuffer}. This value is an internal name for {@link #OBJECT} and {@link #INTERNAL} types,
* and a field or method descriptor in the other cases.
*/
private final int valueBegin;
/**
* The end index, exclusive, of the value of this Java field or method type in {@link
* #valueBuffer}. This value is an internal name for {@link #OBJECT} and {@link #INTERNAL} types,
* and a field or method descriptor in the other cases.
*/
private final int valueEnd;
/**
* Constructs a reference type.
*
* @param sort the sort of this type, see {@link #sort}.
* @param valueBuffer a buffer containing the value of this field or method type.
* @param valueBegin the beginning index, inclusive, of the value of this field or method type in
* valueBuffer.
* @param valueEnd the end index, exclusive, of the value of this field or method type in
* valueBuffer.
*/
private Type(final int sort, final String valueBuffer, final int valueBegin, final int valueEnd) {
this.sort = sort;
this.valueBuffer = valueBuffer;
this.valueBegin = valueBegin;
this.valueEnd = valueEnd;
}
// -----------------------------------------------------------------------------------------------
// Methods to get Type(s) from a descriptor, a reflected Method or Constructor, other types, etc.
// -----------------------------------------------------------------------------------------------
/**
* Returns the {@link Type} corresponding to the given type descriptor.
*
* @param typeDescriptor a field or method type descriptor.
* @return the {@link Type} corresponding to the given type descriptor.
*/
public static Type getType(final String typeDescriptor) {
return getTypeInternal(typeDescriptor, 0, typeDescriptor.length());
}
/**
* Returns the {@link Type} corresponding to the given class.
*
* @param clazz a class.
* @return the {@link Type} corresponding to the given class.
*/
public static Type getType(final Class<?> clazz) {
if (clazz.isPrimitive()) {
if (clazz == Integer.TYPE) {
return INT_TYPE;
} else if (clazz == Void.TYPE) {
return VOID_TYPE;
} else if (clazz == Boolean.TYPE) {
return BOOLEAN_TYPE;
} else if (clazz == Byte.TYPE) {
return BYTE_TYPE;
} else if (clazz == Character.TYPE) {
return CHAR_TYPE;
} else if (clazz == Short.TYPE) {
return SHORT_TYPE;
} else if (clazz == Double.TYPE) {
return DOUBLE_TYPE;
} else if (clazz == Float.TYPE) {
return FLOAT_TYPE;
} else if (clazz == Long.TYPE) {
return LONG_TYPE;
} else {
throw new AssertionError();
}
} else {
return getType(getDescriptor(clazz));
}
}
/**
* Returns the method {@link Type} corresponding to the given constructor.
*
* @param constructor a {@link Constructor} object.
* @return the method {@link Type} corresponding to the given constructor.
*/
public static Type getType(final Constructor<?> constructor) {
return getType(getConstructorDescriptor(constructor));
}
/**
* Returns the method {@link Type} corresponding to the given method.
*
* @param method a {@link Method} object.
* @return the method {@link Type} corresponding to the given method.
*/
public static Type getType(final Method method) {
return getType(getMethodDescriptor(method));
}
/**
* Returns the type of the elements of this array type. This method should only be used for an
* array type.
*
* @return Returns the type of the elements of this array type.
*/
public Type getElementType() {
final int numDimensions = getDimensions();
return getTypeInternal(valueBuffer, valueBegin + numDimensions, valueEnd);
}
/**
* Returns the {@link Type} corresponding to the given internal name.
*
* @param internalName an internal name (see {@link Type#getInternalName()}).
* @return the {@link Type} corresponding to the given internal name.
*/
public static Type getObjectType(final String internalName) {
return new Type(
internalName.charAt(0) == '[' ? ARRAY : INTERNAL, internalName, 0, internalName.length());
}
/**
* Returns the {@link Type} corresponding to the given method descriptor. Equivalent to <code>
* Type.getType(methodDescriptor)</code>.
*
* @param methodDescriptor a method descriptor.
* @return the {@link Type} corresponding to the given method descriptor.
*/
public static Type getMethodType(final String methodDescriptor) {
return new Type(METHOD, methodDescriptor, 0, methodDescriptor.length());
}
/**
* Returns the method {@link Type} corresponding to the given argument and return types.
*
* @param returnType the return type of the method.
* @param argumentTypes the argument types of the method.
* @return the method {@link Type} corresponding to the given argument and return types.
*/
public static Type getMethodType(final Type returnType, final Type... argumentTypes) {
return getType(getMethodDescriptor(returnType, argumentTypes));
}
/**
* Returns the argument types of methods of this type. This method should only be used for method
* types.
*
* @return the argument types of methods of this type.
*/
public Type[] getArgumentTypes() {
return getArgumentTypes(getDescriptor());
}
/**
* Returns the {@link Type} values corresponding to the argument types of the given method
* descriptor.
*
* @param methodDescriptor a method descriptor.
* @return the {@link Type} values corresponding to the argument types of the given method
* descriptor.
*/
public static Type[] getArgumentTypes(final String methodDescriptor) {
// First step: compute the number of argument types in methodDescriptor.
int numArgumentTypes = getArgumentCount(methodDescriptor);
// Second step: create a Type instance for each argument type.
Type[] argumentTypes = new Type[numArgumentTypes];
// Skip the first character, which is always a '('.
int currentOffset = 1;
// Parse and create the argument types, one at each loop iteration.
int currentArgumentTypeIndex = 0;
while (methodDescriptor.charAt(currentOffset) != ')') {
final int currentArgumentTypeOffset = currentOffset;
while (methodDescriptor.charAt(currentOffset) == '[') {
currentOffset++;
}
if (methodDescriptor.charAt(currentOffset++) == 'L') {
// Skip the argument descriptor content.
int semiColumnOffset = methodDescriptor.indexOf(';', currentOffset);
currentOffset = Math.max(currentOffset, semiColumnOffset + 1);
}
argumentTypes[currentArgumentTypeIndex++] =
getTypeInternal(methodDescriptor, currentArgumentTypeOffset, currentOffset);
}
return argumentTypes;
}
/**
* Returns the {@link Type} values corresponding to the argument types of the given method.
*
* @param method a method.
* @return the {@link Type} values corresponding to the argument types of the given method.
*/
public static Type[] getArgumentTypes(final Method method) {
Class<?>[] classes = method.getParameterTypes();
Type[] types = new Type[classes.length];
for (int i = classes.length - 1; i >= 0; --i) {
types[i] = getType(classes[i]);
}
return types;
}
/**
* Returns the return type of methods of this type. This method should only be used for method
* types.
*
* @return the return type of methods of this type.
*/
public Type getReturnType() {
return getReturnType(getDescriptor());
}
/**
* Returns the {@link Type} corresponding to the return type of the given method descriptor.
*
* @param methodDescriptor a method descriptor.
* @return the {@link Type} corresponding to the return type of the given method descriptor.
*/
public static Type getReturnType(final String methodDescriptor) {
return getTypeInternal(
methodDescriptor, getReturnTypeOffset(methodDescriptor), methodDescriptor.length());
}
/**
* Returns the {@link Type} corresponding to the return type of the given method.
*
* @param method a method.
* @return the {@link Type} corresponding to the return type of the given method.
*/
public static Type getReturnType(final Method method) {
return getType(method.getReturnType());
}
/**
* Returns the start index of the return type of the given method descriptor.
*
* @param methodDescriptor a method descriptor.
* @return the start index of the return type of the given method descriptor.
*/
static int getReturnTypeOffset(final String methodDescriptor) {
// Skip the first character, which is always a '('.
int currentOffset = 1;
// Skip the argument types, one at a each loop iteration.
while (methodDescriptor.charAt(currentOffset) != ')') {
while (methodDescriptor.charAt(currentOffset) == '[') {
currentOffset++;
}
if (methodDescriptor.charAt(currentOffset++) == 'L') {
// Skip the argument descriptor content.
int semiColumnOffset = methodDescriptor.indexOf(';', currentOffset);
currentOffset = Math.max(currentOffset, semiColumnOffset + 1);
}
}
return currentOffset + 1;
}
  /**
   * Returns the {@link Type} corresponding to the given field or method descriptor.
   *
   * <p>Primitive and void descriptors map to shared singleton instances; array, object and method
   * descriptors are wrapped in a new {@link Type} whose value buffer references a sub-range of
   * descriptorBuffer (for object types, the 'L' prefix and ';' suffix are excluded from that
   * range).
   *
   * @param descriptorBuffer a buffer containing the field or method descriptor.
   * @param descriptorBegin the beginning index, inclusive, of the field or method descriptor in
   *     descriptorBuffer.
   * @param descriptorEnd the end index, exclusive, of the field or method descriptor in
   *     descriptorBuffer.
   * @return the {@link Type} corresponding to the given type descriptor.
   * @throws IllegalArgumentException if the first character of the descriptor is not a valid
   *     type sort tag.
   */
  private static Type getTypeInternal(
      final String descriptorBuffer, final int descriptorBegin, final int descriptorEnd) {
    // Dispatch on the descriptor's first character, which encodes the type sort.
    switch (descriptorBuffer.charAt(descriptorBegin)) {
      case 'V':
        return VOID_TYPE;
      case 'Z':
        return BOOLEAN_TYPE;
      case 'C':
        return CHAR_TYPE;
      case 'B':
        return BYTE_TYPE;
      case 'S':
        return SHORT_TYPE;
      case 'I':
        return INT_TYPE;
      case 'F':
        return FLOAT_TYPE;
      case 'J':
        return LONG_TYPE;
      case 'D':
        return DOUBLE_TYPE;
      case '[':
        // Array type: the value buffer keeps the full descriptor, including '[' prefixes.
        return new Type(ARRAY, descriptorBuffer, descriptorBegin, descriptorEnd);
      case 'L':
        // Object type: the value buffer keeps only the internal name, without 'L' and ';'.
        return new Type(OBJECT, descriptorBuffer, descriptorBegin + 1, descriptorEnd - 1);
      case '(':
        // Method type: the value buffer keeps the full method descriptor.
        return new Type(METHOD, descriptorBuffer, descriptorBegin, descriptorEnd);
      default:
        throw new IllegalArgumentException("Invalid descriptor: " + descriptorBuffer);
    }
  }
// -----------------------------------------------------------------------------------------------
// Methods to get | Type |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/XmlSignerEndpointBuilderFactory.java | {
"start": 1585,
"end": 13509
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedXmlSignerEndpointBuilder advanced() {
return (AdvancedXmlSignerEndpointBuilder) this;
}
/**
* In order to protect the KeyInfo element from tampering you can add a
* reference to the signed info element so that it is protected via the
* signature value. The default value is true. Only relevant when a
* KeyInfo is returned by KeyAccessor. and KeyInfo#getId() is not null.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: producer
*
* @param addKeyInfoReference the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder addKeyInfoReference(Boolean addKeyInfoReference) {
doSetProperty("addKeyInfoReference", addKeyInfoReference);
return this;
}
/**
* In order to protect the KeyInfo element from tampering you can add a
* reference to the signed info element so that it is protected via the
* signature value. The default value is true. Only relevant when a
* KeyInfo is returned by KeyAccessor. and KeyInfo#getId() is not null.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: true
* Group: producer
*
* @param addKeyInfoReference the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder addKeyInfoReference(String addKeyInfoReference) {
doSetProperty("addKeyInfoReference", addKeyInfoReference);
return this;
}
/**
* You can set a base URI which is used in the URI dereferencing.
* Relative URIs are then concatenated with the base URI.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param baseUri the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder baseUri(String baseUri) {
doSetProperty("baseUri", baseUri);
return this;
}
/**
* Canonicalization method used to canonicalize the SignedInfo element
* before the digest is calculated. You can use the helper methods
* XmlSignatureHelper.getCanonicalizationMethod(String algorithm) or
* getCanonicalizationMethod(String algorithm, List
* inclusiveNamespacePrefixes) to create a canonicalization method.
*
* The option is a: <code>javax.xml.crypto.AlgorithmMethod</code> type.
*
* Default: http://www.w3.org/TR/2001/REC-xml-c14n-20010315
* Group: producer
*
* @param canonicalizationMethod the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder canonicalizationMethod(javax.xml.crypto.AlgorithmMethod canonicalizationMethod) {
doSetProperty("canonicalizationMethod", canonicalizationMethod);
return this;
}
/**
* Canonicalization method used to canonicalize the SignedInfo element
* before the digest is calculated. You can use the helper methods
* XmlSignatureHelper.getCanonicalizationMethod(String algorithm) or
* getCanonicalizationMethod(String algorithm, List
* inclusiveNamespacePrefixes) to create a canonicalization method.
*
* The option will be converted to a
* <code>javax.xml.crypto.AlgorithmMethod</code> type.
*
* Default: http://www.w3.org/TR/2001/REC-xml-c14n-20010315
* Group: producer
*
* @param canonicalizationMethod the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder canonicalizationMethod(String canonicalizationMethod) {
doSetProperty("canonicalizationMethod", canonicalizationMethod);
return this;
}
/**
* Determines if the XML signature specific headers be cleared after
* signing and verification. Defaults to true.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: producer
*
* @param clearHeaders the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder clearHeaders(Boolean clearHeaders) {
doSetProperty("clearHeaders", clearHeaders);
return this;
}
/**
* Determines if the XML signature specific headers be cleared after
* signing and verification. Defaults to true.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: true
* Group: producer
*
* @param clearHeaders the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder clearHeaders(String clearHeaders) {
doSetProperty("clearHeaders", clearHeaders);
return this;
}
/**
* Sets the content object Id attribute value. By default a UUID is
* generated. If you set the null value, then a new UUID will be
* generated. Only used in the enveloping case.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param contentObjectId the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder contentObjectId(String contentObjectId) {
doSetProperty("contentObjectId", contentObjectId);
return this;
}
/**
* Type of the content reference. The default value is null. This value
* can be overwritten by the header
* XmlSignatureConstants#HEADER_CONTENT_REFERENCE_TYPE.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param contentReferenceType the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder contentReferenceType(String contentReferenceType) {
doSetProperty("contentReferenceType", contentReferenceType);
return this;
}
/**
* Reference URI for the content to be signed. Only used in the
* enveloped case. If the reference URI contains an ID attribute value,
* then the resource schema URI ( setSchemaResourceUri(String)) must
* also be set because the schema validator will then find out which
* attributes are ID attributes. Will be ignored in the enveloping or
* detached case.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param contentReferenceUri the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder contentReferenceUri(String contentReferenceUri) {
doSetProperty("contentReferenceUri", contentReferenceUri);
return this;
}
/**
* Sets the crypto context properties. See {link
* XMLCryptoContext#setProperty(String, Object)}. Possible properties
* are defined in XMLSignContext an XMLValidateContext (see Supported
* Properties). The following properties are set by default to the value
* Boolean#TRUE for the XML validation. If you want to switch these
* features off you must set the property value to Boolean#FALSE.
* org.jcp.xml.dsig.validateManifests
* javax.xml.crypto.dsig.cacheReference.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
*
* Group: producer
*
* @param cryptoContextProperties the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder cryptoContextProperties(Map<java.lang.String, java.lang.Object> cryptoContextProperties) {
doSetProperty("cryptoContextProperties", cryptoContextProperties);
return this;
}
/**
* Sets the crypto context properties. See {link
* XMLCryptoContext#setProperty(String, Object)}. Possible properties
* are defined in XMLSignContext an XMLValidateContext (see Supported
* Properties). The following properties are set by default to the value
* Boolean#TRUE for the XML validation. If you want to switch these
* features off you must set the property value to Boolean#FALSE.
* org.jcp.xml.dsig.validateManifests
* javax.xml.crypto.dsig.cacheReference.
*
* The option will be converted to a
* <code>java.util.Map<java.lang.String, java.lang.Object></code>
* type.
*
* Group: producer
*
* @param cryptoContextProperties the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder cryptoContextProperties(String cryptoContextProperties) {
doSetProperty("cryptoContextProperties", cryptoContextProperties);
return this;
}
/**
* Digest algorithm URI. Optional parameter. This digest algorithm is
* used for calculating the digest of the input message. If this digest
* algorithm is not specified then the digest algorithm is calculated
* from the signature algorithm. Example:
* http://www.w3.org/2001/04/xmlenc#sha256.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param digestAlgorithm the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder digestAlgorithm(String digestAlgorithm) {
doSetProperty("digestAlgorithm", digestAlgorithm);
return this;
}
/**
* Disallows that the incoming XML document contains DTD DOCTYPE
* declaration. The default value is Boolean#TRUE.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: producer
*
* @param disallowDoctypeDecl the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder disallowDoctypeDecl(Boolean disallowDoctypeDecl) {
doSetProperty("disallowDoctypeDecl", disallowDoctypeDecl);
return this;
}
/**
* Disallows that the incoming XML document contains DTD DOCTYPE
* declaration. The default value is Boolean#TRUE.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: true
* Group: producer
*
* @param disallowDoctypeDecl the value to set
* @return the dsl builder
*/
default XmlSignerEndpointBuilder disallowDoctypeDecl(String disallowDoctypeDecl) {
doSetProperty("disallowDoctypeDecl", disallowDoctypeDecl);
return this;
}
/**
* For the signing process, a private key is necessary. You specify a
* key accessor bean which provides this private key. The key accessor
* bean must implement the KeyAccessor interface. The package
* org.apache.camel.component.xmlsecurity.api contains the default
* implementation | XmlSignerEndpointBuilder |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/profile/IfBuildProfileStereotypeTest.java | {
"start": 3576,
"end": 3684
} | class ____ implements MyService {
}
@ApplicationScoped
static | InheritableTransitiveDevOnlyMyService |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/core/annotation/UniqueSecurityAnnotationScannerTests.java | {
"start": 15537,
"end": 15642
} | interface ____ {
List<String> list(@CustomParameterAnnotation("four") String user);
}
| OtherUserService |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/PageSizeUtil.java | {
"start": 3102,
"end": 3350
} | class ____ not meant to be instantiated. */
private PageSizeUtil() {}
// ------------------------------------------------------------------------
/**
* All unsafe related code must be in a separate class, so that loading the outer | is |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/shareddata/AsyncMapTest.java | {
"start": 25449,
"end": 26093
} | class ____ implements Serializable {
private String str;
public SomeSerializableObject(String str) {
this.str = str;
}
public SomeSerializableObject() {
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof SomeSerializableObject)) return false;
SomeSerializableObject that = (SomeSerializableObject) o;
if (str != null ? !str.equals(that.str) : that.str != null) return false;
return true;
}
@Override
public int hashCode() {
return str != null ? str.hashCode() : 0;
}
}
public static final | SomeSerializableObject |
java | elastic__elasticsearch | x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java | {
"start": 20810,
"end": 22612
} | class ____ extends Plugin implements ActionPlugin {
protected static final Logger logger = LogManager.getLogger(SearchBlockPlugin.class);
private final String nodeId;
private final AtomicBoolean shouldBlockOnSearch = new AtomicBoolean(false);
private final AtomicBoolean shardSearchBlocked = new AtomicBoolean(false);
public SearchBlockPlugin(Settings settings, Path configPath) throws Exception {
nodeId = settings.get("node.name");
}
@Override
public void onIndexModule(IndexModule indexModule) {
super.onIndexModule(indexModule);
indexModule.addSearchOperationListener(new SearchOperationListener() {
@Override
public void onPreQueryPhase(SearchContext c) {
logger.info("onPreQueryPhase");
}
@Override
public void onNewReaderContext(ReaderContext c) {
try {
logger.info("blocking search on " + nodeId);
shardSearchBlocked.set(true);
assertBusy(() -> assertFalse(shouldBlockOnSearch.get()), 20, TimeUnit.SECONDS);
logger.info("unblocking search on " + nodeId);
shardSearchBlocked.set(false);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
});
}
void enableSearchBlock() {
shouldBlockOnSearch.set(true);
}
void disableSearchBlock() {
shouldBlockOnSearch.set(false);
}
boolean isShardSearchBlocked() {
return shardSearchBlocked.get();
}
}
}
| SearchBlockPlugin |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 77017,
"end": 77330
} | class ____ {",
" public GenericWithImmutableParam<MutableClass> method() { return null; }",
"}")
.doTest();
}
@Test
public void genericStaticMethodParam_noViolation() {
withImmutableTypeParameterGeneric()
.addSourceLines(
"Test.class",
" | Test |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/DeduplicateConstants.java | {
"start": 4085,
"end": 6589
} | class ____)
return super.visitBlock(tree, scope.enter());
}
@Override
public Void visitVariable(VariableTree tree, Scope scope) {
// record that this variables hides previous declarations before entering its initializer
scope.remove(ASTHelpers.getSymbol(tree));
scan(tree.getInitializer(), scope);
saveConstValue(tree, scope);
return null;
}
@Override
public Void visitLiteral(LiteralTree tree, Scope scope) {
replaceLiteral(tree, scope, state);
return super.visitLiteral(tree, scope);
}
private void replaceLiteral(LiteralTree tree, Scope scope, VisitorState state) {
Object value = ASTHelpers.constValue(tree);
if (value == null) {
return;
}
VarSymbol sym = scope.get(state.getSourceForNode(tree));
if (sym == null) {
return;
}
SuggestedFix fix = SuggestedFix.replace(tree, sym.getSimpleName().toString());
fixes.put(sym, tree, fix);
}
private void saveConstValue(VariableTree tree, Scope scope) {
VarSymbol sym = ASTHelpers.getSymbol(tree);
if (!isConsideredFinal(sym)) {
return;
}
// heuristic: long string constants are generally more interesting than short ones, or
// than non-string constants (e.g. `""`, `0`, or `false`).
String constValue = ASTHelpers.constValue(tree.getInitializer(), String.class);
if (constValue == null || constValue.length() <= 1) {
return;
}
scope.put(state.getSourceForNode(tree.getInitializer()), sym);
}
}.scan(tree, new Scope(null));
for (Map.Entry<VarSymbol, Map<Tree, SuggestedFix>> entries : fixes.rowMap().entrySet()) {
Map<Tree, SuggestedFix> occurrences = entries.getValue();
if (occurrences.size() < 2) {
// heuristic: only de-duplicate when there are two or more occurrences
continue;
}
// report the finding on each occurrence, but provide a fix for all related occurrences,
// so it works better on changed-lines only
SuggestedFix fix = mergeFix(occurrences.values());
occurrences.keySet().forEach(t -> state.reportMatch(describeMatch(t, fix)));
}
return Description.NO_MATCH;
}
private static SuggestedFix mergeFix(Collection<SuggestedFix> fixes) {
SuggestedFix.Builder fix = SuggestedFix.builder();
fixes.forEach(fix::merge);
return fix.build();
}
}
| bodies |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageSerialization.java | {
"start": 2928,
"end": 2975
} | class ____ {
// Static-only | FSImageSerialization |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java | {
"start": 2045,
"end": 19021
} | class ____ implements ToXContentObject, Writeable {
/**
* Serialisation names
*/
public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField MODEL_PRUNE_WINDOW = new ParseField("model_prune_window");
public static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name");
public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters");
public static final ParseField CATEGORIZATION_ANALYZER = CategorizationAnalyzerConfig.CATEGORIZATION_ANALYZER;
public static final ParseField PER_PARTITION_CATEGORIZATION = new ParseField("per_partition_categorization");
public static final ParseField LATENCY = new ParseField("latency");
public static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name");
public static final ParseField DETECTORS = new ParseField("detectors");
public static final ParseField INFLUENCERS = new ParseField("influencers");
public static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields");
public static final String ML_CATEGORY_FIELD = "mlcategory";
public static final Set<String> AUTO_CREATED_FIELDS = new HashSet<>(Collections.singletonList(ML_CATEGORY_FIELD));
// Since the C++ backend truncates the categorization field at length 1000 (see model::CCategoryExamplesCollector::MAX_EXAMPLE_LENGTH),
// adding an ellipsis on truncation, it makes no sense to send potentially very long strings to it. For the backend logic still to work
// we need to send more than that, hence we truncate at length 1001.
//
// Also, because we do the tokenization on the Java side now the tokens will still be sent correctly (separately) to the C++ backend
// even if they extend beyond the length of a truncated example.
public static final int MAX_CATEGORIZATION_FIELD_LENGTH = 1001;
// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
public static final ConstructingObjectParser<AnalysisConfig.Builder, Void> LENIENT_PARSER = createParser(true);
public static final ConstructingObjectParser<AnalysisConfig.Builder, Void> STRICT_PARSER = createParser(false);
// The minimum number of buckets considered acceptable for the model_prune_window field
public static final long MINIMUM_MODEL_PRUNE_WINDOW_BUCKETS = 2;
public static final TimeValue DEFAULT_MODEL_PRUNE_WINDOW = TimeValue.timeValueDays(30);
@SuppressWarnings("unchecked")
private static ConstructingObjectParser<AnalysisConfig.Builder, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<AnalysisConfig.Builder, Void> parser = new ConstructingObjectParser<>(
ANALYSIS_CONFIG.getPreferredName(),
ignoreUnknownFields,
a -> new AnalysisConfig.Builder((List<Detector>) a[0])
);
parser.declareObjectArray(
ConstructingObjectParser.constructorArg(),
(p, c) -> (ignoreUnknownFields ? Detector.LENIENT_PARSER : Detector.STRICT_PARSER).apply(p, c).build(),
DETECTORS
);
parser.declareString(
(builder, val) -> builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())),
BUCKET_SPAN
);
parser.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME);
parser.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS);
// This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not
// possible to simply declare whether the field is a string or object and a completely custom parser is required
parser.declareField(
Builder::setCategorizationAnalyzerConfig,
(p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p, ignoreUnknownFields),
CATEGORIZATION_ANALYZER,
ObjectParser.ValueType.OBJECT_OR_STRING
);
parser.declareObject(
Builder::setPerPartitionCategorizationConfig,
ignoreUnknownFields ? PerPartitionCategorizationConfig.LENIENT_PARSER : PerPartitionCategorizationConfig.STRICT_PARSER,
PER_PARTITION_CATEGORIZATION
);
parser.declareString((builder, val) -> builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY);
parser.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME);
parser.declareStringArray(Builder::setInfluencers, INFLUENCERS);
parser.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS);
parser.declareString(
(builder, val) -> builder.setModelPruneWindow(TimeValue.parseTimeValue(val, MODEL_PRUNE_WINDOW.getPreferredName())),
MODEL_PRUNE_WINDOW
);
return parser;
}
/**
* These values apply to all detectors
*/
private final TimeValue bucketSpan;
private final String categorizationFieldName;
private final List<String> categorizationFilters;
private final CategorizationAnalyzerConfig categorizationAnalyzerConfig;
private final PerPartitionCategorizationConfig perPartitionCategorizationConfig;
private final TimeValue latency;
private final String summaryCountFieldName;
private final List<Detector> detectors;
private final List<String> influencers;
private final Boolean multivariateByFields;
private final TimeValue modelPruneWindow;
private AnalysisConfig(
TimeValue bucketSpan,
String categorizationFieldName,
List<String> categorizationFilters,
CategorizationAnalyzerConfig categorizationAnalyzerConfig,
PerPartitionCategorizationConfig perPartitionCategorizationConfig,
TimeValue latency,
String summaryCountFieldName,
List<Detector> detectors,
List<String> influencers,
Boolean multivariateByFields,
TimeValue modelPruneWindow
) {
this.detectors = detectors;
this.bucketSpan = bucketSpan;
this.latency = latency;
this.categorizationFieldName = categorizationFieldName;
this.categorizationAnalyzerConfig = categorizationAnalyzerConfig;
this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters);
this.perPartitionCategorizationConfig = perPartitionCategorizationConfig;
this.summaryCountFieldName = summaryCountFieldName;
this.influencers = Collections.unmodifiableList(influencers);
this.multivariateByFields = multivariateByFields;
this.modelPruneWindow = modelPruneWindow;
}
public AnalysisConfig(StreamInput in) throws IOException {
bucketSpan = in.readTimeValue();
categorizationFieldName = in.readOptionalString();
categorizationFilters = in.readBoolean() ? in.readCollectionAsImmutableList(StreamInput::readString) : null;
categorizationAnalyzerConfig = in.readOptionalWriteable(CategorizationAnalyzerConfig::new);
perPartitionCategorizationConfig = new PerPartitionCategorizationConfig(in);
latency = in.readOptionalTimeValue();
summaryCountFieldName = in.readOptionalString();
detectors = in.readCollectionAsImmutableList(Detector::new);
influencers = in.readCollectionAsImmutableList(StreamInput::readString);
multivariateByFields = in.readOptionalBoolean();
modelPruneWindow = in.readOptionalTimeValue();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeTimeValue(bucketSpan);
out.writeOptionalString(categorizationFieldName);
if (categorizationFilters != null) {
out.writeBoolean(true);
out.writeStringCollection(categorizationFilters);
} else {
out.writeBoolean(false);
}
out.writeOptionalWriteable(categorizationAnalyzerConfig);
perPartitionCategorizationConfig.writeTo(out);
out.writeOptionalTimeValue(latency);
out.writeOptionalString(summaryCountFieldName);
out.writeCollection(detectors);
out.writeStringCollection(influencers);
out.writeOptionalBoolean(multivariateByFields);
out.writeOptionalTimeValue(modelPruneWindow);
}
/**
* The analysis bucket span
*
* @return The bucketspan or <code>null</code> if not set
*/
public TimeValue getBucketSpan() {
return bucketSpan;
}
public String getCategorizationFieldName() {
return categorizationFieldName;
}
public List<String> getCategorizationFilters() {
return categorizationFilters;
}
public CategorizationAnalyzerConfig getCategorizationAnalyzerConfig() {
return categorizationAnalyzerConfig;
}
public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() {
return perPartitionCategorizationConfig;
}
/**
* The latency interval during which out-of-order records should be handled.
*
* @return The latency interval or <code>null</code> if not set
*/
public TimeValue getLatency() {
return latency;
}
/**
* The name of the field that contains counts for pre-summarised input
*
* @return The field name or <code>null</code> if not set
*/
public String getSummaryCountFieldName() {
return summaryCountFieldName;
}
/**
* The list of analysis detectors. In a valid configuration the list should
* contain at least 1 {@link Detector}
*
* @return The Detectors used in this job
*/
public List<Detector> getDetectors() {
return detectors;
}
/**
* The list of influence field names
*/
public List<String> getInfluencers() {
return influencers;
}
/**
* Return the list of term fields.
* These are the influencer fields, partition field,
* by field and over field of each detector.
* <code>null</code> and empty strings are filtered from the
* config.
*
* @return Set of term fields - never <code>null</code>
*/
public Set<String> termFields() {
return termFields(getDetectors(), getInfluencers());
}
static SortedSet<String> termFields(List<Detector> detectors, List<String> influencers) {
SortedSet<String> termFields = new TreeSet<>();
detectors.forEach(d -> termFields.addAll(d.getByOverPartitionTerms()));
for (String i : influencers) {
addIfNotNull(termFields, i);
}
// remove empty strings
termFields.remove("");
return termFields;
}
public Set<String> extractReferencedFilters() {
return detectors.stream().map(Detector::extractReferencedFilters).flatMap(Set::stream).collect(Collectors.toSet());
}
public Boolean getMultivariateByFields() {
return multivariateByFields;
}
public TimeValue getModelPruneWindow() {
return modelPruneWindow;
}
/**
* Return the set of fields required by the analysis.
* These are the influencer fields, metric field, partition field,
* by field and over field of each detector, plus the summary count
* field and the categorization field name of the job.
* <code>null</code> and empty strings are filtered from the
* config.
*
* @return Set of required analysis fields - never <code>null</code>
*/
public Set<String> analysisFields() {
Set<String> analysisFields = termFields();
addIfNotNull(analysisFields, categorizationFieldName);
addIfNotNull(analysisFields, summaryCountFieldName);
for (Detector d : getDetectors()) {
addIfNotNull(analysisFields, d.getFieldName());
}
// remove empty strings
analysisFields.remove("");
return analysisFields;
}
private static void addIfNotNull(Set<String> fields, String field) {
if (field != null) {
fields.add(field);
}
}
public List<String> fields() {
return collectNonNullAndNonEmptyDetectorFields(Detector::getFieldName);
}
private List<String> collectNonNullAndNonEmptyDetectorFields(Function<Detector, String> fieldGetter) {
Set<String> fields = new HashSet<>();
for (Detector d : getDetectors()) {
addIfNotNull(fields, fieldGetter.apply(d));
}
// remove empty strings
fields.remove("");
return new ArrayList<>(fields);
}
public List<String> byFields() {
return collectNonNullAndNonEmptyDetectorFields(Detector::getByFieldName);
}
public List<String> overFields() {
return collectNonNullAndNonEmptyDetectorFields(Detector::getOverFieldName);
}
public List<String> partitionFields() {
return collectNonNullAndNonEmptyDetectorFields(Detector::getPartitionFieldName);
}
/**
 * Serializes this analysis config as an XContent object.
 * Optional members are emitted only when non-null; the detectors array and
 * influencers field are always written.
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject();
    builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep());
    if (categorizationFieldName != null) {
        builder.field(CATEGORIZATION_FIELD_NAME.getPreferredName(), categorizationFieldName);
    }
    if (categorizationFilters != null) {
        builder.field(CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters);
    }
    if (categorizationAnalyzerConfig != null) {
        // This cannot be builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), categorizationAnalyzerConfig, params);
        // because that always writes categorizationAnalyzerConfig as an object, and in the case of a global analyzer it
        // gets written as a single string.
        categorizationAnalyzerConfig.toXContent(builder, params);
    }
    // perPartitionCategorizationConfig is never null on the server side (it can be in the equivalent client class),
    // but is not useful to know when categorization is not being used
    if (categorizationFieldName != null) {
        builder.field(PER_PARTITION_CATEGORIZATION.getPreferredName(), perPartitionCategorizationConfig);
    }
    if (latency != null) {
        builder.field(LATENCY.getPreferredName(), latency.getStringRep());
    }
    if (summaryCountFieldName != null) {
        builder.field(SUMMARY_COUNT_FIELD_NAME.getPreferredName(), summaryCountFieldName);
    }
    builder.startArray(DETECTORS.getPreferredName());
    for (Detector detector : detectors) {
        detector.toXContent(builder, params);
    }
    builder.endArray();
    builder.field(INFLUENCERS.getPreferredName(), influencers);
    if (multivariateByFields != null) {
        builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields);
    }
    if (modelPruneWindow != null) {
        builder.field(MODEL_PRUNE_WINDOW.getPreferredName(), modelPruneWindow.getStringRep());
    }
    builder.endObject();
    return builder;
}
/**
 * Structural equality over all eleven configuration members.
 * Kept consistent with {@link #hashCode()}, which hashes the same field set.
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    AnalysisConfig that = (AnalysisConfig) o;
    return Objects.equals(latency, that.latency)
        && Objects.equals(bucketSpan, that.bucketSpan)
        && Objects.equals(categorizationFieldName, that.categorizationFieldName)
        && Objects.equals(categorizationFilters, that.categorizationFilters)
        && Objects.equals(categorizationAnalyzerConfig, that.categorizationAnalyzerConfig)
        && Objects.equals(perPartitionCategorizationConfig, that.perPartitionCategorizationConfig)
        && Objects.equals(summaryCountFieldName, that.summaryCountFieldName)
        && Objects.equals(detectors, that.detectors)
        && Objects.equals(influencers, that.influencers)
        && Objects.equals(multivariateByFields, that.multivariateByFields)
        && Objects.equals(modelPruneWindow, that.modelPruneWindow);
}
/**
 * Hashes the same eleven members compared by {@link #equals(Object)},
 * satisfying the equals/hashCode contract.
 */
@Override
public int hashCode() {
    return Objects.hash(
        bucketSpan,
        categorizationFieldName,
        categorizationFilters,
        categorizationAnalyzerConfig,
        perPartitionCategorizationConfig,
        latency,
        summaryCountFieldName,
        detectors,
        influencers,
        multivariateByFields,
        modelPruneWindow
    );
}
public static final | AnalysisConfig |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/indices/analysis/wrappers/StableApiWrappersTests.java | {
"start": 1672,
"end": 9754
} | class ____ extends ESTestCase {
/**
 * Registers a plugin whose class name ("someRandomName") cannot be loaded:
 * resolving the wrapped provider must fail with an IllegalStateException
 * caused by a ClassNotFoundException.
 */
public void testUnknownClass() throws IOException {
    StablePluginsRegistry registry = Mockito.mock(StablePluginsRegistry.class);
    Mockito.when(registry.getPluginInfosForExtensible(eq(AnalyzerFactory.class.getCanonicalName())))
        .thenReturn(List.of(new PluginInfo("namedComponentName1", "someRandomName", getClass().getClassLoader())));
    Map<String, AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.AnalyzerProvider<?>>> analysisProviderMap =
        StableApiWrappers.oldApiForAnalyzerFactory(registry);
    AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.AnalyzerProvider<?>> oldTokenFilter = analysisProviderMap.get(
        "namedComponentName1"
    );
    IllegalStateException illegalStateException = expectThrows(
        IllegalStateException.class,
        () -> oldTokenFilter.get(null, mock(Environment.class), null, null)
    );
    assertThat(illegalStateException.getCause(), instanceOf(ClassNotFoundException.class));
}

/**
 * A stable plugin class with only a no-arg constructor and no @Inject-annotated
 * settings constructor must be rejected with a descriptive IllegalStateException.
 */
public void testStablePluginHasNoArgConstructor() throws IOException {
    StablePluginsRegistry registry = Mockito.mock(StablePluginsRegistry.class);
    Mockito.when(registry.getPluginInfosForExtensible(eq(AnalyzerFactory.class.getCanonicalName())))
        .thenReturn(
            List.of(new PluginInfo("namedComponentName1", DefaultConstrAnalyzerFactory.class.getName(), getClass().getClassLoader()))
        );
    Map<String, AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.AnalyzerProvider<?>>> analysisProviderMap =
        StableApiWrappers.oldApiForAnalyzerFactory(registry);
    AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.AnalyzerProvider<?>> oldTokenFilter = analysisProviderMap.get(
        "namedComponentName1"
    );
    IllegalStateException illegalStateException = expectThrows(
        IllegalStateException.class,
        () -> oldTokenFilter.get(null, mock(Environment.class), null, null)
    );
    assertThat(
        illegalStateException.getMessage(),
        equalTo("Missing @org.elasticsearch.plugin.Inject annotation for constructor with settings.")
    );
}
/**
 * Wraps a stable-API AnalyzerFactory and verifies the old-API provider
 * delegates get(), name() and scope() to the plugin implementation.
 */
public void testAnalyzerFactoryDelegation() throws IOException {
    StablePluginsRegistry registry = Mockito.mock(StablePluginsRegistry.class);
    Mockito.when(registry.getPluginInfosForExtensible(eq(AnalyzerFactory.class.getCanonicalName())))
        .thenReturn(List.of(new PluginInfo("namedComponentName1", TestAnalyzerFactory.class.getName(), getClass().getClassLoader())));
    Map<String, AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.AnalyzerProvider<?>>> analysisProviderMap =
        StableApiWrappers.oldApiForAnalyzerFactory(registry);
    AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.AnalyzerProvider<?>> oldTokenFilter = analysisProviderMap.get(
        "namedComponentName1"
    );
    org.elasticsearch.index.analysis.AnalyzerProvider<?> analyzerProvider = oldTokenFilter.get(
        null,
        mock(Environment.class),
        null,
        null
    );
    // test delegation: the analyzer comes from the (mocked) plugin factory,
    // and the provider reports the plugin's component name and GLOBAL scope.
    Analyzer analyzer = analyzerProvider.get();
    assertTrue(Mockito.mockingDetails(analyzer).isMock());
    assertThat(analyzerProvider.name(), equalTo("TestAnalyzerFactory"));
    assertThat(analyzerProvider.scope(), equalTo(AnalyzerScope.GLOBAL));
}
/**
 * Wraps a stable-API TokenizerFactory and verifies that create() and name()
 * delegate to the plugin implementation.
 */
public void testTokenizerFactoryDelegation() throws IOException {
    StablePluginsRegistry pluginRegistry = Mockito.mock(StablePluginsRegistry.class);
    PluginInfo pluginInfo = new PluginInfo(
        "namedComponentName1",
        TestTokenizerFactory.class.getName(),
        getClass().getClassLoader()
    );
    Mockito.when(pluginRegistry.getPluginInfosForExtensible(eq(TokenizerFactory.class.getCanonicalName())))
        .thenReturn(List.of(pluginInfo));
    Map<String, AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.TokenizerFactory>> providers =
        StableApiWrappers.oldApiForTokenizerFactory(pluginRegistry);
    org.elasticsearch.index.analysis.TokenizerFactory tokenizerFactory = providers.get("namedComponentName1")
        .get(null, mock(Environment.class), null, null);
    // The wrapped factory must hand back the tokenizer produced by the mock plugin...
    Tokenizer tokenizer = tokenizerFactory.create();
    assertTrue(Mockito.mockingDetails(tokenizer).isMock());
    // ...and expose the plugin's component name.
    assertThat(tokenizerFactory.name(), equalTo("TestTokenizerFactory"));
}
/**
 * Wraps a stable-API TokenFilterFactory and verifies create()/normalize()
 * pass the token stream through to the plugin (which touches it via
 * incrementToken()), and that analysis mode and name are surfaced.
 */
public void testTokenFilterFactoryDelegation() throws IOException {
    StablePluginsRegistry registry = Mockito.mock(StablePluginsRegistry.class);
    Mockito.when(registry.getPluginInfosForExtensible(eq(TokenFilterFactory.class.getCanonicalName())))
        .thenReturn(
            List.of(new PluginInfo("namedComponentName1", TestTokenFilterFactory.class.getName(), getClass().getClassLoader()))
        );
    Map<String, AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.TokenFilterFactory>> analysisProviderMap =
        StableApiWrappers.oldApiForTokenFilterFactory(registry);
    AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.TokenFilterFactory> oldTokenFilter = analysisProviderMap.get(
        "namedComponentName1"
    );
    org.elasticsearch.index.analysis.TokenFilterFactory tokenFilterFactory = oldTokenFilter.get(
        null,
        mock(Environment.class),
        null,
        null
    );
    // test delegation: create() returns the same stream instance and the
    // plugin observed it (incrementToken was called on the mock).
    TokenStream createTokenStreamMock = mock(TokenStream.class);
    TokenStream tokenStream = tokenFilterFactory.create(createTokenStreamMock);
    assertSame(tokenStream, createTokenStreamMock);
    verify(createTokenStreamMock).incrementToken();
    TokenStream normalizeTokenStreamMock = mock(TokenStream.class);
    tokenStream = tokenFilterFactory.normalize(normalizeTokenStreamMock);
    assertSame(tokenStream, normalizeTokenStreamMock);
    verify(normalizeTokenStreamMock).incrementToken();
    assertThat(tokenFilterFactory.getAnalysisMode(), equalTo(org.elasticsearch.index.analysis.AnalysisMode.INDEX_TIME));
    assertThat(tokenFilterFactory.name(), equalTo("TestTokenFilterFactory"));
}

/**
 * Wraps a stable-API CharFilterFactory and verifies create()/normalize()
 * pass the reader through to the plugin (which reads from it), and that
 * the component name is surfaced.
 */
public void testCharFilterFactoryDelegation() throws IOException {
    StablePluginsRegistry registry = Mockito.mock(StablePluginsRegistry.class);
    Mockito.when(registry.getPluginInfosForExtensible(eq(CharFilterFactory.class.getCanonicalName())))
        .thenReturn(List.of(new PluginInfo("namedComponentName1", TestCharFilterFactory.class.getName(), getClass().getClassLoader())));
    Map<String, AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.CharFilterFactory>> analysisProviderMap =
        StableApiWrappers.oldApiForStableCharFilterFactory(registry);
    AnalysisModule.AnalysisProvider<org.elasticsearch.index.analysis.CharFilterFactory> oldCharFilter = analysisProviderMap.get(
        "namedComponentName1"
    );
    org.elasticsearch.index.analysis.CharFilterFactory charFilterFactory = oldCharFilter.get(null, mock(Environment.class), null, null);
    // test delegation: the same reader instance flows through, and the plugin
    // read from it (read() was invoked on the mock).
    Reader createReaderMock = mock(Reader.class);
    Reader reader = charFilterFactory.create(createReaderMock);
    assertSame(reader, createReaderMock);
    verify(createReaderMock).read();
    Reader normalizeReaderMock = mock(Reader.class);
    reader = charFilterFactory.normalize(normalizeReaderMock);
    assertSame(reader, normalizeReaderMock);
    verify(normalizeReaderMock).read();
    assertThat(charFilterFactory.name(), equalTo("TestCharFilterFactory"));
}
@NamedComponent("DefaultConstrAnalyzerFactory")
public static | StableApiWrappersTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/interop/ImmutablesTypeSerializationTest.java | {
"start": 10444,
"end": 11650
} | class ____<K, V>
implements ImmutablesTypeSerializationTest.Entry<K, V> {
// Mutable fields populated by Jackson via the setters below during deserialization.
K key;
V value;

@JsonProperty("key")
public void setKey(K key) {
    this.key = key;
}

@JsonProperty("value")
public void setValue(V value) {
    this.value = value;
}

// The surrogate is write-only: reads must go through the built immutable entry.
@Override
public K getKey() { throw new UnsupportedOperationException(); }

@Override
public V getValue() { throw new UnsupportedOperationException(); }
}
/**
 * Delegating JSON creator: rebuilds an immutable entry from its mutable
 * deserialization surrogate, leaving fields the payload omitted (null) unset.
 */
@JsonCreator(mode = JsonCreator.Mode.DELEGATING)
static <K, V> ImmutableEntry<K, V> fromJson(ImmutableEntry.Json<K, V> json) {
    ImmutableEntry.Builder<K, V> entryBuilder = ImmutableEntry.<K, V>builder();
    if (json.key != null) {
        entryBuilder.key(json.key);
    }
    if (json.value != null) {
        entryBuilder.value(json.value);
    }
    return entryBuilder.build();
}
/** Returns a fresh builder for assembling an {@code ImmutableEntry}. */
public static <K, V> ImmutableEntry.Builder<K, V> builder() {
    return new ImmutableEntry.Builder<>();
}
public static final | Json |
java | google__auto | value/src/test/java/com/google/auto/value/processor/PropertyAnnotationsTest.java | {
"start": 1469,
"end": 1710
} | class ____ {
// Annotation source snippets, presumably spliced into generated test inputs
// elsewhere in this class — confirm against the full file.
private static final String TEST_ANNOTATION = "@PropertyAnnotationsTest.TestAnnotation";
private static final String TEST_ARRAY_ANNOTATION =
    "@PropertyAnnotationsTest.TestArrayAnnotation";
public | PropertyAnnotationsTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/OrderSupplemental2.java | {
"start": 512,
"end": 1224
} | class ____ {
private Integer oid;
private Integer receivablesId;
// Lazily-fetched owning Order; supplies this entity's primary key via @MapsId.
private Order order;

// No-arg constructor required by JPA.
public OrderSupplemental2() {
}

public OrderSupplemental2(Integer oid, Integer receivablesId) {
    this.oid = oid;
    this.receivablesId = receivablesId;
}

@Id
@Column(name = "oid")
public Integer getOid() {
    return oid;
}

public void setOid(Integer oid) {
    this.oid = oid;
}

public Integer getReceivablesId() {
    return receivablesId;
}

public void setReceivablesId(Integer receivablesId) {
    this.receivablesId = receivablesId;
}

// @MapsId derives the id (oid) from the associated Order's identifier;
// the association is LAZY so the test can exercise proxy behavior.
@OneToOne(fetch = FetchType.LAZY)
@MapsId
public Order getOrder() {
    return order;
}

public void setOrder(Order order) {
    this.order = order;
}
}
| OrderSupplemental2 |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/WatermarkAssignerTestPrograms.java | {
"start": 1228,
"end": 9472
} | class ____ {
// Source rows fed before the savepoint/restore step (see producedBeforeRestore below).
static final Row[] BEFORE_DATA = {
    Row.of(
        2,
        2L,
        "Hello",
        "2020-04-15 08:00:00",
        DateTimeUtils.toLocalDateTime(1586937600000L)),
    Row.of(1, 1L, "Hi", "2020-04-15 08:00:01", DateTimeUtils.toLocalDateTime(1586937601000L)),
    Row.of(
        3,
        2L,
        "Hello world",
        "2020-04-15 08:00:02",
        DateTimeUtils.toLocalDateTime(1586937602000L)),
    Row.of(
        4,
        3L,
        "Hello world, how are you?",
        "2020-04-15 08:00:03",
        DateTimeUtils.toLocalDateTime(1586937603000L)),
    Row.of(
        5,
        3L,
        "I am fine.",
        "2020-04-15 08:00:04",
        DateTimeUtils.toLocalDateTime(1586937604000L)),
};

// Source rows fed after restore (see producedAfterRestore below).
static final Row[] AFTER_DATA = {
    Row.of(7, 4L, "Ack", "2020-04-15 08:00:21", DateTimeUtils.toLocalDateTime(1586937621000L)),
    Row.of(6, 5L, "Syn", "2020-04-15 08:00:23", DateTimeUtils.toLocalDateTime(1586937623000L)),
    Row.of(
        8,
        3L,
        "Syn-Ack",
        "2020-04-15 08:00:25",
        DateTimeUtils.toLocalDateTime(1586937625000L)),
    Row.of(
        10,
        3L,
        "Close",
        "2020-04-15 08:00:28",
        DateTimeUtils.toLocalDateTime(1586937628000L))
};

// Sink schema shared by the first two programs; the third defines its own inline.
static final String[] SINK_SCHEMA = {"a INT", "b BIGINT", "ts TIMESTAMP(3)"};

// Watermark on ts with a 1s delay, combined with a simple b = 3 filter.
static final TableTestProgram WATERMARK_ASSIGNER_BASIC_FILTER =
    TableTestProgram.of(
            "watermark-assigner-basic-filter",
            "validates watermark assigner with basic filtering")
        .setupTableSource(
            SourceTestStep.newBuilder("source_t")
                .addSchema(
                    "a INT",
                    "b BIGINT",
                    "c VARCHAR",
                    "ts_string STRING",
                    "ts TIMESTAMP(3)",
                    "WATERMARK for ts AS ts - INTERVAL '1' SECOND")
                .producedBeforeRestore(BEFORE_DATA)
                .producedAfterRestore(AFTER_DATA)
                .build())
        .setupTableSink(
            SinkTestStep.newBuilder("sink_t")
                .addSchema(SINK_SCHEMA)
                .consumedBeforeRestore(
                    "+I[4, 3, 2020-04-15T08:00:03]",
                    "+I[5, 3, 2020-04-15T08:00:04]")
                .consumedAfterRestore(
                    "+I[8, 3, 2020-04-15T08:00:25]",
                    "+I[10, 3, 2020-04-15T08:00:28]")
                .build())
        .runSql("INSERT INTO sink_t SELECT a, b, ts FROM source_t WHERE b = 3")
        .build();

// Same query, but ts is a METADATA column and watermark push-down is enabled.
static final TableTestProgram WATERMARK_ASSIGNER_PUSHDOWN_METADATA =
    TableTestProgram.of(
            "watermark-assigner-pushdown-metadata",
            "validates watermark assigner with pushdown metadata")
        .setupTableSource(
            SourceTestStep.newBuilder("source_t")
                .addOption("enable-watermark-push-down", "true")
                .addOption("readable-metadata", "ts:timestamp(3)")
                .addOption("disable-lookup", "true")
                .addSchema(
                    "a INT",
                    "b BIGINT",
                    "c VARCHAR",
                    "ts_string STRING",
                    "ts TIMESTAMP(3) METADATA",
                    "WATERMARK for ts AS ts - INTERVAL '1' SECOND")
                .producedBeforeRestore(BEFORE_DATA)
                .producedAfterRestore(AFTER_DATA)
                .build())
        .setupTableSink(
            SinkTestStep.newBuilder("sink_t")
                .addSchema(SINK_SCHEMA)
                .consumedBeforeRestore(
                    "+I[4, 3, 2020-04-15T08:00:03]",
                    "+I[5, 3, 2020-04-15T08:00:04]")
                .consumedAfterRestore(
                    "+I[8, 3, 2020-04-15T08:00:25]",
                    "+I[10, 3, 2020-04-15T08:00:28]")
                .build())
        .runSql("INSERT INTO sink_t SELECT a, b, ts FROM source_t WHERE b = 3")
        .build();

// Watermark defined on a computed column; CURRENT_WATERMARK(ts) is observed
// in the sink (null for the first row after each start, since no watermark
// has been emitted yet).
static final TableTestProgram WATERMARK_ASSIGNER_PUSHDOWN_COMPUTED =
    TableTestProgram.of(
            "watermark-assigner-pushdown-computed",
            "validates watermark assigner with computed column pushdown")
        .setupTableSource(
            SourceTestStep.newBuilder("source_t")
                .addOption("enable-watermark-push-down", "true")
                .addOption("disable-lookup", "true")
                .addOption("scan.watermark.emit.strategy", "on-event")
                .addSchema(
                    "i INT",
                    "b BIGINT",
                    "s STRING",
                    "ts AS TO_TIMESTAMP_LTZ(b, 3)",
                    "WATERMARK for ts AS ts - INTERVAL '1' SECOND")
                .producedBeforeRestore(
                    Row.of(1, 1L, "a"),
                    Row.of(2, 2L, "b"),
                    Row.of(3, 3L, "c"))
                .producedAfterRestore(
                    Row.of(4, 4L, "d"),
                    Row.of(5, 5L, "e"),
                    Row.of(6, 6L, "f"))
                .build())
        .setupTableSink(
            SinkTestStep.newBuilder("sink_t")
                .addSchema(
                    "i INT",
                    "s STRING",
                    "ts TIMESTAMP_LTZ(3)",
                    "w TIMESTAMP_LTZ(3)")
                .consumedBeforeRestore(
                    "+I[1, a, 1970-01-01T00:00:00.001Z, null]",
                    "+I[2, b, 1970-01-01T00:00:00.002Z, 1969-12-31T23:59:59.001Z]",
                    "+I[3, c, 1970-01-01T00:00:00.003Z, 1969-12-31T23:59:59.002Z]")
                .consumedAfterRestore(
                    "+I[4, d, 1970-01-01T00:00:00.004Z, null]",
                    "+I[5, e, 1970-01-01T00:00:00.005Z, 1969-12-31T23:59:59.004Z]",
                    "+I[6, f, 1970-01-01T00:00:00.006Z, 1969-12-31T23:59:59.005Z]")
                .build())
        .runSql(
            "INSERT INTO sink_t SELECT i, s, ts, CURRENT_WATERMARK(ts) as w FROM source_t")
        .build();
}
| WatermarkAssignerTestPrograms |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java | {
"start": 4005,
"end": 35368
} | class ____ extends AbstractBinder {
@Override
protected void configure() {
    // Mock history context backing the web services under test.
    // NOTE(review): args (0, 1, 2, 1) presumably mean app/job/task/attempt
    // counts — the tests below expect 2 tasks per job; confirm against
    // MockHistoryContext's constructor.
    appContext = new MockHistoryContext(0, 1, 2, 1);
    webApp = mock(HsWebApp.class);
    when(webApp.name()).thenReturn("hsmockwebapp");
    // Wire the mocks into the injector under the names the resources expect.
    bind(webApp).to(WebApp.class).named("hsWebApp");
    bind(appContext).to(AppContext.class);
    bind(appContext).to(HistoryContext.class).named("ctx");
    bind(conf).to(Configuration.class).named("conf");
    bind(acp).to(ApplicationClientProtocol.class).named("appClient");
    final HttpServletResponse response = mock(HttpServletResponse.class);
    bind(response).to(HttpServletResponse.class);
    final HttpServletRequest request = mock(HttpServletRequest.class);
    bind(request).to(HttpServletRequest.class);
}
}
/**
 * GET .../jobs/{jobid}/tasks with an explicit JSON Accept header: expects a
 * "tasks" wrapper holding a 2-element "task" array for every known job.
 */
@Test
public void testTasks() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        Response response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks")
            .request(MediaType.APPLICATION_JSON).get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        JSONObject tasks = json.getJSONObject("tasks");
        JSONArray arr = tasks.getJSONArray("task");
        assertEquals(2, arr.length(), "incorrect number of elements");
        verifyHsTask(arr, jobsMap.get(id), null);
    }
}

/**
 * Same request with no Accept header: the endpoint must still default to JSON.
 */
@Test
public void testTasksDefault() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        Response response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks")
            .request().get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        JSONObject tasks = json.getJSONObject("tasks");
        JSONArray arr = tasks.getJSONArray("task");
        assertEquals(2, arr.length(), "incorrect number of elements");
        verifyHsTask(arr, jobsMap.get(id), null);
    }
}
/**
 * Same as testTasks, but exercises the endpoint with a trailing slash
 * ("tasks/") to confirm the route still resolves.
 */
@Test
public void testTasksSlash() throws JSONException, Exception {
    WebTarget target = targetWithJsonObject();
    for (Map.Entry<JobId, Job> entry : appContext.getAllJobs().entrySet()) {
        String jobId = MRApps.toString(entry.getKey());
        Response response = target.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks/")
            .request(MediaType.APPLICATION_JSON).get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject body = response.readEntity(JSONObject.class);
        assertEquals(1, body.length(), "incorrect number of elements");
        JSONArray taskArray = body.getJSONObject("tasks").getJSONArray("task");
        assertEquals(2, taskArray.length(), "incorrect number of elements");
        verifyHsTask(taskArray, entry.getValue(), null);
    }
}
/**
 * Requests the task list as XML, parses it with a securely-configured DOM
 * builder, and verifies the "tasks"/"task" elements.
 */
@Test
public void testTasksXML() throws JSONException, Exception {
    WebTarget r = target();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        Response response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks")
            .request(MediaType.APPLICATION_XML).get(Response.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        String xml = response.readEntity(String.class);
        // XMLUtils hardens the factory against XXE before parsing.
        DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList tasks = dom.getElementsByTagName("tasks");
        assertEquals(1, tasks.getLength(), "incorrect number of elements");
        NodeList task = dom.getElementsByTagName("task");
        verifyHsTaskXML(task, jobsMap.get(id));
    }
}
/**
 * Filters the task list with ?type=m (map tasks). A single task object is
 * returned; it is wrapped in an array so verifyHsTask can be reused.
 */
@Test
public void testTasksQueryMap() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String type = "m";
        Response response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks")
            .queryParam("type", type).request(MediaType.APPLICATION_JSON)
            .get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        JSONObject tasks = json.getJSONObject("tasks");
        JSONObject task = tasks.getJSONObject("task");
        JSONArray arr = new JSONArray();
        arr.put(task);
        assertEquals(1, arr.length(), "incorrect number of elements");
        verifyHsTask(arr, jobsMap.get(id), type);
    }
}

/**
 * Filters the task list with ?type=r (reduce tasks); otherwise identical to
 * the map-task variant above.
 */
@Test
public void testTasksQueryReduce() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String type = "r";
        Response response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks")
            .queryParam("type", type).request(MediaType.APPLICATION_JSON)
            .get(Response.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject json = response.readEntity(JSONObject.class);
        assertEquals(1, json.length(), "incorrect number of elements");
        JSONObject tasks = json.getJSONObject("tasks");
        JSONObject task = tasks.getJSONObject("task");
        JSONArray arr = new JSONArray();
        arr.put(task);
        assertEquals(1, arr.length(), "incorrect number of elements");
        verifyHsTask(arr, jobsMap.get(id), type);
    }
}
/**
 * ?type must be exactly "m" or "r": "reduce" is rejected with HTTP 400 and a
 * RemoteException body naming BadRequestException.
 */
@Test
public void testTasksQueryInvalid() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        // tasktype must be exactly either "m" or "r"
        String tasktype = "reduce";
        try {
            // Re-throwing from the response forces control into the catch
            // block where the error payload is inspected.
            Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
                .path(jobId).path("tasks").queryParam("type", tasktype)
                .request(MediaType.APPLICATION_JSON).get();
            throw new BadRequestException(response);
        } catch (BadRequestException ue) {
            Response response = ue.getResponse();
            assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject msg = response.readEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals(3, exception.length(), "incorrect number of elements");
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringMatch("exception message",
                "tasktype must be either m or r", message);
            WebServicesTestUtils.checkStringMatch("exception type",
                "BadRequestException", type);
            WebServicesTestUtils.checkStringMatch("exception classname",
                "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
        }
    }
}
/**
 * Fetches each individual task by id with an explicit JSON Accept header and
 * verifies the single-task payload.
 */
@Test
public void testTaskId() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            Response response = r.path("ws").path("v1").path("history")
                .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
                .request(MediaType.APPLICATION_JSON)
                .get(Response.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject json = response.readEntity(JSONObject.class);
            assertEquals(1, json.length(), "incorrect number of elements");
            JSONObject info = json.getJSONObject("task");
            verifyHsSingleTask(info, task);
        }
    }
}

/**
 * Same per-task fetch with a trailing slash on the task id path segment.
 */
@Test
public void testTaskIdSlash() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            Response response = r.path("ws").path("v1").path("history")
                .path("mapreduce").path("jobs").path(jobId).path("tasks")
                .path(tid + "/").request(MediaType.APPLICATION_JSON)
                .get(Response.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject json = response.readEntity(JSONObject.class);
            assertEquals(1, json.length(), "incorrect number of elements");
            JSONObject info = json.getJSONObject("task");
            verifyHsSingleTask(info, task);
        }
    }
}

/**
 * Same per-task fetch with no Accept header; JSON is the expected default.
 */
@Test
public void testTaskIdDefault() throws Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        for (Task task : jobsMap.get(id).getTasks().values()) {
            String tid = MRApps.toString(task.getID());
            Response response = r.path("ws").path("v1").path("history")
                .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid).request()
                .get(Response.class);
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject json = response.readEntity(JSONObject.class);
            assertEquals(1, json.length(), "incorrect number of elements");
            JSONObject info = json.getJSONObject("task");
            verifyHsSingleTask(info, task);
        }
    }
}
/**
 * A malformed task id ("bogustaskid") must yield HTTP 404 with a
 * RemoteException body whose message embeds the task-id regex parse failure.
 */
@Test
public void testTaskIdBogus() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String tid = "bogustaskid";
        try {
            // Re-throwing from the response forces control into the catch
            // block where the error payload is inspected.
            Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
                .path(jobId).path("tasks").path(tid).request().get();
            throw new NotFoundException(response);
        } catch (NotFoundException ue) {
            Response response = ue.getResponse();
            assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject msg = response.readEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals(3, exception.length(), "incorrect number of elements");
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringEqual("exception message",
                "TaskId string : " +
                "bogustaskid is not properly formed" +
                "\nReason: java.util.regex.Matcher[pattern=" +
                TaskID.TASK_ID_REGEX + " region=0,11 lastmatch=]", message);
            WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname",
                "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}

/**
 * A well-formed but unknown task id must also yield 404, with a "task not
 * found" message rather than a parse error.
 */
@Test
public void testTaskIdNonExist() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String tid = "task_0_0000_m_000000";
        try {
            Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
                .path(jobId).path("tasks").path(tid).request().get();
            throw new NotFoundException(response);
        } catch (NotFoundException ue) {
            Response response = ue.getResponse();
            assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject msg = response.readEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals(3, exception.length(), "incorrect number of elements");
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringMatch("exception message",
                "task not found with id task_0_0000_m_000000", message);
            WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname",
                "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}
/**
 * Task id with an invalid type character ('d' instead of 'm'/'r'):
 * 404 with a parse-failure message quoting TASK_ID_REGEX.
 */
@Test
public void testTaskIdInvalid() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String tid = "task_0_0000_d_000000";
        try {
            // Re-throwing from the response forces control into the catch
            // block where the error payload is inspected.
            Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
                .path(jobId).path("tasks").path(tid).request().get();
            throw new NotFoundException(response);
        } catch (NotFoundException ue) {
            Response response = ue.getResponse();
            assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject msg = response.readEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals(3, exception.length(), "incorrect number of elements");
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringEqual("exception message",
                "TaskId string : " +
                "task_0_0000_d_000000 is not properly formed" +
                "\nReason: java.util.regex.Matcher[pattern=" +
                TaskID.TASK_ID_REGEX + " region=0,20 lastmatch=]", message);
            WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname",
                "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}

/**
 * Task id missing a component ("task_0000_m_000000"): same 404 parse-failure
 * contract.
 */
@Test
public void testTaskIdInvalid2() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String tid = "task_0000_m_000000";
        try {
            Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
                .path(jobId).path("tasks").path(tid).request().get();
            throw new NotFoundException(response);
        } catch (NotFoundException ue) {
            Response response = ue.getResponse();
            assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject msg = response.readEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals(3, exception.length(), "incorrect number of elements");
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringEqual("exception message",
                "TaskId string : " +
                "task_0000_m_000000 is not properly formed" +
                "\nReason: java.util.regex.Matcher[pattern=" +
                TaskID.TASK_ID_REGEX + " region=0,18 lastmatch=]", message);
            WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname",
                "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}

/**
 * Truncated task id ("task_0_0000_m", no task number): same 404 parse-failure
 * contract.
 */
@Test
public void testTaskIdInvalid3() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
        String jobId = MRApps.toString(id);
        String tid = "task_0_0000_m";
        try {
            Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
                .path(jobId).path("tasks").path(tid).request().get();
            throw new NotFoundException(response);
        } catch (NotFoundException ue) {
            Response response = ue.getResponse();
            assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
            assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
                response.getMediaType().toString());
            JSONObject msg = response.readEntity(JSONObject.class);
            JSONObject exception = msg.getJSONObject("RemoteException");
            assertEquals(3, exception.length(), "incorrect number of elements");
            String message = exception.getString("message");
            String type = exception.getString("exception");
            String classname = exception.getString("javaClassName");
            WebServicesTestUtils.checkStringEqual("exception message",
                "TaskId string : " +
                "task_0_0000_m is not properly formed" +
                "\nReason: java.util.regex.Matcher[pattern=" +
                TaskID.TASK_ID_REGEX + " region=0,13 lastmatch=]", message);
            WebServicesTestUtils.checkStringMatch("exception type",
                "NotFoundException", type);
            WebServicesTestUtils.checkStringMatch("exception classname",
                "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
    }
}
@Test
public void testTaskIdXML() throws JSONException, Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.request(MediaType.APPLICATION_XML).get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("task");
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyHsSingleTaskXML(element, task);
}
}
}
}
public void verifyHsSingleTask(JSONObject info, Task task)
throws JSONException {
assertEquals(9, info.length(), "incorrect number of elements");
verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
info.getString("type"), info.getString("successfulAttempt"),
info.getLong("startTime"), info.getLong("finishTime"),
info.getLong("elapsedTime"), (float) info.getDouble("progress"));
}
public void verifyHsTask(JSONArray arr, Job job, String type)
throws JSONException {
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
boolean found = false;
if (type != null && task.getType() == MRApps.taskType(type)) {
for (int i = 0; i < arr.length(); i++) {
JSONObject info = arr.getJSONObject(i);
if (tid.matches(info.getString("id"))) {
found = true;
verifyHsSingleTask(info, task);
}
}
assertTrue(found, "task with id: " + tid + " not in web service output");
}
}
}
  /**
   * Compares the reported task fields from a web service response with the
   * values from the task's own {@link TaskReport}.
   *
   * Assertion order matters for failure diagnostics, so the checks are kept
   * in their original sequence: id, type, state, then the numeric fields.
   */
  public void verifyTaskGeneric(Task task, String id, String state,
      String type, String successfulAttempt, long startTime, long finishTime,
      long elapsedTime, float progress) {
    TaskId taskid = task.getID();
    String tid = MRApps.toString(taskid);
    TaskReport report = task.getReport();
    WebServicesTestUtils.checkStringMatch("id", tid, id);
    WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
        type);
    WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
        .toString(), state);
    // not easily checked without duplicating logic, just make sure its here
    assertNotNull(successfulAttempt, "successfulAttempt null");
    assertEquals(report.getStartTime(), startTime, "startTime wrong");
    assertEquals(report.getFinishTime(), finishTime, "finishTime wrong");
    // elapsedTime is derived rather than read from the report, so verify it
    // is internally consistent with the reported start/finish times.
    assertEquals(finishTime - startTime, elapsedTime, "elapsedTime wrong");
    // report progress is a 0..1 fraction; the web service exposes a percent.
    assertEquals(report.getProgress() * 100, progress, 1e-3f, "progress wrong");
  }
public void verifyHsSingleTaskXML(Element element, Task task) {
verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
WebServicesTestUtils.getXmlString(element, "state"),
WebServicesTestUtils.getXmlString(element, "type"),
WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
WebServicesTestUtils.getXmlLong(element, "startTime"),
WebServicesTestUtils.getXmlLong(element, "finishTime"),
WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
WebServicesTestUtils.getXmlFloat(element, "progress"));
}
public void verifyHsTaskXML(NodeList nodes, Job job) {
assertEquals(2, nodes.getLength(), "incorrect number of elements");
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
boolean found = false;
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
found = true;
verifyHsSingleTaskXML(element, task);
}
}
assertTrue(found, "task with id: " + tid + " not in web service output");
}
}
@Test
public void testTaskIdCounters() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters").request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyHsJobTaskCounters(info, task);
}
}
}
@Test
public void testTaskIdCountersSlash() throws Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters/").request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyHsJobTaskCounters(info, task);
}
}
}
@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters").request().get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyHsJobTaskCounters(info, task);
}
}
}
@Test
public void testJobTaskCountersXML() throws Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters").request(MediaType.APPLICATION_XML)
.get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList info = dom.getElementsByTagName("jobTaskCounters");
verifyHsTaskCountersXML(info, task);
}
}
}
public void verifyHsJobTaskCounters(JSONObject info, Task task)
throws JSONException {
assertEquals(2, info.length(), "incorrect number of elements");
WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
info.getString("id"));
// just do simple verification of fields - not data is correct
// in the fields
JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
for (int i = 0; i < counterGroups.length(); i++) {
JSONObject counterGroup = counterGroups.getJSONObject(i);
String name = counterGroup.getString("counterGroupName");
assertTrue((name != null && !name.isEmpty()), "name not set");
JSONArray counters = counterGroup.getJSONArray("counter");
for (int j = 0; j < counters.length(); j++) {
JSONObject counter = counters.getJSONObject(j);
String counterName = counter.getString("name");
assertTrue((counterName != null && !counterName.isEmpty()), "name not set");
long value = counter.getLong("value");
assertTrue(value >= 0, "value >= 0");
}
}
}
public void verifyHsTaskCountersXML(NodeList nodes, Task task) {
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
WebServicesTestUtils.checkStringMatch("id",
MRApps.toString(task.getID()),
WebServicesTestUtils.getXmlString(element, "id"));
// just do simple verification of fields - not data is correct
// in the fields
NodeList groups = element.getElementsByTagName("taskCounterGroup");
for (int j = 0; j < groups.getLength(); j++) {
Element counters = (Element) groups.item(j);
assertNotNull(counters, "should have counters in the web service info");
String name = WebServicesTestUtils.getXmlString(counters,
"counterGroupName");
assertTrue((name != null && !name.isEmpty()), "name not set");
NodeList counterArr = counters.getElementsByTagName("counter");
for (int z = 0; z < counterArr.getLength(); z++) {
Element counter = (Element) counterArr.item(z);
String counterName = WebServicesTestUtils.getXmlString(counter,
"name");
assertTrue((counterName != null && !counterName.isEmpty()),
"counter name not set");
long value = WebServicesTestUtils.getXmlLong(counter, "value");
assertTrue(value >= 0, "value not >= 0");
}
}
}
}
}
| JerseyBinder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.